diff --git a/.github/actions/workspace-release/action.yml b/.github/actions/workspace-release/action.yml new file mode 100644 index 0000000000..44797e2825 --- /dev/null +++ b/.github/actions/workspace-release/action.yml @@ -0,0 +1,111 @@ +name: workspace-release +description: "Dry-run or publish Rust workspace crates using cargo" + +inputs: + mode: + description: "Release mode: dry-run or publish" + required: true + default: "dry-run" + verify-branch-head: + description: "If true, ensure the triggering SHA matches the release branch HEAD" + required: false + default: "false" + release-branch: + description: "Branch to verify HEAD against (used when verify-branch-head is true)" + required: false + default: "main" + +runs: + using: "composite" + steps: + # Optional: guard that release happens from latest release branch + - name: Verify tag matches release branch HEAD + if: ${{ inputs.verify-branch-head == 'true' }} + shell: bash + run: | + git fetch origin ${{ inputs.release-branch }} --depth=1 + branch_sha="$(git rev-parse origin/${{ inputs.release-branch }})" + tag_sha="$(git rev-parse HEAD)" + + echo "branch_sha=$branch_sha" + echo "tag_sha=$tag_sha" + + if [ "$branch_sha" != "$tag_sha" ]; then + echo "::error::The release/tag commit does not match origin/${{ inputs.release-branch }} HEAD. Aborting." 
+ exit 1 + fi + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@stable + + - name: Cache cargo registry and git index + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: | + ${{ runner.os }}-cargo- + + - name: Cleanup large tools for build space + uses: ./.github/actions/cleanup-runner + + # Install cargo-msrv for MSRV checks + # Using binstall with --force to avoid stale cached binaries (see PR #2234) + - name: Install cargo-binstall + uses: taiki-e/install-action@v2 + with: + tool: cargo-binstall + + - name: Install cargo-msrv + shell: bash + run: cargo binstall --no-confirm --force cargo-msrv + + # Keep your existing MSRV check + # PATH export required for check-msrv.sh subprocess (see PR #2234) + - name: Check MSRV + shell: bash + run: | + export PATH="$HOME/.cargo/bin:$PATH" + chmod +x scripts/check-msrv.sh + ./scripts/check-msrv.sh + + # Clean packaging directory to avoid stale/corrupted tmp-registry state + - name: Clean packaging directory + shell: bash + run: | + echo "Cleaning target/package directory to ensure fresh state" + rm -rf target/package + + # Clear cargo registry to ensure fresh resolution during verification. + # This prevents issues where cached metadata from previous runs + # might interfere with workspace dependency feature resolution + # during the verification step (related to cargo#14283, cargo#14789). + # Specifically, this ensures the temp registry used during workspace + # publish verification doesn't conflict with cached crates.io data. 
+ - name: Clear cargo registry for fresh resolution + if: ${{ inputs.mode == 'dry-run' }} + shell: bash + run: | + echo "Clearing cargo registry for fresh resolution" + rm -rf ~/.cargo/registry/cache + rm -rf ~/.cargo/registry/index + rm -rf ~/.cargo/registry/src + + # Dry-run vs real publish + - name: Dry-run workspace publish + if: ${{ inputs.mode == 'dry-run' }} + shell: bash + run: | + echo "Running cargo publish --workspace --dry-run" + cargo publish --workspace --dry-run + + - name: Publish workspace crates + if: ${{ inputs.mode == 'publish' }} + shell: bash + env: + CARGO_REGISTRY_TOKEN: ${{ env.CARGO_REGISTRY_TOKEN }} + run: | + echo "Publishing workspace crates to crates.io" + cargo publish --workspace diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8da873adce..884a7be16a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,6 +12,7 @@ on: branches: [main, next] pull_request: types: [opened, reopened, synchronize] + merge_group: permissions: contents: read @@ -66,4 +67,4 @@ jobs: # cache hits and cache evictions (github has a 10GB cache limit). 
save-if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/next' }} - name: build benches - run: cargo bench --no-run + run: cargo build --benches diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c7ffceecce..dee254de61 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,6 +12,7 @@ on: branches: [main, next] pull_request: types: [opened, reopened, synchronize] + merge_group: permissions: contents: read diff --git a/.github/workflows/release-plz-dry-run.yml b/.github/workflows/release-plz-dry-run.yml deleted file mode 100644 index be787b1270..0000000000 --- a/.github/workflows/release-plz-dry-run.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: Release-plz (dry-run) - -permissions: - contents: read - -on: - push: - branches: [main, next] - -concurrency: - group: "${{ github.workflow }} @ ${{ github.ref }}" - cancel-in-progress: true - -jobs: - release-plz-dry-run-release: - name: Release-plz dry-run - runs-on: ubuntu-latest - if: ${{ github.repository_owner == '0xMiden' }} - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Cleanup large tools for build space - uses: ./.github/actions/cleanup-runner - - name: Install dependencies - run: sudo apt-get update && sudo apt-get install -y jq - - name: Update Rust toolchain - run: rustup update --no-self-update - - uses: Swatinem/rust-cache@v2 - - uses: taiki-e/install-action@v2 - with: - tool: cargo-binstall - - name: Install cargo-msrv - run: cargo binstall --no-confirm --force cargo-msrv - - name: Check MSRV for each workspace member - run: | - export PATH="$HOME/.cargo/bin:$PATH" - ./scripts/check-msrv.sh - - name: Run release-plz - uses: release-plz/action@v0.5 - with: - command: release --dry-run - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.github/workflows/release-plz.yml b/.github/workflows/release-plz.yml deleted file mode 100644 
index a9de6bdeb8..0000000000 --- a/.github/workflows/release-plz.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Release-plz (main) - -permissions: - contents: read - -on: - release: - types: [published] - -jobs: - release-plz-release: - name: Release-plz release - runs-on: ubuntu-latest - if: ${{ github.repository_owner == '0xMiden' }} - steps: - - name: Checkout repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - ref: main - # Ensure the release tag refers to the latest commit on main. - # Compare the commit SHA that triggered the workflow with the HEAD of the branch we just - # checked out (main). - - name: Verify release was triggered from main HEAD - run: | - tag_sha="${{ github.sha }}" - main_sha="$(git rev-parse HEAD)" - - echo "Tag points to: $tag_sha" - echo "Current main HEAD is: $main_sha" - - if [ "$tag_sha" != "$main_sha" ]; then - echo "::error::The release tag was not created from the latest commit on main. Aborting." - exit 1 - fi - echo "Release tag matches main HEAD — continuing." 
- - name: Cleanup large tools for build space - uses: ./.github/actions/cleanup-runner - - name: Install dependencies - run: sudo apt-get update && sudo apt-get install -y jq - - name: Update Rust toolchain - run: rustup update --no-self-update - - uses: Swatinem/rust-cache@v2 - - uses: taiki-e/install-action@v2 - with: - tool: cargo-binstall - - name: Install cargo-msrv - run: cargo binstall --no-confirm --force cargo-msrv - - name: Check MSRV for each workspace member - run: | - export PATH="$HOME/.cargo/bin:$PATH" - ./scripts/check-msrv.sh - - name: Run release-plz - uses: release-plz/action@v0.5 - with: - command: release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f08787e669..da076ca883 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -11,7 +11,8 @@ on: push: branches: [main, next] pull_request: - types: [opened, reopened, synchronize] + types: [opened, reopened, synchronize] + merge_group: permissions: contents: read @@ -31,9 +32,9 @@ jobs: - name: Install rust run: rustup update --no-self-update - name: Build tests - run: make test-build + run: make test-release-build - name: test - run: make test + run: make test-release # runs without debug mode doc-tests: name: doc-tests diff --git a/.github/workflows/workspace-dry-run.yml b/.github/workflows/workspace-dry-run.yml new file mode 100644 index 0000000000..8d23123df3 --- /dev/null +++ b/.github/workflows/workspace-dry-run.yml @@ -0,0 +1,34 @@ +name: Workspace release dry-run + +on: + push: + branches: + - main + - next + - 'release/**' + +permissions: + contents: read + id-token: write # Required for OIDC token exchange + +concurrency: + group: "${{ github.workflow }} @ ${{ github.ref }}" + cancel-in-progress: true + +jobs: + release-dry-run: + if: ${{ github.repository_owner == '0xMiden' }} + runs-on: ubuntu-latest + + steps: + - name: 
Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Dry-run workspace release + uses: ./.github/actions/workspace-release + with: + mode: "dry-run" + verify-branch-head: "false" + # ref left blank: uses the pushed ref diff --git a/.github/workflows/workspace-publish.yml b/.github/workflows/workspace-publish.yml new file mode 100644 index 0000000000..f9f6d9e8ee --- /dev/null +++ b/.github/workflows/workspace-publish.yml @@ -0,0 +1,35 @@ +name: Publish workspace to crates.io + +on: + release: + types: [published] + +permissions: + contents: read + id-token: write # Required for OIDC token exchange + +jobs: + publish: + if: ${{ github.repository_owner == '0xMiden' }} + runs-on: ubuntu-latest + environment: release # Optional: for enhanced security + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.release.target_commitish }} + + - name: Authenticate with crates.io + uses: rust-lang/crates-io-auth-action@v1 + id: auth + + - name: Publish workspace crates + uses: ./.github/actions/workspace-release + with: + mode: "publish" + verify-branch-head: "true" + release-branch: ${{ github.event.release.target_commitish }} + env: + CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }} diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..0333b7b402 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,10 @@ +[submodule "crates/miden-agglayer/solidity-compat/lib/forge-std"] + path = crates/miden-agglayer/solidity-compat/lib/forge-std + url = https://github.com/foundry-rs/forge-std +[submodule "crates/miden-agglayer/solidity-compat/lib/agglayer-contracts"] + path = crates/miden-agglayer/solidity-compat/lib/agglayer-contracts + url = https://github.com/agglayer/agglayer-contracts +[submodule "crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable"] + path = crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable + url = 
https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable.git + branch = release-v4.9 diff --git a/.taplo.toml b/.taplo.toml index b735451f6e..b10bcd148d 100644 --- a/.taplo.toml +++ b/.taplo.toml @@ -1,3 +1,5 @@ +exclude = ["crates/miden-agglayer/solidity-compat/lib/*"] + [formatting] align_entries = true column_width = 120 diff --git a/CHANGELOG.md b/CHANGELOG.md index 8a1d9120e5..46be84c4e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,100 @@ # Changelog +## 0.14.0 (TBD) + +### Features + +- Made `NoteMetadataHeader` and `NoteMetadata::to_header()` public, added `NoteMetadata::from_header()` constructor, and exported `NoteMetadataHeader` from the `note` module ([#2561](https://github.com/0xMiden/protocol/pull/2561)). +- Introduce NOTE_MAX_SIZE (32 KiB) and enforce it on individual output notes ([#2205](https://github.com/0xMiden/miden-base/pull/2205)) +- Added AggLayer faucet registry to bridge account with conversion metadata, `CONFIG_AGG_BRIDGE` note for faucet registration, and FPI-based asset conversion in `bridge_out` ([#2426](https://github.com/0xMiden/miden-base/pull/2426)). +- Enable `CodeBuilder` to add advice map entries to compiled scripts ([#2275](https://github.com/0xMiden/miden-base/pull/2275)). +- Added `BlockNumber::MAX` constant to represent the maximum block number ([#2324](https://github.com/0xMiden/miden-base/pull/2324)). +- Added single-word `Array` standard ([#2203](https://github.com/0xMiden/miden-base/pull/2203)). +- Added B2AGG and UPDATE_GER note attachment target checks ([#2334](https://github.com/0xMiden/miden-base/pull/2334)). +- Added double-word array data structure abstraction over storage maps ([#2299](https://github.com/0xMiden/miden-base/pull/2299)). +- Implemented verification of AggLayer deposits (claims) against GER ([#2295](https://github.com/0xMiden/miden-base/pull/2295), [#2288](https://github.com/0xMiden/miden-base/pull/2288)). 
+- Added `SignedBlock` struct ([#2355](https://github.com/0xMiden/miden-base/pull/2355)). +- Added `PackageKind` and `ProcedureExport` ([#2358](https://github.com/0xMiden/miden-base/pull/2358)). +- Added `AccountTargetNetworkNote` type and `NetworkNoteExt` trait with `is_network_note()` / `as_account_target_network_note()` helpers ([#2365](https://github.com/0xMiden/miden-base/pull/2365)). +- Changed GER storage to a map ([#2388](https://github.com/0xMiden/miden-base/pull/2388)). +- Implemented `assert_valid_ger` procedure for verifying GER against storage ([#2388](https://github.com/0xMiden/miden-base/pull/2388)). +- Added `P2idNoteStorage` and `P2ideNoteStorage` ([#2389](https://github.com/0xMiden/miden-base/pull/2389)). +- [BREAKING] Added `get_asset` and `get_initial_asset` kernel procedures and removed `get_balance`, `get_initial_balance` and `has_non_fungible_asset` kernel procedures ([#2369](https://github.com/0xMiden/miden-base/pull/2369)). +- Added `p2id::new` MASM constructor procedure for creating P2ID notes from MASM code ([#2381](https://github.com/0xMiden/miden-base/pull/2381)). +- Introduced `TokenMetadata` type to encapsulate fungible faucet metadata ([#2344](https://github.com/0xMiden/miden-base/issues/2344)). +- Increased `TokenSymbol` max allowed length from 6 to 12 uppercase characters ([#2420](https://github.com/0xMiden/miden-base/pull/2420)). +- Added `StandardNote::from_script_root()` and `StandardNote::name()` methods, and exposed `NoteType` `PUBLIC`/`PRIVATE` masks as public constants ([#2411](https://github.com/0xMiden/miden-base/pull/2411)). +- Resolve standard note scripts directly in `TransactionExecutorHost` instead of querying the data store ([#2417](https://github.com/0xMiden/miden-base/pull/2417)). +- Added `DEFAULT_TAG` constant to `miden::standards::note_tag` MASM module ([#2482](https://github.com/0xMiden/miden-base/pull/2482)). 
+- Added `NoteExecutionHint` variant constants (`NONE`, `ALWAYS`, `AFTER_BLOCK`, `ON_BLOCK_SLOT`) to `miden::standards::note::execution_hint` MASM module ([#2493](https://github.com/0xMiden/miden-base/pull/2493)). +- Added `Ownable2Step` account component with two-step ownership transfer (`transfer_ownership`, `accept_ownership`, `renounce_ownership`) and `owner`, `nominated_owner` procedures ([#2292](https://github.com/0xMiden/miden-base/pull/2292)). +- Added PSM authentication procedures and integrated them into `AuthMultisig` ([#2527](https://github.com/0xMiden/protocol/pull/2527)). +- Added `CodeBuilder::with_warnings_as_errors()` to promote assembler warning diagnostics to errors ([#2558](https://github.com/0xMiden/protocol/pull/2558)). +- Added `MockChain::add_pending_batch()` to allow submitting user batches directly ([#2565](https://github.com/0xMiden/protocol/pull/2565)). +- Added `create_fungible_key` for construction of fungible asset keys ([#2575](https://github.com/0xMiden/protocol/pull/2575)). +- Implemented the `on_before_asset_added_to_account` asset callback ([#2571](https://github.com/0xMiden/protocol/pull/2571)). +- Implemented the `on_before_asset_added_to_note` asset callback ([#2595](https://github.com/0xMiden/protocol/pull/2595)). +- Added `InputNoteCommitment::from_parts()` for construction of input note commitments from a nullifier and optional note header ([#2588](https://github.com/0xMiden/protocol/pull/2588)). +- Added `bool` schema type to the type registry and updated ACL auth component to use it for boolean config fields ([#2591](https://github.com/0xMiden/protocol/pull/2591)). +- Added `component_metadata()` to all account components to expose their metadata ([#2596](https://github.com/0xMiden/protocol/pull/2596)). + +### Changes + +- [BREAKING] Removed `NoteAssets::add_asset`; `OutputNoteBuilder` now accumulates assets in a `Vec` and computes the commitment only when `build()` is called, avoiding rehashing on every asset addition. 
([#2577](https://github.com/0xMiden/protocol/pull/2577)) +- [BREAKING] Made `supported_types` a required parameter of `AccountComponentMetadata::new()`; removed `with_supported_type`, `with_supported_types`, `with_supports_all_types`, and `with_supports_regular_types` builder methods; added `AccountType::all()` and `AccountType::regular()` helpers ([#2554](https://github.com/0xMiden/protocol/pull/2554)). +- [BREAKING] Migrated to miden-vm 0.21 and miden-crypto 0.22 ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- [BREAKING] The stack orientation changed from big-endian to little-endian - see PR description ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- [BREAKING] The native hash function changed from RPO256 to Poseidon2 - see PR description ([#2508](https://github.com/0xMiden/miden-base/pull/2508)). +- Introduced `StorageMapKey` and `StorageMapKeyHash` Word wrappers for type-safe storage map key handling ([#2431](https://github.com/0xMiden/miden-base/pull/2431)). +- Restructured `miden-agglayer/asm` directory to separate bridge and faucet into per-component libraries, preventing cross-component procedure exposure ([#2294](https://github.com/0xMiden/miden-base/issues/2294)). +- Prefixed standard account component names with `miden::standards::components` ([#2400](https://github.com/0xMiden/miden-base/pull/2400)). +- Made kernel procedure offset constants public and replaced accessor procedures with direct constant usage ([#2375](https://github.com/0xMiden/miden-base/pull/2375)). +- [BREAKING] Made `AccountComponentMetadata` a required parameter of `AccountComponent::new()`; removed `with_supported_type`, `with_supports_all_types`, and `with_metadata` methods from `AccountComponent`; simplified `AccountComponentMetadata::new()` to take just `name`; renamed `AccountComponentTemplateError` to `ComponentMetadataError` ([#2373](https://github.com/0xMiden/miden-base/pull/2373), [#2395](https://github.com/0xMiden/miden-base/pull/2395)). 
+- Fixed MASM inline comment casing to adhere to commenting conventions ([#2398](https://github.com/0xMiden/miden-base/pull/2398)). +- [BREAKING] Removed `ProvenTransactionBuilder` in favor of `ProvenTransaction::new()` constructor ([#2567](https://github.com/0xMiden/miden-base/pull/2567)). +- Removed redundant note storage item count from advice map ([#2376](https://github.com/0xMiden/miden-base/pull/2376)). +- Moved `NoteExecutionHint` to `miden-standards` ([#2378](https://github.com/0xMiden/miden-base/pull/2378)). +- Added `miden::protocol::auth` module with public auth event constants ([#2377](https://github.com/0xMiden/miden-base/pull/2377)). +- Replaced auth event constant workarounds with direct imports now that `miden-assembly` v0.20.6 supports it ([#2404](https://github.com/0xMiden/miden-base/pull/2404)). +- [BREAKING] Prefixed transaction kernel events with `miden::protocol` ([#2364](https://github.com/0xMiden/miden-base/pull/2364)). +- [BREAKING] Simplified `NoteMetadata::new()` constructor to not require tag parameter; tag defaults to zero and can be set via `with_tag()` builder method ([#2384](https://github.com/0xMiden/miden-base/pull/2384)). +- [BREAKING] Renamed `WellKnownComponent` to `StandardAccountComponent`, `WellKnownNote` to `StandardNote`, and `WellKnownNoteAttachment` to `StandardNoteAttachment` ([#2332](https://github.com/0xMiden/miden-base/pull/2332)). +- Skip requests to the `DataStore` for asset vault witnesses which are already in transaction inputs ([#2298](https://github.com/0xMiden/miden-base/pull/2298)). +- [BREAKING] Refactored `TransactionAuthenticator::get_public_key()` method to return `Arc` instead of `&PublicKey` ([#2304](https://github.com/0xMiden/miden-base/pull/2304)). 
+- [BREAKING] Renamed `NoteInputs` to `NoteStorage` to better reflect that values are stored data associated with a note rather than inputs ([#1662](https://github.com/0xMiden/miden-base/issues/1662), [#2316](https://github.com/0xMiden/miden-base/issues/2316)). +- Removed `NoteType::Encrypted` ([#2315](https://github.com/0xMiden/miden-base/pull/2315)). +- [BREAKING] Changed note scripts to be compiled as libraries with `@note_script` annotation for marking the entrypoint procedure ([#2339](https://github.com/0xMiden/miden-base/issues/2339), [#2374](https://github.com/0xMiden/miden-base/pull/2374)). +- Remove protocol-reserved faucet sysdata storage slot ([#2335](https://github.com/0xMiden/miden-base/pull/2335)). +- Updated note tag length to support up to 32 bits ([#2329](https://github.com/0xMiden/miden-base/pull/2329)). +- [BREAKING] Updated note tag length to support up to 32 bits ([#2329](https://github.com/0xMiden/miden-base/pull/2329)). +- [BREAKING] Moved standard note code into individual note modules ([#2363](https://github.com/0xMiden/miden-base/pull/2363)). +- [BREAKING] Added `miden::standards::note_tag` module for account target note tags ([#2366](https://github.com/0xMiden/miden-base/pull/2366)). +- [BREAKING] Refactored assets in the tx kernel and `miden::protocol` from one to two words, i.e. `ASSET` becomes `ASSET_KEY` and `ASSET_VALUE` ([#2396](https://github.com/0xMiden/miden-base/pull/2396), [#2410](https://github.com/0xMiden/miden-base/pull/2410)). +- [BREAKING] Rename `miden::protocol::asset::build_fungible_asset` to `miden::protocol::asset::create_fungible_asset` ([#2410](https://github.com/0xMiden/miden-base/pull/2410)). +- [BREAKING] Rename `miden::protocol::asset::build_non_fungible_asset` to `miden::protocol::asset::create_non_fungible_asset` ([#2410](https://github.com/0xMiden/miden-base/pull/2410)). +- [BREAKING] Change the layout of fungible and non-fungible assets ([#2437](https://github.com/0xMiden/miden-base/pull/2437)). 
+- [BREAKING] Refactored assets in the tx kernel from one to two words, i.e. `ASSET` becomes `ASSET_KEY` and `ASSET_VALUE` ([#2396](https://github.com/0xMiden/miden-base/pull/2396)). +- Unified the underlying representation of `ExitRoot` and `SmtNode` and use type aliases ([#2387](https://github.com/0xMiden/miden-base/pull/2387)). +- [BREAKING] Moved padding to the end of `CLAIM` `NoteStorage` layout ([#2405](https://github.com/0xMiden/miden-base/pull/2405)). +- [BREAKING] Consolidated authentication components ([#2390](https://github.com/0xMiden/miden-base/pull/2390)). +- [BREAKING] Refactored account ID and nonce memory and advice stack layout ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). +- [BREAKING] Removed `hash_account` ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). +- [BREAKING] Renamed `AccountHeader::commitment`, `Account::commitment` and `PartialAccount::commitment` to `to_commitment` ([#2442](https://github.com/0xMiden/miden-base/pull/2442)). +- [BREAKING] Remove `BlockSigner` trait ([#2447](https://github.com/0xMiden/miden-base/pull/2447)). +- Updated account schema commitment construction to accept borrowed schema iterators; added extension trait to enable `AccountBuilder::with_schema_commitment()` helper ([#2419](https://github.com/0xMiden/miden-base/pull/2419)). +- Introducing a dedicated AccountIdKey type to unify and centralize all AccountId → SMT and advice-map key conversions ([#2495](https://github.com/0xMiden/miden-base/pull/2495)). +- [BREAKING] Renamed `SchemaTypeId` to `SchemaType` ([#2494](https://github.com/0xMiden/miden-base/pull/2494)). +- Updated stale `miden-base` references to `protocol` across docs, READMEs, code comments, and Cargo.toml repository URL ([#2503](https://github.com/0xMiden/protocol/pull/2503)). +- [BREAKING] Reverse the order of the transaction summary on the stack ([#2512](https://github.com/0xMiden/miden-base/pull/2512)). 
+- [BREAKING] Use `@auth_script` MASM attribute instead of `auth_` prefix to identify authentication procedures in account components ([#2534](https://github.com/0xMiden/protocol/pull/2534)). +- [BREAKING] Changed `TransactionId` to include fee asset in hash computation, making it commit to entire `TransactionHeader` contents. +- Explicitly use `get_native_account_active_storage_slots_ptr` in `account::set_item` and `account::set_map_item`. +- Added Ownable2Step as an Account Component ([#2572](https://github.com/0xMiden/protocol/pull/2572)) +- [BREAKING] Introduced `PrivateNoteHeader` for output notes and removed `RawOutputNote::Header` variant ([#2569](https://github.com/0xMiden/protocol/pull/2569)). +- [BREAKING] Changed `asset::create_fungible_asset` and `faucet::create_fungible_asset` signature to take `enable_callbacks` flag ([#2571](https://github.com/0xMiden/protocol/pull/2571)). + +- [BREAKING] Fixed `TokenSymbol::try_from(Felt)` to reject values below `MIN_ENCODED_VALUE`; implemented `Display` for `TokenSymbol` replacing the fallible `to_string()` method; removed `Default` derive ([#2464](https://github.com/0xMiden/protocol/issues/2464)). + ## 0.13.3 (2026-01-27) - Fixed `CLAIM` note creation to use `NetworkAccountTarget` attachment ([#2352](https://github.com/0xMiden/miden-base/pull/2352)). @@ -21,12 +116,15 @@ - [BREAKING] Refactored storage slots to be accessed by names instead of indices ([#1987](https://github.com/0xMiden/miden-base/pull/1987), [#2025](https://github.com/0xMiden/miden-base/pull/2025), [#2149](https://github.com/0xMiden/miden-base/pull/2149), [#2150](https://github.com/0xMiden/miden-base/pull/2150), [#2153](https://github.com/0xMiden/miden-base/pull/2153), [#2154](https://github.com/0xMiden/miden-base/pull/2154), [#2160](https://github.com/0xMiden/miden-base/pull/2160), [#2161](https://github.com/0xMiden/miden-base/pull/2161), [#2170](https://github.com/0xMiden/miden-base/pull/2170)). 
- [BREAKING] Allowed account components to share identical account code procedures ([#2164](https://github.com/0xMiden/miden-base/pull/2164)). - Add `AccountId::parse()` helper function to parse both hex and bech32 formats ([#2223](https://github.com/0xMiden/miden-base/pull/2223)). +- Add Keccak-based MMR frontier structure to the Agglayer library ([#2245](https://github.com/0xMiden/miden-base/pull/2245)). - Add `read_foreign_account_inputs()`, `read_vault_asset_witnesses()`, and `read_storage_map_witness()` for `TransactionInputs` ([#2246](https://github.com/0xMiden/miden-base/pull/2246)). -- [BREAKING] Introduced `NoteAttachment` as part of `NoteMetadata` and remove `aux` and `execution_hint` ([#2249](https://github.com/0xMiden/miden-base/pull/2249), [#2252](https://github.com/0xMiden/miden-base/pull/2252), [#2260](https://github.com/0xMiden/miden-base/pull/2260), [#2268](https://github.com/0xMiden/miden-base/pull/2268), [#2279](https://github.com/0xMiden/miden-base/pull/2279)). +- [BREAKING] Introduce `NoteAttachment` as part of `NoteMetadata` and remove `aux` and `execution_hint` ([#2249](https://github.com/0xMiden/miden-base/pull/2249), [#2252](https://github.com/0xMiden/miden-base/pull/2252), [#2260](https://github.com/0xMiden/miden-base/pull/2260), [#2268](https://github.com/0xMiden/miden-base/pull/2268), [#2279](https://github.com/0xMiden/miden-base/pull/2279)). +- Introduce standard `NetworkAccountTarget` attachment for use in network transactions which replaces `NoteTag::NetworkAccount` ([#2257](https://github.com/0xMiden/miden-base/pull/2257)). +- Add a foundry test suite for verifying AggLayer contracts compatibility ([#2312](https://github.com/0xMiden/miden-base/pull/2312)). - Added `AccountSchemaCommitment` component to expose account storage schema commitments ([#2253](https://github.com/0xMiden/miden-base/pull/2253)). 
-- Introduced standard `NetworkAccountTarget` attachment for use in network transactions which replaces `NoteTag::NetworkAccount` ([#2257](https://github.com/0xMiden/miden-base/pull/2257)). - Added an `AccountBuilder` extension trait to help build the schema commitment; added `AccountComponentMetadata` to `AccountComponent` ([#2269](https://github.com/0xMiden/miden-base/pull/2269)). - Added `miden::standards::access::ownable` standard module for component ownership management, and integrated it into the `network_fungible` faucet (including new tests). ([#2228](https://github.com/0xMiden/miden-base/pull/2228)). +- [BREAKING] Add `leaf_value` to `CLAIM` note inputs ([#2290](https://github.com/0xMiden/miden-base/pull/2290)). ### Changes @@ -52,6 +150,7 @@ - [BREAKING] Refactored `AccountStorageDelta` to use a new `StorageSlotDelta` type ([#2182](https://github.com/0xMiden/miden-base/pull/2182)). - [BREAKING] Removed OLD_MAP_ROOT from being returned when calling [`native_account::set_map_item`](crates/miden-lib/asm/miden/native_account.masm) ([#2194](https://github.com/0xMiden/miden-base/pull/2194)). - [BREAKING] Refactored account component templates into `StorageSchema` ([#2193](https://github.com/0xMiden/miden-base/pull/2193)). +- Added `StorageSchema::commitment()` ([#2244](https://github.com/0xMiden/miden-base/pull/2244)). - [BREAKING] Refactored account component templates into `AccountStorageSchema` ([#2193](https://github.com/0xMiden/miden-base/pull/2193)). - [BREAKING] Refactor note tags to be arbitrary `u32` values and drop previous validation ([#2219](https://github.com/0xMiden/miden-base/pull/2219)). - [BREAKING] Refactored `InitStorageData` to support native types ([#2230](https://github.com/0xMiden/miden-base/pull/2230)). 
diff --git a/Cargo.lock b/Cargo.lock index 3806cb3bb8..01f569efe3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -106,7 +106,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -117,17 +117,14 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.61.2", + "windows-sys", ] [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" -dependencies = [ - "backtrace", -] +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "arrayref" @@ -177,15 +174,6 @@ dependencies = [ "rustc-demangle", ] -[[package]] -name = "backtrace-ext" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537beee3be4a18fb023b570f80e3ae28003db9167a751266b259926e25539d50" -dependencies = [ - "backtrace", -] - [[package]] name = "base16ct" version = "0.2.0" @@ -234,6 +222,15 @@ dependencies = [ "tokio", ] +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + [[package]] name = "bit-set" version = "0.8.0" @@ -257,9 +254,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" [[package]] name = 
"blake3" @@ -286,15 +283,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.19.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytemuck" -version = "1.24.0" +version = "1.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" [[package]] name = "byteorder" @@ -310,9 +307,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.53" +version = "1.2.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" +checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" dependencies = [ "find-msvc-tools", "jobserver", @@ -390,18 +387,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.5.54" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" dependencies = [ "anstyle", "clap_lex", @@ -409,9 +406,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" [[package]] name = "color-eyre" @@ -605,7 +602,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -645,7 +642,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -732,18 +729,18 @@ dependencies = [ [[package]] name = "ena" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +checksum = "eabffdaee24bd1bf95c5ef7cec31260444317e72ea56c4c91750e8b7ee58d5f1" dependencies = [ "log", ] [[package]] name = "env_filter" -version = "0.1.4" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf3c259d255ca70051b30e2e95b5446cdb8949ac4cd22c0d7fd634d89f568e2" +checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" dependencies = [ "log", "regex", @@ -751,9 +748,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.8" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" +checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" dependencies = [ "anstream", "anstyle", @@ -779,7 +776,7 @@ checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -795,7 +792,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -832,9 +829,9 @@ 
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "find-msvc-tools" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "findshlibs" @@ -875,6 +872,12 @@ dependencies = [ "spin 0.9.8", ] +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "foldhash" version = "0.2.0" @@ -883,18 +886,18 @@ checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" [[package]] name = "fs-err" -version = "3.2.2" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf68cef89750956493a66a10f512b9e58d9db21f2a573c079c0bdf1207a54a7" +checksum = "73fde052dbfc920003cfd2c8e2c6e6d4cc7c1091538c3a24226cec0665ab08c0" dependencies = [ "autocfg", ] [[package]] name = "futures" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" dependencies = [ "futures-channel", "futures-core", @@ -906,9 +909,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" dependencies = [ "futures-core", "futures-sink", @@ -916,38 +919,38 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.31" +version = "0.3.32" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" [[package]] name = "futures-io" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" [[package]] name = "futures-macro" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "futures-sink" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" [[package]] name = "futures-task" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" [[package]] name = "futures-timer" @@ -957,16 +960,15 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" dependencies = [ "futures-core", "futures-macro", "futures-sink", "futures-task", 
"pin-project-lite", - "pin-utils", "slab", ] @@ -1018,11 +1020,24 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", + "r-efi 5.3.0", "wasip2", "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" +dependencies = [ + "cfg-if", + "libc", + "r-efi 6.0.0", + "wasip2", + "wasip3", +] + [[package]] name = "gimli" version = "0.28.1" @@ -1057,6 +1072,15 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash 0.1.5", +] + [[package]] name = "hashbrown" version = "0.16.1" @@ -1065,12 +1089,18 @@ checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.2.0", "rayon", "serde", "serde_core", ] +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "hermit-abi" version = "0.5.2" @@ -1101,6 +1131,12 @@ dependencies = [ "digest", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "indenter" version = "0.3.4" @@ -1114,7 +1150,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", + "serde", + "serde_core", ] [[package]] @@ -1152,15 +1190,9 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ 
"hermit-abi", "libc", - "windows-sys 0.61.2", + "windows-sys", ] -[[package]] -name = "is_ci" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" - [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -1202,9 +1234,9 @@ checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" [[package]] name = "jiff" -version = "0.2.18" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e67e8da4c49d6d9909fe03361f9b620f58898859f5c7aded68351e85e71ecf50" +checksum = "1a3546dc96b6d42c5f24902af9e2538e82e39ad350b0c766eb3fbf2d8f3d8359" dependencies = [ "jiff-static", "log", @@ -1215,13 +1247,13 @@ dependencies = [ [[package]] name = "jiff-static" -version = "0.2.18" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c84ee7f197eca9a86c6fd6cb771e55eb991632f15f2bc3ca6ec838929e6e78" +checksum = "2a8c8b344124222efd714b73bb41f8b5120b27a7cc1c75593a6ff768d9d05aa4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -1236,9 +1268,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" dependencies = [ "once_cell", "wasm-bindgen", @@ -1260,9 +1292,9 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ "cpufeatures", ] @@ -1304,28 +1336,28 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] -name = "libc" -version = "0.2.180" +name = "leb128fmt" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] -name = "libm" -version = "0.2.15" +name = "libc" +version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" [[package]] -name = "linux-raw-sys" -version = "0.4.15" +name = "libm" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "lock_api" @@ -1366,53 +1398,56 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memmap2" -version = "0.9.9" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +checksum = 
"714098028fe011992e1c3962653c96b2d578c4b4bce9036e15ff220319b1e0e3" dependencies = [ "libc", ] [[package]] name = "miden-agglayer" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "fs-err", "miden-agglayer", "miden-assembly", "miden-core", "miden-core-lib", + "miden-crypto", "miden-protocol", "miden-standards", "miden-utils-sync", + "primitive-types", "regex", + "thiserror", "walkdir", ] [[package]] name = "miden-air" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d819876b9e9b630e63152400e6df2a201668a9bdfd33d54d6806b9d7b992ff8" +checksum = "d8aa2b3bc95d9eece8b47edbc6621b5742e212b359ff6b82ebb813b3d9b28985" dependencies = [ "miden-core", + "miden-crypto", "miden-utils-indexing", "thiserror", - "winter-air", - "winter-prover", + "tracing", ] [[package]] name = "miden-assembly" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24c6a18e29c03141cf9044604390a00691c7342924ec865b4acfdd560ff41ede" +checksum = "89369e85051e14e21c52f8e38456b4db958151afb32a3cef0a522e04163ec5c2" dependencies = [ "env_logger", "log", @@ -1425,9 +1460,9 @@ dependencies = [ [[package]] name = "miden-assembly-syntax" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7458ff670f5a514bf972aa84d6e1851a4c4e9afa351f53b71bdc2218b99254b6" +checksum = "9069e6fa110d918662ce77eecfc3d7f906050023fad899f414fc63122e31b771" dependencies = [ "aho-corasick", "env_logger", @@ -1449,7 +1484,7 @@ dependencies = [ [[package]] name = "miden-block-prover" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "miden-protocol", "thiserror", @@ -1457,9 +1492,9 @@ dependencies = [ [[package]] name = "miden-core" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21a5c9c8c3d42ae8381ed49e47ff9ad2d2e345c4726761be36b7d4000ebb40ae" 
+checksum = "9a9ebf937ab3ebc6d540cc7c48dd5cfc08da8b19e38757f71229d6b50414268b" dependencies = [ "derive_more", "itertools 0.14.0", @@ -1468,20 +1503,19 @@ dependencies = [ "miden-formatting", "miden-utils-core-derive", "miden-utils-indexing", + "miden-utils-sync", "num-derive", "num-traits", "proptest", "proptest-derive", "thiserror", - "winter-math", - "winter-utils", ] [[package]] name = "miden-core-lib" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6556494ea5576803730fa15015bee6bd9d1a117450f22e7df0883421e7423674" +checksum = "fa496b3a7546c0022e8d5a92d88726907e380074f1fb634859b5e2094270dacf" dependencies = [ "env_logger", "fs-err", @@ -1490,15 +1524,14 @@ dependencies = [ "miden-crypto", "miden-processor", "miden-utils-sync", - "sha2", "thiserror", ] [[package]] name = "miden-crypto" -version = "0.19.4" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e28b6e110f339c2edc2760a8cb94863f0a055ee658a49bc90c8560eff2feef4" +checksum = "4b8fc3ec2033d3e17a40611f3ab7c20b0578ccf5e6ddcc9a1df9f26599e6ebdd" dependencies = [ "blake3", "cc", @@ -1507,12 +1540,30 @@ dependencies = [ "ed25519-dalek", "flume", "glob", - "hashbrown", + "hashbrown 0.16.1", "hkdf", "k256", "miden-crypto-derive", + "miden-field", + "miden-serde-utils", "num", "num-complex", + "p3-air", + "p3-blake3", + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-goldilocks", + "p3-keccak", + "p3-matrix", + "p3-maybe-rayon", + "p3-merkle-tree", + "p3-miden-air", + "p3-miden-fri", + "p3-miden-prover", + "p3-symmetric", + "p3-util", "rand", "rand_chacha", "rand_core 0.9.5", @@ -1522,27 +1573,24 @@ dependencies = [ "sha3", "subtle", "thiserror", - "winter-crypto", - "winter-math", - "winter-utils", "x25519-dalek", ] [[package]] name = "miden-crypto-derive" -version = "0.19.4" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f40e95b9c7c99ed6bbf073d9e02721d812dedd2c195019c0a0e0a3dbb9cbf034" +checksum = "207828f24e358b4e1e0641c37802816b8730816ff92ddb4d271ef3a00f8696bb" dependencies = [ "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-debug-types" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19123e896f24b575e69921a79a39a0a4babeb98404a8601017feb13b75d653b3" +checksum = "6bbdee85c103fe0979ed05f888da8c0b078446b2feee17a67f56d75d6189adae" dependencies = [ "memchr", "miden-crypto", @@ -1556,6 +1604,23 @@ dependencies = [ "thiserror", ] +[[package]] +name = "miden-field" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f821a07c16cfa6e500d5a56d05c11523984e3cd562cfc80ef657e4264d708067" +dependencies = [ + "miden-serde-utils", + "num-bigint", + "p3-challenger", + "p3-field", + "p3-goldilocks", + "paste", + "rand", + "serde", + "thiserror", +] + [[package]] name = "miden-formatting" version = "0.1.1" @@ -1567,13 +1632,14 @@ dependencies = [ [[package]] name = "miden-mast-package" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d6a322b91efa1bb71e224395ca1fb9ca00e2614f89427e35d8c42a903868a3" +checksum = "47f6dfbe2e3a2ca9977a46551d378cf4c5232624d50bd604c644eaa95342a5c1" dependencies = [ "derive_more", "miden-assembly-syntax", "miden-core", + "miden-debug-types", "thiserror", ] @@ -1583,25 +1649,19 @@ version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eef536978f24a179d94fa2a41e4f92b28e7d8aab14b8d23df28ad2a3d7098b20" dependencies = [ - "backtrace", - "backtrace-ext", "cfg-if", "futures", "indenter", "lazy_static", "miden-miette-derive", - "owo-colors 4.2.3", + "owo-colors 4.3.0", "regex", "rustc_version 0.2.3", "rustversion", "serde_json", "spin 0.9.8", "strip-ansi-escapes", - "supports-color", - "supports-hyperlinks", - "supports-unicode", - "syn 
2.0.114", - "terminal_size", + "syn 2.0.117", "textwrap", "thiserror", "trybuild", @@ -1616,14 +1676,14 @@ checksum = "86a905f3ea65634dd4d1041a4f0fd0a3e77aa4118341d265af1a94339182222f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-processor" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a659fac55de14647e2695f03d96b83ff94fe65fd31e74d81c225ec52af25acf" +checksum = "13c83cc2f87364c88f6b7d7acb0c7908b63064ed94e0b2b68a0f5990f74a42c5" dependencies = [ "itertools 0.14.0", "miden-air", @@ -1636,12 +1696,11 @@ dependencies = [ "thiserror", "tokio", "tracing", - "winter-prover", ] [[package]] name = "miden-protocol" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -1650,7 +1709,6 @@ dependencies = [ "criterion 0.5.1", "fs-err", "getrandom 0.3.4", - "miden-air", "miden-assembly", "miden-assembly-syntax", "miden-core", @@ -1674,37 +1732,48 @@ dependencies = [ "thiserror", "toml", "walkdir", - "winter-air", - "winter-rand-utils", ] [[package]] name = "miden-protocol-macros" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "miden-protocol", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] name = "miden-prover" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e5df61f50f27886f6f777d6e0cdf785f7db87dd881799a84a801e7330c189c8" +checksum = "d0fe4c03cc2a5c0404596f10c076e8e265d87fb7a9c5fbe21b15bc12874f7855" dependencies = [ + "bincode", "miden-air", + "miden-core", + "miden-crypto", "miden-debug-types", "miden-processor", + "serde", + "tokio", "tracing", - "winter-maybe-async", - "winter-prover", +] + +[[package]] +name = "miden-serde-utils" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fe74c2e7d8a8b8758e067de10665816928222c1d0561d95c12ac4bcaefc2a2a" 
+dependencies = [ + "p3-field", + "p3-goldilocks", ] [[package]] name = "miden-standards" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -1723,7 +1792,7 @@ dependencies = [ [[package]] name = "miden-testing" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -1743,14 +1812,15 @@ dependencies = [ "rand", "rand_chacha", "rstest", + "serde", + "serde_json", + "thiserror", "tokio", - "winter-rand-utils", - "winterfell", ] [[package]] name = "miden-tx" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "anyhow", "assert_matches", @@ -1767,7 +1837,7 @@ dependencies = [ [[package]] name = "miden-tx-batch-prover" -version = "0.13.3" +version = "0.14.0-alpha.1" dependencies = [ "miden-protocol", "miden-tx", @@ -1775,9 +1845,9 @@ dependencies = [ [[package]] name = "miden-utils-core-derive" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa207ffd8b26a79d9b5b246a352812f0015c0bb8f75492ec089c5c8e6d5f9e2b" +checksum = "ad5c364abe484d43d171afc320e7560db37ece00fe625569068c1053ed186540" dependencies = [ "proc-macro2", "quote", @@ -1786,9 +1856,9 @@ dependencies = [ [[package]] name = "miden-utils-diagnostics" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b2f55477d410542a5d8990ca04856adf5bef91bfa3b54ca3c03a5ff14a6e25c" +checksum = "467d8eafd735ab1e0db7bf6a6a8b5bcf4c31a56c0cd7f80cba1932d4bb984b12" dependencies = [ "miden-crypto", "miden-debug-types", @@ -1799,35 +1869,38 @@ dependencies = [ [[package]] name = "miden-utils-indexing" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f39efae17e14ec8f8a1266cffd29eb7a08ac837143cd09223b1af361bbb55730" +checksum = "bc42cfa3aef68d21238b3ce4c2db00a1278f8075ef492c23c035ab6c75774790" dependencies = [ + "miden-crypto", "thiserror", 
] [[package]] name = "miden-utils-sync" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da7fa8f5fd27f122c83f55752f2a964bbfc2b713de419e9c152f7dcc05c194ec" +checksum = "b7e09bb239449e63e9a81f9b4ca5db1762327f44fb50777527fdba6fdbcab890" dependencies = [ "lock_api", "loom", + "once_cell", "parking_lot", ] [[package]] name = "miden-verifier" -version = "0.20.2" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbddac2e76486fb657929338323c68b9e7f40e33b8cfb593d0fb5bf637db046e" +checksum = "fbb4d3120e2c9cce41b5dac7507cd86154951938b9effbc322c57983065bfa4a" dependencies = [ + "bincode", "miden-air", "miden-core", + "miden-crypto", "thiserror", "tracing", - "winter-verifier", ] [[package]] @@ -1882,7 +1955,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -1926,7 +1999,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2021,194 +2094,530 @@ checksum = "2386b4ebe91c2f7f51082d4cefa145d030e33a1842a96b12e4885cc3c01f7a55" [[package]] name = "owo-colors" -version = "4.2.3" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52" +checksum = "d211803b9b6b570f68772237e415a029d5a50c65d382910b879fb19d3271f94d" [[package]] -name = "parking_lot" -version = "0.12.5" +name = "p3-air" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +checksum = "0141a56ed9924ce0265e7e91cd29bbcd230262744b7a7f0c448bfbf212f73182" dependencies = [ - "lock_api", - 
"parking_lot_core", + "p3-field", + "p3-matrix", ] [[package]] -name = "parking_lot_core" -version = "0.9.12" +name = "p3-blake3" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +checksum = "006330bae15fdda0d460e73e03e7ebf06e8848dfda8355f9d568a7fed7c37719" dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", + "blake3", + "p3-symmetric", + "p3-util", ] [[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "petgraph" -version = "0.7.1" +name = "p3-challenger" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +checksum = "20e42ba74a49c08c6e99f74cd9b343bfa31aa5721fea55079b18e3fd65f1dcbc" dependencies = [ - "fixedbitset", - "indexmap", + "p3-field", + "p3-maybe-rayon", + "p3-monty-31", + "p3-symmetric", + "p3-util", + "tracing", ] [[package]] -name = "phf_shared" -version = "0.11.3" +name = "p3-commit" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +checksum = "498211e7b9a0f8366b410b4a9283ae82ff2fc91f473b1c5816aa6e90e74b125d" dependencies = [ - "siphasher", + "itertools 0.14.0", + "p3-challenger", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-util", + "serde", ] [[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" +name = "p3-dft" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +checksum = "e63fa5eb1bd12a240089e72ae3fe10350944d9c166d00a3bfd2a1794db65cf5c" +dependencies = [ + "itertools 0.14.0", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-util", + "spin 0.10.0", + "tracing", +] [[package]] -name = "pkcs8" -version = "0.10.2" +name = "p3-field" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +checksum = "2ebfdb6ef992ae64e9e8f449ac46516ffa584f11afbdf9ee244288c2a633cdf4" dependencies = [ - "der", - "spki", + "itertools 0.14.0", + "num-bigint", + "p3-maybe-rayon", + "p3-util", + "paste", + "rand", + "serde", + "tracing", ] [[package]] -name = "plotters" -version = "0.3.7" +name = "p3-goldilocks" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +checksum = "64716244b5612622d4e78a4f48b74f6d3bb7b4085b7b6b25364b1dfca7198c66" dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", + "num-bigint", + "p3-challenger", + "p3-dft", + "p3-field", + "p3-mds", + "p3-poseidon2", + "p3-symmetric", + "p3-util", + "paste", + "rand", + "serde", ] [[package]] -name = "plotters-backend" -version = "0.3.7" +name = "p3-interpolation" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" +checksum = "1d877565a94a527c89459fc8ccb0eb58769d8c86456575d1315a1651bd24616d" +dependencies = [ + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-util", +] [[package]] -name = "plotters-svg" -version = "0.3.7" +name = "p3-keccak" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +checksum = 
"1d57334537d10316e0f1cda622f0a5b3239f219a5dcd2a95ea87e41e00df6a92" dependencies = [ - "plotters-backend", + "p3-field", + "p3-symmetric", + "p3-util", + "tiny-keccak", ] [[package]] -name = "poly1305" -version = "0.8.0" +name = "p3-matrix" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +checksum = "5542f96504dae8100c91398fb1e3f5ec669eb9c73d9e0b018a93b5fe32bad230" dependencies = [ - "cpufeatures", - "opaque-debug", - "universal-hash", + "itertools 0.14.0", + "p3-field", + "p3-maybe-rayon", + "p3-util", + "rand", + "serde", + "tracing", + "transpose", ] [[package]] -name = "portable-atomic" -version = "1.13.0" +name = "p3-maybe-rayon" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" +checksum = "0e5669ca75645f99cd001e9d0289a4eeff2bc2cd9dc3c6c3aaf22643966e83df" +dependencies = [ + "rayon", +] [[package]] -name = "portable-atomic-util" -version = "0.2.4" +name = "p3-mds" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +checksum = "038763af23df9da653065867fd85b38626079031576c86fd537097e5be6a0da0" dependencies = [ - "portable-atomic", + "p3-dft", + "p3-field", + "p3-symmetric", + "p3-util", + "rand", ] [[package]] -name = "pprof" -version = "0.15.0" +name = "p3-merkle-tree" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38a01da47675efa7673b032bf8efd8214f1917d89685e07e395ab125ea42b187" +checksum = "8d93625a3041effddc72ee2511c919f710b7f91fd0f9931ab8a70aeba586fd6e" dependencies = [ - "aligned-vec", - "backtrace", - "cfg-if", - "criterion 0.5.1", - "findshlibs", - "inferno", - "libc", - "log", - "nix", - "once_cell", - "smallvec", - "spin 0.10.0", - "symbolic-demangle", - "tempfile", + 
"itertools 0.14.0", + "p3-commit", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-symmetric", + "p3-util", + "rand", + "serde", "thiserror", + "tracing", ] [[package]] -name = "ppv-lite86" -version = "0.2.21" +name = "p3-miden-air" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +checksum = "45a88e6ee9c92ff6c0b64f1ec0d61eda72fb432bda45337d876c46bd43748508" dependencies = [ - "zerocopy", + "p3-air", + "p3-field", + "p3-matrix", ] [[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "primitive-types" -version = "0.14.0" +name = "p3-miden-fri" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721a1da530b5a2633218dc9f75713394c983c352be88d2d7c9ee85e2c4c21794" +checksum = "e282998bc1d12dceaa0ed8979fa507b8369d663fa377da695d578f5f3a035935" dependencies = [ - "fixed-hash", - "uint", + "itertools 0.14.0", + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-interpolation", + "p3-matrix", + "p3-maybe-rayon", + "p3-util", + "rand", + "serde", + "tracing", ] [[package]] -name = "proc-macro-crate" -version = "3.4.0" +name = "p3-miden-prover" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +checksum = "f05a61c10cc2d6a73e192ac34a9884e4f26bd877f3eaea441d7b7ebfdffdf6c7" dependencies = [ - "toml_edit", + "itertools 0.14.0", + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-interpolation", + "p3-matrix", + "p3-maybe-rayon", + "p3-miden-air", + "p3-miden-uni-stark", + "p3-util", + "serde", + "tracing", ] [[package]] -name = "proc-macro2" -version = "1.0.106" +name = "p3-miden-uni-stark" +version = "0.4.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a78b6a5b5f6bdc55439d343d2a0a2a8e7cb6544b03296f54d2214a84e91e130" +dependencies = [ + "itertools 0.14.0", + "p3-air", + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-uni-stark", + "p3-util", + "serde", + "thiserror", + "tracing", +] + +[[package]] +name = "p3-monty-31" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a981d60da3d8cbf8561014e2c186068578405fd69098fa75b43d4afb364a47" +dependencies = [ + "itertools 0.14.0", + "num-bigint", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-mds", + "p3-poseidon2", + "p3-symmetric", + "p3-util", + "paste", + "rand", + "serde", + "spin 0.10.0", + "tracing", + "transpose", +] + +[[package]] +name = "p3-poseidon2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "903b73e4f9a7781a18561c74dc169cf03333497b57a8dd02aaeb130c0f386599" +dependencies = [ + "p3-field", + "p3-mds", + "p3-symmetric", + "p3-util", + "rand", +] + +[[package]] +name = "p3-symmetric" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cd788f04e86dd5c35dd87cad29eefdb6371d2fd5f7664451382eeacae3c3ed0" +dependencies = [ + "itertools 0.14.0", + "p3-field", + "serde", +] + +[[package]] +name = "p3-uni-stark" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68d409704a8cbdb6c77f6b83a05c6b16a3c8a2c00d880146fa34181977a0d3ac" +dependencies = [ + "itertools 0.14.0", + "p3-air", + "p3-challenger", + "p3-commit", + "p3-dft", + "p3-field", + "p3-matrix", + "p3-maybe-rayon", + "p3-util", + "serde", + "thiserror", + "tracing", +] + +[[package]] +name = "p3-util" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "663b16021930bc600ecada915c6c3965730a3b9d6a6c23434ccf70bfc29d6881" +dependencies 
= [ + "rayon", + "serde", +] + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-link", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "petgraph" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + 
"wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "portable-atomic-util" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9db96d7fa8782dd8c15ce32ffe8680bbd1e978a43bf51a34d39483540495f5" +dependencies = [ + "portable-atomic", +] + +[[package]] +name = "pprof" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38a01da47675efa7673b032bf8efd8214f1917d89685e07e395ab125ea42b187" +dependencies = [ + "aligned-vec", + "backtrace", + "cfg-if", + "criterion 0.5.1", + "findshlibs", + "inferno", + "libc", + "log", + "nix", + "once_cell", + "smallvec", + "spin 0.10.0", + "symbolic-demangle", + "tempfile", + "thiserror", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.117", +] + +[[package]] +name = "primitive-types" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721a1da530b5a2633218dc9f75713394c983c352be88d2d7c9ee85e2c4c21794" +dependencies = [ + "fixed-hash", + "uint", +] + +[[package]] +name = "proc-macro-crate" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" +dependencies = [ + "toml_edit", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ @@ -2217,11 +2626,11 @@ dependencies = [ [[package]] name = "proptest" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "num-traits", "rand", "rand_chacha", @@ -2238,7 +2647,7 @@ checksum = "fb6dc647500e84a25a85b100e76c85b8ace114c209432dc174f20aac11d4ed6c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2252,9 +2661,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.43" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" +checksum = 
"41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" dependencies = [ "proc-macro2", ] @@ -2265,6 +2674,12 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + [[package]] name = "rand" version = "0.9.2" @@ -2356,14 +2771,14 @@ version = "0.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", ] [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", @@ -2373,9 +2788,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", @@ -2384,9 +2799,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "relative-path" @@ -2406,9 +2821,9 @@ dependencies = [ [[package]] name = "rgb" -version = "0.8.52" +version = "0.8.53" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" +checksum = "47b34b781b31e5d73e9fbc8689c70551fd1ade9a19e3e28cfec8580a79290cc4" dependencies = [ "bytemuck", ] @@ -2438,7 +2853,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.1", - "syn 2.0.114", + "syn 2.0.117", "unicode-ident", ] @@ -2468,28 +2883,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ - "bitflags 2.10.0", + "bitflags 2.11.0", "errno", "libc", - "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", + "linux-raw-sys", + "windows-sys", ] [[package]] @@ -2585,7 +2987,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2659,15 +3061,15 @@ dependencies = [ [[package]] name = "siphasher" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = 
"0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "smallvec" @@ -2727,6 +3129,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb" +[[package]] +name = "strength_reduce" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe895eb47f22e2ddd4dabc02bce419d2e643c8e3b585c78158b349195bc24d82" + [[package]] name = "string_cache" version = "0.8.9" @@ -2754,32 +3162,11 @@ version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" -[[package]] -name = "supports-color" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" -dependencies = [ - "is_ci", -] - -[[package]] -name = "supports-hyperlinks" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e396b6523b11ccb83120b115a0b7366de372751aa6edf19844dfb13a6af97e91" - -[[package]] -name = "supports-unicode" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7401a30af6cb5818bb64852270bb722533397edcfc7344954a38f420819ece2" - [[package]] name = "symbolic-common" -version = "12.17.1" +version = "12.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520cf51c674f8b93d533f80832babe413214bb766b6d7cb74ee99ad2971f8467" +checksum = "751a2823d606b5d0a7616499e4130a516ebd01a44f39811be2b9600936509c23" dependencies = [ "debugid", "memmap2", @@ -2789,9 +3176,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.17.1" +version = "12.17.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f0de2ee0ffa2641e17ba715ad51d48b9259778176517979cb38b6aa86fa7425" +checksum = 
"79b237cfbe320601dd24b4ac817a5b68bb28f5508e33f08d42be0682cadc8ac9" dependencies = [ "rustc-demangle", "symbolic-common", @@ -2810,9 +3197,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.114" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -2827,15 +3214,15 @@ checksum = "591ef38edfb78ca4771ee32cf494cb8771944bee237a9b91fc9c1424ac4b777b" [[package]] name = "tempfile" -version = "3.24.0" +version = "3.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.2", "once_cell", - "rustix 1.1.3", - "windows-sys 0.61.2", + "rustix", + "windows-sys", ] [[package]] @@ -2844,7 +3231,7 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d8c27177b12a6399ffc08b98f76f7c9a1f4fe9fc967c784c5a071fa8d93cf7e1" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -2856,16 +3243,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "terminal_size" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" -dependencies = [ - "rustix 0.38.44", - "windows-sys 0.48.0", -] - [[package]] name = "textwrap" version = "0.16.2" @@ -2894,7 +3271,7 @@ checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2906,6 +3283,15 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + [[package]] name = "tinytemplate" version = "1.2.1" @@ -2918,9 +3304,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.49.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" dependencies = [ "pin-project-lite", "tokio-macros", @@ -2928,13 +3314,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -2961,9 +3347,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.11+spec-1.1.0" +version = "1.0.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" +checksum = "8825697d11e3935e3ab440a9d672022e540d016ff2f193de4295d11d18244774" dependencies = [ "indexmap", "serde_core", @@ -2976,18 +3362,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.5+spec-1.1.0" +version = "1.0.0+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +checksum = "32c2555c699578a4f59f0cc68e5116c8d7cabbd45e1409b989d4be085b53f13e" dependencies = [ "serde_core", ] [[package]] name = "toml_edit" -version = "0.23.10+spec-1.0.0" +version = "0.25.4+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +checksum = "7193cbd0ce53dc966037f54351dbbcf0d5a642c7f0038c382ef9e677ce8c13f2" dependencies = [ "indexmap", "toml_datetime", @@ -2997,9 +3383,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.6+spec-1.1.0" +version = "1.0.9+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" dependencies = [ "winnow", ] @@ -3029,7 +3415,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -3092,11 +3478,21 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "transpose" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad61aed86bc3faea4300c7aee358b4c6d0c8d6ccc36524c96e4c92ccf26e77e" +dependencies = [ + "num-integer", + "strength_reduce", +] + [[package]] name = "trybuild" -version = "1.0.114" +version = "1.0.116" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17e807bff86d2a06b52bca4276746584a78375055b6e45843925ce2802b335" +checksum = "47c635f0191bd3a2941013e5062667100969f8c4e9cd787c14f977265d73616e" dependencies = [ "dissimilar", "glob", @@ -3134,9 +3530,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" [[package]] name = "unicode-linebreak" @@ -3180,9 +3576,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.19.0" 
+version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" dependencies = [ "js-sys", "wasm-bindgen", @@ -3234,11 +3630,20 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" dependencies = [ "cfg-if", "once_cell", @@ -3249,9 +3654,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3259,31 +3664,65 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.11.0", + "hashbrown 0.15.5", + "indexmap", + "semver 1.0.27", +] + [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" dependencies = [ "js-sys", "wasm-bindgen", @@ -3311,7 +3750,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys", ] [[package]] @@ -3335,24 +3774,6 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.61.2" @@ -3362,255 +3783,103 @@ dependencies = [ "windows-link", ] -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = 
"windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - [[package]] name = "winnow" -version = "0.7.14" +version = "0.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" dependencies = [ "memchr", ] [[package]] -name = "winter-air" -version = "0.13.1" +name = "wit-bindgen" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef01227f23c7c331710f43b877a8333f5f8d539631eea763600f1a74bf018c7c" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" dependencies = [ - "libm", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-utils", + "wit-bindgen-rust-macro", ] [[package]] -name = "winter-crypto" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdb247bc142438798edb04067ab72a22cf815f57abbd7b78a6fa986fc101db8" -dependencies = [ - "blake3", - "sha3", - "winter-math", - "winter-utils", -] - -[[package]] -name = "winter-fri" -version = "0.13.1" +name = "wit-bindgen-core" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd592b943f9d65545683868aaf1b601eb66e52bfd67175347362efff09101d3a" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" dependencies = [ - "winter-crypto", - "winter-math", - "winter-utils", + "anyhow", + "heck", + "wit-parser", ] [[package]] -name = "winter-math" -version = "0.13.1" +name = "wit-bindgen-rust" +version = "0.51.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aecfb48ee6a8b4746392c8ff31e33e62df8528a3b5628c5af27b92b14aef1ea" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" dependencies = [ - "winter-utils", + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn 2.0.117", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", ] [[package]] -name = "winter-maybe-async" -version = "0.13.1" +name = "wit-bindgen-rust-macro" +version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d31a19dae58475d019850e25b0170e94b16d382fbf6afee9c0e80fdc935e73e" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "winter-prover" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84cc631ed56cd39b78ef932c1ec4060cc6a44d114474291216c32f56655b3048" -dependencies = [ - "tracing", - "winter-air", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-maybe-async", - "winter-utils", -] - -[[package]] -name = "winter-rand-utils" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4ff3b651754a7bd216f959764d0a5ab6f4b551c9a3a08fb9ccecbed594b614a" -dependencies = [ - "rand", - "winter-utils", + "syn 2.0.117", + "wit-bindgen-core", + "wit-bindgen-rust", ] [[package]] -name = "winter-utils" -version = "0.13.1" +name = "wit-component" +version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9951263ef5317740cd0f49e618db00c72fabb70b75756ea26c4d5efe462c04dd" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" dependencies = [ - "rayon", -] - -[[package]] -name = "winter-verifier" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0425ea81f8f703a1021810216da12003175c7974a584660856224df04b2e2fdb" -dependencies = [ - "winter-air", - "winter-crypto", - "winter-fri", - "winter-math", - "winter-utils", + "anyhow", + "bitflags 2.11.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", ] [[package]] -name = "winterfell" -version = "0.13.1" +name = "wit-parser" +version = "0.244.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f824ddd5aec8ca6a54307f20c115485a8a919ea94dd26d496d856ca6185f4f" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" dependencies = [ - "winter-air", - "winter-prover", - "winter-verifier", + "anyhow", + "id-arena", + "indexmap", + "log", + "semver 1.0.27", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", ] -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - [[package]] name = "x25519-dalek" version = "2.0.1" @@ -3623,22 +3892,22 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.33" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" +checksum = "a789c6e490b576db9f7e6b6d661bcc9799f7c0ac8352f56ea20193b2681532e5" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.33" +version = "0.8.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" +checksum = "f65c489a7071a749c849713807783f70672b28094011623e200cb86dcb835953" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.117", ] [[package]] @@ -3649,6 +3918,6 @@ checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" 
[[package]] name = "zmij" -version = "1.0.16" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/Cargo.toml b/Cargo.toml index e73379167e..c5b384107a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,9 +19,9 @@ edition = "2024" exclude = [".github/"] homepage = "https://miden.xyz" license = "MIT" -repository = "https://github.com/0xMiden/miden-base" +repository = "https://github.com/0xMiden/protocol" rust-version = "1.90" -version = "0.13.3" +version = "0.14.0-alpha.1" [profile.release] codegen-units = 1 @@ -31,45 +31,41 @@ lto = true inherits = "dev" opt-level = 1 -# Avoid running the expensive debug assertion in winter-prover -# https://github.com/facebook/winterfell/blob/cd32dce2fd4986c94516113568eefd938fafe31c/prover/src/lib.rs#L355C1-L356 -[profile.test-dev.package.winter-prover] -debug-assertions = false - [profile.bench] codegen-units = 1 lto = true [workspace.dependencies] # Workspace crates -miden-agglayer = { default-features = false, path = "crates/miden-agglayer", version = "0.13" } -miden-block-prover = { default-features = false, path = "crates/miden-block-prover", version = "0.13" } -miden-protocol = { default-features = false, path = "crates/miden-protocol", version = "0.13" } -miden-protocol-macros = { default-features = false, path = "crates/miden-protocol-macros", version = "0.13" } -miden-standards = { default-features = false, path = "crates/miden-standards", version = "0.13" } -miden-testing = { default-features = false, path = "crates/miden-testing", version = "0.13" } -miden-tx = { default-features = false, path = "crates/miden-tx", version = "0.13" } -miden-tx-batch-prover = { default-features = false, path = "crates/miden-tx-batch-prover", version = "0.13" } +miden-agglayer = { default-features = false, path = "crates/miden-agglayer", 
version = "=0.14.0-alpha.1" } +miden-block-prover = { default-features = false, path = "crates/miden-block-prover", version = "=0.14.0-alpha.1" } +miden-protocol = { default-features = false, path = "crates/miden-protocol", version = "=0.14.0-alpha.1" } +miden-protocol-macros = { default-features = false, path = "crates/miden-protocol-macros", version = "=0.14.0-alpha.1" } +miden-standards = { default-features = false, path = "crates/miden-standards", version = "=0.14.0-alpha.1" } +miden-testing = { default-features = false, path = "crates/miden-testing", version = "=0.14.0-alpha.1" } +miden-tx = { default-features = false, path = "crates/miden-tx", version = "=0.14.0-alpha.1" } +miden-tx-batch-prover = { default-features = false, path = "crates/miden-tx-batch-prover", version = "=0.14.0-alpha.1" } # Miden dependencies -miden-air = { default-features = false, version = "0.20" } -miden-assembly = { default-features = false, version = "0.20" } -miden-assembly-syntax = { default-features = false, version = "0.20" } -miden-core = { default-features = false, version = "0.20" } -miden-core-lib = { default-features = false, version = "0.20" } -miden-crypto = { default-features = false, version = "0.19" } -miden-mast-package = { default-features = false, version = "0.20" } -miden-processor = { default-features = false, version = "0.20" } -miden-prover = { default-features = false, version = "0.20" } -miden-utils-sync = { default-features = false, version = "0.20" } -miden-verifier = { default-features = false, version = "0.20" } +miden-assembly = { default-features = false, version = "0.21" } +miden-assembly-syntax = { default-features = false, version = "0.21" } +miden-core = { default-features = false, version = "0.21" } +miden-core-lib = { default-features = false, version = "0.21" } +miden-crypto = { default-features = false, version = "0.22" } +miden-mast-package = { default-features = false, version = "0.21" } +miden-processor = { default-features = false, version = 
"0.21" } +miden-prover = { default-features = false, version = "0.21" } +miden-utils-sync = { default-features = false, version = "0.21" } +miden-verifier = { default-features = false, version = "0.21" } # External dependencies anyhow = { default-features = false, features = ["backtrace", "std"], version = "1.0" } assert_matches = { default-features = false, version = "1.5" } +fs-err = { default-features = false, version = "3" } primitive-types = { default-features = false, version = "0.14" } rand = { default-features = false, version = "0.9" } rand_chacha = { default-features = false, version = "0.9" } rstest = { version = "0.26" } +serde = { default-features = false, version = "1.0" } thiserror = { default-features = false, version = "2.0" } tokio = { default-features = false, features = ["sync"], version = "1" } diff --git a/Makefile b/Makefile index 10f3f8823a..019bbd2bf9 100644 --- a/Makefile +++ b/Makefile @@ -75,16 +75,29 @@ serve-docs: ## Serves the docs # --- testing ------------------------------------------------------------------------------------- -.PHONY: test-build -test-build: ## Build the test binary - $(BUILD_GENERATED_FILES_IN_SRC) cargo nextest run --cargo-profile test-dev --features concurrent,testing,std --no-run +.PHONY: test-release-build +test-release-build: ## Build the test binary + $(BUILD_GENERATED_FILES_IN_SRC) cargo nextest run --cargo-profile test-dev --no-default-features --features concurrent,testing,std --no-run +# Run all tests without debug mode. This is fast but produces worse error message. +# Running `make test-release name=test_name` will only run the test `test_name`. +.PHONY: test-release +test-release: + $(BUILD_GENERATED_FILES_IN_SRC) $(BACKTRACE) cargo nextest run --profile default --cargo-profile test-dev --no-default-features --features concurrent,testing,std $(name) + +# Shorthand for make test-release. +.PHONY: test-release testr +testr: test-release + +# Run all tests with debug mode. 
This is slower but produces better error message. +# Running `make test name=test_name` will only run the test `test_name`. .PHONY: test -test: ## Run all tests. Running `make test name=test_name` will only run the test `test_name`. +test: $(BUILD_GENERATED_FILES_IN_SRC) $(BACKTRACE) cargo nextest run --profile default --cargo-profile test-dev --features concurrent,testing,std $(name) +# Run all tests except the proving tests (imperfectly filtered based on name) with debug mode. # This uses the std feature to be able to load the MASM source files back into the assembler # source manager (see `source_manager_ext::load_masm_source_files`). .PHONY: test-dev @@ -129,6 +142,16 @@ build-no-std: ## Build without the standard library build-no-std-testing: ## Build without the standard library. Includes the `testing` feature $(BUILD_GENERATED_FILES_IN_SRC) cargo build --no-default-features --target wasm32-unknown-unknown --workspace --exclude bench-transaction --features testing +# --- test vectors -------------------------------------------------------------------------------- + +.PHONY: generate-solidity-test-vectors +generate-solidity-test-vectors: ## Regenerate Solidity MMR test vectors using Foundry + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateVectors + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateCanonicalZeros + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateVerificationProofData + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateLeafValueVectors + cd crates/miden-agglayer/solidity-compat && forge test -vv --match-test test_generateClaimAssetVectors + # --- benchmarking -------------------------------------------------------------------------------- .PHONY: bench-tx diff --git a/README.md b/README.md index df78fec0f4..0f5a0fbe68 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # Miden protocol 
-[![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xMiden/miden-base/blob/main/LICENSE) -[![test](https://github.com/0xMiden/miden-base/actions/workflows/test.yml/badge.svg)](https://github.com/0xMiden/miden-base/actions/workflows/test.yml) -[![build](https://github.com/0xMiden/miden-base/actions/workflows/build.yml/badge.svg)](https://github.com/0xMiden/miden-base/actions/workflows/build.yml) +[![LICENSE](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/0xMiden/protocol/blob/main/LICENSE) +[![test](https://github.com/0xMiden/protocol/actions/workflows/test.yml/badge.svg)](https://github.com/0xMiden/protocol/actions/workflows/test.yml) +[![build](https://github.com/0xMiden/protocol/actions/workflows/build.yml/badge.svg)](https://github.com/0xMiden/protocol/actions/workflows/build.yml) [![RUST_VERSION](https://img.shields.io/badge/rustc-1.90+-lightgray.svg)](https://www.rust-lang.org/tools/install) -[![GitHub Release](https://img.shields.io/github/release/0xMiden/miden-base)](https://github.com/0xMiden/miden-base/releases/) +[![GitHub Release](https://img.shields.io/github/release/0xMiden/protocol)](https://github.com/0xMiden/protocol/releases/) Description and core structures for the Miden Rollup protocol. @@ -16,7 +16,7 @@ Miden is a zero-knowledge rollup for high-throughput and private applications. M If you want to join the technical discussion or learn more about the project, please check out -- the [Documentation](https://0xMiden.github.io/miden-docs). +- the [Documentation](https://docs.miden.xyz/protocol/). - the [Telegram](https://t.me/BuildOnMiden) - the [Repo](https://github.com/0xMiden) - the [Roadmap](https://miden.xyz/roadmap) @@ -28,15 +28,15 @@ Miden is currently on release v0.13. This is an early version of the protocol an ### Feature highlights - **Private accounts**. The Miden Operator tracks only commitments to account data in the public database. 
The users are responsible for keeping track of the state of their accounts. -- **Public accounts**. With public accounts users are be able to store the entire state of their accounts on-chain, thus, eliminating the need to keep track of account states locally (albeit by sacrificing privacy and at a higher cost). +- **Public accounts**. With public accounts, users are able to store the entire state of their accounts on-chain, thus, eliminating the need to keep track of account states locally (albeit by sacrificing privacy and at a higher cost). - **Private notes**. Like with private accounts, the Miden Operator tracks only commitments to notes in the public database. Users need to communicate note details to each other via side channels. -- **Public notes**. With public notes, the users are be able to store all note details on-chain, thus, eliminating the need to communicate note details via side-channels. +- **Public notes**. With public notes, users are able to store all note details on-chain, thus, eliminating the need to communicate note details via side-channels. - **Local transactions**. Users can execute and prove transactions locally on their devices. The Miden Operator verifies the proofs and if the proofs are valid, updates the state of the rollup accordingly. - **Standard account**. Users can create accounts using a small number of standard account interfaces (e.g., basic wallet). In the future, the set of standard smart contracts will be expanded. -- **Standard notes**. Can create notes using standardized note scripts such as Pay-to-ID (`P2ID`) and atomic swap (`SWAP`). In the future, the set of standardized notes will be expanded. -- **Delegated note inclusion proofs**. By delegating note inclusion proofs, users can create chains of dependent notes which are included into a block as a single batch. +- **Standard notes**. Users can create notes using standardized note scripts such as Pay-to-ID (`P2ID`) and atomic swap (`SWAP`). 
In the future, the set of standardized notes will be expanded. +- **Delegated note inclusion proofs**. By delegating note inclusion proofs, users can create chains of dependent transactions which are included into a block as a single batch. - **Transaction recency conditions**. Users are able to specify how close to the chain tip their transactions are to be executed. This enables things like rate limiting and oracles. -- **Network transactions**. Users will be able to create notes intended for network execution. Such notes will be included into transactions executed and proven by the Miden operator. +- **Network transactions**. Users are able to create notes intended for network execution. Such notes are included into transactions executed and proven by the Miden operator. ### Planned features @@ -44,12 +44,12 @@ Miden is currently on release v0.13. This is an early version of the protocol an ## Project structure -| Crate | Description | -| ------------------------------- | ------------------------------------------------------------------------------- | -| [miden-protocol](crates/miden-protocol) | Contains core components defining the Miden protocol, including the transaction kernel. | -| [miden-standards](crates/miden-standards) | Contains the code of Miden's standardized smart contracts. | -| [miden-tx](crates/miden-tx) | Contains tool for creating, executing, and proving Miden rollup transaction. | -| [bench-tx](bin/bench-tx) | Contains transaction execution and proving benchmarks. | +| Crate | Description | +| ----------------------------------------- | --------------------------------------------------------------------------------------- | +| [miden-protocol](crates/miden-protocol) | Contains core components defining the Miden protocol, including the transaction kernel. | +| [miden-standards](crates/miden-standards) | Contains the code of Miden's standardized smart contracts. 
| +| [miden-tx](crates/miden-tx) | Contains tools for creating, executing, and proving Miden rollup transactions. | +| [bench-tx](bin/bench-tx) | Contains transaction execution and proving benchmarks. | ## Make commands @@ -71,7 +71,7 @@ Some of the functions in this project are computationally intensive and may take ## Documentation -The documentation in the `docs/` folder is built using Docusaurus and is automatically absorbed into the main [miden-docs](https://github.com/0xMiden/miden-docs) repository for the main documentation website. Changes to the `next` branch trigger an automated deployment workflow. The docs folder requires npm packages to be installed before building. +The documentation in the `docs/` folder is built using Docusaurus and is automatically absorbed into the main [miden-docs](https://docs.miden.xyz/protocol/) repository for the main documentation website. Changes to the `next` branch trigger an automated deployment workflow. The docs folder requires npm packages to be installed before building. 
## License diff --git a/_typos.toml b/_typos.toml index b3babf7b56..6bc0c6f202 100644 --- a/_typos.toml +++ b/_typos.toml @@ -1,2 +1,5 @@ [default] extend-ignore-identifiers-re = [".*1st.*", ".*2nd.*", ".*3rd.*"] + +[files] +extend-exclude = ["crates/miden-agglayer/solidity-compat/lib"] diff --git a/bin/bench-note-checker/Cargo.toml b/bin/bench-note-checker/Cargo.toml index 630d1b9189..09f0485573 100644 --- a/bin/bench-note-checker/Cargo.toml +++ b/bin/bench-note-checker/Cargo.toml @@ -19,8 +19,8 @@ miden-tx = { workspace = true } # External dependencies anyhow = { workspace = true } -serde = { features = ["derive"], version = "1.0" } -tokio = { features = ["macros", "rt"], version = "1.0" } +serde = { features = ["derive"], workspace = true } +tokio = { features = ["macros", "rt"], workspace = true } [dev-dependencies] criterion = { features = ["async_tokio", "html_reports"], version = "0.6" } diff --git a/bin/bench-transaction/Cargo.toml b/bin/bench-transaction/Cargo.toml index 9b07fccd84..e932ad28fa 100644 --- a/bin/bench-transaction/Cargo.toml +++ b/bin/bench-transaction/Cargo.toml @@ -24,7 +24,7 @@ miden-tx = { workspace = true } # External dependencies anyhow = { workspace = true } -serde = { features = ["derive"], version = "1.0" } +serde = { features = ["derive"], workspace = true } serde_json = { features = ["preserve_order"], package = "serde_json", version = "1.0" } tokio = { features = ["macros", "rt"], workspace = true } diff --git a/bin/bench-transaction/README.md b/bin/bench-transaction/README.md index 6d229c8c78..800542c8a4 100644 --- a/bin/bench-transaction/README.md +++ b/bin/bench-transaction/README.md @@ -44,7 +44,7 @@ Benchmarks consist of two groups: You can run the benchmarks in two ways: -### Option 1: Using Make (from miden-base directory) +### Option 1: Using Make (from protocol directory) ```bash make bench-tx @@ -52,7 +52,7 @@ make bench-tx This command will run both the cycle counting and the time counting benchmarks. 
-### Option 2: Running each benchmark individually (from miden-base directory) +### Option 2: Running each benchmark individually (from protocol directory) ```bash # Run the cycle counting benchmarks diff --git a/bin/bench-transaction/bench-tx.json b/bin/bench-transaction/bench-tx.json index c1dbe2abc3..67dbed5252 100644 --- a/bin/bench-transaction/bench-tx.json +++ b/bin/bench-transaction/bench-tx.json @@ -1,40 +1,40 @@ { "consume single P2ID note": { - "prologue": 3173, - "notes_processing": 1714, + "prologue": 3487, + "notes_processing": 1831, "note_execution": { - "0xa030091e37d38b506d764d5666f3a13af9e5702a0159974a3bc27053d7a55e01": 1674 + "0x1421e92d0f84f11b3e6f84e4e1d193e648eb820666ffb8c50ea818c25a32990c": 1791 }, "tx_script_processing": 42, "epilogue": { - "total": 63977, - "auth_procedure": 62667, - "after_tx_cycles_obtained": 574 + "total": 71195, + "auth_procedure": 69694, + "after_tx_cycles_obtained": 608 } }, "consume two P2ID notes": { - "prologue": 4131, - "notes_processing": 3431, + "prologue": 4509, + "notes_processing": 3668, "note_execution": { - "0x209ecf97790d4328e60a3b15160760934383ecff02550cb5df72e3f6d459fa70": 1708, - "0x4f9da5658d9f717fdcfa674906e92a7424d86da93f3a21fe0362a220f0e457b7": 1674 + "0x702c078c74683d33b507e16d9fc67f0be0cc943cd94c1f652e3a60e0f4164d9f": 1791, + "0x92cc0c8c208e3b8bad970c23b2c4b4c24cc8d42626b3f56363ce1a6bbf4c7ac2": 1828 }, "tx_script_processing": 42, "epilogue": { - "total": 63949, - "auth_procedure": 62653, - "after_tx_cycles_obtained": 574 + "total": 71143, + "auth_procedure": 69668, + "after_tx_cycles_obtained": 608 } }, "create single P2ID note": { - "prologue": 1681, + "prologue": 1766, "notes_processing": 32, "note_execution": {}, - "tx_script_processing": 1497, + "tx_script_processing": 1682, "epilogue": { - "total": 64803, - "auth_procedure": 62899, - "after_tx_cycles_obtained": 574 + "total": 72099, + "auth_procedure": 69906, + "after_tx_cycles_obtained": 608 } } } \ No newline at end of file diff --git 
a/bin/bench-transaction/src/context_setups.rs b/bin/bench-transaction/src/context_setups.rs index e49b8f882d..43de089977 100644 --- a/bin/bench-transaction/src/context_setups.rs +++ b/bin/bench-transaction/src/context_setups.rs @@ -1,9 +1,9 @@ use anyhow::Result; -use miden_protocol::Word; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::note::NoteType; use miden_protocol::testing::account_id::ACCOUNT_ID_SENDER; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_standards::code_builder::CodeBuilder; use miden_testing::{Auth, MockChain, TransactionContext}; @@ -12,7 +12,12 @@ use miden_testing::{Auth, MockChain, TransactionContext}; pub fn tx_create_single_p2id_note() -> Result { let mut builder = MockChain::builder(); let fungible_asset = FungibleAsset::mock(150); - let account = builder.add_existing_wallet_with_assets(Auth::BasicAuth, [fungible_asset])?; + let account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [fungible_asset], + )?; let output_note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), @@ -37,9 +42,10 @@ pub fn tx_create_single_p2id_note() -> Result { # => [note_idx] # move the asset to the note - push.{asset} + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # truncate the stack @@ -49,7 +55,8 @@ pub fn tx_create_single_p2id_note() -> Result { RECIPIENT = output_note.recipient().digest(), note_type = NoteType::Public as u8, tag = output_note.metadata().tag(), - asset = Word::from(fungible_asset), + ASSET_KEY = fungible_asset.to_key_word(), + ASSET_VALUE = fungible_asset.to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_note_creation_script)?; @@ -57,7 +64,7 @@ pub fn tx_create_single_p2id_note() -> Result { // construct the 
transaction context mock_chain .build_tx_context(account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .tx_script(tx_script) .disable_debug_mode() .build() @@ -72,7 +79,9 @@ pub fn tx_consume_single_p2id_note() -> Result { let mut builder = MockChain::builder(); // Create target account - let target_account = builder.create_new_wallet(Auth::BasicAuth)?; + let target_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder @@ -98,7 +107,9 @@ pub fn tx_consume_single_p2id_note() -> Result { pub fn tx_consume_two_p2id_notes() -> Result { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let fungible_asset_1: Asset = FungibleAsset::mock(100); let fungible_asset_2: Asset = FungibleAsset::mock(23); diff --git a/bin/bench-transaction/src/time_counting_benchmarks/prove.rs b/bin/bench-transaction/src/time_counting_benchmarks/prove.rs index 5dafb4604d..edad46c21e 100644 --- a/bin/bench-transaction/src/time_counting_benchmarks/prove.rs +++ b/bin/bench-transaction/src/time_counting_benchmarks/prove.rs @@ -89,12 +89,15 @@ fn core_benchmarks(c: &mut Criterion) { }, |tx_context| async move { // benchmark the transaction execution and proving - black_box(prove_transaction( - tx_context - .execute() - .await - .expect("execution of the single P2ID note consumption tx failed"), - )) + black_box( + prove_transaction( + tx_context + .execute() + .await + .expect("execution of the single P2ID note consumption tx failed"), + ) + .await, + ) }, BatchSize::SmallInput, ); @@ -110,12 +113,15 @@ fn core_benchmarks(c: &mut Criterion) { }, |tx_context| async move { // benchmark the transaction execution and proving - 
black_box(prove_transaction( - tx_context - .execute() - .await - .expect("execution of the two P2ID note consumption tx failed"), - )) + black_box( + prove_transaction( + tx_context + .execute() + .await + .expect("execution of the two P2ID note consumption tx failed"), + ) + .await, + ) }, BatchSize::SmallInput, ); @@ -124,10 +130,10 @@ fn core_benchmarks(c: &mut Criterion) { execute_and_prove_group.finish(); } -fn prove_transaction(executed_transaction: ExecutedTransaction) -> Result<()> { +async fn prove_transaction(executed_transaction: ExecutedTransaction) -> Result<()> { let executed_transaction_id = executed_transaction.id(); let proven_transaction: ProvenTransaction = - LocalTransactionProver::default().prove(executed_transaction)?; + LocalTransactionProver::default().prove(executed_transaction).await?; assert_eq!(proven_transaction.id(), executed_transaction_id); Ok(()) diff --git a/crates/miden-agglayer/Cargo.toml b/crates/miden-agglayer/Cargo.toml index 0bae5deba1..70ca24bc7a 100644 --- a/crates/miden-agglayer/Cargo.toml +++ b/crates/miden-agglayer/Cargo.toml @@ -23,18 +23,24 @@ testing = ["miden-protocol/testing"] # Miden dependencies miden-assembly = { workspace = true } miden-core = { workspace = true } +miden-core-lib = { workspace = true } miden-protocol = { workspace = true } miden-standards = { workspace = true } miden-utils-sync = { workspace = true } +# Third-party dependencies +primitive-types = { workspace = true } +thiserror = { workspace = true } + [dev-dependencies] miden-agglayer = { features = ["testing"], path = "." 
} [build-dependencies] -fs-err = { version = "3" } +fs-err = { workspace = true } miden-assembly = { workspace = true } miden-core = { workspace = true } miden-core-lib = { workspace = true } +miden-crypto = { workspace = true } miden-protocol = { features = ["testing"], workspace = true } miden-standards = { workspace = true } regex = { version = "1.11" } diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm new file mode 100644 index 0000000000..5fb27c3516 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_config.masm @@ -0,0 +1,223 @@ +use miden::core::crypto::hashes::poseidon2 +use miden::protocol::account_id +use miden::protocol::active_account +use miden::protocol::active_note +use miden::protocol::native_account + +# ERRORS +# ================================================================================================= + +const ERR_GER_NOT_FOUND = "GER not found in storage" +const ERR_FAUCET_NOT_REGISTERED="faucet is not registered in the bridge's faucet registry" +const ERR_SENDER_NOT_BRIDGE_ADMIN="note sender is not the bridge admin" +const ERR_SENDER_NOT_GER_MANAGER="note sender is not the global exit root manager" + +# CONSTANTS +# ================================================================================================= + +# Storage slots +const BRIDGE_ADMIN_SLOT=word("miden::agglayer::bridge::admin") +const GER_MANAGER_SLOT=word("miden::agglayer::bridge::ger_manager") +const GER_STORAGE_SLOT=word("miden::agglayer::bridge::ger") +const FAUCET_REGISTRY_SLOT=word("miden::agglayer::bridge::faucet_registry") + +# Flags +const GER_KNOWN_FLAG=1 +const IS_FAUCET_REGISTERED_FLAG=1 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Updates the Global Exit Root (GER) in the bridge account storage. +#! +#! 
Computes hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) and stores it in a map +#! with value [GER_KNOWN_FLAG, 0, 0, 0] to indicate the GER is known. +#! +#! Panics if the note sender is not the global exit root manager. +#! +#! Inputs: [GER_LOWER[4], GER_UPPER[4], pad(8)] +#! Outputs: [pad(16)] +#! +#! Invocation: call +pub proc update_ger + # assert the note sender is the global exit root manager. + exec.assert_sender_is_ger_manager + # => [GER_LOWER[4], GER_UPPER[4], pad(8)] + + # compute hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) + exec.poseidon2::merge + # => [GER_HASH, pad(12)] + + # prepare VALUE = [GER_KNOWN_FLAG, 0, 0, 0] + push.0.0.0.GER_KNOWN_FLAG + # => [GER_KNOWN_FLAG, 0, 0, 0, GER_HASH, pad(12)] + + swapw + # => [GER_HASH, VALUE, pad(12)] + + push.GER_STORAGE_SLOT[0..2] + # => [slot_id_prefix, slot_id_suffix, GER_HASH, VALUE, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, pad(12)] + dropw + # => [pad(16)] +end + +#! Asserts that the provided GER is valid (exists in storage). +#! +#! Computes hash(GER) = poseidon2::merge(GER_LOWER, GER_UPPER) and looks up the hash in +#! the GER storage map. Panics if the GER has never been stored. +#! +#! Inputs: [GER_ROOT[8]] +#! Outputs: [] +#! +#! Panics if: +#! - the GER is not found in storage. +#! +#! Invocation: exec +pub proc assert_valid_ger + # compute hash(GER) + exec.poseidon2::merge + # => [GER_HASH] + + push.GER_STORAGE_SLOT[0..2] + # => [slot_id_prefix, slot_id_suffix, GER_HASH] + + exec.active_account::get_map_item + # => [VALUE] + + # assert the GER is known in storage (VALUE = [GER_KNOWN_FLAG, 0, 0, 0]) + push.0.0.0.GER_KNOWN_FLAG + # => [GER_KNOWN_FLAG, 0, 0, 0, VALUE] + + assert_eqw.err=ERR_GER_NOT_FOUND + # => [] +end + +#! Registers a faucet in the bridge's faucet registry. +#! +#! Writes `KEY -> [1, 0, 0, 0]` into the `faucet_registry` map, where +#! `KEY = [0, 0, faucet_id_suffix, faucet_id_prefix]`. +#! +#! 
The sentinel value `[1, 0, 0, 0]` distinguishes registered faucets from +#! non-existent entries (SMTs return EMPTY_WORD for missing keys). +#! +#! Panics if the note sender is not the bridge admin. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix, pad(14)] +#! Outputs: [pad(16)] +#! +#! Invocation: call +pub proc register_faucet + # assert the note sender is the bridge admin. + exec.assert_sender_is_bridge_admin + # => [faucet_id_suffix, faucet_id_prefix, pad(14)] + + # set_map_item expects [slot_id(2), KEY, VALUE] and returns [OLD_VALUE]. + # Build KEY = [0, 0, suffix, prefix] and VALUE = [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0] + push.0.0.0.IS_FAUCET_REGISTERED_FLAG + # => [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0, suffix, prefix, pad(14)] + + movup.5 movup.5 + # => [suffix, prefix, IS_FAUCET_REGISTERED_FLAG, 0, 0, 0, pad(14)] + + push.0.0 + # => [[0, 0, suffix, prefix], [IS_FAUCET_REGISTERED_FLAG, 0, 0, 0], pad(14)] + + # Place slot ID on top + push.FAUCET_REGISTRY_SLOT[0..2] + # => [slot0, slot1, KEY, VALUE, pad(14)] + + exec.native_account::set_map_item + # => [OLD_VALUE, pad(14)] + + dropw + # => [pad(16)] +end + +#! Asserts that a faucet is registered in the bridge's faucet registry. +#! +#! Looks up the faucet ID in the faucet registry map and asserts the registration +#! flag is set. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix] +#! Outputs: [] +#! +#! Panics if: +#! - the faucet is not registered in the faucet registry. +#! +#! Invocation: exec +pub proc assert_faucet_registered + # Build KEY = [0, 0, faucet_id_suffix, faucet_id_prefix] + push.0.0 + # => [0, 0, faucet_id_suffix, faucet_id_prefix] + + push.FAUCET_REGISTRY_SLOT[0..2] + exec.active_account::get_map_item + # => [VALUE] + + # the stored word must be [1, 0, 0, 0] for registered faucets + assert.err=ERR_FAUCET_NOT_REGISTERED drop drop drop + # => [] +end + +#! Asserts that the note sender matches the bridge admin stored in account storage. +#! +#! 
Reads the bridge admin account ID from BRIDGE_ADMIN_SLOT and compares it against
#! the sender of the currently executing note. Panics if they do not match.
#!
#! Inputs: [pad(16)]
#! Outputs: [pad(16)]
#!
#! Panics if:
#! - the note sender does not match the bridge admin account ID.
#!
#! Invocation: exec
pub proc assert_sender_is_bridge_admin
    # => [pad(16)]

    # `get_item` returns the full storage word; per the stack comment below the
    # admin account ID occupies the two low elements and the two leading elements
    # are zero, so they are dropped.
    push.BRIDGE_ADMIN_SLOT[0..2]
    exec.active_account::get_item
    # => [0, 0, admin_suffix, admin_prefix, pad(16)]

    drop drop
    # => [admin_suffix, admin_prefix, pad(16)]

    exec.active_note::get_sender
    # => [sender_suffix, sender_prefix, admin_suffix, admin_prefix, pad(16)]

    # compare the two (suffix, prefix) account-ID pairs on top of the stack
    exec.account_id::is_equal
    assert.err=ERR_SENDER_NOT_BRIDGE_ADMIN
    # => [pad(16)]
end

#! Asserts that the note sender matches the global exit root manager stored in account storage.
#!
#! Reads the GER manager account ID from GER_MANAGER_SLOT and compares it against
#! the sender of the currently executing note. Panics if they do not match.
#!
#! Inputs: [pad(16)]
#! Outputs: [pad(16)]
#!
#! Panics if:
#! - the note sender does not match the GER manager account ID.
#!
#! Invocation: exec
pub proc assert_sender_is_ger_manager
    # => [pad(16)]

    # same pattern as `assert_sender_is_bridge_admin`, but against GER_MANAGER_SLOT
    push.GER_MANAGER_SLOT[0..2]
    exec.active_account::get_item
    # => [0, 0, mgr_suffix, mgr_prefix, pad(16)]

    drop drop
    # => [mgr_suffix, mgr_prefix, pad(16)]

    exec.active_note::get_sender
    # => [sender_suffix, sender_prefix, mgr_suffix, mgr_prefix, pad(16)]

    exec.account_id::is_equal
    assert.err=ERR_SENDER_NOT_GER_MANAGER
    # => [pad(16)]
end
diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm
new file mode 100644
index 0000000000..53a17ded37
--- /dev/null
+++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_in.masm
@@ -0,0 +1,362 @@
use miden::agglayer::bridge::bridge_config
use miden::agglayer::bridge::leaf_utils
use miden::agglayer::common::utils
use miden::core::crypto::hashes::keccak256
use miden::core::mem
use miden::core::word

# TYPE ALIASES
# =================================================================================================

type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt }
type DoubleWord = struct { word_lo: BeWord, word_hi: BeWord }
type MemoryAddress = u32

# ERRORS
# =================================================================================================

const ERR_BRIDGE_NOT_MAINNET = "bridge not mainnet"
const ERR_LEADING_BITS_NON_ZERO = "leading bits of global index must be zero"
const ERR_ROLLUP_INDEX_NON_ZERO = "rollup index must be zero for a mainnet deposit"
const ERR_SMT_ROOT_VERIFICATION_FAILED = "merkle proof verification failed: provided SMT root does not match the computed root"

# CONSTANTS
# =================================================================================================

# Memory pointers for proof data layout.
# The two SMT proofs occupy 2 * 256 felts starting at PROOF_DATA_PTR, followed by
# the 8-felt global index and the two 8-felt exit roots (see verify_leaf_bridge docs).
const PROOF_DATA_PTR = 0
const SMT_PROOF_LOCAL_EXIT_ROOT_PTR = 0 # local SMT proof is first
const GLOBAL_INDEX_PTR = PROOF_DATA_PTR + 2 * 256 # 512
const EXIT_ROOTS_PTR = GLOBAL_INDEX_PTR + 8 # 520
const MAINNET_EXIT_ROOT_PTR = EXIT_ROOTS_PTR # it's the first exit root

# the memory address where leaf data is stored for get_leaf_value
const LEAF_DATA_START_PTR = 0

# The offset of the first half of the current Keccak256 hash value in the local memory of the
# `calculate_root` procedure.
const CUR_HASH_LO_LOCAL = 0

# The offset of the second half of the current Keccak256 hash value in the local memory of the
# `calculate_root` procedure.
const CUR_HASH_HI_LOCAL = 4

# Data sizes
# 134 words = 536 felts = 256 + 256 + 8 + 8 + 8 (two SMT proofs, global index, two exit roots)
const PROOF_DATA_WORD_LEN = 134
# the number of words (4 felts each) in the advice map leaf data
const LEAF_DATA_NUM_WORDS = 8

# PUBLIC INTERFACE
# =================================================================================================

#! Computes the leaf value and verifies it against the AggLayer bridge state.
#!
#! Verification is delegated to `verify_leaf` to mimic the AggLayer Solidity contracts.
#! The steps involved in verification are:
#! 1. Compute the GER from the mainnet and rollup exit roots.
#! 2. Assert that the computed GER is valid (exists in storage).
#! 3. Process the global index to determine if it's a mainnet or rollup deposit.
#! 4. Verify the Merkle proof for the provided leaf-index tuple against the computed GER.
#!
#! Inputs:
#!   Operand stack: [LEAF_DATA_KEY, PROOF_DATA_KEY, pad(8)]
#!   Advice map: {
#!     PROOF_DATA_KEY => [
#!       smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH])
#!       smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH])
#!       globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts)
#!       mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts)
#!       rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts)
#!     ],
#!     LEAF_DATA_KEY => [
#!       leafType[1], // Leaf type (1 felt, uint32)
#!
#!       originNetwork[1], // Origin network identifier (1 felt, uint32)
#!       originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts)
#!       destinationNetwork[1], // Destination network identifier (1 felt, uint32)
#!       destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts)
#!       amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts)
#!       metadata[8], // ABI encoded metadata (8 felts, fixed size)
#!       padding[3], // padding (3 felts) - not used in the hash
#!     ],
#!   }
#!
#! Outputs: [pad(16)]
#!
#! Panics if:
#! - the computed GER is invalid (never injected).
#! - the global index is invalid.
#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid.
#!
#! Invocation: call
pub proc verify_leaf_bridge
    # get the leaf value. We have all the necessary leaf data in the advice map
    exec.get_leaf_value
    # => [LEAF_VALUE[8], PROOF_DATA_KEY, pad(8)]

    # drop one padding word so the stack matches verify_leaf's expected shape
    movupw.3 dropw
    # => [LEAF_VALUE[8], PROOF_DATA_KEY, pad(4)]

    # delegate proof verification
    exec.verify_leaf
    # => [pad(16)]
end

#! Assert the global index is valid for a mainnet deposit.
#!
#! Each element of the global index is a LE-packed u32 felt (as produced by
#! `bytes_to_packed_u32_felts` / `GlobalIndex::to_elements()`).
#!
#! Inputs: [GLOBAL_INDEX[8]]
#! Outputs: [leaf_index]
#!
#! Panics if:
#! - the leading bits of the global index are not zero.
#! - the mainnet flag is not 1.
#! - the rollup index is not 0.
#!
#! Invocation: exec
pub proc process_global_index_mainnet
    # for v0.1, let's only implement the mainnet branch
    # the top 191 bits of the global index are zero
    # (5 zero limbs = 160 bits; the remaining 31 bits above the mainnet flag are
    # covered by the `== 1` assertion on the next limb below)
    repeat.5 assertz.err=ERR_LEADING_BITS_NON_ZERO end

    # the next element is the mainnet flag (LE-packed u32)
    # byte-swap to get the BE value, then assert it is exactly 1
    # => [mainnet_flag_le, rollup_index_le, leaf_index_le, ...]
    exec.utils::swap_u32_bytes
    assert.err=ERR_BRIDGE_NOT_MAINNET

    # the next element is the rollup index, must be zero for a mainnet deposit
    # (zero is byte-order-independent, so no swap needed)
    assertz.err=ERR_ROLLUP_INDEX_NON_ZERO

    # the leaf index is the last element; byte-swap from LE to BE to get the actual index
    exec.utils::swap_u32_bytes
    # => [leaf_index]
end

#! Computes the Global Exit Tree (GET) root from the mainnet and rollup exit roots.
#!
#! The mainnet exit root is expected at `exit_roots_ptr` and
#! the rollup exit root is expected at `exit_roots_ptr + 8`.
#!
#! Inputs: [exit_roots_ptr]
#! Outputs: [GER_ROOT[8]]
#!
#! Invocation: exec
pub proc compute_ger(exit_roots_ptr: MemoryAddress) -> DoubleWord
    # hash the 64 contiguous bytes (two 32-byte roots) starting at exit_roots_ptr
    push.64 swap
    # => [exit_roots_ptr, len_bytes]
    exec.keccak256::hash_bytes
    # => [GER_ROOT[8]]
end

#! Verifies a Merkle proof for a leaf value against a root.
#!
#! Verifies that the root, computed using the provided Merkle path and the leaf with its index,
#! matches the provided root.
#!
#! Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx, expected_root_ptr]
#! Outputs: [verification_flag]
#!
#! Where:
#! - expected_root_ptr is the pointer to the memory where the expected SMT root is stored.
#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path.
#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is
#!   represented as 32 Keccak256Digest values (64 words).
#! - leaf_idx is the index of the provided leaf in the SMT.
#! - [ROOT_LO, ROOT_HI] is the calculated root.
#! - verification_flag is the binary flag indicating whether the verification was successful.
pub proc verify_merkle_proof(
    leaf_value: DoubleWord,
    merkle_path_ptr: MemoryAddress,
    leaf_idx: u32,
    expected_root_ptr: MemoryAddress
) -> i1
    # calculate the root of the SMT
    exec.calculate_root
    # => [CALCULATED_ROOT_LO, CALCULATED_ROOT_HI, expected_root_ptr]

    # load the expected root onto the stack
    movup.8 exec.utils::mem_load_double_word
    # => [EXPECTED_ROOT_LO, EXPECTED_ROOT_HI, CALCULATED_ROOT_LO, CALCULATED_ROOT_HI]

    # assert the roots are equal
    # swapw.3 pairs CALCULATED_ROOT_HI with EXPECTED_ROOT_HI on top for the first word::eq
    swapw.3 exec.word::eq
    # => [exp_hi_equal_calc_hi, CALCULATED_ROOT_LO, EXPECTED_ROOT_LO]

    # compare the low words and AND both per-word equality flags together
    movdn.8 exec.word::eq and
    # => [verification_flag]
end

# HELPER PROCEDURES
# =================================================================================================

#! Given the leaf data key, loads the leaf data from advice map to memory, packs the data in-place,
#! and computes the leaf value by hashing the packed bytes.
#!
#! Inputs:
#!   Operand stack: [LEAF_DATA_KEY]
#!   Advice map: {
#!     LEAF_DATA_KEY => [
#!       leafType[1], // Leaf type (1 felt, uint8)
#!       originNetwork[1], // Origin network identifier (1 felt, uint32)
#!       originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts)
#!       destinationNetwork[1], // Destination network identifier (1 felt, uint32)
#!       destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts)
#!       amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts)
#!       metadata_hash[8], // Metadata hash (8 felts, bytes32 as 8 u32 felts)
#!       padding[3], // padding (3 felts) - not used in the hash
#!     ],
#!   }
#! Outputs: [LEAF_VALUE[8]]
#!
#! Invocation: exec
pub proc get_leaf_value(leaf_data_key: BeWord) -> DoubleWord
    # push the advice-map entry for LEAF_DATA_KEY onto the advice stack
    adv.push_mapval
    # => [LEAF_DATA_KEY]

    # pipe LEAF_DATA_NUM_WORDS words of leaf data into memory at LEAF_DATA_START_PTR
    push.LEAF_DATA_START_PTR push.LEAF_DATA_NUM_WORDS
    exec.mem::pipe_preimage_to_memory drop
    # => []

    # compute the leaf value for elements in memory starting at LEAF_DATA_START_PTR
    push.LEAF_DATA_START_PTR
    exec.leaf_utils::compute_leaf_value
    # => [LEAF_VALUE[8]]
end

#! Verify leaf and checks that it has not been claimed.
#!
#! Inputs:
#!   Operand stack: [LEAF_VALUE[8], PROOF_DATA_KEY]
#!
#! Outputs: []
#!
#! Panics if:
#! - the computed GER is invalid (never injected).
#! - the global index is invalid.
#! - the Merkle proof for the provided leaf-index tuple against the computed GER is invalid.
#!
#! Invocation: exec
proc verify_leaf
    movupw.2
    # load proof data from the advice map into memory
    adv.push_mapval
    # => [PROOF_DATA_KEY, LEAF_VALUE[8]]

    push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR push.PROOF_DATA_WORD_LEN
    exec.mem::pipe_preimage_to_memory drop

    # 1. compute GER from mainnet + rollup exit roots
    push.EXIT_ROOTS_PTR
    # => [exit_roots_ptr, LEAF_VALUE[8]]
    exec.compute_ger
    # => [GER[8], LEAF_VALUE[8]]

    # 2. assert the GER is valid
    exec.bridge_config::assert_valid_ger
    # => [LEAF_VALUE[8]]

    # 3. load global index from memory (two words, low word first)
    padw mem_loadw_le.GLOBAL_INDEX_PTR
    padw push.GLOBAL_INDEX_PTR add.4 mem_loadw_le swapw
    # => [GLOBAL_INDEX[8], LEAF_VALUE[8]]

    # to see if we're dealing with a deposit from mainnet or from a rollup, process the global index
    # TODO currently only implemented for mainnet deposits (mainnet flag must be 1)
    exec.process_global_index_mainnet
    # => [leaf_index, LEAF_VALUE[8]]

    # load the pointers to the merkle proof and root, to pass to `verify_merkle_proof`
    push.MAINNET_EXIT_ROOT_PTR swap
    push.SMT_PROOF_LOCAL_EXIT_ROOT_PTR
    # => [smt_proof_ptr, leaf_index, mainnet_exit_root_ptr, LEAF_VALUE[8]]

    # prepare the stack for the verify_merkle_proof procedure: move the pointers deep in the stack
    movdn.10 movdn.10 movdn.10
    # => [LEAF_VALUE[8], smt_proof_ptr, leaf_index, mainnet_exit_root_ptr]

    exec.verify_merkle_proof
    # => [verification_flag]

    # verify_merkle_proof procedure returns `true` if the verification was successful and `false`
    # otherwise. Assert that `true` was returned.
    assert.err=ERR_SMT_ROOT_VERIFICATION_FAILED
    # => []
end

#! Computes the root of the SMT based on the provided Merkle path, leaf value and leaf index.
#!
#! Inputs: [LEAF_VALUE_LO, LEAF_VALUE_HI, merkle_path_ptr, leaf_idx]
#! Outputs: [ROOT_LO, ROOT_HI]
#!
#! Where:
#! - [LEAF_VALUE_LO, LEAF_VALUE_HI] is the leaf for the provided Merkle path.
#! - merkle_path_ptr is the pointer to the memory where the merkle path is stored. This path is
#!   represented as 32 Keccak256Digest values (64 words).
#! - leaf_idx is the index of the provided leaf in the SMT.
#! - [ROOT_LO, ROOT_HI] is the calculated root.
@locals(8) # current hash
proc calculate_root(
    leaf_value: DoubleWord,
    merkle_path_ptr: MemoryAddress,
    leaf_idx: u32
) -> DoubleWord
    # Local memory stores the current hash. It is initialized to the leaf value
    loc_storew_le.CUR_HASH_LO_LOCAL dropw loc_storew_le.CUR_HASH_HI_LOCAL dropw
    # => [merkle_path_ptr, leaf_idx]

    # Merkle path is guaranteed to contain 32 nodes
    repeat.32
        # => [merkle_path_ptr, leaf_idx]

        # load the Merkle path node word-by-word in LE-felt order
        padw dup.4 mem_loadw_le
        # => [PATH_NODE_LO, merkle_path_ptr, leaf_idx]
        padw dup.8 add.4 mem_loadw_le
        swapw
        # => [PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx]

        # advance merkle_path_ptr by 8 (two words = 8 element addresses)
        movup.8 add.8 movdn.8
        # => [PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr+8, leaf_idx]

        # determine whether the last `leaf_idx` bit is 1 (is `leaf_idx` odd)
        dup.9 u32and.1
        # => [is_odd, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr+8, leaf_idx]

        # load the hash respective to the current height from the local memory
        padw loc_loadw_le.CUR_HASH_HI_LOCAL padw loc_loadw_le.CUR_HASH_LO_LOCAL
        # => [CURR_HASH_LO, CURR_HASH_HI, is_odd, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx]

        # move the `is_odd` flag to the top of the stack
        movup.8
        # => [is_odd, CURR_HASH_LO, CURR_HASH_HI, PATH_NODE_LO, PATH_NODE_HI, merkle_path_ptr, leaf_idx]

        # if is_odd flag equals 1 (`leaf_idx` is odd), change the order of the nodes on the stack
        if.true
            # rearrange the hashes: current position of the hash is odd, so it should be on the
            # right
            swapdw
            # => [PATH_NODE_LO, PATH_NODE_HI, CURR_HASH_LO, CURR_HASH_HI, merkle_path_ptr, leaf_idx]
        end

        # compute the next height hash
        exec.keccak256::merge
        # => [CURR_HASH_LO', CURR_HASH_HI', merkle_path_ptr, leaf_idx]

        # store the resulting hash to the local memory and drop the hash words
        loc_storew_le.CUR_HASH_LO_LOCAL dropw
        loc_storew_le.CUR_HASH_HI_LOCAL dropw
        # => [merkle_path_ptr, leaf_idx]

        # update the `leaf_idx` (shift it right by 1 bit)
        swap u32shr.1 swap
        # => [merkle_path_ptr, leaf_idx>>1]
    end

    # after all 32 hashes have been computed, the current hash stored in local memory represents
    # the root of the SMT, which should be returned
    drop drop
    padw loc_loadw_le.CUR_HASH_HI_LOCAL padw loc_loadw_le.CUR_HASH_LO_LOCAL
    # => [ROOT_LO, ROOT_HI]
end
diff --git a/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm b/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm
new file mode 100644
index 0000000000..935e079d9c
--- /dev/null
+++ b/crates/miden-agglayer/asm/agglayer/bridge/bridge_out.masm
@@ -0,0 +1,536 @@
use miden::protocol::active_note
use miden::protocol::active_account
use miden::protocol::asset
use miden::protocol::native_account
use miden::protocol::note
use miden::protocol::tx
use miden::standards::data_structures::double_word_array
use miden::standards::attachments::network_account_target
use miden::standards::note_tag::DEFAULT_TAG
use miden::standards::note::execution_hint::ALWAYS
use miden::protocol::output_note
use miden::core::crypto::hashes::keccak256
use miden::core::crypto::hashes::poseidon2
use miden::core::word
use miden::agglayer::common::utils
use miden::agglayer::faucet -> agglayer_faucet
use miden::agglayer::bridge::bridge_config
use miden::agglayer::bridge::leaf_utils
use miden::agglayer::bridge::mmr_frontier32_keccak


# TYPE ALIASES
# =================================================================================================

type EthereumAddressFormat = struct @bigendian { a: felt, b: felt, c: felt, d: felt, e: felt }
type MemoryAddress = u32

# CONSTANTS
# =================================================================================================

# Storage slot constants for the LET (Local Exit Tree).
# The frontier is stored as a double-word array in a map slot.
# The root and num_leaves are stored in separate value slots.
const LOCAL_EXIT_TREE_SLOT=word("miden::agglayer::let")
const LET_ROOT_LO_SLOT=word("miden::agglayer::let::root_lo")
const LET_ROOT_HI_SLOT=word("miden::agglayer::let::root_hi")
const LET_NUM_LEAVES_SLOT=word("miden::agglayer::let::num_leaves")

# Memory pointers
const LEAF_DATA_START_PTR=44
# Memory pointer for loading the LET (Local Exit Tree) frontier into memory.
# The memory layout at this address matches what append_and_update_frontier expects:
# [num_leaves, 0, 0, 0, [[FRONTIER_NODE_LO, FRONTIER_NODE_HI]; 32]]
const LET_FRONTIER_MEM_PTR=100

# Leaf data field offsets (relative to LEAF_DATA_START_PTR)
const LEAF_TYPE_OFFSET=0
const ORIGIN_NETWORK_OFFSET=1
const ORIGIN_TOKEN_ADDRESS_OFFSET=2
const DESTINATION_NETWORK_OFFSET=7
const DESTINATION_ADDRESS_OFFSET=8
const AMOUNT_OFFSET=13
const METADATA_HASH_OFFSET=21
const PADDING_OFFSET=29

# bridge_out memory locals
const BRIDGE_OUT_BURN_ASSET_LOC=0
const DESTINATION_ADDRESS_0_LOC=8
const DESTINATION_ADDRESS_1_LOC=9
const DESTINATION_ADDRESS_2_LOC=10
const DESTINATION_ADDRESS_3_LOC=11
const DESTINATION_ADDRESS_4_LOC=12
const DESTINATION_NETWORK_LOC=13

# create_burn_note memory locals
const CREATE_BURN_NOTE_BURN_ASSET_LOC=0
const ATTACHMENT_LOC=8
const ATTACHMENT_SCHEME_LOC=12
const ATTACHMENT_KIND_LOC=13

# Other constants
const LEAF_TYPE_ASSET=0
const PUBLIC_NOTE=1
const BURN_NOTE_NUM_STORAGE_ITEMS=0

# PUBLIC INTERFACE
# =================================================================================================

#! Bridges an asset out via the AggLayer.
#!
#! This procedure handles the complete bridge-out operation:
#! 1. Validates the asset's faucet is registered in the bridge's faucet registry
#! 2. Queries the faucet for origin asset conversion data via FPI
#! 3. Builds the leaf data (origin token, destination, amount, metadata)
#! 4. Computes Keccak hash and adds it to the MMR frontier
#! 5. Creates a BURN note with the bridged out asset
#!
#! Inputs: [ASSET_KEY, ASSET_VALUE, dest_network_id, dest_address(5), pad(2)]
#! Outputs: [pad(16)]
#!
#! Where:
#! - ASSET_KEY is the vault key of the asset to be bridged out.
#! - ASSET_VALUE is the value of the asset to be bridged out.
#! - dest_network_id is the u32 destination network/chain ID.
#! - dest_address(5) are 5 u32 values representing a 20-byte Ethereum address.
#!
#! Invocation: call
@locals(14)
pub proc bridge_out
    # => [ASSET_KEY, ASSET_VALUE, dest_network_id, dest_address(5), pad(2)]

    # Save ASSET to local memory for later BURN note creation
    locaddr.BRIDGE_OUT_BURN_ASSET_LOC
    exec.asset::store
    # => [dest_network_id, dest_address(5), pad(10)]

    # stash the destination network and the 5 destination-address limbs in locals
    loc_store.DESTINATION_NETWORK_LOC
    loc_store.DESTINATION_ADDRESS_0_LOC
    loc_store.DESTINATION_ADDRESS_1_LOC
    loc_store.DESTINATION_ADDRESS_2_LOC
    loc_store.DESTINATION_ADDRESS_3_LOC
    loc_store.DESTINATION_ADDRESS_4_LOC
    # => [pad(16)]

    # --- 1. Validate faucet registration and convert asset via FPI ---
    locaddr.BRIDGE_OUT_BURN_ASSET_LOC
    exec.asset::load
    # => [ASSET_KEY, ASSET_VALUE, pad(16)]

    exec.convert_asset
    # => [AMOUNT_U256(8), origin_addr(5), origin_network, pad(16)]

    # --- 2. Write all leaf data fields to memory ---

    # Store scaled AMOUNT (8 felts)
    push.LEAF_DATA_START_PTR push.AMOUNT_OFFSET add
    movdn.8
    exec.utils::mem_store_double_word_unaligned
    # => [origin_addr(5), origin_network, pad(16)]

    # Store origin_token_address (5 felts)
    push.LEAF_DATA_START_PTR push.ORIGIN_TOKEN_ADDRESS_OFFSET add
    exec.write_address_to_memory
    # => [origin_network, pad(16)]

    # Store origin_network
    push.LEAF_DATA_START_PTR push.ORIGIN_NETWORK_OFFSET add
    mem_store
    # => [pad(16)]

    # Store destination_network
    loc_load.DESTINATION_NETWORK_LOC
    push.LEAF_DATA_START_PTR push.DESTINATION_NETWORK_OFFSET add
    mem_store
    # => [pad(16)]

    # Store destination_address (reload limbs in reverse so limb 0 ends up on top)
    loc_load.DESTINATION_ADDRESS_4_LOC
    loc_load.DESTINATION_ADDRESS_3_LOC
    loc_load.DESTINATION_ADDRESS_2_LOC
    loc_load.DESTINATION_ADDRESS_1_LOC
    loc_load.DESTINATION_ADDRESS_0_LOC
    push.LEAF_DATA_START_PTR push.DESTINATION_ADDRESS_OFFSET add
    exec.write_address_to_memory
    # => [pad(16)]

    # TODO construct metadata hash
    padw padw
    # => [METADATA_HASH(8), pad(16)]
    push.LEAF_DATA_START_PTR push.METADATA_HASH_OFFSET add
    movdn.8
    # => [METADATA_HASH(8), metadata_hash_ptr, pad(16)]
    exec.utils::mem_store_double_word_unaligned

    # Explicitly zero the 3 padding felts after METADATA_HASH for
    # leaf_utils::pack_leaf_data
    push.0
    push.LEAF_DATA_START_PTR push.PADDING_OFFSET add
    mem_store

    push.0
    push.LEAF_DATA_START_PTR push.PADDING_OFFSET add.1 add
    mem_store

    push.0
    push.LEAF_DATA_START_PTR push.PADDING_OFFSET add.2 add
    mem_store
    # => [pad(16)]

    # Leaf type (byte-swapped before storing, matching the LE-packed field encoding)
    push.LEAF_TYPE_ASSET
    exec.utils::swap_u32_bytes
    push.LEAF_DATA_START_PTR push.LEAF_TYPE_OFFSET add
    # => [leaf_type_ptr, leaf_type, pad(16)]
    mem_store

    # --- 3. Compute leaf value and add to MMR frontier ---
    push.LEAF_DATA_START_PTR
    exec.add_leaf_bridge
    # => [pad(16)]

    # --- 4. Create BURN output note for ASSET ---
    locaddr.BRIDGE_OUT_BURN_ASSET_LOC
    exec.asset::load
    # => [ASSET_KEY, ASSET_VALUE, pad(16)]
    exec.create_burn_note
    # => [pad(16)]
end

# HELPER PROCEDURES
# =================================================================================================

#! Validates that a faucet is registered in the bridge's faucet registry, then performs
#! an FPI call to the faucet's `asset_to_origin_asset` procedure to obtain the scaled
#! amount, origin token address, and origin network.
#!
#! Inputs: [ASSET_KEY, ASSET_VALUE]
#! Outputs: [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network]
#!
#! Where:
#! - ASSET_KEY is the vault key of the asset to be bridged out.
#! - ASSET_VALUE is the value of the asset to be bridged out.
#! - AMOUNT_U256: scaled amount as 8 u32 limbs (little-endian)
#! - origin_addr: origin token address (5 u32 felts)
#! - origin_network: origin network identifier
#!
#! Panics if:
#! - The faucet is not registered in the faucet registry.
#! - The FPI call to asset_to_origin_asset fails.
#!
#! Invocation: exec
proc convert_asset
    # --- Step 1: Assert faucet is registered ---
    swapw
    exec.asset::fungible_value_into_amount
    movdn.4
    # => [ASSET_KEY, amount]

    exec.asset::key_into_faucet_id
    # => [faucet_id_suffix, faucet_id_prefix, amount]

    # duplicate the faucet ID pair so it survives the registration check
    dup.1 dup.1
    exec.bridge_config::assert_faucet_registered
    # => [faucet_id_suffix, faucet_id_prefix, amount]

    # --- Step 2: FPI to faucet's asset_to_origin_asset ---

    procref.agglayer_faucet::asset_to_origin_asset
    # => [PROC_MAST_ROOT, faucet_id_suffix, faucet_id_prefix, amount]

    # Move faucet_id above PROC_MAST_ROOT
    movup.5 movup.5
    # => [faucet_id_prefix, faucet_id_suffix, PROC_MAST_ROOT, amount]

    exec.tx::execute_foreign_procedure
    # => [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network, pad(2)]

    # drop the 2 trailing padding elements
    movup.15 drop movup.14 drop
    # => [AMOUNT_U256[0](4), AMOUNT_U256[1](4), origin_addr(5), origin_network]
end

#! Computes the leaf value from the leaf data in memory and adds it to the MMR frontier.
#!
#! Inputs: [leaf_data_start_ptr]
#! Outputs: []
#!
#! Memory layout (starting at leaf_data_start_ptr):
#! [
#!   leafType[1],
#!   originNetwork[1],
#!   originTokenAddress[5],
#!   destinationNetwork[1],
#!   destinationAddress[5],
#!   amount[8],
#!   metadataHash[8],
#!   padding[3],
#! ]
#!
#! Invocation: exec
proc add_leaf_bridge(leaf_data_start_ptr: MemoryAddress)
    exec.leaf_utils::compute_leaf_value
    # => [LEAF_VALUE_LO, LEAF_VALUE_HI]

    # Load the LET frontier from storage into memory at LET_FRONTIER_MEM_PTR
    exec.load_let_frontier_to_memory
    # => [LEAF_VALUE_LO, LEAF_VALUE_HI]

    # Push frontier pointer below the leaf value
    push.LET_FRONTIER_MEM_PTR movdn.8
    # => [LEAF_VALUE_LO, LEAF_VALUE_HI, let_frontier_ptr]

    # Append the leaf to the frontier and compute the new root
    exec.mmr_frontier32_keccak::append_and_update_frontier
    # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count]

    # Save the root and num_leaves to their value slots
    exec.save_let_root_and_num_leaves
    # => []

    # Write the updated frontier from memory back to the map
    exec.save_let_frontier_to_storage
    # => []
end

#! Loads the LET (Local Exit Tree) frontier from account storage into memory.
#!
#! The num_leaves is read from its dedicated value slot, and the 32 frontier entries are read
#! from the LET map slot (double-word array, indices 0..31). The data is placed into memory at
#! LET_FRONTIER_MEM_PTR, matching the layout expected by append_and_update_frontier:
#! [num_leaves, 0, 0, 0, [[FRONTIER_NODE_LO, FRONTIER_NODE_HI]; 32]]
#!
#! Empty (uninitialized) map entries return zeros, which is the correct initial state for the
#! frontier when there are no leaves.
#!
#! Inputs: []
#! Outputs: []
#!
#! Invocation: exec
proc load_let_frontier_to_memory
    # 1. Load num_leaves from its value slot
    push.LET_NUM_LEAVES_SLOT[0..2]
    exec.active_account::get_item
    # => [num_leaves_word]

    push.LET_FRONTIER_MEM_PTR mem_storew_le dropw
    # => []

    # 2. Load 32 frontier double-word entries from the map via double_word_array::get
    push.0
    # => [h=0]

    repeat.32
        # => [h]

        # Read frontier[h] as a double word from the map
        dup push.LOCAL_EXIT_TREE_SLOT[0..2]
        exec.double_word_array::get
        # => [VALUE_0, VALUE_1, h]

        # Compute memory address and store the double word
        # (entry h lives at LET_FRONTIER_MEM_PTR + 4 + h * 8; the +4 skips the
        # num_leaves word stored above)
        dup.8 mul.8 add.LET_FRONTIER_MEM_PTR add.4 movdn.8
        # => [VALUE_0, VALUE_1, mem_addr, h]
        exec.utils::mem_store_double_word
        dropw dropw drop
        # => [h]

        add.1
        # => [h+1]
    end

    drop
    # => []
end

#! Saves the Local Exit Root and num_leaves to their dedicated value slots.
#!
#! Inputs: [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count]
#! Outputs: []
#!
#! Invocation: exec
proc save_let_root_and_num_leaves
    # 1. Save root lo word to its value slot
    push.LET_ROOT_LO_SLOT[0..2]
    exec.native_account::set_item
    dropw
    # => [NEW_ROOT_HI, new_leaf_count]

    # 2. Save root hi word to its value slot
    push.LET_ROOT_HI_SLOT[0..2]
    exec.native_account::set_item
    dropw
    # => [new_leaf_count]

    # 3. Save new_leaf_count to its value slot as [new_leaf_count, 0, 0, 0]
    push.0.0.0 movup.3
    # => [new_leaf_count, 0, 0, 0]
    push.LET_NUM_LEAVES_SLOT[0..2]
    exec.native_account::set_item
    dropw
    # => []
end

#! Writes the 32 frontier entries from memory back to the LET map slot.
#!
#! Each frontier entry is a double word (Keccak256 digest) stored at
#! LET_FRONTIER_MEM_PTR + 4 + h * 8, and is written to the map at double_word_array index h.
#!
#! Inputs: []
#! Outputs: []
#!
#! Invocation: exec
proc save_let_frontier_to_storage
    push.0
    # => [h=0]

    repeat.32
        # => [h]

        # Load frontier[h] double word from memory
        dup mul.8 add.LET_FRONTIER_MEM_PTR add.4
        exec.utils::mem_load_double_word
        # => [VALUE_0, VALUE_1, h]

        # Write it back to the map at index h
        dup.8 push.LOCAL_EXIT_TREE_SLOT[0..2]
        exec.double_word_array::set
        dropw dropw
        # => [h]

        add.1
        # => [h+1]
    end

    drop
    # => []
end

#!
#! Writes an Ethereum address (5 u32 felts) to consecutive memory locations.
#!
#! Inputs: [mem_ptr, address(5)]
#! Outputs: []
#!
#! Invocation: exec
proc write_address_to_memory(mem_ptr: MemoryAddress, address: EthereumAddressFormat)
    # each step: duplicate the pointer, store the next limb, recover the pointer,
    # and advance it by one element address
    dup movdn.6 mem_store movup.4 add.1
    # => [mem_ptr+1, address(4)]

    dup movdn.5 mem_store movup.3 add.1
    # => [mem_ptr+2, address(3)]

    dup movdn.4 mem_store movup.2 add.1
    # => [mem_ptr+3, address(2)]

    dup movdn.3 mem_store swap add.1
    # => [mem_ptr+4, address(1)]

    mem_store
end

#! Computes the SERIAL_NUM of the outputted BURN note.
#!
#! The serial number is computed as hash(B2AGG_SERIAL_NUM, ASSET_KEY).
#!
#! Inputs: [ASSET_KEY]
#! Outputs: [SERIAL_NUM]
#!
#! Where:
#! - ASSET_KEY is the vault key from which to compute the burn note serial number.
#! - SERIAL_NUM is the computed serial number for the BURN note.
#!
#! Invocation: exec
proc compute_burn_note_serial_num
    exec.active_note::get_serial_number
    # => [B2AGG_SERIAL_NUM, ASSET_KEY]

    exec.poseidon2::merge
    # => [SERIAL_NUM]
end

#! Creates a BURN note for the specified asset with a NetworkAccountTarget attachment.
#!
#! This procedure creates an output note that represents a burn operation for the given asset.
#! The note targets the faucet account via a NetworkAccountTarget attachment.
#!
#! Inputs: [ASSET_KEY, ASSET_VALUE]
#! Outputs: []
#!
#! Where:
#! - ASSET_KEY is the vault key of the asset to be burnt.
#! - ASSET_VALUE is the value of the asset to be burnt.
#!
#! Invocation: exec
@locals(14)
proc create_burn_note
    swapw dupw.1
    # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY]

    locaddr.CREATE_BURN_NOTE_BURN_ASSET_LOC
    exec.asset::store
    # => [ASSET_KEY]

    # NOTE(review): this uses `key_to_faucet_id` while `convert_asset` uses
    # `key_into_faucet_id`; per the stack comments this variant preserves ASSET_KEY
    # on the stack — confirm both procedures exist and the naming is intentional.
    exec.asset::key_to_faucet_id
    # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY]

    # Create NetworkAccountTarget attachment for the faucet
    push.ALWAYS movdn.2
    # => [faucet_id_suffix, faucet_id_prefix, exec_hint, ASSET_KEY]

    exec.network_account_target::new
    # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT, ASSET_KEY]

    # Save attachment data to locals
    loc_store.ATTACHMENT_SCHEME_LOC
    loc_store.ATTACHMENT_KIND_LOC
    loc_storew_le.ATTACHMENT_LOC dropw
    # => [ASSET_KEY]

    exec.compute_burn_note_serial_num
    # => [SERIAL_NUM]

    procref.::miden::standards::notes::burn::main swapw
    # => [SERIAL_NUM, SCRIPT_ROOT]

    push.BURN_NOTE_NUM_STORAGE_ITEMS push.0
    # => [storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT]

    exec.note::build_recipient
    # => [RECIPIENT]

    push.PUBLIC_NOTE
    push.DEFAULT_TAG
    # => [tag, note_type, RECIPIENT]

    # pad the stack before the call invocation
    push.0 movdn.6 push.0 movdn.6 padw padw swapdw
    # => [tag, note_type, RECIPIENT, pad(10)]

    call.output_note::create
    # => [note_idx, pad(15)]

    # duplicate note_idx: one for set_attachment, one for add_asset
    dup
    swapw loc_loadw_le.ATTACHMENT_LOC
    # => [NOTE_ATTACHMENT, note_idx, note_idx, pad(11)]

    loc_load.ATTACHMENT_KIND_LOC
    loc_load.ATTACHMENT_SCHEME_LOC
    # => [scheme, kind, NOTE_ATTACHMENT, note_idx, note_idx, pad(11)]

    movup.6
    # => [note_idx, scheme, kind, NOTE_ATTACHMENT, note_idx, pad(11)]

    exec.output_note::set_attachment
    # => [note_idx, pad(11)]

    locaddr.CREATE_BURN_NOTE_BURN_ASSET_LOC
    exec.asset::load
    # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(11)]

    exec.output_note::add_asset
    # => [pad(11)]

    dropw dropw drop drop drop
    # => []
end
diff --git
a/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm b/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm
new file mode 100644
index 0000000000..01ecacfba4
--- /dev/null
+++ b/crates/miden-agglayer/asm/agglayer/bridge/canonical_zeros.masm
@@ -0,0 +1,142 @@
# This file is generated by build.rs, do not modify

# This file contains the canonical zeros for the Keccak hash function.
# Zero of height `n` (ZERO_N) is the root of the binary tree of height `n` with leaves equal zero.
#
# Since the Keccak hash is represented by eight u32 values, each constant consists of two Words.

const ZERO_0_L = [0, 0, 0, 0]
const ZERO_0_R = [0, 0, 0, 0]

const ZERO_1_L = [3056087725, 3453220726, 1151697986, 2532382527]
const ZERO_1_R = [2447652395, 2990541491, 3846789184, 3042949783]

const ZERO_2_L = [1360642484, 2406448277, 4132056164, 1186125340]
const ZERO_2_R = [3704028736, 3993486975, 2661877378, 806175122]

const ZERO_3_L = [2746866977, 1063027030, 3055947948, 836748766]
const ZERO_3_R = [3686444836, 2778422344, 2319049635, 2243606276]

const ZERO_4_L = [3010037733, 4058651434, 1513564138, 224004420]
const ZERO_4_R = [3462706719, 3881358125, 2360852476, 1150525734]

const ZERO_5_L = [3206459406, 1344794057, 3386889228, 523052921]
const ZERO_5_R = [2680951561, 2114802790, 293668224, 768598281]

const ZERO_6_L = [3173153928, 1087590535, 1715252246, 756088757]
const ZERO_6_R = [4134788524, 283579568, 578821813, 1746508463]

const ZERO_7_L = [1459738623, 4234379492, 91932979, 40140559]
const ZERO_7_R = [1818541875, 613780937, 3475749318, 2205136186]

const ZERO_8_L = [1607231384, 2473269631, 2128798138, 611590243]
const ZERO_8_R = [4069577285, 1227307046, 3321779339, 2941712185]

const ZERO_9_L = [3855940302, 3113795592, 4275626407, 4216691121]
const ZERO_9_R = [1972812290, 1903710296, 1154705673, 3763621903]

const ZERO_10_L = [2134826233, 1356863200, 861991663, 3567589455]
const ZERO_10_R = [2182953470, 4112065289, 774786966, 2781069751]

const ZERO_11_L = [1228583416, 167150306, 866654147, 1838648827]
const ZERO_11_R = [1467765009, 2720076317, 4149924453, 2465787000]

const ZERO_12_L = [3469119540, 3960096235, 2195882716, 270336915]
const ZERO_12_R = [4164671431, 79648606, 171349786, 2631517602]

const ZERO_13_L = [3649232833, 326416580, 82830058, 3551827087]
const ZERO_13_R = [1944734805, 2047814617, 1895984889, 3152187846]

const ZERO_14_L = [3618465628, 49531590, 3755895333, 3658789242]
const ZERO_14_R = [1894305546, 2762164692, 3598841737, 3435063385]

const ZERO_15_L = [2681109466, 3055060558, 843132861, 3700193742]
const ZERO_15_R = [3790037114, 2574387782, 708101859, 3525744215]

const ZERO_16_L = [266679079, 4207046226, 824943129, 4065390056]
const ZERO_16_R = [4194160956, 3981742412, 2718529082, 530120689]

const ZERO_17_L = [3367359457, 3833704967, 3603315816, 1543068721]
const ZERO_17_R = [1099357850, 598998238, 650244466, 2062522595]

const ZERO_18_L = [181284186, 3144187786, 2400147060, 746357617]
const ZERO_18_R = [4157324078, 2923625471, 1072797208, 2692314236]

const ZERO_19_L = [3056102068, 4164965877, 1039549588, 1032730592]
const ZERO_19_R = [2665487122, 3986541574, 1491476508, 2691355510]

const ZERO_20_L = [1167482566, 3062253412, 719184416, 4242360534]
const ZERO_20_R = [1535003327, 3478010394, 1732703975, 3803705507]

const ZERO_21_L = [2290434548, 1168258541, 971767692, 4045815225]
const ZERO_21_R = [1001466509, 3853444828, 899251086, 3655320222]

const ZERO_22_L = [3692469338, 418371072, 1866109879, 3411854989]
const ZERO_22_R = [946955861, 3934089079, 3698331664, 2011403911]

const ZERO_23_L = [1078982733, 696388782, 2651248336, 2805567324]
const ZERO_23_R = [2053609922, 4234662665, 3168994683, 1390808632]

const ZERO_24_L = [2502281165, 276516087, 4292988995, 1681176506]
const ZERO_24_R = [4220355468, 1910056709, 565969590, 4011431532]

const ZERO_25_L = [3378167562, 1475191156, 2768897524, 1956437264]
const ZERO_25_R = [2066155765, 999806777, 3318538162, 2371989742]

const ZERO_26_L = [74763704, 4030198639, 2385297319, 1678762243]
const ZERO_26_R = [2038831148, 1786802573, 3649628337, 3498569445]

const ZERO_27_L = [1431735427, 3418759627, 1513828739, 3748991331]
const ZERO_27_R = [1916245748, 2165369292, 3360338824, 516194684]

const ZERO_28_L = [3722718822, 3165837101, 2975955312, 79972070]
const ZERO_28_R = [3067898230, 2366459736, 1571753335, 787185022]

const ZERO_29_L = [246581816, 1909551909, 3876094376, 2551087773]
const ZERO_29_R = [2215341298, 1244629930, 3146618532, 581144193]

const ZERO_30_L = [1350312851, 1223587258, 2904706143, 1078065138]
const ZERO_30_R = [1787682571, 2128594844, 578217418, 903308566]

const ZERO_31_L = [2340505732, 1648733876, 2660540036, 3759582231]
const ZERO_31_R = [2389186238, 4049365781, 1653344606, 2840985724]

use ::miden::agglayer::common::utils::mem_store_double_word

#! Writes all 32 canonical zero hashes to consecutive memory, 8 elements (one
#! Keccak256 digest = two words) per height, starting at `zeros_ptr`.
#!
#! Each step pushes [ZERO_N_R, ZERO_N_L] above the running pointer, stores the
#! double word at the pointer, drops the pushed values, and advances the
#! pointer by 8.
#!
#! Inputs: [zeros_ptr]
#! Outputs: []
pub proc load_zeros_to_memory
    push.ZERO_0_R.ZERO_0_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_1_R.ZERO_1_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_2_R.ZERO_2_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_3_R.ZERO_3_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_4_R.ZERO_4_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_5_R.ZERO_5_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_6_R.ZERO_6_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_7_R.ZERO_7_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_8_R.ZERO_8_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_9_R.ZERO_9_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_10_R.ZERO_10_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_11_R.ZERO_11_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_12_R.ZERO_12_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_13_R.ZERO_13_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_14_R.ZERO_14_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_15_R.ZERO_15_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_16_R.ZERO_16_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_17_R.ZERO_17_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_18_R.ZERO_18_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_19_R.ZERO_19_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_20_R.ZERO_20_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_21_R.ZERO_21_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_22_R.ZERO_22_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_23_R.ZERO_23_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_24_R.ZERO_24_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_25_R.ZERO_25_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_26_R.ZERO_26_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_27_R.ZERO_27_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_28_R.ZERO_28_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_29_R.ZERO_29_L exec.mem_store_double_word dropw dropw add.8
    push.ZERO_30_R.ZERO_30_L exec.mem_store_double_word dropw dropw add.8
    # the final add.8 advances past the last entry; the pointer is then dropped
    push.ZERO_31_R.ZERO_31_L exec.mem_store_double_word dropw dropw add.8
    drop
end
diff --git a/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm b/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm
new file mode 100644
index 0000000000..673b3a4fda
--- /dev/null
+++ b/crates/miden-agglayer/asm/agglayer/bridge/leaf_utils.masm
@@ -0,0 +1,154 @@
use miden::core::crypto::hashes::keccak256

# TYPE ALIASES
# =================================================================================================

type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt }
type DoubleWord = struct { word_lo: BeWord, word_hi: BeWord }
type MemoryAddress = u32

# CONSTANTS
#
================================================================================================= + +# the number of bytes in the leaf data to hash (matches Solidity's abi.encodePacked output) +const LEAF_DATA_BYTES = 113 + +# the local memory offset where we store the leaf data start pointer +const PACKING_START_PTR_LOCAL= 0 + +# the number of elements to pack (113 bytes = 29 elements, rounding up from 28.25) +const PACKED_DATA_NUM_ELEMENTS = 29 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Given a memory address where the unpacked leaf data starts, packs the leaf data in-place, and +#! computes the leaf value by hashing the packed bytes. +#! +#! Inputs: [LEAF_DATA_START_PTR] +#! Outputs: [LEAF_VALUE[8]] +#! +#! Invocation: exec +pub proc compute_leaf_value(leaf_data_start_ptr: MemoryAddress) -> DoubleWord + dup + # => [leaf_data_start_ptr, leaf_data_start_ptr] + exec.pack_leaf_data + # => [leaf_data_start_ptr] + + push.LEAF_DATA_BYTES swap + # => [start_ptr, byte_len] + + exec.keccak256::hash_bytes + # => [LEAF_VALUE[8]] +end + +#! Packs the raw leaf data by shifting left 3 bytes to match Solidity's abi.encodePacked format. +#! +#! The raw data has leafType occupying 4 bytes (as a u32 felt) but Solidity's abi.encodePacked +#! only uses 1 byte for uint8 leafType. This procedure shifts all data left by 3 bytes so that: +#! - Byte 0: leafType (1 byte) +#! - Bytes 1-4: originNetwork (4 bytes) +#! - etc. +#! +#! The Keccak precompile expects u32 values packed in little-endian byte order. +#! For each packed element, we drop the leading 3 bytes and rebuild the u32 so that +#! bytes [b0, b1, b2, b3] map to u32::from_le_bytes([b0, b1, b2, b3]). +#! With little-endian input limbs, the first byte comes from the MSB of `curr` and +#! the next three bytes come from the LSBs of `next`: +#! packed = ((curr >> 24) & 0xFF) +#! | (next & 0xFF) << 8 +#! | ((next >> 8) & 0xFF) << 16 +#! 
| ((next >> 16) & 0xFF) << 24 +#! +#! To help visualize the packing process, consider that each field element represents a 4-byte +#! value [u8; 4] (LE). +#! Memory before is: +#! ptr+0: 1 felt: [a, b, c, d] +#! ptr+1: 1 felt: [e, f, g, h] +#! ptr+2..6: 5 felts: [i, j, k, l, m, ...] +#! +#! Memory after: +#! ptr+0: 1 felt: [d, e, f, g] +#! ptr+1: 1 felt: [h, i, j, k] +#! ptr+2..6: 5 felts: [l, ...] +#! +#! Inputs: [leaf_data_start_ptr] +#! Outputs: [] +#! +#! Invocation: exec +@locals(1) # start_ptr +pub proc pack_leaf_data(leaf_data_start_ptr: MemoryAddress) + loc_store.PACKING_START_PTR_LOCAL + # => [] + + # initialize loop counter to 0 + push.0 + + # push initial condition (true) to enter the loop + push.1 + + # loop through elements from 0 to PACKED_DATA_NUM_ELEMENTS - 1 (28) + while.true + # => [counter] + + # compute source address: packing_start_ptr + counter + dup loc_load.PACKING_START_PTR_LOCAL add + # => [src_addr, counter] + + # load current element + mem_load + # => [curr_elem, counter] + + # extract MSB (upper 8 bits) which becomes the first little-endian byte + dup u32shr.24 + # => [curr_msb, curr_elem, counter] + + # compute source address for next element (counter + 1) + dup.2 loc_load.PACKING_START_PTR_LOCAL add add.1 + # => [next_src_addr, curr_lsb, curr_elem, counter] + + # load next element + mem_load + # => [next_elem, curr_lsb, curr_elem, counter] + + # keep curr_msb on top for combination + swap + # => [curr_msb, next_elem, curr_elem, counter] + + # add next byte0 (bits 0..7) into bits 8..15 + dup.1 u32and.0xFF u32shl.8 u32or + # => [partial, next_elem, curr_elem, counter] + + # add next byte1 (bits 8..15) into bits 16..23 + dup.1 u32shr.8 u32and.0xFF u32shl.16 u32or + # => [partial, next_elem, curr_elem, counter] + + # add next byte2 (bits 16..23) into bits 24..31 + dup.1 u32shr.16 u32and.0xFF u32shl.24 u32or + # => [packed_elem, next_elem, curr_elem, counter] + + # drop the next and current elements (no longer needed) + movdn.2 drop drop + 
# => [packed_elem, counter] + + # compute destination address: packing_start_ptr + counter (in-place) + dup.1 loc_load.PACKING_START_PTR_LOCAL add + # => [dest_addr, packed_elem, counter] + + # store packed element + mem_store + # => [counter] + + # increment counter + add.1 + # => [counter + 1] + + # check if we should continue (counter < PACKED_DATA_NUM_ELEMENTS) + dup push.PACKED_DATA_NUM_ELEMENTS lt + # => [should_continue, counter] + end + # => [counter] + + drop + # => [] +end diff --git a/crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm b/crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm new file mode 100644 index 0000000000..8ff7efb453 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/bridge/mmr_frontier32_keccak.masm @@ -0,0 +1,322 @@ +use miden::core::crypto::hashes::keccak256 +use ::miden::agglayer::bridge::canonical_zeros::load_zeros_to_memory +use ::miden::agglayer::common::utils::mem_store_double_word +use ::miden::agglayer::common::utils::mem_load_double_word + +# An MMR Frontier is a data structure based on an MMR, which combines some features of an MMR and an +# SMT. +# +# # Basics & Terminology +# +# +# The main entity in this structure is a _frontier_: it is a set of roots of all individual trees in +# the MMR. Let's consider the tree below as an example. +# +# 7 +# / \ +# 3 6 10 +# / \ / \ / \ +# 1 2 4 5 8 9 11 +# +# The frontier will consist of nodes 7, 10, and 11, because they represent roots of each subtree and +# they are sufficient to compute the root of the entire MMR. If we add another node, the tree will +# become a full binary one and will look like so: +# +# 15 +# / \ +# / \ +# / \ +# 7 14 +# / \ / \ +# 3 6 10 13 +# / \ / \ / \ / \ +# 1 2 4 5 8 9 11 12 +# +# So in that case the frontier will consist of just one node 15. +# +# An MMR frontier consists of the current number of leaves in the range and the array containing the +# frontier. 
+# For the sake of simplicity, this array has a fixed length, equal to the maximum tree height.
+# Indexes of 1's in the binary representation of the total leaves number show the indexes of the
+# relevant frontier values in the frontier array for the current height. For example, if we have 10
+# leaves (1010 in binary representation), relevant frontier values will be stored at frontier[1] and
+# frontier[3].
+#
+# To compute the hash of two MMR nodes, a Keccak256 hash function is used.
+#
+# Each node in this MMR is represented by the Keccak256Digest. Notice that this hash is canonically
+# represented on the stack by the 8 u32 values, or two words. So each node of the MMR will occupy
+# two words on the stack, while being only a 256 bit value.
+#
+# Each state of the MMR frontier is represented by the root. This root is essentially equal to the
+# root of the SMT which has the height equal to the maximum height of the current MMR (for this
+# implementation this maximum height is set to 32), and the leaves equal to the MMR frontier leaves
+# plus the "zero hash" leaves (Keccak256::hash(&[0u8; 32])) for all other ones.
+#
+# # Layout
+#
+# The memory layout of the MMR frontier looks like so:
+#
+# [num_leaves, 0, 0, 0, [FRONTIER_VALUE_DW]]
+#
+# Where:
+# - num_leaves is the number of leaves in the MMR before adding the new leaf.
+# - [FRONTIER_VALUE_DW] is an array containing the double words which represent the frontier MMR
+# nodes. Notice that the index of a frontier value in this array represents its height in the tree.
+#
+# Zero hashes which are used during the root computation are stored in the local memory of the
+# `append_and_update_frontier` procedure. 
+ +# ERRORS +# ================================================================================================= + +const ERR_MMR_FRONTIER_LEAVES_NUM_EXCEED_LIMIT = "number of leaves in the MMR of the MMR Frontier would exceed 4294967295 (2^32 - 1)" + +# CONSTANTS +# ================================================================================================= + +# The maximum number of leaves which could be added to the MMR. +# +# If the height is 32, the leaves num will be equal to 4294967295 (2**32 - 1) +const MAX_LEAVES_NUM = 4294967295 +const MAX_LEAVES_MINUS_1 = 4294967294 + +# The total height of the full MMR tree, whose root represents the commitment to the current +# frontier. +const TREE_HEIGHT = 32 + +# The number of the stack elements which one node occupy. +const NODE_SIZE = 8 + +# The offset of the number of leaves in the current MMR state. +const NUM_LEAVES_OFFSET = 0 + +# The offset of the array of the frontier nodes of respective heights. +const FRONTIER_OFFSET = 4 # 32 double words, 256 felts in total + +# The offset of the first half of the current Keccak256 hash value in the local memory of the +# `append_and_update_frontier` procedure. +const CUR_HASH_LO_LOCAL = 0 + +# The offset of the second half of the current Keccak256 hash value in the local memory of the +# `append_and_update_frontier` procedure. +const CUR_HASH_HI_LOCAL = 4 + +# The offset of the canonical zeros stored in the local memory of the `append_and_update_frontier` +# procedure. +const CANONICAL_ZEROES_LOCAL = 8 + +# PUBLIC API +# ================================================================================================= + +#! Updates the existing frontier with the new leaf, returns a new leaf count and a new MMR root. +#! +#! The memory layout at the `mmr_frontier_ptr` is expected to be: +#! [num_leaves, [[FRONTIER_NODE_LO, FRONTIER_NODE_HI]; 32]] +#! Empty uninitialized memory is a valid state for the frontier in the case where there are no +#! leaves. +#! +#! 
The layout of the local memory of this `append_and_update_frontier` procedure looks like so: +#! [CUR_HASH_LO, CUR_HASH_HI, [[CANONICAL_ZERO_LO, CANONICAL_ZERO_HI]; 32]] +#! So the first 8 felt values is occupied by the current Keccak256 hash, and next 32 * 8 felt values +#! is occupied by the canonical zeros, 8 values each, 32 zeros total. +#! +#! Inputs: [NEW_LEAF_LO, NEW_LEAF_HI, mmr_frontier_ptr] +#! Outputs: [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] +#! +#! Where: +#! - [NEW_LEAF_LO, NEW_LEAF_HI] is the new leaf, represented as Keccak256 hash, which will be added +#! to the MMR. +#! - mmr_frontier_ptr is the pointer to the memory where the MMR Frontier structure is located. +#! - [NEW_ROOT_LO, NEW_ROOT_HI] is the new root of the MMR, represented as Keccak256 hash. +#! - new_leaf_count is the number of leaves in the MMR after the new leaf was added. +#! +#! Panics if: +#! - The number of leaves in the MMR has reached the maximum limit of 2^32. +@locals(264) # new_leaf/curr_hash + canonical_zeros +pub proc append_and_update_frontier + # set CUR_HASH = NEW_LEAF and store to local memory + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw + # => [mmr_frontier_ptr] + + # get the current leaves number + dup add.NUM_LEAVES_OFFSET mem_load + # => [num_leaves, mmr_frontier_ptr] + + # make sure that the MMR is not full yet and we still can store the new leaf + # the MMR is full when the number of leaves is equal to 2^TREE_HEIGHT - 1 (as per the + # Solidity implementation), so the last call to this procedure will be when the number of + # leaves would be equal to 2^32 - 2. 
+ # first assert that the number of leaves is a valid u32, else the u32lt assertion is undefined + u32assert.err=ERR_MMR_FRONTIER_LEAVES_NUM_EXCEED_LIMIT + dup u32lte.MAX_LEAVES_MINUS_1 assert.err=ERR_MMR_FRONTIER_LEAVES_NUM_EXCEED_LIMIT + # => [num_leaves, mmr_frontier_ptr] + + # get the memory pointer where the canonical zeros will be stored + locaddr.CANONICAL_ZEROES_LOCAL + # => [zeros_ptr, num_leaves, mmr_frontier_ptr] + + # load the canonical zeros into the memory + exec.load_zeros_to_memory + # => [num_leaves, mmr_frontier_ptr] + + # update the leaves number and store it into the memory + dup add.1 dup.2 add.NUM_LEAVES_OFFSET + # => [num_leaves_ptr, num_leaves+1, num_leaves, mmr_frontier_ptr] + + mem_store + # => [num_leaves, mmr_frontier_ptr] + + # iterate `TREE_HEIGHT` times to get the root of the tree + # + # iter_counter in that case will show the current tree height + push.0 push.1 + # => [loop_flag=1, iter_counter=0, num_leaves, mmr_frontier_ptr] + + while.true + # => [curr_tree_height, num_leaves, mmr_frontier_ptr] + + # get the pointer to the frontier node of the current height + # + # notice that the initial state of the frontier array is zeros + dup.2 add.FRONTIER_OFFSET dup.1 mul.NODE_SIZE add + # => [frontier[curr_tree_height]_ptr, curr_tree_height, num_leaves, mmr_frontier_ptr] + + # determine whether the last `num_leaves` bit is 1 (is `num_leaves` odd) + dup.2 u32and.1 + # => [ + # is_odd, frontier[curr_tree_height]_ptr, curr_tree_height, num_leaves, mmr_frontier_ptr + # ] + + if.true + # => [frontier[curr_tree_height]_ptr, curr_tree_height, num_leaves, mmr_frontier_ptr] + # + # this height already had a subtree root stored in frontier[curr_tree_height], merge + # into parent. 
+ exec.mem_load_double_word + # => [ + # FRONTIER[curr_tree_height]_LO, FRONTIER[curr_tree_height]_HI, curr_tree_height, + # num_leaves, mmr_frontier_ptr + # ] + + # load the current hash from the local memory back to the stack + # + # in the first iteration the current hash will be equal to the new node + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL + swapdw + # => [ + # FRONTIER[curr_tree_height]_LO, FRONTIER[curr_tree_height]_HI, CUR_HASH_LO, + # CUR_HASH_HI, curr_tree_height, num_leaves, mmr_frontier_ptr + # ] + + # merge the frontier node of this height with the current hash to get the current hash + # of the next height (merge(frontier[h], cur)) + exec.keccak256::merge + # => [CUR_HASH_LO', CUR_HASH_HI', curr_tree_height, num_leaves, mmr_frontier_ptr] + + # store the current hash of the next height back to the local memory + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw + # => [curr_tree_height, num_leaves, mmr_frontier_ptr] + else + # => [frontier[curr_tree_height]_ptr, curr_tree_height, num_leaves, mmr_frontier_ptr] + # + # this height wasn't "occupied" yet: store the current hash as the subtree root + # (frontier node) at height `curr_tree_height` + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL + # => [ + # CUR_HASH_LO, CUR_HASH_HI, frontier[curr_tree_height]_ptr, curr_tree_height, + # num_leaves, mmr_frontier_ptr + # ] + + # store the CUR_HASH to the frontier[curr_tree_height]_ptr + exec.mem_store_double_word movup.8 drop + # => [CUR_HASH_LO, CUR_HASH_HI, curr_tree_height, num_leaves, mmr_frontier_ptr] + + # get the pointer to the canonical zero node of the current height + locaddr.CANONICAL_ZEROES_LOCAL dup.9 mul.NODE_SIZE add + # => [ + # zeros[curr_tree_height], CUR_HASH_LO, CUR_HASH_HI, curr_tree_height, num_leaves, + # mmr_frontier_ptr + # ] + + # load the zero node to the stack + exec.mem_load_double_word swapdw + # => [ + # CUR_HASH_LO, CUR_HASH_HI, ZERO_H_LO, 
ZERO_H_HI, curr_tree_height, num_leaves, + # mmr_frontier_ptr + # ] + + # merge the current hash with the zero node of this height to get the current hash of + # the next height (merge(cur, zeroes[h])) + exec.keccak256::merge + # => [CUR_HASH_LO', CUR_HASH_HI', curr_tree_height, num_leaves, mmr_frontier_ptr] + + # store the current hash of the next height back to the local memory + loc_storew_le.CUR_HASH_LO_LOCAL dropw + loc_storew_le.CUR_HASH_HI_LOCAL dropw + # => [curr_tree_height, num_leaves, mmr_frontier_ptr] + end + # => [curr_tree_height, num_leaves, mmr_frontier_ptr] + + # update the current tree height + add.1 + # => [curr_tree_height+1, num_leaves, mmr_frontier_ptr] + + # update the `num_leaves` (shift it right by 1 bit) + swap u32shr.1 swap + # => [curr_tree_height+1, num_leaves>>1, mmr_frontier_ptr] + + # compute the cycle flag + dup neq.TREE_HEIGHT + # => [loop_flag, curr_tree_height+1, num_leaves>>1, mmr_frontier_ptr] + end + # => [curr_tree_height=TREE_HEIGHT, num_leaves=0, mmr_frontier_ptr] + + # clean the stack + drop drop + # => [mmr_frontier_ptr] + + # load the final number of leaves onto the stack + add.NUM_LEAVES_OFFSET mem_load + # => [new_leaf_count] + + # The current (final) hash represents the root of the whole tree. + # + # Notice that there is no need to update the frontier[tree_height] value, which in theory could + # represent the frontier in case the tree is full. The frontier nodes are used only for the + # computation of the next height hash, but if the tree is full, there is no next hash to + # compute. + + # load the final hash (which is also the root of the tree) + padw loc_loadw_le.CUR_HASH_HI_LOCAL + padw loc_loadw_le.CUR_HASH_LO_LOCAL + # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count] +end + +# HELPER PROCEDURES +# ================================================================================================= + +#! Stores the canonical zeros from the advice map to the memory at the provided address. +#! +#! 
Inputs: [zeros_ptr] +#! Outputs: [] +proc store_canonical_zeros + # prepare the stack for the adv_pipe instruction + padw padw padw + # => [PAD, PAD, PAD, zeros_ptr] + + # TODO: use constant once constant usage will be implemented + repeat.32 + adv_pipe + # => [ZERO_I_L, ZERO_I_R, PAD, zeros_ptr+8] + end + # => [ZERO_31_L, ZERO_31_R, PAD, zeros_ptr+256] + + # clean the stack + dropw dropw dropw drop + # => [] +end diff --git a/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm b/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm new file mode 100644 index 0000000000..6441baef01 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/common/asset_conversion.masm @@ -0,0 +1,395 @@ +use miden::core::math::u64 +use miden::core::word +use miden::agglayer::common::utils +use ::miden::protocol::asset::FUNGIBLE_ASSET_MAX_AMOUNT + +# ERRORS +# ================================================================================================= + +const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT="maximum scaling factor is 18" +const ERR_X_TOO_LARGE="the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64" +const ERR_UNDERFLOW="x < y*10^s (underflow detected)" +const ERR_REMAINDER_TOO_LARGE="remainder z must be < 10^s" +const ERR_Y_TOO_LARGE="y exceeds max fungible token amount" + +# CONSTANTS +# ================================================================================================= + +const MAX_SCALING_FACTOR=18 + +#! Calculate 10^scale where scale is a u8 exponent. +#! +#! Inputs: [scale] +#! Outputs: [10^scale] +#! +#! Where: +#! - scale is expected to be a small integer (0-18 typical for crypto decimals) +#! +#! Panics if: +#! 
- scale > 18 (overflow protection) +proc pow10 + u32assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT + # => [scale] + + dup u32lte.MAX_SCALING_FACTOR assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT + # => [scale] + + push.1 swap + # => [scale, result] + + dup neq.0 + # => [is_not_zero, scale, result] + + # Loop to calculate 10^scale + while.true + # => [scale, result] + + # result *= 10 + swap mul.10 swap + # => [scale, result*10] + + # scale -= 1 + sub.1 + # => [scale-1, result*10] + + dup neq.0 + # => [is_not_zero, scale-1, result*10] + end + # => [0, result] + + drop + # => [result] +end + +#! Convert an asset amount to a scaled U256 representation for bridging to Agglayer. +#! +#! This procedure is used to convert Miden asset amounts to EVM asset amounts. +#! It multiplies the input amount by 10^target_scale to adjust for decimal differences +#! between the current representation and the target chain's native decimals. +#! +#! The procedure first calculates 10^target_scale using the pow10 helper, then converts +#! both the amount and scale factor to U64 format, performs U64 multiplication, and +#! returns the result as 8 u32 limbs in little-endian order (U256 format). +#! +#! Inputs: [amount, target_scale] +#! Outputs: [[RESULT_U256[0], RESULT_U256[1]]] +#! +#! Where: +#! - amount: The asset amount to be converted (range: 0 to 2^63 - 2^31) +#! - target_scale: Exponent for scaling factor (10^target_scale) +#! - [RESULT_U256[0], RESULT_U256[1]]: U256 value as 8 u32 limbs in little-endian order +#! (least significant limb at the top of the stack, each limb stored in little-endian format) +#! +#! Examples: +#! - USDC: amount=1000000000, target_scale=0 → 1000000000 (no scaling) +#! - ETH: amount=1e10, target_scale=8 → 1e18 +#! +#! 
Invocation: exec +pub proc scale_native_amount_to_u256 + swap + # => [target_scale, amount] + + exec.pow10 + # => [scale, amount] + + u32split + # => [scale_lo, scale_hi, amount] + + movup.2 u32split + # => [amount_lo, amount_hi, scale_lo, scale_hi] + + # Perform U64 multiplication: amount * scale + # This is safe because both the scaling factor and amount are guaranteed to be smaller + # than 2^64, so we will never overflow a 256-bit value. + exec.u64::widening_mul + # => [res_lo, res_mid_lo, res_mid_hi, res_hi] + + # convert to U256 & little endian + padw swapw + # => [RESULT_U256[0], RESULT_U256[1]] +end + +#! Reverse the limbs and change the byte endianness of the result. +pub proc reverse_limbs_and_change_byte_endianness + # reverse the felts within each word + # [a, b, c, d, e, f, g, h] -> [h, g, f, e, d, c, b, a] + exec.word::reverse + swapw + exec.word::reverse + + # change the byte endianness of each felt + repeat.8 + exec.utils::swap_u32_bytes + movdn.7 + end + + # => [RESULT_U256[0], RESULT_U256[1]] +end + +#! Subtract two 128-bit integers (little-endian u32 limbs) and assert no underflow. +#! +#! Computes: +#! z = x - y +#! with the constraint: +#! y <= x +#! +#! Each 128-bit value is stored as 4 u32 limbs in little-endian order: +#! value = limb0 + limb1·2^32 + limb2·2^64 + limb3·2^96 +#! +#! The subtraction is performed in three steps using u64::overflowing_sub: +#! 1. (z0, z1) = (x0, x1) - (y0, y1) -> borrow_lo +#! 2. (t0, t1) = (x2, x3) - (y2, y3) -> underflow_hi_raw +#! 3. (z2, z3) = (t0, t1) - borrow_lo -> underflow_hi_borrow +#! +#! Inputs: [y0, y1, y2, y3, x0, x1, x2, x3] +#! Outputs: [z0, z1, z2, z3] +#! +#! Panics if: +#! - y > x (ERR_UNDERFLOW) +proc u128_sub_no_underflow + # Put x-word on top for easier access. 
+ swapw + # => [x0, x1, x2, x3, y0, y1, y2, y3] + + # ============================================================================================= + # Step 1: (z0, z1) = (x0, x1) - (y0, y1) + # u64::overflowing_sub expects [b_lo, b_hi, a_lo, a_hi], computes a - b + # ============================================================================================= + movup.5 + movup.5 + # => [y0, y1, x0, x1, x2, x3, y2, y3] + + exec.u64::overflowing_sub + # => [borrow_lo, z0, z1, x2, x3, y2, y3] + + # ============================================================================================= + # Step 2: (t0, t1) = (x2, x3) - (y2, y3) [raw, without borrow] + # Arrange as [y2, y3, x2, x3] + # ============================================================================================= + + movup.4 + movup.4 + # => [x2, x3, borrow_lo, z0, z1, y2, y3] + + movup.6 + movup.6 + # => [y2, y3, x2, x3, borrow_lo, z0, z1] + + exec.u64::overflowing_sub + # => [underflow_hi_raw, t0, t1, borrow_lo, z0, z1] + + # ============================================================================================= + # Step 3: (z2, z3) = (t0, t1) - borrow_lo + # Arrange as [borrow_lo, 0, t0, t1] + # ============================================================================================= + swap.3 + # => [borrow_lo, t0, t1, underflow_hi_raw, z0, z1] + + push.0 swap + # => [borrow_lo, 0, t0, t1, underflow_hi_raw, z0, z1] + + exec.u64::overflowing_sub + # => [underflow_hi_borrow, z2, z3, underflow_hi_raw, z0, z1] + + # Underflow iff either high-half step underflowed. + movup.3 or + assertz.err=ERR_UNDERFLOW + # => [z2, z3, z0, z1] + + # Rearrange to little-endian order. + movup.3 movup.3 + # => [z0, z1, z2, z3] +end + +#! Verify conversion from a U128 amount to a Miden native amount (Felt) +#! +#! Specification: +#! Verify that a provided y is the quotient of dividing x by 10^scale_exp: +#! y = floor(x / 10^scale_exp) +#! +#! This procedure does NOT perform division. 
It proves the quotient is correct by checking: +#! 1) y is within the allowed fungible token amount range +#! 2) y_scaled = y * 10^scale_exp (computed via scale_native_amount_to_u256) +#! 3) z = x - y_scaled (must not underflow, i.e. y_scaled <= x) +#! 4) z fits in 64 bits (upper 192 bits are zero) +#! 5) (z1, z0) < 10^scale_exp (remainder bound) +#! +#! These conditions prove: +#! x = y_scaled + z, with 0 <= z < 10^scale_exp +#! which uniquely implies: +#! y = floor(x / 10^scale_exp) +#! +#! Example (ETH -> Miden base 1e8): +#! - EVM amount: 100 ETH = 100 * 10^18 +#! - Miden amount: 100 ETH = 100 * 10^8 +#! - Therefore the scale-down factor is: +#! scale = 10^(18 - 8) = 10^10 +#! scale_exp = 10 +#! - Inputs/expected values: +#! x = 100 * 10^18 +#! y = floor(x / 10^10) = 100 * 10^8 +#! y_scaled = y * 10^10 = 100 * 10^18 +#! z = x - y_scaled = 0 +#! +#! Inputs: [x0, x1, x2, x3, scale_exp, y] +#! Where x is encoded as 4 u32 limbs in little-endian order. +#! (x0 is least significant limb) +#! Outputs: [y] +#! +#! Where: +#! - x: The original amount as an unsigned 128-bit integer (U128). +#! It is provided on the operand stack as 4 little-endian u32 limbs: +#! x = x0 + x1·2^32 + x2·2^64 + x3·2^96 +#! - x0..x3: 32-bit limbs of x in little-endian order (x0 is least significant). +#! - scale_exp: The base-10 exponent used for scaling down (an integer in [0, 18]). +#! - y: The provided quotient (Miden native amount) as a Felt interpreted as an unsigned u64. +#! - y_scaled: The 256-bit value y * 10^scale_exp represented as 8 u32 limbs (little-endian). +#! - z: The remainder-like difference z = x - y_scaled (essentially dust that is lost in the +#! conversion due to precision differences). This verifier requires z < 10^scale_exp. +#! +#! Panics if: +#! - scale_exp > 18 (asserted in pow10 via scale_native_amount_to_u256) +#! - y exceeds the max fungible token amount +#! - x < y * 10^scale_exp (underflow) +#! - z does not fit in 64 bits +#! 
- (z1, z0) >= 10^scale_exp (remainder too large) +pub proc verify_u128_to_native_amount_conversion + # => [x0, x1, x2, x3, scale_exp, y] + + # ============================================================================================= + # Step 1: Enforce y <= MAX_FUNGIBLE_TOKEN_AMOUNT + # Constraint: y <= MAX_FUNGIBLE_TOKEN_AMOUNT + # ============================================================================================= + dup.5 + push.FUNGIBLE_ASSET_MAX_AMOUNT + lte + # => [is_lte, x0, x1, x2, x3, scale_exp, y] + + assert.err=ERR_Y_TOO_LARGE + # => [x0, x1, x2, x3, scale_exp, y] + + # ============================================================================================= + # Step 2: Compute y_scaled = y * 10^scale_exp + # + # Call: + # scale_native_amount_to_u256(amount=y, target_scale=scale_exp) + # ============================================================================================= + movup.4 + movup.5 + # => [y, scale_exp, x0, x1, x2, x3] + + dup.1 dup.1 + # => [y, scale_exp, y, scale_exp, x0, x1, x2, x3] + + exec.scale_native_amount_to_u256 + # => [y_scaled0..y_scaled7, y, scale_exp, x0, x1, x2, x3] + + # Drop the upper word as it's guaranteed to be zero since y_scaled will fit in 123 bits + # (amount: 63 bits, 10^target_scale: 60 bits). 
+ swapw dropw + # => [y_scaled0, y_scaled1, y_scaled2, y_scaled3, y, scale_exp, x0, x1, x2, x3] + + # ============================================================================================= + # Step 3: Compute z = x - y_scaled and prove no underflow + # z := x - y_scaled + # Constraint: y_scaled <= x + # ============================================================================================= + movup.5 movup.5 + # => [y, scale_exp, y_scaled0, y_scaled1, y_scaled2, y_scaled3, x0, x1, x2, x3] + + movdn.9 movdn.9 + # => [y_scaled0, y_scaled1, y_scaled2, y_scaled3, x0, x1, x2, x3, y, scale_exp] + + exec.u128_sub_no_underflow + # => [z0, z1, z2, z3, y, scale_exp] + + # ============================================================================================= + # Step 4: Enforce z < 10^scale_exp (remainder bound) + # + # We compare z against 10^scale_exp using a u64 comparison on (z1, z0). + # To make that comparison complete, we must first prove z fits into 64 bits, i.e. z2 == z3 == 0. + # + # This is justified because scale_exp <= 18, so 10^scale_exp <= 10^18 < 2^60. + # Therefore any valid remainder z < 10^scale_exp must be < 2^60 and thus must have z2 == z3 == 0. + # ============================================================================================= + # u128_sub_no_underflow returns [z0, z1, z2, z3] in LE order. + # Assert z2 == z3 == 0 (remainder fits in 64 bits). + movup.3 + assertz.err=ERR_REMAINDER_TOO_LARGE # z3 == 0 + movup.2 + assertz.err=ERR_REMAINDER_TOO_LARGE # z2 == 0 + # => [z0, z1, y, scale_exp] + + movup.3 + exec.pow10 + # => [scale, z0, z1, y] + + u32split + # => [scale_lo, scale_hi, z0, z1, y] + + exec.u64::lt + # => [is_lt, y] + + assert.err=ERR_REMAINDER_TOO_LARGE + # => [y] +end + +#! Verify conversion from an AggLayer U256 amount to a Miden native amount (Felt) +#! +#! This procedure first checks that the U256 value fits in 128 bits (x4..x7 == 0), +#! 
then delegates to verify_u128_to_native_amount_conversion for the actual verification. +#! +#! Specification: +#! Verify that a provided y is the quotient of dividing x by 10^scale_exp: +#! y = floor(x / 10^scale_exp) +#! +#! Example (ETH -> Miden base 1e8): +#! - EVM amount: 100 ETH = 100 * 10^18 +#! - Miden amount: 100 ETH = 100 * 10^8 +#! - Therefore the scale-down factor is: +#! scale = 10^(18 - 8) = 10^10 +#! scale_exp = 10 +#! - Inputs/expected values: +#! x = 100 * 10^18 +#! y = floor(x / 10^10) = 100 * 10^8 +#! y_scaled = y * 10^10 = 100 * 10^18 +#! z = x - y_scaled = 0 +#! +#! Inputs: [x7, x6, x5, x4, x3, x2, x1, x0, scale_exp, y] +#! Where x is encoded as 8 u32 limbs in big-endian order. +#! (x7 is most significant limb and is at the top of the stack) +#! Each limb is expected to contain little-endian bytes. +#! Outputs: [y] +#! +#! Where: +#! - x: The original AggLayer amount as an unsigned 256-bit integer (U256). +#! It is provided on the operand stack as 8 big-endian u32 limbs: +#! x = x0 + x1·2^32 + x2·2^64 + x3·2^96 + x4·2^128 + x5·2^160 + x6·2^192 + x7·2^224 +#! - x0..x7: 32-bit limbs of x in big-endian order (x0 is least significant). +#! - scale_exp: The base-10 exponent used for scaling down (an integer in [0, 18]). +#! - y: The provided quotient (Miden native amount) as a Felt interpreted as an unsigned u64. +#! +#! Panics if: +#! - x does not fit into 128 bits (x4..x7 are not all zero) +#! - scale_exp > 18 (asserted in pow10 via scale_native_amount_to_u256) +#! - y exceeds the max fungible token amount +#! - x < y * 10^scale_exp (underflow) +#! - z does not fit in 64 bits +#! 
- (z1, z0) >= 10^scale_exp (remainder too large) +pub proc verify_u256_to_native_amount_conversion + + # reverse limbs and byte endianness + exec.reverse_limbs_and_change_byte_endianness + # => [x0, x1, x2, x3, x4, x5, x6, x7, scale_exp, y] + + # Enforce x < 2^128 + # Constraint: x4 == x5 == x6 == x7 == 0 + swapw + exec.word::eqz + assert.err=ERR_X_TOO_LARGE + # => [x0, x1, x2, x3, scale_exp, y] + + # Delegate to verify_u128_to_native_amount_conversion for the remaining verification + exec.verify_u128_to_native_amount_conversion + # => [y] +end diff --git a/crates/miden-agglayer/asm/bridge/eth_address.masm b/crates/miden-agglayer/asm/agglayer/common/eth_address.masm similarity index 50% rename from crates/miden-agglayer/asm/bridge/eth_address.masm rename to crates/miden-agglayer/asm/agglayer/common/eth_address.masm index 57a8e9f298..ada1f577c5 100644 --- a/crates/miden-agglayer/asm/bridge/eth_address.masm +++ b/crates/miden-agglayer/asm/agglayer/common/eth_address.masm @@ -1,18 +1,70 @@ +use miden::agglayer::common::utils use miden::core::crypto::hashes::keccak256 use miden::core::word +# ERRORS +# ================================================================================================= + +const ERR_NOT_U32="address limb is not u32" +const ERR_MSB_NONZERO="most-significant 4 bytes must be zero for AccountId" +const ERR_FELT_OUT_OF_FIELD="combined u64 doesn't fit in field" + # CONSTANTS # ================================================================================================= const U32_MAX=4294967295 const TWO_POW_32=4294967296 -const ERR_NOT_U32="address limb is not u32" -const ERR_ADDR4_NONZERO="most-significant 4 bytes (addr4) must be zero" -const ERR_FELT_OUT_OF_FIELD="combined u64 doesn't fit in field" +# PUBLIC INTERFACE +# ================================================================================================= +#! Converts an Ethereum address format (address[5] type) back into an AccountId [prefix, suffix] type. +#! +#! 
The Ethereum address format is represented as 5 u32 limbs (20 bytes total) in *big-endian limb order*
+#! (matching Solidity ABI encoding). Each limb encodes its 4 bytes in little-endian order:
+#! limb0 = bytes[0..4] (most-significant 4 bytes, must be zero for AccountId)
+#! limb1 = bytes[4..8]
+#! limb2 = bytes[8..12]
+#! limb3 = bytes[12..16]
+#! limb4 = bytes[16..20] (least-significant 4 bytes)
+#!
+#! The most-significant 4 bytes must be zero for a valid AccountId conversion (limb0 == 0).
+#! The remaining 16 bytes are treated as two 8-byte words (conceptual u64 values):
+#! prefix = (bswap(limb1) << 32) | bswap(limb2) # bytes[4..12]
+#! suffix = (bswap(limb3) << 32) | bswap(limb4) # bytes[12..20]
+#!
+#! These 8-byte words are represented as field elements by packing two u32 limbs into a felt.
+#! The packing is done via build_felt, which validates limbs are u32 and checks the packed value
+#! did not reduce mod p (i.e. the word fits in the field).
+#!
+#! Inputs: [limb0, limb1, limb2, limb3, limb4]
+#! Outputs: [suffix, prefix]
+#!
+#! Invocation: exec
+pub proc to_account_id
+    # limb0 must be 0 (most-significant limb, on top)
+    assertz.err=ERR_MSB_NONZERO
+    # => [limb1, limb2, limb3, limb4]
+
+    # Reorder for suffix = build_felt(limb4, limb3) where limb4=lo, limb3=hi
+    movup.2 movup.3
+    # => [limb4, limb3, limb1, limb2]
+
+    exec.build_felt
+    # => [suffix, limb1, limb2]
+
+    # Reorder for prefix = build_felt(limb2, limb1) where limb2=lo, limb1=hi
+    swap movup.2
+    # => [limb2, limb1, suffix]
+
+    exec.build_felt
+    # => [prefix, suffix]

-# ETHEREUM ADDRESS PROCEDURES
+    swap
+    # => [suffix, prefix]
+end
+
+# HELPER PROCEDURES
 # =================================================================================================
 
 #! Builds a single felt from two u32 limbs (little-endian limb order). 
@@ -25,64 +77,33 @@ const ERR_FELT_OUT_OF_FIELD="combined u64 doesn't fit in field" proc build_felt # --- validate u32 limbs --- u32assert2.err=ERR_NOT_U32 + # => [lo_be, hi_be] + + # limbs are little-endian bytes; swap to big-endian for building account ID + exec.utils::swap_u32_bytes + swap + exec.utils::swap_u32_bytes + swap # => [lo, hi] # keep copies for the overflow check dup.1 dup.1 - # => [lo, hi, lo, hi] + # => [lo_be, hi_be, lo_be, hi_be] # felt = (hi * 2^32) + lo swap push.TWO_POW_32 mul add - # => [felt, lo, hi] + # => [felt, lo_be, hi_be] # ensure no reduction mod p happened: - # split felt back into (hi, lo) and compare to inputs + # split felt back into (lo, hi) and compare to inputs dup u32split - # => [hi2, lo2, felt, lo, hi] + # => [lo2, hi2, felt, lo_be, hi_be] - movup.4 assert_eq.err=ERR_FELT_OUT_OF_FIELD - # => [lo2, felt, lo] + movup.3 assert_eq.err=ERR_FELT_OUT_OF_FIELD + # => [hi2, felt, hi_be] movup.2 assert_eq.err=ERR_FELT_OUT_OF_FIELD # => [felt] end - -#! Converts an Ethereum address format (address[5] type) back into an AccountId [prefix, suffix] type. -#! -#! The Ethereum address format is represented as 5 u32 limbs (20 bytes total) in *little-endian limb order*: -#! addr0 = bytes[16..19] (least-significant 4 bytes) -#! addr1 = bytes[12..15] -#! addr2 = bytes[ 8..11] -#! addr3 = bytes[ 4.. 7] -#! addr4 = bytes[ 0.. 3] (most-significant 4 bytes) -#! -#! The most-significant 4 bytes must be zero for a valid AccountId conversion (addr4 == 0). -#! The remaining 16 bytes are treated as two 8-byte words (conceptual u64 values): -#! prefix = (addr3 << 32) | addr2 # bytes[4..11] -#! suffix = (addr1 << 32) | addr0 # bytes[12..19] -#! -#! These 8-byte words are represented as field elements by packing two u32 limbs into a felt. -#! The packing is done via build_felt, which validates limbs are u32 and checks the packed value -#! did not reduce mod p (i.e. the word fits in the field). -#! -#! Inputs: [addr0, addr1, addr2, addr3, addr4] -#! 
Outputs: [prefix, suffix] -#! -#! Invocation: exec -pub proc to_account_id - # addr4 must be 0 (most-significant limb) - movup.4 - eq.0 assert.err=ERR_ADDR4_NONZERO - # => [addr0, addr1, addr2, addr3] - - exec.build_felt - # => [suffix, addr2, addr3] - - movdn.2 - # => [addr2, addr3, suffix] - - exec.build_felt - # => [prefix, suffix] -end diff --git a/crates/miden-agglayer/asm/agglayer/common/utils.masm b/crates/miden-agglayer/asm/agglayer/common/utils.masm new file mode 100644 index 0000000000..6a17598b2c --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/common/utils.masm @@ -0,0 +1,94 @@ +# Utility module containing helper procedures for double word handling and byte manipulation. + +# TYPE ALIASES +# ================================================================================================= + +type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } +type DoubleWord = struct { word_lo: BeWord, word_hi: BeWord } +type MemoryAddress = u32 + +# BYTE MANIPULATION +# ================================================================================================= + +#! Swaps byte order in a u32 limb (LE <-> BE). +#! +#! Inputs: [value] +#! Outputs: [swapped] +pub proc swap_u32_bytes + # part0 = (value & 0xFF) << 24 + dup u32and.0xFF u32shl.24 + # => [part0, value] + + # part1 = ((value >> 8) & 0xFF) << 16 + dup.1 u32shr.8 u32and.0xFF u32shl.16 u32or + # => [part01, value] + + # part2 = ((value >> 16) & 0xFF) << 8 + dup.1 u32shr.16 u32and.0xFF u32shl.8 u32or + # => [part012, value] + + # part3 = (value >> 24) + dup.1 u32shr.24 u32or + # => [swapped, value] + + swap drop + # => [swapped] +end + +# DOUBLE WORD MEMORY OPERATIONS +# ================================================================================================= + +#! Stores two words to the provided global memory address. +#! +#! Inputs: [WORD_1, WORD_2, ptr] +#! 
Outputs: [WORD_1, WORD_2, ptr] +pub proc mem_store_double_word( + double_word_to_store: DoubleWord, + mem_ptr: MemoryAddress +) -> (DoubleWord, MemoryAddress) + dup.8 mem_storew_le swapw + # => [WORD_2, WORD_1, ptr] + + dup.8 add.4 mem_storew_le swapw + # => [WORD_1, WORD_2, ptr] +end + +#! Stores two words to the provided unaligned (not a multiple of 4) memory address. +#! +#! Inputs: [WORD_1, WORD_2, ptr] +#! Outputs: [] +pub proc mem_store_double_word_unaligned( + double_word_to_store: DoubleWord, + mem_ptr: MemoryAddress +) + # bring ptr to the top of the stack + dup.8 + # => [ptr, WORD_1, WORD_2, ptr] + + # store each element individually at consecutive addresses + mem_store dup.7 add.1 + mem_store dup.6 add.2 + mem_store dup.5 add.3 + mem_store + # => [WORD_2, ptr] + + dup.4 add.4 + mem_store dup.3 add.5 + mem_store dup.2 add.6 + mem_store dup.1 add.7 + mem_store + drop + # => [] +end + +#! Loads two words from the provided global memory address. +#! +#! Inputs: [ptr] +#! Outputs: [WORD_1, WORD_2] +pub proc mem_load_double_word(mem_ptr: MemoryAddress) -> DoubleWord + padw dup.4 add.4 mem_loadw_le + # => [WORD_2, ptr] + + padw movup.8 mem_loadw_le + # => [WORD_1, WORD_2] +end diff --git a/crates/miden-agglayer/asm/agglayer/faucet/mod.masm b/crates/miden-agglayer/asm/agglayer/faucet/mod.masm new file mode 100644 index 0000000000..4151cc6086 --- /dev/null +++ b/crates/miden-agglayer/asm/agglayer/faucet/mod.masm @@ -0,0 +1,457 @@ +use miden::agglayer::bridge::bridge_in +use miden::core::sys +use miden::agglayer::common::utils +use miden::agglayer::common::asset_conversion +use miden::agglayer::common::eth_address +use miden::protocol::active_account +use miden::protocol::active_note +use miden::standards::faucets +use miden::standards::note_tag +use miden::protocol::note +use miden::protocol::tx +use miden::core::mem + + +# ERRORS +# ================================================================================================= + +const ERR_INVALID_CLAIM_PROOF 
= "invalid claim proof" + +# CONSTANTS +# ================================================================================================= + +# Storage slots +# The slot in this component's storage layout where the bridge account ID is stored. +const BRIDGE_ID_SLOT = word("miden::agglayer::faucet") +# Storage slots for conversion metadata. +# Slot 1: [addr_felt0, addr_felt1, addr_felt2, addr_felt3] — first 4 felts of origin token address +const CONVERSION_INFO_1_SLOT = word("miden::agglayer::faucet::conversion_info_1") +# Slot 2: [addr_felt4, origin_network, scale, 0] — remaining address felt + origin network + scale +const CONVERSION_INFO_2_SLOT = word("miden::agglayer::faucet::conversion_info_2") + +# Memory pointers for piped advice map data +const PROOF_DATA_START_PTR = 0 +const LEAF_DATA_START_PTR = 536 +const OUTPUT_NOTE_DATA_START_PTR = 568 + +# Memory addresses for stored keys +const PROOF_DATA_KEY_MEM_ADDR = 700 +const LEAF_DATA_KEY_MEM_ADDR = 704 +const OUTPUT_NOTE_DATA_MEM_ADDR = 708 +const CLAIM_NOTE_DATA_MEM_ADDR = 712 + +# Memory addresses for output note fields (derived from leaf data layout) +const DESTINATION_ADDRESS_0 = 544 +const DESTINATION_ADDRESS_1 = 545 +const DESTINATION_ADDRESS_2 = 546 +const DESTINATION_ADDRESS_3 = 547 +const DESTINATION_ADDRESS_4 = 548 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 = 549 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 = 550 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_2 = 551 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_3 = 552 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_4 = 553 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_5 = 554 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_6 = 555 +const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_7 = 556 +const OUTPUT_NOTE_STORAGE_PTR = 0 +const OUTPUT_NOTE_TARGET_ACCOUNT_ID_SUFFIX_PTR = OUTPUT_NOTE_STORAGE_PTR +const OUTPUT_NOTE_TARGET_ACCOUNT_ID_PREFIX_PTR = OUTPUT_NOTE_STORAGE_PTR + 1 +const OUTPUT_NOTE_FAUCET_AMOUNT = 568 + +# Memory locals in claim +const CLAIM_PREFIX_MEM_LOC = 8 +const 
CLAIM_SUFFIX_MEM_LOC = 9 +const CLAIM_AMOUNT_MEM_LOC_0 = 0 +const CLAIM_AMOUNT_MEM_LOC_1 = 4 + +# Memory locals in build_p2id_output_note +const BUILD_P2ID_AMOUNT_MEM_LOC_0 = 0 +const BUILD_P2ID_AMOUNT_MEM_LOC_1 = 4 +const BUILD_P2ID_PREFIX_MEM_LOC = 8 + +# Data sizes +const PROOF_DATA_WORD_LEN = 134 +const LEAF_DATA_WORD_LEN = 8 +const OUTPUT_NOTE_DATA_WORD_LEN = 2 + +# P2ID output note constants +const P2ID_NOTE_NUM_STORAGE_ITEMS = 2 +const OUTPUT_NOTE_TYPE_PUBLIC = 1 +const OUTPUT_NOTE_AUX = 0 + +# PUBLIC INTERFACE +# ================================================================================================= + +#! Returns the origin token address (5 felts) from faucet conversion storage. +#! +#! Reads conversion_info_1 (first 4 felts of address) and conversion_info_2 (5th felt) +#! from storage. +#! +#! Inputs: [] +#! Outputs: [addr0, addr1, addr2, addr3, addr4] +#! +#! Invocation: exec +pub proc get_origin_token_address + push.CONVERSION_INFO_1_SLOT[0..2] + exec.active_account::get_item + # => [addr0, addr1, addr2, addr3] + + # Read slot 2: [addr4, origin_network, scale, 0] + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0, addr0, addr1, addr2, addr3] + + # Keep only addr4, drop origin_network, scale, 0 + movdn.7 drop drop drop + # => [addr0, addr1, addr2, addr3, addr4] +end + +#! Returns the origin network identifier from faucet conversion storage. +#! +#! Inputs: [] +#! Outputs: [origin_network] +#! +#! Invocation: exec +pub proc get_origin_network + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0] + + drop movdn.2 drop drop + # => [origin_network] +end + +#! Returns the scale factor from faucet conversion storage. +#! +#! Inputs: [] +#! Outputs: [scale] +#! +#! 
Invocation: exec +pub proc get_scale + push.CONVERSION_INFO_2_SLOT[0..2] + exec.active_account::get_item + # => [addr4, origin_network, scale, 0] + + drop drop swap drop + # => [scale] +end + +#! Converts a native Miden asset amount to origin asset data using the stored +#! conversion metadata (origin_token_address, origin_network, and scale). +#! +#! This procedure is intended to be called via FPI from the bridge account. +#! It reads the faucet's conversion metadata from storage, scales the native amount +#! to U256 format, and returns the result along with origin token address and network. +#! +#! Inputs: [amount, pad(15)] +#! Outputs: [AMOUNT_U256[0], AMOUNT_U256[1], addr0, addr1, addr2, addr3, addr4, origin_network, pad(2)] +#! +#! Where: +#! - amount: The native Miden asset amount +#! - AMOUNT_U256: The scaled amount as 8 u32 limbs (little-endian U256) +#! - addr0..addr4: Origin token address (5 felts, u32 limbs) +#! - origin_network: Origin network identifier +#! +#! Invocation: call +pub proc asset_to_origin_asset + # => [amount, pad(15)] + + # Step 1: Get scale from storage + exec.get_scale + # => [scale, amount, pad(15)] + swap + # => [amount, scale, pad(15)] + + # Step 2: Scale amount to U256 + exec.asset_conversion::scale_native_amount_to_u256 + exec.asset_conversion::reverse_limbs_and_change_byte_endianness + # => [U256_LO, U256_HI, pad(15)] + + # Step 3: Get origin token address + exec.get_origin_token_address + # => [addr0, addr1, addr2, addr3, addr4, U256_LO, U256_HI, pad(15)] + + # Move address below the U256 amount + repeat.5 movdn.12 end + # => [U256_LO, U256_HI, addr0, addr1, addr2, addr3, addr4, pad(15)] + + # Step 4: Get origin network + exec.get_origin_network + exec.utils::swap_u32_bytes + # => [origin_network, U256_LO, U256_HI, addr0..addr4, pad(15)] + + # Move origin_network after the address fields + movdn.13 + # => [U256_LO, U256_HI, addr0, addr1, addr2, addr3, addr4, origin_network, pad(15)] + + exec.sys::truncate_stack +end + +# CLAIM 
PROCEDURES +# ================================================================================================= + +#! Inputs: [LEAF_DATA_KEY, PROOF_DATA_KEY] +#! Outputs: [] +#! +#! Panics if: +#! - the bridge account ID is not properly configured in storage. +#! - the foreign procedure invocation fails. +#! - the claim proof validation fails. +#! +#! Invocation: exec +proc validate_claim + # get bridge_in::verify_leaf_bridge procedure MAST root + procref.bridge_in::verify_leaf_bridge + # => [BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] + + push.BRIDGE_ID_SLOT[0..2] + # => [bridge_id_idx, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] + + # get bridge account ID + exec.active_account::get_item + # => [0, 0, bridge_account_id_suffix, bridge_account_id_prefix, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] + + drop drop + # => [bridge_account_id_suffix, bridge_account_id_prefix, BRIDGE_PROC_MAST_ROOT, LEAF_DATA_KEY, PROOF_DATA_KEY] + + # call bridge_in::verify_leaf_bridge + exec.tx::execute_foreign_procedure + # => [] +end + +# Inputs: [] +# Outputs: [U256[0], U256[1]] +proc get_raw_claim_amount + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_7 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_6 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_5 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_4 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_3 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_2 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 + mem_load.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 +end + +# Inputs: [U256[0], U256[1]] +# Outputs: [amount] +proc scale_down_amount + repeat.7 drop end +end + +# Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY] +# Outputs: [] +proc batch_pipe_double_words + # 1) Verify PROOF_DATA_KEY + mem_storew_be.PROOF_DATA_KEY_MEM_ADDR + adv.push_mapval + # => [PROOF_DATA_KEY] + + push.PROOF_DATA_START_PTR push.PROOF_DATA_WORD_LEN + exec.mem::pipe_double_words_preimage_to_memory drop + + # 2) Verify LEAF_DATA_KEY + mem_storew_be.LEAF_DATA_KEY_MEM_ADDR + 
adv.push_mapval + # => [LEAF_DATA_KEY] + + push.LEAF_DATA_START_PTR push.LEAF_DATA_WORD_LEN + exec.mem::pipe_double_words_preimage_to_memory drop +end + +#! Extracts the destination account ID as address[5] from memory. +#! +#! This procedure reads the destination address from the leaf data and converts it from +#! Ethereum address format to AccountId format (prefix, suffix). +#! +#! Inputs: [] +#! Outputs: [suffix, prefix] +#! +#! Invocation: exec +proc get_destination_account_id_data + mem_load.DESTINATION_ADDRESS_4 + mem_load.DESTINATION_ADDRESS_3 + mem_load.DESTINATION_ADDRESS_2 + mem_load.DESTINATION_ADDRESS_1 + mem_load.DESTINATION_ADDRESS_0 + # => [address[5]] + + exec.eth_address::to_account_id + # => [suffix, prefix] +end + +#! Builds a P2ID output note for the claim recipient. +#! +#! This procedure expects the claim data to be already written to memory via batch_pipe_double_words. +#! It reads the destination account ID, amount, and other note parameters from memory to construct +#! the output note. +#! +#! Inputs: [suffix, prefix, AMOUNT[0], AMOUNT[1]] +#! Outputs: [] +#! +#! WARNING: This procedure currently assumes the claim amount fits within 128 bits (i.e. AMOUNT[1] +#! is all zeros). This assumption holds for all practical token amounts but is not explicitly +#! enforced here. See the TODO below. +#! +#! TODO: Add an explicit assertion that AMOUNT[1] is zero. +#! +#! Note: This procedure will be refactored in a follow-up to use leaf data to build the output note. 
+@locals(9) +proc build_p2id_output_note + # save prefix to local memory for later use in note tag computation + dup.1 loc_store.BUILD_P2ID_PREFIX_MEM_LOC + + # write destination account id into note storage for use in note::build_recipient + # the expected P2ID storage layout is: [target_account_id_suffix, target_account_id_prefix] + # => [suffix, prefix, AMOUNT[0], AMOUNT[1]] + mem_store.OUTPUT_NOTE_TARGET_ACCOUNT_ID_SUFFIX_PTR + mem_store.OUTPUT_NOTE_TARGET_ACCOUNT_ID_PREFIX_PTR + + # store amount in memory locals for use in faucets::distribute + loc_storew_be.BUILD_P2ID_AMOUNT_MEM_LOC_0 dropw loc_storew_be.BUILD_P2ID_AMOUNT_MEM_LOC_1 dropw + # => [pad(16)] + + # Build P2ID output note + procref.::miden::standards::notes::p2id::main + # => [SCRIPT_ROOT] + + # Use PROOF_DATA_KEY as the P2ID serial number + swapw mem_loadw_be.PROOF_DATA_KEY_MEM_ADDR + # => [SERIAL_NUM, SCRIPT_ROOT] + + push.P2ID_NOTE_NUM_STORAGE_ITEMS + # => [note_num_storage_items, SERIAL_NUM, SCRIPT_ROOT] + + push.OUTPUT_NOTE_STORAGE_PTR + # => [storage_ptr, note_num_storage_items, SERIAL_NUM, SCRIPT_ROOT] + + exec.note::build_recipient + # => [RECIPIENT] + + push.OUTPUT_NOTE_TYPE_PUBLIC + # => [note_type, RECIPIENT] + + # Compute note tag from destination account prefix (read from local memory) + loc_load.BUILD_P2ID_PREFIX_MEM_LOC + # => [account_id_prefix, note_type, RECIPIENT] + + exec.note_tag::create_account_target + # => [tag, note_type, RECIPIENT] + + padw loc_loadw_be.BUILD_P2ID_AMOUNT_MEM_LOC_1 padw loc_loadw_be.BUILD_P2ID_AMOUNT_MEM_LOC_0 + # => [AMOUNT[0], AMOUNT[1], tag, note_type, RECIPIENT] + + mem_load.OUTPUT_NOTE_FAUCET_AMOUNT movdn.8 + # => [AMOUNT[0], AMOUNT[1], native_amount, tag, note_type, RECIPIENT] + + exec.get_scale movdn.8 + # => [AMOUNT[0], AMOUNT[1], scale, native_amount, tag, note_type, RECIPIENT] + + exec.asset_conversion::verify_u256_to_native_amount_conversion + # => [amount, tag, note_type, RECIPIENT] + + exec.faucets::distribute + # => [pad(16)] +end + +#! 
Validates a claim against the AggLayer bridge and mints the corresponding asset to the recipient. +#! +#! This procedure validates the rollup exit root Merkle Proof via FPI against the agglayer bridge, +#! and if validation passes, mints the asset and creates an output note for the recipient. +#! +#! WARNING: The EVM claim asset amount is currently assumed to fit within 128 bits. See the WARNING in +#! build_p2id_output_note for details. +#! +#! TODO: Expand this description to cover the double-spend protection mechanism in detail. +#! Double-spend can be prevented in two ways: +#! 1) While it's possible to create two identical P2ID notes, only one can actually be consumed. +#! If the claim note is consumed twice, only one P2ID output note will be successfully consumed. +#! 2) We can have a mapping in the bridge or in the faucet that stores consumed claim proofs +#! as a hash -> bool value (similar to how it's done in the agglayer solidity contract). +#! +#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, faucet_mint_amount, pad(7)] +#! Outputs: [pad(16)] +#! +#! Advice map: { +#! PROOF_DATA_KEY => [ +#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) +#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) +#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) +#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) +#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) +#! ], +#! LEAF_DATA_KEY => [ +#! leafType[1], // Leaf type (1 felt, uint8) +#! originNetwork[1], // Origin network identifier (1 felt, uint32) +#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) +#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) +#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) +#! 
amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) +#! metadata[8], // ABI encoded metadata (8 felts, fixed size) +#! ], +#! } +#! +#! Panics if: +#! - the rollup exit root Merkle Proof validation via FPI fails. +#! - any of the validations in faucets::distribute fail. +#! +#! Invocation: call +@locals(10) # 2 for prefix and suffix, 8 for amount +pub proc claim + # Write output note faucet amount to memory + movup.8 mem_store.OUTPUT_NOTE_FAUCET_AMOUNT + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, pad(7)] + + # Check AdviceMap values hash to keys & write CLAIM inputs & DATA_KEYs to global memory + exec.batch_pipe_double_words + # => [pad(7)] + + # validate_claim will overwrite memory in-place, so we need to load the account and amount + # before calling validate_claim and store it in memory locals + exec.get_destination_account_id_data + loc_store.CLAIM_SUFFIX_MEM_LOC loc_store.CLAIM_PREFIX_MEM_LOC + # => [pad(7)] + + exec.get_raw_claim_amount + loc_storew_be.CLAIM_AMOUNT_MEM_LOC_0 dropw loc_storew_be.CLAIM_AMOUNT_MEM_LOC_1 dropw + # => [pad(7)] + + # VALIDATE CLAIM + mem_loadw_be.PROOF_DATA_KEY_MEM_ADDR + # => [PROOF_DATA_KEY, pad(7)] + + swapw mem_loadw_be.LEAF_DATA_KEY_MEM_ADDR + # => [LEAF_DATA_KEY, PROOF_DATA_KEY, pad(7)] + + # Errors on invalid proof + exec.validate_claim + # => [pad(16)] + + # Create P2ID output note + loc_loadw_be.CLAIM_AMOUNT_MEM_LOC_1 swapw loc_loadw_be.CLAIM_AMOUNT_MEM_LOC_0 + # => [AMOUNT[0], AMOUNT[1], pad(8)] + + loc_load.CLAIM_PREFIX_MEM_LOC loc_load.CLAIM_SUFFIX_MEM_LOC + # => [suffix, prefix, AMOUNT[0], AMOUNT[1], pad(8)] + + exec.build_p2id_output_note + # => [pad(16)] +end + +#! Burns the fungible asset from the active note. +#! +#! This procedure retrieves the asset from the active note and burns it. The note must contain +#! exactly one asset, which must be a fungible asset issued by this faucet. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! 
- the procedure is not called from a note context (active_note::get_assets will fail). +#! - the note does not contain exactly one asset. +#! - the transaction is executed against an account which is not a fungible asset faucet. +#! - the transaction is executed against a faucet which is not the origin of the specified asset. +#! - the amount about to be burned is greater than the outstanding supply of the asset. +#! +#! Invocation: call +pub use ::miden::standards::faucets::basic_fungible::burn diff --git a/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm b/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm deleted file mode 100644 index 4c12783065..0000000000 --- a/crates/miden-agglayer/asm/bridge/agglayer_faucet.masm +++ /dev/null @@ -1,260 +0,0 @@ -use miden::agglayer::bridge_in -use miden::agglayer::asset_conversion -use miden::protocol::active_account -use miden::protocol::active_note -use miden::standards::faucets -use miden::protocol::note -use miden::protocol::tx -use miden::core::mem - - -# CONSTANTS -# ================================================================================================= - -# The slot in this component's storage layout where the bridge account ID is stored. 
-const BRIDGE_ID_SLOT = word("miden::agglayer::faucet") - -const PROOF_DATA_WORD_LEN = 134 -const LEAF_DATA_WORD_LEN = 6 -const OUTPUT_NOTE_DATA_WORD_LEN = 2 - -const PROOF_DATA_START_PTR = 0 -const LEAF_DATA_START_PTR = 536 -const OUTPUT_NOTE_DATA_START_PTR = 568 - -# Memory Addresses -const PROOF_DATA_KEY_MEM_ADDR = 700 -const LEAF_DATA_KEY_MEM_ADDR = 704 -const OUTPUT_NOTE_DATA_MEM_ADDR = 708 -const CLAIM_NOTE_DATA_MEM_ADDR = 712 - -const OUTPUT_NOTE_INPUTS_MEM_ADDR = 0 -const OUTPUT_NOTE_TAG_MEM_ADDR = 574 -const OUTPUT_NOTE_SERIAL_NUM_MEM_ADDR = 568 -const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 = 548 -const OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 = 552 - -# P2ID output note constants -const P2ID_SCRIPT_ROOT = [13362761878458161062, 15090726097241769395, 444910447169617901, 3558201871398422326] -const P2ID_NOTE_NUM_INPUTS = 2 -const OUTPUT_NOTE_TYPE_PUBLIC = 1 -const EXECUTION_HINT_ALWAYS = 1 -const OUTPUT_NOTE_AUX = 0 - -const P2ID_OUTPUT_NOTE_AMOUNT_MEM_PTR = 611 -# ERRORS -# ================================================================================================= - -const ERR_INVALID_CLAIM_PROOF = "invalid claim proof" - -#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY] -#! Outputs: [] -#! -#! Panics if: -#! - the bridge account ID is not properly configured in storage. -#! - the foreign procedure invocation fails. -#! - the claim proof validation fails. -#! -#! 
Invocation: exec -proc validate_claim - # Get bridge_in::check_claim_proof procedure MAST root - procref.bridge_in::check_claim_proof - # => [BRIDGE_PROC_MAST_ROOT] - - push.BRIDGE_ID_SLOT[0..2] - # => [bridge_id_idx, BRIDGE_PROC_MAST_ROOT] - - # Get Bridge AccountId - exec.active_account::get_item - # => [bridge_account_id_prefix, bridge_account_id_suffix, 0, 0, BRIDGE_PROC_MAST_ROOT] - - movup.2 drop movup.2 drop - # => [bridge_account_id_prefix, bridge_account_id_suffix, BRIDGE_PROC_MAST_ROOT] - - # Call check_claim_proof procedure on Bridge - # Calling: bridge_in::check_claim_proof - exec.tx::execute_foreign_procedure - # => [validation_result] - - # Assert valid proof data - assert.err=ERR_INVALID_CLAIM_PROOF drop - # => [] -end - -# Inputs: [] -# Outputs: [U256[0], U256[1]] -proc get_raw_claim_amount - padw mem_loadw_be.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_0 - padw mem_loadw_be.OUTPUT_NOTE_ASSET_AMOUNT_MEM_ADDR_1 -end - -# Inputs: [U256[0], U256[1]] -# Outputs: [amount] -proc scale_down_amount - repeat.7 drop end -end - -# Inputs: [] -# Outputs: [prefix, suffix] -proc get_destination_account_id - mem_load.543 mem_load.544 -end - -# Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] -# Outputs: [] -proc batch_pipe_double_words - # 1) Verify PROOF_DATA_KEY - mem_storew_be.PROOF_DATA_KEY_MEM_ADDR - adv.push_mapval - # => [PROOF_DATA_KEY] - - push.PROOF_DATA_START_PTR push.PROOF_DATA_WORD_LEN - exec.mem::pipe_double_words_preimage_to_memory drop - - # 2) Verify LEAF_DATA_KEY - mem_storew_be.LEAF_DATA_KEY_MEM_ADDR - adv.push_mapval - # => [LEAF_DATA_KEY] - - push.LEAF_DATA_START_PTR push.LEAF_DATA_WORD_LEN - exec.mem::pipe_double_words_preimage_to_memory drop - - # 3) Verify OUTPUT_NOTE_DATA_KEY - mem_storew_be.OUTPUT_NOTE_DATA_MEM_ADDR - adv.push_mapval - # => [OUTPUT_NOTE_DATA_KEY] - - push.OUTPUT_NOTE_DATA_START_PTR push.OUTPUT_NOTE_DATA_WORD_LEN - exec.mem::pipe_double_words_preimage_to_memory drop -end - -#! 
Builds a P2ID output note for the claim recipient. -#! -#! This procedure expects the claim data to be already written to memory via batch_pipe_double_words. -#! It reads the destination account ID, amount, and other note parameters from memory to construct -#! the output note. -#! -#! Inputs: [] -#! Outputs: [] -#! -#! Note: This procedure will be refactored in a follow-up to use leaf data to build the output note. -proc build_p2id_output_note - # Build P2ID output note - push.P2ID_SCRIPT_ROOT[0..4] - # => [SCRIPT_ROOT] - - swapw mem_loadw_be.OUTPUT_NOTE_SERIAL_NUM_MEM_ADDR - # => [SERIAL_NUM, SCRIPT_ROOT] - - push.P2ID_NOTE_NUM_INPUTS - # => [num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT] - - exec.get_destination_account_id - # => [account_id_prefix, account_id_suffix, num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT] - - mem_store.0 mem_store.1 - # => [num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT] - - push.OUTPUT_NOTE_INPUTS_MEM_ADDR - # => [inputs_ptr = 0, num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT] - - exec.note::build_recipient - # => [RECIPIENT] - - push.OUTPUT_NOTE_TYPE_PUBLIC - # => [note_type, RECIPIENT] - - mem_load.OUTPUT_NOTE_TAG_MEM_ADDR - # => [tag, RECIPIENT] - - exec.get_raw_claim_amount - # => [AMOUNT[1], AMOUNT[0], tag, note_type, RECIPIENT] - - # TODO: implement scale down logic; stubbed out for now - exec.asset_conversion::scale_u256_to_native_amount - # => [amount, tag, note_type, RECIPIENT] - - exec.faucets::distribute - # => [pad(16)] -end - -#! Validates a claim against the AggLayer bridge and mints the corresponding asset to the recipient. -#! -#! This procedure validates the rollup exit root Merkle Proof via FPI against the agglayer bridge, -#! and if validation passes, mints the asset and creates an output note for the recipient. -#! -#! TODO: Expand this description to cover the double-spend protection mechanism in detail. -#! Double-spend can be prevented in two ways: -#! 
1) While it's possible to create two identical P2ID notes, only one can actually be consumed. -#! If the claim note is consumed twice, only one P2ID output note will be successfully consumed. -#! 2) We can have a mapping in the bridge or in the faucet that stores consumed claim proofs -#! as a hash -> bool value (similar to how it's done in the agglayer solidity contract). -#! -#! Inputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY, pad(4)] -#! Outputs: [pad(16)] -#! -#! Advice map: { -#! PROOF_DATA_KEY => [ -#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) -#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) -#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) -#! ], -#! LEAF_DATA_KEY => [ -#! originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! EMPTY_WORD // padding -#! ], -#! OUTPUT_NOTE_DATA_KEY => [ -#! output_p2id_serial_num[4], // P2ID note serial number (4 felts, Word) -#! agglayer_faucet_account_id[2], // Agglayer faucet account ID (2 felts, prefix and suffix) -#! output_note_tag[1], // P2ID output note tag -#! ] -#! } -#! -#! Panics if: -#! - the rollup exit root Merkle Proof validation via FPI fails. -#! - any of the validations in faucets::distribute fail. -#! -#! 
Invocation: call -pub proc claim - # Check AdviceMap values hash to keys & write CLAIM inputs & DATA_KEYs to global memory - exec.batch_pipe_double_words - # => [] - - # VALIDATE CLAIM - mem_loadw_be.LEAF_DATA_KEY_MEM_ADDR padw - mem_loadw_be.PROOF_DATA_KEY_MEM_ADDR - # => [PROOF_DATA_KEY, LEAF_DATA_KEY] - - # Errors on invalid proof - exec.validate_claim - # => [] - - # Create P2ID output note - exec.build_p2id_output_note - # => [] -end - -#! Burns the fungible asset from the active note. -#! -#! This procedure retrieves the asset from the active note and burns it. The note must contain -#! exactly one asset, which must be a fungible asset issued by this faucet. -#! -#! Inputs: [pad(16)] -#! Outputs: [pad(16)] -#! -#! Panics if: -#! - the procedure is not called from a note context (active_note::get_assets will fail). -#! - the note does not contain exactly one asset. -#! - the transaction is executed against an account which is not a fungible asset faucet. -#! - the transaction is executed against a faucet which is not the origin of the specified asset. -#! - the amount about to be burned is greater than the outstanding supply of the asset. -#! -#! Invocation: call -pub use ::miden::standards::faucets::basic_fungible::burn diff --git a/crates/miden-agglayer/asm/bridge/asset_conversion.masm b/crates/miden-agglayer/asm/bridge/asset_conversion.masm deleted file mode 100644 index e4f59f17d4..0000000000 --- a/crates/miden-agglayer/asm/bridge/asset_conversion.masm +++ /dev/null @@ -1,114 +0,0 @@ -use miden::core::math::u64 -use miden::core::word - -# CONSTANTS -# ================================================================================================= - -const MAX_SCALING_FACTOR=18 - -# ERRORS -# ================================================================================================= -const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT="maximum scaling factor is 18" - -#! Calculate 10^scale where scale is a u8 exponent. -#! -#! Inputs: [scale] -#! 
Outputs: [10^scale] -#! -#! Where: -#! - scale is expected to be a small integer (0-18 typical for crypto decimals) -#! -#! Panics if: -#! - scale > 18 (overflow protection) -proc pow10 - u32assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT - # => [scale] - - dup u32lte.MAX_SCALING_FACTOR assert.err=ERR_SCALE_AMOUNT_EXCEEDED_LIMIT - # => [scale] - - push.1 swap - # => [scale, result] - - dup neq.0 - # => [is_not_zero, scale, result] - - # Loop to calculate 10^scale - while.true - # => [scale, result] - - # result *= 10 - swap mul.10 swap - # => [scale, result*10] - - # scale -= 1 - sub.1 - # => [scale-1, result*10] - - dup neq.0 - # => [is_not_zero, scale-1, result*10] - end - # => [0, result] - - drop - # => [result] -end - -#! Convert an asset amount to a scaled U256 representation for bridging to Agglayer. -#! -#! This procedure is used to convert Miden asset amounts to EVM asset amounts. -#! It multiplies the input amount by 10^target_scale to adjust for decimal differences -#! between the current representation and the target chain's native decimals. -#! -#! The procedure first calculates 10^target_scale using the pow10 helper, then converts -#! both the amount and scale factor to U64 format, performs U64 multiplication, and -#! returns the result as 8 u32 limbs in little-endian order (U256 format). -#! -#! Inputs: [amount, target_scale] -#! Outputs: [[RESULT_U256[0], RESULT_U256[1]]] -#! -#! Where: -#! - amount: The asset amount to be converted (range: 0 to 2^63 - 2^31) -#! - target_scale: Exponent for scaling factor (10^target_scale) -#! - [RESULT_U256[0], RESULT_U256[1]]: U256 value as 8 u32 limbs in little-endian order -#! (least significant limb at the top of the stack, each limb stored in little-endian format) -#! -#! Examples: -#! - USDC: amount=1000000000, target_scale=0 → 1000000000 (no scaling) -#! - ETH: amount=1e10, target_scale=8 → 1e18 -#! -#! 
Invocation: exec -pub proc scale_native_amount_to_u256 - swap - # => [target_scale, amount] - - exec.pow10 - # => [scale, amount] - - u32split - # => [scale_hi, scale_lo, amount] - - movup.2 u32split - # => [amount_hi, amount_lo, scale_hi, scale_lo] - - # Perform U64 multiplication: amount * scale - # This is safe because both the scaling factor and amount are guaranteed to be smaller - # than 2^64, so we will never overflow a 256-bit value. - exec.u64::overflowing_mul - # => [res_hi, res_mid_hi, res_mid_lo, res_lo] - - exec.word::reverse - # => [res_lo, res_mid_lo, res_mid_hi, res_hi] - - # convert to U256 & little endian - padw swapw - # => [RESULT_U256[0], RESULT_U256[1]] -end - -#! TODO: implement scaling down -#! -#! Inputs: [U256[0], U256[1]] -#! Outputs: [amount] -pub proc scale_u256_to_native_amount - repeat.7 drop end -end diff --git a/crates/miden-agglayer/asm/bridge/bridge_in.masm b/crates/miden-agglayer/asm/bridge/bridge_in.masm deleted file mode 100644 index 1862fc5e35..0000000000 --- a/crates/miden-agglayer/asm/bridge/bridge_in.masm +++ /dev/null @@ -1,43 +0,0 @@ -use miden::agglayer::crypto_utils - -# Inputs: [] -# Output: [GER_ROOT[8]] -pub proc get_rollup_exit_root - # Push dummy GER (8 elements) - push.0.0.0.0.0.0.0.0 # dummy GER -end - -#! Checks the validity of the GET proof -#! -#! Inputs: -#! Operand stack: [PROOF_DATA_KEY, LEAF_DATA_KEY, pad(8)] -#! Advice map: { -#! PROOF_DATA_KEY => [ -#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) -#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) -#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) -#! ], -#! LEAF_DATA_KEY => [ -#! 
originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! EMPTY_WORD // padding -#! ], -#! } -#! -#! Invocation: call -pub proc check_claim_proof - exec.get_rollup_exit_root - # => [GER_ROOT[8], CLAIM_NOTE_RPO_COMMITMENT] - - # Check CLAIM note proof data against current GER - exec.crypto_utils::verify_claim_proof - # => [is_valid_claim_proof] - - swap drop -end diff --git a/crates/miden-agglayer/asm/bridge/bridge_out.masm b/crates/miden-agglayer/asm/bridge/bridge_out.masm deleted file mode 100644 index 3c53b62763..0000000000 --- a/crates/miden-agglayer/asm/bridge/bridge_out.masm +++ /dev/null @@ -1,160 +0,0 @@ -use miden::protocol::active_note -use miden::protocol::note -use miden::protocol::output_note -use miden::core::crypto::hashes::keccak256 -use miden::core::crypto::hashes::rpo256 -use miden::core::word -use miden::agglayer::local_exit_tree - -# CONSTANTS -# ================================================================================================= -const MMR_PTR=42 -const LOCAL_EXIT_TREE_SLOT=word("miden::agglayer::let") - -const BURN_NOTE_ROOT = [15615638671708113717, 1774623749760042586, 2028263167268363492, 12931944505143778072] -const PUBLIC_NOTE=1 -const NUM_BURN_NOTE_INPUTS=0 -const BURN_ASSET_MEM_PTR=24 - -#! Computes the SERIAL_NUM of the outputted BURN note. -#! -#! The serial number is computed as hash(B2AGG_SERIAL_NUM, ASSET). -#! -#! Inputs: [ASSET] -#! Outputs: [SERIAL_NUM] -#! -#! Where: -#! - ASSET is the asset for which to compute the burn note serial number. -#! - SERIAL_NUM is the computed serial number for the BURN note. -#! -#! 
Invocation: exec -proc compute_burn_note_serial_num - exec.active_note::get_serial_number - # => [B2AGG_SERIAL_NUM, ASSET] - - exec.rpo256::merge - # => [SERIAL_NUM] -end - -#! Creates a BURN note for the specified asset. -#! -#! This procedure creates an output note that represents a burn operation for the given asset. -#! The note is configured with the appropriate recipient, tag, and execution hint. -#! -#! Inputs: [ASSET] -#! Outputs: [] -#! -#! Where: -#! - ASSET is the asset to be burned. -#! -#! Invocation: exec -@locals(8) -proc create_burn_note - loc_storew_be.0 dupw - # => [ASSET, ASSET] - - movup.2 drop movup.2 drop - # => [faucet_id_prefix, faucet_id_suffix, ASSET] - - exec.note::build_note_tag_for_network_account - # => [network_faucet_tag, ASSET] - - loc_store.5 - # => [ASSET] - - exec.compute_burn_note_serial_num - # => [SERIAL_NUM] - - push.BURN_NOTE_ROOT swapw - # => [SERIAL_NUM, SCRIPT_ROOT] - - push.NUM_BURN_NOTE_INPUTS push.0 - # => [inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - exec.note::build_recipient - # => [RECIPIENT] - - push.PUBLIC_NOTE - loc_load.5 - # => [tag, note_type, RECIPIENT] - - call.output_note::create - # => [note_idx] - - movdn.4 loc_loadw_be.0 - # => [ASSET, note_idx] - - exec.output_note::add_asset - # => [] -end - -#! Bridges an asset out via the AggLayer -#! -#! This procedure handles the complete bridge-out operation, including: -#! - Converting asset data to u32 format -#! - Computing Keccak hash of the data -#! - Adding the hash to the MMR frontier -#! - Storing the updated MMR root in account storage -#! - Creating a BURN note with the bridged out asset -#! -#! Inputs: [ASSET, dest_network, dest_address(5)] -#! Outputs: [] -#! -#! Where: -#! - ASSET is the asset to be bridged out. -#! - dest_network is the u32 destination network/chain ID. -#! - dest_address(5) are 5 u32 values representing a 20-byte Ethereum address. -#! -#! 
Invocation: call -pub proc bridge_out - mem_storew_be.BURN_ASSET_MEM_PTR - # => [ASSET, dest_network, dest_address(5)] - - # @dev TODO: Look up asset faucet id in asset registry - # -> return scaling factor - - # @dev TODO: Convert ASSET amount to EVM amount using scaling factor - # -> return amount from here: https://github.com/0xMiden/miden-base/pull/2141 - - # Converting SCALED_ASSET, dest_network, dest_address(5) to u32 representation - # in preparation for keccak256 hashing - - # keccak256 inputs: - # => [ASSET, dest_network, dest_address(5)] - # TODO we should convert Miden->Ethereum asset values, incl. amount conversion etc. - - # TODO: make building bridge message a separate procedure - # TODO: match Agglayer addLeafBridge logic - # TODO: convert Miden asset amount to Ethereum amount - # Store ASSET as u32 limbs in memory starting at address 0 - push.0 movdn.4 exec.word::store_word_u32s_le - # => [dest_network, dest_address(5)] - - # Store [dest_network, dest_address[0..3]] as u32 limbs in memory starting at address 8 - push.8 movdn.4 exec.word::store_word_u32s_le - # => [dest_address(2), 0, 0] - - # Store [dest_address[3..5], 0, 0] as u32 limbs in memory starting at address 16 - push.16 movdn.4 exec.word::store_word_u32s_le - # => [] - - # 1 u32 = 4 bytes - # 10 u32 values = 40 bytes - push.40 push.0 - # => [ptr, len_bytes] - - exec.keccak256::hash_bytes - # => [DIGEST_U32[8]] - - # adding DIGEST_U32 double word leaf to mmr frontier - exec.local_exit_tree::add_asset_message - # => [] - - # creating BURN output note for ASSET - mem_loadw_be.BURN_ASSET_MEM_PTR - # => [ASSET] - - exec.create_burn_note - # => [] -end - diff --git a/crates/miden-agglayer/asm/bridge/crypto_utils.masm b/crates/miden-agglayer/asm/bridge/crypto_utils.masm deleted file mode 100644 index 7796c1f94f..0000000000 --- a/crates/miden-agglayer/asm/bridge/crypto_utils.masm +++ /dev/null @@ -1,82 +0,0 @@ -use miden::core::crypto::hashes::keccak256 - -#! 
Given the leaf data returns the leaf value. -#! -#! Inputs: [leaf_type, origin_network, ORIGIN_ADDRESS, destination_network, DESTINATION_ADDRESS, amount, METADATA_HASH] -#! Outputs: [LEAF_VALUE] -#! -#! Where: -#! - leaf_type is the leaf type: [0] transfer Ether / ERC20 tokens, [1] message. -#! - origin_network is the origin network identifier. -#! - ORIGIN_ADDRESS is the origin token address (5 elements) -#! - destination_network is the destination network identifier. -#! - DESTINATION_ADDRESS is the destination address (5 elements). -#! - amount is the amount: [0] Amount of tokens/ether, [1] Amount of ether. -#! - METADATA_HASH is the hash of the metadata (8 elements). -#! - LEAF_VALUE is the computed leaf value (8 elements). -#! -#! This function computes the keccak256 hash of the abi.encodePacked data. -#! -#! Invocation: exec -pub proc get_leaf_value - # TODO: implement getLeafValue() - # https://github.com/agglayer/agglayer-contracts/blob/e468f9b0967334403069aa650d9f1164b1731ebb/contracts/v2/lib/DepositContractV2.sol#L22 - - # stubbed out: - push.1.1.1.1 - push.1.1.1.1 - - # exec.keccak256::hash_bytes - # => [LEAF_VALUE[8]] -end - -#! Verify leaf and checks that it has not been claimed. -#! -#! This procedure verifies that a claim proof is valid against the Global Exit Tree (GET) -#! and that the leaf has not been previously claimed. -#! -#! Inputs: -#! Operand stack: [GER_ROOT[8], CLAIM_PROOF_RPO_COMMITMENT, pad(12)] -#! Advice map: { -#! PROOF_DATA_KEY => [ -#! smtProofLocalExitRoot[256], // SMT proof for local exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! smtProofRollupExitRoot[256], // SMT proof for rollup exit root (256 felts, bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) -#! globalIndex[8], // Global index (8 felts, uint256 as 8 u32 felts) -#! mainnetExitRoot[8], // Mainnet exit root hash (8 felts, bytes32 as 8 u32 felts) -#! rollupExitRoot[8], // Rollup exit root hash (8 felts, bytes32 as 8 u32 felts) -#! ], -#! LEAF_DATA_KEY => [ -#! 
originNetwork[1], // Origin network identifier (1 felt, uint32) -#! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) -#! destinationNetwork[1], // Destination network identifier (1 felt, uint32) -#! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) -#! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) -#! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! EMPTY_WORD // padding -#! ], -#! } -#! Outputs: -#! Operand stack: [is_valid] -#! -#! Where: -#! - RPO_CLAIM_NOTE_INPUTS_COMMITMENT is the RPO hash commitment of all claim note inputs -#! - leafType is the leaf type: [0] transfer Ether / ERC20 tokens, [1] message -#! - originNetwork is the origin network identifier (u32 as Felt) -#! - originAddress is the origin address (5 felts representing address) -#! - destinationNetwork is the destination network identifier (u32 as Felt) -#! - destinationAddress is the destination address (5 felts representing address) -#! - amount is the amount of tokens (u256 as Felt) -#! - metadata is the metadata (4 felts representing 4 u32 0 values) -#! - index is the index of the leaf (u32 as Felt) -#! - claimRoot is the claim root (8 felts representing bytes32) -#! - smtProof is the SMT proof data (570 felts) -#! - is_valid is 1 if the leaf is valid and not claimed, 0 otherwise -#! -#! 
Invocation: exec -pub proc verify_claim_proof - # TODO: Implement actual Global Exit Tree proof verification - - # For now, drop all inputs and return 1 (valid) - dropw dropw dropw dropw - push.1 -end diff --git a/crates/miden-agglayer/asm/bridge/local_exit_tree.masm b/crates/miden-agglayer/asm/bridge/local_exit_tree.masm deleted file mode 100644 index 89e744507b..0000000000 --- a/crates/miden-agglayer/asm/bridge/local_exit_tree.masm +++ /dev/null @@ -1,120 +0,0 @@ -use miden::protocol::active_account -use miden::protocol::native_account - -# CONSTANTS -# ================================================================================================= - -const MMR_PTR=42 -const LOCAL_EXIT_TREE_SLOT=word("miden::agglayer::let") - -#! Adds a leaf to the MMR frontier using Keccak hashing (stubbed implementation). -#! -#! This is a stubbed implementation that currently drops all inputs without performing -#! the actual MMR frontier addition operation. -#! -#! Inputs: [LEAF[1], LEAF[0], mmr_ptr] -#! Outputs: [] -#! -#! Where: -#! - LEAF[1], LEAF[0] are the leaf data to add to the MMR frontier. -#! - mmr_ptr is the pointer to the MMR frontier data structure. -#! -#! Invocation: exec -proc mmr_frontier_keccak_add - dropw dropw drop - # => [] -end - -#! Gets the root of the MMR frontier using Keccak hashing (stubbed implementation). -#! -#! This is a stubbed implementation that returns placeholder values instead of -#! computing the actual MMR frontier root. -#! -#! Inputs: [mmr_ptr] -#! Outputs: [ROOT[1], ROOT[0]] -#! -#! Where: -#! - ROOT[1], ROOT[0] are the root hash components of the MMR frontier whose memory location starts at mmr_ptr -#! -#! 
Invocation: exec -pub proc mmr_frontier_keccak_get_root - # stubbed out for now - drop - # => [] - - push.0.0.0.1 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] - - exec.active_account::get_map_item - # => [ROOT[0]] - - push.0.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, ROOT[0]] - - exec.active_account::get_map_item - # => [ROOT[1], ROOT[0]] -end - -#! Writes the MMR frontier root to account storage. -#! -#! This procedure retrieves the current MMR frontier root and stores it as a double word -#! in the account's storage map. The root is split across two storage keys: -#! - Key [0,0,0,0] stores ROOT[1] (high part) -#! - Key [0,0,0,1] stores ROOT[0] (low part) -#! -#! Inputs: [] -#! Outputs: [] -#! -#! Invocation: exec -proc write_mmr_frontier_root - push.MMR_PTR - # => [MMR_PTR] - - # getting mmr frontier root - exec.mmr_frontier_keccak_get_root - # => [ROOT[1], ROOT[0]] - - # writing double word root to map keys [0,0,0,0] & [0,0,0,1] - push.0.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [index, KEY, ROOT[1], ROOT[0]] - - exec.native_account::set_map_item - # => [OLD_MAP_ROOT, OLD_MAP_VALUE, ROOT[0]] - - dropw dropw - # => [ROOT[0]] - - push.1.0.0.0 push.LOCAL_EXIT_TREE_SLOT[0..2] - # => [index, KEY, ROOT[0]] - - exec.native_account::set_map_item - # => [OLD_MAP_ROOT, OLD_MAP_VALUE] - - dropw dropw - # => [] -end - -#! Adds an asset message to the MMR frontier and updates the stored root. -#! -#! This procedure takes a Keccak digest (represented as 8 u32 values) and adds it -#! as a leaf to the MMR frontier. After adding the leaf, it updates the MMR root -#! in the account's storage to reflect the new state. -#! -#! Inputs: [DIGEST_U32[8]] -#! Outputs: [] -#! -#! Where: -#! - DIGEST_U32[8] is a Keccak256 hash represented as 8 u32 values (256 bits total). -#! -#! 
Invocation: exec -pub proc add_asset_message - push.MMR_PTR movdn.8 - # => [LEAF[1], LEAF[0], mmr_ptr] - - exec.mmr_frontier_keccak_add - # => [] - - exec.write_mmr_frontier_root - # => [] -end - diff --git a/crates/miden-agglayer/asm/components/bridge.masm b/crates/miden-agglayer/asm/components/bridge.masm new file mode 100644 index 0000000000..98c8287576 --- /dev/null +++ b/crates/miden-agglayer/asm/components/bridge.masm @@ -0,0 +1,9 @@ +# The MASM code of the AggLayer Bridge Account Component. +# +# This is a thin wrapper that re-exports bridge-related procedures from the +# agglayer library. + +pub use ::miden::agglayer::bridge::bridge_config::register_faucet +pub use ::miden::agglayer::bridge::bridge_config::update_ger +pub use ::miden::agglayer::bridge::bridge_in::verify_leaf_bridge +pub use ::miden::agglayer::bridge::bridge_out::bridge_out diff --git a/crates/miden-agglayer/asm/components/faucet.masm b/crates/miden-agglayer/asm/components/faucet.masm new file mode 100644 index 0000000000..641b6089e7 --- /dev/null +++ b/crates/miden-agglayer/asm/components/faucet.masm @@ -0,0 +1,10 @@ +# The MASM code of the AggLayer Faucet Account Component. +# +# This is a thin wrapper that re-exports faucet-related procedures from the +# agglayer library. Only procedures relevant to faucet accounts are exposed +# here, so that bridge-specific procedures (like `bridge_out`) are not +# available on faucet accounts. 
+ +pub use ::miden::agglayer::faucet::claim +pub use ::miden::agglayer::faucet::asset_to_origin_asset +pub use ::miden::agglayer::faucet::burn diff --git a/crates/miden-agglayer/asm/note_scripts/B2AGG.masm b/crates/miden-agglayer/asm/note_scripts/B2AGG.masm index 80bdcfbb7f..b6e6c15697 100644 --- a/crates/miden-agglayer/asm/note_scripts/B2AGG.masm +++ b/crates/miden-agglayer/asm/note_scripts/B2AGG.masm @@ -1,43 +1,56 @@ -use miden::agglayer::bridge_out +use miden::agglayer::bridge::bridge_out use miden::protocol::account_id use miden::protocol::active_account use miden::protocol::active_note +use miden::protocol::asset +use miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET +use miden::protocol::note +use miden::standards::attachments::network_account_target use miden::standards::wallets::basic->basic_wallet # CONSTANTS # ================================================================================================= -const B2AGG_NOTE_INPUTS_COUNT=6 +const ASSET_PTR=0 +const B2AGG_NOTE_NUM_STORAGE_ITEMS=6 + +const STORAGE_START_PTR=8 +const STORAGE_END_PTR=STORAGE_START_PTR + 8 # ERRORS # ================================================================================================= const ERR_B2AGG_WRONG_NUMBER_OF_ASSETS="B2AGG script requires exactly 1 note asset" +const ERR_B2AGG_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="B2AGG script expects exactly 6 note storage items" +const ERR_B2AGG_TARGET_ACCOUNT_MISMATCH="B2AGG note attachment target account does not match consuming account" -const ERR_B2AGG_WRONG_NUMBER_OF_INPUTS="B2AGG script expects exactly 6 note inputs" +# NOTE SCRIPT +# ================================================================================================= #! Bridge-to-AggLayer (B2AGG) note script: bridges assets from Miden to an AggLayer-connected chain. #! #! This note can be consumed in two ways: #! - If the consuming account is the sender (reclaim): the note's assets are added back to the consuming account. -#! 
- If the consuming account is the Agglayer Bridge: the note's assets are moved to a BURN note, +#! - If the consuming account is the Agglayer Bridge: the note's assets are moved to a BURN note, #! and the note details are hashed into a leaf and appended to the Local Exit Tree. -#! global exit root (GER) merkle tree structure. #! #! Inputs: [] #! Outputs: [] #! -#! Note inputs are assumed to be as follows: -#! - destination_network: u32 value representing the target chain ID -#! - destination_address: split into 5 u32 values representing a 20-byte Ethereum address: -#! - destination_address_0: bytes 0-3 -#! - destination_address_1: bytes 4-7 -#! - destination_address_2: bytes 8-11 -#! - destination_address_3: bytes 12-15 -#! - destination_address_4: bytes 16-19 +#! Note storage layout (6 felts total): +#! - destination_network [0] : 1 felt +#! - destination_address [1..5] : 5 felts +#! +#! Where: +#! - destination_network: Destination network identifier (uint32) +#! - destination_address: 20-byte Ethereum address as 5 u32 felts +#! +#! Note attachment is constructed from a NetworkAccountTarget standard: +#! - [0, exec_hint_tag, target_id_prefix, target_id_suffix] #! #! Panics if: -#! - The note does not contain exactly 6 inputs. +#! - The note does not contain exactly 6 storage items. #! - The note does not contain exactly 1 asset. +#! - The note attachment does not target the consuming account. #! begin dropw @@ -58,30 +71,38 @@ begin exec.basic_wallet::add_assets_to_account # => [pad(16)] else - # Store note inputs -> mem[8..14] - push.8 exec.active_note::get_inputs - # => [num_inputs, dest_ptr, pad(16)] + # Ensure note attachment targets the consuming bridge account. 
+ exec.network_account_target::active_account_matches_target_account + assert.err=ERR_B2AGG_TARGET_ACCOUNT_MISMATCH + # => [pad(16)] + + # Store note storage -> mem[8..14] + push.STORAGE_START_PTR exec.active_note::get_storage + # => [num_storage_items, dest_ptr, pad(16)] - push.B2AGG_NOTE_INPUTS_COUNT assert_eq.err=ERR_B2AGG_WRONG_NUMBER_OF_INPUTS drop + push.B2AGG_NOTE_NUM_STORAGE_ITEMS assert_eq.err=ERR_B2AGG_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS drop # => [pad(16)] - # Store note assets -> mem[0..4] - push.0 exec.active_note::get_assets + # Store note assets -> mem[0..8] + push.ASSET_PTR exec.active_note::get_assets # => [num_assets, ptr, pad(16)] # Must be exactly 1 asset push.1 assert_eq.err=ERR_B2AGG_WRONG_NUMBER_OF_ASSETS drop # => [pad(16)] - # load the 6 B2AGG note input felts as two words - mem_loadw_be.12 swapw.2 mem_loadw_be.8 swapw - # => [EMPTY_WORD, dest_network, dest_address(5), pad(6)] + # load the 6 B2AGG felts from note storage as two words + push.STORAGE_START_PTR add.4 mem_loadw_le swapw mem_loadw_le.STORAGE_START_PTR + # => [dest_network, dest_address(5), pad(10)] - # Load ASSET onto the stack - mem_loadw_be.0 - # => [ASSET, dest_network, dest_address(5), pad(6)] + # Load asset onto the stack from ASSET_PTR + push.ASSET_PTR exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, dest_network, dest_address(5), pad(10)] call.bridge_out::bridge_out + # => [pad(24)] + + dropw dropw # => [pad(16)] end # => [pad(16)] diff --git a/crates/miden-agglayer/asm/note_scripts/CLAIM.masm b/crates/miden-agglayer/asm/note_scripts/CLAIM.masm index 83c41a65bc..5d47c53e16 100644 --- a/crates/miden-agglayer/asm/note_scripts/CLAIM.masm +++ b/crates/miden-agglayer/asm/note_scripts/CLAIM.masm @@ -1,54 +1,26 @@ -use miden::agglayer::agglayer_faucet -> agg_faucet -use miden::protocol::account_id -use miden::protocol::active_account +use miden::agglayer::faucet -> agg_faucet use miden::protocol::active_note use miden::protocol::note use miden::core::crypto::hashes::keccak256 
-use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem +use miden::standards::attachments::network_account_target # CONSTANTS # ================================================================================================= const PROOF_DATA_SIZE = 536 -const LEAF_DATA_SIZE = 24 +const LEAF_DATA_SIZE = 32 const OUTPUT_NOTE_SIZE = 8 const PROOF_DATA_START_PTR = 0 const LEAF_DATA_START_PTR = 536 -const OUTPUT_NOTE_DATA_START_PTR = 568 - -const TARGET_FAUCET_PREFIX_MEM_ADDR = 572 -const TARGET_FAUCET_SUFFIX_MEM_ADDR = 573 +const FAUCET_MINT_AMOUNT = 568 # ERRORS # ================================================================================================= -const ERR_CLAIM_TARGET_ACCT_MISMATCH = "CLAIM's target account address and transaction address do not match" - -#! Asserts that the consuming account matches the target agglayer faucet account. -#! -#! This procedure ensures that only the specified agglayer faucet account can consume -#! this CLAIM note. It assumes that the note inputs have already been loaded into memory -#! via active_note::get_inputs. -#! -#! Inputs: [] -#! Output: [] -#! -#! Panics if: -#! - The consuming account ID does not match the target faucet account ID stored in memory -proc assert_aggfaucet_is_consumer - # Load target faucet ID (assumes active_note::get_inputs has been called) - mem_load.TARGET_FAUCET_SUFFIX_MEM_ADDR mem_load.TARGET_FAUCET_PREFIX_MEM_ADDR - # => [target_faucet_prefix, target_faucet_suffix] - - exec.active_account::get_id - # => [account_id_prefix, account_id_suffix, target_faucet_prefix, target_faucet_suffix] - - # ensure only the specified target faucet can consume this CLAIM note, not any other account - exec.account_id::is_equal assert.err=ERR_CLAIM_TARGET_ACCT_MISMATCH - # => [] -end +const ERR_CLAIM_TARGET_ACCT_MISMATCH = "CLAIM note attachment target account does not match consuming account" #! 
Reads claim data from memory and inserts it into the advice map under three separate keys. #! @@ -56,7 +28,7 @@ end #! into the advice map under separate keys for easier access. #! #! Inputs: [] -#! Outputs: [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] +#! Outputs: [PROOF_DATA_KEY, LEAF_DATA_KEY] #! #! Advice map entries created: #! PROOF_DATA_KEY => [ @@ -68,76 +40,54 @@ end #! ] #! #! LEAF_DATA_KEY => [ +#! leafType[1], // Leaf type (1 felt, uint32) #! originNetwork[1], // Origin network identifier (1 felt, uint32) #! originTokenAddress[5], // Origin token address (5 felts, address as 5 u32 felts) #! destinationNetwork[1], // Destination network identifier (1 felt, uint32) #! destinationAddress[5], // Destination address (5 felts, address as 5 u32 felts) #! amount[8], // Amount of tokens (8 felts, uint256 as 8 u32 felts) #! metadata[8], // ABI encoded metadata (8 felts, fixed size) -#! EMPTY_WORD // padding -#! ] -#! -#! TODO: Will be removed in future PR -#! OUTPUT_NOTE_DATA_KEY => [ -#! output_p2id_serial_num[4], // P2ID note serial number (4 felts, Word) -#! target_faucet_account_id[2], // Target faucet account ID (2 felts, prefix and suffix) -#! output_note_tag[1], // P2ID output note tag +#! padding[3], // padding (3 felts) #! ] #! #! 
Invocation: exec proc write_claim_data_into_advice_map_by_key - # 1) Get OUTPUT_NOTE_DATA_KEY - push.OUTPUT_NOTE_SIZE push.OUTPUT_NOTE_DATA_START_PTR - exec.rpo256::hash_elements - # => [OUTPUT_NOTE_DATA_KEY] - - push.OUTPUT_NOTE_SIZE add.OUTPUT_NOTE_DATA_START_PTR push.OUTPUT_NOTE_DATA_START_PTR - movdn.5 movdn.5 - # => [OUTPUT_NOTE_DATA_KEY, start_ptr, end_ptr] - - adv.insert_mem - # OS => [OUTPUT_NOTE_DATA_KEY, start_ptr, end_ptr, pad(16)] - # AM => {OUTPUT_NOTE_DATA_KEY: mem[start_ptr..end_ptr] } - - movup.4 drop movup.4 drop - # => [OUTPUT_NOTE_DATA_KEY] - - # 2) Get LEAF_DATA_KEY + # 1) Get LEAF_DATA_KEY push.LEAF_DATA_SIZE push.LEAF_DATA_START_PTR - exec.rpo256::hash_elements - # => [LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + exec.poseidon2::hash_elements + # => [LEAF_DATA_KEY] push.LEAF_DATA_SIZE add.LEAF_DATA_START_PTR push.LEAF_DATA_START_PTR movdn.5 movdn.5 - # => [LEAF_DATA_KEY, start_ptr, end_ptr, OUTPUT_NOTE_DATA_KEY] + # => [LEAF_DATA_KEY, start_ptr, end_ptr] adv.insert_mem # OS => [LEAF_DATA_KEY, start_ptr, end_ptr] # AM => {LEAF_DATA_KEY: mem[start_ptr..end_ptr] } movup.4 drop movup.4 drop - # => [LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [LEAF_DATA_KEY] - # 3) Get PROOF_DATA_KEY + # 2) Get PROOF_DATA_KEY push.PROOF_DATA_SIZE push.PROOF_DATA_START_PTR - exec.rpo256::hash_elements - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + exec.poseidon2::hash_elements + # => [PROOF_DATA_KEY, LEAF_DATA_KEY] push.PROOF_DATA_SIZE push.PROOF_DATA_START_PTR movdn.5 movdn.5 - # => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY] adv.insert_mem - # OS => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # OS => [PROOF_DATA_KEY, start_ptr, end_ptr, LEAF_DATA_KEY] # AM => {PROOF_DATA_KEY: mem[start_ptr..end_ptr] } movup.4 drop movup.4 drop - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY] + # => [PROOF_DATA_KEY, LEAF_DATA_KEY] end #! 
Agglayer Faucet CLAIM script: claims assets by calling the agglayer faucet's claim function. #! #! This note can only be consumed by the specific agglayer faucet account whose ID is provided -#! in the note inputs (target_faucet_account_id). Upon consumption, it will create a P2ID note. +#! in the note attachment (NetworkAccountTarget). Upon consumption, it will create a P2ID note. #! #! Requires that the account exposes: #! - agglayer::agglayer_faucet::claim procedure. @@ -145,22 +95,21 @@ end #! Inputs: [ARGS, pad(12)] #! Outputs: [pad(16)] #! -#! NoteInputs layout (575 felts total): +#! NoteStorage layout (569 felts total): #! - smtProofLocalExitRoot [0..255] : 256 felts #! - smtProofRollupExitRoot [256..511]: 256 felts #! - globalIndex [512..519]: 8 felts #! - mainnetExitRoot [520..527]: 8 felts #! - rollupExitRoot [528..535]: 8 felts -#! - originNetwork [536] : 1 felt -#! - originTokenAddress [537..541]: 5 felts -#! - destinationNetwork [542] : 1 felt -#! - destinationAddress [543..547]: 5 felts -#! - amount [548..555]: 8 felts -#! - metadata [556..563]: 8 felts -#! - EMPTY_WORD [564..567]: 4 felts -#! - output_p2id_serial_num [568..571]: 4 felts -#! - target_faucet_account_id [572..573]: 2 felts -#! - output_note_tag [574] : 1 felt +#! - leafType [536] : 1 felt +#! - originNetwork [537] : 1 felt +#! - originTokenAddress [538..542]: 5 felts +#! - destinationNetwork [543] : 1 felt +#! - destinationAddress [544..548]: 5 felts +#! - amount [549..556]: 8 felts +#! - metadata [557..564]: 8 felts +#! - padding [565..567]: 3 felts +#! - miden_claim_amount [568] : 1 felt #! #! Where: #! - smtProofLocalExitRoot: SMT proof for local exit root (bytes32[_DEPOSIT_CONTRACT_TREE_DEPTH]) @@ -172,40 +121,46 @@ end #! - Top 191 bits are ignored (not required to be zero), so indexers must decode it exactly like the contract does #! - mainnetExitRoot: Mainnet exit root hash (bytes32 as 8 u32 felts) #! - rollupExitRoot: Rollup exit root hash (bytes32 as 8 u32 felts) +#! 
- leafType: Leaf type (uint32): [0] transfer Ether / ERC20 tokens, [1] message #! - originNetwork: Origin network identifier (uint32) #! - originTokenAddress: Origin token address (address as 5 u32 felts) #! - destinationNetwork: Destination network identifier (uint32) -#! - destinationAddress: Destination address (address as 5 u32 felts) +#! - destinationAddress: 20-byte Ethereum address decodable into a Miden AccountId (5 u32 felts) #! - amount: Amount of tokens (uint256 as 8 u32 felts) #! - metadata: ABI encoded metadata (fixed size) -#! - EMPTY_WORD: Padding word -#! - output_p2id_serial_num: P2ID note serial number (Word) -#! - target_faucet_account_id: Target agglayer faucet account ID (prefix and suffix). Only this specific -#! account can consume the note - any other account will cause a panic. -#! - output_note_tag: P2ID output note tag +#! - miden_claim_amount: Scaled-down Miden token amount (Felt). This is the Y value computed from +#! scaling down the Ethereum amount (X) by the scale exponent: Y = floor(X / 10^scale_exp) #! #! Panics if: #! - account does not expose claim procedure. -#! - target faucet account ID does not match the consuming account ID. +#! - note attachment target account does not match the consuming account. begin dropw # => [pad(16)] - # Load CLAIM note inputs into memory, starting at address 0 - push.0 exec.active_note::get_inputs drop drop + # Ensure note attachment targets the consuming faucet account. 
+ exec.network_account_target::active_account_matches_target_account + assert.err=ERR_CLAIM_TARGET_ACCT_MISMATCH # => [pad(16)] - # Check consuming account == aggfaucet - exec.assert_aggfaucet_is_consumer + # Load CLAIM note storage into memory, starting at address 0 + push.0 exec.active_note::get_storage drop drop # => [pad(16)] exec.write_claim_data_into_advice_map_by_key - # => [PROOF_DATA_KEY, LEAF_DATA_KEY, OUTPUT_NOTE_DATA_KEY, pad(4)] + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, pad(16)] + + mem_load.FAUCET_MINT_AMOUNT + # => [faucet_mint_amount, PROOF_DATA_KEY, LEAF_DATA_KEY, pad(16)] + + movdn.8 + # => [PROOF_DATA_KEY, LEAF_DATA_KEY, faucet_mint_amount, pad(16)] - # Call the Aggfaucet Claim procedure + # call the Aggfaucet Claim procedure call.agg_faucet::claim - # => [pad(16), pad(12)] + # => [pad(16), pad(9)] - dropw dropw dropw + # a call invocation consumes and returns 16 elements, but we had trailing padding + dropw dropw drop # => [pad(16)] end diff --git a/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm b/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm new file mode 100644 index 0000000000..19d37c8d68 --- /dev/null +++ b/crates/miden-agglayer/asm/note_scripts/CONFIG_AGG_BRIDGE.masm @@ -0,0 +1,68 @@ +use miden::agglayer::bridge::bridge_config +use miden::protocol::active_note +use miden::protocol::active_account +use miden::protocol::account_id +use miden::standards::attachments::network_account_target + +# CONSTANTS +# ================================================================================================= + +const STORAGE_START_PTR = 0 +const CONFIG_AGG_BRIDGE_NUM_STORAGE_ITEMS = 2 + +# ERRORS +# ================================================================================================= + +const ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS = "CONFIG_AGG_BRIDGE expects exactly 2 note storage items" +const ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH = "CONFIG_AGG_BRIDGE note attachment target account does not 
match consuming account" + +#! Registers a faucet in the bridge's faucet registry. +#! +#! This note can only be consumed by the Agglayer Bridge account that is targeted by the note +#! attachment, and only if the note was sent by the bridge admin. +#! Upon consumption, it registers the faucet ID from note storage in the bridge's +#! faucet registry. +#! +#! Requires that the account exposes: +#! - agglayer::bridge_config::register_faucet procedure. +#! +#! Inputs: [ARGS, pad(12)] +#! Outputs: [pad(16)] +#! +#! NoteStorage layout (2 felts total): +#! - faucet_id_suffix [0]: 1 felt +#! - faucet_id_prefix [1]: 1 felt +#! +#! Where: +#! - faucet_id_suffix: Suffix felt of the faucet account ID to register. +#! - faucet_id_prefix: Prefix felt of the faucet account ID to register. +#! +#! Panics if: +#! - The note attachment target account does not match the consuming bridge account. +#! - The note does not contain exactly 2 storage items. +#! - The account does not expose the register_faucet procedure. +#! +begin + dropw + # => [pad(16)] + + # Ensure note attachment targets the consuming bridge account. 
+ exec.network_account_target::active_account_matches_target_account + assert.err=ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH + # => [pad(16)] + + # Load note storage to memory + push.STORAGE_START_PTR exec.active_note::get_storage + # => [num_storage_items, dest_ptr, pad(16)] + + push.CONFIG_AGG_BRIDGE_NUM_STORAGE_ITEMS assert_eq.err=ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS drop + # => [pad(16)] + + # Load the faucet ID from memory, replacing the top 4 zeros + mem_loadw_le.STORAGE_START_PTR + # => [faucet_id_suffix, faucet_id_prefix, pad(14)] + + # Register the faucet in the bridge's faucet registry + call.bridge_config::register_faucet + # => [pad(16)] +end diff --git a/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm b/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm new file mode 100644 index 0000000000..c1f5cb89d4 --- /dev/null +++ b/crates/miden-agglayer/asm/note_scripts/UPDATE_GER.masm @@ -0,0 +1,68 @@ +use miden::agglayer::bridge::bridge_config +use miden::protocol::active_note +use miden::protocol::active_account +use miden::protocol::account_id +use miden::protocol::note +use miden::standards::attachments::network_account_target + +# CONSTANTS +# ================================================================================================= +const UPDATE_GER_NOTE_NUM_STORAGE_ITEMS = 8 +const STORAGE_PTR_GER_LOWER = 0 +const STORAGE_PTR_GER_UPPER = 4 + +# ERRORS +# ================================================================================================= +const ERR_UPDATE_GER_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS = "UPDATE_GER script expects exactly 8 note storage items" +const ERR_UPDATE_GER_TARGET_ACCOUNT_MISMATCH = "UPDATE_GER note attachment target account does not match consuming account" + +# NOTE SCRIPT +# ================================================================================================= + +#! Agglayer Bridge UPDATE_GER script: updates the GER by calling the bridge_config::update_ger function. +#! +#! 
This note can only be consumed by the specific agglayer bridge account whose ID is provided +#! in the note attachment (target_account_id), and only if the note was sent by the +#! global exit root manager. +#! +#! Requires that the account exposes: +#! - agglayer::bridge_config::update_ger procedure. +#! +#! Inputs: [ARGS, pad(12)] +#! Outputs: [pad(16)] +#! NoteStorage layout (8 felts total): +#! - GER_LOWER [0..3] +#! - GER_UPPER [4..7] +#! +#! Panics if: +#! - account does not expose update_ger procedure. +#! - target account ID does not match the consuming account ID. +#! - number of note storage items is not exactly 8. +begin + dropw + # => [pad(16)] + + # Ensure note attachment targets the consuming bridge account. + exec.network_account_target::active_account_matches_target_account + assert.err=ERR_UPDATE_GER_TARGET_ACCOUNT_MISMATCH + # => [pad(16)] + + # proceed with the GER update logic + + push.STORAGE_PTR_GER_LOWER exec.active_note::get_storage + # => [num_storage_items, dest_ptr, pad(16)] + + push.UPDATE_GER_NOTE_NUM_STORAGE_ITEMS assert_eq.err=ERR_UPDATE_GER_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS drop + # => [pad(16)] + + # Load GER_LOWER and GER_UPPER from note storage + mem_loadw_le.STORAGE_PTR_GER_UPPER + # => [GER_UPPER[4], pad(12)] + swapw + mem_loadw_le.STORAGE_PTR_GER_LOWER + # => [GER_LOWER[4], GER_UPPER[4], pad(8)] + + call.bridge_config::update_ger + # => [pad(16)] + +end diff --git a/crates/miden-agglayer/build.rs b/crates/miden-agglayer/build.rs index fbd2cd06e7..436447346f 100644 --- a/crates/miden-agglayer/build.rs +++ b/crates/miden-agglayer/build.rs @@ -2,9 +2,10 @@ use std::env; use std::path::Path; use fs_err as fs; -use miden_assembly::diagnostics::{IntoDiagnostic, Result, WrapErr}; -use miden_assembly::utils::Serializable; +use miden_assembly::diagnostics::{IntoDiagnostic, NamedSource, Result, WrapErr}; +use miden_assembly::serde::Serializable; use miden_assembly::{Assembler, Library, Report}; +use 
miden_crypto::hash::keccak::{Keccak256, Keccak256Digest}; use miden_protocol::transaction::TransactionKernel; // CONSTANTS @@ -18,7 +19,9 @@ const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_NOTE_SCRIPTS_DIR: &str = "note_scripts"; -const ASM_BRIDGE_DIR: &str = "bridge"; +const ASM_AGGLAYER_DIR: &str = "agglayer"; +const ASM_AGGLAYER_BRIDGE_DIR: &str = "agglayer/bridge"; +const ASM_COMPONENTS_DIR: &str = "components"; const AGGLAYER_ERRORS_FILE: &str = "src/errors/agglayer.rs"; const AGGLAYER_ERRORS_ARRAY_NAME: &str = "AGGLAYER_ERRORS"; @@ -27,8 +30,9 @@ const AGGLAYER_ERRORS_ARRAY_NAME: &str = "AGGLAYER_ERRORS"; // ================================================================================================ /// Read and parse the contents from `./asm`. +/// - Compiles the contents of asm/agglayer directory into a single agglayer.masl library. +/// - Compiles the contents of asm/components directory into individual per-component .masl files. /// - Compiles the contents of asm/note_scripts directory into individual .masb files. -/// - Compiles the contents of asm/account_components directory into individual .masl files. 
fn main() -> Result<()> { // re-build when the MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); @@ -38,6 +42,10 @@ fn main() -> Result<()> { let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); let build_dir = env::var("OUT_DIR").unwrap(); let src = Path::new(&crate_dir).join(ASM_DIR); + + // generate canonical zeros in `asm/agglayer/bridge/canonical_zeros.masm` + generate_canonical_zeros(&src.join(ASM_AGGLAYER_BRIDGE_DIR))?; + let dst = Path::new(&build_dir).to_path_buf(); shared::copy_directory(src, &dst, ASM_DIR)?; @@ -54,6 +62,13 @@ fn main() -> Result<()> { let mut assembler = TransactionKernel::assembler(); assembler.link_static_library(agglayer_lib)?; + // compile account components (thin wrappers per component) + compile_account_components( + &source_dir.join(ASM_COMPONENTS_DIR), + &target_dir.join(ASM_COMPONENTS_DIR), + assembler.clone(), + )?; + // compile note scripts compile_note_scripts( &source_dir.join(ASM_NOTE_SCRIPTS_DIR), @@ -69,7 +84,7 @@ fn main() -> Result<()> { // COMPILE AGGLAYER LIB // ================================================================================================ -/// Reads the MASM files from "{source_dir}/bridge" directory, compiles them into a Miden +/// Reads the MASM files from "{source_dir}/agglayer" directory, compiles them into a Miden /// assembly library, saves the library into "{target_dir}/agglayer.masl", and returns the compiled /// library. 
fn compile_agglayer_lib( @@ -77,7 +92,7 @@ fn compile_agglayer_lib( target_dir: &Path, mut assembler: Assembler, ) -> Result { - let source_dir = source_dir.join(ASM_BRIDGE_DIR); + let source_dir = source_dir.join(ASM_AGGLAYER_DIR); // Add the miden-standards library to the assembler so agglayer components can use it let standards_lib = miden_standards::StandardsLib::default(); @@ -131,38 +146,28 @@ fn compile_note_scripts( Ok(()) } -// COMPILE ACCOUNT COMPONENTS (DEPRECATED) +// COMPILE ACCOUNT COMPONENTS // ================================================================================================ -/// Compiles the bridge components in `source_dir` into MASL libraries and stores the compiled +/// Compiles the account components in `source_dir` into MASL libraries and stores the compiled /// files in `target_dir`. /// -/// NOTE: This function is deprecated and replaced by compile_agglayer_lib -fn _compile_bridge_components( +/// Each `.masm` file in the components directory is a thin wrapper that re-exports specific +/// procedures from the main agglayer library. This ensures each component (bridge, faucet) +/// only exposes the procedures relevant to its role. +/// +/// The assembler must already have the agglayer library linked so that `pub use` re-exports +/// can resolve. 
+fn compile_account_components( source_dir: &Path, target_dir: &Path, - mut assembler: Assembler, -) -> Result { + assembler: Assembler, +) -> Result<()> { if !target_dir.exists() { fs::create_dir_all(target_dir).unwrap(); } - // Add the miden-standards library to the assembler so agglayer components can use it - let standards_lib = miden_standards::StandardsLib::default(); - assembler.link_static_library(standards_lib)?; - - // Compile all components together as a single library under the "miden::agglayer" namespace - // This allows cross-references between components (e.g., bridge_out using - // miden::agglayer::local_exit_tree) - let agglayer_library = assembler.assemble_library_from_dir(source_dir, "miden::agglayer")?; - - // Write the combined library - let library_path = target_dir.join("agglayer").with_extension(Library::LIBRARY_EXTENSION); - agglayer_library.write_to_file(library_path).into_diagnostic()?; - - // Also write individual component files for reference - let masm_files = shared::get_masm_files(source_dir).unwrap(); - for masm_file_path in &masm_files { + for masm_file_path in shared::get_masm_files(source_dir).unwrap() { let component_name = masm_file_path .file_stem() .expect("masm file should have a file stem") @@ -170,14 +175,22 @@ fn _compile_bridge_components( .expect("file stem should be valid UTF-8") .to_owned(); - let component_source_code = fs::read_to_string(masm_file_path) + let component_source_code = fs::read_to_string(&masm_file_path) .expect("reading the component's MASM source code should succeed"); - let individual_file_path = target_dir.join(&component_name).with_extension("masm"); - fs::write(individual_file_path, component_source_code).into_diagnostic()?; + let named_source = NamedSource::new(component_name.clone(), component_source_code); + + let component_library = assembler + .clone() + .assemble_library([named_source]) + .expect("library assembly should succeed"); + + let component_file_path = + 
target_dir.join(component_name).with_extension(Library::LIBRARY_EXTENSION); + component_library.write_to_file(component_file_path).into_diagnostic()?; } - Ok(agglayer_library) + Ok(()) } // ERROR CONSTANTS FILE GENERATION @@ -230,6 +243,77 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { Ok(()) } +// CANONICAL ZEROS FILE GENERATION +// ================================================================================================ + +fn generate_canonical_zeros(target_dir: &Path) -> Result<()> { + if !BUILD_GENERATED_FILES_IN_SRC { + return Ok(()); + } + + const TREE_HEIGHT: u8 = 32; + + let mut zeros_by_height = Vec::with_capacity(TREE_HEIGHT as usize); + + // Push the zero of height 0 to the zeros vec. This is done separately because the zero of + // height 0 is just a plain zero array ([0u8; 32]), it doesn't require to perform any hashing. + zeros_by_height.push(Keccak256Digest::default()); + + // Compute the canonical zeros for each height from 1 to TREE_HEIGHT + // Zero of height `n` is computed as: `ZERO_N = Keccak256::merge(ZERO_{N-1}, ZERO_{N-1})` + for _ in 1..TREE_HEIGHT { + let current_height_zero = + Keccak256::merge(&[*zeros_by_height.last().unwrap(), *zeros_by_height.last().unwrap()]); + zeros_by_height.push(current_height_zero); + } + + // convert the keccak digest into the sequence of u32 values and create two word constants from + // them to represent the hash + let mut zero_constants = String::from( + "# This file is generated by build.rs, do not modify\n +# This file contains the canonical zeros for the Keccak hash function. +# Zero of height `n` (ZERO_N) is the root of the binary tree of height `n` with leaves equal zero. 
+# +# Since the Keccak hash is represented by eight u32 values, each constant consists of two Words.\n", + ); + + for (height, zero) in zeros_by_height.iter().enumerate() { + let zero_as_u32_vec = zero + .chunks(4) + .map(|chunk_u32| u32::from_le_bytes(chunk_u32.try_into().unwrap()).to_string()) + .collect::>(); + + zero_constants.push_str(&format!( + "\nconst ZERO_{height}_L = [{}]\n", + zero_as_u32_vec[..4].join(", ") + )); + zero_constants + .push_str(&format!("const ZERO_{height}_R = [{}]\n", zero_as_u32_vec[4..].join(", "))); + } + + // remove once CANONICAL_ZEROS advice map is available + zero_constants.push_str( + " +use ::miden::agglayer::common::utils::mem_store_double_word + +#! Inputs: [zeros_ptr] +#! Outputs: [] +pub proc load_zeros_to_memory\n", + ); + + for zero_index in 0..32 { + zero_constants.push_str(&format!("\tpush.ZERO_{zero_index}_R.ZERO_{zero_index}_L exec.mem_store_double_word dropw dropw add.8\n")); + } + + zero_constants.push_str("\tdrop\nend\n"); + + // write the resulting masm content into the file only if it changed to avoid + // invalidating the cargo fingerprint for the `asm/` directory + shared::write_if_changed(target_dir.join("canonical_zeros.masm"), zero_constants)?; + + Ok(()) +} + /// This module should be kept in sync with the copy in miden-protocol's and miden-standards' /// build.rs. mod shared { @@ -470,11 +554,26 @@ mod shared { .into_diagnostic()?; } - std::fs::write(module.file_name, output).into_diagnostic()?; + write_if_changed(module.file_name, output)?; Ok(()) } + /// Writes `contents` to `path` only if the file doesn't exist or its current contents + /// differ. This avoids updating the file's mtime when nothing changed, which prevents + /// cargo from treating the crate as dirty on the next build. 
+ pub fn write_if_changed(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result<()> { + let path = path.as_ref(); + let new_contents = contents.as_ref(); + if path.exists() { + let existing = std::fs::read(path).into_diagnostic()?; + if existing == new_contents { + return Ok(()); + } + } + std::fs::write(path, new_contents).into_diagnostic() + } + pub type ErrorName = String; #[derive(Debug, Clone)] diff --git a/crates/miden-agglayer/solidity-compat/.gitignore b/crates/miden-agglayer/solidity-compat/.gitignore new file mode 100644 index 0000000000..16fe32f772 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/.gitignore @@ -0,0 +1,9 @@ +# Foundry artifacts +/out/ +/cache/ + +# Foundry broadcast files +/broadcast/ + +# Environment +.env diff --git a/crates/miden-agglayer/solidity-compat/README.md b/crates/miden-agglayer/solidity-compat/README.md new file mode 100644 index 0000000000..b45b6edced --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/README.md @@ -0,0 +1,57 @@ +# Solidity Compatibility Tests + +This directory contains Foundry tests for generating test vectors to verify +that the Miden MMR Frontier implementation is compatible with the Solidity +`DepositContractBase.sol` from [agglayer-contracts v2](https://github.com/agglayer/agglayer-contracts). 
+ +## Prerequisites + +Install [Foundry](https://book.getfoundry.sh/getting-started/installation): + +```bash +curl -L https://foundry.paradigm.xyz | bash +foundryup +``` + +## Generating Test Vectors + +From the repository root, you can regenerate both canonical zeros and MMR frontier test vectors with: + +```bash +make generate-solidity-test-vectors +``` + +Or from this directory: + +```bash +# Install dependencies (first time only) +forge install + +# Generate canonical zeros (test-vectors/canonical_zeros.json) +forge test -vv --match-test test_generateCanonicalZeros + +# Generate MMR frontier vectors (test-vectors/mmr_frontier_vectors.json) +forge test -vv --match-test test_generateVectors +``` + +## Generated Files + +- `test-vectors/canonical_zeros.json` - Canonical zeros for each tree height (ZERO_n = keccak256(ZERO_{n-1} || ZERO_{n-1})) +- `test-vectors/mmr_frontier_vectors.json` - Leaf-root pairs after adding leaves 0..31 + +### Canonical Zeros + +The canonical zeros should match the constants in: +`crates/miden-agglayer/asm/bridge/canonical_zeros.masm` + +### MMR Frontier Vectors + +The `test_generateVectors` adds 32 leaves and outputs the root after each addition. +Each leaf uses: + +- `amounts[i] = i + 1` +- `destination_networks[i]` and `destination_addresses[i]` generated deterministically from + a fixed seed in `MMRTestVectors.t.sol` + +This gives reproducible "random-looking" destination parameters while keeping vector generation +stable across machines and reruns. 
diff --git a/crates/miden-agglayer/solidity-compat/foundry.lock b/crates/miden-agglayer/solidity-compat/foundry.lock new file mode 100644 index 0000000000..196c826d70 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/foundry.lock @@ -0,0 +1,17 @@ +{ + "lib/agglayer-contracts": { + "rev": "e468f9b0967334403069aa650d9f1164b1731ebb" + }, + "lib/forge-std": { + "tag": { + "name": "v1.14.0", + "rev": "1801b0541f4fda118a10798fd3486bb7051c5dd6" + } + }, + "lib/openzeppelin-contracts-upgradeable": { + "branch": { + "name": "release-v4.9", + "rev": "2d081f24cac1a867f6f73d512f2022e1fa987854" + } + } +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/foundry.toml b/crates/miden-agglayer/solidity-compat/foundry.toml new file mode 100644 index 0000000000..e841d3e689 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/foundry.toml @@ -0,0 +1,20 @@ +[profile.default] +libs = ["lib"] +optimizer = true +optimizer_runs = 200 +out = "out" +solc = "0.8.20" +src = "src" +via_ir = true + +remappings = [ + "@agglayer/=lib/agglayer-contracts/contracts/", + "@openzeppelin/contracts-upgradeable4/=lib/openzeppelin-contracts-upgradeable/contracts/", +] + +# Emit extra output for test vector generation +ffi = false +verbosity = 2 + +# Allow writing test vectors to file +fs_permissions = [{ access = "read-write", path = "test-vectors" }] diff --git a/crates/miden-agglayer/solidity-compat/lib/agglayer-contracts b/crates/miden-agglayer/solidity-compat/lib/agglayer-contracts new file mode 160000 index 0000000000..e468f9b096 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/lib/agglayer-contracts @@ -0,0 +1 @@ +Subproject commit e468f9b0967334403069aa650d9f1164b1731ebb diff --git a/crates/miden-agglayer/solidity-compat/lib/forge-std b/crates/miden-agglayer/solidity-compat/lib/forge-std new file mode 160000 index 0000000000..f61e4dd133 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/lib/forge-std @@ -0,0 +1 @@ +Subproject commit 
f61e4dd133379a4536a54ee57a808c9c00019b60 diff --git a/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable new file mode 160000 index 0000000000..2d081f24ca --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/lib/openzeppelin-contracts-upgradeable @@ -0,0 +1 @@ +Subproject commit 2d081f24cac1a867f6f73d512f2022e1fa987854 diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json b/crates/miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json new file mode 100644 index 0000000000..fbf41c38bc --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json @@ -0,0 +1,36 @@ +{ + "canonical_zeros": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", 
+ "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json new file mode 100644 index 0000000000..3bf850e8e0 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json @@ -0,0 +1,86 @@ +{ + "amount": 100000000000000000000, + "deposit_count": 1, + "description": "L1 bridgeAsset transaction test vectors with valid Merkle proofs", + "destination_address": "0x00000000AA0000000000bb000000cc000000Dd00", + "destination_network": 20, + "global_exit_root": "0xc84f1e3744c151b345a8899034b3677c0fdbaf45aa3aaf18a3f97dbcf70836cb", + "global_index": "0x0000000000000000000000000000000000000000000000010000000000000000", + "leaf_type": 0, + "leaf_value": "0x9d85d7c56264697df18f458b4b12a457b87b7e7f7a9b16dcb368514729ef680d", 
+ "local_exit_root": "0xc9e095ea4cfe19b7e9a6d1aff6c55914ccc8df34954f9f6a2ad8e42d2632a0ab", + "mainnet_exit_root": "0xc9e095ea4cfe19b7e9a6d1aff6c55914ccc8df34954f9f6a2ad8e42d2632a0ab", + "metadata": "0x000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000a5465737420546f6b656e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000045445535400000000000000000000000000000000000000000000000000000000", + "metadata_hash": "0x4d0d9fb7f9ab2f012da088dc1c228173723db7e09147fe4fea2657849d580161", + "origin_network": 0, + "origin_token_address": "0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF", + "rollup_exit_root": "0xd18cc25ae65a4e3d95587ffea9411747238567d6e5d3744240554713edefc197", + "smt_proof_local_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + 
"0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "smt_proof_rollup_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json new file mode 100644 index 0000000000..b0819ea63d --- /dev/null +++ 
b/crates/miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json @@ -0,0 +1,82 @@ +{ + "amount": 100000000000000, + "destination_address": "0x00000000b0E79c68cafC54802726C6F102Cca300", + "destination_network": 20, + "global_exit_root": "0xe1cbfbde30bd598ee9aa2ac913b60d53e3297e51ed138bf86c500dd7d2391e7d", + "global_index": "0x0000000000000000000000000000000000000000000000010000000000039e88", + "leaf_type": 0, + "leaf_value": "0xc58420b9b4ba439bb5f6f68096270f4df656553ec67150d4d087416b9ef6ea9d", + "mainnet_exit_root": "0x31d3268d3a0145d65482b336935fa07dab0822f7dccd865f361d2bf122c4905c", + "metadata_hash": "0x945d61756eddd06a335ceff22d61480fc2086e85e74a55db5485f814626247d5", + "origin_network": 0, + "origin_token_address": "0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF", + "rollup_exit_root": "0x8452a95fd710163c5fa8ca2b2fe720d8781f0222bb9e82c2a442ec986c374858", + "smt_proof_local_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe37d456460231cf80063f57ee83a02f70d810c568b3bfb71156d52445f7a885a", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0x3236bf576fca1adf85917ec7888c4b89cce988564b6028f7d66807763aaa7b04", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0x054ba828046324ff4794fce22adefb23b3ce749cd4df75ade2dc9f41dd327c31", + "0x4e9220076c344bf223c7e7cb2d47c9f0096c48def6a9056e41568de4f01d2716", + "0xca6369acd49a7515892f5936227037cc978a75853409b20f1145f1d44ceb7622", + "0x5a925caf7bfdf31344037ba5b42657130d049f7cb9e87877317e79fce2543a0c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + 
"0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0x4111a1a05cc06ad682bb0f213170d7d57049920d20fc4e0f7556a21b283a7e2a", + "0x77a0f8b0e0b4e5a57f5e381b3892bb41a0bcdbfdf3c7d591fae02081159b594d", + "0x361122b4b1d18ab577f2aeb6632c690713456a66a5670649ceb2c0a31e43ab46", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "smt_proof_rollup_exit_root": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x0000000000000000000000000000000000000000000000000000000000000000" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json b/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json new file mode 100644 index 0000000000..b04fbdf844 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/exit_roots.json @@ -0,0 +1,14 @@ +{ + 
"global_exit_roots": [ + "0x207f0b7db488bbc423fc3d12db21b97e574453e12b49ca21205181af677d7b04", + "0x8e10e03b7db5ffe76edbea651052f8045289ece97947297de6279ce9f6730252" + ], + "mainnet_exit_roots": [ + "0x98c911b6dcface93fd0bb490d09390f2f7f9fcf36fc208cbb36528a229298326", + "0xbb71d991caf89fe64878259a61ae8d0b4310c176e66d90fd2370b02573e80c90" + ], + "rollup_exit_roots": [ + "0x6a2533a24cc2a3feecf5c09b6a270bbb24a5e2ce02c18c0e26cd54c3dddc2d70", + "0xd9b546933b59acd388dc0c6520cbf2d4dbb9bac66f74f167ba70f221d82a440c" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json b/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json new file mode 100644 index 0000000000..8d89835c88 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json @@ -0,0 +1,10 @@ +{ + "amount": 2000000000000000000, + "destination_address": "0xD9b20Fe633b609B01081aD0428e81f8Dd604F5C5", + "destination_network": 7, + "leaf_type": 0, + "leaf_value": "0xb67e42971034605367b7e92d1ad1d4648c3ffe0bea9b08115cd9aa2e616b2f88", + "metadata_hash": "0x6c7a91a5fb41dee8f0bc1c86b5587334583186f14acfa253e2f7c2833d1d6fdf", + "origin_network": 0, + "origin_token_address": "0xD9343a049D5DBd89CD19DC6BcA8c48fB3a0a42a7" +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json b/crates/miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json new file mode 100644 index 0000000000..32e6008245 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json @@ -0,0 +1,1096 @@ +{ + "leaves": [ + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0x0000000000000000000000000000000000000000000000000000000000000002", + "0x0000000000000000000000000000000000000000000000000000000000000003", + "0x0000000000000000000000000000000000000000000000000000000000000004", + 
"0x0000000000000000000000000000000000000000000000000000000000000005", + "0x0000000000000000000000000000000000000000000000000000000000000006", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0x0000000000000000000000000000000000000000000000000000000000000008", + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0x000000000000000000000000000000000000000000000000000000000000000a", + "0x000000000000000000000000000000000000000000000000000000000000000b", + "0x000000000000000000000000000000000000000000000000000000000000000c", + "0x000000000000000000000000000000000000000000000000000000000000000d", + "0x000000000000000000000000000000000000000000000000000000000000000e", + "0x000000000000000000000000000000000000000000000000000000000000000f", + "0x0000000000000000000000000000000000000000000000000000000000000010", + "0x0000000000000000000000000000000000000000000000000000000000000011", + "0x0000000000000000000000000000000000000000000000000000000000000012", + "0x0000000000000000000000000000000000000000000000000000000000000013", + "0x0000000000000000000000000000000000000000000000000000000000000014", + "0x0000000000000000000000000000000000000000000000000000000000000015", + "0x0000000000000000000000000000000000000000000000000000000000000016", + "0x0000000000000000000000000000000000000000000000000000000000000017", + "0x0000000000000000000000000000000000000000000000000000000000000018", + "0x0000000000000000000000000000000000000000000000000000000000000019", + "0x000000000000000000000000000000000000000000000000000000000000001a", + "0x000000000000000000000000000000000000000000000000000000000000001b", + "0x000000000000000000000000000000000000000000000000000000000000001c", + "0x000000000000000000000000000000000000000000000000000000000000001d", + "0x000000000000000000000000000000000000000000000000000000000000001e", + "0x000000000000000000000000000000000000000000000000000000000000001f", + 
"0x0000000000000000000000000000000000000000000000000000000000000020" + ], + "merkle_paths": [ + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + 
"0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000001", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + 
"0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + 
"0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000003", + "0xe90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + 
"0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + 
"0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000005", + 
"0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + 
"0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xbfd358e93f18da3ed276c3afdbdba00b8f0b6008a03476a6a86bd6320ee6938b", + "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + 
"0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000007", + "0xbfd358e93f18da3ed276c3afdbdba00b8f0b6008a03476a6a86bd6320ee6938b", + "0xa9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + 
"0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + 
"0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000009", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + 
"0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x825eb4cda6b8b44578c55770496c59e6dc3cf2235f690bcdaf51a61898ceb284", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + 
"0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + 
"0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000000b", + "0x825eb4cda6b8b44578c55770496c59e6dc3cf2235f690bcdaf51a61898ceb284", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + 
"0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0x41c242dcf7d95e223b291ac50602debef77ded7ede32e6f8ffe959dcf7252a7a", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + 
"0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000000d", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0x41c242dcf7d95e223b291ac50602debef77ded7ede32e6f8ffe959dcf7252a7a", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + 
"0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x9d0231707eb2041153c28e130d22114ee38b252cf17233585036af02278e4181", + "0x41c242dcf7d95e223b291ac50602debef77ded7ede32e6f8ffe959dcf7252a7a", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + 
"0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000000f", + "0x9d0231707eb2041153c28e130d22114ee38b252cf17233585036af02278e4181", + "0x41c242dcf7d95e223b291ac50602debef77ded7ede32e6f8ffe959dcf7252a7a", + "0x6f4feb766c4e9e71bf038b8df02f0966e2bf98fe1eaacfd96e5d036664ca1b3c", + "0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344", + 
"0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + 
"0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000011", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + 
"0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x6b485436b9b234417e59960d9ab1366322cfad1c365f281a05863557ce7f5ce4", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + 
"0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000013", + "0x6b485436b9b234417e59960d9ab1366322cfad1c365f281a05863557ce7f5ce4", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + 
"0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xc7be30f88cf2ecf57e79e1e1710c411ee9e22587a8053db312e33d3c0cd6c9d4", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + 
"0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000015", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + 
"0xc7be30f88cf2ecf57e79e1e1710c411ee9e22587a8053db312e33d3c0cd6c9d4", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + 
"0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xe3be994f4df1ce307d3423eeebbbb0cd25598c1b97b848f71392674439a00e37", + "0xc7be30f88cf2ecf57e79e1e1710c411ee9e22587a8053db312e33d3c0cd6c9d4", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + 
"0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000017", + "0xe3be994f4df1ce307d3423eeebbbb0cd25598c1b97b848f71392674439a00e37", + "0xc7be30f88cf2ecf57e79e1e1710c411ee9e22587a8053db312e33d3c0cd6c9d4", + "0x21ddb9a356815c3fac1026b6dec5df3124afbadb485c9ba5a3e3398a04b7ba85", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + 
"0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + 
"0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000019", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + 
"0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x86c475ea737f13a5a246616792c8fe016fa512aa588a8c13275a280b7d0cb15a", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + 
"0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + 
"0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000001b", + "0x86c475ea737f13a5a246616792c8fe016fa512aa588a8c13275a280b7d0cb15a", + "0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + 
"0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xebc2f26506912a873346107a90e0212c5f00f04e541bbba11cd17e0804ae0abf", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + 
"0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000001d", + "0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5", + "0xebc2f26506912a873346107a90e0212c5f00f04e541bbba11cd17e0804ae0abf", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + 
"0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x0000000000000000000000000000000000000000000000000000000000000000", + "0x764104d099bd8eeca4083ffb17dea07928f5b33ecb9e5f5855689fa69803386d", + "0xebc2f26506912a873346107a90e0212c5f00f04e541bbba11cd17e0804ae0abf", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + "0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + 
"0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9", + "0x000000000000000000000000000000000000000000000000000000000000001f", + "0x764104d099bd8eeca4083ffb17dea07928f5b33ecb9e5f5855689fa69803386d", + "0xebc2f26506912a873346107a90e0212c5f00f04e541bbba11cd17e0804ae0abf", + "0xe5422bc118f48ba443c276c6eece85e41b7dbeb30f209266d208f1a4f6b289b0", + "0x59b52a20e3252cc46cdac45bb75f28e521319d3109ae473cc6001c9b748d48e6", + "0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d", + 
"0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968", + "0xffd70157e48063fc33c97a050f7f640233bf646cc98d9524c6b92bcf3ab56f83", + "0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af", + "0xcefad4e508c098b9a7e1d8feb19955fb02ba9675585078710969d3440f5054e0", + "0xf9dc3e7fe016e050eff260334f18a5d4fe391d82092319f5964f2e2eb7c1c3a5", + "0xf8b13a49e282f609c317a833fb8d976d11517c571d1221a265d25af778ecf892", + "0x3490c6ceeb450aecdc82e28293031d10c7d73bf85e57bf041a97360aa2c5d99c", + "0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb", + "0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc", + "0xda7bce9f4e8618b6bd2f4132ce798cdc7a60e7e1460a7299e3c6342a579626d2", + "0x2733e50f526ec2fa19a22b31e8ed50f23cd1fdf94c9154ed3a7609a2f1ff981f", + "0xe1d3b5c807b281e4683cc6d6315cf95b9ade8641defcb32372f1c126e398ef7a", + "0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0", + "0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0", + "0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2", + "0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9", + "0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377", + "0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652", + "0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef", + "0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d", + "0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0", + "0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e", + "0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e", + "0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322", + "0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735", + "0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9" + ], + "roots": [ + "0x21db8421fb719c4d28af3cda6aeee3388f75e2cc467bfc7b950d32a425f7d355", + 
"0xed910e47f3c21d47debc7c730c32e06da6c54ba6b88b2378a61018f206903982", + "0x9384545e9aa4ebf1b8beb19916049a38744f06ef954a3f45560632d84ce6d533", + "0x7f19d9d94592351c6243e6bcce3a26858f74a31b6254789b2150744068c605ff", + "0x5f213be2fc640249a552ca4702de66233832cb68b9df97e230cc872d5f6cb9f9", + "0x6a0f4c48041afeac25806059b6e6179035e67bde1df0b089a4eaf0ac0df6bede", + "0x351a89e8327bb977532b302b57e2245ad68d05e2750f3324179920200a0f638a", + "0x38549f7c3628e3dbbfff8a696bf672c575b832d162491d610c1b8a6a0bc7561f", + "0xe14b66b351fd28eda1af3c03ba2a9dfd60484f70fd9f81c78a5cb6811dac5ea4", + "0x0637e5960d1ee7a5b1bb0c3857ef05acc23b4a709585e4959514fd79e91ce87c", + "0x5464040bc626006595b5e766a3f9304a0cf4d273c462cc708e3aade24d5e702a", + "0xb8b51ef34cf4e32d0805ea2c9fe4852d922053bf3684938c4fa7f43379d4e343", + "0xeb44a97bc5197c63c6c93bf6fdc7f4580a3dacd4578052d38538b42efbc9d3c0", + "0x26eee5cb8da8799016b92c262a6abc344e03174f79168197be5b9db6999deae3", + "0xaa6e8163229da7b95fe79ec7c96265b959a592ed895b632823b005a112359a53", + "0xdff63dcf9fc201fb5574ff828e88832305b83eecc072470b97de44a33b3240f6", + "0x2807a065ab1528d94a6e4331d5e8cb24ceab45ace87d76b8f0a8d077d7453067", + "0x1780d5b9898266e1fdf29518d871fef4cf37bfd8fe85e605c2adb2e5e40184a4", + "0x12910a95f5e6e9ab88c460109b6dac9958f440da307e6295823ba0b8f7d08113", + "0x6bcc607d23337c15d7c38f24b637a20730e039ea7e9e3b038a633e4a28dbac30", + "0xc2ad09f3e28f9c4b33d9c24382ccdb40f4b363bc4279b04c1ce9f86fc5158966", + "0x0f3a1ab79e3ae7ec6c194316de70d30216418323387ed2699b85e0e179ca9ae4", + "0xfa21bda7ccf431411a9f1e280871f6844dd57d49b4c3da89cacc559188b71e01", + "0x1353b7507a48e5368938a0509e49b930e9a4b16593485c82ba23cf7bdbd2f755", + "0x3d54e0d77b266fe0cd3c08a6234cbb5da409c369b608c5229ee7f555576666b4", + "0x6f5eb8c44bd89bd453e8276c780bb8cade32e914320f2f8269f8da512ed5c26c", + "0x5d83580ced8a725c6940898df7206e114078b722fc0f9331fc674ec41c79f39d", + "0xbd8f0f4d65d34fa78ed76b127ac25c6f173303dae7c283c2110d1622c1562378", + 
"0xf8aa0f2cf2e576ca7458b02981f29feda322684d6de4c4974c7d7b746c691b4d", + "0xda53b0a86569aed1c624f93df12ed4b9f6731a3b12d83e91aa7b60b0a3f81861", + "0x47a023340aabdc3853af05d3db190ee5be1b47133935b3b5d289bec6a144fb03", + "0xa6e29137a2d8363c701b8d8e3dc4015b2998386cfa2013e921cbf37ad20eaa51" + ] +} \ No newline at end of file diff --git a/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json b/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json new file mode 100644 index 0000000000..79c76364dc --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json @@ -0,0 +1,207 @@ +{ + "amounts": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32 + ], + "counts": [ + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32 + ], + "destination_addresses": [ + "0xB48074703337bEf6e94A9e2E1FfFe71632B42D56", + "0xBA60cd3cBD12619e6983B5D0E1CbcF2f4fed9d7b", + "0x89510362d6EdeB958F059727C9eD0F99298aAFa4", + "0xD62Cf6356E0a48e2014b71Cf942BEbBbFb00F7d7", + "0xFA5eacb9668731D74F2BB5Ad5bfB319f5A91c87D", + "0x90DD6647e5c91f9104a548876868a54795696B34", + "0x0E76F5f993A9a7f961e06397BC71d15c278A0b6c", + "0xe022226D1fFcCf12ac0e84D0aB9430F3fd56C613", + "0x1F9ecff77E28Bca8Ef18434B842A30579Bfd4EaA", + "0xe51D207B549Db157BeE9faeBd51C35aB47d180EF", + "0x9f30d6d0335E91e0593f13a567E4Fee661e1259F", + "0xE8F13Da1BDb719ba364a890a623454040A932eCf", + "0xb6EE19bf265563aA76dbe202e8dC71F8f42a58B1", + "0xf62d45e4D0DC57259B4557b5d79Ea23F67D0E381", + "0xaa94f5480aD0C906044E5E7Da8BB6BC4395aA498", + "0x060ddd9f6e6CF285004e33C30b46710ad75918Dd", + "0x8B743c166e1dA1444781AD2b5Fe2291578ABCeb1", + "0x8B08d9A773273Df976fb7448D38FeEeB15Dc34F8", + 
"0xbe931f6F189e6F8Da14f7B67Eb2E67b5D7f71c1d", + "0x2F891C182b23d1422D8Fddd9CC30B25BB849Bd5F", + "0x93fD7DEd75058ABA1B76C35c4Ac4e9355e596EdC", + "0x25B9eBC8D7d48a6B0e71e82Aa66832aCC9419E3A", + "0xbb086ECaC1316B81107e3CA591ef645831094E5a", + "0x08c7a5Db749DEf9280108Ec5e0354d4957CB17cF", + "0x0da76aA44116fad143F778f25907046E52F8c4d3", + "0xcFd0a3bfA35E771aad88C64EF0A310efF6730cDa", + "0xa7439b51638F31f054C93EC869C8c7E982699BAC", + "0x5C9A97f096CB18903994C44ddC07FfD921490B2c", + "0x0e52786aF0b48D764a255f6506C9C297d5BA2Dc3", + "0x5C2093921171F2c2d657eAA681D463Fe36c965d1", + "0xf8de801F1ba2a676d96Eb1F1ccB0B0CADFCbbE9e", + "0x31D230FAbAd05777Bb3E1a062e781446Bc422b80" + ], + "destination_networks": [ + 1538671592, + 1271685039, + 2812858243, + 1717044446, + 1618236512, + 1846799397, + 1114625417, + 1980472020, + 3445581035, + 1216050355, + 1334555263, + 1595653741, + 1406956437, + 2339872987, + 1591634953, + 2036330440, + 948554316, + 1629580568, + 4209912969, + 3528172732, + 4197496357, + 2020389543, + 1365501531, + 2591126838, + 273689805, + 543018504, + 3291055054, + 2685286074, + 3030491074, + 4166649488, + 1541470110, + 1181416010 + ], + "leaves": [ + "0xe460585d9b2385592b26a34d6908ea58165586cb39e5e6cb365b68246d29d7f8", + "0x5a7295b074b2ffeb07bd8bacbdd97aa97b0b269db43779112ef24b52548a9a2a", + "0xde239e1e8b54de83c9b0e3f32c269b265dd0efcda92c93a2146f44302e604080", + "0x98681050a4c0e39d25f1a44d95b343a05f7139cc882f628c569b3a1ae889f0e6", + "0xd3d70b40cc2a71e9a4996a2afaabcafe93af95ba9de147e3835ccddba2d82fdd", + "0xd46fec5943f6d40c9a68076fbc325daf7763607aaa60ca9be297cade5a1efca5", + "0x4c54e0aab6332cea9a9f867933caee83c6167aa78f663129d10e56cee35aacdd", + "0xf487aba0c467c53aa4fc9a7319817e1448efd774dedb235a1ab95a5dd2592d21", + "0xc734b7fd5abe87f4dff06da98980e19894117e92738e27e8dc0826eb4dee7202", + "0x8bcc65728c792dfaa58c6b63d192c2e37cd3db7c62774e7b40b9b3232597073a", + "0x6dbb052d9082cf78a0464cae809cd6c1be9d5657fc75a0fa1efade46f047aa01", + 
"0x11ea20a8fb14ed8b5ba47e83935f4dc1c032be3a3a9895a65fabed6e1adbef5c", + "0xd108801a4cfa732a19995a6f930ccdda98e91ce393f55eae7f63781568b44c74", + "0x423b7a7716ba307d27c05a6bbfde03b35c9544dffcf6702f69a205cff40a51da", + "0xed832ce8f80ed861bd13b1104490724dd38ab1c9ff18fd8e02ad13eb287af68f", + "0x6eca57794d8d55ec934427971898952017d87bd2773b64c554629f32f55fc7dc", + "0xc7abf795f5ebe46e9f86ba72d58f38ef535475cc41a11913fa1ec51cf902ee1a", + "0xbffebb2a3584cb6f96af4f8da6f5eea2e64066f0caa4bc6f44abb69b621a2b79", + "0x04de39dc7a9f11eba923271d07b5fda4f6b38012858a9a5a9d8f6557706981bd", + "0xef5e2f249fce6c67f5483b52e87384c6a6f6b5f8f102ecfede50cc9f8dfa78af", + "0x34e1511b36260dd619fcb205311055b87d31bb6440c9fb2a8b272bc1dcb1d699", + "0x0640b605ee9f8d8b38118c8dd1f51ca30f3b3f9037c29e598f39b91326825c46", + "0xfe7de1151f56cc10894b6bd63fc995a741c54d9069ee97247cb28627a4838da8", + "0xf17bb6827fe8873b839ecefe872776f757ca087dd65c2c2882523b71dcd24f05", + "0x7a11106b01c8d98348739c89007dddca673f18e9c38ef2d953315a1a49b23ce0", + "0xa7f0a37834fab9ce2cfbe364ccc4c50c88d48a061f0901889cc4fdc6b088a3ea", + "0xb386fae6a43e096a3d66147212a4fc756f7ed921febb2404f1d060111e4521e8", + "0x98484766860a98231a6834276f1ca84c8cf381e4931d635268b9b7d9db976958", + "0xd5007290e81283abd144a619da55be689e7b3eeb8a8b79f0de5e1f2793b056fe", + "0xac6812ede94056979e789ac4bd7dc5e4e682ea93aaaa1aadb22645ec44e21772", + "0xdc0662d88af437d468ed541ee9088464770bbd149a5ce5b3cdd9e836888c5b9d", + "0x6c8e78ff6214e87c5a791423385e31659921f3bb09376b302dd3933f98f346b4" + ], + "origin_token_address": "0x7a6fC3e8b57c6D1924F1A9d0E2b3c4D5e6F70891", + "roots": [ + "0xacd6f8510c036081e605dd2c8749d2b7d3b289913514d10af9538cb4b32b7ded", + "0x2d7b622637d38f862a074a0160bc1e54ad7df147ff3374af82777b37021b22e1", + "0xf9bdf29ab9c4cbd2927b759b9f8ddafa90317bdb91f388b8eee08038ff5ded00", + "0x80134ca84d0d742662f3ec22543f4cf33f02dc0b628f51d1df1c521ef3018395", + "0x21d6f3b63306929d624f01ffdbe216acb822bf080bcf04b7e6021db957e7bee4", + 
"0x7932d55a970d094161976d0b562805779d55a81b08a501983c2b121a0c989a1e", + "0x43f09c6c8a277ee6fbc0e3f8261ba4570f32d1cbfff06bf662aa8e5feeb742bc", + "0x9ae3a76a5c7fcc2af6e3cb937b7e1a4ba397a46029987b06fec29257ba408564", + "0x007e432139766ea419be4aeda41a59e20114c0b772b61e43b3a344fa1c4e1196", + "0xdf60f37334585bc10d67b107b417a14181158ac9828f56a9337684a81e7405d9", + "0xba49ac55a723278ef6cd8f12193a405bc90cd2b6e88f8791f8d48d69fe952104", + "0x4ab8529bce44bcfb8c8e90c9adebebca9e51f44b0e8a048d99bf7717cb58eae7", + "0xf9313f060db170a5287bcc78443267e893f638731dd48a9131b120f9c5833f88", + "0x49a9e6e504f2a6938bbefba42ec2b4930eed298a04eac403af1e0a6286017960", + "0xe318ce76597523c02da0094bcfd970e88c9544c6393d9bfe17d96e2a17f4856d", + "0x00d4099acc3d2a2cdd76f693fb527b218f369bc8e92af4a39328809738497a9d", + "0xf4db3da65c8fda88ad4a1ad1aca66e9260d5230a962791b57d396948a78fe96e", + "0x6813db5a7b4ac98c11d84412df7d6552941d30c7adb95e7025b13d747cf0f3f7", + "0xf1e93cbb96e5fabaee7cbb44f87f44832c9c290a5f85631d8c86493bab6ba0d5", + "0x654a2e78a6e49c969a0fedad0e4372862950ca371406c122779cf62e16dfe7e7", + "0x1a07ce13254cfb6697256a401063d6c43e5a89b8b1945c90bce62c464da1ba27", + "0xedaf2d835d1e6fdd801555835b2cadcd04517f8668f30658019869d0376c6c36", + "0x82adda5fd38a4718f37b2d4fe9fe99b364cced5de9bdfa4c6bdcd118da42c64c", + "0x2d28e62dd13f99153b5e9eb4d68cc1f99a5bd510375f2d1ed522c0062a2d38d7", + "0xd87e80ebe2f69df6735911707780df6b882189db786b5507310249a26d3db69d", + "0x5406d2fbc12edcccd2b8c755b7063ababc760ce23da62032d500a10d49756994", + "0xce99e7d0f9d77226cae034297dfec349d866f892eb753a8c7f5bba4bec52364c", + "0x4419d0e6c47cac3e4fc917f91d878582ed4496bef8e7df219be4d483e496ff0a", + "0xafe2c2b44e58c34576299a201d4918f47d5a48b6fa7a229eaf59e226120b12ac", + "0x9d2989190f9edb660b043a55f3051412280dd7bb7d4d042e3695d3a2b23f5b8d", + "0x18b772e2e093d5f69151c3b6da00d42a2066d1f5980e5f9210ae902f5a5643ca", + "0x6717e563a6c40e1562235c4cbbc2ba0de5be6be07101715e8d3922361b77d394" + ] +} \ No newline at end of file diff --git 
a/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol new file mode 100644 index 0000000000..64517d8580 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsLocalTx.t.sol @@ -0,0 +1,156 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; +import "./DepositContractTestHelpers.sol"; + +/** + * @title ClaimAssetTestVectorsLocalTx + * @notice Test contract that generates test vectors for an L1 bridgeAsset transaction. + * This simulates calling bridgeAsset() on the PolygonZkEVMBridgeV2 contract + * and captures all relevant data including VALID Merkle proofs. + * + * Run with: forge test -vv --match-contract ClaimAssetTestVectorsLocalTx + * + * The output can be used to verify Miden's ability to process L1 bridge transactions. + */ +contract ClaimAssetTestVectorsLocalTx is Test, DepositContractV2, DepositContractTestHelpers { + /** + * @notice Generates bridge asset test vectors with VALID Merkle proofs. + * Simulates a user calling bridgeAsset() to bridge tokens from L1 to Miden. 
+ * + * Output file: test-vectors/bridge_asset_vectors.json + */ + function test_generateClaimAssetVectorsLocalTx() public { + string memory obj = "root"; + + // ====== BRIDGE TRANSACTION PARAMETERS ====== + + uint8 leafType = 0; + uint32 originNetwork = 0; + address originTokenAddress = 0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF; + uint32 destinationNetwork = 20; + address destinationAddress = 0x00000000AA0000000000bb000000cc000000Dd00; + uint256 amount = 100000000000000000000; + + bytes memory metadata = abi.encode("Test Token", "TEST", uint8(18)); + bytes32 metadataHash = keccak256(metadata); + + // ====== COMPUTE LEAF VALUE AND ADD TO TREE ====== + + bytes32 leafValue = getLeafValue( + leafType, originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash + ); + + // Add the leaf to the deposit tree to generate valid Merkle proof + _addLeaf(leafValue); + + // Get the deposit count (leaf index) - depositCount is uint256 in DepositContractBase + uint256 depositCountValue = uint256(depositCount); + + // Get the local exit root (root of the deposit tree) + bytes32 localExitRoot = getRoot(); + + // ====== GENERATE MERKLE PROOF ====== + + // Generate canonical zeros for the Merkle proof + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); + + // Build the Merkle proof from _branch array and canonical zeros + // The leaf index is depositCountValue - 1 (0-indexed) + uint256 leafIndex = depositCountValue - 1; + bytes32[32] memory smtProofLocal = _generateLocalProof(leafIndex, canonicalZeros); + + // For mainnet deposits, the rollup proof is all zeros + bytes32[32] memory smtProofRollup; + for (uint256 i = 0; i < 32; i++) { + smtProofRollup[i] = bytes32(0); + } + + // ====== COMPUTE EXIT ROOTS ====== + + // For a simulated L1 bridge transaction: + // - mainnetExitRoot is the local exit root from the deposit tree + // - rollupExitRoot is simulated (deterministic for reproducibility) + bytes32 mainnetExitRoot = localExitRoot; + 
bytes32 rollupExitRoot = keccak256(abi.encodePacked("rollup_exit_root_simulated")); + + // Compute global exit root + bytes32 globalExitRoot = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoot, rollupExitRoot); + + // ====== VERIFY MERKLE PROOF ====== + + // Verify that the generated proof is valid + require( + this.verifyMerkleProof(leafValue, smtProofLocal, uint32(leafIndex), mainnetExitRoot), + "Generated Merkle proof is invalid!" + ); + + // ====== COMPUTE GLOBAL INDEX ====== + + // Global index for mainnet deposits: (1 << 64) | leafIndex + // Note: leafIndex is 0-based (depositCount - 1), matching how the bridge contract + // extracts it via uint32(globalIndex) in _verifyLeaf() + uint256 globalIndex = (uint256(1) << 64) | uint256(leafIndex); + + // ====== SERIALIZE SMT PROOFS ====== + _serializeProofs(obj, smtProofLocal, smtProofRollup); + + // Scoped block 2: Serialize transaction parameters + { + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes(obj, "metadata", metadata); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + vm.serializeBytes32(obj, "leaf_value", leafValue); + } + + // Scoped block 3: Serialize state, exit roots, and finalize + { + vm.serializeUint(obj, "deposit_count", depositCountValue); + vm.serializeBytes32(obj, "global_index", bytes32(globalIndex)); + vm.serializeBytes32(obj, "local_exit_root", localExitRoot); + vm.serializeBytes32(obj, "mainnet_exit_root", mainnetExitRoot); + vm.serializeBytes32(obj, "rollup_exit_root", rollupExitRoot); + vm.serializeBytes32(obj, "global_exit_root", globalExitRoot); + + string memory json = vm.serializeString( + obj, "description", "L1 bridgeAsset transaction test 
vectors with valid Merkle proofs" + ); + + string memory outputPath = "test-vectors/claim_asset_vectors_local_tx.json"; + vm.writeJson(json, outputPath); + + console.log("Generated claim asset local tx test vectors with valid Merkle proofs"); + console.log("Output file:", outputPath); + console.log("Leaf index:", leafIndex); + console.log("Deposit count:", depositCountValue); + } + } + + /** + * @notice Helper function to serialize SMT proofs (avoids stack too deep) + * @param obj The JSON object key + * @param smtProofLocal The local exit root proof + * @param smtProofRollup The rollup exit root proof + */ + function _serializeProofs(string memory obj, bytes32[32] memory smtProofLocal, bytes32[32] memory smtProofRollup) + internal + { + bytes32[] memory smtProofLocalDyn = new bytes32[](32); + bytes32[] memory smtProofRollupDyn = new bytes32[](32); + for (uint256 i = 0; i < 32; i++) { + smtProofLocalDyn[i] = smtProofLocal[i]; + smtProofRollupDyn[i] = smtProofRollup[i]; + } + + vm.serializeBytes32(obj, "smt_proof_local_exit_root", smtProofLocalDyn); + vm.serializeBytes32(obj, "smt_proof_rollup_exit_root", smtProofRollupDyn); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol new file mode 100644 index 0000000000..0f4e56bb5b --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ClaimAssetTestVectorsRealTx.t.sol @@ -0,0 +1,137 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; + +/** + * @title ClaimAssetTestVectorsRealTx + * @notice Test contract that generates comprehensive test vectors for verifying + * compatibility between Solidity's claimAsset and Miden's implementation. + * + * Generates vectors for both LeafData and ProofData from a real transaction. 
+ * + * Run with: forge test -vv --match-contract ClaimAssetTestVectorsRealTx + * + * The output can be compared against the Rust ClaimNoteStorage implementation. + */ +contract ClaimAssetTestVectorsRealTx is Test, DepositContractV2 { + /** + * @notice Generates claim asset test vectors from real Katana transaction and saves to JSON. + * Uses real transaction data from Katana explorer: + * https://katanascan.com/tx/0x685f6437c4a54f5d6c59ea33de74fe51bc2401fea65dc3d72a976def859309bf + * + * Output file: test-vectors/claim_asset_vectors_real_tx.json + */ + function test_generateClaimAssetVectors() public { + string memory obj = "root"; + + // ====== PROOF DATA ====== + // Scoped block keeps stack usage under Solidity limits. + { + // SMT proof for local exit root (32 nodes) + bytes32[32] memory smtProofLocalExitRoot = [ + bytes32(0x0000000000000000000000000000000000000000000000000000000000000000), + bytes32(0xad3228b676f7d3cd4284a5443f17f1962b36e491b30a40b2405849e597ba5fb5), + bytes32(0xb4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30), + bytes32(0xe37d456460231cf80063f57ee83a02f70d810c568b3bfb71156d52445f7a885a), + bytes32(0xe58769b32a1beaf1ea27375a44095a0d1fb664ce2dd358e7fcbfb78c26a19344), + bytes32(0x0eb01ebfc9ed27500cd4dfc979272d1f0913cc9f66540d7e8005811109e1cf2d), + bytes32(0x887c22bd8750d34016ac3c66b5ff102dacdd73f6b014e710b51e8022af9a1968), + bytes32(0x3236bf576fca1adf85917ec7888c4b89cce988564b6028f7d66807763aaa7b04), + bytes32(0x9867cc5f7f196b93bae1e27e6320742445d290f2263827498b54fec539f756af), + bytes32(0x054ba828046324ff4794fce22adefb23b3ce749cd4df75ade2dc9f41dd327c31), + bytes32(0x4e9220076c344bf223c7e7cb2d47c9f0096c48def6a9056e41568de4f01d2716), + bytes32(0xca6369acd49a7515892f5936227037cc978a75853409b20f1145f1d44ceb7622), + bytes32(0x5a925caf7bfdf31344037ba5b42657130d049f7cb9e87877317e79fce2543a0c), + bytes32(0xc1df82d9c4b87413eae2ef048f94b4d3554cea73d92b0f7af96e0271c691e2bb), + 
bytes32(0x5c67add7c6caf302256adedf7ab114da0acfe870d449a3a489f781d659e8becc), + bytes32(0x4111a1a05cc06ad682bb0f213170d7d57049920d20fc4e0f7556a21b283a7e2a), + bytes32(0x77a0f8b0e0b4e5a57f5e381b3892bb41a0bcdbfdf3c7d591fae02081159b594d), + bytes32(0x361122b4b1d18ab577f2aeb6632c690713456a66a5670649ceb2c0a31e43ab46), + bytes32(0x5a2dce0a8a7f68bb74560f8f71837c2c2ebbcbf7fffb42ae1896f13f7c7479a0), + bytes32(0xb46a28b6f55540f89444f63de0378e3d121be09e06cc9ded1c20e65876d36aa0), + bytes32(0xc65e9645644786b620e2dd2ad648ddfcbf4a7e5b1a3a4ecfe7f64667a3f0b7e2), + bytes32(0xf4418588ed35a2458cffeb39b93d26f18d2ab13bdce6aee58e7b99359ec2dfd9), + bytes32(0x5a9c16dc00d6ef18b7933a6f8dc65ccb55667138776f7dea101070dc8796e377), + bytes32(0x4df84f40ae0c8229d0d6069e5c8f39a7c299677a09d367fc7b05e3bc380ee652), + bytes32(0xcdc72595f74c7b1043d0e1ffbab734648c838dfb0527d971b602bc216c9619ef), + bytes32(0x0abf5ac974a1ed57f4050aa510dd9c74f508277b39d7973bb2dfccc5eeb0618d), + bytes32(0xb8cd74046ff337f0a7bf2c8e03e10f642c1886798d71806ab1e888d9e5ee87d0), + bytes32(0x838c5655cb21c6cb83313b5a631175dff4963772cce9108188b34ac87c81c41e), + bytes32(0x662ee4dd2dd7b2bc707961b1e646c4047669dcb6584f0d8d770daf5d7e7deb2e), + bytes32(0x388ab20e2573d171a88108e79d820e98f26c0b84aa8b2f4aa4968dbb818ea322), + bytes32(0x93237c50ba75ee485f4c22adf2f741400bdf8d6a9cc7df7ecae576221665d735), + bytes32(0x8448818bb4ae4562849e949e17ac16e0be16688e156b5cf15e098c627c0056a9) + ]; + + // forge-std JSON serialization supports `bytes32[]` but not `bytes32[32]`. + bytes32[] memory smtProofLocalExitRootDyn = new bytes32[](32); + for (uint256 i = 0; i < 32; i++) { + smtProofLocalExitRootDyn[i] = smtProofLocalExitRoot[i]; + } + + // SMT proof for rollup exit root (32 nodes - all zeros for this rollup claim). + bytes32[] memory smtProofRollupExitRootDyn = new bytes32[](32); + + // Global index (uint256) - encodes rollup_id and deposit_count. 
+ uint256 globalIndex = 18446744073709788808; + + // Exit roots + bytes32 mainnetExitRoot = 0x31d3268d3a0145d65482b336935fa07dab0822f7dccd865f361d2bf122c4905c; + bytes32 rollupExitRoot = 0x8452a95fd710163c5fa8ca2b2fe720d8781f0222bb9e82c2a442ec986c374858; + + // Compute global exit root: keccak256(mainnetExitRoot || rollupExitRoot) + bytes32 globalExitRoot = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoot, rollupExitRoot); + + vm.serializeBytes32(obj, "smt_proof_local_exit_root", smtProofLocalExitRootDyn); + vm.serializeBytes32(obj, "smt_proof_rollup_exit_root", smtProofRollupExitRootDyn); + vm.serializeBytes32(obj, "global_index", bytes32(globalIndex)); + vm.serializeBytes32(obj, "mainnet_exit_root", mainnetExitRoot); + vm.serializeBytes32(obj, "rollup_exit_root", rollupExitRoot); + vm.serializeBytes32(obj, "global_exit_root", globalExitRoot); + } + + // ====== LEAF DATA ====== + // Scoped block keeps stack usage under Solidity limits. + { + uint8 leafType = 0; // 0 for ERC20/ETH transfer + uint32 originNetwork = 0; + address originTokenAddress = 0x2DC70fb75b88d2eB4715bc06E1595E6D97c34DFF; + uint32 destinationNetwork = 20; + address destinationAddress = 0x00000000b0E79c68cafC54802726C6F102Cca300; + uint256 amount = 100000000000000; // 1e14 (0.0001 vbETH) + + // Original metadata from the transaction (ABI encoded: name, symbol, decimals) + // name = "Vault Bridge ETH", symbol = "vbETH", decimals = 18 + bytes memory metadata = + hex"000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000105661756c7420427269646765204554480000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000057662455448000000000000000000000000000000000000000000000000000000"; + bytes32 metadataHash = keccak256(metadata); + + // Compute the leaf value using 
the official DepositContractV2 implementation + bytes32 leafValue = getLeafValue( + leafType, + originNetwork, + originTokenAddress, + destinationNetwork, + destinationAddress, + amount, + metadataHash + ); + + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + string memory json = vm.serializeBytes32(obj, "leaf_value", leafValue); + + // Save to file + string memory outputPath = "test-vectors/claim_asset_vectors_real_tx.json"; + vm.writeJson(json, outputPath); + } + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol b/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol new file mode 100644 index 0000000000..997349c05d --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/DepositContractTestHelpers.sol @@ -0,0 +1,47 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "@agglayer/v2/lib/DepositContractBase.sol"; + +/** + * @title DepositContractTestHelpers + * @notice Shared helpers for Sparse Merkle Tree test vector generation. + * Inherited by SMTMerkleProofVectors and ClaimAssetTestVectorsLocalTx. + */ +abstract contract DepositContractTestHelpers is DepositContractBase { + /** + * @notice Computes the canonical zero hashes for the Sparse Merkle Tree. 
+ * @dev Each level i has zero hash: keccak256(zero[i-1], zero[i-1]) + * @return canonicalZeros Array of 32 zero hashes, one per tree level + */ + function _computeCanonicalZeros() internal pure returns (bytes32[32] memory canonicalZeros) { + bytes32 current = bytes32(0); + for (uint256 i = 0; i < 32; i++) { + canonicalZeros[i] = current; + current = keccak256(abi.encodePacked(current, current)); + } + } + + /** + * @notice Generates the SMT proof for a given leaf index using the current _branch state. + * @dev For each level i: + * - If bit i of leafIndex is 1: use _branch[i] (sibling on left) + * - If bit i of leafIndex is 0: use canonicalZeros[i] (sibling on right) + * @param leafIndex The 0-indexed position of the leaf in the tree + * @param canonicalZeros The precomputed canonical zero hashes + * @return smtProof The 32-element Merkle proof array + */ + function _generateLocalProof(uint256 leafIndex, bytes32[32] memory canonicalZeros) + internal + view + returns (bytes32[32] memory smtProof) + { + for (uint256 i = 0; i < 32; i++) { + if ((leafIndex >> i) & 1 == 1) { + smtProof[i] = _branch[i]; + } else { + smtProof[i] = canonicalZeros[i]; + } + } + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol b/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol new file mode 100644 index 0000000000..b518e5de15 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/ExitRoots.t.sol @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/lib/GlobalExitRootLib.sol"; + +/** + * @title ExitRootsTestVectors + * @notice Test contract that generates global exit root test vectors from + * mainnet-rollup exit root pairs. + * + * Run with: forge test -vv --match-contract ExitRootsTestVectors + * + * The output can be compared against Rust implementations that compute + * the global exit root as keccak256(mainnetExitRoot || rollupExitRoot). 
+ */ +contract ExitRootsTestVectors is Test { + /** + * @notice Generates global exit root vectors from mainnet-rollup pairs + * and saves to JSON file. + * + * Output file: test-vectors/exit_roots.json + */ + function test_generateExitRootVectors() public { + // Input: pairs of (mainnetExitRoot, rollupExitRoot) from mainnet transactions + // Source transaction hashes from https://explorer.lumia.org/: + // TX 1: 0xe1a20811d757c48eba534f63041f58cd39eec762bfb6e4496dccf4e675fd5619 + // TX 2: 0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188 + bytes32[] memory mainnetExitRoots = new bytes32[](2); + bytes32[] memory rollupExitRoots = new bytes32[](2); + + // Pair 1 (TX: 0xe1a20811d757c48eba534f63041f58cd39eec762bfb6e4496dccf4e675fd5619) + mainnetExitRoots[0] = bytes32(0x98c911b6dcface93fd0bb490d09390f2f7f9fcf36fc208cbb36528a229298326); + rollupExitRoots[0] = bytes32(0x6a2533a24cc2a3feecf5c09b6a270bbb24a5e2ce02c18c0e26cd54c3dddc2d70); + + // Pair 2 (TX: 0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188) + mainnetExitRoots[1] = bytes32(0xbb71d991caf89fe64878259a61ae8d0b4310c176e66d90fd2370b02573e80c90); + rollupExitRoots[1] = bytes32(0xd9b546933b59acd388dc0c6520cbf2d4dbb9bac66f74f167ba70f221d82a440c); + + // Compute global exit roots + bytes32[] memory globalExitRoots = new bytes32[](mainnetExitRoots.length); + for (uint256 i = 0; i < mainnetExitRoots.length; i++) { + globalExitRoots[i] = GlobalExitRootLib.calculateGlobalExitRoot(mainnetExitRoots[i], rollupExitRoots[i]); + } + + // Serialize parallel arrays to JSON + string memory obj = "root"; + vm.serializeBytes32(obj, "mainnet_exit_roots", mainnetExitRoots); + vm.serializeBytes32(obj, "rollup_exit_roots", rollupExitRoots); + string memory json = vm.serializeBytes32(obj, "global_exit_roots", globalExitRoots); + + // Save to file + string memory outputPath = "test-vectors/exit_roots.json"; + vm.writeJson(json, outputPath); + console.log("Saved exit root vectors to:", outputPath); + } +} 
diff --git a/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol new file mode 100644 index 0000000000..3d39576a02 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/LeafValueTestVectors.t.sol @@ -0,0 +1,58 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; + +/** + * @title LeafValueTestVectors + * @notice Test contract that generates test vectors for verifying compatibility + * between Solidity's getLeafValue and Miden's keccak hash implementation. + * + * Run with: forge test -vv --match-contract LeafValueTestVectors + * + * The output can be compared against the Rust get_leaf_value implementation. + */ +contract LeafValueTestVectors is Test, DepositContractV2 { + /** + * @notice Generates leaf value test vectors and saves to JSON file. + * Uses real transaction data from Lumia explorer: + * https://explorer.lumia.org/tx/0xe64254ff002b3d46b46af077fa24c6ef5b54d950759d70d6d9a693b1d36de188 + * + * Output file: test-vectors/leaf_value_vectors.json + */ + function test_generateLeafValueVectors() public { + // Test vector from real Lumia bridge transaction + uint8 leafType = 0; // 0 for ERC20/ETH transfer + uint32 originNetwork = 0; + address originTokenAddress = 0xD9343a049D5DBd89CD19DC6BcA8c48fB3a0a42a7; + uint32 destinationNetwork = 7; + address destinationAddress = 0xD9b20Fe633b609B01081aD0428e81f8Dd604F5C5; + uint256 amount = 2000000000000000000; // 2e18 + + // Original metadata from the transaction (ABI encoded: name, symbol, decimals) + bytes memory metadata = + 
hex"000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000000b4c756d696120546f6b656e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000054c554d4941000000000000000000000000000000000000000000000000000000"; + bytes32 metadataHash = keccak256(metadata); + + // Compute the leaf value using the official DepositContractV2 implementation + bytes32 leafValue = getLeafValue( + leafType, originNetwork, originTokenAddress, destinationNetwork, destinationAddress, amount, metadataHash + ); + + // Serialize to JSON + string memory obj = "root"; + vm.serializeUint(obj, "leaf_type", leafType); + vm.serializeUint(obj, "origin_network", originNetwork); + vm.serializeAddress(obj, "origin_token_address", originTokenAddress); + vm.serializeUint(obj, "destination_network", destinationNetwork); + vm.serializeAddress(obj, "destination_address", destinationAddress); + vm.serializeUint(obj, "amount", amount); + vm.serializeBytes32(obj, "metadata_hash", metadataHash); + string memory json = vm.serializeBytes32(obj, "leaf_value", leafValue); + + // Save to file + string memory outputPath = "test-vectors/leaf_value_vectors.json"; + vm.writeJson(json, outputPath); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol new file mode 100644 index 0000000000..b3b090b471 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/MMRTestVectors.t.sol @@ -0,0 +1,129 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import "@agglayer/v2/lib/DepositContractV2.sol"; + +/** + * @title MMRTestVectors + * @notice Test contract that generates test vectors for verifying compatibility + * between Solidity's 
DepositContractBase and Miden's MMR Frontier implementation. + * + * Leaves are constructed via getLeafValue using the same hardcoded fields that + * bridge_out.masm uses (leafType=0, originNetwork=64, originTokenAddress=fixed random value, + * metadataHash=0), parametrised by amount (i+1) and deterministic per-leaf + * destination network/address values derived from a fixed seed. + * + * Run with: forge test -vv --match-contract MMRTestVectors + * + * The output can be compared against the Rust KeccakMmrFrontier32 implementation + * in crates/miden-testing/tests/agglayer/mmr_frontier.rs + */ +contract MMRTestVectors is Test, DepositContractV2 { + // Constants matching bridge_out.masm hardcoded values + uint8 constant LEAF_TYPE = 0; + uint32 constant ORIGIN_NETWORK = 64; + address constant ORIGIN_TOKEN_ADDR = 0x7a6fC3e8b57c6D1924F1A9d0E2b3c4D5e6F70891; + bytes32 constant METADATA_HASH = bytes32(0); + + // Fixed seed for deterministic "random" destination vectors. + // Keeping this constant ensures everyone regenerates the exact same JSON vectors. + uint256 constant VECTOR_SEED = uint256(keccak256("miden::agglayer::mmr_frontier_vectors::v2")); + + /** + * @notice Builds a leaf hash identical to what bridge_out.masm would produce for the + * given amount. + */ + function _createLeaf(uint256 amount, uint32 destinationNetwork, address destinationAddress) + internal + pure + returns (bytes32) + { + return getLeafValue( + LEAF_TYPE, ORIGIN_NETWORK, ORIGIN_TOKEN_ADDR, destinationNetwork, destinationAddress, amount, METADATA_HASH + ); + } + + function _destinationNetworkAt(uint256 idx) internal pure returns (uint32) { + return uint32(uint256(keccak256(abi.encodePacked(VECTOR_SEED, bytes1(0x01), idx)))); + } + + function _destinationAddressAt(uint256 idx) internal pure returns (address) { + return address(uint160(uint256(keccak256(abi.encodePacked(VECTOR_SEED, bytes1(0x02), idx))))); + } + + /** + * @notice Generates the canonical zeros and saves to JSON file. 
+ * ZERO_0 = 0x0...0 (32 zero bytes) + * ZERO_n = keccak256(ZERO_{n-1} || ZERO_{n-1}) + * + * Output file: test-vectors/canonical_zeros.json + */ + function test_generateCanonicalZeros() public { + bytes32[] memory zeros = new bytes32[](32); + + bytes32 z = bytes32(0); + for (uint256 i = 0; i < 32; i++) { + zeros[i] = z; + z = keccak256(abi.encodePacked(z, z)); + } + + // Foundry serializes bytes32[] to a JSON array automatically + string memory json = vm.serializeBytes32("root", "canonical_zeros", zeros); + + // Save to file + string memory outputPath = "test-vectors/canonical_zeros.json"; + vm.writeJson(json, outputPath); + console.log("Saved canonical zeros to:", outputPath); + } + + /** + * @notice Generates MMR frontier vectors (leaf-root pairs) and saves to JSON file. + * Each leaf is created via _createLeaf(i+1, network[i], address[i]) so that: + * - amounts are 1..32 + * - destination networks/addresses are deterministic per index from VECTOR_SEED + * + * The destination vectors are also written to JSON so the Rust bridge_out test + * can construct matching B2AGG notes. 
+ * + * Output file: test-vectors/mmr_frontier_vectors.json + */ + function test_generateVectors() public { + bytes32[] memory leaves = new bytes32[](32); + bytes32[] memory roots = new bytes32[](32); + uint256[] memory counts = new uint256[](32); + uint256[] memory amounts = new uint256[](32); + uint256[] memory destinationNetworks = new uint256[](32); + address[] memory destinationAddresses = new address[](32); + + for (uint256 i = 0; i < 32; i++) { + uint256 amount = i + 1; + uint32 destinationNetwork = _destinationNetworkAt(i); + address destinationAddress = _destinationAddressAt(i); + bytes32 leaf = _createLeaf(amount, destinationNetwork, destinationAddress); + _addLeaf(leaf); + + leaves[i] = leaf; + roots[i] = getRoot(); + counts[i] = depositCount; + amounts[i] = amount; + destinationNetworks[i] = destinationNetwork; + destinationAddresses[i] = destinationAddress; + } + + // Serialize parallel arrays to JSON + string memory obj = "root"; + vm.serializeBytes32(obj, "leaves", leaves); + vm.serializeBytes32(obj, "roots", roots); + vm.serializeUint(obj, "counts", counts); + vm.serializeUint(obj, "amounts", amounts); + vm.serializeUint(obj, "destination_networks", destinationNetworks); + vm.serializeAddress(obj, "origin_token_address", ORIGIN_TOKEN_ADDR); + string memory json = vm.serializeAddress(obj, "destination_addresses", destinationAddresses); + + // Save to file + string memory outputPath = "test-vectors/mmr_frontier_vectors.json"; + vm.writeJson(json, outputPath); + console.log("Saved MMR frontier vectors to:", outputPath); + } +} diff --git a/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol b/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol new file mode 100644 index 0000000000..e6b466e521 --- /dev/null +++ b/crates/miden-agglayer/solidity-compat/test/SMTMerkleProofVectors.t.sol @@ -0,0 +1,69 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.20; + +import "forge-std/Test.sol"; +import 
"./DepositContractTestHelpers.sol"; + +/** + * @title SMTMerkleProofVectors + * @notice Test contract that generates test vectors for Merkle proofs verification. + * + * Run with: forge test -vv --match-contract SMTMerkleProofVectors + * + * The output can be used during the bridge-in tests in + * crates/miden-testing/tests/agglayer/bridge_in.rs + */ +contract SMTMerkleProofVectors is Test, DepositContractTestHelpers { + /** + * @notice Generates vectors of leaves, roots and merkle paths and saves them to the JSON. + * Notice that each value in the leaves/roots array corresponds to 32 values in the + * merkle paths array. + */ + function test_generateVerificationProofData() public { + bytes32[] memory leaves = new bytes32[](32); + bytes32[] memory roots = new bytes32[](32); + bytes32[] memory merkle_paths = new bytes32[](1024); + + // This array represents a merkle path during each iteration. + // This is a workaround which allows to provide the merkle path to verifyMerkleProof + // since the merkle_paths array cannot be sliced. + bytes32[32] memory current_path; + + bytes32[32] memory canonicalZeros = _computeCanonicalZeros(); + + // generate leaves, roots, and merkle_paths arrays + for (uint256 i = 0; i < 32; i++) { + // use bytes32(i + 1) as leaf here just to avoid the zero leaf + bytes32 leaf = bytes32(i + 1); + + // Merkle path in the _branch array during the `i`th iteration actually corresponds to + // the leaf and root with indexes `i - 1` (because the merkle path is computed based on + // the overall number of leaves in the SMT instead of the index of the last leaf), so we + // first update the merkle_paths array and only after that actually add a leaf and + // recompute the _branch. 
+ current_path = _generateLocalProof(i, canonicalZeros); + for (uint256 j = 0; j < 32; j++) { + merkle_paths[i * 32 + j] = current_path[j]; + } + + _addLeaf(leaf); + + leaves[i] = leaf; + roots[i] = getRoot(); + + // perform the sanity check to make sure that the generated data is valid + assert(this.verifyMerkleProof(leaves[i], current_path, uint32(i), roots[i])); + } + + // Serialize parallel arrays to JSON + string memory obj = "root"; + vm.serializeBytes32(obj, "leaves", leaves); + vm.serializeBytes32(obj, "roots", roots); + string memory json = vm.serializeBytes32(obj, "merkle_paths", merkle_paths); + + // Save to file + string memory outputPath = "test-vectors/merkle_proof_vectors.json"; + vm.writeJson(json, outputPath); + console.log("Saved Merkle path vectors to:", outputPath); + } +} diff --git a/crates/miden-agglayer/src/b2agg_note.rs b/crates/miden-agglayer/src/b2agg_note.rs new file mode 100644 index 0000000000..336fab0491 --- /dev/null +++ b/crates/miden-agglayer/src/b2agg_note.rs @@ -0,0 +1,132 @@ +//! Bridge Out note creation utilities. +//! +//! This module provides helpers for creating B2AGG (Bridge to AggLayer) notes, +//! which are used to bridge assets out from Miden to the AggLayer network. 
+ +use alloc::string::ToString; +use alloc::vec::Vec; + +use miden_assembly::serde::Deserializable; +use miden_core::program::Program; +use miden_core::{Felt, Word}; +use miden_protocol::account::AccountId; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteType, +}; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; +use miden_utils_sync::LazyLock; + +use crate::EthAddressFormat; + +// NOTE SCRIPT +// ================================================================================================ + +// Initialize the B2AGG note script only once +static B2AGG_SCRIPT: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/B2AGG.masb")); + let program = Program::read_from_bytes(bytes).expect("shipped B2AGG script is well-formed"); + NoteScript::new(program) +}); + +// B2AGG NOTE +// ================================================================================================ + +/// B2AGG (Bridge to AggLayer) note. +/// +/// This note is used to bridge assets from Miden to another network via the AggLayer. +/// When consumed by a bridge account, the assets are burned and a corresponding +/// claim can be made on the destination network. B2AGG notes are always public. +pub struct B2AggNote; + +impl B2AggNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items for a B2AGG note. + pub const NUM_STORAGE_ITEMS: usize = 6; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the B2AGG (Bridge to AggLayer) note script. + pub fn script() -> NoteScript { + B2AGG_SCRIPT.clone() + } + + /// Returns the B2AGG note script root. 
+ pub fn script_root() -> Word { + B2AGG_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Creates a B2AGG (Bridge to AggLayer) note. + /// + /// This note is used to bridge assets from Miden to another network via the AggLayer. + /// When consumed by a bridge account, the assets are burned and a corresponding + /// claim can be made on the destination network. B2AGG notes are always public. + /// + /// # Parameters + /// - `destination_network`: The AggLayer-assigned network ID for the destination chain + /// - `destination_address`: The Ethereum address on the destination network + /// - `assets`: The assets to bridge (must be fungible assets from a network faucet) + /// - `target_account_id`: The account ID that will consume this note (bridge account) + /// - `sender_account_id`: The account ID of the note creator + /// - `rng`: Random number generator for creating the note serial number + /// + /// # Errors + /// Returns an error if note creation fails. + pub fn create( + destination_network: u32, + destination_address: EthAddressFormat, + assets: NoteAssets, + target_account_id: AccountId, + sender_account_id: AccountId, + rng: &mut R, + ) -> Result { + let note_storage = build_note_storage(destination_network, destination_address)?; + + let attachment = NoteAttachment::from( + NetworkAccountTarget::new(target_account_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))?, + ); + + let metadata = + NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); + + let recipient = NoteRecipient::new(rng.draw_word(), Self::script(), note_storage); + + Ok(Note::new(assets, metadata, recipient)) + } +} + +// HELPER FUNCTIONS +// ================================================================================================ + +/// Builds the note storage for a B2AGG note. 
+/// +/// The storage layout is: +/// - 1 felt: destination_network +/// - 5 felts: destination_address (20 bytes as 5 u32 values) +fn build_note_storage( + destination_network: u32, + destination_address: EthAddressFormat, +) -> Result { + let mut elements = Vec::with_capacity(6); + + let destination_network = u32::from_le_bytes(destination_network.to_be_bytes()); + elements.push(Felt::from(destination_network)); + elements.extend(destination_address.to_elements()); + + NoteStorage::new(elements) +} diff --git a/crates/miden-agglayer/src/claim_note.rs b/crates/miden-agglayer/src/claim_note.rs new file mode 100644 index 0000000000..6cd1c04cad --- /dev/null +++ b/crates/miden-agglayer/src/claim_note.rs @@ -0,0 +1,228 @@ +use alloc::string::ToString; +use alloc::vec; +use alloc::vec::Vec; + +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_core::{Felt, Word}; +use miden_protocol::account::AccountId; +use miden_protocol::crypto::SequentialCommit; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType}; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; + +use crate::{EthAddressFormat, EthAmount, GlobalIndex, MetadataHash, claim_script}; + +// CLAIM NOTE STRUCTURES +// ================================================================================================ + +/// Keccak256 output representation (32-byte hash) +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct Keccak256Output([u8; 32]); + +impl Keccak256Output { + /// Creates a new Keccak256 output from a 32-byte array + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Returns the inner 32-byte array + pub fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } + + /// Converts the Keccak256 output to 8 Felt elements (32-byte value as 8 u32 values in + /// little-endian) + pub fn to_elements(&self) -> Vec { + 
bytes_to_packed_u32_elements(&self.0) + } + + /// Converts the Keccak256 output to two [`Word`]s: `[lo, hi]`. + /// + /// - `lo` contains the first 4 u32-packed felts (bytes 0..16). + /// - `hi` contains the last 4 u32-packed felts (bytes 16..32). + #[cfg(any(test, feature = "testing"))] + pub fn to_words(&self) -> [Word; 2] { + let elements = self.to_elements(); + let lo: [Felt; 4] = elements[0..4].try_into().expect("to_elements returns 8 felts"); + let hi: [Felt; 4] = elements[4..8].try_into().expect("to_elements returns 8 felts"); + [Word::new(lo), Word::new(hi)] + } +} + +impl From<[u8; 32]> for Keccak256Output { + fn from(bytes: [u8; 32]) -> Self { + Self::new(bytes) + } +} + +/// SMT node representation (32-byte Keccak256 hash) +pub type SmtNode = Keccak256Output; + +/// Exit root representation (32-byte Keccak256 hash) +pub type ExitRoot = Keccak256Output; + +/// Proof data for CLAIM note creation. +/// Contains SMT proofs and root hashes using typed representations. +#[derive(Clone)] +pub struct ProofData { + /// SMT proof for local exit root (32 SMT nodes) + pub smt_proof_local_exit_root: [SmtNode; 32], + /// SMT proof for rollup exit root (32 SMT nodes) + pub smt_proof_rollup_exit_root: [SmtNode; 32], + /// Global index (uint256 as 32 bytes) + pub global_index: GlobalIndex, + /// Mainnet exit root hash + pub mainnet_exit_root: ExitRoot, + /// Rollup exit root hash + pub rollup_exit_root: ExitRoot, +} + +impl SequentialCommit for ProofData { + type Commitment = Word; + + fn to_elements(&self) -> Vec { + const PROOF_DATA_ELEMENT_COUNT: usize = 536; // 32*8 + 32*8 + 8 + 8 + 8 (proofs + global_index + 2 exit roots) + let mut elements = Vec::with_capacity(PROOF_DATA_ELEMENT_COUNT); + + // Convert SMT proof elements to felts (each node is 8 felts) + for node in self.smt_proof_local_exit_root.iter() { + elements.extend(node.to_elements()); + } + + for node in self.smt_proof_rollup_exit_root.iter() { + elements.extend(node.to_elements()); + } + + // Global index 
(uint256 as 32 bytes) + elements.extend(self.global_index.to_elements()); + + // Mainnet and rollup exit roots + elements.extend(self.mainnet_exit_root.to_elements()); + elements.extend(self.rollup_exit_root.to_elements()); + + elements + } +} + +/// Leaf data for CLAIM note creation. +/// Contains network, address, amount, and metadata using typed representations. +#[derive(Clone)] +pub struct LeafData { + /// Origin network identifier (uint32) + pub origin_network: u32, + /// Origin token address + pub origin_token_address: EthAddressFormat, + /// Destination network identifier (uint32) + pub destination_network: u32, + /// Destination address + pub destination_address: EthAddressFormat, + /// Amount of tokens (uint256) + pub amount: EthAmount, + /// Metadata hash (32 bytes) + pub metadata_hash: MetadataHash, +} + +impl SequentialCommit for LeafData { + type Commitment = Word; + + fn to_elements(&self) -> Vec { + const LEAF_DATA_ELEMENT_COUNT: usize = 32; // 1 + 1 + 5 + 1 + 5 + 8 + 8 + 3 (leafType + networks + addresses + amount + metadata + padding) + let mut elements = Vec::with_capacity(LEAF_DATA_ELEMENT_COUNT); + + // LeafType (uint32 as Felt): 0u32 for transfer Ether / ERC20 tokens, 1u32 for message + // passing. 
+ // for a `CLAIM` note, leafType is always 0 (transfer Ether / ERC20 tokens) + elements.push(Felt::ZERO); + + // Origin network (encode as little-endian bytes for keccak) + let origin_network = u32::from_le_bytes(self.origin_network.to_be_bytes()); + elements.push(Felt::from(origin_network)); + + // Origin token address (5 u32 felts) + elements.extend(self.origin_token_address.to_elements()); + + // Destination network (encode as little-endian bytes for keccak) + let destination_network = u32::from_le_bytes(self.destination_network.to_be_bytes()); + elements.push(Felt::from(destination_network)); + + // Destination address (5 u32 felts) + elements.extend(self.destination_address.to_elements()); + + // Amount (uint256 as 8 u32 felts) + elements.extend(self.amount.to_elements()); + + // Metadata hash (8 u32 felts) + elements.extend(self.metadata_hash.to_elements()); + + // Padding + elements.extend(vec![Felt::ZERO; 3]); + + elements + } +} + +/// Data for creating a CLAIM note. +/// +/// This struct groups the core data needed to create a CLAIM note that exactly +/// matches the agglayer claimAsset function signature. 
+#[derive(Clone)] +pub struct ClaimNoteStorage { + /// Proof data containing SMT proofs and root hashes + pub proof_data: ProofData, + /// Leaf data containing network, address, amount, and metadata + pub leaf_data: LeafData, + /// Miden claim amount (scaled-down token amount as Felt) + pub miden_claim_amount: Felt, +} + +impl TryFrom for NoteStorage { + type Error = NoteError; + + fn try_from(storage: ClaimNoteStorage) -> Result { + // proof_data + leaf_data + miden_claim_amount + // 536 + 32 + 1 + let mut claim_storage = Vec::with_capacity(569); + + claim_storage.extend(storage.proof_data.to_elements()); + claim_storage.extend(storage.leaf_data.to_elements()); + claim_storage.push(storage.miden_claim_amount); + + NoteStorage::new(claim_storage) + } +} + +// CLAIM NOTE CREATION +// ================================================================================================ + +/// Generates a CLAIM note - a note that instructs an agglayer faucet to validate and mint assets. +/// +/// # Parameters +/// - `storage`: The core storage for creating the CLAIM note +/// - `target_faucet_id`: The account ID of the agglayer faucet that should consume this note. +/// Encoded as a `NetworkAccountTarget` attachment on the note metadata. +/// - `sender_account_id`: The account ID of the CLAIM note creator +/// - `rng`: Random number generator for creating the CLAIM note serial number +/// +/// # Errors +/// Returns an error if note creation fails. +pub fn create_claim_note( + storage: ClaimNoteStorage, + target_faucet_id: AccountId, + sender_account_id: AccountId, + rng: &mut R, +) -> Result { + let note_storage = NoteStorage::try_from(storage.clone())?; + + let attachment = NetworkAccountTarget::new(target_faucet_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))? 
+ .into(); + + let metadata = + NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); + + let recipient = NoteRecipient::new(rng.draw_word(), claim_script(), note_storage); + let assets = NoteAssets::new(vec![])?; + + Ok(Note::new(assets, metadata, recipient)) +} diff --git a/crates/miden-agglayer/src/config_note.rs b/crates/miden-agglayer/src/config_note.rs new file mode 100644 index 0000000000..9cd8b5d439 --- /dev/null +++ b/crates/miden-agglayer/src/config_note.rs @@ -0,0 +1,115 @@ +//! CONFIG_AGG_BRIDGE note creation utilities. +//! +//! This module provides helpers for creating CONFIG_AGG_BRIDGE notes, +//! which are used to register faucets in the bridge's faucet registry. + +extern crate alloc; + +use alloc::string::ToString; +use alloc::vec; + +use miden_assembly::serde::Deserializable; +use miden_core::Word; +use miden_core::program::Program; +use miden_protocol::account::AccountId; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteType, +}; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; +use miden_utils_sync::LazyLock; + +// NOTE SCRIPT +// ================================================================================================ + +// Initialize the CONFIG_AGG_BRIDGE note script only once +static CONFIG_AGG_BRIDGE_SCRIPT: LazyLock = LazyLock::new(|| { + let bytes = + include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/CONFIG_AGG_BRIDGE.masb")); + let program = + Program::read_from_bytes(bytes).expect("shipped CONFIG_AGG_BRIDGE script is well-formed"); + NoteScript::new(program) +}); + +// CONFIG_AGG_BRIDGE NOTE +// ================================================================================================ + +/// CONFIG_AGG_BRIDGE note. 
+/// +/// This note is used to register a faucet in the bridge's faucet registry. +/// It carries the faucet account ID and is always public. +pub struct ConfigAggBridgeNote; + +impl ConfigAggBridgeNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items for a CONFIG_AGG_BRIDGE note. + pub const NUM_STORAGE_ITEMS: usize = 2; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the CONFIG_AGG_BRIDGE note script. + pub fn script() -> NoteScript { + CONFIG_AGG_BRIDGE_SCRIPT.clone() + } + + /// Returns the CONFIG_AGG_BRIDGE note script root. + pub fn script_root() -> Word { + CONFIG_AGG_BRIDGE_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Creates a CONFIG_AGG_BRIDGE note to register a faucet in the bridge's registry. + /// + /// The note storage contains 2 felts (in storage order): + /// - `faucet_id_suffix`: The suffix of the faucet account ID + /// - `faucet_id_prefix`: The prefix of the faucet account ID + /// + /// # Parameters + /// - `faucet_account_id`: The account ID of the faucet to register + /// - `sender_account_id`: The account ID of the note creator + /// - `target_account_id`: The bridge account ID that will consume this note + /// - `rng`: Random number generator for creating the note serial number + /// + /// # Errors + /// Returns an error if note creation fails. 
+ pub fn create( + faucet_account_id: AccountId, + sender_account_id: AccountId, + target_account_id: AccountId, + rng: &mut R, + ) -> Result { + let storage_values = vec![faucet_account_id.suffix(), faucet_account_id.prefix().as_felt()]; + + let note_storage = NoteStorage::new(storage_values)?; + + // Generate a serial number for the note + let serial_num = rng.draw_word(); + + let recipient = NoteRecipient::new(serial_num, Self::script(), note_storage); + + let attachment = NoteAttachment::from( + NetworkAccountTarget::new(target_account_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))?, + ); + let metadata = + NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); + + // CONFIG_AGG_BRIDGE notes don't carry assets + let assets = NoteAssets::new(vec![])?; + + Ok(Note::new(assets, metadata, recipient)) + } +} diff --git a/crates/miden-agglayer/src/errors/agglayer.rs b/crates/miden-agglayer/src/errors/agglayer.rs index efa9275dee..91e98d3725 100644 --- a/crates/miden-agglayer/src/errors/agglayer.rs +++ b/crates/miden-agglayer/src/errors/agglayer.rs @@ -9,25 +9,75 @@ use miden_protocol::errors::MasmError; // AGGLAYER ERRORS // ================================================================================================ -/// Error Message: "most-significant 4 bytes (addr4) must be zero" -pub const ERR_ADDR4_NONZERO: MasmError = MasmError::from_static_str("most-significant 4 bytes (addr4) must be zero"); - +/// Error Message: "B2AGG note attachment target account does not match consuming account" +pub const ERR_B2AGG_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("B2AGG note attachment target account does not match consuming account"); +/// Error Message: "B2AGG script expects exactly 6 note storage items" +pub const ERR_B2AGG_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("B2AGG script expects exactly 6 note storage items"); /// Error Message: "B2AGG script 
requires exactly 1 note asset" pub const ERR_B2AGG_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("B2AGG script requires exactly 1 note asset"); -/// Error Message: "B2AGG script expects exactly 6 note inputs" -pub const ERR_B2AGG_WRONG_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("B2AGG script expects exactly 6 note inputs"); -/// Error Message: "CLAIM's target account address and transaction address do not match" -pub const ERR_CLAIM_TARGET_ACCT_MISMATCH: MasmError = MasmError::from_static_str("CLAIM's target account address and transaction address do not match"); +/// Error Message: "bridge not mainnet" +pub const ERR_BRIDGE_NOT_MAINNET: MasmError = MasmError::from_static_str("bridge not mainnet"); + +/// Error Message: "CLAIM note attachment target account does not match consuming account" +pub const ERR_CLAIM_TARGET_ACCT_MISMATCH: MasmError = MasmError::from_static_str("CLAIM note attachment target account does not match consuming account"); + +/// Error Message: "CONFIG_AGG_BRIDGE note attachment target account does not match consuming account" +pub const ERR_CONFIG_AGG_BRIDGE_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("CONFIG_AGG_BRIDGE note attachment target account does not match consuming account"); +/// Error Message: "CONFIG_AGG_BRIDGE expects exactly 2 note storage items" +pub const ERR_CONFIG_AGG_BRIDGE_UNEXPECTED_STORAGE_ITEMS: MasmError = MasmError::from_static_str("CONFIG_AGG_BRIDGE expects exactly 2 note storage items"); + +/// Error Message: "faucet is not registered in the bridge's faucet registry" +pub const ERR_FAUCET_NOT_REGISTERED: MasmError = MasmError::from_static_str("faucet is not registered in the bridge's faucet registry"); /// Error Message: "combined u64 doesn't fit in field" pub const ERR_FELT_OUT_OF_FIELD: MasmError = MasmError::from_static_str("combined u64 doesn't fit in field"); +/// Error Message: "GER not found in storage" +pub const ERR_GER_NOT_FOUND: MasmError = 
MasmError::from_static_str("GER not found in storage"); + /// Error Message: "invalid claim proof" pub const ERR_INVALID_CLAIM_PROOF: MasmError = MasmError::from_static_str("invalid claim proof"); +/// Error Message: "leading bits of global index must be zero" +pub const ERR_LEADING_BITS_NON_ZERO: MasmError = MasmError::from_static_str("leading bits of global index must be zero"); + +/// Error Message: "number of leaves in the MMR of the MMR Frontier would exceed 4294967295 (2^32 - 1)" +pub const ERR_MMR_FRONTIER_LEAVES_NUM_EXCEED_LIMIT: MasmError = MasmError::from_static_str("number of leaves in the MMR of the MMR Frontier would exceed 4294967295 (2^32 - 1)"); + +/// Error Message: "most-significant 4 bytes must be zero for AccountId" +pub const ERR_MSB_NONZERO: MasmError = MasmError::from_static_str("most-significant 4 bytes must be zero for AccountId"); + /// Error Message: "address limb is not u32" pub const ERR_NOT_U32: MasmError = MasmError::from_static_str("address limb is not u32"); +/// Error Message: "remainder z must be < 10^s" +pub const ERR_REMAINDER_TOO_LARGE: MasmError = MasmError::from_static_str("remainder z must be < 10^s"); + +/// Error Message: "rollup index must be zero for a mainnet deposit" +pub const ERR_ROLLUP_INDEX_NON_ZERO: MasmError = MasmError::from_static_str("rollup index must be zero for a mainnet deposit"); + /// Error Message: "maximum scaling factor is 18" pub const ERR_SCALE_AMOUNT_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("maximum scaling factor is 18"); + +/// Error Message: "note sender is not the bridge admin" +pub const ERR_SENDER_NOT_BRIDGE_ADMIN: MasmError = MasmError::from_static_str("note sender is not the bridge admin"); +/// Error Message: "note sender is not the global exit root manager" +pub const ERR_SENDER_NOT_GER_MANAGER: MasmError = MasmError::from_static_str("note sender is not the global exit root manager"); + +/// Error Message: "merkle proof verification failed: provided SMT root does not match 
the computed root" +pub const ERR_SMT_ROOT_VERIFICATION_FAILED: MasmError = MasmError::from_static_str("merkle proof verification failed: provided SMT root does not match the computed root"); + +/// Error Message: "x < y*10^s (underflow detected)" +pub const ERR_UNDERFLOW: MasmError = MasmError::from_static_str("x < y*10^s (underflow detected)"); + +/// Error Message: "UPDATE_GER note attachment target account does not match consuming account" +pub const ERR_UPDATE_GER_TARGET_ACCOUNT_MISMATCH: MasmError = MasmError::from_static_str("UPDATE_GER note attachment target account does not match consuming account"); +/// Error Message: "UPDATE_GER script expects exactly 8 note storage items" +pub const ERR_UPDATE_GER_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS: MasmError = MasmError::from_static_str("UPDATE_GER script expects exactly 8 note storage items"); + +/// Error Message: "the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64" +pub const ERR_X_TOO_LARGE: MasmError = MasmError::from_static_str("the agglayer bridge in u256 value is larger than 2**128 and cannot be verifiably scaled to u64"); + +/// Error Message: "y exceeds max fungible token amount" +pub const ERR_Y_TOO_LARGE: MasmError = MasmError::from_static_str("y exceeds max fungible token amount"); diff --git a/crates/miden-agglayer/src/eth_address.rs b/crates/miden-agglayer/src/eth_types/address.rs similarity index 82% rename from crates/miden-agglayer/src/eth_address.rs rename to crates/miden-agglayer/src/eth_types/address.rs index f2a94ed6df..8b489badf8 100644 --- a/crates/miden-agglayer/src/eth_address.rs +++ b/crates/miden-agglayer/src/eth_types/address.rs @@ -1,8 +1,9 @@ use alloc::format; use alloc::string::{String, ToString}; +use alloc::vec::Vec; use core::fmt; -use miden_core::FieldElement; +use miden_core::utils::bytes_to_packed_u32_elements; use miden_protocol::Felt; use miden_protocol::account::AccountId; use miden_protocol::utils::{HexParseError, 
bytes_to_hex_string, hex_to_bytes}; @@ -17,15 +18,16 @@ use miden_protocol::utils::{HexParseError, bytes_to_hex_string, hex_to_bytes}; /// /// - Raw bytes: `[u8; 20]` in the conventional Ethereum big-endian byte order (`bytes[0]` is the /// most-significant byte). -/// - MASM "address\[5\]" limbs: 5 x u32 limbs in *little-endian limb order*: -/// - addr0 = bytes[16..19] (least-significant 4 bytes) -/// - addr1 = bytes[12..15] -/// - addr2 = bytes[ 8..11] -/// - addr3 = bytes[ 4.. 7] -/// - addr4 = bytes[ 0.. 3] (most-significant 4 bytes) +/// - MASM "address\[5\]" limbs: 5 x u32 limbs in *big-endian limb order* (each limb encodes its 4 +/// bytes in little-endian order so felts map to keccak bytes directly): +/// - `address[0]` = bytes[0..4] (most-significant 4 bytes, zero for embedded AccountId) +/// - `address[1]` = bytes[4..8] +/// - `address[2]` = bytes[8..12] +/// - `address[3]` = bytes[12..16] +/// - `address[4]` = bytes[16..20] (least-significant 4 bytes) /// - Embedded AccountId format: `0x00000000 || prefix(8) || suffix(8)`, where: -/// - prefix = (addr3 << 32) | addr2 = bytes[4..11] as a big-endian u64 -/// - suffix = (addr1 << 32) | addr0 = bytes[12..19] as a big-endian u64 +/// - prefix = bytes[4..12] as a big-endian u64 +/// - suffix = bytes[12..20] as a big-endian u64 /// /// Note: prefix/suffix are *conceptual* 64-bit words; when converting to [`Felt`], we must ensure /// `Felt::new(u64)` does not reduce mod p (checked explicitly in `to_account_id`). 
@@ -80,8 +82,8 @@ impl EthAddressFormat { let felts: [Felt; 2] = account_id.into(); let mut out = [0u8; 20]; - out[4..12].copy_from_slice(&felts[0].as_int().to_be_bytes()); - out[12..20].copy_from_slice(&felts[1].as_int().to_be_bytes()); + out[4..12].copy_from_slice(&felts[0].as_canonical_u64().to_be_bytes()); + out[12..20].copy_from_slice(&felts[1].as_canonical_u64().to_be_bytes()); Self(out) } @@ -104,31 +106,22 @@ impl EthAddressFormat { // INTERNAL API - For CLAIM note processing // -------------------------------------------------------------------------------------------- - /// Converts the Ethereum address format into an array of 5 [`Felt`] values for MASM processing. + /// Converts the Ethereum address format into an array of 5 [`Felt`] values for Miden VM. /// /// **Internal API**: This function is used internally during CLAIM note processing to convert - /// the address format into the MASM `address[5]` representation expected by the + /// the address into the MASM `address[5]` representation expected by the /// `to_account_id` procedure. /// - /// The returned order matches the MASM `address\[5\]` convention (*little-endian limb order*): - /// - addr0 = bytes[16..19] (least-significant 4 bytes) - /// - addr1 = bytes[12..15] - /// - addr2 = bytes[ 8..11] - /// - addr3 = bytes[ 4.. 7] - /// - addr4 = bytes[ 0.. 3] (most-significant 4 bytes) + /// The returned order matches the Solidity ABI encoding convention (*big-endian limb order*): + /// - `address[0]` = bytes[0..4] (most-significant 4 bytes, zero for embedded AccountId) + /// - `address[1]` = bytes[4..8] + /// - `address[2]` = bytes[8..12] + /// - `address[3]` = bytes[12..16] + /// - `address[4]` = bytes[16..20] (least-significant 4 bytes) /// - /// Each limb is interpreted as a big-endian `u32` and stored in a [`Felt`]. 
- pub fn to_elements(&self) -> [Felt; 5] { - let mut result = [Felt::ZERO; 5]; - - // i=0 -> bytes[16..20], i=4 -> bytes[0..4] - for (felt, chunk) in result.iter_mut().zip(self.0.chunks(4).skip(1).rev()) { - let value = u32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]); - // u32 values always fit in Felt, so this conversion is safe - *felt = Felt::try_from(value as u64).expect("u32 value should always fit in Felt"); - } - - result + /// Each limb is interpreted as a little-endian `u32` and stored in a [`Felt`]. + pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) } /// Converts the Ethereum address format back to an [`AccountId`]. @@ -153,7 +146,7 @@ impl EthAddressFormat { let suffix_felt = Felt::try_from(suffix).map_err(|_| AddressConversionError::FeltOutOfField)?; - AccountId::try_from([prefix_felt, suffix_felt]) + AccountId::try_from_elements(suffix_felt, prefix_felt) .map_err(|_| AddressConversionError::InvalidAccountId) } @@ -162,7 +155,7 @@ impl EthAddressFormat { /// Convert `[u8; 20]` -> `(prefix, suffix)` by extracting the last 16 bytes. /// Requires the first 4 bytes be zero. 
- /// Returns prefix and suffix values that match the MASM little-endian limb implementation: + /// Returns prefix and suffix values that match the MASM little-endian limb byte encoding: /// - prefix = bytes[4..12] as big-endian u64 = (addr3 << 32) | addr2 /// - suffix = bytes[12..20] as big-endian u64 = (addr1 << 32) | addr0 fn bytes20_to_prefix_suffix(bytes: [u8; 20]) -> Result<(u64, u64), AddressConversionError> { diff --git a/crates/miden-agglayer/src/eth_types/amount.rs b/crates/miden-agglayer/src/eth_types/amount.rs new file mode 100644 index 0000000000..9fda836856 --- /dev/null +++ b/crates/miden-agglayer/src/eth_types/amount.rs @@ -0,0 +1,146 @@ +use alloc::vec::Vec; + +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_protocol::Felt; +use miden_protocol::asset::FungibleAsset; +use primitive_types::U256; +use thiserror::Error; + +// ================================================================================================ +// ETHEREUM AMOUNT ERROR +// ================================================================================================ + +/// Error type for Ethereum amount conversions. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Error)] +pub enum EthAmountError { + /// The amount doesn't fit in the target type. + #[error("amount overflow: value doesn't fit in target type")] + Overflow, + /// The scaling factor is too large (> 18). + #[error("scaling factor too large: maximum is 18")] + ScaleTooLarge, + /// The scaled-down value doesn't fit in a u64. + #[error("scaled value doesn't fit in u64")] + ScaledValueDoesNotFitU64, + /// The scaled-down value exceeds the maximum fungible token amount. 
+ #[error("scaled value exceeds the maximum fungible token amount")] + ScaledValueExceedsMaxFungibleAmount, +} + +// ================================================================================================ +// ETHEREUM AMOUNT +// ================================================================================================ + +/// Represents an Ethereum uint256 amount as 8 u32 values. +/// +/// This type provides a more typed representation of Ethereum amounts compared to raw `[u32; 8]` +/// arrays, while maintaining compatibility with the existing MASM processing pipeline. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct EthAmount([u8; 32]); + +impl EthAmount { + /// Creates an [`EthAmount`] from a 32-byte array. + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Creates an [`EthAmount`] from a decimal (uint) string. + /// + /// The string should contain only ASCII decimal digits (e.g. `"2000000000000000000"`). + /// The value is stored as a 32-byte big-endian array, matching the Solidity uint256 layout. + /// + /// # Errors + /// + /// Returns [`EthAmountError`] if the string is empty, contains non-digit characters, + /// or represents a value that overflows uint256. + pub fn from_uint_str(s: &str) -> Result { + let value = U256::from_dec_str(s).map_err(|_| EthAmountError::Overflow)?; + Ok(Self(value.to_big_endian())) + } + + /// Converts the EthAmount to a U256 for easier arithmetic operations. + pub fn to_u256(&self) -> U256 { + U256::from_big_endian(&self.0) + } + + /// Creates an EthAmount from a U256 value. + /// + /// This constructor is only available in test code to make test arithmetic easier. + #[cfg(any(test, feature = "testing"))] + pub fn from_u256(value: U256) -> Self { + Self(value.to_big_endian()) + } + + /// Converts the amount to a vector of field elements for note storage. + /// + /// Each u32 value in the amount array is converted to a [`Felt`]. 
+ pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) + } + + /// Returns the raw 32-byte array. + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } +} + + // ================================================================================================ + // U256 SCALING DOWN HELPERS + // ================================================================================================ + + /// Maximum scaling factor for decimal conversions + const MAX_SCALING_FACTOR: u32 = 18; + + /// Calculate 10^scale where scale is a u32 exponent. + /// + /// # Errors + /// Returns [`EthAmountError::ScaleTooLarge`] if scale > 18. + fn pow10_u64(scale: u32) -> Result { + if scale > MAX_SCALING_FACTOR { + return Err(EthAmountError::ScaleTooLarge); + } + Ok(10_u64.pow(scale)) + } + + impl EthAmount { + /// Converts a U256 amount to a Miden Felt by scaling down by 10^scale_exp. + /// + /// This is the deterministic reference implementation that computes: + /// - `y = floor(x / 10^scale_exp)` (the Miden amount as a Felt) + /// + /// # Arguments + /// * `scale_exp` - The scaling exponent (0-18) + /// + /// # Returns + /// The scaled-down Miden amount as a Felt + /// + /// # Errors + /// - [`EthAmountError::ScaleTooLarge`] if scale_exp > 18 + /// - [`EthAmountError::ScaledValueDoesNotFitU64`] if the result doesn't fit in a u64 + /// - [`EthAmountError::ScaledValueExceedsMaxFungibleAmount`] if the scaled value exceeds the + /// maximum fungible token amount + /// + /// # Example + /// ```ignore + /// let eth_amount = EthAmount::from_uint_str("1000000000000000000")?; // 1 ETH in wei + /// let miden_amount = eth_amount.scale_to_token_amount(12)?; + /// // Result: 1_000_000 (1e6, Miden representation) + /// ``` + pub fn scale_to_token_amount(&self, scale_exp: u32) -> Result { + let x = self.to_u256(); + let scale = U256::from(pow10_u64(scale_exp)?); + + let y_u256 = x / scale; + + // y must fit into u64; canonical Felt is guaranteed by max amount bound + let y_u64: u64 = 
y_u256.try_into().map_err(|_| EthAmountError::ScaledValueDoesNotFitU64)?; + + if y_u64 > FungibleAsset::MAX_AMOUNT { + return Err(EthAmountError::ScaledValueExceedsMaxFungibleAmount); + } + + // Safe because FungibleAsset::MAX_AMOUNT < Felt modulus + let y_felt = Felt::try_from(y_u64).expect("scaled value must fit into canonical Felt"); + Ok(y_felt) + } +} diff --git a/crates/miden-agglayer/src/eth_types/global_index.rs b/crates/miden-agglayer/src/eth_types/global_index.rs new file mode 100644 index 0000000000..13a228ac81 --- /dev/null +++ b/crates/miden-agglayer/src/eth_types/global_index.rs @@ -0,0 +1,152 @@ +use alloc::vec::Vec; + +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_protocol::Felt; +use miden_protocol::utils::{HexParseError, hex_to_bytes}; + +// ================================================================================================ +// GLOBAL INDEX ERROR +// ================================================================================================ + +/// Error type for GlobalIndex validation. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum GlobalIndexError { + /// The leading 160 bits of the global index are not zero. + LeadingBitsNonZero, + /// The mainnet flag is not 1. + InvalidMainnetFlag, + /// The rollup index is not zero for a mainnet deposit. + RollupIndexNonZero, +} + +// ================================================================================================ +// GLOBAL INDEX +// ================================================================================================ + +/// Represents an AggLayer global index as a 256-bit value (32 bytes). 
+/// +/// The global index is a uint256 that encodes (from MSB to LSB): +/// - Top 160 bits (limbs 0-4): must be zero +/// - 32 bits (limb 5): mainnet flag (value = 1 for mainnet, 0 for rollup) +/// - 32 bits (limb 6): rollup index (must be 0 for mainnet deposits) +/// - 32 bits (limb 7): leaf index (deposit index in the local exit tree) +/// +/// Bytes are stored in big-endian order, matching Solidity's uint256 representation. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct GlobalIndex([u8; 32]); + +impl GlobalIndex { + /// Creates a [`GlobalIndex`] from a hex string (with or without "0x" prefix). + /// + /// The hex string should represent a Solidity uint256 in big-endian format + /// (64 hex characters for 32 bytes). + pub fn from_hex(hex_str: &str) -> Result { + let bytes: [u8; 32] = hex_to_bytes(hex_str)?; + Ok(Self(bytes)) + } + + /// Creates a new [`GlobalIndex`] from a 32-byte array (big-endian). + pub fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Validates that this is a valid mainnet deposit global index. + /// + /// Checks that: + /// - The top 160 bits (limbs 0-4, bytes 0-19) are zero + /// - The mainnet flag (limb 5, bytes 20-23) is exactly 1 + /// - The rollup index (limb 6, bytes 24-27) is 0 + pub fn validate_mainnet(&self) -> Result<(), GlobalIndexError> { + // Check limbs 0-4 are zero (bytes 0-19) + if self.0[0..20].iter().any(|&b| b != 0) { + return Err(GlobalIndexError::LeadingBitsNonZero); + } + + // Check mainnet flag limb (bytes 20-23) is exactly 1 + if !self.is_mainnet() { + return Err(GlobalIndexError::InvalidMainnetFlag); + } + + // Check rollup index is zero (bytes 24-27) + if u32::from_be_bytes([self.0[24], self.0[25], self.0[26], self.0[27]]) != 0 { + return Err(GlobalIndexError::RollupIndexNonZero); + } + + Ok(()) + } + + /// Returns the leaf index (limb 7, lowest 32 bits). 
+ pub fn leaf_index(&self) -> u32 { + u32::from_be_bytes([self.0[28], self.0[29], self.0[30], self.0[31]]) + } + + /// Returns the rollup index (limb 6). + pub fn rollup_index(&self) -> u32 { + u32::from_be_bytes([self.0[24], self.0[25], self.0[26], self.0[27]]) + } + + /// Returns true if this is a mainnet deposit (mainnet flag = 1). + pub fn is_mainnet(&self) -> bool { + u32::from_be_bytes([self.0[20], self.0[21], self.0[22], self.0[23]]) == 1 + } + + /// Converts to field elements for note storage / MASM processing. + pub fn to_elements(&self) -> Vec { + bytes_to_packed_u32_elements(&self.0) + } + + /// Returns the raw 32-byte array (big-endian). + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_mainnet_global_indices_from_production() { + // Real mainnet global indices from production + // Format: (1 << 64) + leaf_index for mainnet deposits + // 18446744073709786619 = 0x1_0000_0000_0003_95FB (leaf_index = 235003) + // 18446744073709786590 = 0x1_0000_0000_0003_95DE (leaf_index = 234974) + let test_cases = [ + ("0x00000000000000000000000000000000000000000000000100000000000395fb", 235003u32), + ("0x00000000000000000000000000000000000000000000000100000000000395de", 234974u32), + ]; + + for (hex, expected_leaf_index) in test_cases { + let gi = GlobalIndex::from_hex(hex).expect("valid hex"); + + // Validate as mainnet + assert!(gi.validate_mainnet().is_ok(), "should be valid mainnet global index"); + + // Construction sanity checks + assert!(gi.is_mainnet()); + assert_eq!(gi.rollup_index(), 0); + assert_eq!(gi.leaf_index(), expected_leaf_index); + + // Verify to_elements produces correct LE-packed u32 felts + // -------------------------------------------------------------------------------- + + let elements = gi.to_elements(); + assert_eq!(elements.len(), 8); + + // leading zeros + assert_eq!(elements[0..5], [Felt::ZERO; 5]); + + // mainnet flag: BE value 1 → LE-packed as 0x01000000 + 
assert_eq!(elements[5], Felt::new(u32::from_le_bytes(1u32.to_be_bytes()) as u64)); + + // rollup index + assert_eq!(elements[6], Felt::ZERO); + + // leaf index: BE value → LE-packed + assert_eq!( + elements[7], + Felt::new(u32::from_le_bytes(expected_leaf_index.to_be_bytes()) as u64) + ); + } + } +} diff --git a/crates/miden-agglayer/src/eth_types/metadata_hash.rs b/crates/miden-agglayer/src/eth_types/metadata_hash.rs new file mode 100644 index 0000000000..51aa41d026 --- /dev/null +++ b/crates/miden-agglayer/src/eth_types/metadata_hash.rs @@ -0,0 +1,34 @@ +use alloc::vec::Vec; + +use miden_core::utils::bytes_to_packed_u32_elements; +use miden_protocol::Felt; + +// ================================================================================================ +// METADATA HASH +// ================================================================================================ + +/// Represents a Keccak256 metadata hash as 32 bytes. +/// +/// This type provides a typed representation of metadata hashes for the agglayer bridge, +/// while maintaining compatibility with the existing MASM processing pipeline. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct MetadataHash([u8; 32]); + +impl MetadataHash { + /// Creates a new [`MetadataHash`] from a 32-byte array. + pub const fn new(bytes: [u8; 32]) -> Self { + Self(bytes) + } + + /// Returns the raw 32-byte array. + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } + + /// Converts the metadata hash to 8 Felt elements for MASM processing. + /// + /// Each 4-byte chunk is converted to a u32 using little-endian byte order. 
+    pub fn to_elements(&self) -> Vec<Felt> {
+        bytes_to_packed_u32_elements(&self.0)
+    }
+}
diff --git a/crates/miden-agglayer/src/eth_types/mod.rs b/crates/miden-agglayer/src/eth_types/mod.rs
new file mode 100644
index 0000000000..3bee167e5d
--- /dev/null
+++ b/crates/miden-agglayer/src/eth_types/mod.rs
@@ -0,0 +1,9 @@
+pub mod address;
+pub mod amount;
+pub mod global_index;
+pub mod metadata_hash;
+
+pub use address::EthAddressFormat;
+pub use amount::{EthAmount, EthAmountError};
+pub use global_index::{GlobalIndex, GlobalIndexError};
+pub use metadata_hash::MetadataHash;
diff --git a/crates/miden-agglayer/src/lib.rs b/crates/miden-agglayer/src/lib.rs
index f0d9ab661c..b8870a89ef 100644
--- a/crates/miden-agglayer/src/lib.rs
+++ b/crates/miden-agglayer/src/lib.rs
@@ -2,13 +2,14 @@
 extern crate alloc;
 
-use alloc::string::ToString;
 use alloc::vec;
 use alloc::vec::Vec;
 
 use miden_assembly::Library;
-use miden_assembly::utils::Deserializable;
-use miden_core::{Felt, FieldElement, Program, Word};
+use miden_assembly::serde::Deserializable;
+use miden_core::program::Program;
+use miden_core::{Felt, Word};
+use miden_protocol::account::component::AccountComponentMetadata;
 use miden_protocol::account::{
     Account,
     AccountBuilder,
@@ -20,49 +21,39 @@ use miden_protocol::account::{
     StorageSlotName,
 };
 use miden_protocol::asset::TokenSymbol;
-use miden_protocol::crypto::rand::FeltRng;
-use miden_protocol::errors::NoteError;
-use miden_protocol::note::{
-    Note,
-    NoteAssets,
-    NoteExecutionHint,
-    NoteInputs,
-    NoteMetadata,
-    NoteRecipient,
-    NoteScript,
-    NoteTag,
-    NoteType,
-};
+use miden_protocol::block::account_tree::AccountIdKey;
+use miden_protocol::note::NoteScript;
 use miden_standards::account::auth::NoAuth;
-use miden_standards::account::faucets::NetworkFungibleFaucet;
-use miden_standards::note::NetworkAccountTarget;
+use miden_standards::account::faucets::{FungibleFaucetError, TokenMetadata};
 use miden_utils_sync::LazyLock;
 
+pub mod b2agg_note;
+pub mod claim_note;
+pub 
mod config_note; pub mod errors; -pub mod eth_address; -pub mod utils; - -pub use eth_address::EthAddressFormat; -use utils::bytes32_to_felts; +pub mod eth_types; +pub mod update_ger_note; + +pub use b2agg_note::B2AggNote; +pub use claim_note::{ClaimNoteStorage, ExitRoot, LeafData, ProofData, SmtNode, create_claim_note}; +pub use config_note::ConfigAggBridgeNote; +pub use eth_types::{ + EthAddressFormat, + EthAmount, + EthAmountError, + GlobalIndex, + GlobalIndexError, + MetadataHash, +}; +pub use update_ger_note::UpdateGerNote; // AGGLAYER NOTE SCRIPTS // ================================================================================================ -// Initialize the B2AGG note script only once -static B2AGG_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/B2AGG.masb")); - Program::read_from_bytes(bytes).expect("Shipped B2AGG script is well-formed") -}); - -/// Returns the B2AGG (Bridge to AggLayer) note script. -pub fn b2agg_script() -> Program { - B2AGG_SCRIPT.clone() -} - // Initialize the CLAIM note script only once static CLAIM_SCRIPT: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/CLAIM.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped CLAIM script is well-formed"); + let program = Program::read_from_bytes(bytes).expect("shipped CLAIM script is well-formed"); NoteScript::new(program) }); @@ -74,83 +65,176 @@ pub fn claim_script() -> NoteScript { // AGGLAYER ACCOUNT COMPONENTS // ================================================================================================ -// Initialize the unified AggLayer library only once static AGGLAYER_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/agglayer.masl")); - Library::read_from_bytes(bytes).expect("Shipped AggLayer library is well-formed") + Library::read_from_bytes(bytes).expect("shipped AggLayer library is 
well-formed") +}); + +static BRIDGE_COMPONENT_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/components/bridge.masl")); + Library::read_from_bytes(bytes).expect("shipped bridge component library is well-formed") }); -/// Returns the unified AggLayer Library containing all agglayer modules. +static FAUCET_COMPONENT_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/components/faucet.masl")); + Library::read_from_bytes(bytes).expect("shipped faucet component library is well-formed") +}); + +/// Returns the AggLayer Library containing all agglayer modules. pub fn agglayer_library() -> Library { AGGLAYER_LIBRARY.clone() } -/// Returns the Bridge Out Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn bridge_out_library() -> Library { - agglayer_library() +/// Returns the Bridge component library. +fn agglayer_bridge_component_library() -> Library { + BRIDGE_COMPONENT_LIBRARY.clone() } -/// Returns the Local Exit Tree Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn local_exit_tree_library() -> Library { - agglayer_library() +/// Returns the Faucet component library. +fn agglayer_faucet_component_library() -> Library { + FAUCET_COMPONENT_LIBRARY.clone() } -/// Creates a Local Exit Tree component with the specified storage slots. -/// -/// This component uses the local_exit_tree library and can be added to accounts -/// that need to manage local exit tree functionality. 
-pub fn local_exit_tree_component(storage_slots: Vec) -> AccountComponent { - let library = local_exit_tree_library(); - - AccountComponent::new(library, storage_slots) - .expect("local_exit_tree component should satisfy the requirements of a valid account component") - .with_supports_all_types() -} +/// Creates an AggLayer Bridge component with the specified storage slots. +fn bridge_component(storage_slots: Vec) -> AccountComponent { + let library = agglayer_bridge_component_library(); + let metadata = AccountComponentMetadata::new("agglayer::bridge", AccountType::all()) + .with_description("Bridge component for AggLayer"); -/// Creates a Bridge Out component with the specified storage slots. -/// -/// This component uses the bridge_out library and can be added to accounts -/// that need to bridge assets out to the AggLayer. -pub fn bridge_out_component(storage_slots: Vec) -> AccountComponent { - let library = bridge_out_library(); - - AccountComponent::new(library, storage_slots) - .expect("bridge_out component should satisfy the requirements of a valid account component") - .with_supports_all_types() + AccountComponent::new(library, storage_slots, metadata) + .expect("bridge component should satisfy the requirements of a valid account component") } -/// Returns the Bridge In Library. 
+// AGGLAYER BRIDGE STRUCT +// ================================================================================================ + +static GER_MAP_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::bridge::ger") + .expect("bridge storage slot name should be valid") +}); +static LET_FRONTIER_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::let").expect("LET storage slot name should be valid") +}); +static LET_ROOT_LO_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::let::root_lo") + .expect("LET root_lo storage slot name should be valid") +}); +static LET_ROOT_HI_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::let::root_hi") + .expect("LET root_hi storage slot name should be valid") +}); +static LET_NUM_LEAVES_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::let::num_leaves") + .expect("LET num_leaves storage slot name should be valid") +}); +static FAUCET_REGISTRY_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::bridge::faucet_registry") + .expect("faucet registry storage slot name should be valid") +}); +static BRIDGE_ADMIN_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::bridge::admin") + .expect("bridge admin storage slot name should be valid") +}); +static GER_MANAGER_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::bridge::ger_manager") + .expect("GER manager storage slot name should be valid") +}); + +/// An [`AccountComponent`] implementing the AggLayer Bridge. +/// +/// It reexports the procedures from `miden::agglayer::bridge`. When linking against this +/// component, the `agglayer` library must be available to the assembler. +/// The procedures of this component are: +/// - `assert_sender_is_bridge_admin`, which validates CONFIG note senders. 
+/// - `assert_sender_is_ger_manager`, which validates UPDATE_GER note senders. +/// - `register_faucet`, which registers a faucet in the bridge. +/// - `update_ger`, which injects a new GER into the storage map. +/// - `verify_leaf_bridge`, which verifies a deposit leaf against one of the stored GERs. +/// - `bridge_out`, which bridges an asset out of Miden to the destination network. +/// +/// ## Storage Layout +/// +/// - [`Self::ger_map_slot_name`]: Stores the GERs. +/// - [`Self::let_frontier_slot_name`]: Stores the Local Exit Tree (LET) frontier. +/// - [`Self::ler_lo_slot_name`]: Stores the lower 32 bits of the LET root. +/// - [`Self::ler_hi_slot_name`]: Stores the upper 32 bits of the LET root. +/// - [`Self::let_num_leaves_slot_name`]: Stores the number of leaves in the LET frontier. +/// - [`Self::faucet_registry_slot_name`]: Stores the faucet registry map. +/// - [`Self::bridge_admin_slot_name`]: Stores the bridge admin account ID. +/// - [`Self::ger_manager_slot_name`]: Stores the GER manager account ID. /// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. -pub fn bridge_in_library() -> Library { - agglayer_library() +/// The bridge starts with an empty faucet registry; faucets are registered at runtime via +/// CONFIG_AGG_BRIDGE notes. +#[derive(Debug, Clone)] +pub struct AggLayerBridge { + bridge_admin_id: AccountId, + ger_manager_id: AccountId, } -/// Creates a Bridge In component with the specified storage slots. -/// -/// This component uses the agglayer library and can be added to accounts -/// that need to bridge assets in from the AggLayer. 
-pub fn bridge_in_component(storage_slots: Vec) -> AccountComponent { - let library = bridge_in_library(); - - AccountComponent::new(library, storage_slots) - .expect("bridge_in component should satisfy the requirements of a valid account component") - .with_supports_all_types() +impl AggLayerBridge { + /// Creates a new AggLayer bridge component with the standard configuration. + pub fn new(bridge_admin_id: AccountId, ger_manager_id: AccountId) -> Self { + Self { bridge_admin_id, ger_manager_id } + } + + /// Storage slot name for the GERs map. + pub fn ger_map_slot_name() -> &'static StorageSlotName { + &GER_MAP_SLOT_NAME + } + + /// Storage slot name for the Local Exit Tree (LET) frontier. + pub fn let_frontier_slot_name() -> &'static StorageSlotName { + &LET_FRONTIER_SLOT_NAME + } + + /// Storage slot name for the lower 32 bits of the LET root. + pub fn ler_lo_slot_name() -> &'static StorageSlotName { + &LET_ROOT_LO_SLOT_NAME + } + + /// Storage slot name for the upper 32 bits of the LET root. + pub fn ler_hi_slot_name() -> &'static StorageSlotName { + &LET_ROOT_HI_SLOT_NAME + } + + /// Storage slot name for the number of leaves in the LET frontier. + pub fn let_num_leaves_slot_name() -> &'static StorageSlotName { + &LET_NUM_LEAVES_SLOT_NAME + } + + /// Storage slot name for the faucet registry map. + pub fn faucet_registry_slot_name() -> &'static StorageSlotName { + &FAUCET_REGISTRY_SLOT_NAME + } + + /// Storage slot name for the bridge admin account ID. + pub fn bridge_admin_slot_name() -> &'static StorageSlotName { + &BRIDGE_ADMIN_SLOT_NAME + } + + /// Storage slot name for the GER manager account ID. + pub fn ger_manager_slot_name() -> &'static StorageSlotName { + &GER_MANAGER_SLOT_NAME + } } -/// Returns the Agglayer Faucet Library. -/// -/// Note: This is now the same as agglayer_library() since all agglayer components -/// are compiled into a single library. 
-pub fn agglayer_faucet_library() -> Library { - agglayer_library() +impl From for AccountComponent { + fn from(bridge: AggLayerBridge) -> Self { + let bridge_admin_word = AccountIdKey::new(bridge.bridge_admin_id).as_word(); + let ger_manager_word = AccountIdKey::new(bridge.ger_manager_id).as_word(); + + let bridge_storage_slots = vec![ + StorageSlot::with_empty_map(GER_MAP_SLOT_NAME.clone()), + StorageSlot::with_empty_map(LET_FRONTIER_SLOT_NAME.clone()), + StorageSlot::with_value(LET_ROOT_LO_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_value(LET_ROOT_HI_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_value(LET_NUM_LEAVES_SLOT_NAME.clone(), Word::empty()), + StorageSlot::with_empty_map(FAUCET_REGISTRY_SLOT_NAME.clone()), + StorageSlot::with_value(BRIDGE_ADMIN_SLOT_NAME.clone(), bridge_admin_word), + StorageSlot::with_value(GER_MANAGER_SLOT_NAME.clone(), ger_manager_word), + ]; + bridge_component(bridge_storage_slots) + } } /// Creates an Agglayer Faucet component with the specified storage slots. @@ -158,143 +242,290 @@ pub fn agglayer_faucet_library() -> Library { /// This component combines network faucet functionality with bridge validation /// via Foreign Procedure Invocation (FPI). It provides a "claim" procedure that /// validates CLAIM notes against a bridge MMR account before minting assets. 
-pub fn agglayer_faucet_component(storage_slots: Vec) -> AccountComponent { - let library = agglayer_faucet_library(); +fn agglayer_faucet_component(storage_slots: Vec) -> AccountComponent { + let library = agglayer_faucet_component_library(); + let metadata = AccountComponentMetadata::new("agglayer::faucet", [AccountType::FungibleFaucet]) + .with_description("AggLayer faucet component with bridge validation"); - AccountComponent::new(library, storage_slots) - .expect("agglayer_faucet component should satisfy the requirements of a valid account component") - .with_supports_all_types() + AccountComponent::new(library, storage_slots, metadata).expect( + "agglayer_faucet component should satisfy the requirements of a valid account component", + ) } -/// Creates a combined Bridge Out component that includes both bridge_out and local_exit_tree -/// modules. -/// -/// This is a convenience function that creates a component with multiple modules. -/// For more fine-grained control, use the individual component functions and combine them -/// using the AccountBuilder pattern. -pub fn bridge_out_with_local_exit_tree_component( - storage_slots: Vec, -) -> Vec { - vec![ - bridge_out_component(storage_slots.clone()), - local_exit_tree_component(vec![]), // local_exit_tree typically doesn't need storage slots - ] -} +// FAUCET CONVERSION STORAGE HELPERS +// ================================================================================================ -/// Creates an Asset Conversion component with the specified storage slots. +/// Builds the two storage slot values for faucet conversion metadata. +/// +/// The conversion metadata is stored in two value storage slots: +/// - Slot 1 (`miden::agglayer::faucet::conversion_info_1`): `[addr0, addr1, addr2, addr3]` — first +/// 4 felts of the origin token address (5 × u32 limbs). 
+/// - Slot 2 (`miden::agglayer::faucet::conversion_info_2`): `[addr4, origin_network, scale, 0]` — +/// remaining address felt + origin network + scale factor. /// -/// This component uses the agglayer library (which includes asset_conversion) and can be added to -/// accounts that need to convert assets between Miden and Ethereum formats. -pub fn asset_conversion_component(storage_slots: Vec) -> AccountComponent { - let library = agglayer_library(); - - AccountComponent::new(library, storage_slots) - .expect("asset_conversion component should satisfy the requirements of a valid account component") - .with_supports_all_types() +/// # Parameters +/// - `origin_token_address`: The EVM token address in Ethereum format +/// - `origin_network`: The origin network/chain ID +/// - `scale`: The decimal scaling factor (exponent for 10^scale) +/// +/// # Returns +/// A tuple of two `Word` values representing the two storage slot contents. +fn agglayer_faucet_conversion_slots( + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, +) -> (Word, Word) { + let addr_elements = origin_token_address.to_elements(); + + let slot1 = Word::new([addr_elements[0], addr_elements[1], addr_elements[2], addr_elements[3]]); + + let slot2 = + Word::new([addr_elements[4], Felt::from(origin_network), Felt::from(scale), Felt::ZERO]); + + (slot1, slot2) } -// AGGLAYER ACCOUNT CREATION HELPERS +// AGGLAYER FAUCET STRUCT // ================================================================================================ -/// Creates a bridge account component with the standard bridge storage slot. 
+static AGGLAYER_FAUCET_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::faucet") + .expect("agglayer faucet storage slot name should be valid") +}); +static CONVERSION_INFO_1_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::faucet::conversion_info_1") + .expect("conversion info 1 storage slot name should be valid") +}); +static CONVERSION_INFO_2_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::agglayer::faucet::conversion_info_2") + .expect("conversion info 2 storage slot name should be valid") +}); + +/// An [`AccountComponent`] implementing the AggLayer Faucet. /// -/// This is a convenience function that creates the bridge storage slot with the standard -/// name "miden::agglayer::bridge" and returns the bridge_out component. +/// It reexports the procedures from `miden::agglayer::faucet`. When linking against this +/// component, the `agglayer` library must be available to the assembler. +/// The procedures of this component are: +/// - `claim`, which validates a CLAIM note against one of the stored GERs in the bridge. +/// - `asset_to_origin_asset`, which converts an asset to the origin asset (used in FPI from +/// bridge). +/// - `burn`, which burns an asset. /// -/// # Returns -/// Returns an [`AccountComponent`] configured for bridge operations with MMR validation. -pub fn create_bridge_account_component() -> AccountComponent { - let bridge_storage_slot_name = StorageSlotName::new("miden::agglayer::bridge") - .expect("Bridge storage slot name should be valid"); - let bridge_storage_slots = vec![StorageSlot::with_empty_map(bridge_storage_slot_name)]; - bridge_out_component(bridge_storage_slots) +/// ## Storage Layout +/// +/// - [`Self::metadata_slot`]: Stores [`TokenMetadata`]. +/// - [`Self::bridge_account_id_slot`]: Stores the AggLayer bridge account ID. +/// - [`Self::conversion_info_1_slot`]: Stores the first 4 felts of the origin token address. 
+/// - [`Self::conversion_info_2_slot`]: Stores the remaining 5th felt of the origin token address + +/// origin network + scale. +#[derive(Debug, Clone)] +pub struct AggLayerFaucet { + metadata: TokenMetadata, + bridge_account_id: AccountId, + origin_token_address: EthAddressFormat, + origin_network: u32, + scale: u8, +} + +impl AggLayerFaucet { + /// Creates a new AggLayer faucet component from the given configuration. + /// + /// # Errors + /// Returns an error if: + /// - The decimals parameter exceeds maximum value of [`TokenMetadata::MAX_DECIMALS`]. + /// - The max supply exceeds maximum possible amount for a fungible asset. + /// - The token supply exceeds the max supply. + pub fn new( + symbol: TokenSymbol, + decimals: u8, + max_supply: Felt, + token_supply: Felt, + bridge_account_id: AccountId, + origin_token_address: EthAddressFormat, + origin_network: u32, + scale: u8, + ) -> Result { + let metadata = TokenMetadata::with_supply(symbol, decimals, max_supply, token_supply)?; + Ok(Self { + metadata, + bridge_account_id, + origin_token_address, + origin_network, + scale, + }) + } + + /// Sets the token supply for an existing faucet (e.g. for testing scenarios). + /// + /// # Errors + /// Returns an error if the token supply exceeds the max supply. + pub fn with_token_supply(mut self, token_supply: Felt) -> Result { + self.metadata = self.metadata.with_token_supply(token_supply)?; + Ok(self) + } + + /// Storage slot name for [`TokenMetadata`]. + pub fn metadata_slot() -> &'static StorageSlotName { + TokenMetadata::metadata_slot() + } + + /// Storage slot name for the AggLayer bridge account ID. + pub fn bridge_account_id_slot() -> &'static StorageSlotName { + &AGGLAYER_FAUCET_SLOT_NAME + } + + /// Storage slot name for the first 4 felts of the origin token address. + pub fn conversion_info_1_slot() -> &'static StorageSlotName { + &CONVERSION_INFO_1_SLOT_NAME + } + + /// Storage slot name for the 5th felt of the origin token address, origin network, and scale. 
+ pub fn conversion_info_2_slot() -> &'static StorageSlotName { + &CONVERSION_INFO_2_SLOT_NAME + } +} + +impl From for AccountComponent { + fn from(faucet: AggLayerFaucet) -> Self { + let metadata_slot = StorageSlot::from(faucet.metadata); + + let bridge_account_id_word = AccountIdKey::new(faucet.bridge_account_id).as_word(); + let bridge_slot = + StorageSlot::with_value(AGGLAYER_FAUCET_SLOT_NAME.clone(), bridge_account_id_word); + + let (conversion_slot1_word, conversion_slot2_word) = agglayer_faucet_conversion_slots( + &faucet.origin_token_address, + faucet.origin_network, + faucet.scale, + ); + let conversion_slot1 = + StorageSlot::with_value(CONVERSION_INFO_1_SLOT_NAME.clone(), conversion_slot1_word); + let conversion_slot2 = + StorageSlot::with_value(CONVERSION_INFO_2_SLOT_NAME.clone(), conversion_slot2_word); + + let agglayer_storage_slots = + vec![metadata_slot, bridge_slot, conversion_slot1, conversion_slot2]; + agglayer_faucet_component(agglayer_storage_slots) + } } +// AGGLAYER ACCOUNT CREATION HELPERS +// ================================================================================================ + /// Creates an agglayer faucet account component with the specified configuration. 
/// /// This function creates all the necessary storage slots for an agglayer faucet: -/// - Network faucet metadata slot (max_supply, decimals, token_symbol) +/// - Network faucet metadata slot (token_supply, max_supply, decimals, token_symbol) /// - Bridge account reference slot for FPI validation +/// - Conversion info slot 1: first 4 felts of origin token address +/// - Conversion info slot 2: 5th address felt + origin network + scale /// /// # Parameters /// - `token_symbol`: The symbol for the fungible token (e.g., "AGG") /// - `decimals`: Number of decimal places for the token /// - `max_supply`: Maximum supply of the token +/// - `token_supply`: Initial outstanding token supply (0 for new faucets) /// - `bridge_account_id`: The account ID of the bridge account for validation +/// - `origin_token_address`: The EVM origin token address +/// - `origin_network`: The origin network/chain ID +/// - `scale`: The decimal scaling factor (exponent for 10^scale) /// /// # Returns /// Returns an [`AccountComponent`] configured for agglayer faucet operations. /// /// # Panics -/// Panics if the token symbol is invalid or storage slot names are malformed. -pub fn create_agglayer_faucet_component( +/// Panics if the token symbol is invalid or metadata validation fails. 
+fn create_agglayer_faucet_component( token_symbol: &str, decimals: u8, max_supply: Felt, + token_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, ) -> AccountComponent { - // Create network faucet metadata slot: [max_supply, decimals, token_symbol, 0] - let token_symbol = TokenSymbol::new(token_symbol).expect("Token symbol should be valid"); - let metadata_word = - Word::new([max_supply, Felt::from(decimals), token_symbol.into(), FieldElement::ZERO]); - let metadata_slot = - StorageSlot::with_value(NetworkFungibleFaucet::metadata_slot().clone(), metadata_word); - - // Create agglayer-specific bridge storage slot - let bridge_account_id_word = Word::new([ - Felt::new(0), - Felt::new(0), - bridge_account_id.suffix(), - bridge_account_id.prefix().as_felt(), - ]); - let agglayer_storage_slot_name = StorageSlotName::new("miden::agglayer::faucet") - .expect("Agglayer faucet storage slot name should be valid"); - let bridge_slot = StorageSlot::with_value(agglayer_storage_slot_name, bridge_account_id_word); - - // Combine all storage slots for the agglayer faucet component - let agglayer_storage_slots = vec![metadata_slot, bridge_slot]; - agglayer_faucet_component(agglayer_storage_slots) + let symbol = TokenSymbol::new(token_symbol).expect("token symbol should be valid"); + AggLayerFaucet::new( + symbol, + decimals, + max_supply, + token_supply, + bridge_account_id, + *origin_token_address, + origin_network, + scale, + ) + .expect("agglayer faucet metadata should be valid") + .into() } /// Creates a complete bridge account builder with the standard configuration. -pub fn create_bridge_account_builder(seed: Word) -> AccountBuilder { - let bridge_component = create_bridge_account_component(); +/// +/// The bridge starts with an empty faucet registry. Faucets are registered at runtime +/// via CONFIG_AGG_BRIDGE notes that call `bridge_config::register_faucet`. 
+fn create_bridge_account_builder( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> AccountBuilder { Account::builder(seed.into()) - .storage_mode(AccountStorageMode::Public) - .with_component(bridge_component) + .storage_mode(AccountStorageMode::Network) + .with_component(AggLayerBridge::new(bridge_admin_id, ger_manager_id)) } /// Creates a new bridge account with the standard configuration. /// /// This creates a new account suitable for production use. -pub fn create_bridge_account(seed: Word) -> Account { - create_bridge_account_builder(seed) +pub fn create_bridge_account( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> Account { + create_bridge_account_builder(seed, bridge_admin_id, ger_manager_id) .with_auth_component(AccountComponent::from(NoAuth)) .build() - .expect("Bridge account should be valid") + .expect("bridge account should be valid") } /// Creates an existing bridge account with the standard configuration. /// /// This creates an existing account suitable for testing scenarios. #[cfg(any(feature = "testing", test))] -pub fn create_existing_bridge_account(seed: Word) -> Account { - create_bridge_account_builder(seed) +pub fn create_existing_bridge_account( + seed: Word, + bridge_admin_id: AccountId, + ger_manager_id: AccountId, +) -> Account { + create_bridge_account_builder(seed, bridge_admin_id, ger_manager_id) .with_auth_component(AccountComponent::from(NoAuth)) .build_existing() - .expect("Bridge account should be valid") + .expect("bridge account should be valid") } /// Creates a complete agglayer faucet account builder with the specified configuration. 
-pub fn create_agglayer_faucet_builder( +#[allow(clippy::too_many_arguments)] +fn create_agglayer_faucet_builder( seed: Word, token_symbol: &str, decimals: u8, max_supply: Felt, + token_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, ) -> AccountBuilder { - let agglayer_component = - create_agglayer_faucet_component(token_symbol, decimals, max_supply, bridge_account_id); + let agglayer_component = create_agglayer_faucet_component( + token_symbol, + decimals, + max_supply, + token_supply, + bridge_account_id, + origin_token_address, + origin_network, + scale, + ); Account::builder(seed.into()) .account_type(AccountType::FungibleFaucet) @@ -311,307 +542,54 @@ pub fn create_agglayer_faucet( decimals: u8, max_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, ) -> Account { - create_agglayer_faucet_builder(seed, token_symbol, decimals, max_supply, bridge_account_id) - .with_auth_component(AccountComponent::from(NoAuth)) - .build() - .expect("Agglayer faucet account should be valid") + create_agglayer_faucet_builder( + seed, + token_symbol, + decimals, + max_supply, + Felt::ZERO, + bridge_account_id, + origin_token_address, + origin_network, + scale, + ) + .with_auth_component(AccountComponent::from(NoAuth)) + .build() + .expect("agglayer faucet account should be valid") } /// Creates an existing agglayer faucet account with the specified configuration. /// /// This creates an existing account suitable for testing scenarios. 
#[cfg(any(feature = "testing", test))] +#[allow(clippy::too_many_arguments)] pub fn create_existing_agglayer_faucet( seed: Word, token_symbol: &str, decimals: u8, max_supply: Felt, + token_supply: Felt, bridge_account_id: AccountId, + origin_token_address: &EthAddressFormat, + origin_network: u32, + scale: u8, ) -> Account { - create_agglayer_faucet_builder(seed, token_symbol, decimals, max_supply, bridge_account_id) - .with_auth_component(AccountComponent::from(NoAuth)) - .build_existing() - .expect("Agglayer faucet account should be valid") -} - -// AGGLAYER NOTE CREATION HELPERS -// ================================================================================================ - -/// Parameters for creating a CLAIM note. -/// -/// This struct groups all the parameters needed to create a CLAIM note that exactly -/// matches the agglayer claimAsset function signature. -pub struct ClaimNoteParams<'a, R: FeltRng> { - /// AGGLAYER claimAsset function parameters - /// SMT proof for local exit root (bytes32\[_DEPOSIT_CONTRACT_TREE_DEPTH\]) - pub smt_proof_local_exit_root: Vec, - /// SMT proof for rollup exit root (bytes32\[_DEPOSIT_CONTRACT_TREE_DEPTH\]) - pub smt_proof_rollup_exit_root: Vec, - /// Global index (uint256 as 8 u32 felts) - pub global_index: [Felt; 8], - /// Mainnet exit root hash (bytes32 as 32-byte array) - pub mainnet_exit_root: &'a [u8; 32], - /// Rollup exit root hash (bytes32 as 32-byte array) - pub rollup_exit_root: &'a [u8; 32], - /// Origin network identifier (uint32) - pub origin_network: Felt, - /// Origin token address (address as 20-byte array) - pub origin_token_address: &'a [u8; 20], - /// Destination network identifier (uint32) - pub destination_network: Felt, - /// Destination address (address as 20-byte array) - pub destination_address: &'a [u8; 20], - /// Amount of tokens (uint256 as 8 u32 felts) - pub amount: [Felt; 8], - /// ABI encoded metadata (fixed size of 8 felts) - pub metadata: [Felt; 8], - /// CLAIM note required parameters - 
/// CLAIM note sender account id - pub claim_note_creator_account_id: AccountId, - /// Agglayer faucet AccountId - pub agglayer_faucet_account_id: AccountId, - /// Output P2ID note tag - pub output_note_tag: NoteTag, - /// P2ID note serial number (4 felts as Word) - pub p2id_serial_number: Word, - /// TODO: remove and use destination_address: [u8; 20] - pub destination_account_id: AccountId, - /// RNG for creating CLAIM note serial number - pub rng: &'a mut R, -} - -/// Generates a CLAIM note - a note that instructs an agglayer faucet to validate and mint assets. -/// -/// # Parameters -/// - `params`: The parameters for creating the CLAIM note (including RNG) -/// -/// # Errors -/// Returns an error if note creation fails. -pub fn create_claim_note(params: ClaimNoteParams<'_, R>) -> Result { - // Validate SMT proof lengths - each should be 256 felts (32 bytes32 values * 8 u32 per bytes32) - if params.smt_proof_local_exit_root.len() != 256 { - return Err(NoteError::other(alloc::format!( - "SMT proof local exit root must be exactly 256 felts, got {}", - params.smt_proof_local_exit_root.len() - ))); - } - if params.smt_proof_rollup_exit_root.len() != 256 { - return Err(NoteError::other(alloc::format!( - "SMT proof rollup exit root must be exactly 256 felts, got {}", - params.smt_proof_rollup_exit_root.len() - ))); - } - // Create claim inputs matching exactly the agglayer claimAsset function parameters - let mut claim_inputs = vec![]; - - // 1) PROOF DATA - // smtProofLocalExitRoot (256 felts) - first SMT proof parameter - claim_inputs.extend(params.smt_proof_local_exit_root); - // smtProofRollupExitRoot (256 felts) - second SMT proof parameter - claim_inputs.extend(params.smt_proof_rollup_exit_root); - - // globalIndex (uint256 as 8 u32 felts) - claim_inputs.extend(params.global_index); - - // mainnetExitRoot (bytes32 as 8 u32 felts) - let mainnet_exit_root_felts = bytes32_to_felts(params.mainnet_exit_root); - claim_inputs.extend(mainnet_exit_root_felts); - - // 
rollupExitRoot (bytes32 as 8 u32 felts) - let rollup_exit_root_felts = bytes32_to_felts(params.rollup_exit_root); - claim_inputs.extend(rollup_exit_root_felts); - - // 2) LEAF DATA - // originNetwork (uint32 as Felt) - claim_inputs.push(params.origin_network); - - // originTokenAddress (address as 5 u32 felts) - let origin_token_address_felts = - EthAddressFormat::new(*params.origin_token_address).to_elements().to_vec(); - claim_inputs.extend(origin_token_address_felts); - - // destinationNetwork (uint32 as Felt) - claim_inputs.push(params.destination_network); - - // destinationAddress (address as 5 u32 felts) - // Use AccountId prefix and suffix directly to get [suffix, prefix, 0, 0, 0] - // TODO: refactor to use destination_address: [u8; 20] instead once conversion function - // exists [u8; 20] -> [address as 5 Felts] - let destination_address_felts = vec![ - params.destination_account_id.prefix().as_felt(), - params.destination_account_id.suffix(), - Felt::new(0), - Felt::new(0), - Felt::new(0), - ]; - claim_inputs.extend(destination_address_felts); - - // amount (uint256 as 8 u32 felts) - claim_inputs.extend(params.amount); - - // metadata (fixed size of 8 felts) - claim_inputs.extend(params.metadata); - - let padding = vec![Felt::ZERO; 4]; - claim_inputs.extend(padding); - - // 3) CLAIM NOTE DATA - // TODO: deterministically compute serial number of p2id hash(GER, leaf index) - // output_p2id_serial_num (4 felts as Word) - claim_inputs.extend(params.p2id_serial_number); - - // agglayer_faucet_account_id (2 felts: prefix and suffix) - claim_inputs.push(params.agglayer_faucet_account_id.prefix().as_felt()); - claim_inputs.push(params.agglayer_faucet_account_id.suffix()); - - // output note tag - claim_inputs.push(params.output_note_tag.as_u32().into()); - - let inputs = NoteInputs::new(claim_inputs)?; - - let tag = NoteTag::with_account_target(params.agglayer_faucet_account_id); - - let claim_script = claim_script(); - let serial_num = params.rng.draw_word(); - 
- let note_type = NoteType::Public; - - let attachment = - NetworkAccountTarget::new(params.agglayer_faucet_account_id, NoteExecutionHint::Always) - .map_err(|e| NoteError::other(e.to_string()))? - .into(); - // Use a default sender since we don't have sender anymore - create from destination address - let metadata = NoteMetadata::new(params.claim_note_creator_account_id, note_type, tag) - .with_attachment(attachment); - let assets = NoteAssets::new(vec![])?; - let recipient = NoteRecipient::new(serial_num, claim_script, inputs); - - Ok(Note::new(assets, metadata, recipient)) -} - -// TESTING HELPERS -// ================================================================================================ - -#[cfg(any(feature = "testing", test))] -/// Type alias for the complex return type of claim_note_test_inputs. -/// -/// Contains: -/// - smt_proof_local_exit_root: `Vec` (256 felts) -/// - smt_proof_rollup_exit_root: `Vec` (256 felts) -/// - global_index: [Felt; 8] -/// - mainnet_exit_root: [u8; 32] -/// - rollup_exit_root: [u8; 32] -/// - origin_network: Felt -/// - origin_token_address: [u8; 20] -/// - destination_network: Felt -/// - destination_address: [u8; 20] -/// - amount: [Felt; 8] -/// - metadata: [Felt; 8] -pub type ClaimNoteTestInputs = ( - Vec, - Vec, - [Felt; 8], - [u8; 32], - [u8; 32], - Felt, - [u8; 20], - Felt, - [u8; 20], - [Felt; 8], - [Felt; 8], -); - -#[cfg(any(feature = "testing", test))] -/// Returns dummy test inputs for creating CLAIM notes. -/// -/// This is a convenience function for testing that provides realistic dummy data -/// for all the agglayer claimAsset function inputs. 
-/// -/// # Parameters -/// - `amount`: The amount as a single Felt for Miden operations -/// - `destination_account_id`: The destination account ID to convert to address bytes -/// -/// # Returns -/// A tuple containing: -/// - smt_proof_local_exit_root: `Vec` (256 felts) -/// - smt_proof_rollup_exit_root: `Vec` (256 felts) -/// - global_index: [Felt; 8] -/// - mainnet_exit_root: [u8; 32] -/// - rollup_exit_root: [u8; 32] -/// - origin_network: Felt -/// - origin_token_address: [u8; 20] -/// - destination_network: Felt -/// - destination_address: [u8; 20] -/// - amount: [Felt; 8] -/// - metadata: [Felt; 8] -pub fn claim_note_test_inputs( - amount: Felt, - destination_account_id: AccountId, -) -> ClaimNoteTestInputs { - // Create SMT proofs with 256 felts each (32 bytes32 values * 8 u32 per bytes32) - let smt_proof_local_exit_root = vec![Felt::new(0); 256]; - let smt_proof_rollup_exit_root = vec![Felt::new(0); 256]; - let global_index = [ - Felt::new(12345), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - ]; - - let mainnet_exit_root: [u8; 32] = [ - 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, - 0x77, 0x88, - ]; - - let rollup_exit_root: [u8; 32] = [ - 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, - 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, - ]; - - let origin_network = Felt::new(1); - - let origin_token_address: [u8; 20] = [ - 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x88, 0x99, 0xaa, 0xbb, 0xcc, - ]; - - let destination_network = Felt::new(2); - - // Convert AccountId to destination address bytes - let destination_address = - EthAddressFormat::from_account_id(destination_account_id).into_bytes(); - - // 
Convert amount Felt to u256 array for agglayer - let amount_u256 = [ - amount, - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - Felt::new(0), - ]; - let metadata: [Felt; 8] = [Felt::new(0); 8]; - - ( - smt_proof_local_exit_root, - smt_proof_rollup_exit_root, - global_index, - mainnet_exit_root, - rollup_exit_root, - origin_network, + create_agglayer_faucet_builder( + seed, + token_symbol, + decimals, + max_supply, + token_supply, + bridge_account_id, origin_token_address, - destination_network, - destination_address, - amount_u256, - metadata, + origin_network, + scale, ) + .with_auth_component(AccountComponent::from(NoAuth)) + .build_existing() + .expect("agglayer faucet account should be valid") } diff --git a/crates/miden-agglayer/src/update_ger_note.rs b/crates/miden-agglayer/src/update_ger_note.rs new file mode 100644 index 0000000000..07246db9f6 --- /dev/null +++ b/crates/miden-agglayer/src/update_ger_note.rs @@ -0,0 +1,115 @@ +//! UPDATE_GER note creation utilities. +//! +//! This module provides helpers for creating UPDATE_GER notes, +//! which are used to update the Global Exit Root in the bridge account. 
+ +extern crate alloc; + +use alloc::string::ToString; +use alloc::vec; + +use miden_assembly::serde::Deserializable; +use miden_core::Word; +use miden_core::program::Program; +use miden_protocol::account::AccountId; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteType, +}; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; +use miden_utils_sync::LazyLock; + +use crate::ExitRoot; + +// NOTE SCRIPT +// ================================================================================================ + +// Initialize the UPDATE_GER note script only once +static UPDATE_GER_SCRIPT: LazyLock = LazyLock::new(|| { + let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/UPDATE_GER.masb")); + let program = + Program::read_from_bytes(bytes).expect("shipped UPDATE_GER script is well-formed"); + NoteScript::new(program) +}); + +// UPDATE_GER NOTE +// ================================================================================================ + +/// UPDATE_GER note. +/// +/// This note is used to update the Global Exit Root (GER) in the bridge account. +/// It carries the new GER data and is always public. +pub struct UpdateGerNote; + +impl UpdateGerNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items for an UPDATE_GER note. + pub const NUM_STORAGE_ITEMS: usize = 8; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the UPDATE_GER note script. + pub fn script() -> NoteScript { + UPDATE_GER_SCRIPT.clone() + } + + /// Returns the UPDATE_GER note script root. 
+ pub fn script_root() -> Word { + UPDATE_GER_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Creates an UPDATE_GER note with the given GER (Global Exit Root) data. + /// + /// The note storage contains 8 felts: GER[0..7] + /// + /// # Parameters + /// - `ger`: The Global Exit Root data + /// - `sender_account_id`: The account ID of the note creator + /// - `target_account_id`: The account ID that will consume this note (bridge account) + /// - `rng`: Random number generator for creating the note serial number + /// + /// # Errors + /// Returns an error if note creation fails. + pub fn create( + ger: ExitRoot, + sender_account_id: AccountId, + target_account_id: AccountId, + rng: &mut R, + ) -> Result { + // Create note storage with 8 felts: GER[0..7] + let storage_values = ger.to_elements().to_vec(); + + let note_storage = NoteStorage::new(storage_values)?; + + // Generate a serial number for the note + let serial_num = rng.draw_word(); + + let recipient = NoteRecipient::new(serial_num, Self::script(), note_storage); + + let attachment = NoteAttachment::from( + NetworkAccountTarget::new(target_account_id, NoteExecutionHint::Always) + .map_err(|e| NoteError::other(e.to_string()))?, + ); + let metadata = + NoteMetadata::new(sender_account_id, NoteType::Public).with_attachment(attachment); + + // UPDATE_GER notes don't carry assets + let assets = NoteAssets::new(vec![])?; + + Ok(Note::new(assets, metadata, recipient)) + } +} diff --git a/crates/miden-agglayer/src/utils.rs b/crates/miden-agglayer/src/utils.rs deleted file mode 100644 index 88850de58c..0000000000 --- a/crates/miden-agglayer/src/utils.rs +++ /dev/null @@ -1,28 +0,0 @@ -use miden_core::FieldElement; -use miden_protocol::Felt; - -// UTILITY FUNCTIONS -// ================================================================================================ - -/// Converts a bytes32 value (32 bytes) into an array of 8 Felt 
values. -/// -/// Note: These utility functions will eventually be replaced with similar functions from miden-vm. -pub fn bytes32_to_felts(bytes32: &[u8; 32]) -> [Felt; 8] { - let mut result = [Felt::ZERO; 8]; - for (i, chunk) in bytes32.chunks(4).enumerate() { - let value = u32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]]); - result[i] = Felt::from(value); - } - result -} - -/// Convert 8 Felt values (u32 limbs in little-endian order) to U256 bytes in little-endian format. -pub fn felts_to_u256_bytes(limbs: [Felt; 8]) -> [u8; 32] { - let mut bytes = [0u8; 32]; - for (i, limb) in limbs.iter().enumerate() { - let u32_value = limb.as_int() as u32; - let limb_bytes = u32_value.to_le_bytes(); - bytes[i * 4..(i + 1) * 4].copy_from_slice(&limb_bytes); - } - bytes -} diff --git a/crates/miden-block-prover/src/local_block_prover.rs b/crates/miden-block-prover/src/local_block_prover.rs index 4960d1a593..2af43f1856 100644 --- a/crates/miden-block-prover/src/local_block_prover.rs +++ b/crates/miden-block-prover/src/local_block_prover.rs @@ -24,7 +24,7 @@ impl LocalBlockProver { pub fn prove( &self, _tx_batches: OrderedBatches, - _block_header: BlockHeader, + _block_header: &BlockHeader, _block_inputs: BlockInputs, ) -> Result { Ok(BlockProof {}) diff --git a/crates/miden-protocol-macros/tests/integration_test.rs b/crates/miden-protocol-macros/tests/integration_test.rs index 05a925e4d5..46f807852f 100644 --- a/crates/miden-protocol-macros/tests/integration_test.rs +++ b/crates/miden-protocol-macros/tests/integration_test.rs @@ -1,6 +1,6 @@ #[cfg(test)] mod tests { - use miden_protocol::{Felt, FieldElement, Word}; + use miden_protocol::{Felt, Word}; use miden_protocol_macros::WordWrapper; #[derive(Debug, Clone, Copy, PartialEq, Eq, WordWrapper)] diff --git a/crates/miden-protocol/Cargo.toml b/crates/miden-protocol/Cargo.toml index ea4b9c629d..74f08d132b 100644 --- a/crates/miden-protocol/Cargo.toml +++ b/crates/miden-protocol/Cargo.toml @@ -31,7 +31,7 @@ std = [ 
"miden-processor/std", "miden-verifier/std", ] -testing = ["dep:rand_chacha", "dep:rand_xoshiro", "dep:winter-rand-utils", "miden-air/testing"] +testing = ["dep:rand_chacha", "dep:rand_xoshiro", "miden-core/testing", "miden-crypto/std"] [dependencies] # Miden dependencies @@ -45,16 +45,15 @@ miden-processor = { workspace = true } miden-protocol-macros = { workspace = true } miden-utils-sync = { workspace = true } miden-verifier = { workspace = true } -winter-rand-utils = { optional = true, version = "0.13" } # External dependencies bech32 = { default-features = false, features = ["alloc"], version = "0.11" } rand = { workspace = true } rand_xoshiro = { default-features = false, optional = true, version = "0.7" } semver = { features = ["serde"], version = "1.0" } -serde = { features = ["derive"], optional = true, version = "1.0" } +serde = { features = ["derive"], optional = true, workspace = true } thiserror = { workspace = true } -toml = { optional = true, version = "0.9" } +toml = { optional = true, version = "1.0" } # for SecretKey generation rand_chacha = { optional = true, workspace = true } @@ -64,18 +63,15 @@ getrandom = { features = ["wasm_js"], version = "0.3" } [dev-dependencies] anyhow = { features = ["backtrace", "std"], workspace = true } assert_matches = { workspace = true } +color-eyre = { version = "0.5" } criterion = { default-features = false, features = ["html_reports"], version = "0.5" } miden-protocol = { features = ["testing"], path = "." 
} pprof = { default-features = false, features = ["criterion", "flamegraph"], version = "0.15" } rstest = { workspace = true } tempfile = { version = "3.19" } -winter-air = { version = "0.13" } -# for HashFunction/ExecutionProof::new_dummy -color-eyre = { version = "0.5" } -miden-air = { features = ["std", "testing"], workspace = true } [build-dependencies] -fs-err = { version = "3" } +fs-err = { workspace = true } miden-assembly = { workspace = true } miden-core = { workspace = true } miden-core-lib = { workspace = true } diff --git a/crates/miden-protocol/README.md b/crates/miden-protocol/README.md index c731a35159..657155802f 100644 --- a/crates/miden-protocol/README.md +++ b/crates/miden-protocol/README.md @@ -11,13 +11,13 @@ Here is a broad overview of each module, with links to additional documentation. Structures used to define accounts, including abstractions over its code, storage, and vault. -[Documentation](https://0xmiden.github.io/miden-base/account/overview.html). +[Documentation](https://0xmiden.github.io/protocol/account/overview.html). ### Assets Structures used to define fungible and non-fungible assets. Accounts own assets and store them in their vaults. -[Documentation](https://0xMiden.github.io/miden-base/asset.html) +[Documentation](https://0xMiden.github.io/protocol/asset.html) ### Block @@ -28,13 +28,13 @@ Structures used to define a block. These objects contain authentication structur Structures used to define notes. Notes are messages that contain code and assets. They describe their own behavior and allow for interaction among accounts. Notes come in multiple flavors, refer to the docs for additional details. -[Documentation](https://0xMiden.github.io/miden-base/note.html) +[Documentation](https://0xMiden.github.io/protocol/note.html) ### Transaction Structures used to define Miden blockchain transactions. Transactions describe changes to an account, and may include consumption and production of notes. 
The objects in this module allow for the representation of transactions at multiple stages of its lifecycle, from creation, to data aggregation, execution with trace collection, and finally an executed transaction with a corresponding STARK proof. -[Documentation](https://0xMiden.github.io/miden-base/transaction.html). +[Documentation](https://0xMiden.github.io/protocol/transaction.html). ## Features diff --git a/crates/miden-protocol/asm/kernels/transaction/api.masm b/crates/miden-protocol/asm/kernels/transaction/api.masm index a67556ad67..0ee3b5d49a 100644 --- a/crates/miden-protocol/asm/kernels/transaction/api.masm +++ b/crates/miden-protocol/asm/kernels/transaction/api.masm @@ -1,13 +1,18 @@ +use $kernel::asset use $kernel::account use $kernel::account_delta use $kernel::account_id use $kernel::faucet use $kernel::input_note use $kernel::memory -use $kernel::note use $kernel::output_note use $kernel::tx +use $kernel::memory::UPCOMING_FOREIGN_PROCEDURE_PTR +use $kernel::memory::UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR + +use miden::core::word + # NOTE # ================================================================================================= # `exec_kernel_proc` procedure is expected to be invoked using a `syscall` instruction. It makes # @@ -17,14 +22,13 @@ use $kernel::tx # the memory. 
# # ================================================================================================= -# ERRORS +# TYPE ALIASES # ================================================================================================= -const ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED="for faucets the FAUCET_STORAGE_DATA_SLOT storage slot is reserved and can not be used with set_account_item" +type AccountID = struct { prefix: felt, suffix: felt } -const ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET="the faucet_get_total_fungible_asset_issuance procedure can only be called on a fungible faucet" - -const ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET="the faucet_is_non_fungible_asset_issued procedure can only be called on a non-fungible faucet" +# ERRORS +# ================================================================================================= const ERR_KERNEL_PROCEDURE_OFFSET_OUT_OF_BOUNDS="provided kernel procedure offset is out of bounds" @@ -34,12 +38,16 @@ const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_RECIPIENT_WHILE_NO_NOTE_BEING_PROCESSED="f const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_METADATA_WHILE_NO_NOTE_BEING_PROCESSED="failed to access note metadata of active note because no note is currently being processed" -const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_INPUTS_WHILE_NO_NOTE_BEING_PROCESSED="failed to access note inputs of active note because no note is currently being processed" +const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_STORAGE_WHILE_NO_NOTE_BEING_PROCESSED="failed to access note storage of active note because no note is currently being processed" const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SCRIPT_ROOT_WHILE_NO_NOTE_BEING_PROCESSED="failed to access note script root of active note because no note is currently being processed" const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SERIAL_NUMBER_WHILE_NO_NOTE_BEING_PROCESSED="failed to access note serial number of active note because no note is currently being processed" +const 
ERR_FOREIGN_ACCOUNT_PROCEDURE_ROOT_IS_ZERO="root of the provided foreign procedure equals zero indicating that tx_prepare_fpi was not called" + +const ERR_FOREIGN_ACCOUNT_ID_IS_ZERO="ID of the provided foreign account equals zero indicating that tx_prepare_fpi was not called" + # AUTHENTICATION # ================================================================================================= @@ -161,7 +169,7 @@ end #! Returns the ID of the specified account. #! #! Inputs: [is_native, pad(15)] -#! Outputs: [account_id_prefix, account_id_suffix, pad(14)] +#! Outputs: [account_id_suffix, account_id_prefix, pad(14)] #! #! Where: #! - is_native is a boolean flag that indicates whether the account ID was requested for the native @@ -172,38 +180,38 @@ end pub proc account_get_id # get the native account ID exec.memory::get_native_account_id - # => [native_account_id_prefix, native_account_id_suffix, is_native, pad(15)] + # => [native_account_id_suffix, native_account_id_prefix, is_native, pad(15)] # get the active account ID exec.account::get_id # => [ - # active_account_id_prefix, active_account_id_suffix, - # native_account_id_prefix, native_account_id_suffix, + # active_account_id_suffix, active_account_id_prefix, + # native_account_id_suffix, native_account_id_prefix, # is_native, pad(15) # ] # prepare the stack for the first cdrop movup.2 dup.4 # => [ - # is_native, native_account_id_prefix, active_account_id_prefix, - # active_account_id_suffix, native_account_id_suffix, is_native, pad(15) + # is_native, native_account_id_suffix, active_account_id_suffix, + # active_account_id_prefix, native_account_id_prefix, is_native, pad(15) # ] - # drop the prefix corresponding to the is_native flag + # drop the suffix corresponding to the is_native flag cdrop - # => [account_id_prefix, active_account_id_suffix, native_account_id_suffix, is_native, pad(15)] + # => [account_id_suffix, active_account_id_prefix, native_account_id_prefix, is_native, pad(15)] # prepare the stack 
for the second cdrop movdn.3 swap movup.2 - # => [is_native, native_account_id_suffix, active_account_id_suffix, account_id_prefix, pad(15)] + # => [is_native, native_account_id_prefix, active_account_id_prefix, account_id_suffix, pad(15)] - # drop the suffix corresponding to the is_native flag + # drop the prefix corresponding to the is_native flag cdrop - # => [account_id_suffix, account_id_prefix, pad(15)] + # => [account_id_prefix, account_id_suffix, pad(15)] - # rearrange the ID parts and truncate the stack + # rearrange the ID parts and truncate the stack swap movup.2 drop - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] end #! Returns the active account nonce. @@ -326,11 +334,11 @@ end #! Gets an item from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, pad(14)] +#! Inputs: [slot_id_suffix, slot_id_prefix, pad(14)] #! Outputs: [VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -341,7 +349,7 @@ end pub proc account_get_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, pad(14)] + # => [slot_id_suffix, slot_id_prefix, pad(14)] # fetch the account storage item exec.account::get_item @@ -354,11 +362,11 @@ end #! Sets an item in the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] #! Outputs: [OLD_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! 
the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. @@ -366,28 +374,16 @@ end #! Panics if: #! - a slot with the provided slot ID does not exist in account storage. #! - the invocation of this procedure does not originate from the native account. -#! - the native account is a faucet and the provided slot ID points to the reserved faucet storage slot. #! #! Invocation: dynexec pub proc account_set_item # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] - - # if the transaction is being executed against a faucet account then assert - # the slot that is being written to is not the reserved faucet slot. - dup.1 dup.1 exec.account::is_faucet_storage_data_slot - # => [is_faucet_storage_data_slot, slot_id_prefix, slot_id_suffix, VALUE, pad(10)] - - exec.account::get_id swap drop exec.account_id::is_faucet - # => [is_faucet_account, is_faucet_storage_data_slot, slot_id_prefix, slot_id_suffix, VALUE, pad(10)] - - and assertz.err=ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] # set the account storage item exec.account::set_item @@ -397,11 +393,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the account #! storage slot identified by the slot ID. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, pad(10)] #! Outputs: [VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! 
- slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -413,7 +409,7 @@ end pub proc account_get_map_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, pad(10)] + # => [slot_id_suffix, slot_id_prefix, KEY, pad(10)] # fetch the map item from account storage exec.account::get_map_item @@ -422,11 +418,11 @@ end #! Gets an item from the account storage at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix, pad(14)] +#! Inputs: [slot_id_suffix, slot_id_prefix, pad(14)] #! Outputs: [INIT_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -437,7 +433,7 @@ end pub proc account_get_initial_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, pad(14)] + # => [slot_id_suffix, slot_id_prefix, pad(14)] # fetch the initial account storage item exec.account::get_initial_item @@ -451,11 +447,11 @@ end #! Returns the initial VALUE located under the specified KEY within the map contained in the #! account storage slot identified by the slot ID at the beginning of the transaction. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, pad(10)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, pad(10)] #! Outputs: [INIT_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! 
- slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - INIT_VALUE is the initial value of the map item at KEY at the beginning of the transaction. @@ -468,7 +464,7 @@ end pub proc account_get_initial_map_item # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, pad(10)] + # => [slot_id_suffix, slot_id_prefix, KEY, pad(10)] # fetch the initial map item from account storage exec.account::get_initial_map_item @@ -478,11 +474,11 @@ end #! Stores NEW_VALUE under the specified KEY within the map contained in the given account storage #! slot. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] #! Outputs: [OLD_VALUE, pad(12)] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - NEW_VALUE is the value of the new map item for the respective KEY. @@ -499,11 +495,11 @@ end pub proc account_set_map_item # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] + # => [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE, pad(6)] + # => [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE, pad(6)] # set the new map item exec.account::set_map_item @@ -554,15 +550,16 @@ end #! 
Adds the specified asset to the vault. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET', pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE', pad(12)] #! #! Where: -#! - ASSET is the asset to add to the vault. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset to add to the vault. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! - If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. @@ -575,24 +572,25 @@ end pub proc account_add_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # add the specified asset to the account vault, emitting the corresponding events exec.account::add_asset_to_vault - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] end #! Removes the specified asset from the vault. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. #! #! Panics if: #! 
- the fungible asset is not found in the vault. @@ -604,74 +602,53 @@ end pub proc account_remove_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # remove the specified asset from the account vault, emitting the corresponding events exec.account::remove_asset_from_vault - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end -#! Returns the balance of the fungible asset associated with the provided faucet_id in the active -#! account's vault. +#! Returns the asset associated with the provided asset vault key in the active account's vault. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, pad(14)] -#! Outputs: [balance, pad(15)] +#! Inputs: [ASSET_KEY, pad(12)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - balance is the vault balance of the fungible asset. -#! -#! Panics if: -#! - the provided faucet ID is not an ID of a fungible faucet. +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. #! #! Invocation: dynexec -pub proc account_get_balance - exec.account::get_balance - # => [balance, pad(15)] -end +pub proc account_get_asset + exec.asset::validate_key + # => [ASSET_KEY, pad(12)] -#! Returns the balance of the fungible asset associated with the provided faucet_id in the active -#! account's vault at the beginning of the transaction. -#! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, pad(14)] -#! Outputs: [init_balance, pad(15)] -#! -#! Where: -#! 
- faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - init_balance is the vault balance of the fungible asset at the beginning of the transaction. -#! -#! Panics if: -#! - the provided faucet ID is not an ID of a fungible faucet. -#! -#! Invocation: dynexec -pub proc account_get_initial_balance - exec.account::get_initial_balance - # => [init_balance, pad(15)] + exec.account::get_asset + # => [ASSET_VALUE, pad(12)] end -#! Returns a boolean indicating whether the non-fungible asset is present in the active account's -#! vault. +#! Returns the asset associated with the provided asset vault key in the active account's vault at +#! the beginning of the transaction. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [has_asset, pad(15)] +#! Inputs: [ASSET_KEY, pad(12)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: -#! - ASSET is the non-fungible asset of interest. -#! - has_asset is a boolean indicating whether the account vault has the asset of interest. -#! -#! Panics if: -#! - the ASSET is a fungible asset. +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. #! #! Invocation: dynexec -pub proc account_has_non_fungible_asset - exec.account::has_non_fungible_asset - # => [has_asset, pad(15)] +pub proc account_get_initial_asset + exec.asset::validate_key + # => [ASSET_KEY, pad(12)] + + exec.account::get_initial_asset + # => [ASSET_VALUE, pad(12)] end #! Returns 1 if a native account procedure was called during transaction execution, and 0 otherwise. @@ -764,11 +741,15 @@ end #! Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [NEW_ASSET_VALUE, pad(12)] #! #! Where: -#! - ASSET is the asset that was minted. +#! 
- ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset that was minted. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -785,24 +766,25 @@ end pub proc faucet_mint_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # mint the asset exec.faucet::mint - # => [ASSET, pad(12)] + # => [NEW_ASSET_VALUE, pad(12)] end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE, pad(12)] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset that was burned. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -820,73 +802,33 @@ end pub proc faucet_burn_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # authenticate that the procedure invocation originates from the account context exec.authenticate_account_origin - # => [ASSET, pad(12)] + # => [ASSET_KEY, ASSET_VALUE, pad(8)] # burn the asset exec.faucet::burn - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end -#! Returns the total issuance of the fungible faucet the transaction is being executed against. +#! Returns whether the active account defines callbacks. #! #! 
Inputs: [pad(16)] -#! Outputs: [total_issuance, pad(15)] -#! -#! Where: -#! - total_issuance is the total issuance of the fungible faucet the transaction is being executed -#! against. -#! -#! Panics if: -#! - the transaction is not being executed against a fungible faucet. -#! -#! Invocation: dynexec -pub proc faucet_get_total_fungible_asset_issuance - # assert that we are executing a transaction against a fungible faucet (access checks) - exec.account::get_id swap drop exec.account_id::is_fungible_faucet - assert.err=ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET - # => [pad(16)] - - # get the total issuance - exec.faucet::get_total_issuance - # => [total_issuance, pad(16)] - - # truncate the stack - swap drop - # => [total_issuance, pad(15)] -end - -#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by -#! this faucet. -#! -#! Inputs: [ASSET, pad(12)] -#! Outputs: [is_issued, pad(15)] +#! Outputs: [has_callbacks, pad(15)] #! #! Where: -#! - ASSET is the non-fungible asset that is being checked. -#! - is_issued is a boolean indicating whether the non-fungible asset has been issued. -#! -#! Panics if: -#! - the ASSET is a fungible asset. -#! - the ASSET is not associated with the faucet the transaction is being executed against. +#! - has_callbacks is 1 if the account defines callbacks, 0 otherwise. #! #! 
Invocation: dynexec -pub proc faucet_is_non_fungible_asset_issued - # assert that we are executing a transaction against a non-fungible faucet (access checks) - exec.account::get_id swap drop exec.account_id::is_non_fungible_faucet - assert.err=ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET - # => [ASSET, pad(12)] - - # get the issuance flag - exec.faucet::is_non_fungible_asset_issued - # => [is_issued, pad(16)] +pub proc faucet_has_callbacks + exec.account::has_callbacks + # => [has_callbacks, pad(16)] # truncate the stack swap drop - # => [is_issued, pad(15)] + # => [has_callbacks, pad(15)] end # INPUT NOTE @@ -907,7 +849,7 @@ end #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! - is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! - is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec @@ -941,11 +883,11 @@ end #! the active note or from the note with the specified index. #! - note_index is the index of the input note whose assets info should be returned. Notice that if #! is_active_note is 1, note_index is ignored. -#! - RECIPIENT is the commitment to the input note's script, inputs, the serial number. +#! - RECIPIENT is the commitment to the input note's script, storage, the serial number. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! - is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! - is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec @@ -984,7 +926,7 @@ end #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! 
- is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! - is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec @@ -1030,7 +972,7 @@ end #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! - is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! - is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec @@ -1056,26 +998,26 @@ pub proc input_note_get_serial_number # => [SERIAL_NUMBER, pad(12)] end -#! Returns the inputs commitment and length of the specified input note. +#! Returns the storage commitment and length of the specified input note. #! #! Inputs: [is_active_note, note_index, pad(14)] -#! Outputs: [NOTE_INPUTS_COMMITMENT, num_inputs, pad(11)] +#! Outputs: [NOTE_STORAGE_COMMITMENT, num_storage_items, pad(11)] #! #! Where: -#! - is_active_note is the boolean flag indicating whether we should return the inputs commitment +#! - is_active_note is the boolean flag indicating whether we should return the storage commitment #! and length from the active note or from the note with the specified index. #! - note_index is the index of the input note whose data should be returned. Notice that if #! is_active_note is 1, note_index is ignored. -#! - NOTE_INPUTS_COMMITMENT is the inputs commitment of the specified input note. -#! - num_inputs is the number of inputs of the specified input note. +#! - NOTE_STORAGE_COMMITMENT is the storage commitment of the specified input note. +#! - num_storage_items is the number of storage items of the specified input note. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! - is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! 
- is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec -pub proc input_note_get_inputs_info +pub proc input_note_get_storage_info # get the input note pointer depending on whether the requested note is current or it was # requested by index. exec.get_requested_note_ptr @@ -1083,22 +1025,22 @@ pub proc input_note_get_inputs_info # assert the pointer is not zero - this would suggest the procedure has been called from an # incorrect context - dup neq.0 assert.err=ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_INPUTS_WHILE_NO_NOTE_BEING_PROCESSED + dup neq.0 assert.err=ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_STORAGE_WHILE_NO_NOTE_BEING_PROCESSED # => [input_note_ptr, pad(15)] - # get the note inputs length - dup exec.memory::get_input_note_num_inputs swap - # => [input_note_ptr, num_inputs, pad(16)] + # get the note's number of storage items + dup exec.memory::get_input_note_num_storage_items swap + # => [input_note_ptr, num_storage_items, pad(16)] - # get the inputs commitment - exec.memory::get_input_note_inputs_commitment - # => [NOTE_INPUTS_COMMITMENT, num_inputs, pad(16)] + # get the storage commitment + exec.memory::get_input_note_storage_commitment + # => [NOTE_STORAGE_COMMITMENT, num_storage_items, pad(16)] # truncate the stack repeat.5 movup.5 drop end - # => [NOTE_INPUTS_COMMITMENT, num_inputs, pad(11)] + # => [NOTE_STORAGE_COMMITMENT, num_storage_items, pad(11)] end #! Returns the script root of the specified input note. @@ -1107,7 +1049,7 @@ end #! Outputs: [SCRIPT_ROOT, pad(12)] #! #! Where: -#! - is_active_note is the boolean flag indicating whether we should return the inputs commitment +#! - is_active_note is the boolean flag indicating whether we should return the storage commitment #! and length from the active note or from the note with the specified index. #! - note_index is the index of the input note whose data should be returned. Notice that if #! 
is_active_note is 1, note_index is ignored. @@ -1115,7 +1057,7 @@ end #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. -#! - is_active_note is 1 and no input note is not being processed (attempted to access note inputs +#! - is_active_note is 1 and no input note is not being processed (attempted to access note storage #! from incorrect context). #! #! Invocation: dynexec @@ -1165,14 +1107,15 @@ pub proc output_note_create # => [note_idx, pad(15)] end -#! Adds the ASSET to the note specified by the index. +#! Adds the asset to the note specified by the index. #! -#! Inputs: [note_idx, ASSET, pad(11)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] #! Outputs: [pad(16)] #! #! Where: #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! #! Panics if: #! - the procedure is called when the active account is not the native one. @@ -1181,7 +1124,7 @@ end pub proc output_note_add_asset # check that this procedure was executed against the native account exec.memory::assert_native_account - # => [note_idx, ASSET, pad(11)] + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] exec.output_note::add_asset # => [pad(16)] @@ -1251,7 +1194,7 @@ end #! #! Where: #! - note_index is the index of the output note whose recipient should be returned. -#! - RECIPIENT is the commitment to the output note's script, inputs, the serial number. +#! - RECIPIENT is the commitment to the output note's script, storage, the serial number. #! #! Panics if: #! - the note index is greater or equal to the total number of output notes. @@ -1454,76 +1397,112 @@ pub proc tx_get_block_timestamp # => [timestamp, pad(15)] end -#! Starts a foreign account context. +#! Saves the foreign account ID, foreign procedure root, and the 16th (last) element of the foreign +#! 
procedure inputs to the memory. #! -#! This allows calling procedures on an account different from the native account. It loads the -#! foreign account into memory, unless already loaded. It pushes the foreign account onto the -#! account stack, which makes the foreign account the active account. +#! To work around the 15 value limitation of the `exec_kernel_proc` kernel procedure we store the +#! 16th value of the foreign procedure inputs to the kernel memory: this allows to FPI any account +#! procedure, even if it has 16 input values. #! -#! Inputs: -#! Operand stack: [foreign_account_id_prefix, foreign_account_id_suffix, pad(14)] -#! Advice map: { -#! FOREIGN_ACCOUNT_ID: [[foreign_account_id_suffix, foreign_account_id_prefix, 0, account_nonce], -#! VAULT_ROOT, STORAGE_ROOT, CODE_ROOT], -#! STORAGE_ROOT: [[STORAGE_SLOT_DATA]], -#! CODE_ROOT: [num_procs, [ACCOUNT_PROCEDURE_DATA]] -#! } -#! Outputs: -#! Operand stack: [pad(16)] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] +#! Outputs: [pad(16)] #! #! Where: #! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign #! account whose procedure is going to be executed. -#! - FOREIGN_ACCOUNT_ID is the word constructed from the foreign_account_id as follows: -#! [foreign_account_id_suffix, foreign_account_id_prefix, 0, 0]. -#! - account_nonce is the nonce of the foreign account. -#! - VAULT_ROOT is the commitment of the foreign account's vault. -#! - STORAGE_ROOT is the commitment of the foreign account's storage. -#! - STORAGE_SLOT_DATA is the data contained in the storage slot which is constructed as follows: -#! [SLOT_VALUE, slot_type, 0, 0, 0]. -#! - CODE_COMMITMENT is the commitment of the foreign account's code. -#! - ACCOUNT_PROCEDURE_DATA are the roots of the public procedures of the foreign account. +#! 
- FOREIGN_PROC_ROOT is the root of the foreign procedure which will be executed during the FPI +#! call. +#! - foreign_proc_input_value_15 is the 16th (last) value of the foreign procedure inputs. #! #! Panics if: -#! - foreign context is created against the native account. +#! - the provided foreign account ID is invalid. #! #! Invocation: dynexec -pub proc tx_start_foreign_context - # get the memory address and a flag whether this account was already loaded. - exec.account::get_account_data_ptr - # OS => [was_loaded, ptr, foreign_account_id_prefix, foreign_account_id_suffix, pad(14)] - - if.true - exec.memory::push_ptr_to_account_stack drop drop - # OS => [pad(16)] - else - exec.memory::push_ptr_to_account_stack - # OS => [foreign_account_id_prefix, foreign_account_id_suffix, pad(14)] - - # load the advice data into the active account memory section - exec.account::load_foreign_account - end +pub proc tx_prepare_fpi(foreign_account_id: AccountID, foreign_proc_root: word, foreign_procedure_input_15: felt) + # validate the provided foreign account ID + dup.1 dup.1 exec.account_id::validate + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(9)] + + # store the foreign account ID + exec.memory::set_fpi_account_id + # => [FOREIGN_PROC_ROOT, foreign_proc_input_value_15, pad(11)] + + # store the foreign procedure root + exec.memory::set_fpi_procedure_root dropw + # => [foreign_proc_input_value_15, pad(15)] - # make sure that the state of the loaded foreign account corresponds to this commitment in the - # account database - exec.account::validate_active_foreign_account + # store the 16th value of the foreign procedure inputs + mem_store.UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR # => [pad(16)] end -#! Ends a foreign account context. +#! Executes the procedure against the foreign account. #! -#! This pops the top of the account stack, making the previous account the active account. +#! 
This procedure should be executed after the `tx_prepare_fpi`, so it is expected for the foreign +#! account ID and foreign procedure root to be already loaded to the kernel memory. #! -#! Inputs: [pad(16)] -#! Outputs: [pad(16)] +#! Inputs: [foreign_procedure_inputs(15), pad] +#! Outputs: [foreign_procedure_outputs(16)] +#! +#! Where: +#! - foreign_procedure_inputs are the inputs to the foreign procedure padded to 15 felts. Notice +#! that the 16th (last) element of the inputs is not here and stored in the memory: it will be +#! loaded during the setup. +#! - foreign_procedure_outputs are the outputs of the foreign procedure padded to 16 felts. #! #! Panics if: -#! - the active account is the native account. +#! - foreign context is created against the native account. +#! - the ID of the foreign account loaded from the kernel memory is zero (ID was not set). +#! - the root of the foreign procedure loaded from the kernel memory is zero (root was not set). #! #! Invocation: dynexec -pub proc tx_end_foreign_context - exec.memory::pop_ptr_from_account_stack - # => [pad(16)] +pub proc tx_exec_foreign_proc + # move up the pad value to the top of the stack so we could drop it later + movup.15 + # => [pad, foreign_procedure_inputs(15)] + + # load the 16th foreign procedure input value onto the stack + mem_load.UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR + # => [foreign_proc_input_value_15, pad, foreign_procedure_inputs(15)] + + # drop the excess pad and move the 16th inputs value to its 15th position + swap drop movdn.15 + # => [foreign_procedure_inputs(16)] + + # load the ID of the foreign account onto the stack + exec.memory::get_fpi_account_id + # => [foreign_account_id_suffix, foreign_account_id_prefix, foreign_procedure_inputs(16)] + + # check that foreign account ID is not equal zero + dup.1 eq.0 dup.1 eq.0 and not assert.err=ERR_FOREIGN_ACCOUNT_ID_IS_ZERO + # => [foreign_account_id_suffix, foreign_account_id_prefix, foreign_procedure_inputs(16)] + + # load the foreign 
account to the memory + exec.tx::start_foreign_context + # => [foreign_procedure_inputs(16)] + + # get the pointer to the foreign procedure root to perform the dyncall + push.UPCOMING_FOREIGN_PROCEDURE_PTR + # => [foreign_proc_root_ptr, foreign_procedure_inputs(16)] + + # check that the foreign procedure root is not zero + padw mem_loadw_le.UPCOMING_FOREIGN_PROCEDURE_PTR + # => [FOREIGN_PROC_ROOT, foreign_proc_root_ptr, foreign_procedure_inputs(16)] + + exec.word::eqz assertz.err=ERR_FOREIGN_ACCOUNT_PROCEDURE_ROOT_IS_ZERO + # => [foreign_proc_root_ptr, foreign_procedure_inputs(16)] + + # call the foreign procedure + dyncall + # => [foreign_procedure_outputs(16)] + + # end the foreign context + exec.tx::end_foreign_context + # => [foreign_procedure_outputs(16)] + + # clear the foreign procedure ID and root in memory + exec.tx::clear_fpi_memory + # => [foreign_procedure_outputs(16)] end #! Updates the transaction expiration block delta. diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/account.masm b/crates/miden-protocol/asm/kernels/transaction/lib/account.masm index 7c2458f0ba..d0f405de0c 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/account.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/account.masm @@ -1,14 +1,19 @@ use $kernel::account_delta use $kernel::account_id use $kernel::asset_vault +use $kernel::callbacks +use $kernel::callbacks::ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT +use $kernel::callbacks::ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT use $kernel::constants::ACCOUNT_PROCEDURE_DATA_LENGTH use $kernel::constants::EMPTY_SMT_ROOT use $kernel::constants::STORAGE_SLOT_TYPE_MAP use $kernel::constants::STORAGE_SLOT_TYPE_VALUE use $kernel::memory +use $kernel::memory::ACCT_ID_SUFFIX_OFFSET +use $kernel::memory::ACCT_ID_PREFIX_OFFSET use miden::core::collections::smt use miden::core::collections::sorted_array -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use 
miden::core::mem use miden::core::word @@ -49,8 +54,6 @@ const ERR_ACCOUNT_STORAGE_COMMITMENT_MISMATCH="computed account storage commitme const ERR_ACCOUNT_STORAGE_MAP_ENTRIES_DO_NOT_MATCH_MAP_ROOT="storage map entries provided as advice inputs do not have the same storage map root as the root of the map the new account commits to" -const ERR_FOREIGN_ACCOUNT_ID_IS_ZERO="ID of the provided foreign account equals zero" - const ERR_FOREIGN_ACCOUNT_MAX_NUMBER_EXCEEDED="maximum allowed number of foreign account to be loaded (64) was exceeded" const ERR_FOREIGN_ACCOUNT_INVALID_COMMITMENT="commitment of the foreign account in the advice provider does not match the commitment in the account tree" @@ -64,11 +67,6 @@ const ERR_ACCOUNT_READING_MAP_VALUE_FROM_NON_MAP_SLOT="failed to read an account # CONSTANTS # ================================================================================================= -# The name of the account storage slot at which faucet data is stored. -# Fungible faucet: The faucet data consists of [0, 0, 0, total_issuance] -# Non-fungible faucet: The faucet data consists of SMT root containing minted non-fungible assets. -const FAUCET_SYSDATA_SLOT=word("miden::protocol::faucet::sysdata") - # The maximum storage slot index const MAX_STORAGE_SLOT_INDEX=254 @@ -139,63 +137,46 @@ const ACCOUNT_SLOT_VALUE_OFFSET=4 # ================================================================================================= # Event emitted before a foreign account is loaded from the advice inputs. -const ACCOUNT_BEFORE_FOREIGN_LOAD_EVENT=event("miden::account::before_foreign_load") +const ACCOUNT_BEFORE_FOREIGN_LOAD_EVENT=event("miden::protocol::account::before_foreign_load") # Event emitted before an asset is added to the account vault. 
-const ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT=event("miden::account::vault_before_add_asset") +const ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT=event("miden::protocol::account::vault_before_add_asset") # Event emitted after an asset is added to the account vault. -const ACCOUNT_VAULT_AFTER_ADD_ASSET_EVENT=event("miden::account::vault_after_add_asset") +const ACCOUNT_VAULT_AFTER_ADD_ASSET_EVENT=event("miden::protocol::account::vault_after_add_asset") # Event emitted before an asset is removed from the account vault. -const ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_EVENT=event("miden::account::vault_before_remove_asset") +const ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_EVENT=event("miden::protocol::account::vault_before_remove_asset") # Event emitted after an asset is removed from the account vault. -const ACCOUNT_VAULT_AFTER_REMOVE_ASSET_EVENT=event("miden::account::vault_after_remove_asset") - -# Event emitted before a fungible asset's balance is fetched from the account vault. -const ACCOUNT_VAULT_BEFORE_GET_BALANCE_EVENT=event("miden::account::vault_before_get_balance") +const ACCOUNT_VAULT_AFTER_REMOVE_ASSET_EVENT=event("miden::protocol::account::vault_after_remove_asset") -# Event emitted before it is checked whether a non-fungible asset exists in the account vault. -const ACCOUNT_VAULT_BEFORE_HAS_NON_FUNGIBLE_ASSET_EVENT=event("miden::account::vault_before_has_non_fungible_asset") +# Event emitted before an asset is fetched from the account vault. +const ACCOUNT_VAULT_BEFORE_GET_ASSET_EVENT=event("miden::protocol::account::vault_before_get_asset") # Event emitted before an account storage item is updated. -const ACCOUNT_STORAGE_BEFORE_SET_ITEM_EVENT=event("miden::account::storage_before_set_item") +const ACCOUNT_STORAGE_BEFORE_SET_ITEM_EVENT=event("miden::protocol::account::storage_before_set_item") # Event emitted after an account storage item is updated. 
-const ACCOUNT_STORAGE_AFTER_SET_ITEM_EVENT=event("miden::account::storage_after_set_item") +const ACCOUNT_STORAGE_AFTER_SET_ITEM_EVENT=event("miden::protocol::account::storage_after_set_item") # Event emitted before an account storage map item is accessed. -const ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_EVENT=event("miden::account::storage_before_get_map_item") +const ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_EVENT=event("miden::protocol::account::storage_before_get_map_item") # Event emitted before an account storage map item is updated. -const ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM_EVENT=event("miden::account::storage_before_set_map_item") +const ACCOUNT_STORAGE_BEFORE_SET_MAP_ITEM_EVENT=event("miden::protocol::account::storage_before_set_map_item") # Event emitted after an account storage map item is updated. -const ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM_EVENT=event("miden::account::storage_after_set_map_item") +const ACCOUNT_STORAGE_AFTER_SET_MAP_ITEM_EVENT=event("miden::protocol::account::storage_after_set_map_item") # Event emitted before an account nonce is incremented. -const ACCOUNT_BEFORE_INCREMENT_NONCE_EVENT=event("miden::account::before_increment_nonce") +const ACCOUNT_BEFORE_INCREMENT_NONCE_EVENT=event("miden::protocol::account::before_increment_nonce") # Event emitted after an account nonce is incremented. -const ACCOUNT_AFTER_INCREMENT_NONCE_EVENT=event("miden::account::after_increment_nonce") +const ACCOUNT_AFTER_INCREMENT_NONCE_EVENT=event("miden::protocol::account::after_increment_nonce") # Event emitted to push the index of the account procedure at the top of the operand stack onto # the advice stack. -const ACCOUNT_PUSH_PROCEDURE_INDEX_EVENT=event("miden::account::push_procedure_index") +const ACCOUNT_PUSH_PROCEDURE_INDEX_EVENT=event("miden::protocol::account::push_procedure_index") # CONSTANT ACCESSORS # ================================================================================================= -#! 
Returns the account storage slot at which faucet data is stored. -#! Fungible faucet: The faucet data consists of [0, 0, 0, total_issuance] -#! Non-fungible faucet: The faucet data consists of SMT root containing minted non-fungible assets. -#! -#! Inputs: [] -#! Outputs: [faucet_slot_id_prefix, faucet_slot_id_suffix] -#! -#! Where: -#! - faucet_slot_id{prefix,suffix} are the prefix and suffix felts of the slot identifier, at which -#! faucet data is stored. -pub proc get_faucet_sysdata_slot_id - push.FAUCET_SYSDATA_SLOT[0..2] -end - #! Returns the maximum number of account storage slots. #! #! Inputs: [] @@ -242,9 +223,7 @@ pub use memory::get_account_id->get_id #! - nonce is the account nonce. pub use memory::get_account_nonce->get_nonce -#! Increments the account nonce by one and returns the new nonce. -#! -#! Assumes that it is executed only when the active account is the native account. +#! Increments the native account's nonce by one and returns the new nonce. #! #! Inputs: [] #! Outputs: [new_nonce] @@ -262,7 +241,7 @@ pub proc incr_nonce # emit event to signal that account nonce is being incremented emit.ACCOUNT_BEFORE_INCREMENT_NONCE_EVENT - exec.memory::get_account_nonce + exec.memory::get_native_account_nonce # => [current_nonce] # if the current nonce is the maximum felt value, then incrementing the nonce would overflow @@ -273,7 +252,7 @@ pub proc incr_nonce add.1 # => [new_nonce] - dup exec.memory::set_account_nonce + dup exec.memory::set_native_account_nonce # => [new_nonce] emit.ACCOUNT_AFTER_INCREMENT_NONCE_EVENT @@ -325,17 +304,18 @@ pub proc compute_commitment # => [] # prepare the stack for computing the account commitment - exec.memory::get_active_account_data_ptr padw padw padw - # => [RATE, RATE, PERM, account_data_ptr] + exec.memory::get_active_account_data_ptr + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, account_data_ptr] # stream account data and compute sequential hash. 
We perform two `mem_stream` operations # because the account data consists of exactly 4 words. - mem_stream exec.rpo256::permute - mem_stream exec.rpo256::permute - # => [RATE, RATE, PERM, account_data_ptr'] + mem_stream exec.poseidon2::permute + mem_stream exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, account_data_ptr'] # extract account commitment - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [ACCOUNT_COMMITMENT, account_data_ptr'] # drop account_data_ptr @@ -433,11 +413,11 @@ end #! Gets an item from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -446,9 +426,9 @@ end pub proc get_item # get account storage slots section offset exec.memory::get_account_active_storage_slots_section_ptr - # => [acct_storage_slots_section_offset, slot_id_prefix, slot_id_suffix] + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] # get the item from storage @@ -456,13 +436,50 @@ pub proc get_item # => [VALUE] end +#! Finds an item in the active account's storage by slot ID, returning whether the slot was found +#! along with its value. +#! +#! Unlike `get_item`, this procedure does not panic if the slot does not exist. Instead, it +#! returns `is_found = 0` and the empty word. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix] +#! Outputs: [is_found, VALUE] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! - is_found is 1 if the slot was found, 0 otherwise. +#! 
- VALUE is the value of the item, or the empty word if the slot was not found. +pub proc find_item + # get account storage slots section offset + exec.memory::get_account_active_storage_slots_section_ptr + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] + + exec.find_storage_slot + # => [is_found, slot_ptr] + + if.true + # slot was found, read its value + exec.get_item_raw + # => [VALUE] + + push.1 + # => [is_found = 1, VALUE] + else + # slot was not found, drop slot_ptr and return empty word + drop padw push.0 + # => [is_found = 0, EMPTY_WORD] + end + # => [is_found, VALUE] +end + #! Gets an item and its slot type from the account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE, slot_type] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! - slot_type is the type of the slot. @@ -472,9 +489,9 @@ end pub proc get_typed_item # get account storage slots section offset exec.memory::get_account_active_storage_slots_section_ptr - # => [acct_storage_slots_section_offset, slot_id_prefix, slot_id_suffix] + # => [acct_storage_slots_section_offset, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] dup add.ACCOUNT_SLOT_TYPE_OFFSET mem_load @@ -487,11 +504,11 @@ end #! Gets an item from the account storage at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! 
the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -500,9 +517,9 @@ end pub proc get_initial_item # get account initial storage slots section offset exec.memory::get_account_initial_storage_slots_ptr - # => [account_initial_storage_slots_ptr, slot_id_prefix, slot_id_suffix] + # => [account_initial_storage_slots_ptr, slot_id_suffix, slot_id_prefix] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr] # get the item from initial storage @@ -510,13 +527,13 @@ pub proc get_initial_item # => [INIT_VALUE] end -#! Sets an item in the account storage. +#! Sets an item in the account storage of the native account. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. @@ -526,12 +543,12 @@ end #! - the storage slot type is not value. pub proc set_item emit.ACCOUNT_STORAGE_BEFORE_SET_ITEM_EVENT - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] - exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, VALUE] + exec.memory::get_native_account_active_storage_slots_ptr + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, VALUE] - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, VALUE] # load the slot type @@ -564,11 +581,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the account #! storage slot identified by the slot ID. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! 
Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -577,7 +594,7 @@ end #! - the requested storage slot type is not map. pub proc get_map_item exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] exec.get_map_item_raw end @@ -585,11 +602,11 @@ end #! Returns the VALUE located under the specified KEY within the map contained in the given #! account storage slot at its initial state (beginning of transaction). #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the map item at KEY at the beginning of the transaction. #! @@ -598,18 +615,19 @@ end #! - the requested storage slot type is not map. pub proc get_initial_map_item exec.memory::get_account_initial_storage_slots_ptr - # => [initial_storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] + # => [initial_storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] exec.get_map_item_raw end -#! Stores NEW_VALUE under the specified KEY within the map contained in the given account storage slot. +#! Stores NEW_VALUE under the specified KEY within the map contained in the specified storage slot +#! of the native account. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE] +#! 
Inputs: [slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - NEW_VALUE is the value to set under KEY. @@ -621,11 +639,11 @@ end #! - the storage slot type is not map. #! - no map with the root of the slot is found. pub proc set_map_item - exec.memory::get_account_active_storage_slots_section_ptr - # => [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY, NEW_VALUE] + exec.memory::get_native_account_active_storage_slots_ptr + # => [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY, NEW_VALUE] # resolve the slot name to its pointer - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, KEY, NEW_VALUE] # load the slot type @@ -641,7 +659,7 @@ pub proc set_map_item # => [OLD_VALUE] end -#! Returns the type of the storage slot at the provided index. +#! Returns the type of the storage slot at the provided index for the native account. #! #! WARNING: The index must be in bounds. #! @@ -651,7 +669,7 @@ end #! Where: #! - index is the location in memory of the storage slot. #! - slot_type is the type of the storage slot. -pub proc get_storage_slot_type +pub proc get_native_storage_slot_type # convert the index into a memory offset mul.ACCOUNT_STORAGE_SLOT_DATA_LENGTH # => [offset] @@ -671,15 +689,16 @@ end #! Adds the specified asset to the account vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET is the asset that is added to the vault. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! 
- If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset that is added to the vault. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! - If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. @@ -687,143 +706,124 @@ end #! added. #! - the vault already contains the same non-fungible asset. pub proc add_asset_to_vault - # duplicate the ASSET to be able to emit an event after an asset is being added - dupw - # => [ASSET, ASSET] + swapw dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY] - # fetch the account vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [ASSET, acct_vault_root_ptr, ASSET] + exec.callbacks::on_before_asset_added_to_account + swapw + # => [ASSET_KEY, PROCESSED_ASSET_VALUE] + + # duplicate the asset for the later event and delta update + dupw.1 dupw.1 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, ASSET_KEY, PROCESSED_ASSET_VALUE] + + # push the account vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] # emit event to signal that an asset is going to be added to the account vault emit.ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] # add the asset to the account vault exec.asset_vault::add_asset - # => [ASSET', ASSET] - - swapw - # => [ASSET, ASSET'] + # => [PROCESSED_ASSET_VALUE', ASSET_KEY, PROCESSED_ASSET_VALUE] - dupw exec.account_delta::add_asset - # => [ASSET, ASSET'] + movdnw.2 + # => [ASSET_KEY, 
PROCESSED_ASSET_VALUE, PROCESSED_ASSET_VALUE'] # emit event to signal that an asset is being added to the account vault emit.ACCOUNT_VAULT_AFTER_ADD_ASSET_EVENT - dropw - # => [ASSET'] + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, PROCESSED_ASSET_VALUE'] + + exec.account_delta::add_asset + # => [PROCESSED_ASSET_VALUE'] end #! Removes the specified asset from the account vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. #! #! Panics if: #! - the fungible asset is not found in the vault. #! - the amount of the fungible asset in the vault is less than the amount to be removed. #! - the non-fungible asset is not found in the vault. pub proc remove_asset_from_vault - # fetch the vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [ASSET, acct_vault_root_ptr] + # duplicate the asset for the later event and delta update + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + # push the vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [ASSET_KEY, ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, ASSET_VALUE] # emit event to signal that an asset is going to be removed from the account vault emit.ACCOUNT_VAULT_BEFORE_REMOVE_ASSET_EVENT + # => [ASSET_KEY, ASSET_VALUE, account_vault_root_ptr, ASSET_KEY, ASSET_VALUE] # remove the asset from the account vault exec.asset_vault::remove_asset - # => [ASSET] + # => [ASSET_VALUE, ASSET_KEY, ASSET_VALUE] - dupw exec.account_delta::remove_asset - # => [ASSET] + swapw + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE] # emit event to signal that an asset is being removed from the account vault emit.ACCOUNT_VAULT_AFTER_REMOVE_ASSET_EVENT - # => [ASSET] -end + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE] -#! 
Returns the balance of the fungible asset associated with the provided faucet_id in the active -#! account's vault. -#! -#! Inputs: [faucet_id_prefix, faucet_id_suffix] -#! Outputs: [balance] -#! -#! Where: -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - balance is the vault balance of the fungible asset. -#! -#! Panics if: -#! - the provided faucet ID is not an ID of a fungible faucet. -pub proc get_balance - # get the vault root - exec.memory::get_account_vault_root_ptr movdn.2 - # => [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] - - # emit event to signal that an asset's balance is requested - emit.ACCOUNT_VAULT_BEFORE_GET_BALANCE_EVENT - # => [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] - - # get the asset balance - exec.asset_vault::get_balance - # => [balance] + exec.account_delta::remove_asset + # => [ASSET_VALUE] end -#! Returns the balance of the fungible asset associated with the provided faucet_id in the active -#! account's vault at the beginning of the transaction. +#! Returns the ASSET_VALUE associated with the provided asset vault key in the active account's vault. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix] -#! Outputs: [init_balance] +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - init_balance is the vault balance of the fungible asset at the beginning of the transaction. -#! -#! Panics if: -#! - the provided faucet ID is not an ID of a fungible faucet. -pub proc get_initial_balance - # get the vault root associated with the initial vault root of the native account - exec.memory::get_account_initial_vault_root_ptr movdn.2 - # => [faucet_id_prefix, faucet_id_suffix, init_native_vault_root_ptr] +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! 
- ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. +pub proc get_asset + # get the vault root ptr + exec.memory::get_account_vault_root_ptr movdn.4 + # => [ASSET_KEY, vault_root_ptr] - # emit event to signal that an asset's balance is requested - emit.ACCOUNT_VAULT_BEFORE_GET_BALANCE_EVENT - # => [faucet_id_prefix, faucet_id_suffix, init_native_vault_root_ptr] + # emit event to signal that an asset is requested + emit.ACCOUNT_VAULT_BEFORE_GET_ASSET_EVENT + # => [ASSET_KEY, vault_root_ptr] - # get the asset balance - exec.asset_vault::get_balance - # => [init_balance] + # get the asset + exec.asset_vault::get_asset + # => [ASSET_VALUE] end -#! Returns a boolean indicating whether the non-fungible asset is present in the active account's -#! vault. +#! Returns the ASSET_VALUE associated with the provided asset vault key in the active account's vault at +#! the beginning of the transaction. #! -#! Inputs: [ASSET] -#! Outputs: [has_asset] +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the non-fungible asset of interest. -#! - has_asset is a boolean indicating whether the account vault has the asset of interest. -#! -#! Panics if: -#! - the ASSET is a fungible asset. -pub proc has_non_fungible_asset - # get the vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [ASSET, vault_root_ptr] +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. 
+pub proc get_initial_asset + # get the vault root associated with the initial vault root of the native account + exec.memory::get_account_initial_vault_root_ptr movdn.4 + # => [ASSET_KEY, init_native_vault_root_ptr] - # emit event to signal that an asset's presence is being checked - emit.ACCOUNT_VAULT_BEFORE_HAS_NON_FUNGIBLE_ASSET_EVENT - # => [ASSET, vault_root_ptr] + # emit event to signal that an asset is requested + emit.ACCOUNT_VAULT_BEFORE_GET_ASSET_EVENT + # => [ASSET_KEY, init_native_vault_root_ptr] - # check if the account vault has the non-fungible asset - exec.asset_vault::has_non_fungible_asset - # => [has_asset] + # get the asset + exec.asset_vault::get_asset + # => [ASSET_VALUE] end # CODE @@ -902,59 +902,49 @@ pub proc validate_seed # Compute the hash of (SEED, CODE_COMMITMENT, STORAGE_COMMITMENT, EMPTY_WORD). # --------------------------------------------------------------------------------------------- - # push an empty word as padding for account ID hashing - padw - # => [EMPTY_WORD] + # initialize capacity of the hasher and rate1 with the code commitment + padw exec.memory::get_account_code_commitment + # => [CODE_COMMITMENT, CAPACITY] # push the advice map key at which the seed is located - exec.memory::get_account_id push.0.0 - # => [0, 0, account_id_prefix, account_id_suffix, EMPTY_WORD] + exec.memory::get_native_account_id + exec.create_id_key + # => [ACCOUNT_ID_KEY, CODE_COMMITMENT, CAPACITY] - # populate first four elements of the rate with the account ID seed + # overwrite ACCOUNT_ID_KEY with the SEED adv.push_mapval adv_loadw - # => [SEED, EMPTY_WORD] - - # pad capacity element of hasher - padw swapw - # => [SEED, 0, 0, 0, 0, EMPTY_WORD] + # => [SEED, CODE_COMMITMENT, CAPACITY] - # populate last four elements of the hasher rate with the code commitment - exec.memory::get_account_code_commitment - # => [CODE_COMMITMENT, SEED, 0, 0, 0, 0, EMPTY_WORD] - - # perform first permutation of seed and code_commitment (from advice stack) - # 
perm(seed, code_commitment) - exec.rpo256::permute - # => [RATE, RATE, PERM, EMPTY_WORD] + # perform first permutation over (SEED, CODE_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] # clear rate elements dropw dropw - # => [PERM, EMPTY_WORD] - - # perform second permutation perm(storage_commitment, 0, 0, 0, 0) - swapw exec.memory::get_account_storage_commitment swapw - # => [EMPTY_WORD, STORAGE_COMMITMENT, PERM] + # => [CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, CAP] + # perform second permutation over (STORAGE_COMMITMENT, EMPTY_WORD) + padw exec.memory::get_account_storage_commitment + # => [STORAGE_COMMITMENT, EMPTY_WORD, CAPACITY] - # extract digest - exec.rpo256::squeeze_digest + exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [DIGEST] + # => [digest0, digest1, digest2, digest3] # Shape suffix to set the lower 8 bits to zero and compare the computed and provided ID. # --------------------------------------------------------------------------------------------- - # extract account ID from digest - drop drop swap - # => [hashed_account_id_prefix, hashed_account_id_suffix] + # extract account ID from digest where suffix = digest0 and prefix = digest1 + movup.2 drop movup.2 drop + # => [hashed_account_id_suffix, hashed_account_id_prefix] exec.memory::get_account_id movdn.3 movdn.3 - # => [hashed_account_id_prefix, hashed_account_id_suffix, account_id_prefix, account_id_suffix] + # => [hashed_account_id_suffix, hashed_account_id_prefix, account_id_suffix, account_id_prefix] # shape suffix of hashed id by setting the lower 8 bits to zero - swap exec.account_id::shape_suffix swap - # => [hashed_account_id_prefix, hashed_account_id_suffix, account_id_prefix, account_id_suffix] + exec.account_id::shape_suffix + # => [hashed_account_id_suffix, hashed_account_id_prefix, account_id_suffix, account_id_prefix] # assert the account ID matches the account ID of the new account exec.account_id::is_equal 
assert.err=ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH @@ -994,15 +984,15 @@ pub proc validate_storage # => [curr_slot_idx] dup exec.get_slot_id - # => [curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] # we are guaranteed to not underflow because curr_slot_idx is at least 1 at the # beginning of the loop dup.2 sub.1 - # => [prev_slot_idx, curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [prev_slot_idx, curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] exec.get_slot_id - # => [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix, curr_slot_idx] + # => [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix, curr_slot_idx] # this effectively checks that slots are sorted _and_ unique, since duplicate slot IDs are # not less than each other @@ -1027,31 +1017,28 @@ end #! #! This procedure is public so it can be tested. #! -#! Inputs: [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix] +#! Inputs: [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix] #! 
Outputs: [is_prev_lt_curr] pub proc is_slot_id_lt movup.2 - # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix] - - # compute prev == curr for prefix - dup dup.2 eq - # => [is_prefix_eq, curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix] - - movdn.4 - # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_suffix, is_prefix_eq] + # => [curr_slot_id_suffix, prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_prefix] - # compute prev < curr for prefix + # compute prev < curr for suffix lt - # => [is_prev_lt_curr_prefix, prev_slot_id_suffix, curr_slot_id_suffix, is_prefix_eq] + # => [is_prev_lt_curr_suffix, prev_slot_id_prefix, curr_slot_id_prefix] - swap.2 - # => [curr_slot_id_suffix, prev_slot_id_suffix, is_prev_lt_curr_prefix, is_prefix_eq] + movdn.2 + # => [prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] - # compute prev < curr for suffix + dup dup.2 + # => [curr_slot_id_prefix, prev_slot_id_prefix, prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] + + # compute prev < curr for prefix lt - # => [is_prev_lt_curr_suffix, is_prev_lt_curr_prefix, is_prefix_eq] + # => [is_prev_lt_curr_prefix, prev_slot_id_prefix, curr_slot_id_prefix, is_prev_lt_curr_suffix] - movup.2 + # compute prev == curr for prefix + movdn.3 eq # => [is_prefix_eq, is_prev_lt_curr_suffix, is_prev_lt_curr_prefix] # compute result as is_prefix_lt || (is_suffix_lt && is_prefix_eq) @@ -1067,10 +1054,10 @@ end #! Loads account data from the advice inputs into the _active_ account's memory section. #! #! Inputs: -#! Operand stack: [account_id_prefix, account_id_suffix] +#! Operand stack: [account_id_suffix, account_id_prefix] #! Advice map: { -#! ACCOUNT_ID: [[account_id_suffix, account_id_prefix, 0, account_nonce], -#! VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT], +#! ACCOUNT_ID_KEY: [[account_id_suffix, account_id_prefix, 0, account_nonce], +#! 
VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT], #! STORAGE_COMMITMENT: [[STORAGE_SLOT_DATA]], #! CODE_COMMITMENT: [[ACCOUNT_PROCEDURE_DATA]], #! } @@ -1078,9 +1065,8 @@ end #! Operand stack: [] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the account. -#! - ACCOUNT_ID is the word constructed from the account_id as follows: -#! [account_id_suffix, account_id_prefix, 0, 0]. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the account. +#! - ACCOUNT_ID_KEY is the map key constructed from the account ID as done by create_id_key. #! - account_nonce is the nonce of the account. #! - VAULT_ROOT is the commitment of the account's vault. #! - STORAGE_COMMITMENT is the commitment to the account's storage. @@ -1096,26 +1082,29 @@ end #! - the computed account storage commitment does not match the provided account storage commitment. pub proc load_foreign_account emit.ACCOUNT_BEFORE_FOREIGN_LOAD_EVENT - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] - # construct the word with account ID to load the core account data from the advice map - push.0.0 - # OS => [0, 0, account_id_prefix, account_id_suffix] + # construct the advice map key from the account ID to load the core account data + exec.create_id_key + # OS => [ACCOUNT_ID_KEY] # move the core account data to the advice stack adv.push_mapval - # OS => [0, 0, account_id_prefix, account_id_suffix] - # AS => [[account_id_prefix, account_id_suffix, 0, account_nonce], VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] + # OS => [ACCOUNT_ID_KEY] + # AS => [ACCOUNT_ID_AND_NONCE, VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] - # store the id and nonce of the foreign account to the memory + # store the id and nonce of the foreign account to memory adv_loadw + # OS => [account_nonce, 0, account_id_suffix, account_id_prefix] + # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] + 
exec.memory::set_account_id_and_nonce - # OS => [] + # OS => [account_nonce, 0, account_id_suffix, account_id_prefix] # AS => [VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] # store the vault root of the foreign account to the memory adv_loadw exec.memory::set_account_vault_root - # OS => [] + # OS => [VAULT_ROOT] # AS => [STORAGE_COMMITMENT, CODE_COMMITMENT] # move the storage root and the code root to the operand stack @@ -1191,18 +1180,18 @@ pub proc save_account_storage_data # OS => [acct_storage_slots_ptr, end_ptr, STORAGE_COMMITMENT] # AS => [[STORAGE_SLOT_DATA]] - # pad stack before reading from advice stack - padw padw padw - # OS => [PAD, PAD, PAD, acct_storage_slots_ptr, end_ptr, STORAGE_COMMITMENT] + # initialize hasher state before reading from advice stack + exec.poseidon2::init_no_padding + # OS => [RATE0, RATE1, CAPACITY, acct_storage_slots_ptr, end_ptr, STORAGE_COMMITMENT] # AS => [[STORAGE_SLOT_DATA]] # read the data from advice stack to memory and hash exec.mem::pipe_double_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', STORAGE_COMMITMENT] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', STORAGE_COMMITMENT] # AS => [] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [DIGEST, end_ptr', STORAGE_COMMITMENT] # drop end_ptr @@ -1272,11 +1261,11 @@ pub proc save_account_procedure_data # read the data from advice stack to memory and hash exec.mem::pipe_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', CODE_COMMITMENT] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', CODE_COMMITMENT] # AS => [] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [DIGEST, end_ptr', CODE_COMMITMENT] # drop end_ptr @@ -1308,7 +1297,7 @@ pub proc insert_new_storage sub.1 # => [slot_idx] - dup exec.get_storage_slot_type + dup exec.get_native_storage_slot_type # => [slot_type, slot_idx] push.STORAGE_SLOT_TYPE_MAP eq @@ -1394,7 +1383,7 @@ proc insert_and_validate_storage_map # => 
[remaining_entries, slot_ptr, MAP_ROOT] # push a key-value pair (8 felts) to the operand stack - adv_push.8 + padw adv_loadw padw adv_loadw # => [KEY, VALUE, remaining_entries, slot_ptr, MAP_ROOT] dup.9 @@ -1427,42 +1416,17 @@ end # HELPER PROCEDURES # ================================================================================================= -#! Returns 1 if the provided slot ID is equal to the reserved faucet storage data slot, 0 -#! otherwise. -#! -#! Inputs: [slot_id_prefix, slot_id_suffix] -#! Outputs: [is_faucet_storage_data_slot] -#! -#! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are -#! the first two felts of the hashed slot name. -#! - is_faucet_storage_data_slot is a boolean value indicating whether the provided slot is the -#! reserved faucet data slot. -pub proc is_faucet_storage_data_slot - exec.get_faucet_sysdata_slot_id - # => [faucet_slot_id_prefix, faucet_slot_id_suffix, slot_id_prefix, slot_id_suffix] - - movup.2 eq - # => [prefix_eq, faucet_slot_id_suffix, slot_id_suffix] - - movdn.2 eq - # => [prefix_eq, suffix_eq] - - and - # => [is_faucet_storage_data_slot] -end - #! Gets the initial and current value of an item from the storage slot at the provided index for #! the native account. #! #! WARNING: The index must be in bounds. #! #! Inputs: [index] -#! Outputs: [INITIAL_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] +#! Outputs: [INITIAL_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] #! #! Where: #! - index is the index of the slot. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INITIAL_VALUE is the initial value of the item at the beginning of the transaction. #! - CURRENT_VALUE is the current value of the item. 
@@ -1479,25 +1443,25 @@ pub proc get_item_delta # => [slot_ptr, slot_ptr, offset] # load the slot ID - add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load - # => [slot_id_suffix, slot_ptr, offset] + add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load + # => [slot_id_prefix, slot_ptr, offset] - dup.1 add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load - # => [slot_id_prefix, slot_id_suffix, slot_ptr, offset] + dup.1 add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load + # => [slot_id_suffix, slot_id_prefix, slot_ptr, offset] # load the current value movup.2 exec.get_item_raw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, offset] + # => [CURRENT_VALUE, slot_id_suffix, slot_id_prefix, offset] # get account initial storage slots section offset exec.memory::get_account_initial_storage_slots_ptr - # => [init_storage_slots_ptr, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, offset] + # => [init_storage_slots_ptr, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, offset] movup.7 add - # => [init_slot_ptr, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] + # => [init_slot_ptr, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] exec.get_item_raw - # => [INITIAL_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix] + # => [INITIAL_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix] end #! Gets the slot ID of the storage slot at the provided index. @@ -1505,11 +1469,11 @@ end #! WARNING: The index must be in bounds. #! #! Inputs: [index] -#! Outputs: [slot_id_prefix, slot_id_suffix] +#! Outputs: [slot_id_suffix, slot_id_prefix] #! #! Where: #! - index is the index of the slot. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. 
pub proc get_slot_id # convert the index into a memory offset @@ -1520,11 +1484,11 @@ pub proc get_slot_id add # => [slot_ptr] - dup add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load - # => [slot_id_suffix, slot_ptr] + dup add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load + # => [slot_id_prefix, slot_ptr] - swap add.ACCOUNT_SLOT_ID_PREFIX_OFFSET mem_load - # => [slot_id_prefix, slot_id_suffix] + swap add.ACCOUNT_SLOT_ID_SUFFIX_OFFSET mem_load + # => [slot_id_suffix, slot_id_prefix] end #! Sets the value of the storage slot located at the memory address specified by the provided @@ -1541,7 +1505,7 @@ end #! - slot_ptr is the pointer to a slot. #! - VALUE is the new value of the item. proc set_item_raw - add.ACCOUNT_SLOT_VALUE_OFFSET mem_storew_be dropw + add.ACCOUNT_SLOT_VALUE_OFFSET mem_storew_le dropw # => [] # set the storage commitment dirty flag to indicate that the commitment is outdated @@ -1598,7 +1562,7 @@ proc set_map_item_raw # => [OLD_VALUE, NEW_ROOT, KEY, NEW_VALUE] # store OLD_VALUE until the end of the procedure - loc_storew_be.4 swapw + loc_storew_le.4 swapw # => [NEW_ROOT, OLD_VALUE, KEY, NEW_VALUE] # store NEW_ROOT into the map slot's VALUE @@ -1623,7 +1587,7 @@ proc set_map_item_raw # => [] # load OLD_VALUE as return value on the stack - padw loc_loadw_be.4 + padw loc_loadw_le.4 # => [OLD_VALUE] end @@ -1644,19 +1608,19 @@ proc get_item_raw # => [slot_value_ptr] # load the item from memory - padw movup.4 mem_loadw_be + padw movup.4 mem_loadw_le # => [VALUE] end #! Finds the storage map root in the storage slot with the provided name in the provided storage #! slots section and returns the VALUE associated with the KEY in the corresponding map. #! -#! Inputs: [storage_slots_ptr, slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: #! - KEY is the key to look up in the map. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! 
- slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the map item at KEY. #! @@ -1664,7 +1628,7 @@ end #! - a slot with the provided slot ID does not exist in account storage. #! - the requested storage slot type is not map. proc get_map_item_raw - exec.find_storage_slot + exec.get_storage_slot_ptr # => [slot_ptr, KEY] emit.ACCOUNT_STORAGE_BEFORE_GET_MAP_ITEM_EVENT @@ -1696,37 +1660,53 @@ proc get_map_item_raw # => [VALUE] end -#! Finds the slot identified by the key [slot_id_prefix, slot_id_suffix, 0, 0] (stack order) and -#! returns the pointer to that slot. +#! Finds the slot identified by the key [_, _, slot_id_suffix, slot_id_prefix] and returns a flag +#! indicating whether the slot was found and the pointer to that slot. #! -#! Inputs: [storage_slots_ptr, slot_id_prefix, slot_id_suffix] -#! Outputs: [slot_ptr] +#! Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix] +#! Outputs: [is_found, slot_ptr] #! #! Where: #! - storage_slots_ptr is the pointer to the storage slots section. +#! - is_found is 1 if the slot was found, 0 otherwise. #! - slot_ptr is the pointer to the resolved storage slot. -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. -#! -#! Panics if: -#! - a slot with the provided slot ID does not exist in account storage. 
proc find_storage_slot # construct the start and end pointers of the storage slot section in which we will search dup exec.memory::get_num_storage_slots mul.ACCOUNT_STORAGE_SLOT_DATA_LENGTH add - # => [storage_slots_end_ptr, storage_slots_start_ptr, slot_id_prefix, slot_id_suffix] + # => [storage_slots_end_ptr, storage_slots_start_ptr, slot_id_suffix, slot_id_prefix] - movdn.3 movdn.2 - # => [slot_id_prefix, slot_id_suffix, storage_slots_start_ptr, storage_slots_end_ptr] + swap movup.3 movup.3 + # => [slot_id_suffix, slot_id_prefix, storage_slots_start_ptr, storage_slots_end_ptr] - # find the slot whose slot key matches [slot_id_prefix, slot_id_suffix, 0, 0] # if the slot key does not exist, this procedure will validate its absence exec.sorted_array::find_half_key_value # => [is_slot_found, slot_ptr, storage_slots_start_ptr, storage_slots_end_ptr] - assert.err=ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME - # => [slot_ptr, storage_slots_start_ptr, storage_slots_end_ptr] + movup.2 drop movup.2 drop + # => [is_slot_found, slot_ptr] +end - swap.2 drop drop +#! Finds the slot identified by the key [_, _, slot_id_suffix, slot_id_prefix] and returns the +#! pointer to that slot. +#! +#! Inputs: [storage_slots_ptr, slot_id_suffix, slot_id_prefix] +#! Outputs: [slot_ptr] +#! +#! Where: +#! - storage_slots_ptr is the pointer to the storage slots section. +#! - slot_ptr is the pointer to the resolved storage slot. +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! +#! Panics if: +#! - a slot with the provided slot ID does not exist in account storage. +proc get_storage_slot_ptr + exec.find_storage_slot + # => [is_found, slot_ptr] + + assert.err=ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME # => [slot_ptr] end @@ -1773,18 +1753,18 @@ proc get_procedure_root # => [proc_ptr] # load procedure root from memory - padw movup.4 mem_loadw_be + padw movup.4 mem_loadw_le # => [PROC_ROOT] end #! 
Returns the pointer to the next vacant memory slot if the account was not loaded before, and the #! pointer to the account data otherwise. #! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix] -#! Outputs: [was_loaded, ptr, foreign_account_id_prefix, foreign_account_id_suffix] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix] +#! Outputs: [was_loaded, ptr, foreign_account_id_suffix, foreign_account_id_prefix] #! #! Where: -#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign +#! - foreign_account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the foreign #! account whose procedure is going to be executed. #! - was_loaded is the binary flag indicating whether the foreign account was already loaded to the #! memory. @@ -1792,17 +1772,16 @@ end #! data, depending on the value of the was_loaded flag. #! #! Panics if: -#! - the prefix or suffix of the provided foreign account ID equal zero. #! - the maximum allowed number of foreign account to be loaded (64) was exceeded. 
pub proc get_account_data_ptr - # check that foreign account ID is not equal zero - dup.1 eq.0 dup.1 eq.0 and not assert.err=ERR_FOREIGN_ACCOUNT_ID_IS_ZERO - # => [foreign_account_id_prefix, foreign_account_id_suffix] - # move pointer one account block back so that the first account pointer in the cycle will point # to the native account exec.memory::get_native_account_data_ptr exec.memory::get_account_data_length sub - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] + + # push the pad element onto the stack: it will represent the `is_equal_id` flag during the cycle + push.0 movdn.3 + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id=0] # push the flag to enter the loop push.1 @@ -1811,41 +1790,40 @@ pub proc get_account_data_ptr # drop the flag left from the previous loop # in the first iteration this will be a pad element movup.3 drop - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] # move the current account pointer to the next account data block exec.memory::get_account_data_length add - # => [curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # => [curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] - # load the first data word at the current account pointer - padw dup.4 mem_loadw_be - # => [FIRST_DATA_WORD, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + dup add.ACCT_ID_PREFIX_OFFSET mem_load + dup.1 add.ACCT_ID_SUFFIX_OFFSET mem_load + # => [account_id_suffix, account_id_prefix, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] - # check whether the last value in the word equals zero - # if so it means this memory block was not yet initialized - drop drop dup.1 eq.0 - # => [is_empty_block, 
maybe_account_id_prefix, maybe_account_id_suffix, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # check whether the ID is equal to zero, if so it means this memory block was not yet initialized + dup.1 dup.1 push.0 push.0 exec.account_id::is_equal + # => [is_empty_block, account_id_suffix, account_id_prefix, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] # check whether the current id matches the foreign id movdn.2 dup.5 dup.5 exec.account_id::is_equal - # => [is_equal_id, is_empty_word, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix] + # => [is_equal_id, is_empty_block, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix] # get the loop flag # it equals 1 if both `is_equal_id` and `is_empty_block` flags are equal to 0, so we should # continue iterating dup movdn.5 or not - # => [loop_flag, curr_account_ptr', foreign_account_id_prefix, foreign_account_id_suffix, is_equal_id] + # => [loop_flag, curr_account_ptr', foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id] end # check that the loading of one more account won't exceed the maximum number of the foreign # accounts which can be loaded. dup exec.memory::get_max_foreign_account_ptr lte assert.err=ERR_FOREIGN_ACCOUNT_MAX_NUMBER_EXCEEDED - # => [curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix, is_equal_id] + # => [curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix, is_equal_id] # the resulting `was_loaded` flag is essentially equal to the `is_equal_id` flag movup.3 - # => [was_loaded, curr_account_ptr, foreign_account_id_prefix, foreign_account_id_suffix] + # => [was_loaded, curr_account_ptr, foreign_account_id_suffix, foreign_account_id_prefix] end #! Checks that the state of the active foreign account is valid. @@ -1854,15 +1832,18 @@ end #! Outputs: [] #! #! Panics if: -#! - the hash of the active account is not represented in the account database. +#! 
- the commitment of the loaded foreign account does not match the commitment stored in the +#! account tree. pub proc validate_active_foreign_account # get the account database root exec.memory::get_account_db_root # => [ACCOUNT_DB_ROOT] - # get the active account ID - push.0.0 exec.memory::get_account_id - # => [account_id_prefix, account_id_suffix, 0, 0, ACCOUNT_DB_ROOT] + # get the account ID of the foreign account (the currently active account) and build the + # corresponding key in the account tree + exec.memory::get_account_id + exec.create_id_key + # => [ACCOUNT_ID_KEY, ACCOUNT_DB_ROOT] # retrieve the commitment of the foreign account from the active account tree # this would abort if the proof for the commitment was invalid for the account root, @@ -1887,7 +1868,7 @@ end #! Inputs: [KEY] #! Outputs: [HASHED_KEY] proc hash_map_key - exec.rpo256::hash + exec.poseidon2::hash # => [HASHED_KEY] end @@ -1921,15 +1902,15 @@ proc refresh_storage_commitment # => [start_ptr, end_ptr] # pad stack to read and hash from memory - padw padw padw - # => [PAD, PAD, PAD, start_ptr, end_ptr] + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, start_ptr, end_ptr] # hash elements from memory - exec.rpo256::absorb_double_words_from_memory - # => [PERM, PERM, PERM, start_ptr, end_ptr] + exec.poseidon2::absorb_double_words_from_memory + # => [RATE0, RATE1, CAPACITY, start_ptr, end_ptr] # extract the digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [DIGEST, end_ptr, end_ptr] # clean stack @@ -2037,7 +2018,7 @@ pub proc has_procedure # => [PROC_ROOT, curr_proc_ptr, end_ptr, is_procedure_available] # load the root of the current procedure - padw dup.8 mem_loadw_be + padw dup.8 mem_loadw_le # => [CURR_PROC_ROOT, PROC_ROOT, curr_proc_ptr, end_ptr, is_procedure_available] # check whether the current root is equal to the provided root @@ -2069,3 +2050,65 @@ pub proc has_procedure dropw drop drop # => [is_procedure_available'] end + +# CALLBACKS +# 
------------------------------------------------------------------------------------------------- + +#! Returns whether the active account defines callbacks. +#! +#! Inputs: [] +#! Outputs: [has_callbacks] +#! +#! Where: +#! - has_callbacks is 1 if the account defines callbacks, 0 otherwise. +pub proc has_callbacks + # check if the on_before_asset_added_to_account callback slot exists and is non-empty + push.ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT[0..2] + exec.has_non_empty_slot + # => [has_account_callback] + + # check if the on_before_asset_added_to_note callback slot exists and is non-empty + push.ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT[0..2] + exec.has_non_empty_slot + # => [has_note_callback, has_account_callback] + + or + # => [has_callbacks] +end + +#! Checks whether a storage slot with the given slot ID exists in the active account's storage +#! and has a non-empty value. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix] +#! Outputs: [has_non_empty_value] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are +#! the first two felts of the hashed slot name. +#! - has_non_empty_value is 1 if the slot exists and its value is non-empty, 0 otherwise. +proc has_non_empty_slot + exec.find_item + # => [is_found, VALUE] + + # check if is_found && value is non-empty + movdn.4 exec.word::eqz not + # => [is_non_empty_value, is_found] + + and + # => [has_non_empty_value] +end + +#! Returns the key built from the provided account ID for use in the advice map or the account +#! tree. +#! +#! Inputs: [account_id_suffix, account_id_prefix] +#! Outputs: [ACCOUNT_ID_KEY] +#! +#! Where: +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the account ID. +#! - ACCOUNT_ID_KEY is the key word built from the provided account ID. 
+proc create_id_key + push.0.0 + # => [0, 0, account_id_suffix, account_id_prefix] + # => [ACCOUNT_ID_KEY] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm b/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm index 229103d606..b2b44e81c3 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/account_delta.masm @@ -1,10 +1,13 @@ use $kernel::account use $kernel::asset -use $kernel::asset_vault use $kernel::constants::STORAGE_SLOT_TYPE_VALUE +use $kernel::fungible_asset use $kernel::link_map use $kernel::memory -use miden::core::crypto::hashes::rpo256 +use $kernel::memory::ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR +use $kernel::memory::ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR +use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +use miden::core::crypto::hashes::poseidon2 use miden::core::word # ERRORS @@ -44,39 +47,35 @@ const FELT_MAX = 0xffffffff00000000 #! Panics if: #! - the vault or storage delta is not empty but the nonce increment is zero. 
pub proc compute_commitment - # pad capacity element of the hasher - padw - # => [CAPACITY] - - exec.was_nonce_incremented push.0 - # => [0, was_nonce_incremented, CAPACITY] - # note that the delta of the nonce is equal to was_nonce_incremented - # => [0, nonce_delta, CAPACITY] + # pad capacity and RATE1 of the hasher with empty words + padw padw + # => [EMPTY_WORD, CAPACITY] exec.memory::get_native_account_id - # => [native_acct_id_prefix, native_acct_id_suffix, 0, nonce_delta, CAPACITY] - # => [ID_AND_NONCE, CAPACITY] + # => [native_acct_id_suffix, native_acct_id_prefix, EMPTY_WORD, CAPACITY] - padw - # => [EMPTY_WORD, ID_AND_NONCE, CAPACITY] + # the delta of the nonce is equal to was_nonce_incremented + push.0 exec.was_nonce_incremented + # => [nonce_delta, 0, native_acct_id_suffix, native_acct_id_prefix, EMPTY_WORD, CAPACITY] + # => [ID_AND_NONCE, EMPTY_WORD, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] - # save the ID and nonce digest (the 2nd rate word) for a later check - dupw.1 movdnw.3 - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # save the ID and nonce digest (RATE0 word) for a later check + exec.poseidon2::copy_digest movdnw.3 + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_fungible_asset_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_non_fungible_asset_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] exec.update_storage_delta - # => [RATE, RATE, PERM, ID_AND_NONCE_DIGEST] + # => [RATE0, RATE1, CAPACITY, ID_AND_NONCE_DIGEST] - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [DELTA_COMMITMENT, ID_AND_NONCE_DIGEST] exec.was_nonce_incremented not @@ -101,118 +100,114 @@ end #! Updates the given delta hasher with the storage slots. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! 
Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] proc update_storage_delta exec.memory::get_num_storage_slots movdn.12 - # => [RATE, RATE, PERM, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, num_storage_slots] push.0 movdn.12 - # => [RATE, RATE, PERM, slot_idx = 0, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, slot_idx = 0, num_storage_slots] # loop if num_storage_slots != 0 dup.13 neq.0 - # => [should_loop, RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [should_loop, RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] while.true dup.12 - # => [slot_idx, RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [slot_idx, RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] exec.update_slot_delta - # => [RATE, RATE, PERM, slot_idx, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, slot_idx, num_storage_slots] # increment slot index movup.12 add.1 - # => [next_slot_idx, RATE, RATE, PERM, num_storage_slots] + # => [next_slot_idx, RATE0, RATE1, CAPACITY, num_storage_slots] dup movdn.13 - # => [next_slot_idx, RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [next_slot_idx, RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] # continue if next_slot_idx != num_storage_slots # we use neq instead of lt for efficiency dup.14 neq - # => [should_loop, RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [should_loop, RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] end - # => [RATE, RATE, PERM, next_slot_idx, num_storage_slots] + # => [RATE0, RATE1, CAPACITY, next_slot_idx, num_storage_slots] # clean the stack movup.12 drop movup.12 drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! 
Outputs: [RATE0, RATE1, CAPACITY] proc update_slot_delta - dup exec.account::get_storage_slot_type - # => [storage_slot_type, slot_idx, RATE, RATE, PERM] + dup exec.account::get_native_storage_slot_type + # => [storage_slot_type, slot_idx, RATE0, RATE1, CAPACITY] # check if slot is of type value push.STORAGE_SLOT_TYPE_VALUE eq - # => [is_value_slot_type, slot_idx, RATE, RATE, PERM] + # => [is_value_slot_type, slot_idx, RATE0, RATE1, CAPACITY] if.true exec.update_value_slot_delta else exec.update_map_slot_delta end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the value storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] proc update_value_slot_delta exec.account::get_item_delta - # => [INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] exec.word::test_eq not - # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # set was_changed to true if the account is new # generally, the delta for a new account must include all its storage slots, regardless of the # initial value and even if it is an empty word, because the initial delta for an account must # represent its full state exec.memory::is_new_account or - # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [was_changed, INIT_VALUE, CURRENT_VALUE, slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # only include in delta if the slot's value has changed or the account is new if.true # drop init value dropw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + # => [CURRENT_VALUE, 
slot_id_suffix, slot_id_prefix, RATE0, RATE1, CAPACITY] # build value slot metadata - push.DOMAIN_VALUE push.0 - # => [0, domain, CURRENT_VALUE, slot_id_prefix, slot_id_suffix, RATE, RATE, PERM] + movup.5 movup.5 + # => [slot_id_suffix, slot_id_prefix, CURRENT_VALUE, RATE0, RATE1, CAPACITY] - movup.7 movup.7 - # => [slot_id_prefix, slot_id_suffix, 0, domain, CURRENT_VALUE, RATE, RATE, PERM] + push.0.DOMAIN_VALUE + # => [[domain, 0, slot_id_suffix, slot_id_prefix], CURRENT_VALUE, RATE0, RATE1, CAPACITY] # clear rate elements swapdw dropw dropw - # => [slot_id_prefix, slot_id_suffix, 0, domain, CURRENT_VALUE, PERM] - - # arrange rate words in correct order - swapw - # => [CURRENT_VALUE, slot_id_prefix, slot_id_suffix, 0, domain, PERM] + # => [[domain, 0, slot_id_suffix, slot_id_prefix], CURRENT_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # drop init value, current value and slot name dropw dropw drop drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the map storage slot at the provided index. #! -#! Inputs: [slot_idx, RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [slot_idx, RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] #! #! Locals: #! 
- 0: slot_id_suffix @@ -226,19 +221,19 @@ proc update_map_slot_delta # this is necessary because this procedure can be called multiple times and the second # invocation shouldn't reuse the first invocation's value push.0 loc_store.4 - # => [slot_idx, RATE, RATE, PERM] + # => [slot_idx, RATE0, RATE1, CAPACITY] dup exec.account::get_slot_id - # => [slot_id_prefix, slot_id_suffix, slot_idx, RATE, RATE, PERM] + # => [slot_id_suffix, slot_id_prefix, slot_idx, RATE0, RATE1, CAPACITY] - loc_store.1 loc_store.0 - # => [slot_idx, RATE, RATE, PERM] + loc_store.0 loc_store.1 + # => [slot_idx, RATE0, RATE1, CAPACITY] exec.memory::get_account_delta_storage_map_ptr - # => [account_delta_storage_map_ptr, RATE, RATE, PERM] + # => [account_delta_storage_map_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true @@ -262,73 +257,73 @@ proc update_map_slot_delta # if the key-value pair has actually changed, update the hasher if.true # drop the initial value - swapw dropw - # => [NEW_VALUE, KEY, RATE, RATE, PERM] + swapw dropw swapw + # => [KEY, NEW_VALUE, RATE0, RATE1, CAPACITY] # increment number of changed entries in local loc_load.4 add.1 loc_store.4 - # => [NEW_VALUE, KEY, RATE, RATE, PERM] + # => [KEY, NEW_VALUE, RATE0, RATE1, CAPACITY] # drop previous RATE elements swapdw dropw dropw - # => [NEW_VALUE, KEY, PERM] + # => [KEY, NEW_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # discard the key and init and new value words loaded from the map dropw dropw dropw - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.3 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.2 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, 
RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # only include the map slot metadata if there were entries in the map that resulted in an # update to the hasher state loc_load.4 neq.0 - # => [is_num_changed_entries_non_zero, RATE, RATE, PERM] + # => [is_num_changed_entries_non_zero, RATE0, RATE1, CAPACITY] # if the account is new (nonce == 0) include the map header even if it is an empty map # in order to have the delta commit to this initial storage slot. exec.memory::is_new_account or - # => [should_include_map_header, RATE, RATE, PERM] + # => [should_include_map_header, RATE0, RATE1, CAPACITY] if.true # drop the previous RATE elements dropw dropw - # => [PERM] + # => [CAPACITY] - push.DOMAIN_MAP loc_load.4 loc_load.0 loc_load.1 padw - # => [EMPTY_WORD, [slot_id_prefix, slot_id_suffix, num_changed_entries, domain], PERM] + padw loc_load.1 loc_load.0 loc_load.4 push.DOMAIN_MAP + # => [[domain, num_changed_entries, slot_id_suffix, slot_id_prefix], EMPTY_WORD, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the fungible asset vault delta. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] @locals(2) proc update_fungible_asset_delta - exec.memory::get_account_delta_fungible_asset_ptr - # => [account_delta_fungible_asset_ptr, RATE, RATE, PERM] + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR + # => [account_delta_fungible_asset_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true @@ -343,132 +338,130 @@ proc update_fungible_asset_delta movup.8 loc_store.1 # => [KEY, VALUE0, ...] 
# this stack state is equivalent to: - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], [delta_amount, 0, 0, 0], ...] + # => [[0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount, 0, 0, 0], ...] swapw - # => [[delta_amount, 0, 0, 0], [faucet_id_prefix, faucet_id_suffix, 0, 0], ...] + # => [[delta_amount, 0, 0, 0], [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] # compute the absolute value of delta amount with a flag indicating whether it's positive exec.delta_amount_absolute - # => [[is_delta_amount_positive, delta_amount_abs, 0, 0, 0], ...] + # => [is_delta_amount_positive, [delta_amount_abs, 0, 0, 0], [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] - # rename is_delta_amount_positive to was_added - swap.3 drop - # => [[delta_amount_abs, 0, was_added, 0], ...] + # define the was_added value as equivalent to is_delta_amount_positive + # this value is 1 if the amount was added and 0 if the amount was removed + swap.6 drop + # => [[delta_amount_abs, 0, 0, 0], [0, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] dup neq.0 - # => [is_delta_amount_non_zero, [delta_amount_abs, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, 0], ...] + # => [is_delta_amount_non_zero, [delta_amount_abs, 0, 0, 0], [0, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] # if delta amount is non-zero, update the hasher if.true - swap.7 - # => [[0, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] - - drop push.DOMAIN_ASSET - # => [[domain, 0, was_added, 0], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] - - swap.3 - # => [[0, 0, was_added, domain], [faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], ...] + push.DOMAIN_ASSET swap.5 drop + # => [[delta_amount_abs, 0, 0, 0], [domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ...] 
+ # swap value and metadata words swapw - # => [[faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], [0, 0, was_added, domain], RATE, RATE, PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount_abs, 0, 0, 0], RATE0, RATE1, CAPACITY] # drop previous RATE elements swapdw dropw dropw - # => [[faucet_id_prefix, faucet_id_suffix, 0, delta_amount_abs], [0, 0, was_added, domain], PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], [delta_amount_abs, 0, 0, 0], CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else # discard values loaded from map: KEY, VALUE0 dropw dropw - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.1 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.0 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end #! Updates the given delta hasher with the non-fungible asset vault delta. #! -#! Inputs: [RATE, RATE, PERM] -#! Outputs: [RATE, RATE, PERM] +#! Inputs: [RATE0, RATE1, CAPACITY] +#! Outputs: [RATE0, RATE1, CAPACITY] @locals(2) proc update_non_fungible_asset_delta - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [account_delta_non_fungible_asset_ptr, RATE, RATE, PERM] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [account_delta_non_fungible_asset_ptr, RATE0, RATE1, CAPACITY] exec.link_map::iter - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] # enter loop if the link map is not empty while.true - exec.link_map::next_key_value - # => [KEY, VALUE0, has_next, iter, ...] + exec.link_map::next_key_double_value + # => [KEY, VALUE0, VALUE1, has_next, iter, ...] 
# store has_next - movup.8 loc_store.0 - # => [KEY, VALUE0, iter, ...] + movup.12 loc_store.0 + # => [KEY, VALUE0, VALUE1, iter, ...] # store iter - movup.8 loc_store.1 - # => [KEY, VALUE0, ...] + movup.12 loc_store.1 + # => [KEY, VALUE0, VALUE1, ...] # this stack state is equivalent to: - # => [ASSET, [was_added, 0, 0, 0], ...] + # => [ASSET_KEY, [was_added, 0, 0, 0], ASSET_VALUE, ...] dup.4 neq.0 - # => [was_added_or_removed, ASSET, [was_added, 0, 0, 0], ...] + # => [was_added_or_removed, ASSET_KEY, [was_added, 0, 0, 0], ASSET_VALUE, ...] # if the asset was added or removed (i.e. if was_added != 0), update the hasher if.true - movup.4 - # => [was_added, ASSET, [0, 0, 0], ...] + swapw + # => [[was_added, 0, 0, 0], ASSET_KEY, ASSET_VALUE, ...] # convert was_added to a boolean # was_added is 1 if the asset was added and 0 - 1 if it was removed eq.1 - # => [was_added, ASSET, [0, 0, 0], ...] + # => [[was_added, 0, 0, 0], [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] - movdn.6 - # => [ASSET, [0, 0, was_added, 0], ...] + # replace asset_id_prefix with was_added and drop the remaining word + swap.5 dropw + # => [[asset_id_suffix, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] - push.DOMAIN_ASSET swap.8 drop - # => [ASSET, [0, 0, was_added, domain], RATE, RATE, PERM] + # replace asset_id_suffix with domain + drop push.DOMAIN_ASSET + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, ...] 
# drop previous RATE elements swapdw dropw dropw - # => [ASSET, [0, 0, was_added, domain], PERM] + # => [[domain, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix], ASSET_VALUE, CAPACITY] - exec.rpo256::permute - # => [RATE, RATE, PERM] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] else - # discard the two key and value words loaded from the map - dropw dropw - # => [RATE, RATE, PERM] + # discard the key, value0 and value1 words loaded from the map + dropw dropw dropw + # => [RATE0, RATE1, CAPACITY] end - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] # load iter and has_next loc_load.1 - # => [iter, RATE, RATE, PERM] + # => [iter, RATE0, RATE1, CAPACITY] loc_load.0 - # => [has_next, iter, RATE, RATE, PERM] + # => [has_next, iter, RATE0, RATE1, CAPACITY] end # drop iter drop - # => [RATE, RATE, PERM] + # => [RATE0, RATE1, CAPACITY] end # DELTA BOOKKEEPING @@ -496,21 +489,23 @@ end #! #! Assumes the asset is valid, so it should be called after asset_vault::add_asset. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the asset. +#! - ASSET_KEY is the vault key of the asset that is added. +#! - ASSET_VALUE is the value of the asset that is added. pub proc add_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true - exec.asset_vault::build_fungible_asset_vault_key swapw - # => [ASSET, ASSET_KEY] + swapw + # => [ASSET_VALUE, ASSET_KEY] - drop drop drop movdn.4 + exec.fungible_asset::value_into_amount + movdn.4 # => [ASSET_KEY, amount] exec.add_fungible_asset @@ -526,21 +521,23 @@ end #! Assumes the asset is valid, so it should be called after asset_vault::remove_asset #! (which would abort if the asset is invalid). #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the asset. +#! 
- ASSET_KEY is the vault key of the asset that is removed. +#! - ASSET_VALUE is the value of the asset that is removed. pub proc remove_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET, vault_root_ptr] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true - exec.asset_vault::build_fungible_asset_vault_key swapw - # => [ASSET, ASSET_KEY] + swapw + # => [ASSET_VALUE, ASSET_KEY] - drop drop drop movdn.4 + exec.fungible_asset::value_into_amount + movdn.4 # => [ASSET_KEY, amount] exec.remove_fungible_asset @@ -560,7 +557,7 @@ end #! - ASSET_KEY is the asset key of the fungible asset. #! - amount is the amount by which the fungible asset's amount increases. pub proc add_fungible_asset - dupw exec.memory::get_account_delta_fungible_asset_ptr + dupw push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, amount] # retrieve the current delta amount @@ -581,7 +578,7 @@ pub proc add_fungible_asset swapw padw movdnw.2 # => [ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] - exec.memory::get_account_delta_fungible_asset_ptr + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] exec.link_map::set drop @@ -597,7 +594,7 @@ end #! - ASSET_KEY is the asset key of the fungible asset. #! - amount is the amount by which the fungible asset's amount decreases. 
pub proc remove_fungible_asset - dupw exec.memory::get_account_delta_fungible_asset_ptr + dupw push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, amount] # retrieve the current delta amount @@ -618,7 +615,7 @@ pub proc remove_fungible_asset swapw padw movdnw.2 # => [ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] - exec.memory::get_account_delta_fungible_asset_ptr + push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR # => [fungible_delta_map_ptr, ASSET_KEY, delta_amount, 0, 0, 0, EMPTY_WORD] exec.link_map::set drop @@ -627,7 +624,7 @@ end #! Adds the given non-fungible asset to the non-fungible asset vault delta. #! -#! ASSET must be a valid non-fungible asset. +#! ASSET_VALUE must be a valid non-fungible asset. #! #! If the key does not exist in the delta map, the non-fungible asset's was_added value is 0. #! When it is added to the account vault, was_added is incremented by 1; when it is removed from @@ -641,29 +638,48 @@ end #! 0 -> no change to the asset #! +1 -> asset was added #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the non-fungible asset to be added. +#! - ASSET_KEY is the vault key of the non-fungible asset to be added. +#! - ASSET_VALUE is the value of the non-fungible asset to be added. 
pub proc add_non_fungible_asset - dupw exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, ASSET] + dupw push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, ASSET_VALUE] # retrieve the current delta - # contains_key can be ignored because the default value is an empty word and the - # was_added value is therefore 0 + # contains_key can be ignored because the asset vault ensures each asset key is only added to + # the delta once + # if no entry exists, the default value is an empty word and so the was_added value is 0 exec.link_map::get drop - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, PREV_ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + dupw.3 movupw.2 + # => [PREV_ASSET_VALUE, ASSET_VALUE, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # the asset vault guarantees that this procedure is only called when the asset was not yet + # _added_ to the vault, so it can either be absent or it could have been removed + # absent means PREV_ASSET_VALUE is the EMPTY_WORD + # removal means PREV_ASSET_VALUE is equal to ASSET_VALUE + # sanity check that this assumption is true + exec.word::testz movdn.8 + # => [PREV_ASSET_VALUE, ASSET_VALUE, is_empty_word, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + exec.word::eq or + assert.err="add: prev_asset_value must be empty or equal to asset_value for non-fungible assets" + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # add 1 to cancel out a previous removal (was_added = 0) or mark the asset as added (was_added = 1) add.1 - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] - movupw.2 - # => [ASSET, was_added, 0, 0, 0, EMPTY_WORD] + swapw + # => [ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, was_added, 0, 0, 0, EMPTY_WORD] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # 
=> [non_fungible_delta_map_ptr, ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] exec.link_map::set drop # => [] @@ -671,33 +687,52 @@ end #! Removes the given non-fungible asset from the non-fungible asset vault delta. #! -#! ASSET must be a valid non-fungible asset. +#! ASSET_VALUE must be a valid non-fungible asset. #! #! See add_non_fungible_asset for documentation. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] #! Outputs: [] #! #! Where: -#! - ASSET is the non-fungible asset to be removed. +#! - ASSET_KEY is the vault key of the non-fungible asset to be removed. +#! - ASSET_VALUE is the value of the non-fungible asset to be removed. pub proc remove_non_fungible_asset - dupw exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, ASSET] + dupw push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, ASSET_KEY, ASSET_VALUE] # retrieve the current delta - # contains_key can be ignored because the default value is an empty word and the - # was_added value is therefore 0 + # contains_key can be ignored because the asset vault ensures each asset key is only removed + # from the delta once + # if no entry exists, the default value is an empty word and so the was_added value is 0 exec.link_map::get drop - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, PREV_ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + dupw.3 movupw.2 + # => [PREV_ASSET_VALUE, ASSET_VALUE, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # the asset vault guarantees that this procedure is only called when the asset was not yet + # _removed_ from the vault, so it can either be absent or it could have been added + # absent means PREV_ASSET_VALUE is the EMPTY_WORD + # addition means PREV_ASSET_VALUE is equal to ASSET_VALUE + # sanity check that this assumption is true + + exec.word::testz movdn.8 + # => [PREV_ASSET_VALUE, ASSET_VALUE, is_empty_word, was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + 
exec.word::eq or + assert.err="remove: prev_asset_value must be empty or equal to asset_value for non-fungible assets" + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + # sub 1 to cancel out a previous addition (was_added = 1) or mark the asset as removed (was_added = -1) sub.1 - # => [was_added, 0, 0, 0, EMPTY_WORD, ASSET] + # => [was_added, 0, 0, 0, ASSET_KEY, ASSET_VALUE] - movupw.2 - # => [ASSET, was_added, 0, 0, 0, EMPTY_WORD] + swapw + # => [ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] - exec.memory::get_account_delta_non_fungible_asset_ptr - # => [non_fungible_delta_map_ptr, ASSET, was_added, 0, 0, 0, EMPTY_WORD] + push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR + # => [non_fungible_delta_map_ptr, ASSET_KEY, was_added, 0, 0, 0, ASSET_VALUE] exec.link_map::set drop # => [] @@ -729,7 +764,7 @@ pub proc set_map_item # => [KEY, PREV_VALUE, NEW_VALUE] # store KEY in local - loc_storew_be.4 + loc_storew_le.4 # => [KEY, PREV_VALUE, NEW_VALUE] loc_load.0 @@ -760,7 +795,7 @@ pub proc set_map_item # => [INITIAL_VALUE, NEW_VALUE] # load key and index from locals - padw loc_loadw_be.4 loc_load.0 + padw loc_loadw_le.4 loc_load.0 # => [account_delta_storage_map_ptr, KEY, INITIAL_VALUE, NEW_VALUE] exec.link_map::set drop @@ -784,9 +819,9 @@ end # # Don't we have to check for overflows? No, because we're building on top of the guarantees of the # asset vault. It guarantees that the max value that can be added to the vault within a transaction -# is asset::get_fungible_asset_max_amount and that the max value that can be removed is also that. +# is asset::FUNGIBLE_ASSET_MAX_AMOUNT and that the max value that can be removed is also that. # Since the delta amount range can represent positive and negative -# asset::get_fungible_asset_max_amount, this works out. +# asset::FUNGIBLE_ASSET_MAX_AMOUNT, this works out. # # With these ranges every positive value has a negative counterpart and vice versa. 
This is # **unlike** two's complements because the goldilocks modulus is odd while the "modulus" in @@ -835,6 +870,6 @@ end #! - is_delta_amount_negative indicates whether the delta amount represents a negative value. proc is_delta_amount_negative # delta_amount represents a negative number if it is greater than the max amount - exec.asset::get_fungible_asset_max_amount gt + gt.FUNGIBLE_ASSET_MAX_AMOUNT # => [is_delta_amount_negative] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm index 6429f273d0..b45b093c61 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/asset.masm @@ -1,181 +1,110 @@ use $kernel::account_id +use $kernel::fungible_asset +use $kernel::non_fungible_asset # ERRORS # ================================================================================================= -const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO="malformed fungible asset: `ASSET[1]` must be 0" - -const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID="malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id" - -const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS="malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount" - -const ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID="malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id" - -const ERR_NON_FUNGIBLE_ASSET_FORMAT_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO="malformed non-fungible asset: the most significant bit must be 0" - -const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the fungible asset is not this faucet" - -const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the non-fungible asset is not this faucet" +const ERR_VAULT_ASSET_KEY_ACCOUNT_ID_MUST_BE_FAUCET="account ID in asset vault key must be 
either of type fungible or non-fungible faucet" # CONSTANT ACCESSORS # ================================================================================================= -#! Returns the maximum amount of a fungible asset. -#! -#! Inputs: [] -#! Outputs: [fungible_asset_max_amount] -#! -#! Where: -#! - fungible_asset_max_amount is the maximum amount of a fungible asset. -pub use ::$kernel::util::asset::get_fungible_asset_max_amount +pub use ::$kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +pub use ::$kernel::util::asset::ASSET_SIZE +pub use ::$kernel::util::asset::ASSET_VALUE_MEMORY_OFFSET +pub use ::$kernel::util::asset::key_to_faucet_id +pub use ::$kernel::util::asset::key_into_faucet_id +pub use ::$kernel::util::asset::key_to_asset_id +pub use ::$kernel::util::asset::key_into_asset_id +pub use ::$kernel::util::asset::key_to_callbacks_enabled +pub use ::$kernel::util::asset::store +pub use ::$kernel::util::asset::load # PROCEDURES # ================================================================================================= -#! Validates that a fungible asset is well formed. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET] -#! -#! Where: -#! - ASSET is the asset to validate. -#! -#! Panics if: -#! - the asset is not well formed. 
-pub proc validate_fungible_asset - # assert that ASSET[1] == ZERO - dup.2 not assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO - # => [ASSET] - - # assert that the tuple (ASSET[3], ASSET[2]) forms a valid account ID - dup.1 dup.1 exec.account_id::validate - # => [ASSET] - - # assert that the prefix (ASSET[3]) of the account ID is of type fungible faucet - dup exec.account_id::is_fungible_faucet - assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID - # => [ASSET] - - # assert that the max amount (ASSET[0]) of a fungible asset is not exceeded - dup.3 exec.::$kernel::util::asset::get_fungible_asset_max_amount lte - assert.err=ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS - # => [ASSET] -end - #! Returns a boolean indicating whether the asset is fungible. #! -#! Inputs: [ASSET] -#! Outputs: [is_fungible_asset, ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [is_fungible_asset, ASSET_KEY] #! #! Where: -#! - ASSET is the asset to check. +#! - ASSET_KEY is the vault key of the asset to check. #! - is_fungible_asset is a boolean indicating whether the asset is fungible. -pub proc is_fungible_asset - # check the first element, it will be: - # - zero for a fungible asset - # - non zero for a non-fungible asset - dup.2 eq.0 - # => [is_fungible_asset, ASSET] +pub proc is_fungible_asset_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.3 exec.account_id::is_fungible_faucet + # => [is_fungible_asset, ASSET_KEY] end -#! Validates that a non fungible asset is well formed. +#! Validates that an asset's vault key is well formed. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] #! #! Where: -#! - ASSET is the asset to validate. +#! - ASSET_KEY is the vault key of the asset to validate. #! #! Panics if: -#! - the asset is not well formed. 
-pub proc validate_non_fungible_asset - # assert that ASSET[3] is a valid account ID prefix - # hack: because we only have the prefix we add a 0 as the suffix which is always valid - push.0 dup.1 exec.account_id::validate - # => [ASSET] +#! - the asset key is not a valid fungible or non-fungible asset key (see +#! fungible_asset::validate_key and non_fungible_asset::validate_key). +pub proc validate_key + # check if the asset key is fungible + exec.is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY] - # assert that the account ID prefix ASSET[3] is of type non fungible faucet - dup exec.account_id::is_non_fungible_faucet - assert.err=ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID - # => [ASSET] + if.true + exec.fungible_asset::validate_key + # => [ASSET_KEY] + else + exec.non_fungible_asset::validate_key + # => [ASSET_KEY] + end + # => [ASSET_KEY] end #! Returns a boolean indicating whether the asset is non-fungible. #! -#! Inputs: [ASSET] -#! Outputs: [is_non_fungible_asset, ASSET] +#! Inputs: [ASSET_KEY] +#! Outputs: [is_non_fungible_asset, ASSET_KEY] #! #! Where: -#! - ASSET is the asset to check. +#! - ASSET_KEY is the vault key of the asset to check. #! - is_non_fungible_asset is a boolean indicating whether the asset is non-fungible. -pub proc is_non_fungible_asset - # check the first element, it will be: - # - zero for a fungible asset - # - non zero for a non-fungible asset - exec.is_fungible_asset not - # => [is_non_fungible_asset, ASSET] +pub proc is_non_fungible_asset_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.3 exec.account_id::is_non_fungible_faucet + # => [is_non_fungible_asset, ASSET_KEY] end #! Validates that an asset is well formed. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to validate. +#! - ASSET_KEY is the vault key of the asset to validate. +#! 
- ASSET_VALUE is the value of the asset to validate. #! #! Panics if: -#! - the asset is not well formed. -pub proc validate_asset +#! - the asset is not a valid fungible or non-fungible asset (see fungible_asset::validate and +#! non_fungible_asset::validate_key). +pub proc validate # check if the asset is fungible - exec.is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] # if the asset is fungible, validate the fungible asset if.true - exec.validate_fungible_asset + exec.fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE] else # if the asset is non fungible, validate the non fungible asset - exec.validate_non_fungible_asset + exec.non_fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE] end - # => [ASSET] -end - -#! Validates that a fungible asset is associated with the provided faucet_id. -#! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, ASSET] -#! Outputs: [ASSET] -#! -#! Where: -#! - faucet_id_prefix is the prefix of the faucet's account ID. -#! - ASSET is the asset to validate. -pub proc validate_fungible_asset_origin - # assert the origin of the asset is the faucet_id provided via the stack - dup.3 dup.3 - # => [asset_id_prefix, asset_id_suffix, faucet_id_prefix, faucet_id_suffix, ASSET] - - exec.account_id::is_equal assert.err=ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN - # => [ASSET] - - # assert the fungible asset is valid - exec.validate_fungible_asset - # => [ASSET] -end - -#! Validates that a non-fungible asset is associated with the provided faucet_id. -#! -#! Inputs: [faucet_id_prefix, ASSET] -#! Outputs: [ASSET] -#! -#! Where: -#! - faucet_id_prefix is the prefix of the faucet's account ID. -#! - ASSET is the asset to validate. 
-pub proc validate_non_fungible_asset_origin - # assert the origin of the asset is the faucet_id prefix provided via the stack - dup.1 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN - # => [ASSET] - - # assert the non-fungible asset is valid - exec.validate_non_fungible_asset - # => [ASSET] + # => [ASSET_KEY, ASSET_VALUE] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm b/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm index 2559bed3bc..a65766a267 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/asset_vault.masm @@ -1,166 +1,89 @@ use miden::core::collections::smt -use miden::core::word -use $kernel::account_id use $kernel::asset -use $kernel::memory +use $kernel::fungible_asset +use $kernel::non_fungible_asset # ERRORS # ================================================================================================= -const ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET="get_balance can only be called on a fungible asset" - -const ERR_VAULT_PEEK_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET="peek_balance can only be called on a fungible asset" - -const ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET="the has_non_fungible_asset procedure can only be called on a non-fungible faucet" - -const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding the fungible asset to the vault would exceed the max amount of 9223372036854775807" - const ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID="failed to add fungible asset to the asset vault due to the initial value being invalid" const ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS="the non-fungible asset already exists in the asset vault" -const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW="failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" 
- const ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID="failed to remove fungible asset from the asset vault due to the initial value being invalid" const ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND="failed to remove non-existent non-fungible asset from the vault" -# CONSTANTS -# ================================================================================================= - -# The bitmask that when applied will set the fungible bit to zero. -const INVERSE_FUNGIBLE_BITMASK_U32=0xffffffdf # last byte: 0b1101_1111 - # ACCESSORS # ================================================================================================= -#! Returns the balance of a fungible asset associated with a faucet_id. +#! Returns the ASSET_VALUE associated with the provided asset vault key. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] -#! Outputs: [balance] +#! Inputs: [ASSET_KEY, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - balance is the vault balance of the fungible asset. -#! -#! Panics if: -#! - the provided faucet ID is not an ID of a fungible faucet. -pub proc get_balance - # assert that the faucet id is a fungible faucet - dup exec.account_id::is_fungible_faucet - assert.err=ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET - # => [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] - +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't present. 
+pub proc get_asset # load the asset vault root from memory - padw movup.6 mem_loadw_be - # => [ASSET_VAULT_ROOT, faucet_id_prefix, faucet_id_suffix] + padw movup.8 mem_loadw_le + # => [ASSET_VAULT_ROOT, ASSET_KEY] - # prepare the key for fungible asset lookup (pad least significant elements with zeros) - push.0.0 movup.7 movup.7 - # => [faucet_id_prefix, faucet_id_suffix, 0, 0, ASSET_VAULT_ROOT] + swapw + # => [ASSET_KEY, ASSET_VAULT_ROOT] # lookup asset exec.smt::get swapw dropw - # => [ASSET] - - # extract the asset's balance - exec.::$kernel::util::asset::get_balance_from_fungible_asset - # => [balance] + # => [ASSET_VALUE] end -#! Returns the _peeked_ balance of a fungible asset associated with the provided faucet_id. +#! Returns the _peeked_ asset associated with the provided asset vault key. #! -#! WARNING: Peeked means the balance is loaded from the advice provider, which is susceptible to +#! WARNING: Peeked means the asset is loaded from the advice provider, which is susceptible to #! manipulation from a malicious host. Therefore this should only be used when the inclusion of the -#! peeked balance is verified at a later point. +#! peeked asset is verified at a later point. +#! +#! To get the verified asset, use get_asset. peek_asset is useful when updating a value in an SMT +#! with smt::set, which returns the previous value. As long as that previous value is the same as +#! the peeked value, using peek_asset is safe from manipulation. Using smt::get instead would work +#! as well, but since both smt::get and smt::set prove inclusion of the current/previous value, +#! this is unnecessary double work that can be skipped by peeking. #! #! WARNING: This is a generic vault procedure and so it cannot emit an event to lazy load asset #! merkle paths from the merkle store, since this is only possible for the account vault. Ensure #! that the merkle paths are present prior to calling. #! -#! To get the verified balance, use get_balance. -#! -#! 
Inputs: [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] -#! Outputs: [balance] +#! Inputs: [ASSET_KEY, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - balance is the vault balance of the fungible asset. -#! -#! Panics if: -#! - the asset is not a fungible asset. -pub proc peek_balance - # assert that the faucet id is a fungible faucet - dup exec.account_id::is_fungible_faucet - assert.err=ERR_VAULT_PEEK_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET - # => [faucet_id_prefix, faucet_id_suffix, vault_root_ptr] - +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the retrieved asset. +pub proc peek_asset # load the asset vault root from memory - padw movup.6 mem_loadw_be - # => [ASSET_VAULT_ROOT, faucet_id_prefix, faucet_id_suffix] + padw movup.8 mem_loadw_le + # => [ASSET_VAULT_ROOT, ASSET_KEY] - # prepare the vault key for fungible asset lookup (pad least significant elements with zeros) - push.0.0 movup.7 movup.7 - # => [faucet_id_prefix, faucet_id_suffix, 0, 0, ASSET_VAULT_ROOT] + swapw # => [ASSET_KEY, ASSET_VAULT_ROOT] # lookup asset exec.smt::peek # OS => [ASSET_KEY, ASSET_VAULT_ROOT] - # AS => [ASSET] + # AS => [ASSET_VALUE] dropw # OS => [ASSET_VAULT_ROOT] - # AS => [ASSET] + # AS => [ASSET_VALUE] # this overwrites the vault root adv_loadw - # OS => [ASSET] + # OS => [ASSET_VALUE] # AS => [] - - # extract the asset's balance - exec.::$kernel::util::asset::get_balance_from_fungible_asset - # => [balance] -end - -#! Returns a boolean indicating whether the non-fungible asset is present in the vault. -#! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [has_asset] -#! -#! Where: -#! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! 
- ASSET is the non-fungible asset of interest -#! - has_asset is a boolean indicating whether the account vault has the asset of interest -#! -#! Panics if: -#! - the ASSET is a fungible asset. -pub proc has_non_fungible_asset - # check if the asset is a non-fungible asset - exec.asset::is_non_fungible_asset - assert.err=ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET - # => [ASSET, vault_root_ptr] - - # build the asset key from the non-fungible asset - exec.build_non_fungible_asset_vault_key - # => [ASSET_KEY, vault_root_ptr] - - # prepare the stack to read non-fungible asset from vault - padw movup.8 mem_loadw_be swapw - # => [ASSET_KEY, ACCT_VAULT_ROOT] - - # lookup asset - exec.smt::get swapw dropw - # => [ASSET] - - # compare with EMPTY_WORD to assess if the asset exists in the vault - exec.word::eqz not - # => [has_asset] end # ADD ASSET @@ -172,325 +95,293 @@ end #! If the amount to be added is zero and the asset does not already exist in the vault, the vault #! remains unchanged. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE'] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET is the fungible asset to add to the vault. -#! - ASSET' is the total fungible asset in the account vault after ASSET was added to it. +#! - ASSET_KEY is the vault key of the fungible asset to add to the vault. +#! - ASSET_VALUE is the fungible asset to add to the vault. +#! - ASSET_VALUE' is the total fungible asset in the account vault after ASSET_VALUE was added to it. +#! +#! Locals: +#! - 0: vault_root_ptr #! #! Panics if: -#! - the total value of assets is greater than or equal to 2^63. +#! - the total value of assets is greater than or equal to FUNGIBLE_ASSET_MAX_AMOUNT. +@locals(1) pub proc add_fungible_asset - # Create the asset key from the asset. 
+ # Get the current asset using `peek_asset`. # --------------------------------------------------------------------------------------------- - exec.build_fungible_asset_vault_key - # => [ASSET_KEY, faucet_id_prefix, faucet_id_suffix, 0, amount, vault_root_ptr] + # store the vault_root_ptr + movup.8 loc_store.0 + # => [ASSET_KEY, ASSET_VALUE] - movup.6 drop - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + dupw loc_load.0 movdn.4 + # => [ASSET_KEY, vault_root_ptr, ASSET_KEY, ASSET_VALUE] - # Get the asset vault root and read the current asset using the `push_smtpeek` decorator. - # --------------------------------------------------------------------------------------------- + exec.peek_asset + # => [CUR_VAULT_VALUE, ASSET_KEY, ASSET_VALUE] - padw dup.11 - # => [vault_root_ptr, pad(4), ASSET_KEY, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + # since we have peeked the value, we need to later assert that the actual value matches this + # one, so we'll keep a copy for later + # set the current asset value equal to the current vault value + swapw dupw.1 + # => [CURRENT_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, ASSET_VALUE] - # the current asset may be the empty word if it does not exist and so its faucet id would be zeroes - # we therefore overwrite the faucet id with the faucet id from ASSET to account for this edge case - mem_loadw_be swapw - # => [ASSET_KEY, VAULT_ROOT, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] + movupw.3 + # => [ASSET_VALUE, CURRENT_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE] - exec.smt::peek adv_loadw - # => [CUR_VAULT_VALUE, VAULT_ROOT, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - swapw - # => [VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - dupw.1 - # => [CUR_VAULT_VALUE, VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - drop drop - # => [[0, 
cur_amount], VAULT_ROOT, CUR_VAULT_VALUE, faucet_id_prefix, faucet_id_suffix, amount, vault_root_ptr] - movup.11 movup.11 - # => [[faucet_id_prefix, faucet_id_suffix, 0, cur_amount], VAULT_ROOT, CUR_VAULT_VALUE, amount, vault_root_ptr] - - # Check the new amount does not exceed the maximum allowed amount and add the two - # fungible assets together. + # Merge the assets. # --------------------------------------------------------------------------------------------- - # arrange amounts - movup.3 movup.12 dup - # => [amount, amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] - - # compute max_amount - cur_amount - exec.asset::get_fungible_asset_max_amount dup.3 sub - # => [(max_amount - cur_amount), amount, amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, - # CUR_VAULT_VALUE, vault_root_ptr] + exec.fungible_asset::merge + # => [MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE] - # assert amount + cur_amount < max_amount - lte assert.err=ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED - # => [amount, cur_amount, faucet_id_prefix, faucet_id_suffix, 0, VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] + # store a copy of MERGED_ASSET_VALUE for returning + movdnw.2 dupw.2 + # => [MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # add asset amounts - add movdn.3 - # => [ASSET', VAULT_ROOT, CUR_VAULT_VALUE, vault_root_ptr] - - # Create the asset key and insert the updated asset. + # Insert the merged asset. 
# --------------------------------------------------------------------------------------------- - # create the asset key to prepare insertion of the asset into the vault - dupw movdnw.3 - # => [ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - dupw - # => [ASSET', ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - push.0 swap.4 drop - # => [[faucet_id_prefix, faucet_id_suffix, 0, 0], ASSET', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] - swapw - # => [ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + # load the vault root + padw loc_load.0 mem_loadw_le + # => [VAULT_ROOT, MERGED_ASSET_VALUE, ASSET_KEY, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # pad empty word for insertion - padw - # => [EMPTY_WORD, ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + movdnw.2 padw + # => [EMPTY_WORD, MERGED_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] # check if amount of new asset is zero # if it is zero, insert EMPTY_WORD to keep the merkle tree sparse - dup.7 eq.0 - # => [is_amount_zero, EMPTY_WORD, ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + dupw.1 + exec.fungible_asset::value_into_amount + eq.0 + # => [is_amount_zero, EMPTY_WORD, MERGED_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] # If is_amount_zero EMPTY_WORD remains. - # If !is_amount_zero ASSET' remains. + # If !is_amount_zero MERGED_ASSET_VALUE remains. 
cdropw - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY', VAULT_ROOT, CUR_VAULT_VALUE, ASSET', vault_root_ptr] + # => [EMPTY_WORD_OR_MERGED_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] - # update asset in vault and assert the old value is equivalent to the value provided via the - # decorator + # update asset in vault exec.smt::set - # => [PREV_ASSET, VAULT_ROOT', CUR_VAULT_VALUE, ASSET', vault_root_ptr] + # => [PREV_VAULT_VALUE, NEW_VAULT_ROOT, CUR_VAULT_VALUE, MERGED_ASSET_VALUE] + # assert PREV_VAULT_VALUE = CUR_VAULT_VALUE to make sure peek_asset returned the correct asset movupw.2 assert_eqw.err=ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID - # => [VAULT_ROOT', ASSET', vault_root_ptr] + # => [NEW_VAULT_ROOT, MERGED_ASSET_VALUE] # update the vault root - movup.8 mem_storew_be dropw - # => [ASSET'] + loc_load.0 mem_storew_le dropw + # => [MERGED_ASSET_VALUE] + # => [ASSET_VALUE'] end #! Add the specified non-fungible asset to the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET is the non-fungible asset that is added to the vault. +#! - ASSET_KEY is the vault key of the non-fungible asset that is added to the vault. +#! - ASSET_VALUE is the non-fungible asset that is added to the vault. #! #! Panics if: #! - the vault already contains the same non-fungible asset. pub proc add_non_fungible_asset - # Build the asset key from the non-fungible asset. - # --------------------------------------------------------------------------------------------- - - dupw exec.build_non_fungible_asset_vault_key - # => [ASSET_KEY, ASSET, vault_root_ptr] - # Load VAULT_ROOT and insert asset. 
# --------------------------------------------------------------------------------------------- padw dup.12 - # => [vault_root_ptr, pad(4), ASSET_KEY, ASSET, vault_root_ptr] - mem_loadw_be swapw - # => [ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [vault_root_ptr, pad(4), ASSET_KEY, ASSET_VALUE, vault_root_ptr] + + mem_loadw_le swapw + # => [ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] + dupw.2 - # => [ASSET, ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [ASSET_VALUE, ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] # insert asset into vault exec.smt::set - # => [OLD_VAL, VAULT_ROOT', ASSET, vault_root_ptr] + # => [OLD_VAL, VAULT_ROOT', ASSET_VALUE, vault_root_ptr] # assert old value was empty padw assert_eqw.err=ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS - # => [VAULT_ROOT', ASSET, vault_root_ptr] + # => [VAULT_ROOT', ASSET_VALUE, vault_root_ptr] # update the vault root - movup.8 mem_storew_be dropw - # => [ASSET] + movup.8 mem_storew_le dropw + # => [ASSET_VALUE] end #! Add the specified asset to the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET is the asset that is added to the vault. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset that is added to the vault. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - ASSET' final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! 
- If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. -#! - the total value of two fungible assets is greater than or equal to 2^63. +#! - the total value of two fungible assets is greater than FUNGIBLE_ASSET_MAX_AMOUNT. #! - the vault already contains the same non-fungible asset. pub proc add_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # add the asset to the asset vault if.true # validate the fungible asset - exec.asset::validate_fungible_asset - # => [ASSET] + exec.fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE, vault_root_ptr] exec.add_fungible_asset - # => [ASSET'] + # => [ASSET_VALUE'] else # validate the non-fungible asset - exec.asset::validate_non_fungible_asset - # => [ASSET] + exec.non_fungible_asset::validate + # => [ASSET_KEY, ASSET_VALUE, vault_root_ptr] exec.add_non_fungible_asset - # => [ASSET'] + # => [ASSET_VALUE'] end end # REMOVE ASSET # ================================================================================================= -#! Remove the specified fungible asset from the vault. +#! Splits ASSET_VALUE off the existing asset in the vault associated with the ASSET_KEY. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! For instance, if ASSET_KEY points to a fungible asset with amount 100, and ASSET_VALUE has +#! amount 30, then a fungible asset with amount 70 remains in the vault. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the fungible asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the fungible asset to remove from the vault. +#! - ASSET_VALUE is the fungible asset that was removed from the vault. #! 
- vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Locals: -#! - 0..4: ASSET +#! - 0..4: ASSET_VALUE #! #! Panics if: #! - the amount of the asset in the vault is less than the amount to be removed. @locals(4) pub proc remove_fungible_asset - exec.build_fungible_asset_vault_key - # => [ASSET_KEY, ASSET, vault_root_ptr] - dupw movdnw.2 - # => [ASSET_KEY, ASSET, ASSET_KEY, vault_root_ptr] + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, vault_root_ptr] dup.12 movdn.4 - # => [ASSET_KEY, vault_root_ptr, ASSET, ASSET_KEY, vault_root_ptr] + # => [ASSET_KEY, vault_root_ptr, ASSET_VALUE, ASSET_KEY, vault_root_ptr] exec.peek_asset - # => [PEEKED_ASSET, ASSET, ASSET_KEY, vault_root_ptr] + # => [PEEKED_ASSET_VALUE, ASSET_VALUE, ASSET_KEY, vault_root_ptr] movdnw.2 - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - # store ASSET so we can return it later - loc_storew_be.0 - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # => [ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - dup.3 dup.12 - # => [peeked_amount, amount, ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # store ASSET_VALUE so we can return it later + loc_storew_le.0 + # => [ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - # assert amount <= peeked_amount - lte assert.err=ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW - # => [ASSET, ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - # => [[faucet_id_prefix, faucet_id_suffix, 0, amount], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + dupw.2 swapw + # => [ASSET_VALUE, PEEKED_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - dup.11 movup.4 - # => [amount, peeked_amount, [faucet_id_prefix, faucet_id_suffix, 0], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # compute PEEKED_ASSET_VALUE - ASSET_VALUE + exec.fungible_asset::split + # => [NEW_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - # compute peeked_amount - amount - sub - # => [new_amount, [faucet_id_prefix, 
faucet_id_suffix, 0], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - movdn.3 - # => [[faucet_id_prefix, faucet_id_suffix, new_amount], ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - # => [ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] - - padw dup.7 - # => [new_amount, EMPTY_WORD, ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + padw dupw.1 exec.fungible_asset::value_into_amount + # => [new_amount, EMPTY_WORD, NEW_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] eq.0 - # => [is_new_amount_zero, EMPTY_WORD, ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # => [is_new_amount_zero, EMPTY_WORD, NEW_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] # If is_new_amount_zero EMPTY_WORD remains. - # If !is_new_amount_zero ASSET' remains. + # If !is_new_amount_zero NEW_ASSET_VALUE remains. cdropw - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + # => [EMPTY_WORD_OR_NEW_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] - dup.12 padw movup.4 mem_loadw_be - # => [VAULT_ROOT, EMPTY_WORD_OR_ASSET', ASSET_KEY, PEEKED_ASSET, vault_root_ptr] + dup.12 padw movup.4 mem_loadw_le + # => [VAULT_ROOT, EMPTY_WORD_OR_NEW_ASSET_VALUE, ASSET_KEY, PEEKED_ASSET_VALUE, vault_root_ptr] movdnw.2 - # => [EMPTY_WORD_OR_ASSET', ASSET_KEY, VAULT_ROOT, PEEKED_ASSET, vault_root_ptr] + # => [EMPTY_WORD_OR_NEW_ASSET_VALUE, ASSET_KEY, VAULT_ROOT, PEEKED_ASSET_VALUE, vault_root_ptr] # update asset in vault and assert the old value is equivalent to the peeked value provided # via peek_asset exec.smt::set - # => [OLD_VALUE, NEW_VAULT_ROOT, PEEKED_ASSET, vault_root_ptr] + # => [OLD_VALUE, NEW_VAULT_ROOT, PEEKED_ASSET_VALUE, vault_root_ptr] # assert OLD_VALUE == PEEKED_ASSET movupw.2 assert_eqw.err=ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID # => [NEW_VAULT_ROOT, vault_root_ptr] # update vault root - movup.4 mem_storew_be + movup.4 mem_storew_le # => [NEW_VAULT_ROOT] - loc_loadw_be.0 - # => [ASSET] + loc_loadw_le.0 + # => [ASSET_VALUE] end 
#! Remove the specified non-fungible asset from the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Note that the ASSET_VALUE is only needed to check against the asset that was removed from the +#! vault. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the non-fungible asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the non-fungible asset to remove from the vault. +#! - ASSET_VALUE is the non-fungible asset that was removed from the vault. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Panics if: #! - the non-fungible asset is not found in the vault. pub proc remove_non_fungible_asset - # build non-fungible asset key - dupw exec.build_non_fungible_asset_vault_key padw - # => [pad(4), ASSET_KEY, ASSET, vault_root_ptr] - # load vault root - dup.12 mem_loadw_be - # => [VAULT_ROOT, ASSET_KEY, ASSET, vault_root_ptr] + padw dup.12 mem_loadw_le + # => [VAULT_ROOT, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # prepare insertion of an EMPTY_WORD into the vault at the asset key to remove the asset swapw padw - # => [EMPTY_WORD, ASSET_KEY, VAULT_ROOT, ASSET, vault_root_ptr] + # => [EMPTY_WORD, ASSET_KEY, VAULT_ROOT, ASSET_VALUE, vault_root_ptr] - # update asset in vault + # insert empty word into the vault to remove the asset exec.smt::set - # => [OLD_VAL, VAULT_ROOT', ASSET, vault_root_ptr] + # => [REMOVED_ASSET_VALUE, NEW_VAULT_ROOT, ASSET_VALUE, vault_root_ptr] - # assert old value was not empty (we only need to check ASSET[3] which is the faucet id) - eq.0 assertz.err=ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND drop drop drop - # => [VAULT_ROOT', ASSET, vault_root_ptr] + dupw.2 assert_eqw.err=ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND + # => [NEW_VAULT_ROOT, ASSET_VALUE, vault_root_ptr] # update the vault root - movup.8 mem_storew_be dropw - # => [ASSET] + movup.8 mem_storew_le dropw + # => [ASSET_VALUE] 
end #! Remove the specified asset from the vault. #! -#! Inputs: [ASSET, vault_root_ptr] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, vault_root_ptr] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the asset vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. #! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. #! #! Panics if: @@ -499,102 +390,15 @@ end #! - the non-fungible asset is not found in the vault. pub proc remove_asset # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET, vault_root_ptr] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE, vault_root_ptr] # remove the asset from the asset vault if.true exec.remove_fungible_asset - # => [ASSET] + # => [ASSET_VALUE] else exec.remove_non_fungible_asset - # => [ASSET] + # => [ASSET_VALUE] end end - -# HELPER PROCEDURES -# ================================================================================================= - -#! Returns the _peeked_ asset associated with the provided faucet_id. -#! -#! Note that ASSET can be the EMPTY_WORD if the value at the provided ASSET_KEY does not exist. -#! -#! Inputs: [ASSET_KEY, vault_root_ptr] -#! Outputs: [ASSET] -#! -#! Where: -#! - vault_root_ptr is a pointer to the memory location at which the vault root is stored. -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible -#! asset of interest. -#! - asset is the peeked asset from the vault. 
-proc peek_asset - # load the asset vault root from memory - padw movup.8 mem_loadw_be - # => [ASSET_VAULT_ROOT, ASSET_KEY] - - swapw - # => [ASSET_KEY, ASSET_VAULT_ROOT] - - # lookup asset - exec.smt::peek - # OS => [ASSET_KEY, ASSET_VAULT_ROOT] - # AS => [ASSET] - - dropw - # OS => [ASSET_VAULT_ROOT] - # AS => [ASSET] - - # this overwrites the vault root - adv_loadw - # OS => [ASSET] - # AS => [] -end - -#! Builds the vault key of a non fungible asset. The asset is NOT validated and therefore must -#! be a valid non-fungible asset. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET_KEY] -#! -#! Where: -#! - ASSET is the non-fungible asset for which the vault key is built. -#! - ASSET_KEY is the vault key of the non-fungible asset. -pub proc build_non_fungible_asset_vault_key - # create the asset key from the non-fungible asset by swapping hash0 with the faucet id - # => [faucet_id_prefix, hash2, hash1, hash0] - swap.3 - # => [hash0, hash2, hash1 faucet_id_prefix] - - # disassemble hash0 into u32 limbs - u32split swap - # => [hash0_lo, hash0_hi, hash2, hash1 faucet_id_prefix] - - # set the fungible bit to 0 - u32and.INVERSE_FUNGIBLE_BITMASK_U32 - # => [hash0_lo', hash0_hi, hash2, hash1 faucet_id_prefix] - - # reassemble hash0 felt by multiplying the high part with 2^32 and adding the lo part - swap mul.0x0100000000 add - # => [ASSET_KEY] -end - -#! TODO: Add Rust <-> MASM test. -#! -#! Builds the vault key of a fungible asset. The asset is NOT validated and therefore must -#! be a valid fungible asset. -#! -#! Inputs: [ASSET] -#! Outputs: [ASSET_KEY, ASSET] -#! -#! Where: -#! - ASSET is the fungible asset for which the vault key is built. -#! - ASSET_KEY is the vault key of the fungible asset. 
-pub proc build_fungible_asset_vault_key - # => [faucet_id_prefix, faucet_id_suffix, 0, amount] - - push.0.0 - # => [0, 0, faucet_id_prefix, faucet_id_suffix, 0, amount] - - dup.3 dup.3 - # => [faucet_id_prefix, faucet_id_suffix, 0, 0, faucet_id_prefix, faucet_id_suffix, 0, amount] -end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm b/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm new file mode 100644 index 0000000000..c54522f0d5 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/callbacks.masm @@ -0,0 +1,197 @@ +use $kernel::tx +use $kernel::asset +use $kernel::account +use miden::core::word + +# CONSTANTS +# ================================================================================================== + +# The index of the local memory slot that contains the procedure root of the callback. +const CALLBACK_PROC_ROOT_LOC = 0 + +# The name of the storage slot where the procedure root for the on_before_asset_added_to_account callback +# is stored. +pub const ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT = word("miden::protocol::faucet::callback::on_before_asset_added_to_account") + +# The name of the storage slot where the procedure root for the on_before_asset_added_to_note callback +# is stored. +pub const ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT = word("miden::protocol::faucet::callback::on_before_asset_added_to_note") + +# PROCEDURES +# ================================================================================================== + +#! Invokes the `on_before_asset_added_to_account` callback on the faucet that issued the asset, +#! if the asset has callbacks enabled. +#! +#! The callback invocation is skipped in these cases: +#! - If the global callback flag in the asset key is `Disabled`. +#! - If the faucet does not have the callback storage slot. +#! - If the callback storage slot contains the empty word. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! 
Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if callbacks are disabled. +pub proc on_before_asset_added_to_account + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY, ASSET_VALUE] + + if.true + # set custom_data = 0 + push.0 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, custom_data = 0] + + push.ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_ROOT_SLOT[0..2] + exec.invoke_callback + # => [PROCESSED_ASSET_VALUE] + else + # drop asset key + dropw + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] +end + +#! Invokes the `on_before_asset_added_to_note` callback on the faucet that issued the asset, +#! if the asset has callbacks enabled. +#! +#! The callback invocation is skipped in these cases: +#! - If the global callback flag in the asset key is `Disabled`. +#! - If the faucet does not have the callback storage slot. +#! - If the callback storage slot contains the empty word. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] +#! Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - note_idx is the index of the output note the asset is being added to. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if callbacks are disabled. +pub proc on_before_asset_added_to_note + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY, ASSET_VALUE, note_idx] + + if.true + push.ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_ROOT_SLOT[0..2] + exec.invoke_callback + # => [PROCESSED_ASSET_VALUE] + else + # drop asset key and note index + dropw movup.4 drop + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] +end + +#! 
Invokes a callback by starting a foreign context against the faucet, reading the callback +#! procedure root from the provided slot ID in the faucet's storage, and invoking it via `dyncall`. +#! +#! If the faucet does not have the callback storage slot, or if the slot contains the empty word, +#! the callback is skipped and the original ASSET_VALUE is returned. +#! +#! custom_data should be set to 0 for the account callback and to note_idx for the note callback. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE, custom_data] +#! Outputs: [PROCESSED_ASSET_VALUE] +#! +#! Where: +#! - slot_id* is the ID of the slot that contains the callback procedure root. +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - PROCESSED_ASSET_VALUE is the asset value returned by the callback, or the original +#! ASSET_VALUE if no callback is configured. +@locals(4) +proc invoke_callback + exec.start_foreign_callback_context + # => [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE, custom_data] + + # only invoke the callback if the procedure root is not the empty word + if.true + # prepare for dyncall by storing procedure root in local memory + loc_storew_le.CALLBACK_PROC_ROOT_LOC dropw + # => [ASSET_KEY, ASSET_VALUE, custom_data] + + # pad the stack to 16 for the call + repeat.7 push.0 movdn.9 end + # => [ASSET_KEY, ASSET_VALUE, custom_data, pad(7)] + + # invoke the callback + locaddr.CALLBACK_PROC_ROOT_LOC + dyncall + # => [PROCESSED_ASSET_VALUE, pad(12)] + + # truncate the stack after the call + swapdw dropw dropw swapw dropw + # => [PROCESSED_ASSET_VALUE] + else + # drop proc root, asset key and custom_data + dropw dropw movup.4 drop + # => [ASSET_VALUE] + end + # => [PROCESSED_ASSET_VALUE] + + exec.end_foreign_callback_context + # => [PROCESSED_ASSET_VALUE] +end + +#! Prepares the invocation of a faucet callback by starting a foreign context against the faucet +#! 
identified by the asset key's faucet ID, looking up the callback procedure root from the +#! faucet's storage, and computing whether the callback should be invoked. +#! +#! The callback should be invoked if the storage slot exists and contains a non-empty procedure +#! root. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - slot_id_suffix and slot_id_prefix identify the storage slot containing the callback procedure root. +#! - ASSET_KEY is the vault key of the asset being added. +#! - ASSET_VALUE is the value of the asset being added. +#! - should_invoke is 1 if the callback should be invoked, 0 otherwise. +#! - PROC_ROOT is the procedure root of the callback, or the empty word if not found. +proc start_foreign_callback_context + # move slot IDs past ASSET_KEY and ASSET_VALUE + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + exec.asset::key_to_faucet_id + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + # start a foreign context against the faucet + exec.tx::start_foreign_context + # => [ASSET_KEY, ASSET_VALUE, slot_id_suffix, slot_id_prefix] + + # bring slot IDs back to top + movup.9 movup.9 + # => [slot_id_suffix, slot_id_prefix, ASSET_KEY, ASSET_VALUE] + + # try to find the callback procedure root in the faucet's storage + exec.account::find_item + # => [is_found, PROC_ROOT, ASSET_KEY, ASSET_VALUE] + + movdn.4 exec.word::testz not + # => [is_non_empty_word, PROC_ROOT, is_found, ASSET_KEY, ASSET_VALUE] + + # should_invoke = is_found && is_non_empty_word + movup.5 and + # => [should_invoke, PROC_ROOT, ASSET_KEY, ASSET_VALUE] +end + +#! Ends a foreign callback context. +#! +#! This pops the top of the account stack, making the previous account the active account. +#! +#! This wrapper exists only for uniformity with start_foreign_callback_context. +#! +#! Inputs: [] +#! 
Outputs: [] +proc end_foreign_callback_context + exec.tx::end_foreign_context +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm b/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm index 5c58398d3c..2d64bb61ad 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/constants.masm @@ -4,8 +4,8 @@ # The number of elements in a Word pub const WORD_SIZE = 4 -# The maximum number of input values associated with a single note. -pub const MAX_INPUTS_PER_NOTE = 1024 +# The maximum number of storage items associated with a single note. +pub const MAX_NOTE_STORAGE_ITEMS = 1024 # The maximum number of assets that can be stored in a single note. pub const MAX_ASSETS_PER_NOTE = 256 @@ -14,7 +14,7 @@ pub const MAX_ASSETS_PER_NOTE = 256 pub const MAX_INPUT_NOTES_PER_TX = 1024 # The size of the memory segment allocated to each note. -pub const NOTE_MEM_SIZE = 2048 +pub const NOTE_MEM_SIZE = 3072 # The depth of the Merkle tree used to commit to notes produced in a block. 
pub const NOTE_TREE_DEPTH = 16 @@ -29,7 +29,7 @@ pub const ACCOUNT_PROCEDURE_DATA_LENGTH = 4 # ================================================================================================= # Root of an empty Sparse Merkle Tree -pub const EMPTY_SMT_ROOT = [15321474589252129342, 17373224439259377994, 15071539326562317628, 3312677166725950353] +pub const EMPTY_SMT_ROOT = [11569107685829756166, 7187477731240244145, 8326334713638926095, 2239973196746300865] # Type of storage slot item in the account storage pub const STORAGE_SLOT_TYPE_VALUE = 0 diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm b/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm index 52916708b9..4ac2b7ceea 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/epilogue.masm @@ -1,11 +1,14 @@ use $kernel::account use $kernel::account_delta +use $kernel::asset +use $kernel::asset::ASSET_SIZE use $kernel::asset_vault use $kernel::constants::NOTE_MEM_SIZE +use $kernel::fungible_asset use $kernel::memory use $kernel::note -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::word # ERRORS @@ -23,16 +26,16 @@ const ERR_EPILOGUE_NONCE_CANNOT_BE_0="nonce cannot be 0 after an account-creatin # ================================================================================================= # Event emitted to signal that the compute_fee procedure has obtained the current number of cycles. -const EPILOGUE_AFTER_TX_CYCLES_OBTAINED_EVENT=event("miden::epilogue::after_tx_cycles_obtained") +const EPILOGUE_AFTER_TX_CYCLES_OBTAINED_EVENT=event("miden::protocol::epilogue::after_tx_cycles_obtained") # Event emitted to signal that the fee was computed. 
-const EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_EVENT=event("miden::epilogue::before_tx_fee_removed_from_account") +const EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_EVENT=event("miden::protocol::epilogue::before_tx_fee_removed_from_account") # Event emitted to signal that an execution of the authentication procedure has started. -const EPILOGUE_AUTH_PROC_START_EVENT=event("miden::epilogue::auth_proc_start") +const EPILOGUE_AUTH_PROC_START_EVENT=event("miden::protocol::epilogue::auth_proc_start") # Event emitted to signal that an execution of the authentication procedure has ended. -const EPILOGUE_AUTH_PROC_END_EVENT=event("miden::epilogue::auth_proc_end") +const EPILOGUE_AUTH_PROC_END_EVENT=event("miden::protocol::epilogue::auth_proc_end") # An additional number of cyclces to account for the number of cycles that smt::set will take when # removing the computed fee from the asset vault. @@ -46,7 +49,7 @@ const SMT_SET_ADDITIONAL_CYCLES=250 # that this includes at least smt::set's best case number of cycles. # This can be _estimated_ using the transaction measurements on ExecutedTransaction and can be set # to the lowest observed value. -const NUM_POST_COMPUTE_FEE_CYCLES=500 +const NUM_POST_COMPUTE_FEE_CYCLES=608 # The number of cycles the epilogue is estimated to take after compute_fee has been executed. 
const ESTIMATED_AFTER_COMPUTE_FEE_CYCLES=NUM_POST_COMPUTE_FEE_CYCLES+SMT_SET_ADDITIONAL_CYCLES @@ -151,7 +154,7 @@ proc build_output_vault # output_notes_end_ptr] # compute the end pointer for output note asset looping - dup.3 mul.4 add swap + dup.3 mul.ASSET_SIZE add swap # => [assets_start_ptr, assets_end_ptr, output_vault_root_ptr, num_assets, note_data_ptr, # output_notes_end_ptr] @@ -168,8 +171,8 @@ proc build_output_vault # num_assets, note_data_ptr, output_notes_end_ptr] # read the output note asset from memory - padw dup.5 mem_loadw_be - # => [ASSET, output_vault_root_ptr, assets_start_ptr, assets_end_ptr, + dup.1 exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, output_vault_root_ptr, assets_start_ptr, assets_end_ptr, # output_vault_root_ptr, num_assets, note_data_ptr, output_notes_end_ptr] # insert output note asset into output vault @@ -178,7 +181,7 @@ proc build_output_vault # note_data_ptr, output_notes_end_ptr] # increment assets_start_ptr and asses if we should loop again - add.4 dup.1 dup.1 neq + add.ASSET_SIZE dup.1 dup.1 neq # => [should_loop, assets_start_ptr, assets_end_ptr, output_vault_root_ptr, num_assets, # note_data_ptr, output_notes_end_ptr] end @@ -216,7 +219,7 @@ proc execute_auth_procedure push.0 exec.memory::get_account_procedure_ptr # => [auth_procedure_ptr, AUTH_ARGS, pad(12)] - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [AUTH_PROC_ROOT, auth_procedure_ptr, AUTH_ARGS, pad(12)] # if auth procedure was called already, it must have been called by a user, which is disallowed @@ -273,23 +276,29 @@ proc compute_fee # => [verification_cost] end -#! Builds the fee asset with the provided fee amount and the native asset ID of the transaction's +#! Creates the fee asset with the provided fee amount and the native asset ID of the transaction's #! reference block as the faucet ID. #! #! Inputs: [fee_amount] -#! Outputs: [FEE_ASSET] +#! Outputs: [FEE_ASSET_KEY, FEE_ASSET_VALUE] #! #! Where: #! 
- fee_amount is the computed fee amount of the transaction in the native asset. -#! - FEE_ASSET is the fungible asset with amount set to fee_amount and the faucet ID set to the -#! native asset. -proc build_native_fee_asset +#! - FEE_ASSET_KEY is the asset vault key of the fee asset. +#! - FEE_ASSET_VALUE is the fungible asset with amount set to fee_amount and the faucet ID set to +#! the native asset. +proc create_native_fee_asset exec.memory::get_native_asset_id - # => [native_asset_id_prefix, native_asset_id_suffix, fee_amount] + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] + + # assume the fee asset does not have callbacks + # this should be addressed more holistically with a fee construction refactor + push.0 + # => [enable_callbacks, native_asset_id_suffix, native_asset_id_prefix, fee_amount] - push.0 movdn.2 - # => [native_asset_id_prefix, native_asset_id_suffix, 0, fee_amount] - # => [FEE_ASSET] + # SAFETY: native asset ID should be fungible and amount should not be exceeded + exec.fungible_asset::create_unchecked + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE] end #! Computes the fee of this transaction and removes the asset from the native account's vault. @@ -300,26 +309,33 @@ end #! check. That's okay, because the logic is entirely determined by the transaction kernel. #! #! Inputs: [] -#! Outputs: [FEE_ASSET] +#! Outputs: [native_asset_id_suffix, native_asset_id_prefix, fee_amount] #! #! Where: #! - fee_amount is the computed fee amount of the transaction in the native asset. -#! - FEE_ASSET is the fungible asset with amount set to fee_amount and the faucet ID set to the +#! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet that issues the #! native asset. #! #! Panics if: -#! - the account vault does not contain the computed fee. +#! - the account vault contains less than the computed fee. 
proc compute_and_remove_fee # compute the fee the tx needs to pay - exec.compute_fee - # => [fee_amount] + exec.compute_fee dup + # => [fee_amount, fee_amount] # build the native asset from the fee amount - exec.build_native_fee_asset - # => [FEE_ASSET] + exec.create_native_fee_asset + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] emit.EPILOGUE_BEFORE_TX_FEE_REMOVED_FROM_ACCOUNT_EVENT - # => [FEE_ASSET] + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] + + # prepare the return value + exec.asset::key_to_faucet_id + # => [native_asset_id_suffix, native_asset_id_prefix, FEE_ASSET_KEY, FEE_ASSET_VALUE, fee_amount] + + movdn.9 movdn.9 + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, native_asset_id_suffix, native_asset_id_prefix, fee_amount] # remove the fee from the native account's vault # note that this deliberately does not use account::remove_asset_from_vault, because that @@ -329,13 +345,13 @@ proc compute_and_remove_fee # commitment has already been computed and so any modifications done to the delta at this point # are essentially ignored. - # fetch the vault root - exec.memory::get_account_vault_root_ptr movdn.4 - # => [FEE_ASSET, acct_vault_root_ptr] + # fetch the vault root ptr + exec.memory::get_account_vault_root_ptr movdn.8 + # => [FEE_ASSET_KEY, FEE_ASSET_VALUE, account_vault_root_ptr, native_asset_id_suffix, native_asset_id_prefix, fee_amount] # remove the asset from the account vault - exec.asset_vault::remove_fungible_asset - # => [FEE_ASSET] + exec.asset_vault::remove_fungible_asset dropw + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] end # TRANSACTION EPILOGUE PROCEDURE @@ -355,18 +371,23 @@ end #! them in the fee and calculating is easiest when the operations are simple. #! #! Inputs: [] -#! Outputs: [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] +#! Outputs: [ +#! OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, +#! 
native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num +#! ] #! #! Where: #! - OUTPUT_NOTES_COMMITMENT is the commitment of the output notes. #! - ACCOUNT_UPDATE_COMMITMENT is the hash of the the final account commitment and account #! delta commitment. -#! - FEE_ASSET is the fungible asset used as the transaction fee. +#! - fee_amount is the computed fee amount of the transaction denominated in the native asset. +#! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet that issues the +#! native asset. #! - tx_expiration_block_num is the transaction expiration block number. #! #! Locals: #! - 0..4: OUTPUT_NOTES_COMMITMENT -#! - 4..8: FEE_ASSET +#! - 4..8: FEE_ASSET_INFO #! - 8..12: ACCOUNT_DELTA_COMMITMENT #! #! Panics if: @@ -415,7 +436,7 @@ pub proc finalize_transaction # => [OUTPUT_NOTES_COMMITMENT] # store commitment in local - loc_storew_be.0 dropw + loc_storew_le.0 dropw # => [] # ------ Compute account delta commitment ------ @@ -424,7 +445,7 @@ pub proc finalize_transaction # => [ACCOUNT_DELTA_COMMITMENT] # store commitment in local - loc_storew_be.8 + loc_storew_le.8 # => [ACCOUNT_DELTA_COMMITMENT] # ------ Assert that account was changed or notes were consumed ------ @@ -450,10 +471,14 @@ pub proc finalize_transaction # ------ Compute fees ------ exec.compute_and_remove_fee - # => [FEE_ASSET] + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount] + + # pad to word size so we can store the info as a word + push.0 movdn.3 + # => [native_asset_id_suffix, native_asset_id_prefix, fee_amount, 0] - # store fee asset in local - loc_storew_be.4 dropw + # store fee info in local memory + loc_storew_le.4 dropw # => [] # ------ Insert final account data into advice provider ------ @@ -481,27 +506,29 @@ pub proc finalize_transaction # ------ Compute and insert account update commitment ------ # load account delta commitment from local - padw loc_loadw_be.8 - # => [ACCOUNT_DELTA_COMMITMENT, 
FINAL_ACCOUNT_COMMITMENT] + padw loc_loadw_le.8 swapw + # => [FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] # insert into advice map ACCOUNT_UPDATE_COMMITMENT: (FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT), # where ACCOUNT_UPDATE_COMMITMENT = hash(FINAL_ACCOUNT_COMMITMENT || ACCOUNT_DELTA_COMMITMENT) adv.insert_hdword - # => [ACCOUNT_DELTA_COMMITMENT, FINAL_ACCOUNT_COMMITMENT] + # => [FINAL_ACCOUNT_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - exec.rpo256::merge + exec.poseidon2::merge # => [ACCOUNT_UPDATE_COMMITMENT] # ------ Build output stack ------ - exec.memory::get_expiration_block_num movdn.4 - # => [ACCOUNT_UPDATE_COMMITMENT, tx_expiration_block_num] - # load fee asset from local - padw loc_loadw_be.4 swapw - # => [ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] + padw loc_loadw_le.4 swapw + # => [ACCOUNT_UPDATE_COMMITMENT, [native_asset_id_suffix, native_asset_id_prefix, fee_amount, 0]] + + # replace 0 with expiration block num + exec.memory::get_expiration_block_num swap.8 drop + # => [ACCOUNT_UPDATE_COMMITMENT, [native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num]] # load output notes commitment from local - padw loc_loadw_be.0 - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num] + padw loc_loadw_le.0 + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm b/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm index e0cdb36314..1dca7cf99d 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/faucet.masm @@ -1,122 +1,73 @@ use $kernel::account -use $kernel::account_id use $kernel::asset use $kernel::asset_vault +use $kernel::fungible_asset +use $kernel::non_fungible_asset use $kernel::memory -# ERRORS -# 
================================================================================================= - -const ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT="asset mint operation would cause the new total supply to exceed the maximum allowed asset amount" - -const ERR_FAUCET_BURN_CANNOT_EXCEED_EXISTING_TOTAL_SUPPLY="asset amount to burn can not exceed the existing total supply" - -const ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED="failed to mint new non-fungible asset because it was already issued" - -const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET="the burn_non_fungible_asset procedure can only be called on a non-fungible faucet" - -const ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND="failed to burn non-existent non-fungible asset in the vault" - # FUNGIBLE ASSETS # ================================================================================================== #! Mints a fungible asset associated with the fungible faucet the transaction is being executed #! against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - amount is the amount of the fungible asset to mint. -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is ASSET_VALUE merged with the existing vault asset value, if any. #! #! Panics if: #! - the transaction is not being executed against a fungible faucet. -#! - the fungible asset being minted is not associated with the faucet the transaction is being -#! executed against. +#! - the fungible asset's faucet ID does not match the native account's ID. #! - the asset is not well formed. -#! - the total issuance after minting is greater than the maximum amount allowed. 
pub proc mint_fungible_asset - # assert that the asset is associated with the faucet the transaction is being executed against - # and that the asset is valid - exec.account::get_id exec.asset::validate_fungible_asset_origin - # => [ASSET] - - # get the current total issuance - exec.account::get_faucet_sysdata_slot_id exec.account::get_item - # => [TOTAL_ISSUANCE, ASSET] - - # prepare stack to ensure that minting the asset will not exceed the maximum - dup.7 dup exec.asset::get_fungible_asset_max_amount dup.3 - # => [total_issuance, max_allowed_issuance, amount, amount, TOTAL_ISSUANCE, ASSET] - - # compute difference to ensure that the total issuance will not exceed the maximum - sub lte assert.err=ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT - # => [amount, TOTAL_ISSUANCE, ASSET] - - # update the total issuance - add exec.account::get_faucet_sysdata_slot_id exec.account::set_item dropw - # => [ASSET] - - # add the asset to the input vault for asset preservation checks - dupw exec.memory::get_input_vault_root_ptr movdn.4 exec.asset_vault::add_fungible_asset dropw - # => [ASSET] + # assert that the asset was issued by the faucet the transaction is being executed against and + # that the asset is valid + exec.account::get_id + exec.fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] + + exec.memory::get_input_vault_root_ptr + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] + + # add the asset to the input vault for asset preservation + exec.asset_vault::add_fungible_asset + # => [NEW_ASSET_VALUE] end #! Burns a fungible asset associated with the fungible faucet the transaction is being executed #! against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset value to burn. #! #! Panics if: #! 
- the transaction is not being executed against a fungible faucet. #! - the fungible asset being burned is not associated with the faucet the transaction is being #! executed against. #! - the asset is not well formed. -#! - the amount being burned is greater than the total input to the transaction. proc burn_fungible_asset # assert that the asset is associated with the faucet the transaction is being executed against # and that the asset is valid - exec.account::get_id exec.asset::validate_fungible_asset_origin - # => [ASSET] - - # fetch TOTAL_ISSUANCE such that we can compute the new total issuance - exec.account::get_faucet_sysdata_slot_id exec.account::get_item - # => [TOTAL_ISSUANCE, ASSET] + exec.account::get_id + exec.fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] - # assert that the asset amount being burned is less or equal to the total issuance - dup.7 dup dup.2 lte assert.err=ERR_FAUCET_BURN_CANNOT_EXCEED_EXISTING_TOTAL_SUPPLY - # => [amount, TOTAL_ISSUANCE, ASSET] - - # compute new total issuance - sub exec.account::get_faucet_sysdata_slot_id exec.account::set_item dropw - # => [ASSET] - - # remove the asset from the input vault - dupw exec.memory::get_input_vault_root_ptr movdn.4 exec.asset_vault::remove_fungible_asset dropw - # => [ASSET] -end - -#! Returns the total issuance of the fungible faucet the transaction is being executed against. -#! -#! Inputs: [] -#! Outputs: [total_issuance] -#! -#! Where: -#! - total_issuance is the total issuance of the fungible faucet the transaction is being executed -#! against. 
-pub proc get_total_issuance - # fetch the TOTAL_ISSUANCE from storage - exec.account::get_faucet_sysdata_slot_id exec.account::get_item - # => [TOTAL_ISSUANCE] + exec.memory::get_input_vault_root_ptr + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] - # extract the total_issuance and purge the padding - movdn.3 drop drop drop - # => [total_issuance] + # remove the asset from the input vault for asset preservation + exec.asset_vault::remove_fungible_asset + # => [ASSET_VALUE] end # NON-FUNGIBLE ASSETS @@ -125,130 +76,63 @@ end #! Mints a non-fungible asset associated with the non-fungible faucet the transaction is being #! executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is identical to ASSET_VALUE. This is to maintain API uniformity with +#! mint_fungible_asset. #! #! Panics if: #! - the transaction is not being executed against a non-fungible faucet. #! - the non-fungible asset being minted is not associated with the faucet the transaction is being #! executed against. -#! - the non-fungible asset being minted already exists. 
proc mint_non_fungible_asset # assert that the asset is associated with the faucet the transaction is being executed against # and that the asset is valid - exec.account::get_id swap drop exec.asset::validate_non_fungible_asset_origin - # => [ASSET] + exec.account::get_id + exec.non_fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] - # duplicate the asset on the stack and get the non-fungible asset key - dupw dupw exec.asset_vault::build_non_fungible_asset_vault_key - # => [ASSET_KEY, ASSET, ASSET] + exec.memory::get_input_vault_root_ptr + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] - # get the faucet storage data slot - exec.account::get_faucet_sysdata_slot_id - # => [faucet_slot_name_prefix, faucet_slot_name_suffix, ASSET_KEY, ASSET, ASSET] - - # insert the non-fungible asset into the tracking SMT - exec.account::set_map_item - # => [OLD_VAL, ASSET] - - # Assert the `OLD_VAL` is an EMPTY_WORD, indicating that the non-fungible asset has not been - # issued yet. We only need to check OLD_VAL[3] as this is always set to the faucet_id_prefix - # and can not be 0. - eq.0 assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED drop drop drop - # => [ASSET] - - # add the non-fungible asset to the input vault for asset preservation checks - exec.memory::get_input_vault_root_ptr movdn.4 exec.asset_vault::add_non_fungible_asset - # => [ASSET] + # add the non-fungible asset to the input vault for asset preservation + exec.asset_vault::add_non_fungible_asset + # => [NEW_ASSET_VALUE] end #! Burns a non-fungible asset associated with the non-fungible faucet the transaction is being #! executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset value to burn. #! #! Panics if: #! 
- the transaction is not being executed against a non-fungible faucet. #! - the non-fungible asset being burned is not associated with the faucet the transaction is being #! executed against. -#! - the non-fungible asset being burned does not exist or was not provided as input to the -#! transaction via a note or the accounts vault. proc burn_non_fungible_asset - # assert that we are executing a transaction against the non-fungible faucet (access checks) - exec.account::get_id swap drop exec.account_id::is_non_fungible_faucet - assert.err=ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET - # => [ASSET] - - # get the non-fungible asset key - dupw exec.asset_vault::build_non_fungible_asset_vault_key - # => [ASSET_KEY, ASSET] - - # add an empty word to the stack to overwrite the old value with - padw swapw - # => [ASSET_KEY, EMPTY_WORD, ASSET] - - # get the faucet storage data slot - exec.account::get_faucet_sysdata_slot_id - # => [faucet_storage_data_slot, ASSET_KEY, EMPTY_WORD, ASSET] - - # remove the non-fungible asset from the tracking SMT - exec.account::set_map_item - # => [OLD_VAL, ASSET] - - # Assert the `OLD_VAL` is not an EMPTY_WORD, indicating that the non-fungible asset exists. We - # only need to check OLD_VAL[3] as this is always set to the faucet_id_prefix and can not be 0. - eq.0 not assert.err=ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND drop drop drop - # => [ASSET] - - # remove the non-fungible asset from the input vault for asset preservation checks - exec.memory::get_input_vault_root_ptr movdn.4 exec.asset_vault::remove_non_fungible_asset - # => [ASSET] -end - -#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by -#! this faucet. -#! -#! Inputs: [ASSET] -#! Outputs: [is_issued] -#! -#! Where: -#! - ASSET is the non-fungible asset that is being checked. -#! - is_issued is a boolean indicating whether the non-fungible asset has been issued. -#! -#! Panics if: -#! 
- the ASSET is a fungible asset. -#! - the ASSET is not associated with the faucet the transaction is being executed against. -pub proc is_non_fungible_asset_issued - # assert that the asset is associated with the faucet the transaction is being executed against - # and that the asset is valid - exec.account::get_id swap drop exec.asset::validate_non_fungible_asset_origin - # => [ASSET] - - # get the asset vault key from the asset - exec.asset_vault::build_non_fungible_asset_vault_key - # => [ASSET_KEY] - - # get the storage index where faucet's assets map is stored - exec.account::get_faucet_sysdata_slot_id - # => [map_slot_index, ASSET_KEY] - - # get the non-fungible asset stored by the computed account key - exec.account::get_map_item - # => [STORED_ASSET] - - # Check whether the `STORED_ASSET` is an EMPTY_WORD, indicating that the non-fungible asset has - # not been issued yet. We only need to check STORED_ASSET[3] as this is always set to the - # faucet_id_prefix and can not be 0 (in reversed stack order it will be top stack element). - # Equality of the STORED_ASSET[3] to zero will become a flag that this asset is not issued. - neq.0 movdn.3 drop drop drop - # => [is_issued] + # assert that the asset was issued by the faucet the transaction is being executed against and + # that the asset is valid + exec.account::get_id + exec.non_fungible_asset::validate_origin + # => [ASSET_KEY, ASSET_VALUE] + + # remove the non-fungible asset from the input vault for asset preservation + exec.memory::get_input_vault_root_ptr + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr] + + exec.asset_vault::remove_non_fungible_asset + # => [ASSET_VALUE] end # PUBLIC INTERFACE @@ -256,11 +140,15 @@ end #! Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! 
- ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset value to mint. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -272,27 +160,28 @@ end #! - For non-fungible faucets if the non-fungible asset being minted already exists. pub proc mint # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true # mint the fungible asset exec.mint_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] else # mint the non-fungible asset exec.mint_non_fungible_asset - # => [ASSET] + # => [NEW_ASSET_VALUE] end end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset value to burn. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -305,16 +194,16 @@ end #! provided as input to the transaction via a note or the accounts vault. 
pub proc burn # check if the asset is a fungible asset - exec.asset::is_fungible_asset - # => [is_fungible_asset, ASSET] + exec.asset::is_fungible_asset_key + # => [is_fungible_asset, ASSET_KEY, ASSET_VALUE] if.true # burn the fungible asset exec.burn_fungible_asset - # => [ASSET] + # => [ASSET_VALUE] else # burn the non-fungible asset exec.burn_non_fungible_asset - # => [ASSET] + # => [ASSET_VALUE] end end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm new file mode 100644 index 0000000000..6a89223ba4 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/fungible_asset.masm @@ -0,0 +1,195 @@ +# Contains procedures for the built-in fungible asset. + +use $kernel::account_id +use $kernel::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +use $kernel::asset + +# RE-EXPORTS +# ================================================================================================= + +pub use $kernel::util::asset::create_fungible_key->create_key +pub use $kernel::util::asset::create_fungible_asset_unchecked->create_unchecked +pub use $kernel::util::asset::fungible_to_amount->to_amount +pub use $kernel::util::asset::fungible_value_into_amount->value_into_amount + +# ERRORS +# ================================================================================================= + +const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding the fungible asset to the vault would exceed the max amount" + +const ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE = "fungible asset vault key's account ID must be of type fungible faucet" + +const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the fungible asset is not this faucet" + +const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT="fungible asset amount exceeds the maximum allowed amount" + +const ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO="fungible asset value elements 1, 2 and 3 must be zeros" + +const 
ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO="fungible asset key asset ID prefix and suffix must be zero" + +const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW="failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" + +# PROCEDURES +# ================================================================================================= + +#! Merges two fungible assets. +#! +#! WARNING: This procedure assumes the assets have been validated. +#! +#! Inputs: [ASSET_VALUE_0, ASSET_VALUE_1] +#! Outputs: [MERGED_ASSET_VALUE] +#! +#! Where: +#! - ASSET_VALUE_{0, 1} are the assets to merge. +#! - MERGED_ASSET_VALUE is the merged asset. +#! +#! Panics if: +#! - adding the two asset values would exceed FUNGIBLE_ASSET_MAX_AMOUNT. +pub proc merge + # extract amounts from assets + exec.value_into_amount movdn.4 exec.value_into_amount + # => [amount_1, amount_0] + + # compute max_add_amount = FUNGIBLE_ASSET_MAX_AMOUNT - amount_0 + # this is the amount that can at most be added to amount_0 still have a valid asset + dup push.FUNGIBLE_ASSET_MAX_AMOUNT dup.3 sub + # => [max_add_amount, amount_1, amount_1, amount_0] + + # assert it is safe to add the amounts together, i.e. amount_1 <= max_add_amount + lte assert.err=ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED + # => [amount_1, amount_0] + + # add the amounts + add + # => [merged_amount] + + # reconstruct the asset value + push.0.0.0 movup.3 + # => [MERGED_ASSET_VALUE] +end + +#! Computes ASSET_VALUE_0 - ASSET_VALUE_1 and returns the result. +#! +#! For instance, split(40, 100) returns 60. The operand order matches the `sub` instruction. +#! +#! WARNING: This procedure assumes the assets have been validated. +#! +#! Inputs: [ASSET_VALUE_1, ASSET_VALUE_0] +#! Outputs: [NEW_ASSET_VALUE_0] +#! +#! Where: +#! - ASSET_VALUE_{0, 1} are the assets to split. +#! - NEW_ASSET_VALUE_0 is the result of the split computation. +#! +#! Panics if: +#! 
- ASSET_VALUE_0 does not contain at least the amount of ASSET_VALUE_1. +pub proc split + # extract amounts from assets + exec.value_into_amount movdn.4 exec.value_into_amount swap + # => [amount_1, amount_0] + + # assert amount_1 <= amount_0 so we can safely subtract + dup dup.2 + # => [amount_0, amount_1, amount_1, amount_0] + + lte assert.err=ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW + # => [amount_1, amount_0] + + sub + # => [new_amount] + + # reconstruct the asset value + push.0.0.0 movup.3 + # => [NEW_ASSET_VALUE] +end + +#! Validates that a fungible asset is well formed. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +#! +#! Panics if: +#! - the asset key is invalid (see validate_key). +#! - the three most significant elements in the value are not 0. +#! - the amount exceeds FUNGIBLE_ASSET_MAX_AMOUNT. +pub proc validate + exec.validate_key + # => [ASSET_KEY, ASSET_VALUE] + + dupw.1 + # => [ASSET_VALUE, ASSET_KEY, ASSET_VALUE] + + # assuming the asset is valid, its layout is: + # => [amount, 0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # assert amount <= FUNGIBLE_ASSET_MAX_AMOUNT + lte.FUNGIBLE_ASSET_MAX_AMOUNT + assert.err=ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT + # => [0, 0, 0, ASSET_KEY, ASSET_VALUE] + + # assert the last three elements are zeros + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Validates that a fungible asset's vault key is well formed. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! 
- the asset key's faucet ID is not a fungible one. +pub proc validate_key + exec.asset::key_to_faucet_id + exec.account_id::validate + # => [ASSET_KEY] + + exec.asset::is_fungible_asset_key + assert.err=ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE + # => [ASSET_KEY] + + exec.asset::key_to_asset_id + # => [asset_id_suffix, asset_id_prefix, ASSET_KEY] + + eq.0 assert.err=ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO + eq.0 assert.err=ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO + # => [ASSET_KEY] +end + +#! Validates that a fungible asset is associated with the provided faucet_id. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - faucet_id_{suffix,prefix} are the suffix and prefix of the faucet's account ID. +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +pub proc validate_origin + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the fungible asset key and value are valid + exec.validate + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the origin of the asset is the faucet_id provided via the stack + exec.asset::key_to_faucet_id + # => [key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + movup.11 movup.11 + # => [faucet_id_suffix, faucet_id_prefix, key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE] + + exec.account_id::is_equal assert.err=ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN + # => [ASSET_KEY, ASSET_VALUE] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm b/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm index caa4323e13..05f5eae74d 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/link_map.masm @@ -1,4 +1,3 @@ -use 
miden::core::collections::smt use miden::core::word use $kernel::memory @@ -167,10 +166,10 @@ const GET_OPERATION_ABSENT_AT_HEAD=1 # ================================================================================================= # Event emitted when an entry is set. -const LINK_MAP_SET_EVENT=event("miden::link_map::set") +const LINK_MAP_SET_EVENT=event("miden::protocol::link_map::set") # Event emitted when an entry is fetched. -const LINK_MAP_GET_EVENT=event("miden::link_map::get") +const LINK_MAP_GET_EVENT=event("miden::protocol::link_map::get") # LINK MAP PROCEDURES # ================================================================================================= @@ -723,10 +722,10 @@ proc set_value dup movdn.5 # => [entry_ptr, VALUE0, entry_ptr, VALUE1] - add.VALUE0_OFFSET mem_storew_be dropw + add.VALUE0_OFFSET mem_storew_le dropw # => [entry_ptr, VALUE1] - add.VALUE1_OFFSET mem_storew_be dropw + add.VALUE1_OFFSET mem_storew_le dropw # => [] end @@ -750,7 +749,7 @@ proc get_value0 padw movup.4 # => [entry_ptr, pad(4)] - add.VALUE0_OFFSET mem_loadw_be + add.VALUE0_OFFSET mem_loadw_le # => [VALUE0] end @@ -762,7 +761,7 @@ proc get_value1 padw movup.4 # => [entry_ptr, pad(4)] - add.VALUE1_OFFSET mem_loadw_be + add.VALUE1_OFFSET mem_loadw_le # => [VALUE1] end @@ -771,7 +770,7 @@ end #! Inputs: [entry_ptr, KEY] #! Outputs: [] proc set_key - add.KEY_OFFSET mem_storew_be dropw + add.KEY_OFFSET mem_storew_le dropw end #! Returns the key of the entry pointer. 
@@ -782,7 +781,7 @@ proc get_key padw movup.4 # => [entry_ptr, pad(4)] - add.KEY_OFFSET mem_loadw_be + add.KEY_OFFSET mem_loadw_le # => [KEY] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm b/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm index 2d9e11ef30..a16d4e8254 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/memory.masm @@ -3,6 +3,12 @@ use $kernel::constants::MAX_ASSETS_PER_NOTE use $kernel::constants::NOTE_MEM_SIZE use miden::core::mem +# TYPE ALIASES +# ================================================================================================= + +type AccountID = struct { suffix: felt, prefix: felt } +type MemoryAddress = u32 + # ERRORS # ================================================================================================= @@ -25,36 +31,51 @@ const ERR_LINK_MAP_MAX_ENTRIES_EXCEEDED="number of link map entries exceeds maxi # ------------------------------------------------------------------------------------------------- # The memory address at which a pointer to the currently active input note is stored. -const ACTIVE_INPUT_NOTE_PTR=0 +const ACTIVE_INPUT_NOTE_PTR = 0 # The memory address at which the number of output notes is stored. -const NUM_OUTPUT_NOTES_PTR=4 - -# The memory address at which the input vault root is stored. -const INPUT_VAULT_ROOT_PTR=8 +const NUM_OUTPUT_NOTES_PTR = 1 -# The memory address at which the output vault root is stored. -const OUTPUT_VAULT_ROOT_PTR=12 +# The memory address at which the absolute expiration block number is stored. +const TX_EXPIRATION_BLOCK_NUM_PTR = 2 # The memory address at which the dirty flag of the storage commitment of the native account is # stored. # # This binary flag specifies whether the commitment is outdated: it holds 1 if some changes were # made to the account storage since the last re-computation, and 0 otherwise. 
-const NATIVE_ACCT_STORAGE_COMMITMENT_DIRTY_FLAG_PTR=16 +const NATIVE_ACCT_STORAGE_COMMITMENT_DIRTY_FLAG_PTR = 3 -# The memory address at which the absolute expiration block number is stored. -const TX_EXPIRATION_BLOCK_NUM_PTR=20 +# The memory address at which the input vault root is stored. +const INPUT_VAULT_ROOT_PTR = 4 + +# The memory address at which the output vault root is stored. +const OUTPUT_VAULT_ROOT_PTR = 8 + +# Pointer to the prefix and suffix of the ID of the foreign account which will be loaded during the +# upcoming FPI call. This ID is updated during the `prepare_fpi_call` kernel procedure. +const UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR = 12 +const UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR = UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + 1 + +# Pointer to the 16th input value (with index 15) of the foreign procedure which will be loaded +# during the upcoming FPI call. This "buffer" value helps to work around the 15 value limitation of +# the `exec_kernel_proc` kernel procedure, so that any account procedure, even if it has 16 input +# values, could be executed as foreign. +const UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR = 14 + +# Pointer to the root of the foreign procedure which will be executed during the upcoming FPI call. +# This root is updated during the `prepare_fpi_call` kernel procedure. +const UPCOMING_FOREIGN_PROCEDURE_PTR = 16 # The memory address at which the pointer to the account stack element containing the pointer to the # currently accessing account (active account) data is stored. -const ACCOUNT_STACK_TOP_PTR=24 +const ACCOUNT_STACK_TOP_PTR=20 # Pointer to the first element on the account stack. -const MIN_ACCOUNT_STACK_PTR=25 +const MIN_ACCOUNT_STACK_PTR=21 # Pointer to the last element on the account stack. 
-const MAX_ACCOUNT_STACK_PTR=88 +const MAX_ACCOUNT_STACK_PTR=84 # GLOBAL INPUTS # ------------------------------------------------------------------------------------------------- @@ -65,8 +86,10 @@ const GLOBAL_INPUTS_SECTION_OFFSET=400 # The memory address at which the transaction reference block's commitment is stored. const BLOCK_COMMITMENT_PTR=400 -# The memory address at which the native account ID is stored. -const NATIVE_ACCT_ID_PTR=404 +# The memory address at which the native account ID provided as a global transaction input is +# stored. +const GLOBAL_ACCOUNT_ID_SUFFIX_PTR = 404 +const GLOBAL_ACCOUNT_ID_PREFIX_PTR = GLOBAL_ACCOUNT_ID_SUFFIX_PTR + 1 # The memory address at which the initial account commitment is stored. const INIT_ACCOUNT_COMMITMENT_PTR=408 @@ -123,12 +146,16 @@ const VALIDATOR_KEY_COMMITMENT_PTR=824 const BLOCK_METADATA_PTR=828 # The memory address at which the fee parameters are stored. These occupy a double word. -# [native_asset_id_suffix, native_asset_id_prefix, verification_base_fee, 0] +# [0, verification_base_fee, native_asset_id_suffix, native_asset_id_prefix] # [0, 0, 0, 0] const FEE_PARAMETERS_PTR=832 # The memory address at which the verification base fee is stored. -const VERIFICATION_BASE_FEE_PTR=FEE_PARAMETERS_PTR+2 +const VERIFICATION_BASE_FEE_PTR = FEE_PARAMETERS_PTR + 1 + +# The memory address at which the native asset ID is stored. +const NATIVE_ASSET_ID_SUFFIX_PTR = FEE_PARAMETERS_PTR + 2 +const NATIVE_ASSET_ID_PREFIX_PTR = FEE_PARAMETERS_PTR + 3 # The memory address at which the note root is stored const NOTE_ROOT_PTR=840 @@ -164,13 +191,17 @@ const MAX_FOREIGN_ACCOUNT_PTR=524288 # The memory address at which the native account data is stored. const NATIVE_ACCOUNT_DATA_PTR=8192 +const NATIVE_ACCOUNT_ID_SUFFIX_PTR = NATIVE_ACCOUNT_DATA_PTR + ACCT_ID_SUFFIX_OFFSET +const NATIVE_ACCOUNT_ID_PREFIX_PTR = NATIVE_ACCOUNT_DATA_PTR + ACCT_ID_PREFIX_OFFSET # The length of the memory interval that the account data occupies. 
const ACCOUNT_DATA_LENGTH=8192 # The offsets at which the account data is stored relative to the start of the account data segment. +const ACCT_NONCE_OFFSET=0 const ACCT_ID_AND_NONCE_OFFSET=0 -const ACCT_NONCE_OFFSET=3 +const ACCT_ID_SUFFIX_OFFSET=2 +const ACCT_ID_PREFIX_OFFSET=3 const ACCT_VAULT_ROOT_OFFSET=4 const ACCT_STORAGE_COMMITMENT_OFFSET=8 const ACCT_CODE_COMMITMENT_OFFSET=12 @@ -191,10 +222,10 @@ const ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET=2340 # ------------------------------------------------------------------------------------------------- # The link map pointer at which the delta of the fungible asset vault is stored. -const ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR=532480 +pub const ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR=532480 # The link map pointer at which the delta of the non-fungible asset vault is stored. -const ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR=ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR+4 +pub const ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR=ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR+4 # The section of link map pointers where storage map deltas are stored. # This section is offset by `slot index` to get the link map ptr for the storage map @@ -221,13 +252,13 @@ const INPUT_NOTE_ID_OFFSET=0 const INPUT_NOTE_CORE_DATA_OFFSET=4 const INPUT_NOTE_SERIAL_NUM_OFFSET=4 const INPUT_NOTE_SCRIPT_ROOT_OFFSET=8 -const INPUT_NOTE_INPUTS_COMMITMENT_OFFSET=12 +const INPUT_NOTE_STORAGE_COMMITMENT_OFFSET=12 const INPUT_NOTE_ASSETS_COMMITMENT_OFFSET=16 const INPUT_NOTE_RECIPIENT_OFFSET=20 const INPUT_NOTE_METADATA_HEADER_OFFSET=24 const INPUT_NOTE_ATTACHMENT_OFFSET=28 const INPUT_NOTE_ARGS_OFFSET=32 -const INPUT_NOTE_NUM_INPUTS_OFFSET=36 +const INPUT_NOTE_NUM_STORAGE_ITEMS_OFFSET=36 const INPUT_NOTE_NUM_ASSETS_OFFSET=40 const INPUT_NOTE_ASSETS_OFFSET=44 @@ -340,7 +371,7 @@ end #! Where: #! - INPUT_VAULT_ROOT is the input vault root. pub proc get_input_vault_root - padw mem_loadw_be.INPUT_VAULT_ROOT_PTR + padw mem_loadw_le.INPUT_VAULT_ROOT_PTR end #! Sets the input vault root. 
@@ -351,7 +382,7 @@ end #! Where: #! - INPUT_VAULT_ROOT is the input vault root. pub proc set_input_vault_root - mem_storew_be.INPUT_VAULT_ROOT_PTR + mem_storew_le.INPUT_VAULT_ROOT_PTR end #! Returns the pointer to the memory address at which the output vault root is stored. @@ -374,7 +405,7 @@ end #! Where: #! - OUTPUT_VAULT_ROOT is the output vault root. pub proc get_output_vault_root - padw mem_loadw_be.OUTPUT_VAULT_ROOT_PTR + padw mem_loadw_le.OUTPUT_VAULT_ROOT_PTR end #! Sets the output vault root. @@ -385,7 +416,53 @@ end #! Where: #! - OUTPUT_VAULT_ROOT is the output vault root. pub proc set_output_vault_root - mem_storew_be.OUTPUT_VAULT_ROOT_PTR + mem_storew_le.OUTPUT_VAULT_ROOT_PTR +end + +#! Sets the ID of the foreign account which is going to be loaded during the upcoming FPI call. +#! +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix] +#! Outputs: [] +#! +#! Where: +#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign +#! account whose procedure is going to be executed. +pub proc set_fpi_account_id(foreign_account_id: AccountID) + mem_store.UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + # => [foreign_account_id_prefix] + + mem_store.UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR + # => [] +end + +#! Returns the ID of the foreign account which is going to be loaded during the upcoming FPI call. +#! +#! WARNING: The ID felts may be zero. +#! +#! Inputs: [] +#! Outputs: [foreign_account_id_suffix, foreign_account_id_prefix] +#! +#! Where: +#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign +#! account whose procedure is going to be executed. +pub proc get_fpi_account_id() -> (AccountID) + mem_load.UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR + # => [foreign_account_id_prefix] + + mem_load.UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR + # => [foreign_account_id_suffix, foreign_account_id_prefix] +end + +#! 
Sets the root of the foreign procedure which is going to be loaded during the upcoming FPI call. +#! +#! Inputs: [FOREIGN_PROC_ROOT] +#! Outputs: [FOREIGN_PROC_ROOT] +#! +#! Where: +#! - FOREIGN_PROC_ROOT is the root of the foreign procedure which will be executed during the FPI +#! call. +pub proc set_fpi_procedure_root(foreign_proc_root: word) -> word + mem_storew_le.UPCOMING_FOREIGN_PROCEDURE_PTR end # GLOBAL INPUTS @@ -399,7 +476,7 @@ end #! Where: #! - BLOCK_COMMITMENT is the commitment of the transaction reference block. pub proc set_block_commitment - mem_storew_be.BLOCK_COMMITMENT_PTR + mem_storew_le.BLOCK_COMMITMENT_PTR end #! Returns the block commitment of the reference block. @@ -410,35 +487,33 @@ end #! Where: #! - BLOCK_COMMITMENT is the commitment of the transaction reference block. pub proc get_block_commitment - padw mem_loadw_be.BLOCK_COMMITMENT_PTR + padw mem_loadw_le.BLOCK_COMMITMENT_PTR end -#! Sets the ID of the native account. +#! Sets the global ID of the native account. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. pub proc set_global_account_id - push.0.0 - # => [0, 0, account_id_prefix, account_id_suffix] - mem_storew_be.NATIVE_ACCT_ID_PTR - dropw + mem_store.GLOBAL_ACCOUNT_ID_SUFFIX_PTR + mem_store.GLOBAL_ACCOUNT_ID_PREFIX_PTR # => [] end -#! Returns the ID of the native account. +#! Returns the global ID of the native account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. 
pub proc get_global_account_id - padw mem_loadw_be.NATIVE_ACCT_ID_PTR - # => [0, 0, account_id_prefix, account_id_suffix] - drop drop + mem_load.GLOBAL_ACCOUNT_ID_PREFIX_PTR + mem_load.GLOBAL_ACCOUNT_ID_SUFFIX_PTR + # => [account_id_suffix, account_id_prefix] end #! Sets the native account commitment at the beginning of the transaction. @@ -449,7 +524,7 @@ end #! Where: #! - INIT_ACCOUNT_COMMITMENT is the initial account commitment. pub proc set_init_account_commitment - mem_storew_be.INIT_ACCOUNT_COMMITMENT_PTR + mem_storew_le.INIT_ACCOUNT_COMMITMENT_PTR end #! Returns the native account commitment at the beginning of the transaction. @@ -460,7 +535,7 @@ end #! Where: #! - INIT_ACCOUNT_COMMITMENT is the initial account commitment. pub proc get_init_account_commitment - padw mem_loadw_be.INIT_ACCOUNT_COMMITMENT_PTR + padw mem_loadw_le.INIT_ACCOUNT_COMMITMENT_PTR end #! Sets the initial account nonce. @@ -493,7 +568,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_VAULT_ROOT is the initial vault root of the native account. pub proc set_init_native_account_vault_root - mem_storew_be.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR + mem_storew_le.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR end #! Returns the vault root of the native account at the beginning of the transaction. @@ -504,7 +579,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_VAULT_ROOT is the initial vault root of the native account. pub proc get_init_native_account_vault_root - padw mem_loadw_be.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR + padw mem_loadw_le.INIT_NATIVE_ACCOUNT_VAULT_ROOT_PTR end #! Returns the memory address of the vault root of the native account at the beginning of the @@ -528,7 +603,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT is the initial storage commitment of the native account. pub proc set_init_account_storage_commitment - mem_storew_be.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR + mem_storew_le.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR end #! 
Returns the storage commitment of the native account at the beginning of the transaction. @@ -539,7 +614,7 @@ end #! Where: #! - INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT is the initial storage commitment of the native account. pub proc get_init_account_storage_commitment - padw mem_loadw_be.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR + padw mem_loadw_le.INIT_NATIVE_ACCOUNT_STORAGE_COMMITMENT_PTR end #! Returns the input notes commitment. @@ -552,7 +627,7 @@ end #! Where: #! - INPUT_NOTES_COMMITMENT is the input notes commitment. pub proc get_input_notes_commitment - padw mem_loadw_be.INPUT_NOTES_COMMITMENT_PTR + padw mem_loadw_le.INPUT_NOTES_COMMITMENT_PTR end #! Sets the input notes' commitment. @@ -563,7 +638,7 @@ end #! Where: #! - INPUT_NOTES_COMMITMENT is the notes' commitment. pub proc set_nullifier_commitment - mem_storew_be.INPUT_NOTES_COMMITMENT_PTR + mem_storew_le.INPUT_NOTES_COMMITMENT_PTR end #! Returns the memory address of the transaction script root. @@ -585,7 +660,7 @@ end #! Where: #! - TX_SCRIPT_ROOT is the transaction script root. pub proc set_tx_script_root - mem_storew_be.TX_SCRIPT_ROOT_PTR + mem_storew_le.TX_SCRIPT_ROOT_PTR end #! Returns the transaction script arguments. @@ -597,7 +672,7 @@ end #! - TX_SCRIPT_ARGS is the word of values which could be used directly or could be used to obtain #! some values associated with it from the advice map. pub proc get_tx_script_args - padw mem_loadw_be.TX_SCRIPT_ARGS_PTR + padw mem_loadw_le.TX_SCRIPT_ARGS_PTR end #! Sets the transaction script arguments. @@ -609,7 +684,7 @@ end #! - TX_SCRIPT_ARGS is the word of values which could be used directly or could be used to obtain #! some values associated with it from the advice map. pub proc set_tx_script_args - mem_storew_be.TX_SCRIPT_ARGS_PTR + mem_storew_le.TX_SCRIPT_ARGS_PTR end #! Returns the auth procedure arguments. @@ -620,7 +695,7 @@ end #! Where: #! - AUTH_ARGS is the argument passed to the auth procedure. 
pub proc get_auth_args - padw mem_loadw_be.AUTH_ARGS_PTR + padw mem_loadw_le.AUTH_ARGS_PTR end #! Sets the auth procedure arguments. @@ -631,7 +706,7 @@ end #! Where: #! - AUTH_ARGS is the argument passed to the auth procedure. pub proc set_auth_args - mem_storew_be.AUTH_ARGS_PTR + mem_storew_le.AUTH_ARGS_PTR end # BLOCK DATA @@ -656,7 +731,7 @@ end #! Where: #! - PREV_BLOCK_COMMITMENT_PTR is the block commitment of the transaction reference block. pub proc get_prev_block_commitment - padw mem_loadw_be.PREV_BLOCK_COMMITMENT_PTR + padw mem_loadw_le.PREV_BLOCK_COMMITMENT_PTR end #! Returns the block number of the transaction reference block. @@ -697,14 +772,15 @@ end #! Returns the faucet ID of the native asset as defined in the transaction's reference block. #! #! Inputs: [] -#! Outputs: [native_asset_id_prefix, native_asset_id_suffix] +#! Outputs: [native_asset_id_suffix, native_asset_id_prefix] #! #! Where: #! - native_asset_id_{prefix,suffix} are the prefix and suffix felts of the faucet ID that defines #! the native asset. pub proc get_native_asset_id - padw mem_loadw_be.FEE_PARAMETERS_PTR drop drop - # => [native_asset_id_prefix, native_asset_id_suffix] + mem_load.NATIVE_ASSET_ID_PREFIX_PTR + mem_load.NATIVE_ASSET_ID_SUFFIX_PTR + # => [native_asset_id_suffix, native_asset_id_prefix] end #! Returns the verification base fee from the transaction's reference block. @@ -727,7 +803,7 @@ end #! Where: #! - CHAIN_COMMITMENT is the chain commitment of the transaction reference block. pub proc get_chain_commitment - padw mem_loadw_be.CHAIN_COMMITMENT_PTR + padw mem_loadw_le.CHAIN_COMMITMENT_PTR end #! Returns the account db root of the transaction reference block. @@ -738,7 +814,7 @@ end #! Where: #! - ACCT_DB_ROOT is the account database root of the transaction reference block. pub proc get_account_db_root - padw mem_loadw_be.ACCT_DB_ROOT_PTR + padw mem_loadw_le.ACCT_DB_ROOT_PTR end #! Returns the nullifier db root of the transaction reference block. 
@@ -749,7 +825,7 @@ end #! Where: #! - NULLIFIER_ROOT is the nullifier root of the transaction reference block. pub proc get_nullifier_db_root - padw mem_loadw_be.NULLIFIER_ROOT_PTR + padw mem_loadw_le.NULLIFIER_ROOT_PTR end #! Returns the tx commitment of the transaction reference block. @@ -760,7 +836,7 @@ end #! Where: #! - TX_COMMITMENT is the tx commitment of the transaction reference block. pub proc get_tx_commitment - padw mem_loadw_be.TX_COMMITMENT_PTR + padw mem_loadw_le.TX_COMMITMENT_PTR end #! Returns the transaction kernel commitment of the transaction reference block. @@ -771,7 +847,7 @@ end #! Where: #! - TX_KERNEL_COMMITMENT is the sequential hash of the kernel procedures. pub proc get_tx_kernel_commitment - padw mem_loadw_be.TX_KERNEL_COMMITMENT_PTR + padw mem_loadw_le.TX_KERNEL_COMMITMENT_PTR end #! Returns the validator key commitment of the transaction reference block. @@ -782,7 +858,7 @@ end #! Where: #! - VALIDATOR_KEY_COMMITMENT is the public key commitment of the transaction reference block. pub proc get_validator_key_commitment - padw mem_loadw_be.VALIDATOR_KEY_COMMITMENT_PTR + padw mem_loadw_le.VALIDATOR_KEY_COMMITMENT_PTR end #! Returns the note root of the transaction reference block. @@ -793,7 +869,7 @@ end #! Where: #! - NOTE_ROOT is the note root of the transaction reference block. pub proc get_note_root - padw mem_loadw_be.NOTE_ROOT_PTR + padw mem_loadw_le.NOTE_ROOT_PTR end #! Sets the note root of the transaction reference block. @@ -804,7 +880,7 @@ end #! Where: #! - NOTE_ROOT is the note root of the transaction reference block. pub proc set_note_root - mem_storew_be.NOTE_ROOT_PTR + mem_storew_le.NOTE_ROOT_PTR end # CHAIN DATA @@ -1019,43 +1095,46 @@ end #! Returns the ID of the active account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. 
pub proc get_account_id - padw exec.get_active_account_data_ptr add.ACCT_ID_AND_NONCE_OFFSET mem_loadw_be - # => [nonce, 0, account_id_prefix, account_id_suffix] - drop drop - # => [account_id_prefix, account_id_suffix] + exec.get_active_account_data_ptr + # => [active_account_data_ptr] + + dup add.ACCT_ID_PREFIX_OFFSET mem_load + # => [account_id_prefix, active_account_data_ptr] + + swap add.ACCT_ID_SUFFIX_OFFSET mem_load + # => [account_id_suffix, account_id_prefix] end #! Returns the ID of the native account of the transaction. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the native account #! of the transaction. pub proc get_native_account_id - padw push.NATIVE_ACCOUNT_DATA_PTR add.ACCT_ID_AND_NONCE_OFFSET mem_loadw_be - # => [nonce, 0, account_id_prefix, account_id_suffix] - drop drop - # => [account_id_prefix, account_id_suffix] + mem_load.NATIVE_ACCOUNT_ID_PREFIX_PTR + mem_load.NATIVE_ACCOUNT_ID_SUFFIX_PTR + # => [account_id_suffix, account_id_prefix] end #! Sets the account ID and nonce. #! -#! Inputs: [nonce, 0, account_id_prefix, account_id_suffix] -#! Outputs: [nonce, 0, account_id_prefix, account_id_suffix] +#! Inputs: [nonce, 0, account_id_suffix, account_id_prefix] +#! Outputs: [nonce, 0, account_id_suffix, account_id_prefix] #! #! Where: #! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. #! - nonce is the nonce of the active account. pub proc set_account_id_and_nonce exec.get_active_account_data_ptr add.ACCT_ID_AND_NONCE_OFFSET - mem_storew_be + mem_storew_le end #! Returns the nonce of the active account. @@ -1082,20 +1161,16 @@ pub proc get_native_account_nonce mem_load end -#! Sets the nonce of the active account. +#! Sets the nonce of the native account. #! #! Inputs: [nonce] #! Outputs: [] #! #! Where: -#! 
- nonce is the nonce of the active account. -pub proc set_account_nonce - exec.get_active_account_data_ptr add.ACCT_ID_AND_NONCE_OFFSET padw - # => [0, 0, 0, 0, account_id_and_nonce_ptr, new_nonce] - dup.4 mem_loadw_be - # => [old_nonce, 0, old_id_prefix, old_id_suffix, account_id_and_nonce_ptr, new_nonce] - drop movup.4 movup.4 mem_storew_be dropw - # => [] +#! - nonce is the new nonce of the native account. +pub proc set_native_account_nonce + push.NATIVE_ACCOUNT_DATA_PTR add.ACCT_NONCE_OFFSET + mem_store end ### ACCOUNT VAULT ################################################# @@ -1121,7 +1196,7 @@ end pub proc get_account_vault_root padw exec.get_active_account_data_ptr add.ACCT_VAULT_ROOT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the account vault root. @@ -1133,7 +1208,7 @@ end #! - ACCT_VAULT_ROOT is the account vault root to be set. pub proc set_account_vault_root exec.get_active_account_data_ptr add.ACCT_VAULT_ROOT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the memory pointer to the initial vault root of the active account. @@ -1148,12 +1223,12 @@ end #! Where: #! - account_initial_vault_root_ptr is the memory pointer to the initial vault root. pub proc get_account_initial_vault_root_ptr - # For foreign account, use the regular vault root pointer since foreign accounts are read-only + # for foreign account, use the regular vault root pointer since foreign accounts are read-only # and initial == current exec.get_account_vault_root_ptr # => [account_vault_root_ptr] - # For native account, use the initial vault root pointer + # for native account, use the initial vault root pointer exec.get_init_native_account_vault_root_ptr # => [native_account_initial_vault_root_ptr, account_vault_root_ptr] @@ -1178,7 +1253,7 @@ end pub proc get_account_code_commitment padw exec.get_active_account_data_ptr add.ACCT_CODE_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the code commitment of the account. @@ -1190,7 +1265,7 @@ end #! 
- CODE_COMMITMENT is the code commitment to be set. pub proc set_account_code_commitment exec.get_active_account_data_ptr add.ACCT_CODE_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end #! Sets the transaction expiration block number. @@ -1285,7 +1360,7 @@ end pub proc get_account_storage_commitment padw exec.get_active_account_data_ptr add.ACCT_STORAGE_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the account storage commitment. @@ -1297,7 +1372,7 @@ end #! - STORAGE_COMMITMENT is the account storage commitment. pub proc set_account_storage_commitment exec.get_active_account_data_ptr add.ACCT_STORAGE_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end #! Sets the dirty flag for the native account storage commitment. @@ -1408,12 +1483,12 @@ end #! Where: #! - account_initial_storage_slots_ptr is the memory pointer to the initial storage slot values. pub proc get_account_initial_storage_slots_ptr - # For foreign account, use the regular storage slots pointer since foreign accounts are + # for foreign account, use the regular storage slots pointer since foreign accounts are # read-only and initial == current exec.get_account_active_storage_slots_section_ptr # => [account_storage_slots_ptr] - # For native account, use the initial storage slots pointer + # for native account, use the initial storage slots pointer exec.get_native_account_initial_storage_slots_ptr # => [native_account_initial_storage_slots_ptr, account_storage_slots_ptr] @@ -1428,28 +1503,6 @@ end ### ACCOUNT DELTA ################################################# -#! Returns the link map pointer to the fungible asset vault delta. -#! -#! Inputs: [] -#! Outputs: [account_delta_fungible_asset_ptr] -#! -#! Where: -#! - account_delta_fungible_asset_ptr is the link map pointer to the fungible asset vault delta. -pub proc get_account_delta_fungible_asset_ptr - push.ACCOUNT_DELTA_FUNGIBLE_ASSET_PTR -end - -#! Returns the link map pointer to the non-fungible asset vault delta. -#! -#! 
Inputs: [] -#! Outputs: [account_delta_non_fungible_asset_ptr] -#! -#! Where: -#! - account_delta_non_fungible_asset_ptr is the link map pointer to the non-fungible asset vault delta. -pub proc get_account_delta_non_fungible_asset_ptr - push.ACCOUNT_DELTA_NON_FUNGIBLE_ASSET_PTR -end - #! Returns the link map pointer to the storage map delta of the storage map in the given slot index. #! #! Inputs: [slot_idx] @@ -1526,7 +1579,7 @@ end #! - note_ptr is the input note's the memory address. #! - NOTE_ID is the note's id. pub proc set_input_note_id - mem_storew_be + mem_storew_le end #! Computes a pointer to the memory address at which the nullifier associated a note with `idx` is @@ -1551,7 +1604,7 @@ end #! - idx is the index of the input note. #! - nullifier is the nullifier of the input note. pub proc get_input_note_nullifier - mul.4 padw movup.4 add.INPUT_NOTE_NULLIFIER_SECTION_PTR mem_loadw_be + mul.4 padw movup.4 add.INPUT_NOTE_NULLIFIER_SECTION_PTR mem_loadw_le end #! Returns a pointer to the start of the input note core data segment for the note located at the @@ -1578,7 +1631,7 @@ end pub proc get_input_note_script_root padw movup.4 add.INPUT_NOTE_SCRIPT_ROOT_OFFSET - mem_loadw_be + mem_loadw_le end #! Returns the memory address of the script root of an input note. @@ -1596,15 +1649,15 @@ end #! Returns the inputs commitment of an input note located at the specified memory address. #! #! Inputs: [note_ptr] -#! Outputs: [INPUTS_COMMITMENT] +#! Outputs: [STORAGE_COMMITMENT] #! #! Where: #! - note_ptr is the memory address at which the input note data begins. -#! - INPUTS_COMMITMENT is the inputs commitment of the input note. -pub proc get_input_note_inputs_commitment +#! - STORAGE_COMMITMENT is the inputs commitment of the input note. +pub proc get_input_note_storage_commitment padw - movup.4 add.INPUT_NOTE_INPUTS_COMMITMENT_OFFSET - mem_loadw_be + movup.4 add.INPUT_NOTE_STORAGE_COMMITMENT_OFFSET + mem_loadw_le end #! 
Returns the metadata of an input note located at the specified memory address. @@ -1618,7 +1671,7 @@ end pub proc get_input_note_metadata_header padw movup.4 add.INPUT_NOTE_METADATA_HEADER_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the metadata for an input note located at the specified memory address. @@ -1631,7 +1684,7 @@ end #! - NOTE_METADATA_HEADER is the metadata header of the input note. pub proc set_input_note_metadata_header add.INPUT_NOTE_METADATA_HEADER_OFFSET - mem_storew_be + mem_storew_le end #! Returns the attachment of an input note located at the specified memory address. @@ -1645,7 +1698,7 @@ end pub proc get_input_note_attachment padw movup.4 add.INPUT_NOTE_ATTACHMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the attachment for an input note located at the specified memory address. @@ -1658,7 +1711,7 @@ end #! - NOTE_ATTACHMENT is the attachment of the input note. pub proc set_input_note_attachment add.INPUT_NOTE_ATTACHMENT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the note's args. @@ -1672,7 +1725,7 @@ end pub proc get_input_note_args padw movup.4 add.INPUT_NOTE_ARGS_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the note args for an input note located at the specified memory address. @@ -1685,32 +1738,32 @@ end #! - NOTE_ARGS are optional note args of the input note. pub proc set_input_note_args add.INPUT_NOTE_ARGS_OFFSET - mem_storew_be + mem_storew_le end #! Returns the number of inputs of the note located at the specified memory address. #! #! Inputs: [note_ptr] -#! Outputs: [num_inputs] +#! Outputs: [num_storage_items] #! #! Where: #! - note_ptr is the memory address at which the input note data begins. -#! - num_inputs is the number of inputs in in the input note. -pub proc get_input_note_num_inputs - add.INPUT_NOTE_NUM_INPUTS_OFFSET +#! - num_storage_items is the number of storage items of the input note. +pub proc get_input_note_num_storage_items + add.INPUT_NOTE_NUM_STORAGE_ITEMS_OFFSET mem_load end #! 
Sets the number of inputs for an input note located at the specified memory address. #! -#! Inputs: [note_ptr, num_inputs] +#! Inputs: [note_ptr, num_storage_items] #! Outputs: [] #! #! Where: #! - note_ptr is the memory address at which the input note data begins. -#! - num_inputs is the number of inputs in the input note. -pub proc set_input_note_num_inputs - add.INPUT_NOTE_NUM_INPUTS_OFFSET +#! - num_storage_items is the number of storage items of the input note. +pub proc set_input_note_num_storage_items + add.INPUT_NOTE_NUM_STORAGE_ITEMS_OFFSET mem_store end @@ -1760,11 +1813,11 @@ end #! #! Where: #! - note_ptr is the memory address at which the input note data begins. -#! - RECIPIENT is the commitment to the note's script, inputs and the serial number. +#! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc get_input_note_recipient padw movup.4 add.INPUT_NOTE_RECIPIENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the input note's recipient. @@ -1774,10 +1827,10 @@ end #! #! Where: #! - note_ptr is the memory address at which the output note data begins. -#! - RECIPIENT is the commitment to the note's script, inputs and the serial number. +#! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc set_input_note_recipient add.INPUT_NOTE_RECIPIENT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the assets commitment for the input note located at the specified memory address. @@ -1791,7 +1844,7 @@ end pub proc get_input_note_assets_commitment padw movup.4 add.INPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Returns the serial number for the input note located at the specified memory address. @@ -1805,7 +1858,7 @@ end pub proc get_input_note_serial_num padw movup.4 add.INPUT_NOTE_SERIAL_NUM_OFFSET - mem_loadw_be + mem_loadw_le end # OUTPUT NOTES @@ -1842,11 +1895,11 @@ end #! #! Where: #! 
- note_ptr is the memory address at which the output note data begins. -#! - RECIPIENT is the commitment to the note's script, inputs and the serial number. +#! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc get_output_note_recipient padw movup.4 add.OUTPUT_NOTE_RECIPIENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the output note's recipient. @@ -1856,10 +1909,10 @@ end #! #! Where: #! - note_ptr is the memory address at which the output note data begins. -#! - RECIPIENT is the commitment to the note's script, inputs and the serial number. +#! - RECIPIENT is the commitment to the note's script, storage and the serial number. pub proc set_output_note_recipient add.OUTPUT_NOTE_RECIPIENT_OFFSET - mem_storew_be + mem_storew_le end #! Returns the output note's metadata. @@ -1875,7 +1928,7 @@ pub proc get_output_note_metadata_header # => [0, 0, 0, 0, note_ptr] movup.4 add.OUTPUT_NOTE_METADATA_HEADER_OFFSET # => [(note_ptr + offset), 0, 0, 0, 0] - mem_loadw_be + mem_loadw_le # => [METADATA_HEADER] end @@ -1889,7 +1942,7 @@ end #! - note_ptr is the memory address at which the output note data begins. pub proc set_output_note_metadata_header add.OUTPUT_NOTE_METADATA_HEADER_OFFSET - mem_storew_be + mem_storew_le end #! Sets the output note's attachment kind and scheme in the metadata header. @@ -1916,7 +1969,7 @@ end pub proc get_output_note_attachment padw movup.4 add.OUTPUT_NOTE_ATTACHMENT_OFFSET - mem_loadw_be + mem_loadw_le # => [ATTACHMENT] end @@ -1930,7 +1983,7 @@ end #! - note_ptr is the memory address at which the output note data begins. pub proc set_output_note_attachment add.OUTPUT_NOTE_ATTACHMENT_OFFSET - mem_storew_be + mem_storew_le dropw end @@ -1967,6 +2020,24 @@ pub proc set_output_note_num_assets mem_store end +#! Increments the number of assets in the output note by 1. +#! +#! Inputs: [note_ptr] +#! Outputs: [] +#! +#! Where: +#! - note_ptr is the memory address at which the output note data begins. +#! 
+#! Panics if: +#! - the number of assets exceeds the maximum allowed number of assets per note. +pub proc increment_output_note_num_assets + dup exec.get_output_note_num_assets add.1 + # => [num_assets + 1, note_ptr] + + swap exec.set_output_note_num_assets + # => [] +end + #! Returns the dirty flag for the assets commitment. #! #! This binary flag specifies whether the assets commitment stored in the specified note is @@ -2020,7 +2091,7 @@ end pub proc get_output_note_assets_commitment padw movup.4 add.OUTPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_loadw_be + mem_loadw_le end #! Sets the output note assets commitment for the output note located at the specified memory @@ -2034,7 +2105,7 @@ end #! - ASSETS_COMMITMENT is the sequential hash of the padded assets of an output note. pub proc set_output_note_assets_commitment add.OUTPUT_NOTE_ASSETS_COMMITMENT_OFFSET - mem_storew_be + mem_storew_le end # KERNEL DATA @@ -2123,9 +2194,9 @@ pub proc link_map_malloc add.LINK_MAP_REGION_START_PTR # => [entry_ptr] - # If entry_ptr is the end_ptr the entry would be allocated in the next memory region so + # if entry_ptr is the end_ptr the entry would be allocated in the next memory region so # we must abort. - # We can use neq because of how the end ptr is chosen. See its docs for details. + # we can use neq because of how the end ptr is chosen. See its docs for details. 
dup neq.LINK_MAP_REGION_END_PTR assert.err=ERR_LINK_MAP_MAX_ENTRIES_EXCEEDED # => [entry_ptr] end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm b/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm new file mode 100644 index 0000000000..bfef314d73 --- /dev/null +++ b/crates/miden-protocol/asm/kernels/transaction/lib/non_fungible_asset.masm @@ -0,0 +1,92 @@ +use $kernel::account_id +use $kernel::asset + +# ERRORS +# ================================================================================================= + +const ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE = "non-fungible asset vault key's account ID must be of type non-fungible faucet" + +const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN="the origin of the non-fungible asset is not this faucet" + +const ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0="the asset ID suffix in a non-fungible asset's vault key must match hash0 of the asset value" + +const ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1="the asset ID prefix in a non-fungible asset's vault key must match hash1 of the asset value" + +# PROCEDURES +# ================================================================================================= + +#! Validates that a non fungible asset is well formed. +#! +#! The value itself is not validated since any value is valid. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! - ASSET_VALUE is the value of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! - the asset key's faucet ID is not a non-fungible one. +#! - the asset ID suffix of the key does not match hash0 of the value. +#! - the asset ID prefix of the key does not match hash1 of the value. 
+pub proc validate + exec.validate_key + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix, hash0, hash1, hash2, hash3] + + # assert that hash0 matches asset_id_suffix and hash1 matches asset_id_prefix + dup.4 dup.1 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0 + dup.5 dup.2 assert_eq.err=ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1 + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix, ASSET_VALUE] +end + +#! Validates that a non fungible asset's key is well formed. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key of the asset to validate. +#! +#! Panics if: +#! - the asset key's account ID is not valid. +#! - the asset key's faucet ID is not a non-fungible one. +pub proc validate_key + exec.asset::key_to_faucet_id + exec.account_id::validate + # => [ASSET_KEY] + + exec.asset::is_non_fungible_asset_key + assert.err=ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE + # => [ASSET_KEY] +end + +#! Validates that a non-fungible asset is associated with the provided faucet_id. +#! +#! The value itself is not validated since any value is valid. +#! +#! Inputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - faucet_id_{suffix,prefix} are the suffix and prefix of the faucet's account ID. +#! - ASSET_KEY is the vault key of the asset to validate. 
+pub proc validate_origin + movdn.9 movdn.9 + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the non-fungible asset key is valid + exec.validate + # => [ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + # assert the origin of the asset is the faucet_id provided via the stack + exec.asset::key_to_faucet_id + # => [key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE, faucet_id_suffix, faucet_id_prefix] + + movup.11 movup.11 + # => [faucet_id_suffix, faucet_id_prefix, key_faucet_id_suffix, key_faucet_id_prefix, ASSET_KEY, ASSET_VALUE] + + exec.account_id::is_equal assert.err=ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN + # => [ASSET_KEY, ASSET_VALUE] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/note.masm b/crates/miden-protocol/asm/kernels/transaction/lib/note.masm index 581d6598d7..a2fa70b19b 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/note.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/note.masm @@ -1,5 +1,6 @@ -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 +use $kernel::asset::ASSET_SIZE use $kernel::constants::NOTE_MEM_SIZE use $kernel::memory @@ -102,52 +103,20 @@ pub proc compute_output_note_assets_commitment # => [note_data_ptr] # duplicate note pointer and fetch num_assets - dup dup exec.memory::get_output_note_num_assets - # => [num_assets, note_data_ptr, note_data_ptr] - - # calculate the number of pairs of assets (takes ceiling if we have an odd number) - add.1 - u32assert.err=ERR_NOTE_NUM_OF_ASSETS_EXCEED_LIMIT - u32div.2 - # => [num_asset_pairs, note_data_ptr, note_data_ptr] - - # initiate counter for assets - push.0 - # => [asset_counter, num_asset_pairs, note_data_ptr, note_data_ptr] - - # prepare address and stack for reading assets - movup.2 exec.memory::get_output_note_asset_data_ptr padw padw padw - # => [PAD, PAD, PAD, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # check if 
we should loop - dup.14 dup.14 neq - # => [should_loop, PAD, PAD, PAD, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # loop and read assets from memory - while.true - # read assets from memory. - # if this is the last permutation of the loop and we have an odd number of assets then we - # implicitly pad the last word of the hasher rate with zeros by reading from empty memory. - mem_stream exec.rpo256::permute - # => [PERM, PERM, PERM, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # check if we should loop again - movup.13 add.1 dup movdn.14 dup.15 neq - # => [should_loop, PERM, PERM, PERM, asset_data_ptr, asset_counter, num_asset_pairs, - # note_data_ptr] - end - - # extract digest - exec.rpo256::squeeze_digest - # => [ASSETS_COMMITMENT, asset_data_ptr, asset_counter, num_asset_pairs, note_data_ptr] - - # drop accessory variables from stack - movup.4 drop - movup.4 drop - movup.4 drop + dup exec.memory::get_output_note_asset_data_ptr + # => [assets_ptr, note_data_ptr] + + dup.1 exec.memory::get_output_note_num_assets + # => [num_assets, assets_ptr, note_data_ptr] + + # compute the asset_end_ptr + mul.ASSET_SIZE dup.1 add swap + # => [assets_ptr, assets_end_ptr, note_data_ptr] + + exec.poseidon2::hash_double_words # => [ASSETS_COMMITMENT, note_data_ptr] - # save the assets hash to memory + # save the assets commitment to memory dup.4 exec.memory::set_output_note_assets_commitment # => [ASSETS_COMMITMENT, note_data_ptr] @@ -166,7 +135,7 @@ end #! #! The note ID is computed as follows: #! - we define, recipient = -#! hash(hash(hash(serial_num, [0; 4]), script_root), input_commitment) +#! hash(hash(hash(serial_num, [0; 4]), script_root), storage_commitment) #! - we then compute the output note ID as: #! hash(recipient, assets_commitment) #! @@ -177,24 +146,19 @@ end #! - note_data_ptr is a pointer to the data section of the output note. #! - NOTE_ID is the ID of the output note located at note_data_ptr. 
proc compute_output_note_id - # pad capacity elements of hasher - padw - # => [EMPTY_WORD, note_data_ptr] + # compute assets commitment + dup exec.compute_output_note_assets_commitment + # => [ASSETS_COMMITMENT, note_data_ptr] - # insert output note recipient into the first four elements of the hasher rate dup.4 exec.memory::get_output_note_recipient - # => [RECIPIENT, EMPTY_WORD, note_data_ptr] + # => [RECIPIENT, ASSETS_COMMITMENT, note_data_ptr] - # populate the last four elements of the hasher rate with the output note's asset commitment - dup.8 exec.compute_output_note_assets_commitment - # => [ASSETS_COMMITMENT, RECIPIENT, EMPTY_WORD, note_data_ptr] - - # compute output note commitment (which is also note ID) and extract digest - exec.rpo256::permute exec.rpo256::squeeze_digest + # compute output note ID + exec.poseidon2::merge # => [NOTE_ID, note_data_ptr] # save the output note commitment (note ID) to memory - movup.4 mem_storew_be + movup.4 mem_storew_le # => [NOTE_ID] end @@ -216,52 +180,56 @@ pub proc compute_output_notes_commitment # => [current_index = 0, num_notes] # prepare stack for hashing - padw padw padw - # => [PERM, PERM, PERM, current_index, num_notes] + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, current_index, num_notes] # starting looping if num_notes != 0 dup.13 neq.0 - # => [should_loop, PERM, PERM, PERM, current_index, num_notes] + # => [should_loop, RATE0, RATE1, CAPACITY, current_index, num_notes] # loop and hash output notes while.true dup.12 exec.memory::get_output_note_ptr - # => [current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + # => [current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] # compute and save output note ID to memory (this also computes the note's asset commitment) dup exec.compute_output_note_id - # => [NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + # => [NOTE_ID, current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] - dup.4 
exec.memory::get_output_note_metadata_header - # => [NOTE_METADATA_HEADER, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + dup.4 exec.memory::get_output_note_attachment + # => [NOTE_ATTACHMENT, NOTE_ID, current_note_ptr, RATE0, RATE1, CAPACITY, current_index, num_notes] - movup.8 exec.memory::get_output_note_attachment - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + movup.8 exec.memory::get_output_note_metadata_header + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, NOTE_ID, RATE0, RATE1, CAPACITY, current_index, num_notes] # compute hash(NOTE_METADATA_HEADER || NOTE_ATTACHMENT) - exec.rpo256::merge - # => [NOTE_METADATA_COMMITMENT, NOTE_ID, current_note_ptr, PERM, PERM, PERM, current_index, num_notes] + exec.poseidon2::merge + # => [NOTE_METADATA_COMMITMENT, NOTE_ID, RATE0, RATE1, CAPACITY, current_index, num_notes] # replace rate words with note ID and metadata commitment swapdw dropw dropw - # => [NOTE_METADATA_COMMITMENT, NOTE_ID, PERM, current_index, num_notes] + # => [NOTE_METADATA_COMMITMENT, NOTE_ID, CAPACITY, current_index, num_notes] + + # move note ID to the top of the stack + swapw + # => [NOTE_ID, NOTE_METADATA_COMMITMENT, CAPACITY, current_index, num_notes] - # permute over (note_id, note_metadata_commitment) - exec.rpo256::permute - # => [PERM, PERM, PERM, current_index, num_notes] + # permute over (NOTE_ID, NOTE_METADATA_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, current_index, num_notes] # increment current_index movup.12 add.1 movdn.12 - # => [PERM, PERM, PERM, current_index + 1, num_notes] + # => [RATE0, RATE1, CAPACITY, current_index + 1, num_notes] # continue looping if current_index != num_notes dup.13 dup.13 neq - # => [should_loop, PERM, PERM, PERM, current_index + 1, num_notes] + # => [should_loop, RATE0, RATE1, CAPACITY, current_index + 1, num_notes] end - # => [PERM, PERM, PERM, current_index + 1, num_notes] + # => 
[RATE0, RATE1, CAPACITY, current_index + 1, num_notes] # extract digest - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [OUTPUT_NOTES_COMMITMENT, current_index + 1, num_notes] # drop accessory variables from stack diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm b/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm index 654e8ae6a3..c8a80d5f06 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/output_note.masm @@ -1,9 +1,14 @@ use $kernel::account +use $kernel::asset +use $kernel::callbacks +use $kernel::fungible_asset use $kernel::memory use $kernel::note -use $kernel::asset use $kernel::constants::MAX_OUTPUT_NOTES_PER_TX -use miden::core::mem +use $kernel::util::note::ATTACHMENT_KIND_NONE +use $kernel::util::note::ATTACHMENT_KIND_ARRAY +use $kernel::asset::ASSET_SIZE +use $kernel::asset::ASSET_VALUE_MEMORY_OFFSET use miden::core::word # CONSTANTS @@ -12,12 +17,6 @@ use miden::core::word # Constants for different note types const PUBLIC_NOTE=1 # 0b01 const PRIVATE_NOTE=2 # 0b10 -const ENCRYPTED_NOTE=3 # 0b11 - -# Constants for note attachment kinds -const ATTACHMENT_KIND_NONE=0 -const ATTACHMENT_KIND_WORD=1 -const ATTACHMENT_KIND_ARRAY=2 # The default value of the felt at index 3 in the note metadata header when a new note is created. # All zeros sets the attachment kind to None and the user-defined attachment scheme to "none". 
@@ -45,8 +44,6 @@ const ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_BE_EMPTY_WORD="attachment kind N const ERR_NOTE_INVALID_INDEX="failed to find note at the given index; index must be within [0, num_of_notes]" -const ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED="adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807" - const ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS="non-fungible asset that already exists in the note cannot be added again" const ERR_NOTE_TAG_MUST_BE_U32="the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero" @@ -55,17 +52,17 @@ const ERR_NOTE_TAG_MUST_BE_U32="the note's tag must fit into a u32 so the 32 mos # ================================================================================================= # Event emitted before a new note is created. -const NOTE_BEFORE_CREATED_EVENT=event("miden::note::before_created") +const NOTE_BEFORE_CREATED_EVENT=event("miden::protocol::note::before_created") # Event emitted after a new note is created. 
-const NOTE_AFTER_CREATED_EVENT=event("miden::note::after_created") +const NOTE_AFTER_CREATED_EVENT=event("miden::protocol::note::after_created") -# Event emitted before an ASSET is added to a note -const NOTE_BEFORE_ADD_ASSET_EVENT=event("miden::note::before_add_asset") -# Event emitted after an ASSET is added to a note -const NOTE_AFTER_ADD_ASSET_EVENT=event("miden::note::after_add_asset") +# Event emitted before an asset is added to a note +const NOTE_BEFORE_ADD_ASSET_EVENT=event("miden::protocol::note::before_add_asset") +# Event emitted after an asset is added to a note +const NOTE_AFTER_ADD_ASSET_EVENT=event("miden::protocol::note::after_add_asset") # Event emitted before an ATTACHMENT is added to a note -const NOTE_BEFORE_SET_ATTACHMENT_EVENT=event("miden::note::before_set_attachment") +const NOTE_BEFORE_SET_ATTACHMENT_EVENT=event("miden::protocol::note::before_set_attachment") # OUTPUT NOTE PROCEDURES # ================================================================================================= @@ -160,15 +157,15 @@ pub proc get_assets_info # word further to make the assets number even (the same way it is done in the # `note::compute_output_note_assets_commitment` procedure) movup.4 exec.memory::get_output_note_asset_data_ptr - # => [assets_data_ptr, ASSETS_COMMITMENT, num_assets] + # => [assets_start_ptr, ASSETS_COMMITMENT, num_assets] - dup dup.6 dup is_odd add - # => [padded_num_assets, assets_data_ptr, assets_data_ptr, ASSETS_COMMITMENT, num_assets] + movdn.4 dup.4 + # => [assets_start_ptr, ASSETS_COMMITMENT, assets_start_ptr, num_assets] - mul.4 add - # => [assets_end_ptr, assets_start_ptr, ASSETS_COMMITMENT, num_assets] + dup.6 mul.ASSET_SIZE add + # => [assets_end_ptr, ASSETS_COMMITMENT, assets_start_ptr, num_assets] - movdn.5 movdn.4 + movdn.5 # => [ASSETS_COMMITMENT, assets_start_ptr, assets_end_ptr, num_assets] # store the assets data to the advice map using ASSETS_COMMITMENT as a key @@ -180,68 +177,64 @@ pub proc get_assets_info # => 
[ASSETS_COMMITMENT, num_assets] end -#! Adds the ASSET to the note specified by the index. +#! Adds the asset to the note specified by the index. #! -#! Inputs: [note_idx, ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] #! Outputs: [] #! #! Where: +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. #! #! Panics if: #! - the note index points to a non-existent output note. -#! - the ASSET is malformed (e.g., invalid faucet ID). +#! - the asset key or value are malformed (e.g., invalid faucet ID). #! - the max amount of fungible assets is exceeded. #! - the non-fungible asset already exists in the note. #! - the total number of ASSETs exceeds the maximum of 256. pub proc add_asset # check if the note exists, it must be within [0, num_of_notes] - dup exec.memory::get_num_output_notes lte assert.err=ERR_NOTE_INVALID_INDEX - # => [note_idx, ASSET] + dup.8 exec.memory::get_num_output_notes lte assert.err=ERR_NOTE_INVALID_INDEX + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # get a pointer to the memory address of the note at which the asset will be stored - dup movdn.5 exec.memory::get_output_note_ptr - # => [note_ptr, ASSET, note_idx] + # validate the asset + exec.asset::validate + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # get current num of assets - dup exec.memory::get_output_note_num_assets movdn.5 - # => [note_ptr, ASSET, num_of_assets, note_idx] + # emit event to signal that a new asset is going to be added to the note + emit.NOTE_BEFORE_ADD_ASSET_EVENT + # => [ASSET_KEY, ASSET_VALUE, note_idx] - # validate the ASSET - movdn.4 exec.asset::validate_asset - # => [ASSET, note_ptr, num_of_assets, note_idx] + # prepare the stack for the callback + swapw dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, note_idx] - # emit event to signal that a new asset is going to be added to the note. 
- emit.NOTE_BEFORE_ADD_ASSET_EVENT - # => [ASSET, note_ptr, num_of_assets, note_idx] + dup.12 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx, ASSET_KEY, note_idx] - # Check if ASSET to add is fungible - exec.asset::is_fungible_asset - # => [is_fungible_asset?, ASSET, note_ptr, num_of_assets, note_idx] + # invoke the callback + exec.callbacks::on_before_asset_added_to_note + swapw + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, note_idx] - if.true - # ASSET to add is fungible - exec.add_fungible_asset - # => [note_ptr, note_idx] - else - # ASSET to add is non-fungible - exec.add_non_fungible_asset - # => [note_ptr, note_idx] - end - # => [note_ptr, note_idx] + movup.8 exec.memory::get_output_note_ptr dup + # => [note_ptr, note_ptr, ASSET_KEY, PROCESSED_ASSET_VALUE] - # update the assets commitment dirty flag to signal that the current assets commitment is not - # valid anymore - push.1 swap exec.memory::set_output_note_dirty_flag - # => [note_idx] + movdn.9 movdn.9 + # => [ASSET_KEY, PROCESSED_ASSET_VALUE, note_ptr, note_ptr] + + # add the asset to the note + exec.add_asset_raw + # => [note_ptr] - # emit event to signal that a new asset was added to the note. + # emit event to signal that a new asset was added to the note emit.NOTE_AFTER_ADD_ASSET_EVENT - # => [note_idx] + # => [note_ptr] - # drop the note index - drop + # update the assets commitment dirty flag to signal that the current assets commitment is not + # valid anymore + push.1 swap exec.memory::set_output_note_dirty_flag # => [] end @@ -340,20 +333,19 @@ pub proc build_metadata_header # Merge note type and sender ID suffix. 
# -------------------------------------------------------------------------------------------- - exec.account::get_id swap + exec.account::get_id # => [sender_id_suffix, sender_id_prefix, tag, note_type] # the lower bits of an account ID suffix are guaranteed to be zero, so we can safely use that # space to encode the note type - movup.3 add swap - # => [sender_id_prefix, sender_id_suffix_and_note_type, tag] + movup.3 add + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag] # Build metadata header. # -------------------------------------------------------------------------------------------- - movup.2 - push.ATTACHMENT_DEFAULT_KIND_AND_SCHEME - # => [attachment_kind_scheme, tag, sender_id_prefix, sender_id_suffix_and_note_type] + push.ATTACHMENT_DEFAULT_KIND_AND_SCHEME movdn.3 + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] # => [NOTE_METADATA_HEADER] end @@ -462,154 +454,124 @@ proc increment_num_output_notes # => [note_idx] end -#! Adds a fungible asset to a note. If the note already holds an asset issued by the same faucet id -#! the two quantities are summed up and the new quantity is stored at the old position in the note. -#! In the other case, the asset is stored at the next available position. -#! Returns the pointer to the note the asset was stored at. +#! Adds the asset to the note specified by the ptr. +#! +#! This procedure attempts to find an asset with the same key in the note's assets. +#! - If the asset is not found, the asset is appended at the end and the number of assets is +#! incremented. +#! - If the asset is found and the asset is +#! - fungible: the existing asset and the new asset are merged together. +#! - non-fungible: the procedure panics since non-fungible assets cannot be merged. #! -#! Inputs: [ASSET, note_ptr, num_of_assets, note_idx] -#! Outputs: [note_ptr] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_ptr] +#! Outputs: [] #! #! Where: -#! 
- ASSET is the fungible asset to be added to the note. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! - note_ptr is the pointer to the note the asset will be added to. -#! - num_of_assets is the current number of assets. -#! - note_idx is the index of the note the asset will be added to. #! #! Panics if -#! - the summed amounts exceed the maximum amount of fungible assets. -proc add_fungible_asset - dup.4 exec.memory::get_output_note_asset_data_ptr - # => [asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] +#! - asset is fungible and adding the two asset values would exceed FUNGIBLE_ASSET_MAX_AMOUNT. +#! - asset is non-fungible and the note already contains an asset with the same key. +proc add_asset_raw + dup.8 exec.memory::get_output_note_asset_data_ptr movdn.8 + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] + + # get the number of assets in the output note + dup.9 exec.memory::get_output_note_num_assets + # => [num_assets, ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] # compute the pointer at which we should stop iterating - dup dup.7 mul.4 add - # => [end_asset_ptr, asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + mul.ASSET_SIZE dup.9 add movdn.9 + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, asset_end_ptr, note_ptr] - # reorganize and pad the stack, prepare for the loop - movdn.5 movdn.5 padw dup.9 - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # initialize loop variable is_existing_asset to false + push.0 movdn.8 + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the loop latch - dup dup.10 neq - # => [latch, asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] + # enter loop if asset_ptr != asset_end_ptr + dup.10 dup.10 neq + # => [has_assets, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] while.true - mem_loadw_be - 
# => [STORED_ASSET, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - dup.4 eq - # => [are_equal, 0, 0, stored_amount, ASSET, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - if.true - # add the asset quantity, we don't overflow here, bc both ASSETs are valid. - movup.2 movup.6 add - # => [updated_amount, 0, 0, faucet_id, 0, 0, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - # check that we don't overflow bc we use lte - dup exec.asset::get_fungible_asset_max_amount lte - assert.err=ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED - # => [updated_amount, 0, 0, faucet_id, 0, 0, end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets, note_idx] - - # prepare stack to store the "updated" ASSET'' with the new quantity - movdn.5 - # => [0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # decrease num_of_assets by 1 to offset incrementing it later - movup.9 sub.1 movdn.9 - # => [0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, num_of_assets - 1, note_idx] - - # end the loop we add 0's to the stack to have the correct number of elements - push.0.0 dup.9 push.0 - # => [0, asset_ptr, 0, 0, 0, 0, ASSET'', end_asset_ptr, asset_ptr, note_ptr, - # num_of_assets - 1, note_idx] - else - # => [0, 0, stored_amount, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] - - # drop ASSETs and increment the asset pointer - movup.2 drop push.0.0 movup.9 add.4 dup movdn.10 - # => [asset_ptr + 4, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr + 4, note_ptr, - # num_of_assets, note_idx] - - # check if we reached the end of the loop - dup dup.10 neq - end - end - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - # prepare stack for storing the ASSET - movdn.4 dropw - # => [asset_ptr, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # Store the fungible asset, either the combined ASSET or the new ASSET - mem_storew_be dropw 
drop drop - # => [note_ptr, num_of_assets, note_idx] - - # increase the number of assets in the note - swap add.1 dup.1 exec.memory::set_output_note_num_assets - # => [note_ptr, note_idx] -end + padw dup.13 mem_loadw_le + # => [STORED_ASSET_KEY, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] -#! Adds a non-fungible asset to a note at the next available position. -#! Returns the pointer to the note the asset was stored at. -#! -#! Inputs: [ASSET, note_ptr, num_of_assets, note_idx] -#! Outputs: [note_ptr, note_idx] -#! -#! Where: -#! - ASSET is the non-fungible asset to be added to the note. -#! - note_ptr is the pointer to the note the asset will be added to. -#! - num_of_assets is the current number of assets. -#! - note_idx is the index of the note the asset will be added to. -#! -#! Panics if: -#! - the non-fungible asset already exists in the note. -proc add_non_fungible_asset - dup.4 exec.memory::get_output_note_asset_data_ptr - # => [asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + exec.word::test_eq + # => [is_matching_key, STORED_ASSET_KEY, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the pointer at which we should stop iterating - dup dup.7 mul.4 add - # => [end_asset_ptr, asset_ptr, ASSET, note_ptr, num_of_assets, note_idx] + # set is_existing_asset = is_matching_key + swap.13 drop dropw + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # reorganize and pad the stack, prepare for the loop - movdn.5 movdn.5 padw dup.9 - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # if is_existing_asset, increment asset_ptr by 0 (so the ptr points to the existing asset + # after the loop) + # if !is_existing_asset, increment asset_ptr by ASSET_SIZE + dup.8 not mul.ASSET_SIZE + # => [asset_size_or_0, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - # compute the loop 
latch - dup dup.10 neq - # => [latch, asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, - # note_idx] + # compute asset_ptr + asset_size_or_0 + movup.10 add movdn.9 + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] - while.true - # load the asset and compare - mem_loadw_be exec.word::test_eq - assertz.err=ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS - # => [ASSET', ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] - - # drop ASSET' and increment the asset pointer - dropw movup.5 add.4 dup movdn.6 padw movup.4 - # => [asset_ptr + 4, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr + 4, note_ptr, - # num_of_assets, note_idx] - - # check if we reached the end of the loop - dup dup.10 neq + # continue looping if (!is_existing_asset) && asset_ptr != asset_end_ptr + dup.10 dup.10 neq + # => [is_end_reached, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] + + dup.9 not and + # => [should_loop, ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] end - # => [asset_ptr, 0, 0, 0, 0, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, asset_end_ptr, note_ptr] + + # after the loop: + # if is_existing_asset: asset_ptr points to the entry where the existing asset is stored + # if !is_existing_asset: asset_ptr points to the entry where the asset can be appended, + # i.e. 
asset_ptr = asset_end_ptr - # prepare stack for storing the ASSET - movdn.4 dropw - # => [asset_ptr, ASSET, end_asset_ptr, asset_ptr, note_ptr, num_of_assets, note_idx] + # discard asset end ptr + movup.10 drop + # => [ASSET_KEY, ASSET_VALUE, is_existing_asset, asset_ptr, note_ptr] - # end of the loop reached, no error so we can store the non-fungible asset - mem_storew_be dropw drop drop - # => [note_ptr, num_of_assets, note_idx] + movup.8 + # => [is_existing_asset, ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] - # increase the number of assets in the note - swap add.1 dup.1 exec.memory::set_output_note_num_assets - # => [note_ptr, note_idx] + if.true + # if the asset exists, do not increment num assets + + # abort if the asset is non-fungible since it cannot be merged + exec.asset::is_fungible_asset_key + assert.err=ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS + # => [ASSET_KEY, ASSET_VALUE, asset_ptr, note_ptr] + + # if the asset is fungible, merge the asset values + # overwrite asset key on the stack with the stored asset value + # note that asset_ptr already stores ASSET_KEY so there is no need to overwrite it + dup.8 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [STORED_ASSET_VALUE, ASSET_VALUE, asset_ptr, note_ptr] + + # merge the two fungible assets + exec.fungible_asset::merge + # => [MERGED_ASSET_VALUE, asset_ptr, note_ptr] + + # store the merged asset value + movup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw drop + # => [] + else + # if the asset does not exist, increment num assets and append the asset + + # increment number of assets + # this panics if the max allowed number of assets is exceeded + # this implicitly validates that asset_ptr is not out of bounds + movup.9 exec.memory::increment_output_note_num_assets + # => [ASSET_KEY, ASSET_VALUE, asset_ptr] + + # store ASSET_KEY + dup.8 mem_storew_le dropw + # => [ASSET_VALUE, asset_ptr] + + # store ASSET_VALUE + movup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw + # => [] + end end diff 
--git a/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm b/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm index bb0ce6e9d5..9381fb6359 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/prologue.masm @@ -1,19 +1,18 @@ use miden::core::mem use miden::core::collections::mmr -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::word use $kernel::account -use $kernel::account_delta use $kernel::account_id use $kernel::asset_vault +use $kernel::asset::ASSET_SIZE +use $kernel::asset use $kernel::constants::EMPTY_SMT_ROOT use $kernel::constants::MAX_ASSETS_PER_NOTE use $kernel::constants::MAX_INPUT_NOTES_PER_TX -use $kernel::constants::MAX_INPUTS_PER_NOTE +use $kernel::constants::MAX_NOTE_STORAGE_ITEMS use $kernel::constants::NOTE_TREE_DEPTH -use $kernel::constants::STORAGE_SLOT_TYPE_MAP -use $kernel::constants::STORAGE_SLOT_TYPE_VALUE use $kernel::memory # CONSTS @@ -26,7 +25,7 @@ const MAX_BLOCK_NUM=0xFFFFFFFF #================================================================================================= # Emission of an equivalent to `ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT`, use in `add_input_note_assets_to_vault` -const ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT=event("miden::account::vault_before_add_asset") +const ACCOUNT_VAULT_BEFORE_ADD_ASSET_EVENT=event("miden::protocol::account::vault_before_add_asset") # ERRORS # ================================================================================================= @@ -41,14 +40,6 @@ const ERR_PROLOGUE_VERIFICATION_BASE_FEE_MUST_BE_U32="verification base fee must const ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY="new account must have an empty vault" -const ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY="reserved slot for new fungible faucet is not empty" - -const ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE="reserved slot for new 
fungible faucet has an invalid type" - -const ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPTY_SMT="reserved slot for non-fungible faucet is not a valid empty SMT" - -const ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE="reserved slot for new non-fungible faucet has an invalid type" - const ERR_PROLOGUE_PROVIDED_ACCOUNT_DATA_DOES_NOT_MATCH_ON_CHAIN_COMMITMENT="account data provided does not match the commitment recorded on-chain" const ERR_PROLOGUE_EXISTING_ACCOUNT_MUST_HAVE_NON_ZERO_NONCE="existing accounts must have a non-zero nonce" @@ -67,7 +58,7 @@ const ERR_PROLOGUE_INPUT_NOTES_COMMITMENT_MISMATCH="note commitment computed fro const ERR_PROLOGUE_NEW_ACCOUNT_NONCE_MUST_BE_ZERO="new account must have a zero nonce" -const ERR_PROLOGUE_NUMBER_OF_NOTE_INPUTS_EXCEEDED_LIMIT="number of note inputs exceeded the maximum limit of 1024" +const ERR_PROLOGUE_NUMBER_OF_NOTE_STORAGE_ITEMS_EXCEEDED_LIMIT="number of note storage items exceeded the maximum limit of 1024" const ERR_PROLOGUE_NOTE_AUTHENTICATION_FAILED="failed to authenticate note inclusion in block" @@ -153,7 +144,7 @@ proc process_kernel_data # AS => [] # extract the resulting hash - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # OS => [SEQ_KERNEL_PROC_HASH, kernel_procs_ptr', TX_KERNEL_COMMITMENT] # AS => [] @@ -179,7 +170,7 @@ end #! TX_KERNEL_COMMITMENT #! VALIDATOR_KEY_COMMITMENT, #! [block_num, version, timestamp, 0], -#! [native_asset_id_suffix, native_asset_id_prefix, verification_base_fee, 0], +#! [0, verification_base_fee, native_asset_id_suffix, native_asset_id_prefix], #! [0, 0, 0, 0], #! NOTE_ROOT, #! ] @@ -209,22 +200,25 @@ proc process_block_data # read block data and compute its sub commitment # see `Advice stack` above for details. 
- padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [SUB_COMMITMENT, block_data_ptr', block_num] - # store the note root in memory - padw adv_loadw - dupw exec.memory::set_note_root dropw + # load and store the note root + padw adv_loadw exec.memory::set_note_root # => [NOTE_ROOT, SUB_COMMITMENT, block_data_ptr', block_num] + # move the note root to the top of the stack for merging + swapw + # => [SUB_COMMITMENT, NOTE_ROOT, block_data_ptr', block_num] + # merge the note root with the sub commitment to get the block commitment - exec.rpo256::merge + exec.poseidon2::merge # => [BLOCK_COMMITMENT, block_data_ptr', block_num] # assert that the block commitment matches the commitment in global inputs @@ -319,50 +313,6 @@ proc validate_new_account assert_eqw.err=ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY # => [] - # Assert faucet reserved slot is correctly initialized - # --------------------------------------------------------------------------------------------- - # check if the account is a faucet - exec.account::get_id swap drop dup exec.account_id::is_faucet - # => [is_faucet, acct_id_prefix] - - # process conditional logic depending on whether the account is a faucet - if.true - # get the faucet reserved slot - exec.account::get_faucet_sysdata_slot_id exec.account::get_typed_item - # => [FAUCET_RESERVED_SLOT, slot_type, acct_id_prefix] - - # check if the account is a fungible faucet - movup.5 exec.account_id::is_fungible_faucet - # => [is_fungible_faucet, FAUCET_RESERVED_SLOT, slot_type] - - if.true - # assert the fungible faucet reserved slot is initialized correctly 
(EMPTY_WORD) - exec.word::eqz not - assertz.err=ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY - # => [slot_type] - - # assert the fungible faucet reserved slot type == value - push.STORAGE_SLOT_TYPE_VALUE eq - assert.err=ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE - # => [] - else - # assert the non-fungible faucet reserved slot is initialized correctly (root of - # empty SMT) - push.EMPTY_SMT_ROOT - assert_eqw.err=ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPTY_SMT - # => [slot_type] - - # assert the non-fungible faucet reserved slot type == map - push.STORAGE_SLOT_TYPE_MAP eq - assert.err=ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE - # => [] - end - else - # drop the hi part of the ID - drop - # => [] - end - # Assert the provided account seed satisfies the seed requirements # --------------------------------------------------------------------------------------------- exec.account::validate_seed @@ -387,7 +337,7 @@ end #! Inputs: #! Operand stack: [] #! Advice stack: [ -#! account_id_suffix, account_id_prefix, 0, account_nonce, +#! account_nonce, 0, account_id_suffix, account_id_prefix #! ACCOUNT_VAULT_ROOT, #! ACCOUNT_STORAGE_COMMITMENT, #! ACCOUNT_CODE_COMMITMENT @@ -415,10 +365,10 @@ proc process_account_data # => [acct_data_ptr] # read account details and compute its digest. See `Advice stack` above for details. 
- padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [ACCOUNT_COMMITMENT, acct_data_ptr'] movup.4 drop @@ -542,23 +492,21 @@ proc authenticate_note # --------------------------------------------------------------------------------------------- # read data from advice and compute hash(BLOCK_SUB_COMMITMENT || NOTE_ROOT) - padw padw padw - adv_pipe exec.rpo256::permute - # => [PERM, COMPUTED_BLOCK_COMMITMENT, PERM, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] - - dropw - # => [COMPUTED_BLOCK_COMMITMENT, PERM, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest + # => [COMPUTED_BLOCK_COMMITMENT, mem_ptr', BLOCK_COMMITMENT, NOTE_COMMITMENT] # assert the computed block commitment matches - movup.8 drop movupw.2 + movup.4 drop assert_eqw.err=ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR - # => [PERM, NOTE_COMMITMENT] + # => [NOTE_COMMITMENT] # Authenticate the NOTE_COMMITMENT # --------------------------------------------------------------------------------------------- # load the note root from memory - loc_loadw_be.4 swapw + padw loc_loadw_le.4 swapw # => [NOTE_COMMITMENT, NOTE_ROOT] # load the index of the note @@ -584,7 +532,7 @@ end #! Advice stack: [ #! SERIAL_NUMBER, #! SCRIPT_ROOT, -#! INPUTS_COMMITMENT, +#! STORAGE_COMMITMENT, #! ASSETS_COMMITMENT, #! ] #! Outputs: @@ -595,19 +543,19 @@ end #! - note_ptr is the memory location for the input note. #! - SERIAL_NUMBER is the note's serial. #! - SCRIPT_ROOT is the note's script root. -#! - INPUTS_COMMITMENT is the sequential hash of the padded note's inputs. +#! - STORAGE_COMMITMENT is the sequential hash of the padded note's storage. #! 
- ASSETS_COMMITMENT is the sequential hash of the padded note's assets. #! - NULLIFIER is the result of -#! `hash(SERIAL_NUMBER || SCRIPT_ROOT || INPUTS_COMMITMENT || ASSETS_COMMITMENT)`. +#! `hash(SERIAL_NUMBER || SCRIPT_ROOT || STORAGE_COMMITMENT || ASSETS_COMMITMENT)`. proc process_input_note_details exec.memory::get_input_note_core_ptr # => [note_data_ptr] # read input note's data and compute its digest. See `Advice stack` above for details. - padw padw padw - adv_pipe exec.rpo256::permute - adv_pipe exec.rpo256::permute - exec.rpo256::squeeze_digest + exec.poseidon2::init_no_padding + adv_pipe exec.poseidon2::permute + adv_pipe exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [NULLIFIER, note_data_ptr + 16] movup.4 drop @@ -629,50 +577,51 @@ end #! #! Inputs: #! Operand stack: [note_ptr] -#! Advice stack: [NOTE_ARGS, NOTE_METADATA_HEADER, NOTE_ATTACHMENT] +#! Advice stack: [NOTE_ARGS, NOTE_ATTACHMENT, NOTE_METADATA_HEADER] #! Outputs: -#! Operand stack: [NOTE_ATTACHMENT, NOTE_METADATA_HEADER] +#! Operand stack: [NOTE_METADATA_HEADER, NOTE_ATTACHMENT] #! Advice stack: [] #! #! Where: #! - note_ptr is the memory location for the input note. #! - NOTE_ARGS are the user arguments passed to the note. -#! - NOTE_METADATA_HEADER is the note's metadata. +#! - NOTE_METADATA_HEADER is the note's metadata header. +#! - NOTE_ATTACHMENT is the note's attachment. proc process_note_args_and_metadata padw adv_loadw dup.4 exec.memory::set_input_note_args dropw # => [note_ptr] - padw adv_loadw dup.4 exec.memory::set_input_note_metadata_header - # => [NOTE_METADATA_HEADER, note_ptr] + padw adv_loadw dup.4 exec.memory::set_input_note_attachment + # => [NOTE_ATTACHMENT] - padw adv_loadw movup.8 exec.memory::set_input_note_attachment - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER] + padw adv_loadw movup.8 exec.memory::set_input_note_metadata_header + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, note_ptr] end -#! 
Checks that the number of note inputs is within limit and stores it to memory. +#! Checks that the number of note storage is within limit and stores it to memory. #! #! Inputs: #! Operand stack: [note_ptr] -#! Advice stack: [inputs_len] +#! Advice stack: [num_storage_items] #! Outputs: #! Operand stack: [note_ptr] #! Advice stack: [] #! #! Where: #! - note_ptr is the memory location for the input note. -#! - inputs_len is the note's input count. -proc process_note_inputs_length - # move the inputs length from the advice stack to the operand stack +#! - num_storage_items is the note's number of storage items. +proc process_note_num_storage_items + # move the number of storage items from the advice stack to the operand stack adv_push.1 - # => [inputs_len, note_ptr] + # => [num_storage_items, note_ptr] - # validate the input length - dup push.MAX_INPUTS_PER_NOTE lte - assert.err=ERR_PROLOGUE_NUMBER_OF_NOTE_INPUTS_EXCEEDED_LIMIT - # => [inputs_len, note_ptr] + # validate the number of storage items + dup push.MAX_NOTE_STORAGE_ITEMS lte + assert.err=ERR_PROLOGUE_NUMBER_OF_NOTE_STORAGE_ITEMS_EXCEEDED_LIMIT + # => [num_storage_items, note_ptr] - # store the inputs length into the memory - dup.1 exec.memory::set_input_note_num_inputs + # store the number of storage items into the memory + dup.1 exec.memory::set_input_note_num_storage_items # => [note_ptr] end @@ -680,83 +629,51 @@ end #! #! Inputs: #! Operand stack: [note_ptr] -#! Advice stack: [assets_count, ASSET_0, ..., ASSET_N] +#! Advice stack: [num_assets, ASSET_KEY_0, ASSET_VALUE_0, ..., ASSET_KEY_N, ASSET_VALUE_N] #! Outputs: #! Operand stack: [] #! Advice stack: [] #! #! Where: #! - note_ptr is the memory location for the input note. -#! - assets_count is the note's assets count. -#! - ASSET_0, ..., ASSET_N are the padded note's assets. +#! - num_assets is the number of note assets. +#! - ASSET_KEY_0, ASSET_VALUE_0, ..., ASSET_KEY_N, ASSET_VALUE_N are the note's assets. 
proc process_note_assets - # verify and save the assets count + # Validate num_assets and setup commitment computation. # --------------------------------------------------------------------------------------------- adv_push.1 - # => [assets_count, note_ptr] + # => [num_assets, note_ptr] - dup push.MAX_ASSETS_PER_NOTE lte + dup lte.MAX_ASSETS_PER_NOTE assert.err=ERR_PROLOGUE_NUMBER_OF_NOTE_ASSETS_EXCEEDS_LIMIT - # => [assets_count, note_ptr] + # => [num_assets, note_ptr] dup dup.2 exec.memory::set_input_note_num_assets - # => [assets_count, note_ptr] + # => [num_assets, note_ptr] - # round up the number of assets, to the its padded length - dup u32and.1 add - # => [rounded_num_assets, note_ptr] + dup.1 exec.memory::get_input_note_assets_ptr swap + # => [num_assets, assets_ptr, note_ptr] - # read the note's assets - # --------------------------------------------------------------------------------------------- + mul.ASSET_SIZE dup.1 add swap + # => [assets_ptr, assets_end_ptr, note_ptr] - # Stack organization: - # - Top of the stack contains the hash state. The complete state is needed to extract the final - # hash. - # - Followed by the assets_ptr, with the target address used to pipe data from the advice - # provider. - # - Followed by a copy of the note_ptr for later use. - # - Followed by the loop variables, the current counter and rounded_num_assets, laid at this - # depth because dup.15 is an efficient operation. 
+ exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, assets_ptr, assets_end_ptr, note_ptr] - push.0 movup.2 - # => [note_ptr, counter, rounded_num_assets] - - dup exec.memory::get_input_note_assets_ptr - # => [assets_ptr, note_ptr, counter, rounded_num_assets] - - padw padw padw - # => [PERM, PERM, PERM, assets_ptr, note_ptr, counter, rounded_num_assets] - - # loop condition: counter != rounded_num_assets - dup.15 dup.15 neq - # => [should_loop, PERM, PERM, PERM, assets_ptr, note_ptr, counter, rounded_num_assets] - - # loop and read assets from the advice provider - while.true - # read data and compute its digest. See `Advice stack` above for details. - adv_pipe exec.rpo256::permute - # => [PERM, PERM, PERM, assets_ptr+8, note_ptr, counter, rounded_num_assets] - - # update counter - swapw.3 movup.2 add.2 movdn.2 swapw.3 - # => [PERM, PERM, PERM, assets_ptr+8, note_ptr, counter+2, rounded_num_assets] - - # loop condition: counter != rounded_num_assets - dup.15 dup.15 neq - # => [should_loop, PERM, PERM, PERM, assets_ptr+8, note_ptr, counter+2, rounded_num_assets] - end - # => [PERM, PERM, PERM, assets_ptr+8n, note_ptr, counter+2n, rounded_num_assets] - - exec.rpo256::squeeze_digest - # => [ASSET_COMMITMENT_COMPUTED, assets_ptr+8n, note_ptr, counter+2n, rounded_num_assets] + # Compute assets commitment and validate. 
+ # --------------------------------------------------------------------------------------------- - swapw drop movdn.2 drop drop - # => [note_ptr, ASSET_COMMITMENT_COMPUTED] + exec.mem::pipe_double_words_to_memory + exec.poseidon2::squeeze_digest + # => [COMPUTED_ASSETS_COMMITMENT, assets_ptr, note_ptr] - # VERIFY: computed ASSET_COMMITMENT matches the provided hash - exec.memory::get_input_note_assets_commitment + # assert COMPUTED_ASSETS_COMMITMENT matches the provided commitment + movup.6 exec.memory::get_input_note_assets_commitment assert_eqw.err=ERR_PROLOGUE_PROVIDED_INPUT_ASSETS_INFO_DOES_NOT_MATCH_ITS_COMMITMENT + # => [assets_ptr] + + drop # => [] end @@ -777,7 +694,7 @@ proc add_input_note_assets_to_vault dup.1 exec.memory::get_input_note_assets_ptr # => [assets_start_ptr, input_vault_root_ptr, note_ptr] - dup movup.3 exec.memory::get_input_note_num_assets mul.4 add swap + dup movup.3 exec.memory::get_input_note_num_assets mul.ASSET_SIZE add swap # => [assets_start_ptr, assets_end_ptr, input_vault_root_ptr] # add input note's assets to input vault @@ -791,20 +708,21 @@ proc add_input_note_assets_to_vault dup.2 # => [input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] - padw dup.5 mem_loadw_be - # => [ASSET, input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] + # load asset key and value + dup.1 exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, input_vault_root_ptr, assets_start_ptr, assets_end_ptr, input_vault_root_ptr] # the witnesses for the note assets should be added prior to transaction execution and so # there should be no need to fetch them lazily via an event. 
exec.asset_vault::add_asset dropw # => [assets_start_ptr, assets_end_ptr, input_vault_root_ptr] - add.4 - # => [assets_start_ptr+4, assets_end_ptr, input_vault_root_ptr] + add.ASSET_SIZE + # => [assets_start_ptr+ASSET_SIZE, assets_end_ptr, input_vault_root_ptr] # loop condition: assets_start_ptr != assets_end_ptr dup.1 dup.1 neq - # => [should_loop, assets_start_ptr+4, assets_end_ptr, input_vault_root_ptr] + # => [should_loop, assets_start_ptr+ASSET_SIZE, assets_end_ptr, input_vault_root_ptr] end drop drop drop @@ -818,26 +736,38 @@ end #! #! Where: #! - note_ptr is the memory location for the input note. -#! - NOTE_ID is the note's id, i.e. `hash(RECIPIENT || ASSET_COMMITMENT)`. +#! - NOTE_ID is the note's id, i.e. `hash(RECIPIENT || ASSETS_COMMITMENT)`. proc compute_input_note_id + # load all inputs on the stack + dup exec.memory::get_input_note_assets_commitment + dup.4 exec.memory::get_input_note_storage_commitment + dup.8 exec.memory::get_input_note_script_root + # => [SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + + dup.12 padw + # => [EMPTY_WORD, note_ptr, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + + movup.4 exec.memory::get_input_note_serial_num + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] + # compute SERIAL_COMMITMENT: hash(SERIAL_NUMBER || EMPTY_WORD) - dup exec.memory::get_input_note_serial_num padw exec.rpo256::merge - # => [SERIAL_COMMITMENT, note_ptr] + exec.poseidon2::merge + # => [SERIAL_COMMITMENT, SCRIPT_ROOT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] - # compute MERGE_SCRIPT: hash(SERIAL_COMMITMENT || SCRIPT_ROOT) - dup.4 exec.memory::get_input_note_script_root exec.rpo256::merge - # => [MERGE_SCRIPT, note_ptr] + # compute SERIAL_SCRIPT_COMMITMENT: hash(SERIAL_COMMITMENT || SCRIPT_ROOT) + exec.poseidon2::merge + # => [SERIAL_SCRIPT_COMMITMENT, STORAGE_COMMITMENT, ASSETS_COMMITMENT, note_ptr] - # compute RECIPIENT: hash(MERGE_SCRIPT || INPUT_COMMITMENT) - 
dup.4 exec.memory::get_input_note_inputs_commitment exec.rpo256::merge - # => [RECIPIENT, note_ptr] + # compute RECIPIENT: hash(SERIAL_SCRIPT_COMMITMENT || STORAGE_COMMITMENT) + exec.poseidon2::merge + # => [RECIPIENT, ASSETS_COMMITMENT, note_ptr] # store the recipient in memory - dup.4 exec.memory::set_input_note_recipient - # => [RECIPIENT, note_ptr] + movup.8 exec.memory::set_input_note_recipient + # => [RECIPIENT, ASSETS_COMMITMENT, note_ptr] - # compute NOTE_ID: hash(RECIPIENT || ASSET_COMMITMENT) - movup.4 exec.memory::get_input_note_assets_commitment exec.rpo256::merge + # compute NOTE_ID: hash(RECIPIENT || ASSETS_COMMITMENT) + exec.poseidon2::merge # => [NOTE_ID] end @@ -849,16 +779,16 @@ end #! commitment can be extracted. #! #! Inputs: -#! Operand stack: [idx, HASHER_CAPACITY] +#! Operand stack: [idx, CAPACITY] #! Advice stack: [ #! SERIAL_NUMBER, #! SCRIPT_ROOT, -#! INPUTS_COMMITMENT, +#! STORAGE_COMMITMENT, #! ASSETS_COMMITMENT, #! NOTE_ARGS, -#! NOTE_METADATA_HEADER, #! NOTE_ATTACHMENT, -#! assets_count, +#! NOTE_METADATA_HEADER, +#! num_assets, #! ASSET_0, ..., ASSET_N, #! is_authenticated, #! ( @@ -868,21 +798,20 @@ end #! )? #! ] #! Outputs: -#! Operand stack: [PERM, PERM, PERM] +#! Operand stack: [RATE0, RATE1, CAPACITY] #! Advice stack: [] #! #! Where: #! - idx is the index of the input note. -#! - HASHER_CAPACITY is the state of the hasher capacity word, with the commitment to the previous -#! notes. +#! - CAPACITY is the state of the hasher capacity word, with the commitment to the previous notes. #! - SERIAL_NUMBER is the note's serial. #! - SCRIPT_ROOT is the note's script root. -#! - INPUTS_COMMITMENT is the sequential hash of the padded note's inputs. +#! - STORAGE_COMMITMENT is the sequential hash of the padded note's storage. #! - ASSETS_COMMITMENT is the sequential hash of the padded note's assets. #! - NOTE_METADATA_HEADER is the note's metadata header. #! - NOTE_ATTACHMENT is the note's attachment. #! 
- NOTE_ARGS are the user arguments passed to the note. -#! - assets_count is the note's assets count. +#! - num_assets is the number of note assets. #! - ASSET_0, ..., ASSET_N are the padded note's assets. #! - is_authenticated is the boolean indicating if the note contains an authentication proof. #! - optional values, required if `is_authenticated` is true: @@ -894,78 +823,81 @@ proc process_input_note # --------------------------------------------------------------------------------------------- dup exec.memory::get_input_note_ptr dup - # => [note_ptr, note_ptr, idx, HASHER_CAPACITY] + # => [note_ptr, note_ptr, idx, CAPACITY] exec.process_input_note_details - # => [NULLIFIER, note_ptr, idx, HASHER_CAPACITY] + # => [NULLIFIER, note_ptr, idx, CAPACITY] # save NULLIFIER to memory - movup.5 exec.memory::get_input_note_nullifier_ptr mem_storew_be - # => [NULLIFIER, note_ptr, HASHER_CAPACITY] + movup.5 exec.memory::get_input_note_nullifier_ptr mem_storew_le + # => [NULLIFIER, note_ptr, CAPACITY] # note metadata & args # --------------------------------------------------------------------------------------------- movup.4 - # => [note_ptr, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NULLIFIER, CAPACITY] dup exec.process_note_args_and_metadata - # => [NOTE_ATTACHMENT, NOTE_METADATA_HEADER, note_ptr, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_METADATA_HEADER, NOTE_ATTACHMENT, note_ptr, NULLIFIER, CAPACITY] # compute hash(NOTE_METADATA_HEADER || NOTE_ATTACHMENT) - exec.rpo256::merge - # => [NOTE_METADATA_COMMITMENT, note_ptr, NULLIFIER, HASHER_CAPACITY] + exec.poseidon2::merge + # => [NOTE_METADATA_COMMITMENT, note_ptr, NULLIFIER, CAPACITY] movup.4 - # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] - # note inputs len + # note number of storage items # --------------------------------------------------------------------------------------------- - exec.process_note_inputs_length - # => 
[note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + exec.process_note_num_storage_items + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note assets # --------------------------------------------------------------------------------------------- dup exec.process_note_assets dup exec.add_input_note_assets_to_vault - # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note id # --------------------------------------------------------------------------------------------- dup exec.compute_input_note_id - # => [NOTE_ID, note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_ID, note_ptr, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # save note id to memory movup.4 exec.memory::set_input_note_id - # => [NOTE_ID, NOTE_METADATA_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_ID, NOTE_METADATA_COMMITMENT, NULLIFIER, CAPACITY] # note authentication # --------------------------------------------------------------------------------------------- # NOTE_COMMITMENT: `hash(NOTE_ID || NOTE_METADATA_COMMITMENT)` - swapw exec.rpo256::merge - # => [NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + exec.poseidon2::merge + # => [NOTE_COMMITMENT, NULLIFIER, CAPACITY] adv_push.1 - # => [is_authenticated, NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [is_authenticated, NOTE_COMMITMENT, NULLIFIER, CAPACITY] if.true - # => [NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [NOTE_COMMITMENT, NULLIFIER, CAPACITY] exec.authenticate_note - # => [NULLIFIER, HASHER_CAPACITY] + # => [NULLIFIER, CAPACITY] padw - # => [EMPTY_WORD, NULLIFIER, HASHER_CAPACITY] + # => [EMPTY_WORD, NULLIFIER, CAPACITY] end - # => [EMPTY_WORD_OR_NOTE_COMMITMENT, NULLIFIER, HASHER_CAPACITY] + # => [EMPTY_WORD_OR_NOTE_COMMITMENT, NULLIFIER, CAPACITY] - # update the input note commitment - exec.rpo256::permute - # => [PERM, PERM, PERM] + swapw + # => [NULLIFIER, 
EMPTY_WORD_OR_NOTE_COMMITMENT, CAPACITY] + + # update the input notes commitment with hash(NULLIFIER || EMPTY_WORD_OR_NOTE_COMMITMENT) + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY] end #! Process the input notes data provided via the advice provider. This involves reading the data @@ -1030,12 +962,14 @@ proc process_input_notes_data # - The [idx, num_notes] pair is kept in a word boundary, so that its word can be swapped with a # cheap swapw.3 instruction to update the `idx` counter. - push.0 padw padw padw - # => [PERM, PERM, PERM, idx, num_notes] + # start at note index 0 + push.0 + exec.poseidon2::init_no_padding + # => [RATE0, RATE1, CAPACITY, idx, num_notes] # loop condition: idx != num_notes dup.13 dup.13 neq - # => [has_more_notes, PERM, PERM, PERM, idx, num_notes] + # => [has_more_notes, RATE0, RATE1, CAPACITY, idx, num_notes] while.true # the hasher operates in overwrite mode, so discard the rate words, and keep the capacity @@ -1044,18 +978,18 @@ proc process_input_notes_data # process the note dup.4 exec.process_input_note - # => [PERM, PERM, PERM, idx, num_notes] + # => [RATE0, RATE1, CAPACITY, idx, num_notes] # update the idx counter swapw.3 add.1 swapw.3 - # => [PERM, PERM, PERM, idx+1, num_notes] + # => [RATE0, RATE1, CAPACITY, idx+1, num_notes] # loop condition: idx != num_notes dup.13 dup.13 neq - # => [has_more_notes, PERM, PERM, PERM, idx+1, num_notes] + # => [has_more_notes, RATE0, RATE1, CAPACITY, idx+1, num_notes] end - exec.rpo256::squeeze_digest + exec.poseidon2::squeeze_digest # => [INPUT_NOTES_COMMITMENT, idx+1, num_notes] # assert the input notes and the commitment matches @@ -1155,9 +1089,11 @@ end #! TX_KERNEL_COMMITMENT #! VALIDATOR_KEY_COMMITMENT, #! [block_num, version, timestamp, 0], +#! [0, verification_base_fee, native_asset_id_suffix, native_asset_id_prefix] +#! [0, 0, 0, 0] #! NOTE_ROOT, #! kernel_version -#! [account_id_suffix, account_id_prefix, 0, account_nonce], +#! 
[account_nonce, 0, account_id_suffix, account_id_prefix] #! ACCOUNT_VAULT_ROOT, #! ACCOUNT_STORAGE_COMMITMENT, #! ACCOUNT_CODE_COMMITMENT, diff --git a/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm b/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm index df4c0e9d42..aaed2d01fc 100644 --- a/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm +++ b/crates/miden-protocol/asm/kernels/transaction/lib/tx.masm @@ -1,3 +1,4 @@ +use $kernel::account use $kernel::memory use $kernel::note @@ -143,3 +144,92 @@ pub proc get_expiration_delta exec.get_block_number sub end end + +#! Starts a foreign account context. +#! +#! This allows calling procedures on an account different from the native account. It loads the +#! foreign account into memory, unless already loaded. It pushes the foreign account onto the +#! account stack, which makes the foreign account the active account. +#! +#! Inputs: +#! Operand stack: [foreign_account_id_suffix, foreign_account_id_prefix] +#! Advice map: { +#! FOREIGN_ACCOUNT_ID_KEY: [[foreign_account_id_suffix, foreign_account_id_prefix, 0, account_nonce], +#! VAULT_ROOT, STORAGE_ROOT, CODE_ROOT], +#! STORAGE_ROOT: [[STORAGE_SLOT_DATA]], +#! CODE_ROOT: [num_procs, [ACCOUNT_PROCEDURE_DATA]] +#! } +#! Outputs: +#! Operand stack: [] +#! +#! Where: +#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the foreign +#! account whose procedure is going to be executed. +#! - FOREIGN_ACCOUNT_ID_KEY is the map key constructed from the foreign_account_id as done by +#! account::create_id_key. +#! - account_nonce is the nonce of the foreign account. +#! - VAULT_ROOT is the commitment of the foreign account's vault. +#! - STORAGE_ROOT is the commitment of the foreign account's storage. +#! - STORAGE_SLOT_DATA is the data contained in the storage slot which is constructed as follows: +#! [SLOT_VALUE, slot_type, 0, 0, 0]. +#! - CODE_COMMITMENT is the commitment of the foreign account's code. +#! 
- ACCOUNT_PROCEDURE_DATA are the roots of the public procedures of the foreign account. +#! +#! Panics if: +#! - foreign context is created against the native account. +#! +#! Invocation: exec +pub proc start_foreign_context + # get the memory address and a flag whether this account was already loaded. + exec.account::get_account_data_ptr + # OS => [was_loaded, ptr, foreign_account_id_suffix, foreign_account_id_prefix] + + if.true + exec.memory::push_ptr_to_account_stack drop drop + # OS => [] + else + exec.memory::push_ptr_to_account_stack + # OS => [foreign_account_id_suffix, foreign_account_id_prefix] + + # load the advice data into the active account memory section + exec.account::load_foreign_account + # => [] + + # make sure that the state of the loaded foreign account corresponds to its commitment in + # the account database + exec.account::validate_active_foreign_account + # => [] + end + # => [] +end + +#! Ends a foreign account context. +#! +#! This pops the top of the account stack, making the previous account the active account. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Panics if: +#! - the active account is the native account. +#! +#! Invocation: dynexec +pub proc end_foreign_context + exec.memory::pop_ptr_from_account_stack + # => [] +end + +#! Resets the foreign procedure info (foreign account ID and foreign procedure root) in the kernel +#! memory to zeros. +#! +#! Inputs: [] +#! 
Outputs: [] +proc clear_fpi_memory + # set the upcoming foreign account ID to zero + push.0 push.0 exec.memory::set_fpi_account_id + # => [] + + # set the upcoming foreign procedure root to zero + padw exec.memory::set_fpi_procedure_root dropw + # => [] +end diff --git a/crates/miden-protocol/asm/kernels/transaction/main.masm b/crates/miden-protocol/asm/kernels/transaction/main.masm index 61a0763a06..e0f0313ebb 100644 --- a/crates/miden-protocol/asm/kernels/transaction/main.masm +++ b/crates/miden-protocol/asm/kernels/transaction/main.masm @@ -9,29 +9,29 @@ use $kernel::prologue # ================================================================================================= # Event emitted to signal that an execution of the transaction prologue has started. -const PROLOGUE_START_EVENT=event("miden::tx::prologue_start") +const PROLOGUE_START_EVENT=event("miden::protocol::tx::prologue_start") # Event emitted to signal that an execution of the transaction prologue has ended. -const PROLOGUE_END_EVENT=event("miden::tx::prologue_end") +const PROLOGUE_END_EVENT=event("miden::protocol::tx::prologue_end") # Event emitted to signal that the notes processing has started. -const NOTES_PROCESSING_START_EVENT=event("miden::tx::notes_processing_start") +const NOTES_PROCESSING_START_EVENT=event("miden::protocol::tx::notes_processing_start") # Event emitted to signal that the notes processing has ended. -const NOTES_PROCESSING_END_EVENT=event("miden::tx::notes_processing_end") +const NOTES_PROCESSING_END_EVENT=event("miden::protocol::tx::notes_processing_end") # Event emitted to signal that the note consuming has started. -const NOTE_EXECUTION_START_EVENT=event("miden::tx::note_execution_start") +const NOTE_EXECUTION_START_EVENT=event("miden::protocol::tx::note_execution_start") # Event emitted to signal that the note consuming has ended. 
-const NOTE_EXECUTION_END_EVENT=event("miden::tx::note_execution_end") +const NOTE_EXECUTION_END_EVENT=event("miden::protocol::tx::note_execution_end") # Event emitted to signal that the transaction script processing has started. -const TX_SCRIPT_PROCESSING_START_EVENT=event("miden::tx::tx_script_processing_start") +const TX_SCRIPT_PROCESSING_START_EVENT=event("miden::protocol::tx::tx_script_processing_start") # Event emitted to signal that the transaction script processing has ended. -const TX_SCRIPT_PROCESSING_END_EVENT=event("miden::tx::tx_script_processing_end") +const TX_SCRIPT_PROCESSING_END_EVENT=event("miden::protocol::tx::tx_script_processing_end") # Event emitted to signal that an execution of the transaction epilogue has started. -const EPILOGUE_START_EVENT=event("miden::tx::epilogue_start") +const EPILOGUE_START_EVENT=event("miden::protocol::tx::epilogue_start") # Event emitted to signal that an execution of the transaction epilogue has ended. -const EPILOGUE_END_EVENT=event("miden::tx::epilogue_end") +const EPILOGUE_END_EVENT=event("miden::protocol::tx::epilogue_end") # MAIN # ================================================================================================= @@ -52,9 +52,13 @@ const EPILOGUE_END_EVENT=event("miden::tx::epilogue_end") #! BLOCK_COMMITMENT, #! INITIAL_ACCOUNT_COMMITMENT, #! INPUT_NOTES_COMMITMENT, -#! account_id_prefix, account_id_suffix, block_num, pad(1) +#! account_id_suffix, account_id_prefix, block_num, pad(1) +#! ] +#! Outputs: [ +#! OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, +#! native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, +#! pad(4) #! ] -#! Outputs: [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(3)] #! #! Where: #! - BLOCK_COMMITMENT is the reference block for the transaction execution. @@ -65,7 +69,10 @@ const EPILOGUE_END_EVENT=event("miden::tx::epilogue_end") #! 
- OUTPUT_NOTES_COMMITMENT is the commitment to the notes created by the transaction. #! - ACCOUNT_UPDATE_COMMITMENT is the hash of the the final account commitment and account #! delta commitment. -#! - FEE_ASSET is the fungible asset used as the transaction fee. +#! - fee_amount is the computed fee amount of the transaction denominated in the native asset. +#! - native_asset_id_{suffix,prefix} are the suffix and prefix felts of the faucet that issues the +#! native asset. +#! - tx_expiration_block_num is the transaction expiration block number. @locals(1) proc main # Prologue @@ -130,7 +137,7 @@ proc main # get the memory address of the transaction script root and load it to the stack exec.memory::get_tx_script_root_ptr - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [TX_SCRIPT_ROOT, tx_script_root_ptr, pad(16)] exec.word::eqz not @@ -167,11 +174,13 @@ proc main # execute the transaction epilogue exec.epilogue::finalize_transaction - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(16)] + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, pad(16)] # truncate the stack to contain 16 elements in total - repeat.13 movup.13 drop end - # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, FEE_ASSET, tx_expiration_block_num, pad(3)] + repeat.3 movupw.3 dropw end + # => [OUTPUT_NOTES_COMMITMENT, ACCOUNT_UPDATE_COMMITMENT, + # native_asset_id_suffix, native_asset_id_prefix, fee_amount, tx_expiration_block_num, pad(4)] emit.EPILOGUE_END_EVENT end diff --git a/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm b/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm index b0b12ea8cf..b51ea9a44f 100644 --- a/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm +++ b/crates/miden-protocol/asm/kernels/transaction/tx_script_main.masm @@ -24,7 +24,7 @@ const
ERR_TX_TRANSACTION_SCRIPT_IS_MISSING="the transaction script is missing" #! BLOCK_COMMITMENT, #! INITIAL_ACCOUNT_COMMITMENT, #! INPUT_NOTES_COMMITMENT, -#! account_id_prefix, account_id_suffix, block_num, +#! account_id_suffix, account_id_prefix, block_num, #! ] #! Outputs: [] #! @@ -46,7 +46,7 @@ proc main # get the memory address of the transaction script root and load it to the stack exec.memory::get_tx_script_root_ptr - padw dup.4 mem_loadw_be + padw dup.4 mem_loadw_le # => [TX_SCRIPT_ROOT, tx_script_root_ptr] # return an error if the transaction script was not specified diff --git a/crates/miden-protocol/asm/protocol/active_account.masm b/crates/miden-protocol/asm/protocol/active_account.masm index 9f69e30cc1..724700ece3 100644 --- a/crates/miden-protocol/asm/protocol/active_account.masm +++ b/crates/miden-protocol/asm/protocol/active_account.masm @@ -1,18 +1,45 @@ -use miden::protocol::kernel_proc_offsets +use miden::protocol::account_id +use miden::protocol::asset +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_ID_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_NONCE_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_COMPUTE_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_CODE_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_STORAGE_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_COMPUTE_STORAGE_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_VAULT_ROOT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_VAULT_ROOT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_ITEM_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_ITEM_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_MAP_ITEM_OFFSET +use 
::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_INITIAL_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_NUM_PROCEDURES_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_PROCEDURE_ROOT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_HAS_PROCEDURE_OFFSET +use miden::core::word # ACTIVE ACCOUNT PROCEDURES # ================================================================================================= +# ERRORS +# ------------------------------------------------------------------------------------------------- + +const ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET="get_balance can only be called on a fungible asset" + +const ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET="the has_non_fungible_asset procedure can only be called on a non-fungible faucet" + # ID AND NONCE # ------------------------------------------------------------------------------------------------- #! Returns the ID of the active account. #! #! Inputs: [] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the ID of the active account. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the ID of the active account. #! #! 
Invocation: exec pub proc get_id @@ -24,15 +51,15 @@ pub proc get_id push.0 # => [is_native = 0, pad(14)] - exec.kernel_proc_offsets::account_get_id_offset + push.ACCOUNT_GET_ID_OFFSET # => [offset, is_native = 0, pad(14)] syscall.exec_kernel_proc - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] # clean the stack swapdw dropw dropw swapw dropw movdn.3 movdn.3 drop drop - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] end #! Returns the nonce of the active account. @@ -53,7 +80,7 @@ pub proc get_nonce padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_nonce_offset + push.ACCOUNT_GET_NONCE_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -81,7 +108,7 @@ pub proc get_initial_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_initial_commitment_offset + push.ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -106,7 +133,7 @@ pub proc compute_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_compute_commitment_offset + push.ACCOUNT_COMPUTE_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -144,7 +171,7 @@ pub proc get_code_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_code_commitment_offset + push.ACCOUNT_GET_CODE_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -174,7 +201,7 @@ pub proc get_initial_storage_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_initial_storage_commitment_offset + push.ACCOUNT_GET_INITIAL_STORAGE_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -208,7 +235,7 @@ pub proc compute_storage_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_compute_storage_commitment_offset + 
push.ACCOUNT_COMPUTE_STORAGE_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -233,7 +260,7 @@ pub proc get_initial_vault_root padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_initial_vault_root_offset + push.ACCOUNT_GET_INITIAL_VAULT_ROOT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -258,7 +285,7 @@ pub proc get_vault_root padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_vault_root_offset + push.ACCOUNT_GET_VAULT_ROOT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -274,11 +301,11 @@ end #! Gets an item from the active account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value of the item. #! @@ -288,14 +315,14 @@ end #! Invocation: exec pub proc get_item push.0 movdn.2 - # => [slot_id_prefix, slot_id_suffix, 0] + # => [slot_id_suffix, slot_id_prefix, 0] - exec.kernel_proc_offsets::account_get_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, 0] + push.ACCOUNT_GET_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, 0] # pad the stack padw swapw padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, pad(13)] + # => [offset, slot_id_suffix, slot_id_prefix, pad(13)] syscall.exec_kernel_proc # => [VALUE, pad(12)] @@ -308,11 +335,11 @@ end #! Gets the initial item from the active account storage slot as it was at the beginning of the #! transaction. #! -#! Inputs: [slot_id_prefix, slot_id_suffix] +#! Inputs: [slot_id_suffix, slot_id_prefix] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! 
- slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. #! @@ -322,14 +349,14 @@ end #! Invocation: exec pub proc get_initial_item push.0 movdn.2 - # => [slot_id_prefix, slot_id_suffix, 0] + # => [slot_id_suffix, slot_id_prefix, 0] - exec.kernel_proc_offsets::account_get_initial_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, 0] + push.ACCOUNT_GET_INITIAL_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, 0] # pad the stack padw swapw padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, pad(13)] + # => [offset, slot_id_suffix, slot_id_prefix, pad(13)] syscall.exec_kernel_proc # => [INIT_VALUE, pad(12)] @@ -341,11 +368,11 @@ end #! Gets a map item from the active account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - KEY is the key of the item to get. @@ -357,12 +384,12 @@ end #! #! Invocation: exec pub proc get_map_item - exec.kernel_proc_offsets::account_get_map_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, KEY] + push.ACCOUNT_GET_MAP_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, KEY] # pad the stack push.0 movdn.7 padw padw swapdw - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, pad(9)] syscall.exec_kernel_proc # => [VALUE, pad(12)] @@ -375,11 +402,11 @@ end #! Gets the initial VALUE from the active account storage map as it was at the beginning of the #! transaction. #! -#! 
Inputs: [slot_id_prefix, slot_id_suffix, KEY] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY] #! Outputs: [INIT_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - KEY is the key of the item to get. #! - INIT_VALUE is the initial value of the item at the beginning of the transaction. @@ -390,11 +417,11 @@ end #! #! Invocation: exec pub proc get_initial_map_item - exec.kernel_proc_offsets::account_get_initial_map_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, KEY] + push.ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, KEY] push.0 movdn.7 padw padw swapdw - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, pad(9)] syscall.exec_kernel_proc # => [INIT_VALUE, pad(12)] @@ -407,14 +434,71 @@ end # VAULT # ------------------------------------------------------------------------------------------------- +#! Returns the asset associated with the provided asset vault key in the active account's vault. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. +#! +#! Invocation: exec +pub proc get_asset + push.ACCOUNT_GET_ASSET_OFFSET + # => [offset, ASSET_KEY] + + # pad the stack + push.0 movdn.5 push.0 movdn.5 push.0 movdn.5 + padw padw swapdw + # => [offset, ASSET_KEY, pad(11)] + + syscall.exec_kernel_proc + # => [ASSET_VALUE, pad(12)] + + # clean the stack + swapdw dropw dropw swapw dropw + # => [ASSET_VALUE] +end + +#! Returns the asset associated with the provided asset vault key in the active account's vault at +#! the beginning of the transaction. +#! +#! 
Inputs: [ASSET_KEY] +#! Outputs: [ASSET_VALUE] +#! +#! Where: +#! - ASSET_KEY is the asset vault key of the asset to fetch. +#! - ASSET_VALUE is the value of the asset from the vault, which can be the EMPTY_WORD if it isn't +#! present. +#! +#! Invocation: exec +pub proc get_initial_asset + push.ACCOUNT_GET_INITIAL_ASSET_OFFSET + # => [offset, ASSET_KEY] + + # pad the stack + push.0 movdn.5 push.0 movdn.5 push.0 movdn.5 + padw padw swapdw + # => [offset, ASSET_KEY, pad(11)] + + syscall.exec_kernel_proc + # => [ASSET_VALUE, pad(12)] + + # clean the stack + swapdw dropw dropw swapw dropw + # => [ASSET_VALUE] +end + #! Returns the balance of the fungible asset associated with the provided faucet_id in the active #! account's vault. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix] +#! Inputs: [faucet_id_suffix, faucet_id_prefix] #! Outputs: [balance] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet ID of the fungible +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet ID of the fungible #! asset of interest. #! - balance is the vault balance of the fungible asset. #! @@ -423,29 +507,34 @@ end #! #! Invocation: exec pub proc get_balance - exec.kernel_proc_offsets::account_get_balance_offset - # => [offset, faucet_id_prefix, faucet_id_suffix] + # assert that the faucet id is a fungible faucet + dup.1 exec.account_id::is_fungible_faucet + assert.err=ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET + # => [faucet_id_suffix, faucet_id_prefix] - # pad the stack - push.0 movdn.3 padw swapw padw padw swapdw - # => [offset, faucet_id_prefix, faucet_id_suffix, pad(13)] + # TODO(callbacks): This should take ASSET_KEY as input to avoid hardcoding the callbacks flag. 
+ push.0 + # => [enable_callbacks = 0, faucet_id_suffix, faucet_id_prefix] - syscall.exec_kernel_proc - # => [balance, pad(15)] + exec.asset::create_fungible_key + # => [ASSET_KEY] - # clean the stack - swapdw dropw dropw swapw dropw movdn.3 drop drop drop + exec.get_asset + # => [ASSET_VALUE] + + # extract the asset's balance + exec.::miden::protocol::util::asset::fungible_value_into_amount # => [balance] end -#! Returns the balance of the fungible asset associated with the provided faucet_id in the active +#! Returns the balance of the fungible asset associated with the provided faucet_id in the active #! account's vault at the beginning of the transaction. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix] +#! Inputs: [faucet_id_suffix, faucet_id_prefix] #! Outputs: [init_balance] #! #! Where: -#! - faucet_id_{prefix, suffix} are the prefix and suffix felts of the faucet id of the fungible +#! - faucet_id_{suffix, prefix} are the suffix and prefix felts of the faucet id of the fungible #! asset of interest. #! - init_balance is the vault balance of the fungible asset at the beginning of the transaction. #! @@ -454,48 +543,53 @@ end #! #! Invocation: exec pub proc get_initial_balance - exec.kernel_proc_offsets::account_get_initial_balance_offset - # => [offset, faucet_id_prefix, faucet_id_suffix] + # assert that the faucet id is a fungible faucet + dup.1 exec.account_id::is_fungible_faucet + assert.err=ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET + # => [faucet_id_suffix, faucet_id_prefix] - # pad the stack - push.0 movdn.3 padw swapw padw padw swapdw - # => [offset, faucet_id_prefix, faucet_id_suffix, pad(13)] + # TODO(callbacks): This should take ASSET_KEY as input to avoid hardcoding the callbacks flag. 
+ push.0 + # => [enable_callbacks = 0, faucet_id_suffix, faucet_id_prefix] - syscall.exec_kernel_proc - # => [init_balance, pad(15)] + exec.asset::create_fungible_key + # => [ASSET_KEY] - # clean the stack - swapdw dropw dropw swapw dropw movdn.3 drop drop drop - # => [init_balance] + exec.get_initial_asset + # => [ASSET_VALUE] + + # extract the asset's balance + exec.::miden::protocol::util::asset::fungible_value_into_amount + # => [init_balance] end -#! Returns a boolean indicating whether the non-fungible asset is present in the active account's -#! vault. +#! Returns a boolean indicating whether the active account stores an asset with the provided +#! non-fungible asset vault key in its vault. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_KEY] #! Outputs: [has_asset] #! #! Where: -#! - ASSET is the non-fungible asset of interest -#! - has_asset is a boolean indicating whether the account vault has the asset of interest +#! - ASSET_KEY is the vault key of the non-fungible asset to check. +#! - has_asset is a boolean indicating whether the account vault has the asset. #! #! Panics if: -#! - the ASSET is a fungible asset. +#! - the ASSET_KEY is the vault key of a fungible asset. #! #!
Invocation: exec pub proc has_non_fungible_asset - exec.kernel_proc_offsets::account_has_non_fungible_asset_offset - # => [offset, ASSET] + # => [faucet_id_prefix, faucet_id_suffix, asset_id_prefix, asset_id_suffix] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + # assert that the faucet id is a non-fungible faucet + dup.3 exec.account_id::is_non_fungible_faucet + assert.err=ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET + # => [ASSET_KEY] - syscall.exec_kernel_proc - # => [has_asset, pad(15)] + exec.get_asset + # => [ASSET_VALUE] - # clean the stack - swapdw dropw dropw swapw dropw movdn.3 drop drop drop + # compare with EMPTY_WORD to assess if the asset exists in the vault + exec.word::eqz not # => [has_asset] end @@ -513,7 +607,7 @@ pub proc get_num_procedures padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_get_num_procedures_offset + push.ACCOUNT_GET_NUM_PROCEDURES_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -541,7 +635,7 @@ pub proc get_procedure_root # => [index] push.0.0 movup.2 - exec.kernel_proc_offsets::account_get_procedure_root_offset + push.ACCOUNT_GET_PROCEDURE_ROOT_OFFSET # => [offset, index, 0, 0] # pad the stack @@ -566,12 +660,12 @@ end #! #! Where: #! - PROC_ROOT is the hash of the procedure of interest. -#! - is_procedure_available is the binary flag indicating whether the procedure with PROC_ROOT is +#! - is_procedure_available is the binary flag indicating whether the procedure with PROC_ROOT is #! available on the active account. #! #! 
Invocation: exec pub proc has_procedure - exec.kernel_proc_offsets::account_has_procedure_offset + push.ACCOUNT_HAS_PROCEDURE_OFFSET # => [offset, PROC_ROOT] # pad the stack @@ -585,3 +679,4 @@ pub proc has_procedure swapdw dropw dropw swapw dropw movdn.3 drop drop drop # => [is_procedure_available] end + diff --git a/crates/miden-protocol/asm/protocol/active_note.masm b/crates/miden-protocol/asm/protocol/active_note.masm index 77865a19bc..8a34ba12fd 100644 --- a/crates/miden-protocol/asm/protocol/active_note.masm +++ b/crates/miden-protocol/asm/protocol/active_note.masm @@ -1,15 +1,20 @@ -use miden::core::crypto::hashes::rpo256 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_ASSETS_INFO_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_RECIPIENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_STORAGE_INFO_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_METADATA_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET use miden::protocol::note -# ERRORS +# ERRORS # ================================================================================================= const ERR_NOTE_DATA_DOES_NOT_MATCH_COMMITMENT="note data does not match the commitment" -const ERR_NOTE_INVALID_NUMBER_OF_INPUTS="the specified number of note inputs does not match the actual number" +const ERR_NOTE_INVALID_NUMBER_OF_STORAGE_ITEMS="the specified number of note storage items does not match the actual number" # ACTIVE NOTE PROCEDURES # ================================================================================================= @@ -39,7 +44,7 @@ pub proc get_assets push.1 # => [is_active_note = 1, pad(14), dest_ptr] - exec.kernel_proc_offsets::input_note_get_assets_info_offset + 
push.INPUT_NOTE_GET_ASSETS_INFO_OFFSET # => [offset, is_active_note = 1, pad(14), dest_ptr] syscall.exec_kernel_proc @@ -60,7 +65,7 @@ end #! Outputs: [RECIPIENT] #! #! Where: -#! - RECIPIENT is the commitment to the active note's script, inputs, the serial number. +#! - RECIPIENT is the commitment to the active note's script, storage, the serial number. #! #! Panics if: #! - no note is currently active. @@ -75,7 +80,7 @@ pub proc get_recipient push.1 # => [is_active_note = 1, pad(14)] - exec.kernel_proc_offsets::input_note_get_recipient_offset + push.INPUT_NOTE_GET_RECIPIENT_OFFSET # => [offset, is_active_note = 1, pad(14)] syscall.exec_kernel_proc @@ -86,24 +91,24 @@ pub proc get_recipient # => [RECIPIENT] end -#! Writes the active note's inputs to memory starting at the specified address. +#! Writes the active note's storage to memory starting at the specified address. #! #! Inputs: #! Stack: [dest_ptr] -#! Advice Map: { NOTE_INPUTS_COMMITMENT: [INPUTS] } +#! Advice Map: { NOTE_STORAGE_COMMITMENT: [STORAGE] } #! Outputs: -#! Stack: [num_inputs, dest_ptr] +#! Stack: [num_storage_items, dest_ptr] #! #! Where: -#! - dest_ptr is the memory address to write the note inputs. -#! - NOTE_INPUTS_COMMITMENT is the commitment to the note's inputs. -#! - INPUTS is the data corresponding to the note's inputs. +#! - dest_ptr is the memory address to write the note storage. +#! - NOTE_STORAGE_COMMITMENT is the commitment to the note's storage. +#! - STORAGE is the data corresponding to the note's storage. #! #! Panics if: #! - no note is currently active. #! #! 
Invocation: exec -pub proc get_inputs +pub proc get_storage # pad the stack padw padw padw push.0.0 # => [pad(14), dest_ptr] @@ -112,20 +117,20 @@ pub proc get_inputs push.1 # => [is_active_note = 1, pad(14), dest_ptr] - exec.kernel_proc_offsets::input_note_get_inputs_info_offset + push.INPUT_NOTE_GET_STORAGE_INFO_OFFSET # => [offset, is_active_note = 1, pad(14), dest_ptr] syscall.exec_kernel_proc - # => [NOTE_INPUTS_COMMITMENT, num_inputs, pad(11), dest_ptr] + # => [NOTE_STORAGE_COMMITMENT, num_storage_items, pad(11), dest_ptr] # clean the stack swapdw dropw dropw movup.5 drop movup.5 drop movup.5 drop - # => [NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # => [NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # write the inputs to the memory using the provided destination pointer - exec.write_inputs_to_memory - # => [num_inputs, dest_ptr] + exec.write_storage_to_memory + # => [num_storage_items, dest_ptr] end #! Returns the metadata of the active note. @@ -150,7 +155,7 @@ pub proc get_metadata push.1 # => [is_active_note = 1, pad(14)] - exec.kernel_proc_offsets::input_note_get_metadata_offset + push.INPUT_NOTE_GET_METADATA_OFFSET # => [offset, is_active_note = 1, pad(14)] syscall.exec_kernel_proc @@ -164,10 +169,10 @@ end #! Returns the sender of the active note. #! #! Inputs: [] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: -#! - sender_{prefix,suffix} are the prefix and suffix felts of the sender of the active note. +#! - sender_{suffix,prefix} are the suffix and prefix felts of the sender of the active note. #! #! Panics if: #! - no note is currently active. @@ -180,7 +185,7 @@ pub proc get_sender # extract the sender ID from the metadata header exec.note::extract_sender_from_metadata - # => [sender_id_prefix, sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] end #! Returns the serial number of the active note. 
@@ -204,7 +209,7 @@ pub proc get_serial_number push.1 # => [is_active_note = 1, pad(14)] - exec.kernel_proc_offsets::input_note_get_serial_number_offset + push.INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET # => [offset, is_active_note = 1, pad(14)] syscall.exec_kernel_proc @@ -236,7 +241,7 @@ pub proc get_script_root push.1 # => [is_active_note = 1, pad(14)] - exec.kernel_proc_offsets::input_note_get_script_root_offset + push.INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET # => [offset, is_active_note = 1, pad(14)] syscall.exec_kernel_proc @@ -247,87 +252,87 @@ pub proc get_script_root # => [SCRIPT_ROOT] end -# HELPER PROCEDURES +# HELPER PROCEDURES # ================================================================================================= -#! Writes the note inputs stored in the advice map to the memory specified by the provided +#! Writes the note storage stored in the advice map to the memory specified by the provided #! destination pointer. #! #! Inputs: -#! Operand stack: [NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] +#! Operand stack: [NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] #! Advice map: { -#! NOTE_INPUTS_COMMITMENT: [[INPUT_VALUES]] +#! NOTE_STORAGE_COMMITMENT: [[INPUT_VALUES]] #! } #! Outputs: -#! Operand stack: [num_inputs, dest_ptr] -proc write_inputs_to_memory +#! Operand stack: [num_storage_items, dest_ptr] +proc write_storage_to_memory # load the inputs from the advice map to the advice stack # we pad the number of inputs to the next multiple of 8 so that we can use the # `pipe_double_words_to_memory` instruction. The padded zeros don't affect the commitment # computation. 
adv.push_mapvaln.8 - # OS => [NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] - # AS => [advice_num_inputs, [INPUT_VALUES]] + # OS => [NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] + # AS => [advice_num_storage_items, [INPUT_VALUES]] # move the number of inputs obtained from advice map to the operand stack adv_push.1 dup.5 - # OS => [num_inputs, advice_num_inputs, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [num_storage_items, advice_num_storage_items, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] - assert_eq.err=ERR_NOTE_INVALID_NUMBER_OF_INPUTS - # OS => [NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + assert_eq.err=ERR_NOTE_INVALID_NUMBER_OF_STORAGE_ITEMS + # OS => [NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # calculate the number of words required to store the inputs dup.4 u32divmod.4 neq.0 add - # OS => [num_words, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [num_words, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # round up the number of words to the next multiple of 2 dup is_odd add - # OS => [even_num_words, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [even_num_words, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # compute the end pointer for writing the padded inputs (even_num_words * 4 elements) dup.6 swap mul.4 add - # OS => [end_ptr, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # prepare the stack for the `pipe_double_words_to_memory` procedure. # - # To match `rpo256::hash_elements` (used for NOTE_INPUTS_COMMITMENT), we set the first capacity - # element to `num_inputs % 8`. + # To match `poseidon2::hash_elements` (used for NOTE_STORAGE_COMMITMENT), we set the first capacity + # element to `num_storage_items % 8`. 
dup.6 dup.6 - # OS => [num_inputs, write_ptr, end_ptr, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [num_storage_items, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] u32divmod.8 swap drop - # OS => [num_inputs_mod_8, write_ptr, end_ptr, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [num_storage_items_mod_8, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] - push.0.0.0 - # OS => [A, write_ptr, end_ptr, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr], where A = [0, 0, 0, num_inputs_mod_8] + push.0.0.0 movup.3 + # OS => [CAPACITY = [num_storage_items_mod_8, 0, 0, 0], write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] padw padw - # OS => [PAD, PAD, A, write_ptr, end_ptr, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [RATE0, RATE1, CAPACITY, write_ptr, end_ptr, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [[INPUT_VALUES]] # write the inputs from the advice stack into memory exec.mem::pipe_double_words_to_memory - # OS => [PERM, PERM, PERM, end_ptr', NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [RATE0, RATE1, CAPACITY, end_ptr', NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # AS => [] # extract the computed commitment from the hasher state - exec.rpo256::squeeze_digest - # OS => [COMPUTED_COMMITMENT, end_ptr', NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + exec.poseidon2::squeeze_digest + # OS => [COMPUTED_COMMITMENT, end_ptr', NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # drop end_ptr' movup.4 drop - # OS => [COMPUTED_COMMITMENT, NOTE_INPUTS_COMMITMENT, num_inputs, dest_ptr] + # OS => [COMPUTED_COMMITMENT, NOTE_STORAGE_COMMITMENT, num_storage_items, dest_ptr] # validate that the inputs written to memory match the inputs commitment assert_eqw.err=ERR_NOTE_DATA_DOES_NOT_MATCH_COMMITMENT - # => [num_inputs, dest_ptr] + # => [num_storage_items, 
dest_ptr] end diff --git a/crates/miden-protocol/asm/protocol/asset.masm b/crates/miden-protocol/asm/protocol/asset.masm index e8da408a8a..ce2ecfb614 100644 --- a/crates/miden-protocol/asm/protocol/asset.masm +++ b/crates/miden-protocol/asm/protocol/asset.masm @@ -1,72 +1,94 @@ use miden::protocol::account_id +use miden::protocol::util::asset -# ERRORS +# RE-EXPORTS # ================================================================================================= -const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the fungible asset because the provided faucet id is not from a fungible faucet" +pub use ::miden::protocol::util::asset::FUNGIBLE_ASSET_MAX_AMOUNT +pub use ::miden::protocol::util::asset::ASSET_SIZE +pub use ::miden::protocol::util::asset::ASSET_VALUE_MEMORY_OFFSET +pub use ::miden::protocol::util::asset::key_to_faucet_id +pub use ::miden::protocol::util::asset::key_into_faucet_id +pub use ::miden::protocol::util::asset::key_to_asset_id +pub use ::miden::protocol::util::asset::key_into_asset_id +pub use ::miden::protocol::util::asset::key_to_callbacks_enabled +pub use ::miden::protocol::util::asset::store +pub use ::miden::protocol::util::asset::load +pub use ::miden::protocol::util::asset::fungible_value_into_amount +pub use ::miden::protocol::util::asset::fungible_to_amount +pub use ::miden::protocol::util::asset::create_fungible_key + +# ERRORS +# ================================================================================================= const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT="fungible asset build operation called with amount that exceeds the maximum allowed asset amount" +const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the fungible asset because the provided faucet id is not from a fungible faucet" + const ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID="failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet" # PROCEDURES # 
================================================================================================= -#! Builds a fungible asset for the specified fungible faucet and amount. +#! Creates a fungible asset for the specified fungible faucet and amount. #! -#! Inputs: [faucet_id_prefix, faucet_id_suffix, amount] -#! Outputs: [ASSET] +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet to create the asset +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset #! for. #! - amount is the amount of the asset to create. -#! - ASSET is the built fungible asset. +#! - ASSET_KEY is the vault key of the created fungible asset. +#! - ASSET_VALUE is the value of the created fungible asset. +#! +#! Panics if: +#! - the provided faucet ID is not a fungible faucet. +#! - the provided amount exceeds FUNGIBLE_ASSET_MAX_AMOUNT. +#! - enable_callbacks is not 0 or 1. #! #! 
Invocation: exec -pub proc build_fungible_asset +pub proc create_fungible_asset # assert the faucet is a fungible faucet - dup exec.account_id::is_fungible_faucet assert.err=ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID - # => [faucet_id_prefix, faucet_id_suffix, amount] + dup.2 exec.account_id::is_fungible_faucet assert.err=ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] # assert the amount is valid - dup.2 exec.get_fungible_asset_max_amount lte + dup.3 lte.FUNGIBLE_ASSET_MAX_AMOUNT assert.err=ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT - # => [faucet_id_prefix, faucet_id_suffix, amount] + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] - # create the asset - push.0 movdn.2 - # => [ASSET] + # SAFETY: faucet ID and amount were validated + exec.asset::create_fungible_asset_unchecked + # => [ASSET_KEY, ASSET_VALUE] end -#! Builds a non fungible asset for the specified non-fungible faucet and amount. +#! Creates a non fungible asset for the specified non-fungible faucet. #! -#! Inputs: [faucet_id_prefix, DATA_HASH] -#! Outputs: [ASSET] +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: -#! - faucet_id_{prefix,suffix} are the prefix and suffix felts of the faucet to create the asset +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset #! for. -#! - DATA_HASH is the data hash of the non-fungible asset to build. -#! - ASSET is the built non-fungible asset. +#! - DATA_HASH is the data hash of the non-fungible asset to create. +#! - ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset, which is identical to DATA_HASH. +#! +#! Panics if: +#! 
- the provided faucet ID is not a non-fungible faucet. +#! - enable_callbacks is not 0 or 1. #! #! Invocation: exec -pub proc build_non_fungible_asset +pub proc create_non_fungible_asset # assert the faucet is a non-fungible faucet - dup exec.account_id::is_non_fungible_faucet + dup.2 exec.account_id::is_non_fungible_faucet assert.err=ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID - # => [faucet_id_prefix, hash3, hash2, hash1, hash0] + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] - # build the asset - swap drop - # => [faucet_id_prefix, hash2, hash1, hash0] - # => [ASSET] + # SAFETY: faucet ID was validated + exec.::miden::protocol::util::asset::create_non_fungible_asset_unchecked + # => [ASSET_KEY, ASSET_VALUE] end - -#! Returns the maximum amount of a fungible asset. -#! -#! Stack: [] -#! Outputs: [fungible_asset_max_amount] -#! -#! fungible_asset_max_amount is the maximum amount of a fungible asset. -pub use ::miden::protocol::util::asset::get_fungible_asset_max_amount diff --git a/crates/miden-protocol/asm/protocol/auth.masm b/crates/miden-protocol/asm/protocol/auth.masm new file mode 100644 index 0000000000..86694b6b88 --- /dev/null +++ b/crates/miden-protocol/asm/protocol/auth.masm @@ -0,0 +1,8 @@ +# EVENTS +# ================================================================================================= + +#! The event to request an authentication signature. +pub const AUTH_REQUEST_EVENT = event("miden::protocol::auth::request") + +#! The event emitted when authentication is unauthorized. 
+pub const AUTH_UNAUTHORIZED_EVENT = event("miden::protocol::auth::unauthorized") diff --git a/crates/miden-protocol/asm/protocol/faucet.masm b/crates/miden-protocol/asm/protocol/faucet.masm index f3afe38e9b..15648cc698 100644 --- a/crates/miden-protocol/asm/protocol/faucet.masm +++ b/crates/miden-protocol/asm/protocol/faucet.masm @@ -1,15 +1,18 @@ use miden::protocol::asset use miden::protocol::active_account -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::FAUCET_MINT_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::FAUCET_BURN_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::FAUCET_HAS_CALLBACKS_OFFSET #! Creates a fungible asset for the faucet the transaction is being executed against. #! #! Inputs: [amount] -#! Outputs: [ASSET] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: #! - amount is the amount of the asset to create. -#! - ASSET is the created fungible asset. +#! - ASSET_KEY is the vault key of the created fungible asset. +#! - ASSET_VALUE is the value of the created fungible asset. #! #! Panics if: #! - the active account is not a fungible faucet. @@ -18,43 +21,56 @@ use miden::protocol::kernel_proc_offsets pub proc create_fungible_asset # fetch the id of the faucet the transaction is being executed against. exec.active_account::get_id - # => [id_prefix, id_suffix, amount] + # => [id_suffix, id_prefix, amount] - # build the fungible asset - exec.asset::build_fungible_asset - # => [ASSET] + # check whether the faucet has callbacks defined + exec.has_callbacks + # => [has_callbacks, id_suffix, id_prefix, amount] + + # create the fungible asset + exec.asset::create_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] end #! Creates a non-fungible asset for the faucet the transaction is being executed against. #! #! Inputs: [DATA_HASH] -#! Outputs: [ASSET] +#! Outputs: [ASSET_KEY, ASSET_VALUE] #! #! Where: #! - DATA_HASH is the data hash of the non-fungible asset to create. -#! 
- ASSET is the created non-fungible asset. +#! - ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset. #! #! Panics if: #! - the active account is not a non-fungible faucet. #! #! Invocation: exec pub proc create_non_fungible_asset - # get the id of the faucet the transaction is being executed against - exec.active_account::get_id swap drop - # => [faucet_id_prefix, DATA_HASH] + # fetch the id of the faucet the transaction is being executed against + exec.active_account::get_id + # => [id_suffix, id_prefix, DATA_HASH] + + # check whether the faucet has callbacks defined + exec.has_callbacks + # => [has_callbacks, id_suffix, id_prefix, DATA_HASH] # build the non-fungible asset - exec.asset::build_non_fungible_asset - # => [ASSET] + exec.asset::create_non_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] end #! Mint an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [NEW_ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was minted. +#! - ASSET_KEY is the vault key of the asset to mint. +#! - ASSET_VALUE is the value of the asset that was minted. +#! - NEW_ASSET_VALUE is: +#! - For fungible assets: the ASSET_VALUE merged with the existing vault asset value, if any. +#! - For non-fungible assets: identical to ASSET_VALUE. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -67,28 +83,29 @@ end #! #! 
Invocation: exec pub proc mint - exec.kernel_proc_offsets::faucet_mint_asset_offset - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.FAUCET_MINT_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end #! Burn an asset from the faucet the transaction is being executed against. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset that was burned. +#! - ASSET_KEY is the vault key of the asset to burn. +#! - ASSET_VALUE is the value of the asset that was burned. #! #! Panics if: #! - the transaction is not being executed against a faucet. @@ -102,77 +119,45 @@ end #! #! Invocation: exec pub proc burn - exec.kernel_proc_offsets::faucet_burn_asset_offset - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.FAUCET_BURN_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end -#! Returns the total issuance of the fungible faucet the transaction is being executed against. +#! Returns whether the active account defines callbacks. +#! +#! The account defines callbacks if any callback storage slot is present and it contains not the +#! empty word. #! #! Inputs: [] -#! Outputs: [total_issuance] +#! Outputs: [has_callbacks] #! #! Where: -#! - total_issuance is the total issuance of the fungible faucet the transaction is being executed -#! 
against. -#! -#! Panics if: -#! - the transaction is not being executed against a fungible faucet. +#! - has_callbacks is 1 if the account defines callbacks, 0 otherwise. #! #! Invocation: exec -pub proc get_total_issuance +pub proc has_callbacks # pad the stack padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::faucet_get_total_fungible_asset_issuance_offset + push.FAUCET_HAS_CALLBACKS_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc - # => [total_issuance, pad(15)] - - # clean the stack - swapdw dropw dropw swapw dropw movdn.3 drop drop drop - # => [total_issuance] -end - -#! Returns a boolean indicating whether the provided non-fungible asset has been already issued by -#! this faucet. -#! -#! Inputs: [ASSET] -#! Outputs: [is_issued] -#! -#! Where: -#! - ASSET is the non-fungible asset that is being checked. -#! - is_issued is a boolean indicating whether the non-fungible asset has been issued. -#! -#! Panics if: -#! - the ASSET is a fungible asset. -#! - the ASSET is not associated with the faucet the transaction is being executed against. -#! -#! 
Invocation: exec -pub proc is_non_fungible_asset_issued - exec.kernel_proc_offsets::faucet_is_non_fungible_asset_issued_offset - # => [offset, ASSET] - - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] - - syscall.exec_kernel_proc - # => [is_issued, pad(15)] + # => [has_callbacks, pad(15)] # clean the stack swapdw dropw dropw swapw dropw movdn.3 drop drop drop - # => [total_issuance] + # => [has_callbacks] end diff --git a/crates/miden-protocol/asm/protocol/input_note.masm b/crates/miden-protocol/asm/protocol/input_note.masm index a08d5a5dd2..5d09f5bc5f 100644 --- a/crates/miden-protocol/asm/protocol/input_note.masm +++ b/crates/miden-protocol/asm/protocol/input_note.masm @@ -1,4 +1,9 @@ -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_ASSETS_INFO_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_RECIPIENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_METADATA_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_STORAGE_INFO_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET +use ::miden::protocol::kernel_proc_offsets::INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET use miden::protocol::note # PROCEDURES @@ -30,7 +35,7 @@ pub proc get_assets_info push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_assets_info_offset + push.INPUT_NOTE_GET_ASSETS_INFO_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack @@ -41,14 +46,14 @@ pub proc get_assets_info # => [ASSETS_COMMITMENT, num_assets, pad(11)] # clean the stack - swapdw dropw dropw + swapdw dropw dropw repeat.3 movup.5 drop end # => [ASSETS_COMMITMENT, num_assets] end -#! Writes the assets of the input note with the specified index into memory starting at the +#! Writes the assets of the input note with the specified index into memory starting at the #! specified address. #! #! 
Attention: memory starting from the `dest_ptr` should have enough space to store all the assets @@ -88,7 +93,7 @@ end #! #! Where: #! - note_index is the index of the input note whose recipient should be returned. -#! - RECIPIENT is the commitment to the input note's script, inputs, the serial number. +#! - RECIPIENT is the commitment to the input note's script, storage, the serial number. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. @@ -104,7 +109,7 @@ pub proc get_recipient push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_recipient_offset + push.INPUT_NOTE_GET_RECIPIENT_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack @@ -143,7 +148,7 @@ pub proc get_metadata push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_metadata_offset + push.INPUT_NOTE_GET_METADATA_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack @@ -161,11 +166,11 @@ end #! Returns the sender of the input note with the specified index. #! #! Inputs: [note_index] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: #! - note_index is the index of the input note whose sender should be returned. -#! - sender_{prefix,suffix} are the prefix and suffix felts of the specified note. +#! - sender_{suffix,prefix} are the suffix and prefix felts of the specified note. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. @@ -178,24 +183,24 @@ pub proc get_sender # extract the sender ID from the metadata header exec.note::extract_sender_from_metadata - # => [sender_id_prefix, sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] end #! Returns the inputs commitment and length of the input note with the specified index. #! #! Inputs: [note_index] -#! Outputs: [NOTE_INPUTS_COMMITMENT, num_inputs] +#! 
Outputs: [NOTE_STORAGE_COMMITMENT, num_storage_items] #! #! Where: #! - note_index is the index of the input note whose data should be returned. -#! - NOTE_INPUTS_COMMITMENT is the inputs commitment of the specified input note. -#! - num_inputs is the number of input values of the specified input note. +#! - NOTE_STORAGE_COMMITMENT is the inputs commitment of the specified input note. +#! - num_storage_items is the number of input values of the specified input note. #! #! Panics if: #! - the note index is greater or equal to the total number of input notes. #! #! Invocation: exec -pub proc get_inputs_info +pub proc get_storage_info # start padding the stack push.0 swap # => [note_index, 0] @@ -205,7 +210,7 @@ pub proc get_inputs_info push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_inputs_info_offset + push.INPUT_NOTE_GET_STORAGE_INFO_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack @@ -213,14 +218,14 @@ pub proc get_inputs_info # => [offset, is_active_note = 0, note_index, pad(13)] syscall.exec_kernel_proc - # => [NOTE_INPUTS_COMMITMENT, num_inputs, pad(11)] + # => [NOTE_STORAGE_COMMITMENT, num_storage_items, pad(11)] # clean the stack - swapdw dropw dropw + swapdw dropw dropw repeat.3 movup.5 drop end - # => [NOTE_INPUTS_COMMITMENT, num_inputs] + # => [NOTE_STORAGE_COMMITMENT, num_storage_items] end #! Returns the script root of the input note with the specified index. 
@@ -246,7 +251,7 @@ pub proc get_script_root push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_script_root_offset + push.INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack @@ -279,12 +284,12 @@ pub proc get_serial_number push.0 swap # => [note_index, 0] - # push the flag indicating that we want to request serial number from the note with the + # push the flag indicating that we want to request serial number from the note with the # specified index push.0 # => [is_active_note = 0, note_index, 0] - exec.kernel_proc_offsets::input_note_get_serial_number_offset + push.INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET # => [offset, is_active_note = 0, note_index, 0] # pad the stack diff --git a/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm b/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm index ae698f2df7..eeb370179c 100644 --- a/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm +++ b/crates/miden-protocol/asm/protocol/kernel_proc_offsets.masm @@ -4,740 +4,88 @@ ### Account ##################################### # Entire account commitment -const ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET=0 -const ACCOUNT_COMPUTE_COMMITMENT_OFFSET=1 +pub const ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET=0 +pub const ACCOUNT_COMPUTE_COMMITMENT_OFFSET=1 # ID -const ACCOUNT_GET_ID_OFFSET=2 +pub const ACCOUNT_GET_ID_OFFSET=2 # Nonce -const ACCOUNT_GET_NONCE_OFFSET=3 # accessor -const ACCOUNT_INCR_NONCE_OFFSET=4 # mutator +pub const ACCOUNT_GET_NONCE_OFFSET=3 # accessor +pub const ACCOUNT_INCR_NONCE_OFFSET=4 # mutator # Code -const ACCOUNT_GET_CODE_COMMITMENT_OFFSET=5 +pub const ACCOUNT_GET_CODE_COMMITMENT_OFFSET=5 # Storage -const ACCOUNT_GET_INITIAL_STORAGE_COMMITMENT_OFFSET=6 -const ACCOUNT_COMPUTE_STORAGE_COMMITMENT_OFFSET=7 -const ACCOUNT_GET_ITEM_OFFSET=8 -const ACCOUNT_GET_INITIAL_ITEM_OFFSET=9 -const ACCOUNT_SET_ITEM_OFFSET=10 -const ACCOUNT_GET_MAP_ITEM_OFFSET=11 -const 
ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET=12 -const ACCOUNT_SET_MAP_ITEM_OFFSET=13 +pub const ACCOUNT_GET_INITIAL_STORAGE_COMMITMENT_OFFSET=6 +pub const ACCOUNT_COMPUTE_STORAGE_COMMITMENT_OFFSET=7 +pub const ACCOUNT_GET_ITEM_OFFSET=8 +pub const ACCOUNT_GET_INITIAL_ITEM_OFFSET=9 +pub const ACCOUNT_SET_ITEM_OFFSET=10 +pub const ACCOUNT_GET_MAP_ITEM_OFFSET=11 +pub const ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET=12 +pub const ACCOUNT_SET_MAP_ITEM_OFFSET=13 # Vault -const ACCOUNT_GET_INITIAL_VAULT_ROOT_OFFSET=14 -const ACCOUNT_GET_VAULT_ROOT_OFFSET=15 -const ACCOUNT_ADD_ASSET_OFFSET=16 -const ACCOUNT_REMOVE_ASSET_OFFSET=17 -const ACCOUNT_GET_BALANCE_OFFSET=18 -const ACCOUNT_GET_INITIAL_BALANCE_OFFSET=19 -const ACCOUNT_HAS_NON_FUNGIBLE_ASSET_OFFSET=20 +pub const ACCOUNT_GET_INITIAL_VAULT_ROOT_OFFSET=14 +pub const ACCOUNT_GET_VAULT_ROOT_OFFSET=15 +pub const ACCOUNT_ADD_ASSET_OFFSET=16 +pub const ACCOUNT_REMOVE_ASSET_OFFSET=17 +pub const ACCOUNT_GET_ASSET_OFFSET=18 +pub const ACCOUNT_GET_INITIAL_ASSET_OFFSET=19 # Delta -const ACCOUNT_COMPUTE_DELTA_COMMITMENT_OFFSET=21 +pub const ACCOUNT_COMPUTE_DELTA_COMMITMENT_OFFSET=20 # Procedure introspection -const ACCOUNT_GET_NUM_PROCEDURES_OFFSET=22 -const ACCOUNT_GET_PROCEDURE_ROOT_OFFSET=23 -const ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET=24 -const ACCOUNT_HAS_PROCEDURE_OFFSET=25 +pub const ACCOUNT_GET_NUM_PROCEDURES_OFFSET=21 +pub const ACCOUNT_GET_PROCEDURE_ROOT_OFFSET=22 +pub const ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET=23 +pub const ACCOUNT_HAS_PROCEDURE_OFFSET=24 ### Faucet ###################################### -const FAUCET_MINT_ASSET_OFFSET=26 -const FAUCET_BURN_ASSET_OFFSET=27 -const FAUCET_GET_TOTAL_FUNGIBLE_ASSET_ISSUANCE_OFFSET=28 -const FAUCET_IS_NON_FUNGIBLE_ASSET_ISSUED_OFFSET=29 +pub const FAUCET_MINT_ASSET_OFFSET=25 +pub const FAUCET_BURN_ASSET_OFFSET=26 +pub const FAUCET_HAS_CALLBACKS_OFFSET=27 ### Note ######################################## # input notes -const INPUT_NOTE_GET_METADATA_OFFSET=30 -const 
INPUT_NOTE_GET_ASSETS_INFO_OFFSET=31 -const INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET=32 -const INPUT_NOTE_GET_INPUTS_INFO_OFFSET=33 -const INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET=34 -const INPUT_NOTE_GET_RECIPIENT_OFFSET=35 +pub const INPUT_NOTE_GET_METADATA_OFFSET=28 +pub const INPUT_NOTE_GET_ASSETS_INFO_OFFSET=29 +pub const INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET=30 +pub const INPUT_NOTE_GET_STORAGE_INFO_OFFSET=31 +pub const INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET=32 +pub const INPUT_NOTE_GET_RECIPIENT_OFFSET=33 # output notes -const OUTPUT_NOTE_CREATE_OFFSET=36 -const OUTPUT_NOTE_GET_METADATA_OFFSET=37 -const OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET=38 -const OUTPUT_NOTE_GET_RECIPIENT_OFFSET=39 -const OUTPUT_NOTE_ADD_ASSET_OFFSET=40 -const OUTPUT_NOTE_SET_ATTACHMENT_OFFSET=41 +pub const OUTPUT_NOTE_CREATE_OFFSET=34 +pub const OUTPUT_NOTE_GET_METADATA_OFFSET=35 +pub const OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET=36 +pub const OUTPUT_NOTE_GET_RECIPIENT_OFFSET=37 +pub const OUTPUT_NOTE_ADD_ASSET_OFFSET=38 +pub const OUTPUT_NOTE_SET_ATTACHMENT_OFFSET=39 ### Tx ########################################## # input notes -const TX_GET_NUM_INPUT_NOTES_OFFSET=42 -const TX_GET_INPUT_NOTES_COMMITMENT_OFFSET=43 +pub const TX_GET_NUM_INPUT_NOTES_OFFSET=40 +pub const TX_GET_INPUT_NOTES_COMMITMENT_OFFSET=41 # output notes -const TX_GET_NUM_OUTPUT_NOTES_OFFSET=44 -const TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET=45 +pub const TX_GET_NUM_OUTPUT_NOTES_OFFSET=42 +pub const TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET=43 # block info -const TX_GET_BLOCK_COMMITMENT_OFFSET=46 -const TX_GET_BLOCK_NUMBER_OFFSET=47 -const TX_GET_BLOCK_TIMESTAMP_OFFSET=48 +pub const TX_GET_BLOCK_COMMITMENT_OFFSET=44 +pub const TX_GET_BLOCK_NUMBER_OFFSET=45 +pub const TX_GET_BLOCK_TIMESTAMP_OFFSET=46 # foreign context -const TX_START_FOREIGN_CONTEXT_OFFSET=49 -const TX_END_FOREIGN_CONTEXT_OFFSET=50 +pub const TX_PREPARE_FPI_OFFSET = 47 +pub const TX_EXEC_FOREIGN_PROC_OFFSET = 48 # expiration data -const TX_GET_EXPIRATION_DELTA_OFFSET=51 # accessor 
-const TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET=52 # mutator - -# ACCESSORS -# ------------------------------------------------------------------------------------------------- - -### ACCOUNT ##################################### - -#! Returns the offset of the `account_get_initial_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_commitment` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_get_initial_commitment_offset - push.ACCOUNT_GET_INITIAL_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_compute_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_compute_commitment` kernel procedure required to get -#! the address where this procedure is stored. -pub proc account_compute_commitment_offset - push.ACCOUNT_COMPUTE_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_compute_delta_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_compute_delta_commitment` kernel procedure required -#! to get the address where this procedure is stored. -pub proc account_compute_delta_commitment_offset - push.ACCOUNT_COMPUTE_DELTA_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_get_id` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_id` kernel procedure required to get the address -#! where this procedure is stored. -pub proc account_get_id_offset - push.ACCOUNT_GET_ID_OFFSET -end - -#! Returns the offset of the `account_get_nonce` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_nonce` kernel procedure required to get the -#! 
address where this procedure is stored. -pub proc account_get_nonce_offset - push.ACCOUNT_GET_NONCE_OFFSET -end - -#! Returns the offset of the `account_incr_nonce` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_incr_nonce` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_incr_nonce_offset - push.ACCOUNT_INCR_NONCE_OFFSET -end - -#! Returns the offset of the `account_get_code_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_code_commitment` kernel procedure required to get -#! the address where this procedure is stored. -pub proc account_get_code_commitment_offset - push.ACCOUNT_GET_CODE_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_get_initial_storage_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_storage_commitment` kernel procedure -#! required to get the address where this procedure is stored. -pub proc account_get_initial_storage_commitment_offset - push.ACCOUNT_GET_INITIAL_STORAGE_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_compute_storage_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_compute_storage_commitment` kernel procedure required -#! to get the address where this procedure is stored. -pub proc account_compute_storage_commitment_offset - push.ACCOUNT_COMPUTE_STORAGE_COMMITMENT_OFFSET -end - -#! Returns the offset of the `account_get_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_item` kernel procedure required to get the -#! address where this procedure is stored. 
-pub proc account_get_item_offset - push.ACCOUNT_GET_ITEM_OFFSET -end - -#! Returns the offset of the `account_set_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_set_item` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_set_item_offset - push.ACCOUNT_SET_ITEM_OFFSET -end - -#! Returns the offset of the `account_get_map_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_map_item` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_get_map_item_offset - push.ACCOUNT_GET_MAP_ITEM_OFFSET -end - -#! Returns the offset of the `account_set_map_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_set_map_item` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_set_map_item_offset - push.ACCOUNT_SET_MAP_ITEM_OFFSET -end - -#! Returns the offset of the `account_get_initial_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_item` kernel procedure required to get -#! the address where this procedure is stored. -pub proc account_get_initial_item_offset - push.ACCOUNT_GET_INITIAL_ITEM_OFFSET -end - -#! Returns the offset of the `account_get_initial_map_item` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_map_item` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_get_initial_map_item_offset - push.ACCOUNT_GET_INITIAL_MAP_ITEM_OFFSET -end - -#! Returns the offset of the `account_get_initial_vault_root` kernel procedure. -#! -#! Inputs: [] -#! 
Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_vault_root` kernel procedure required -#! to get the address where this procedure is stored. -pub proc account_get_initial_vault_root_offset - push.ACCOUNT_GET_INITIAL_VAULT_ROOT_OFFSET -end - -#! Returns the offset of the `account_get_vault_root` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_vault_root` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_get_vault_root_offset - push.ACCOUNT_GET_VAULT_ROOT_OFFSET -end - -#! Returns the offset of the `account_add_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_add_asset` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_add_asset_offset - push.ACCOUNT_ADD_ASSET_OFFSET -end - -#! Returns the offset of the `account_remove_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_remove_asset` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_remove_asset_offset - push.ACCOUNT_REMOVE_ASSET_OFFSET -end - -#! Returns the offset of the `account_get_balance` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_balance` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_get_balance_offset - push.ACCOUNT_GET_BALANCE_OFFSET -end - -#! Returns the offset of the `account_get_initial_balance` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_initial_balance` kernel procedure required to get -#! the address where this procedure is stored. 
-pub proc account_get_initial_balance_offset - push.ACCOUNT_GET_INITIAL_BALANCE_OFFSET -end - -#! Returns the offset of the `account_has_non_fungible_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_has_non_fungible_asset` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_has_non_fungible_asset_offset - push.ACCOUNT_HAS_NON_FUNGIBLE_ASSET_OFFSET -end - -#! Returns the offset of the `account_was_procedure_called` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_was_procedure_called` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_was_procedure_called_offset - push.ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET -end - -#! Returns the offset of the `account_has_procedure` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_has_procedure` kernel procedure required to get the -#! address where this procedure is stored. -pub proc account_has_procedure_offset - push.ACCOUNT_HAS_PROCEDURE_OFFSET -end - -#! Returns the offset of the `account_get_num_procedures` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_num_procedures` kernel procedure required to -#! get the address where this procedure is stored. -pub proc account_get_num_procedures_offset - push.ACCOUNT_GET_NUM_PROCEDURES_OFFSET -end - -#! Returns the offset of the `account_get_procedure_root` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `account_get_procedure_root` kernel procedure required to -#! get the address where this procedure is stored. 
-pub proc account_get_procedure_root_offset - push.ACCOUNT_GET_PROCEDURE_ROOT_OFFSET -end - -### FAUCET ###################################### - -#! Returns the offset of the `faucet_mint_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `faucet_mint_asset` kernel procedure required to get the -#! address where this procedure is stored. -pub proc faucet_mint_asset_offset - push.FAUCET_MINT_ASSET_OFFSET -end - -#! Returns the offset of the `faucet_burn_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `faucet_burn_asset` kernel procedure required to get the -#! address where this procedure is stored. -pub proc faucet_burn_asset_offset - push.FAUCET_BURN_ASSET_OFFSET -end - -#! Returns the offset of the `faucet_get_total_fungible_asset_issuance` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `faucet_get_total_fungible_asset_issuance` kernel procedure -#! required to get the address where this procedure is stored. -pub proc faucet_get_total_fungible_asset_issuance_offset - push.FAUCET_GET_TOTAL_FUNGIBLE_ASSET_ISSUANCE_OFFSET -end - -#! Returns the offset of the `faucet_is_non_fungible_asset_issued` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `faucet_is_non_fungible_asset_issued` kernel procedure -#! required to get the address where this procedure is stored. -pub proc faucet_is_non_fungible_asset_issued_offset - push.FAUCET_IS_NON_FUNGIBLE_ASSET_ISSUED_OFFSET -end - -### OUTPUT NOTE ######################################## - -#! Returns the offset of the `output_note_create` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_create` kernel procedure required to get the -#! 
address where this procedure is stored. -pub proc output_note_create_offset - push.OUTPUT_NOTE_CREATE_OFFSET -end - -#! Returns the offset of the `output_note_add_asset` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_add_asset` kernel procedure required to get the -#! address where this procedure is stored. -pub proc output_note_add_asset_offset - push.OUTPUT_NOTE_ADD_ASSET_OFFSET -end - -#! Returns the offset of the `output_note_set_attachment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_set_attachment` kernel procedure required to get -#! the address where this procedure is stored. -pub proc output_note_set_attachment_offset - push.OUTPUT_NOTE_SET_ATTACHMENT_OFFSET -end - - -#! Returns the offset of the `output_note_get_assets_info` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_get_assets_info` kernel procedure required to get -#! the address where this procedure is stored. -pub proc output_note_get_assets_info_offset - push.OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET -end - -#! Returns the offset of the `output_note_get_recipient` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_get_recipient` kernel procedure required to get -#! the address where this procedure is stored. -pub proc output_note_get_recipient_offset - push.OUTPUT_NOTE_GET_RECIPIENT_OFFSET -end - -#! Returns the offset of the `output_note_get_metadata` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `output_note_get_metadata` kernel procedure required to get -#! the address where this procedure is stored. 
-pub proc output_note_get_metadata_offset - push.OUTPUT_NOTE_GET_METADATA_OFFSET -end - -### INPUT NOTE ######################################## - -#! Returns the offset of the `input_note_get_assets_info` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_assets_info` kernel procedure required to get -#! the address where this procedure is stored. -pub proc input_note_get_assets_info_offset - push.INPUT_NOTE_GET_ASSETS_INFO_OFFSET -end - -#! Returns the offset of the `input_note_get_recipient` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_recipient` kernel procedure required to get -#! the address where this procedure is stored. -pub proc input_note_get_recipient_offset - push.INPUT_NOTE_GET_RECIPIENT_OFFSET -end - -#! Returns the offset of the `input_note_get_metadata` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_metadata` kernel procedure required to get -#! the address where this procedure is stored. -pub proc input_note_get_metadata_offset - push.INPUT_NOTE_GET_METADATA_OFFSET -end - -#! Returns the offset of the `input_note_get_serial_number` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_serial_number` kernel procedure required to -#! get the address where this procedure is stored. -pub proc input_note_get_serial_number_offset - push.INPUT_NOTE_GET_SERIAL_NUMBER_OFFSET -end - -#! Returns the offset of the `input_note_get_inputs_info` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_inputs_info` kernel procedure required to get -#! the address where this procedure is stored. 
-pub proc input_note_get_inputs_info_offset - push.INPUT_NOTE_GET_INPUTS_INFO_OFFSET -end - -#! Returns the offset of the `input_note_get_script_root` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `input_note_get_script_root` kernel procedure required to get -#! the address where this procedure is stored. -pub proc input_note_get_script_root_offset - push.INPUT_NOTE_GET_SCRIPT_ROOT_OFFSET -end - -### TRANSACTION ################################# - -#! Returns the offset of the `tx_get_input_notes_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_input_notes_commitment` kernel procedure required to -#! get the address where this procedure is stored. -pub proc tx_get_input_notes_commitment_offset - push.TX_GET_INPUT_NOTES_COMMITMENT_OFFSET -end - -#! Returns the offset of the `tx_get_output_notes_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_output_notes_commitment` kernel procedure required to -#! get the address where this procedure is stored. -pub proc tx_get_output_notes_commitment_offset - push.TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET -end - -#! Returns the offset of the `tx_get_num_input_notes` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_num_input_notes` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_get_num_input_notes_offset - push.TX_GET_NUM_INPUT_NOTES_OFFSET -end - -#! Returns the offset of the `tx_get_num_output_notes` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_num_output_notes` kernel procedure required to get the -#! address where this procedure is stored. 
-pub proc tx_get_num_output_notes_offset - push.TX_GET_NUM_OUTPUT_NOTES_OFFSET -end - -#! Returns the offset of the `tx_get_block_commitment` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_block_commitment` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_get_block_commitment_offset - push.TX_GET_BLOCK_COMMITMENT_OFFSET -end - -#! Returns the offset of the `tx_get_block_number` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_block_number` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_get_block_number_offset - push.TX_GET_BLOCK_NUMBER_OFFSET -end - -#! Returns the offset of the `tx_get_block_timestamp` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_block_timestamp` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_get_block_timestamp_offset - push.TX_GET_BLOCK_TIMESTAMP_OFFSET -end - -#! Returns the offset of the `tx_start_foreign_context` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_start_foreign_context` kernel procedure required to get -#! the address where this procedure is stored. -pub proc tx_start_foreign_context_offset - push.TX_START_FOREIGN_CONTEXT_OFFSET -end - -#! Returns the offset of the `tx_end_foreign_context` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_end_foreign_context` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_end_foreign_context_offset - push.TX_END_FOREIGN_CONTEXT_OFFSET -end - -#! Returns the offset of the `tx_update_expiration_block_delta` kernel procedure. -#! -#! 
Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_update_expiration_block_delta` kernel procedure required -#! to get the address where this procedure is stored. -pub proc tx_update_expiration_block_delta_offset - push.TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET -end - -#! Returns the offset of the `tx_get_expiration_delta` kernel procedure. -#! -#! Inputs: [] -#! Outputs: [proc_offset] -#! -#! Where: -#! - proc_offset is the offset of the `tx_get_expiration_delta` kernel procedure required to get the -#! address where this procedure is stored. -pub proc tx_get_expiration_delta_offset - push.TX_GET_EXPIRATION_DELTA_OFFSET -end +pub const TX_GET_EXPIRATION_DELTA_OFFSET=49 # accessor +pub const TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET=50 # mutator diff --git a/crates/miden-protocol/asm/protocol/native_account.masm b/crates/miden-protocol/asm/protocol/native_account.masm index 197f52ba25..1c67fa5492 100644 --- a/crates/miden-protocol/asm/protocol/native_account.masm +++ b/crates/miden-protocol/asm/protocol/native_account.masm @@ -1,4 +1,11 @@ -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_GET_ID_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_INCR_NONCE_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_COMPUTE_DELTA_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_SET_ITEM_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_SET_MAP_ITEM_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_ADD_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_REMOVE_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET # NATIVE ACCOUNT PROCEDURES # ================================================================================================= @@ -9,10 +16,10 @@ use miden::protocol::kernel_proc_offsets #! Returns the ID of the native account of the transaction. #! #! Inputs: [] -#! 
Outputs: [account_id_prefix, account_id_suffix] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the native account ID of the +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the native account ID of the #! transaction. #! #! Invocation: exec @@ -25,15 +32,15 @@ pub proc get_id push.1 # => [is_native = 1, pad(14)] - exec.kernel_proc_offsets::account_get_id_offset - # => [offset, is_native = 0, pad(14)] + push.ACCOUNT_GET_ID_OFFSET + # => [offset, is_native = 1, pad(14)] syscall.exec_kernel_proc - # => [account_id_prefix, account_id_suffix, pad(14)] + # => [account_id_suffix, account_id_prefix, pad(14)] # clean the stack swapdw dropw dropw swapw dropw movdn.3 movdn.3 drop drop - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] end #! Increments the nonce of the native account by one and returns the new nonce. @@ -57,7 +64,7 @@ pub proc incr_nonce padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_incr_nonce_offset + push.ACCOUNT_INCR_NONCE_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -82,7 +89,7 @@ end #! return the empty word even if the initial storage commitment and the current storage commitment #! are identical (storage hasn't changed). This is because the delta for a new account must #! represent its entire newly created state, and the initial storage in a transaction is initialized -#! to the storage that the account ID commits to, which may be non-empty. This does not have any +#! to the storage that the account ID commits to, which may be non-empty. This does not have any #! consequences other than being inconsistent in this edge case. #! #! 
Inputs: [] @@ -98,7 +105,7 @@ pub proc compute_delta_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::account_compute_delta_commitment_offset + push.ACCOUNT_COMPUTE_DELTA_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -114,11 +121,11 @@ end #! Sets an item in the native account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! the first two felts of the hashed slot name. #! - VALUE is the value to set. #! - OLD_VALUE is the previous value of the item. @@ -126,16 +133,15 @@ end #! Panics if: #! - a slot with the provided slot ID does not exist in account storage. #! - the invocation of this procedure does not originate from the native account. -#! - the native account is a faucet and the provided slot ID points to the reserved faucet storage slot. #! #! Invocation: exec pub proc set_item - exec.kernel_proc_offsets::account_set_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, VALUE] + push.ACCOUNT_SET_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, VALUE] # pad the stack push.0 movdn.7 padw padw swapdw - # => [offset, slot_id_prefix, slot_id_suffix, VALUE, pad(9)] + # => [offset, slot_id_suffix, slot_id_prefix, VALUE, pad(9)] syscall.exec_kernel_proc # => [OLD_VALUE, pad(12)] @@ -147,11 +153,11 @@ end #! Sets a map item in the native account storage. #! -#! Inputs: [slot_id_prefix, slot_id_suffix, KEY, VALUE] +#! Inputs: [slot_id_suffix, slot_id_prefix, KEY, VALUE] #! Outputs: [OLD_VALUE] #! #! Where: -#! - slot_id_{prefix, suffix} are the prefix and suffix felts of the slot identifier, which are +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier, which are #! 
the first two felts of the hashed slot name. #! - the slot must point to the root of the storage map. #! - KEY is the key to set at VALUE. @@ -166,18 +172,18 @@ end #! #! Invocation: exec pub proc set_map_item - exec.kernel_proc_offsets::account_set_map_item_offset - # => [offset, slot_id_prefix, slot_id_suffix, KEY, VALUE] + push.ACCOUNT_SET_MAP_ITEM_OFFSET + # => [offset, slot_id_suffix, slot_id_prefix, KEY, VALUE] # pad the stack push.0 padw - # => [pad(4), 0, offset, slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [pad(4), 0, offset, slot_id_suffix, slot_id_prefix, KEY, VALUE] movdnw.3 - # => [0, offset, slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(4)] + # => [0, offset, slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(4)] movdn.11 - # => [offset, slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(5)] + # => [offset, slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(5)] syscall.exec_kernel_proc # => [OLD_VALUE, pad(12)] @@ -192,14 +198,16 @@ end #! Add the specified asset to the vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET'] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE'] #! #! Where: -#! - ASSET' is a final asset in the account vault defined as follows: -#! - If ASSET is a non-fungible asset, then ASSET' is the same as ASSET. -#! - If ASSET is a fungible asset, then ASSET' is the total fungible asset in the account vault -#! after ASSET was added to it. +#! - ASSET_KEY is the vault key of the asset that is added to the vault. +#! - ASSET_VALUE is the value of the asset to add to the vault. +#! - ASSET_VALUE' final asset in the account vault defined as follows: +#! - If ASSET_VALUE is a non-fungible asset, then ASSET_VALUE' is the same as ASSET_VALUE. +#! - If ASSET_VALUE is a fungible asset, then ASSET_VALUE' is the total fungible asset in the account vault +#! after ASSET_VALUE was added to it. #! #! Panics if: #! - the asset is not valid. @@ -208,28 +216,29 @@ end #! #! 
Invocation: exec pub proc add_asset - exec.kernel_proc_offsets::account_add_asset_offset - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.ACCOUNT_ADD_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET'] + # => [ASSET_VALUE'] end #! Remove the specified asset from the vault. #! -#! Inputs: [ASSET] -#! Outputs: [ASSET] +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [ASSET_VALUE] #! #! Where: -#! - ASSET is the asset to remove from the vault. +#! - ASSET_KEY is the vault key of the asset to remove from the vault. +#! - ASSET_VALUE is the value of the asset to remove from the vault. #! #! Panics if: #! - the fungible asset is not found in the vault. @@ -238,19 +247,19 @@ end #! #! Invocation: exec pub proc remove_asset - exec.kernel_proc_offsets::account_remove_asset_offset - # => [offset, ASSET] - # pad the stack - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [offset, ASSET, pad(11)] + padw padw swapdw movup.8 drop + # => [ASSET_KEY, ASSET_VALUE, pad(7)] + + push.ACCOUNT_REMOVE_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, pad(7)] syscall.exec_kernel_proc - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] # clean the stack swapdw dropw dropw swapw dropw - # => [ASSET] + # => [ASSET_VALUE] end # CODE @@ -271,7 +280,7 @@ end #! #! 
Invocation: exec pub proc was_procedure_called - exec.kernel_proc_offsets::account_was_procedure_called_offset + push.ACCOUNT_WAS_PROCEDURE_CALLED_OFFSET # => [offset, PROC_ROOT] # pad the stack diff --git a/crates/miden-protocol/asm/protocol/note.masm b/crates/miden-protocol/asm/protocol/note.masm index 962f8ecc6b..482a264547 100644 --- a/crates/miden-protocol/asm/protocol/note.masm +++ b/crates/miden-protocol/asm/protocol/note.masm @@ -1,47 +1,46 @@ use miden::protocol::account_id -use miden::core::crypto::hashes::rpo256 -use miden::core::math::u64 +use miden::core::crypto::hashes::poseidon2 use miden::core::mem # Re-export the max inputs per note constant. -pub use ::miden::protocol::util::note::MAX_INPUTS_PER_NOTE +pub use ::miden::protocol::util::note::MAX_NOTE_STORAGE_ITEMS # ERRORS # ================================================================================================= -const ERR_PROLOGUE_NOTE_INPUTS_LEN_EXCEEDED_LIMIT="number of note inputs exceeded the maximum limit of 1024" +const ERR_PROLOGUE_NOTE_NUM_STORAGE_ITEMS_EXCEEDED_LIMIT="number of note storage exceeded the maximum limit of 1024" # NOTE UTILITY PROCEDURES # ================================================================================================= -#! Computes the commitment to the note inputs starting at the specified memory address. +#! Computes the commitment to the note storage starting at the specified memory address. #! -#! This procedure checks that the provided number of note inputs is within limits and then computes +#! This procedure checks that the provided number of note storage items is within limits and then computes #! the commitment. #! -#! If the number of note inputs is 0, procedure returns the empty word: [0, 0, 0, 0]. +#! If the number of note storage items is 0, procedure returns the empty word: [0, 0, 0, 0]. #! -#! Inputs: [inputs_ptr, num_inputs] -#! Outputs: [INPUTS_COMMITMENT] +#! Inputs: [storage_ptr, num_storage_items] +#! 
Outputs: [STORAGE_COMMITMENT] #! #! Cycles: #! - If number of elements divides by 8: 56 cycles + 3 * words #! - Else: 189 cycles + 3 * words #! #! Panics if: -#! - inputs_ptr is not word-aligned (i.e., is not a multiple of 4). -#! - num_inputs is greater than 1024. +#! - storage_ptr is not word-aligned (i.e., is not a multiple of 4). +#! - num_storage_items is greater than 1024. #! #! Invocation: exec -pub proc compute_inputs_commitment - # check that number of inputs is less than or equal to MAX_INPUTS_PER_NOTE - dup.1 push.MAX_INPUTS_PER_NOTE u32assert2.err=ERR_PROLOGUE_NOTE_INPUTS_LEN_EXCEEDED_LIMIT - u32lte assert.err=ERR_PROLOGUE_NOTE_INPUTS_LEN_EXCEEDED_LIMIT - # => [inputs_ptr, num_inputs] - - # compute the inputs commitment (over the unpadded values) - exec.rpo256::hash_elements - # => [INPUTS_COMMITMENT] +pub proc compute_storage_commitment + # check that number of storage items is less than or equal to MAX_NOTE_STORAGE_ITEMS + dup.1 push.MAX_NOTE_STORAGE_ITEMS u32assert2.err=ERR_PROLOGUE_NOTE_NUM_STORAGE_ITEMS_EXCEEDED_LIMIT + u32lte assert.err=ERR_PROLOGUE_NOTE_NUM_STORAGE_ITEMS_EXCEEDED_LIMIT + # => [storage_ptr, num_storage_items] + + # compute the storage commitment (over the unpadded values) + exec.poseidon2::hash_elements + # => [STORAGE_COMMITMENT] end #! 
Writes the assets data stored in the advice map to the memory specified by the provided @@ -60,160 +59,140 @@ pub proc write_assets_to_memory # OS => [ASSETS_COMMITMENT, num_assets, dest_ptr] # AS => [[ASSETS_DATA]] - # calculate number of assets rounded up to an even number - dup.4 dup is_odd add - # OS => [even_num_assets, ASSETS_COMMITMENT, num_assets, dest_ptr] + dup.5 dup.5 + # OS => [num_assets, dest_ptr, ASSETS_COMMITMENT, num_assets, dest_ptr] # AS => [[ASSETS_DATA]] - # prepare the stack for the `pipe_preimage_to_memory` procedure - dup.6 swap - # OS => [even_num_assets, dest_ptr, ASSETS_COMMITMENT, num_assets, dest_ptr] + # each asset takes up two words, so num_words = 2 * num_assets + # this also guarantees we pass an even number to pipe_double_words_preimage_to_memory + mul.2 + # OS => [num_words, dest_ptr, ASSETS_COMMITMENT, num_assets, dest_ptr] # AS => [[ASSETS_DATA]] # write the data from the advice stack into memory - exec.mem::pipe_preimage_to_memory drop + exec.mem::pipe_double_words_preimage_to_memory drop # OS => [num_assets, dest_ptr] # AS => [] end -#! Builds the recipient hash from note inputs, script root, and serial number. +#! Builds the recipient hash from note storage, script root, and serial number. #! -#! This procedure computes the commitment of the note inputs and then uses it to calculate the note +#! This procedure computes the commitment of the note storage and then uses it to calculate the note #! recipient by hashing this commitment, the provided script root, and the serial number. #! #! Inputs: -#! Operand stack: [inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] +#! Operand stack: [storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT] #! Advice map: { -#! INPUTS_COMMITMENT: [INPUTS], +#! STORAGE_COMMITMENT: [INPUTS], #! } #! Outputs: #! Operand stack: [RECIPIENT] #! Advice map: { -#! INPUTS_COMMITMENT: [INPUTS], -#! RECIPIENT: [SERIAL_SCRIPT_HASH, INPUTS_COMMITMENT], +#! STORAGE_COMMITMENT: [INPUTS], +#! 
RECIPIENT: [SERIAL_SCRIPT_HASH, STORAGE_COMMITMENT], #! SERIAL_SCRIPT_HASH: [SERIAL_HASH, SCRIPT_ROOT], #! SERIAL_HASH: [SERIAL_NUM, EMPTY_WORD], #! } #! #! Where: -#! - inputs_ptr is the memory address where the note inputs are stored. -#! - num_inputs is the number of input values. +#! - storage_ptr is the memory address where the note storage are stored. +#! - num_storage_items is the number of input values. #! - SCRIPT_ROOT is the script root of the note. #! - SERIAL_NUM is the serial number of the note. -#! - RECIPIENT is the commitment to the input note's script, inputs, and the serial number. +#! - RECIPIENT is the commitment to the input note's script, storage, and the serial number. #! #! Locals: -#! - 0: inputs_ptr -#! - 1: num_inputs +#! - 0: storage_ptr +#! - 1: num_storage_items #! #! Panics if: -#! - inputs_ptr is not word-aligned (i.e., is not a multiple of 4). -#! - num_inputs is greater than 1024. +#! - storage_ptr is not word-aligned (i.e., is not a multiple of 4). +#! - num_storage_items is greater than 1024. #! #! 
Invocation: exec -@locals(1) pub proc build_recipient dup.1 dup.1 - # => [inputs_ptr, num_inputs, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - exec.compute_inputs_commitment - # => [INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - # store num_inputs into local memory - dup.5 loc_store.0 - # => [INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - locaddr.0 add.1 - locaddr.0 - # => [num_inputs_start_ptr, num_inputs_end_ptr, INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - dup.5 dup.5 dup.5 dup.5 - # => [INPUTS_COMMITMENT, num_inputs_start_ptr, num_inputs_end_ptr, - # INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] - - # compute the advice map key for num_inputs by hashing the inputs commitment - exec.rpo256::hash - # => [hash(INPUTS_COMMITMENT), num_inputs_start_ptr, num_inputs_end_ptr, - # INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] + # => [storage_ptr, num_storage_items, storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT] - adv.insert_mem - dropw drop drop - # => [INPUTS_COMMITMENT, inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT] + exec.compute_storage_commitment + # => [STORAGE_COMMITMENT, storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT] movup.5 movup.5 dup movdn.2 - # => [inputs_ptr, num_inputs, inputs_ptr, INPUTS_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] + # => [storage_ptr, num_storage_items, storage_ptr, STORAGE_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] add swap - # => [inputs_ptr, end_ptr, INPUTS_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] + # => [storage_ptr, end_ptr, STORAGE_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] movdn.5 movdn.5 - # => [INPUTS_COMMITMENT, inputs_ptr, end_ptr, SERIAL_NUM, SCRIPT_ROOT] + # => [STORAGE_COMMITMENT, storage_ptr, end_ptr, SERIAL_NUM, SCRIPT_ROOT] adv.insert_mem - # => [INPUTS_COMMITMENT, inputs_ptr, end_ptr, SERIAL_NUM, SCRIPT_ROOT] + # => [STORAGE_COMMITMENT, storage_ptr, end_ptr, SERIAL_NUM, SCRIPT_ROOT] movup.4 drop 
movup.4 drop - # => [INPUTS_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] + # => [STORAGE_COMMITMENT, SERIAL_NUM, SCRIPT_ROOT] movdnw.2 - # => [SERIAL_NUM, SCRIPT_ROOT, INPUTS_COMMITMENT] + # => [SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT] + + padw swapw + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT] - padw adv.insert_hdword exec.rpo256::merge - # => [SERIAL_HASH, SCRIPT_ROOT, INPUTS_COMMITMENT] + adv.insert_hdword exec.poseidon2::merge + # => [SERIAL_COMMITMENT, SCRIPT_ROOT, STORAGE_COMMITMENT] - swapw adv.insert_hdword exec.rpo256::merge - # => [SERIAL_SCRIPT_HASH, INPUTS_COMMITMENT] + adv.insert_hdword exec.poseidon2::merge + # => [SERIAL_SCRIPT_COMMITMENT, STORAGE_COMMITMENT] - swapw adv.insert_hdword exec.rpo256::merge + adv.insert_hdword exec.poseidon2::merge # => [RECIPIENT] end -#! Returns the RECIPIENT for a specified SERIAL_NUM, SCRIPT_ROOT, and inputs commitment. +#! Returns the RECIPIENT for a specified SERIAL_NUM, SCRIPT_ROOT and STORAGE_COMMITMENT. #! -#! Inputs: [SERIAL_NUM, SCRIPT_ROOT, INPUT_COMMITMENT] +#! Inputs: [SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT] #! Outputs: [RECIPIENT] #! #! Where: #! - SERIAL_NUM is the serial number of the recipient. #! - SCRIPT_ROOT is the commitment of the note script. -#! - INPUT_COMMITMENT is the commitment of the note inputs. +#! - STORAGE_COMMITMENT is the commitment of the note storage. #! - RECIPIENT is the recipient of the note. #! #! Invocation: exec pub proc build_recipient_hash - padw exec.rpo256::merge - # => [SERIAL_NUM_HASH, SCRIPT_ROOT, INPUT_COMMITMENT] + padw swapw + # => [SERIAL_NUM, EMPTY_WORD, SCRIPT_ROOT, STORAGE_COMMITMENT] + + exec.poseidon2::merge + # => [SERIAL_NUM_HASH, SCRIPT_ROOT, STORAGE_COMMITMENT] - swapw exec.rpo256::merge - # => [MERGE_SCRIPT, INPUT_COMMITMENT] + exec.poseidon2::merge + # => [MERGE_SCRIPT, STORAGE_COMMITMENT] - swapw exec.rpo256::merge - # [RECIPIENT] + exec.poseidon2::merge + # => [RECIPIENT] end #! 
Extracts the sender ID from the provided metadata header. #! #! Inputs: [METADATA_HEADER] -#! Outputs: [sender_id_prefix, sender_id_suffix] +#! Outputs: [sender_id_suffix, sender_id_prefix] #! #! Where: #! - METADATA_HEADER is the metadata of a note. -#! - sender_{prefix,suffix} are the prefix and suffix felts of the sender ID of the note which +#! - sender_{suffix,prefix} are the suffix and prefix felts of the sender ID of the note which #! metadata was provided. pub proc extract_sender_from_metadata - # => [attachment_kind_scheme, tag, sender_id_prefix, sender_id_suffix_and_note_type] + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] - # drop attachment kind, attachment scheme and tag - drop drop swap + # drop tag and attachment_kind_scheme + movup.3 drop movup.2 drop # => [sender_id_suffix_and_note_type, sender_id_prefix] # extract suffix of sender from merged layout, which means clearing the least significant byte exec.account_id::shape_suffix # => [sender_id_suffix, sender_id_prefix] - - # rearrange suffix and prefix - swap - # => [sender_id_prefix, sender_id_suffix] end #! Extracts the attachment kind and scheme from the provided metadata header. @@ -228,44 +207,13 @@ end #! #! Invocation: exec pub proc extract_attachment_info_from_metadata - # => [attachment_kind_scheme, METADATA_HEADER[1..4]] - movdn.3 drop drop drop + # => [sender_id_suffix_and_note_type, sender_id_prefix, tag, attachment_kind_scheme] + drop drop drop # => [attachment_kind_scheme] # deconstruct the attachment_kind_scheme to extract the attachment_scheme # attachment_kind_scheme = [30 zero bits | attachment_kind (2 bits) | attachment_scheme (32 bits)] - # u32split splits into [high, low] where low is attachment_scheme - u32split + # u32split splits into [lo, hi] where lo is attachment_scheme + u32split swap # => [attachment_kind, attachment_scheme] end - -#! Computes the tag for a network note for a given network account such that it is -#! 
picked up by the network transaction builder. -#! -#! This procedure implements the same logic as in Rust in NoteTag::from_network_account_id(). -#! Note: This procedure does not check if the account id is a network account id. -#! -#! Inputs: [account_id_prefix, account_id_suffix] -#! Outputs: [network_account_tag] -#! -#! Where: -#! - account_id_prefix, account_id_suffix is the account id to compute the note tag for. -#! - network_account_tag is the computed network note tag. -#! -#! Invocation: exec -pub proc build_note_tag_for_network_account - swap drop - # => [account_id_prefix] - - u32split - # => [a_hi, a_lo] - - push.34 - # => [b, a_hi, a_lo] - - exec.u64::shr - # => [c_hi, c_lo] - - drop - # => [network_account_tag] -end diff --git a/crates/miden-protocol/asm/protocol/output_note.masm b/crates/miden-protocol/asm/protocol/output_note.masm index d8898bea68..afc344f195 100644 --- a/crates/miden-protocol/asm/protocol/output_note.masm +++ b/crates/miden-protocol/asm/protocol/output_note.masm @@ -1,13 +1,18 @@ -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_CREATE_OFFSET +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_ADD_ASSET_OFFSET +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_SET_ATTACHMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_GET_RECIPIENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::OUTPUT_NOTE_GET_METADATA_OFFSET use miden::protocol::note -# CONSTANTS +# CONSTANTS # ================================================================================================= -# Constants for note attachment kinds -pub const ATTACHMENT_KIND_NONE=0 -pub const ATTACHMENT_KIND_WORD=1 -pub const ATTACHMENT_KIND_ARRAY=2 +# Re-export constants for note attachment kinds +pub use ::miden::protocol::util::note::ATTACHMENT_KIND_NONE +pub use ::miden::protocol::util::note::ATTACHMENT_KIND_WORD 
+pub use ::miden::protocol::util::note::ATTACHMENT_KIND_ARRAY # PROCEDURES # ================================================================================================= @@ -38,7 +43,7 @@ pub proc create padw padw swapdw drop # => [tag, note_type, RECIPIENT, pad(9)] - exec.kernel_proc_offsets::output_note_create_offset + push.OUTPUT_NOTE_CREATE_OFFSET # => [offset, tag, note_type, RECIPIENT, pad(9)] syscall.exec_kernel_proc @@ -70,7 +75,7 @@ pub proc get_assets_info push.0.0 movup.2 # => [note_index, 0, 0] - exec.kernel_proc_offsets::output_note_get_assets_info_offset + push.OUTPUT_NOTE_GET_ASSETS_INFO_OFFSET # => [offset, note_index, 0, 0] # pad the stack @@ -121,24 +126,24 @@ pub proc get_assets # => [num_assets, dest_ptr, note_index] end -#! Adds the ASSET to the note specified by the index. +#! Adds the asset to the note specified by the index. #! -#! Inputs: [ASSET, note_idx] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] #! Outputs: [] #! #! Where: #! - note_idx is the index of the note to which the asset is added. -#! - ASSET can be a fungible or non-fungible asset. +#! - ASSET_KEY is the vault key of the asset to add. +#! - ASSET_VALUE is the value of the asset to add. #! #! Invocation: exec pub proc add_asset - movup.4 exec.kernel_proc_offsets::output_note_add_asset_offset - # => [offset, note_idx, ASSET] + push.OUTPUT_NOTE_ADD_ASSET_OFFSET + # => [offset, ASSET_KEY, ASSET_VALUE, note_idx] - # pad the stack before the syscall to prevent accidental modification of the deeper stack - # elements - push.0.0 movdn.7 movdn.7 padw padw swapdw - # => [offset, note_idx, ASSET, pad(10)] + # pad the stack + repeat.6 push.0 movdn.10 end + # => [offset, ASSET_KEY, ASSET_VALUE, note_idx, pad(6)] syscall.exec_kernel_proc # => [pad(16)] @@ -175,7 +180,7 @@ end #! #! 
Invocation: exec pub proc set_attachment - exec.kernel_proc_offsets::output_note_set_attachment_offset + push.OUTPUT_NOTE_SET_ATTACHMENT_OFFSET # => [offset, note_idx, attachment_scheme, attachment_kind, ATTACHMENT] # pad the stack before the syscall @@ -255,7 +260,7 @@ end #! #! Where: #! - note_index is the index of the output note whose recipient should be returned. -#! - RECIPIENT is the commitment to the note note's script, inputs, the serial number. +#! - RECIPIENT is the commitment to the note note's script, storage, the serial number. #! #! Panics if: #! - the note index is greater or equal to the total number of output notes. @@ -266,7 +271,7 @@ pub proc get_recipient push.0.0 movup.2 # => [note_index, 0, 0] - exec.kernel_proc_offsets::output_note_get_recipient_offset + push.OUTPUT_NOTE_GET_RECIPIENT_OFFSET # => [offset, note_index, 0, 0] # pad the stack @@ -300,7 +305,7 @@ pub proc get_metadata push.0.0 movup.2 # => [note_index, 0, 0] - exec.kernel_proc_offsets::output_note_get_metadata_offset + push.OUTPUT_NOTE_GET_METADATA_OFFSET # => [offset, note_index, 0, 0] # pad the stack diff --git a/crates/miden-protocol/asm/protocol/tx.masm b/crates/miden-protocol/asm/protocol/tx.masm index b55b354ebb..2e5e6fdf88 100644 --- a/crates/miden-protocol/asm/protocol/tx.masm +++ b/crates/miden-protocol/asm/protocol/tx.masm @@ -1,4 +1,14 @@ -use miden::protocol::kernel_proc_offsets +use ::miden::protocol::kernel_proc_offsets::TX_GET_BLOCK_NUMBER_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_BLOCK_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_BLOCK_TIMESTAMP_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_INPUT_NOTES_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_NUM_INPUT_NOTES_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_NUM_OUTPUT_NOTES_OFFSET +use 
::miden::protocol::kernel_proc_offsets::TX_PREPARE_FPI_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_EXEC_FOREIGN_PROC_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET +use ::miden::protocol::kernel_proc_offsets::TX_GET_EXPIRATION_DELTA_OFFSET #! Returns the block number of the transaction reference block. #! @@ -14,7 +24,7 @@ pub proc get_block_number padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_block_number_offset + push.TX_GET_BLOCK_NUMBER_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -39,7 +49,7 @@ pub proc get_block_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_block_commitment_offset + push.TX_GET_BLOCK_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -82,7 +92,7 @@ pub proc get_block_timestamp padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_block_timestamp_offset + push.TX_GET_BLOCK_TIMESTAMP_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -109,7 +119,7 @@ pub proc get_input_notes_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_input_notes_commitment_offset + push.TX_GET_INPUT_NOTES_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -135,7 +145,7 @@ pub proc get_output_notes_commitment padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_output_notes_commitment_offset + push.TX_GET_OUTPUT_NOTES_COMMITMENT_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -160,7 +170,7 @@ pub proc get_num_input_notes padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_num_input_notes_offset + push.TX_GET_NUM_INPUT_NOTES_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc @@ -188,7 +198,7 @@ pub proc get_num_output_notes padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_num_output_notes_offset + push.TX_GET_NUM_OUTPUT_NOTES_OFFSET # => [offset, 
pad(15)] syscall.exec_kernel_proc @@ -201,55 +211,57 @@ end #! Executes the provided procedure against the foreign account. #! -#! WARNING: the procedure to be invoked can not have more than 15 inputs and it can not return more -#! than 15 elements back. Otherwise exceeding elements will not be provided to the procedure and -#! will not be returned from it. -#! -#! Inputs: [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, , pad(n)] -#! Outputs: [] +#! Inputs: [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_procedure_inputs(16)] +#! Outputs: [foreign_procedure_outputs(16)] #! #! Where: -#! - foreign_account_id_{prefix,suffix} are the prefix and suffix felts of the account ID of the +#! - foreign_account_id_{suffix,prefix} are the suffix and prefix felts of the account ID of the #! foreign account to execute the procedure on. -#! - pad(n) is the exact number of pads needed to set the number of procedure inputs to 16 at the -#! moment of the foreign procedure execution (n = 16 - mem_addr_size - foreign_inputs_len). +#! - foreign_procedure_inputs are the inputs to the foreign procedure padded to 16 felts. +#! - foreign_procedure_outputs are the outputs of the foreign procedure padded to 16 felts. +#! +#! Panics if: +#! - the provided foreign account ID is invalid. #! #! 
Invocation: exec -@locals(4) +@locals(6) # foreign proc root (4) + foreign account ID (2) pub proc execute_foreign_procedure - # get the start_foreign_context procedure offset - push.0 movup.2 movup.2 exec.kernel_proc_offsets::tx_start_foreign_context_offset - # => [offset, foreign_account_id_prefix, foreign_account_id_suffix, 0, FOREIGN_PROC_ROOT, , pad(n)] - - # pad the stack before the syscall - padw swapw padw padw swapdw - # => [offset, foreign_account_id_prefix, foreign_account_id_suffix, pad(13), FOREIGN_PROC_ROOT, , pad(n)] - - # load the foreign account to the memory - syscall.exec_kernel_proc - # => [pad(16), FOREIGN_PROC_ROOT, , pad(n)] + # store the foreign account ID and foreign procedure root to the local memory + # this will allow us to get the 16th element of the foreign procedure inputs to pass it to the + # `tx_prepare_fpi` kernel procedure + loc_store.4 loc_store.5 loc_storew_le.0 dropw + # OS => [foreign_procedure_inputs(16)] + # LM => [FOREIGN_PROC_ROOT, foreign_account_id_suffix, foreign_account_id_prefix] - # prepare the stack for the dyncall - dropw dropw dropw dropw - # => [FOREIGN_PROC_ROOT, , pad(n)] + # move up the last element of the foreign procedure inputs + movup.15 + # => [foreign_proc_input_value_15, foreign_procedure_inputs(15)] - # store the foreign procedure root to the first local memory slot and get its absolute memory - # address - loc_storew_be.0 dropw locaddr.0 - # => [foreign_proc_root_ptr, , pad(n)] + # load the foreign account ID and foreign procedure root back to the operand stack + padw loc_loadw_le.0 loc_load.5 loc_load.4 + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, foreign_procedure_inputs(15)] - # execute the foreign procedure - dyncall - # => [] + # get the tx_prepare_fpi procedure offset + push.TX_PREPARE_FPI_OFFSET + # => [offset, foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, foreign_proc_input_value_15, 
foreign_procedure_inputs(15)] - # reset the active account data offset to the native offset (2048) - push.0.0.0 padw padw padw - exec.kernel_proc_offsets::tx_end_foreign_context_offset - # => [offset, pad(15), ] + # pad the stack before the syscall + padw padw swapdw + # => [offset, foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # foreign_proc_input_value_15, pad(8), foreign_procedure_inputs(15)] + # store the foreign account ID, foreign procedure root, and the 16th (last) element of the + # foreign procedure inputs to the memory syscall.exec_kernel_proc + # => [pad(16), foreign_procedure_inputs(15)] + + # clean the stack dropw dropw dropw dropw - # => [] + # => [foreign_procedure_inputs(15)] + + # perform the FPI call + push.TX_EXEC_FOREIGN_PROC_OFFSET syscall.exec_kernel_proc + # => [foreign_procedure_outputs(16)] end #! Updates the transaction expiration delta. @@ -270,7 +282,7 @@ end #! #! Annotation hint: is not used anywhere pub proc update_expiration_block_delta - exec.kernel_proc_offsets::tx_update_expiration_block_delta_offset + push.TX_UPDATE_EXPIRATION_BLOCK_DELTA_OFFSET # => [offset, expiration_delta, ...] # pad the stack @@ -297,7 +309,7 @@ pub proc get_expiration_block_delta padw padw padw push.0.0.0 # => [pad(15)] - exec.kernel_proc_offsets::tx_get_expiration_delta_offset + push.TX_GET_EXPIRATION_DELTA_OFFSET # => [offset, pad(15)] syscall.exec_kernel_proc diff --git a/crates/miden-protocol/asm/shared_modules/account_id.masm b/crates/miden-protocol/asm/shared_modules/account_id.masm index 4dcf7d4e11..9f1637a9cc 100644 --- a/crates/miden-protocol/asm/shared_modules/account_id.masm +++ b/crates/miden-protocol/asm/shared_modules/account_id.masm @@ -51,11 +51,11 @@ const ACCOUNT_ID_STORAGE_MODE_INVALID_U32=0xc0 # 0b1100_0000 #! Returns a boolean indicating whether the account is a fungible faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_fungible_faucet] #! #! Where: -#! 
- acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_fungible_faucet is a boolean indicating whether the account is a fungible faucet. pub proc is_fungible_faucet exec.id_type eq.FUNGIBLE_FAUCET_ACCOUNT @@ -64,11 +64,11 @@ end #! Returns a boolean indicating whether the account is a non-fungible faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_non_fungible_faucet] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_non_fungible_faucet is a boolean indicating whether the account is a non-fungible faucet. pub proc is_non_fungible_faucet exec.id_type eq.NON_FUNGIBLE_FAUCET_ACCOUNT @@ -77,43 +77,43 @@ end #! Returns a boolean indicating whether the given account_ids are equal. #! -#! Inputs: [acct_id_prefix, acct_id_suffix, other_acct_id_prefix, other_acct_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix, other_account_id_suffix, other_account_id_prefix] #! Outputs: [is_id_equal] #! #! Where: -#! - acct_id_{prefix,suffix} are the prefix and suffix felts of an account ID. -#! - other_acct_id_{prefix,suffix} are the prefix and suffix felts of the other account ID to +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. +#! - other_account_id_{suffix,prefix} are the suffix and prefix felts of the other account ID to #! compare against. #! - is_id_equal is a boolean indicating whether the account IDs are equal. pub proc is_equal movup.2 eq - # => [is_prefix_equal, acct_id_suffix, other_acct_id_suffix] + # => [is_suffix_equal, account_id_prefix, other_account_id_prefix] movdn.2 eq - # => [is_suffix_equal, is_prefix_equal] + # => [is_prefix_equal, is_suffix_equal] and # => [is_id_equal] end #! Returns a boolean indicating whether the account is a faucet. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_faucet] #! #! Where: -#! 
- acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_faucet is a boolean indicating whether the account is a faucet. pub proc is_faucet - u32split drop u32and.FAUCET_ACCOUNT neq.0 + u32split swap drop u32and.FAUCET_ACCOUNT neq.0 # => [is_faucet] end #! Returns a boolean indicating whether the account is a regular updatable account. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_updatable_account] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_updatable_account is a boolean indicating whether the account is a regular updatable #! account. pub proc is_updatable_account @@ -123,11 +123,11 @@ end #! Returns a boolean indicating whether the account is a regular immutable account. #! -#! Inputs: [acct_id_prefix] +#! Inputs: [account_id_prefix] #! Outputs: [is_immutable_account] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. +#! - account_id_prefix is the prefix of the account ID. #! - is_immutable_account is a boolean indicating whether the account is a regular immutable #! account. pub proc is_immutable_account @@ -138,11 +138,11 @@ end #! Validates an account ID. Note that this does not validate anything about the account type, #! since any 2-bit pattern is a valid account type. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of the account ID. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of the account ID. #! #! Panics if: #! - account_id_prefix does not contain version zero. @@ -150,13 +150,31 @@ end #! - account_id_suffix does not have its most significant bit set to zero. #! - account_id_suffix does not have its lower 8 bits set to zero. pub proc validate + # Validate lower 8 bits of suffix are zero. 
+ # --------------------------------------------------------------------------------------------- + + u32split + # => [account_id_suffix_lo, account_id_suffix_hi, account_id_prefix] + u32and.0xff eq.0 + # => [is_least_significant_byte_zero, account_id_suffix_hi, account_id_prefix] + assert.err=ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO + # => [account_id_suffix_hi, account_id_prefix] + + # Validate most significant bit in suffix is zero. + # --------------------------------------------------------------------------------------------- + + u32shr.31 eq.0 + # => [is_most_significant_bit_zero, account_id_prefix] + assert.err=ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO + # => [account_id_prefix] + # Validate version in prefix. For now only version 0 is supported. # --------------------------------------------------------------------------------------------- dup exec.id_version - # => [id_version, account_id_prefix, account_id_suffix] + # => [id_version, account_id_prefix] assertz.err=ERR_ACCOUNT_ID_UNKNOWN_VERSION - # => [account_id_prefix, account_id_suffix] + # => [account_id_prefix] # Validate storage mode in prefix. # --------------------------------------------------------------------------------------------- @@ -164,30 +182,13 @@ pub proc validate # there are 3 valid and 1 invalid storage mode # instead of checking the presence of any of the valid modes, we check the absence of the # invalid mode - u32split drop - # => [account_id_prefix_lo, account_id_suffix] + u32split swap drop + # => [account_id_prefix_lo] u32and.ACCOUNT_ID_STORAGE_MODE_MASK_U32 - # => [id_storage_mode_masked, account_id_suffix] + # => [id_storage_mode_masked] eq.ACCOUNT_ID_STORAGE_MODE_INVALID_U32 - # => [is_storage_mode_invalid, account_id_suffix] + # => [is_storage_mode_invalid] assertz.err=ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE - # => [account_id_suffix] - - # Validate most significant bit in suffix is zero. 
- # --------------------------------------------------------------------------------------------- - - u32split - # => [account_id_suffix_hi, account_id_suffix_lo] - u32shr.31 eq.0 - # => [is_most_significant_bit_zero, account_id_suffix_lo] - assert.err=ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO - - # Validate lower 8 bits of suffix are zero. - # --------------------------------------------------------------------------------------------- - - u32and.0xff eq.0 - # => [is_least_significant_byte_zero] - assert.err=ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO # => [] end @@ -202,7 +203,7 @@ end #! of an account ID. #! - account_id_suffix is the suffix of an account ID. pub proc shape_suffix - u32split swap + u32split # => [seed_digest_suffix_lo, seed_digest_suffix_hi] # clear lower 8 bits of the lo part @@ -227,7 +228,7 @@ end #! - id_version is the version number of the ID. proc id_version # extract the lower 32 bits - u32split drop + u32split swap drop # => [account_id_prefix_lo] # mask out the version @@ -243,13 +244,13 @@ end #! - FUNGIBLE_FAUCET_ACCOUNT #! - NON_FUNGIBLE_FAUCET_ACCOUNT #! -#! Inputs: [acct_id_prefix] -#! Outputs: [acct_type] +#! Inputs: [account_id_prefix] +#! Outputs: [account_type] #! #! Where: -#! - acct_id_prefix is the prefix of the account ID. -#! - acct_type is the account type. +#! - account_id_prefix is the prefix of the account ID. +#! - account_type is the account type. 
proc id_type - u32split drop u32and.ACCOUNT_ID_TYPE_MASK_U32 - # => [acct_type] + u32split swap drop u32and.ACCOUNT_ID_TYPE_MASK_U32 + # => [account_type] end diff --git a/crates/miden-protocol/asm/shared_utils/util/asset.masm b/crates/miden-protocol/asm/shared_utils/util/asset.masm index 225ed0366d..50e20e69a7 100644 --- a/crates/miden-protocol/asm/shared_utils/util/asset.masm +++ b/crates/miden-protocol/asm/shared_utils/util/asset.masm @@ -1,37 +1,347 @@ +# ERRORS +# ================================================================================================= + +const ERR_VAULT_INVALID_ENABLE_CALLBACKS = "enable_callbacks must be 0 or 1" + # CONSTANTS # ================================================================================================= # Specifies the maximum amount a fungible asset can represent. # # This is 2^63 - 2^31. See account_delta.masm for more details. -const FUNGIBLE_ASSET_MAX_AMOUNT=0x7fffffff80000000 +pub const FUNGIBLE_ASSET_MAX_AMOUNT=0x7fffffff80000000 + +# The number of elements in an asset, i.e. vault key and value. +pub const ASSET_SIZE = 8 + +# The offset of the asset value in an asset stored in memory. +pub const ASSET_VALUE_MEMORY_OFFSET = 4 + +# The flag representing disabled callbacks. +pub const CALLBACKS_DISABLED = 0 + +# The flag representing enabled callbacks. +pub const CALLBACKS_ENABLED = 1 # PROCEDURES # ================================================================================================= -#! Returns the maximum amount of a fungible asset. +#! Stores an asset key and value into memory at the given pointer. #! -#! Inputs: [] -#! Outputs: [fungible_asset_max_amount] +#! The memory range pointer..pointer+8 will be overwritten. +#! +#! Inputs: [ptr, ASSET_KEY, ASSET_VALUE] +#! Outputs: [] #! #! Where: -#! - fungible_asset_max_amount is the maximum amount of a fungible asset. -pub proc get_fungible_asset_max_amount - push.FUNGIBLE_ASSET_MAX_AMOUNT - # => [fungible_asset_max_amount] +#! 
- ptr is the memory address where the asset will be stored. +#! - ASSET_KEY is the 4-element word representing the asset key. +#! - ASSET_VALUE is the 4-element word representing the asset value. +pub proc store + # store asset key + movdn.4 dup.4 + # => [ptr, ASSET_KEY, ptr, ASSET_VALUE] + + mem_storew_le dropw + # => [ptr, ASSET_VALUE] + + # store asset value + add.ASSET_VALUE_MEMORY_OFFSET mem_storew_le dropw + # => [] end -#! Returns the balance of the given fungible asset. +#! Loads an asset key and value from memory given a pointer to the asset. +#! +#! Inputs: [ptr] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ptr is the memory address of the asset. +#! - ASSET_KEY is the 4-element word representing the asset key. +#! - ASSET_VALUE is the 4-element word representing the asset value. +pub proc load + # load asset value + padw dup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [ASSET_VALUE, ptr] + + # load asset key + padw movup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Returns the balance of the given fungible asset and consumes it. #! -#! Note: Assumes that the given asset is fungible and does NOT validate it. +#! WARNING: Assumes that the given asset value is fungible and does NOT validate it. #! -#! Inputs: [ASSET] +#! Inputs: [ASSET_VALUE] #! Outputs: [balance] #! #! Where: -#! - ASSET is the fungible asset from which to extract the balance. +#! - ASSET_VALUE is the fungible asset from which to extract the balance. #! - balance is the amount of the fungible asset. -pub proc get_balance_from_fungible_asset - drop drop drop +pub proc fungible_value_into_amount + movdn.3 drop drop drop # => [balance] end + +#! Returns the balance of the given fungible asset. +#! +#! WARNING: Assumes that the given asset value is fungible and does NOT validate it. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE] +#! Outputs: [amount, ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - ASSET_VALUE is the fungible asset from which to extract the balance. +#! 
- amount is the amount of the fungible asset. +pub proc fungible_to_amount + # => [ASSET_KEY, [amount, 0, 0, 0]] + dup.4 + # => [amount, ASSET_KEY, ASSET_VALUE] +end + +#! Returns the faucet ID from an asset vault key. +#! +#! WARNING: The faucet ID is not validated. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] +#! +#! Where: +#! - faucet_id is the account ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the faucet ID. +pub proc key_to_faucet_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + dup.3 dup.3 + # => [faucet_id_suffix_and_metadata, faucet_id_prefix, ASSET_KEY] + + exec.split_suffix_and_metadata drop + # => [faucet_id_suffix, faucet_id_prefix, ASSET_KEY] +end + +#! Returns the faucet ID from an asset vault key and consumes it. +#! +#! WARNING: The faucet ID is not validated. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [faucet_id_suffix, faucet_id_prefix] +#! +#! Where: +#! - faucet_id is the account ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the faucet ID. +pub proc key_into_faucet_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + drop drop + # => [faucet_id_suffix_and_metadata, faucet_id_prefix] + + exec.split_suffix_and_metadata drop + # => [faucet_id_suffix, faucet_id_prefix] +end + +#! Returns the asset ID from an asset vault key. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [asset_id_suffix, asset_id_prefix, ASSET_KEY] +#! +#! Where: +#! - asset_id is the asset ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the asset ID. +pub proc key_to_asset_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + dup.1 dup.1 + # => [asset_id_suffix, asset_id_prefix, ASSET_KEY] +end + +#! Returns the asset ID from an asset vault key and consumes it. +#! +#! Inputs: [ASSET_KEY] +#! 
Outputs: [asset_id_suffix, asset_id_prefix] +#! +#! Where: +#! - asset_id is the asset ID in the vault key. +#! - ASSET_KEY is the vault key from which to extract the asset ID. +pub proc key_into_asset_id + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix, faucet_id_prefix] + + movup.2 drop movup.2 drop + # => [asset_id_suffix, asset_id_prefix] +end + +#! Returns the asset callbacks flag from an asset vault key. +#! +#! Inputs: [ASSET_KEY] +#! Outputs: [callbacks_enabled, ASSET_KEY] +#! +#! Where: +#! - ASSET_KEY is the vault key from which to extract the metadata. +#! - callbacks_enabled is 1 if callbacks are enabled and 0 if disabled. +pub proc key_to_callbacks_enabled + # => [asset_id_suffix, asset_id_prefix, faucet_id_suffix_and_metadata, faucet_id_prefix] + + dup.2 + # => [faucet_id_suffix_and_metadata, ASSET_KEY] + + exec.split_suffix_and_metadata swap drop + # => [asset_metadata, ASSET_KEY] + + exec.metadata_into_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY] +end + +#! Creates a fungible asset vault key for the specified faucet. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix] +#! Outputs: [ASSET_KEY] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the fungible faucet. +#! - ASSET_KEY is the vault key for the fungible asset. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! Invocation: exec +pub proc create_fungible_key + exec.create_metadata + # => [asset_metadata, faucet_id_suffix, faucet_id_prefix] + + # merge the asset metadata into the lower 8 bits of the suffix + # this is safe since create_metadata builds only valid metadata + add + # => [faucet_id_suffix_and_metadata, faucet_id_prefix] + + push.0.0 + # => [0, 0, faucet_id_suffix_and_metadata, faucet_id_prefix] + # => [ASSET_KEY] +end + +#! Creates a fungible asset for the specified fungible faucet and amount. +#! +#! 
WARNING: Does not validate the faucet ID or amount. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset +#! for. +#! - amount is the amount of the asset to create. +#! - ASSET_KEY is the vault key of the created fungible asset +#! - ASSET_VALUE is the value of the created fungible asset. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! Invocation: exec +pub proc create_fungible_asset_unchecked + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount] + + # pad amount into ASSET_VALUE + repeat.3 push.0 movdn.4 end + # => [enable_callbacks, faucet_id_suffix, faucet_id_prefix, ASSET_VALUE] + + exec.create_fungible_key + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Creates a non fungible asset for the specified non-fungible faucet. +#! +#! WARNING: Does not validate its inputs. +#! +#! Inputs: [enable_callbacks, faucet_id_suffix, faucet_id_prefix, DATA_HASH] +#! Outputs: [ASSET_KEY, ASSET_VALUE] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether asset callbacks are enabled. +#! - faucet_id_{suffix,prefix} are the suffix and prefix felts of the faucet to create the asset +#! for. +#! - DATA_HASH is the data hash of the non-fungible asset to create. +#! - ASSET_KEY is the vault key of the created non-fungible asset. +#! - ASSET_VALUE is the value of the created non-fungible asset, which is identical to DATA_HASH. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. +#! +#! 
Invocation: exec +pub proc create_non_fungible_asset_unchecked + exec.create_metadata + # => [asset_metadata, faucet_id_suffix, faucet_id_prefix, DATA_HASH] + + # merge the asset metadata into the lower 8 bits of the suffix + add + # => [faucet_id_suffix_and_metadata, faucet_id_prefix, DATA_HASH] + + # copy hashes at indices 0 and 1 in the data hash word to the corresponding index in the key + # word + dup.3 dup.3 + # => [hash0, hash1, faucet_id_suffix_and_metadata, faucet_id_prefix, DATA_HASH] + # => [ASSET_KEY, ASSET_VALUE] +end + +#! Splits the merged faucet ID suffix and the asset metadata. +#! +#! Inputs: [faucet_id_suffix_and_metadata] +#! Outputs: [asset_metadata, faucet_id_suffix] +#! +#! Where: +#! - faucet_id_suffix_and_metadata is the faucet ID suffix merged with the asset metadata. +#! - faucet_id_suffix is the suffix of the account ID. +#! - asset_metadata is the asset metadata. +proc split_suffix_and_metadata + u32split + # => [suffix_metadata_lo, suffix_metadata_hi] + + dup movdn.2 + # => [suffix_metadata_lo, suffix_metadata_hi, suffix_metadata_lo] + + # clear lower 8 bits of the lo part to get the actual ID suffix + u32and.0xffffff00 swap + # => [suffix_metadata_hi, suffix_metadata_lo', suffix_metadata_lo] + + # reassemble the ID suffix by multiplying the hi part with 2^32 and adding the lo part + mul.0x0100000000 add + # => [faucet_id_suffix, suffix_metadata_lo] + + # extract lower 8 bits of the lo part to get the metadata + swap u32and.0xff + # => [asset_metadata, faucet_id_suffix] +end + +#! Creates asset metadata from the provided inputs. +#! +#! Inputs: [enable_callbacks] +#! Outputs: [asset_metadata] +#! +#! Where: +#! - enable_callbacks is a flag (0 or 1) indicating whether the asset callbacks flag should be set. +#! - asset_metadata is the asset metadata. +#! +#! Panics if: +#! - enable_callbacks is not 0 or 1. 
+proc create_metadata + u32assert.err=ERR_VAULT_INVALID_ENABLE_CALLBACKS + dup u32lte.CALLBACKS_ENABLED + assert.err=ERR_VAULT_INVALID_ENABLE_CALLBACKS + # => [asset_metadata] +end + +#! Extracts the asset callback flag from asset metadata. +#! +#! WARNING: asset_metadata is assumed to be a byte (in particular a valid u32) +#! +#! Inputs: [asset_metadata] +#! Outputs: [callbacks_enabled] +#! +#! Where: +#! - asset_metadata is the asset metadata. +#! - callbacks_enabled is 1 if callbacks are enabled and 0 if disabled. +proc metadata_into_callbacks_enabled + # extract the least significant bit of the metadata + u32and.1 + # => [callbacks_enabled] +end diff --git a/crates/miden-protocol/asm/shared_utils/util/note.masm b/crates/miden-protocol/asm/shared_utils/util/note.masm index 8c502a8f71..066dfcd2fb 100644 --- a/crates/miden-protocol/asm/shared_utils/util/note.masm +++ b/crates/miden-protocol/asm/shared_utils/util/note.masm @@ -1,5 +1,12 @@ # CONSTANTS # ================================================================================================= -# The maximum number of input values associated with a single note. -pub const MAX_INPUTS_PER_NOTE = 1024 +# The maximum number of storage values associated with a single note. +pub const MAX_NOTE_STORAGE_ITEMS = 1024 + +#! Signals the absence of a note attachment. +pub const ATTACHMENT_KIND_NONE=0 +#! A note attachment consisting of a single Word. +pub const ATTACHMENT_KIND_WORD=1 +#! A note attachment consisting of the commitment to a set of felts. 
+pub const ATTACHMENT_KIND_ARRAY=2 diff --git a/crates/miden-protocol/build.rs b/crates/miden-protocol/build.rs index a15237e295..f981e48ed8 100644 --- a/crates/miden-protocol/build.rs +++ b/crates/miden-protocol/build.rs @@ -6,17 +6,13 @@ use std::sync::Arc; use fs_err as fs; use miden_assembly::diagnostics::{IntoDiagnostic, Result, WrapErr, miette}; use miden_assembly::{Assembler, DefaultSourceManager, KernelLibrary, Library}; +use miden_core::events::EventId; use regex::Regex; use walkdir::WalkDir; // CONSTANTS // ================================================================================================ -/// Defines whether the build script should generate files in `/src`. -/// The docs.rs build pipeline has a read-only filesystem, so we have to avoid writing to `src`, -/// otherwise the docs will fail to build there. Note that writing to `OUT_DIR` is fine. -const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN_SRC").is_some(); - const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_PROTOCOL_DIR: &str = "protocol"; @@ -24,12 +20,12 @@ const ASM_PROTOCOL_DIR: &str = "protocol"; const SHARED_UTILS_DIR: &str = "shared_utils"; const SHARED_MODULES_DIR: &str = "shared_modules"; const ASM_TX_KERNEL_DIR: &str = "kernels/transaction"; -const KERNEL_PROCEDURES_RS_FILE: &str = "src/transaction/kernel/procedures.rs"; const PROTOCOL_LIB_NAMESPACE: &str = "miden::protocol"; -const TX_KERNEL_ERRORS_FILE: &str = "src/errors/tx_kernel.rs"; -const PROTOCOL_LIB_ERRORS_FILE: &str = "src/errors/protocol.rs"; +const KERNEL_PROCEDURES_RS_FILE: &str = "procedures.rs"; +const TX_KERNEL_ERRORS_RS_FILE: &str = "tx_kernel_errors.rs"; +const PROTOCOL_LIB_ERRORS_RS_FILE: &str = "protocol_errors.rs"; const TX_KERNEL_ERRORS_ARRAY_NAME: &str = "TX_KERNEL_ERRORS"; const PROTOCOL_LIB_ERRORS_ARRAY_NAME: &str = "PROTOCOL_LIB_ERRORS"; @@ -61,7 +57,6 @@ const TX_KERNEL_ERROR_CATEGORIES: [&str; 14] = [ fn main() -> Result<()> { // re-build when the 
MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); - println!("cargo::rerun-if-env-changed=BUILD_GENERATED_FILES_IN_SRC"); // Copies the MASM code to the build directory let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); @@ -80,14 +75,17 @@ fn main() -> Result<()> { let target_dir = Path::new(&build_dir).join(ASSETS_DIR); // compile transaction kernel - let mut assembler = - compile_tx_kernel(&source_dir.join(ASM_TX_KERNEL_DIR), &target_dir.join("kernels"))?; + let mut assembler = compile_tx_kernel( + &source_dir.join(ASM_TX_KERNEL_DIR), + &target_dir.join("kernels"), + &build_dir, + )?; // compile protocol library let protocol_lib = compile_protocol_lib(&source_dir, &target_dir, assembler.clone())?; assembler.link_dynamic_library(protocol_lib)?; - generate_error_constants(&source_dir)?; + generate_error_constants(&source_dir, &build_dir)?; generate_event_constants(&source_dir, &target_dir)?; @@ -118,7 +116,7 @@ fn main() -> Result<()> { /// - {target_dir}/tx_script_main.masb -> contains the executable compiled from /// tx_script_main.masm. /// - src/transaction/procedures/kernel_v0.rs -> contains the kernel procedures table. 
-fn compile_tx_kernel(source_dir: &Path, target_dir: &Path) -> Result { +fn compile_tx_kernel(source_dir: &Path, target_dir: &Path, build_dir: &str) -> Result { let shared_utils_path = std::path::Path::new(ASM_DIR).join(SHARED_UTILS_DIR); let kernel_path = miden_assembly::Path::kernel_path(); @@ -131,7 +129,7 @@ fn compile_tx_kernel(source_dir: &Path, target_dir: &Path) -> Result .assemble_kernel_from_dir(source_dir.join("api.masm"), Some(source_dir.join("lib")))?; // generate kernel `procedures.rs` file - generate_kernel_proc_hash_file(kernel_lib.clone())?; + generate_kernel_proc_hash_file(kernel_lib.clone(), build_dir)?; let output_file = target_dir.join("tx_kernel").with_extension(Library::LIBRARY_EXTENSION); kernel_lib.write_to_file(output_file).into_diagnostic()?; @@ -192,14 +190,10 @@ fn compile_tx_script_main( tx_script_main.write_to_file(masb_file_path).into_diagnostic() } -/// Generates kernel `procedures.rs` file based on the kernel library -fn generate_kernel_proc_hash_file(kernel: KernelLibrary) -> Result<()> { - // Because the kernel Rust file will be stored under ./src, this should be a no-op if we can't - // write there - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } - +/// Generates kernel `procedures.rs` file based on the kernel library. +/// +/// The file is written to `{build_dir}/procedures.rs` and included via `include!` in the source. 
+fn generate_kernel_proc_hash_file(kernel: KernelLibrary, build_dir: &str) -> Result<()> { let (_, module_info, _) = kernel.into_parts(); let to_exclude = BTreeSet::from_iter(["exec_kernel_proc"]); @@ -230,8 +224,9 @@ fn generate_kernel_proc_hash_file(kernel: KernelLibrary) -> Result<()> { txt }).collect::>().join("\n"); + let output_path = Path::new(build_dir).join(KERNEL_PROCEDURES_RS_FILE); fs::write( - KERNEL_PROCEDURES_RS_FILE, + output_path, format!( r#"// This file is generated by build.rs, do not modify @@ -251,7 +246,8 @@ pub const KERNEL_PROCEDURES: [Word; {proc_count}] = [ } fn parse_proc_offsets(filename: impl AsRef) -> Result> { - let regex: Regex = Regex::new(r"^const\s*(?P\w+)_OFFSET\s*=\s*(?P\d+)").unwrap(); + let regex: Regex = + Regex::new(r"^(?:pub\s+)?const\s*(?P\w+)_OFFSET\s*=\s*(?P\d+)").unwrap(); let mut result = BTreeMap::new(); for line in fs::read_to_string(filename).into_diagnostic()?.lines() { if let Some(captures) = regex.captures(line) { @@ -299,6 +295,7 @@ fn build_assembler(kernel: Option) -> Result { kernel .map(|kernel| Assembler::with_kernel(Arc::new(DefaultSourceManager::default()), kernel)) .unwrap_or_default() + .with_warnings_as_errors(true) .with_dynamic_library(miden_core_lib::CoreLibrary::default()) } @@ -352,14 +349,9 @@ fn copy_shared_modules>(source_dir: T) -> Result<()> { /// The function ensures that a constant is not defined twice, except if their error message is /// the same. This can happen across multiple files. /// -/// Because the error files will be written to ./src/errors, this should be a no-op if ./src is -/// read-only. To enable writing to ./src, set the `BUILD_GENERATED_FILES_IN_SRC` environment -/// variable. -fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } - +/// The generated files are written to `build_dir` (i.e. `OUT_DIR`) and included via `include!` +/// in the source. 
+fn generate_error_constants(asm_source_dir: &Path, build_dir: &str) -> Result<()> { // Transaction kernel errors // ------------------------------------------ @@ -370,7 +362,7 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { shared::generate_error_file( shared::ErrorModule { - file_name: TX_KERNEL_ERRORS_FILE, + file_path: Path::new(build_dir).join(TX_KERNEL_ERRORS_RS_FILE), array_name: TX_KERNEL_ERRORS_ARRAY_NAME, is_crate_local: true, }, @@ -386,7 +378,7 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { shared::generate_error_file( shared::ErrorModule { - file_name: PROTOCOL_LIB_ERRORS_FILE, + file_path: Path::new(build_dir).join(PROTOCOL_LIB_ERRORS_RS_FILE), array_name: PROTOCOL_LIB_ERRORS_ARRAY_NAME, is_crate_local: true, }, @@ -421,12 +413,7 @@ fn validate_tx_kernel_category(errors: &[shared::NamedError]) -> Result<()> { /// then generates the transaction_events.rs file with constants. fn generate_event_constants(asm_source_dir: &Path, target_dir: &Path) -> Result<()> { // Extract all event definitions from MASM files - let mut events = extract_all_event_definitions(asm_source_dir)?; - - // Add two additional events we want in `TransactionEventId` that do not appear in kernel or - // protocol lib modules. 
- events.insert("miden::auth::request".to_owned(), "AUTH_REQUEST".to_owned()); - events.insert("miden::auth::unauthorized".to_owned(), "AUTH_UNAUTHORIZED".to_owned()); + let events = extract_all_event_definitions(asm_source_dir)?; // Generate the events file in OUT_DIR let event_file_content = generate_event_file_content(&events).into_diagnostic()?; @@ -516,7 +503,7 @@ fn generate_event_file_content( // want to error out as early as possible: // TODO: make the error out at build-time to be able to present better error hints for (event_path, event_name) in events { - let value = miden_core::EventId::from_name(event_path).as_felt().as_int(); + let value = EventId::from_name(event_path).as_felt().as_canonical_u64(); debug_assert!(!event_name.is_empty()); writeln!(&mut output, "const {}: u64 = {};", event_name, value)?; } @@ -741,7 +728,7 @@ mod shared { } /// Generates the content of an error file for the given category and the set of errors and - /// writes it to the category's file. + /// writes it to the file at the path specified in the module. pub fn generate_error_file(module: ErrorModule, errors: Vec) -> Result<()> { let mut output = String::new(); @@ -788,7 +775,7 @@ mod shared { .into_diagnostic()?; } - std::fs::write(module.file_name, output).into_diagnostic()?; + fs::write(module.file_path, output).into_diagnostic()?; Ok(()) } @@ -806,9 +793,9 @@ mod shared { pub message: String, } - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Debug, Clone)] pub struct ErrorModule { - pub file_name: &'static str, + pub file_path: PathBuf, pub array_name: &'static str, pub is_crate_local: bool, } diff --git a/crates/miden-protocol/masm_doc_comment_fmt.md b/crates/miden-protocol/masm_doc_comment_fmt.md index dbf2eba095..b46daa8d1c 100644 --- a/crates/miden-protocol/masm_doc_comment_fmt.md +++ b/crates/miden-protocol/masm_doc_comment_fmt.md @@ -85,10 +85,17 @@ Example: Each variable could represent a single value or a sequence of four values (a Word). 
Variable representing a single value should be written in lowercase, and a variable for the word should be written in uppercase. +For multi-element values that are not exactly one word (4 felts), append `(N)` to indicate the count: + +- `value` is a single felt. +- `value(N)` are N felts (where N is not 4). +- `VALUE` is a word (4 felts). No `(4)` suffix is needed since uppercase already implies a word. + Example: ```masm #! Inputs: [single_value, SOME_WORD] +#! Inputs: [dest_address(5), amount_u256(8), pad(2)] ``` Variable, which represents a memory address, should have a `_ptr` suffix in its name. For example, `note_script_commitment_ptr`. diff --git a/crates/miden-protocol/src/account/account_id/account_type.rs b/crates/miden-protocol/src/account/account_id/account_type.rs index e02f9e7cef..1ea4c02f98 100644 --- a/crates/miden-protocol/src/account/account_id/account_type.rs +++ b/crates/miden-protocol/src/account/account_id/account_type.rs @@ -2,7 +2,13 @@ use core::fmt; use core::str::FromStr; use crate::errors::AccountIdError; -use crate::utils::serde::{ByteReader, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT TYPE // ================================================================================================ @@ -23,6 +29,24 @@ pub enum AccountType { } impl AccountType { + /// Returns all account types. + pub fn all() -> [AccountType; 4] { + [ + AccountType::FungibleFaucet, + AccountType::NonFungibleFaucet, + AccountType::RegularAccountImmutableCode, + AccountType::RegularAccountUpdatableCode, + ] + } + + /// Returns the regular account types (immutable and updatable code). + pub fn regular() -> [AccountType; 2] { + [ + AccountType::RegularAccountImmutableCode, + AccountType::RegularAccountUpdatableCode, + ] + } + /// Returns `true` if the account is a faucet. 
pub fn is_faucet(&self) -> bool { matches!(self, Self::FungibleFaucet | Self::NonFungibleFaucet) @@ -62,7 +86,7 @@ impl rand::distr::Distribution for rand::distr::StandardUniform { // ================================================================================================ impl Serializable for AccountType { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write_u8(*self as u8); } } diff --git a/crates/miden-protocol/src/account/account_id/id_prefix.rs b/crates/miden-protocol/src/account/account_id/id_prefix.rs index 46207bcc85..63669c59ee 100644 --- a/crates/miden-protocol/src/account/account_id/id_prefix.rs +++ b/crates/miden-protocol/src/account/account_id/id_prefix.rs @@ -4,7 +4,7 @@ use core::fmt; use super::v0; use crate::Felt; use crate::account::account_id::AccountIdPrefixV0; -use crate::account::{AccountIdV0, AccountIdVersion, AccountStorageMode, AccountType}; +use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::errors::AccountIdError; use crate::utils::serde::{ ByteReader, @@ -57,7 +57,7 @@ impl AccountIdPrefix { pub fn new_unchecked(prefix: Felt) -> Self { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(prefix.as_int()) + match v0::extract_version(prefix.as_canonical_u64()) .expect("prefix should contain a valid account ID version") { AccountIdVersion::Version0 => Self::V0(AccountIdPrefixV0::new_unchecked(prefix)), @@ -73,7 +73,7 @@ impl AccountIdPrefix { pub fn new(prefix: Felt) -> Result { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(prefix.as_int())? { + match v0::extract_version(prefix.as_canonical_u64())? { AccountIdVersion::Version0 => AccountIdPrefixV0::new(prefix).map(Self::V0), } } @@ -89,14 +89,14 @@ impl AccountIdPrefix { } /// Returns the prefix as a [`u64`]. 
- pub const fn as_u64(&self) -> u64 { + pub fn as_u64(&self) -> u64 { match self { AccountIdPrefix::V0(id_prefix) => id_prefix.as_u64(), } } /// Returns the type of this account ID. - pub const fn account_type(&self) -> AccountType { + pub fn account_type(&self) -> AccountType { match self { AccountIdPrefix::V0(id_prefix) => id_prefix.account_type(), } @@ -153,20 +153,6 @@ impl AccountIdPrefix { AccountIdPrefix::V0(id_prefix) => id_prefix.to_hex(), } } - - /// Returns `felt` with the fungible bit set to zero. The version must be passed as the location - /// of the fungible bit may depend on the underlying account ID version. - pub(crate) fn clear_fungible_bit(version: AccountIdVersion, felt: Felt) -> Felt { - match version { - AccountIdVersion::Version0 => { - // Set the fungible bit to zero by taking the bitwise `and` of the felt with the - // inverted is_faucet mask. - let clear_fungible_bit_mask = !AccountIdV0::IS_FAUCET_MASK; - Felt::try_from(felt.as_int() & clear_fungible_bit_mask) - .expect("felt should still be valid as we cleared a bit and did not set any") - }, - } - } } // CONVERSIONS FROM ACCOUNT ID PREFIX @@ -237,8 +223,11 @@ impl TryFrom for AccountIdPrefix { /// Returns an error if any of the ID constraints are not met. See the [constraints /// documentation](super::AccountId#constraints) for details. 
fn try_from(value: u64) -> Result { - let element = Felt::try_from(value.to_le_bytes().as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let element = Felt::try_from(value).map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; Self::new(element) } } diff --git a/crates/miden-protocol/src/account/account_id/mod.rs b/crates/miden-protocol/src/account/account_id/mod.rs index 03a575fbf8..0b0c9c137a 100644 --- a/crates/miden-protocol/src/account/account_id/mod.rs +++ b/crates/miden-protocol/src/account/account_id/mod.rs @@ -19,13 +19,18 @@ use core::fmt; use bech32::primitives::decode::ByteIter; pub use id_version::AccountIdVersion; use miden_core::Felt; -use miden_core::utils::{ByteReader, Deserializable, Serializable}; use miden_crypto::utils::hex_to_bytes; -use miden_processor::DeserializationError; use crate::Word; use crate::address::NetworkId; use crate::errors::{AccountError, AccountIdError}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The identifier of an [`Account`](crate::account::Account). /// @@ -142,13 +147,29 @@ impl AccountId { pub fn new_unchecked(elements: [Felt; 2]) -> Self { // The prefix contains the metadata. // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(elements[0].as_int()) + match v0::extract_version(elements[0].as_canonical_u64()) .expect("prefix should contain a valid account ID version") { AccountIdVersion::Version0 => Self::V0(AccountIdV0::new_unchecked(elements)), } } + /// Decodes an [`AccountId`] from the provided suffix and prefix felts. + /// + /// # Errors + /// + /// Returns an error if any of the ID constraints are not met. See the [constraints + /// documentation](AccountId#constraints) for details. 
+ pub fn try_from_elements(suffix: Felt, prefix: Felt) -> Result { + // The prefix contains the metadata. + // If we add more versions in the future, we may need to generalize this. + match v0::extract_version(prefix.as_canonical_u64())? { + AccountIdVersion::Version0 => { + AccountIdV0::try_from_elements(suffix, prefix).map(Self::V0) + }, + } + } + /// Constructs an [`AccountId`] for testing purposes with the given account type, storage /// mode. /// @@ -207,7 +228,7 @@ impl AccountId { // -------------------------------------------------------------------------------------------- /// Returns the type of this account ID. - pub const fn account_type(&self) -> AccountType { + pub fn account_type(&self) -> AccountType { match self { AccountId::V0(account_id) => account_id.account_type(), } @@ -398,25 +419,6 @@ impl From for AccountId { } } -impl TryFrom<[Felt; 2]> for AccountId { - type Error = AccountIdError; - - /// Returns an [`AccountId`] instantiated with the provided field elements where `elements[0]` - /// is taken as the prefix and `elements[1]` is taken as the suffix. - /// - /// # Errors - /// - /// Returns an error if any of the ID constraints are not met. See the [constraints - /// documentation](AccountId#constraints) for details. - fn try_from(elements: [Felt; 2]) -> Result { - // The prefix contains the metadata. - // If we add more versions in the future, we may need to generalize this. - match v0::extract_version(elements[0].as_int())? 
{ - AccountIdVersion::Version0 => AccountIdV0::try_from(elements).map(Self::V0), - } - } -} - impl TryFrom<[u8; 15]> for AccountId { type Error = AccountIdError; @@ -481,7 +483,7 @@ impl fmt::Display for AccountId { // ================================================================================================ impl Serializable for AccountId { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { match self { AccountId::V0(account_id) => { account_id.write_into(target); diff --git a/crates/miden-protocol/src/account/account_id/seed.rs b/crates/miden-protocol/src/account/account_id/seed.rs index 8ad1be02a7..ba3f285bd8 100644 --- a/crates/miden-protocol/src/account/account_id/seed.rs +++ b/crates/miden-protocol/src/account/account_id/seed.rs @@ -2,7 +2,7 @@ use alloc::vec::Vec; use crate::account::account_id::AccountIdVersion; use crate::account::account_id::v0::{compute_digest, validate_prefix}; -use crate::account::{AccountStorageMode, AccountType}; +use crate::account::{AccountIdV0, AccountStorageMode, AccountType}; use crate::errors::AccountError; use crate::{Felt, Word}; @@ -52,9 +52,9 @@ fn compute_account_seed_single( loop { // Check if the seed satisfies the specified type, storage mode and version. Additionally, // the most significant bit of the suffix must be zero to ensure felt validity. 
- let prefix = current_digest.as_elements()[0]; - let suffix = current_digest.as_elements()[1]; - let is_suffix_msb_zero = suffix.as_int() >> 63 == 0; + let suffix = current_digest[AccountIdV0::SEED_DIGEST_SUFFIX_ELEMENT_IDX]; + let prefix = current_digest[AccountIdV0::SEED_DIGEST_PREFIX_ELEMENT_IDX]; + let is_suffix_msb_zero = suffix.as_canonical_u64() >> 63 == 0; if let Ok((computed_account_type, computed_storage_mode, computed_version)) = validate_prefix(prefix) diff --git a/crates/miden-protocol/src/account/account_id/v0/mod.rs b/crates/miden-protocol/src/account/account_id/v0/mod.rs index 34ad3ebbb9..03fb3bc5a0 100644 --- a/crates/miden-protocol/src/account/account_id/v0/mod.rs +++ b/crates/miden-protocol/src/account/account_id/v0/mod.rs @@ -20,7 +20,13 @@ use crate::account::account_id::storage_mode::{NETWORK, PRIVATE, PUBLIC}; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::address::AddressType; use crate::errors::{AccountError, AccountIdError, Bech32Error}; -use crate::utils::{ByteReader, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{EMPTY_WORD, Felt, Hasher, Word}; // ACCOUNT ID VERSION 0 @@ -31,14 +37,14 @@ use crate::{EMPTY_WORD, Felt, Hasher, Word}; /// See the [`AccountId`](super::AccountId) type's documentation for details. 
#[derive(Debug, Copy, Clone, Eq, PartialEq)] pub struct AccountIdV0 { - prefix: Felt, suffix: Felt, + prefix: Felt, } impl Hash for AccountIdV0 { fn hash(&self, state: &mut H) { - self.prefix.inner().hash(state); - self.suffix.inner().hash(state); + self.prefix.as_canonical_u64().hash(state); + self.suffix.as_canonical_u64().hash(state); } } @@ -61,8 +67,11 @@ impl AccountIdV0 { pub(crate) const STORAGE_MODE_MASK: u8 = 0b11 << Self::STORAGE_MODE_SHIFT; pub(crate) const STORAGE_MODE_SHIFT: u64 = 6; - /// The bit at index 5 of the prefix encodes whether the account is a faucet. - pub(crate) const IS_FAUCET_MASK: u64 = 0b10 << Self::TYPE_SHIFT; + /// The element index in the seed digest that becomes the account ID suffix (after + /// [`shape_suffix`]). + pub(crate) const SEED_DIGEST_SUFFIX_ELEMENT_IDX: usize = 0; + /// The element index in the seed digest that becomes the account ID prefix. + pub(crate) const SEED_DIGEST_PREFIX_ELEMENT_IDX: usize = 1; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -75,13 +84,14 @@ impl AccountIdV0 { ) -> Result { let seed_digest = compute_digest(seed, code_commitment, storage_commitment); - let mut felts: [Felt; 2] = seed_digest.as_elements()[0..2] - .try_into() - .expect("we should have sliced off 2 elements"); + // Use the first half-word of the seed digest as the account ID, where the prefix is the + // most significant element. + let mut suffix = seed_digest[Self::SEED_DIGEST_SUFFIX_ELEMENT_IDX]; + let prefix = seed_digest[Self::SEED_DIGEST_PREFIX_ELEMENT_IDX]; - felts[1] = shape_suffix(felts[1]); + suffix = shape_suffix(suffix); - account_id_from_felts(felts) + Self::try_from_elements(suffix, prefix) } /// See [`AccountId::new_unchecked`](super::AccountId::new_unchecked) for details. @@ -98,6 +108,14 @@ impl AccountIdV0 { Self { prefix, suffix } } + /// See [`AccountId::try_from_elements`](super::AccountId::try_from_elements) for details. 
+ pub fn try_from_elements(suffix: Felt, prefix: Felt) -> Result { + validate_suffix(suffix)?; + validate_prefix(prefix)?; + + Ok(AccountIdV0 { suffix, prefix }) + } + /// See [`AccountId::dummy`](super::AccountId::dummy) for details. #[cfg(any(feature = "testing", test))] pub fn dummy( @@ -130,12 +148,12 @@ impl AccountIdV0 { let mut suffix = Felt::new(u64::from_be_bytes(suffix_bytes)); // Clear the most significant bit of the suffix. - suffix = Felt::try_from(suffix.as_int() & 0x7fff_ffff_ffff_ffff) + suffix = Felt::try_from(suffix.as_canonical_u64() & 0x7fff_ffff_ffff_ffff) .expect("no bits were set so felt should still be valid"); suffix = shape_suffix(suffix); - let account_id = account_id_from_felts([prefix, suffix]) + let account_id = Self::try_from_elements(suffix, prefix) .expect("we should have shaped the felts to produce a valid id"); debug_assert_eq!(account_id.account_type(), account_type); @@ -167,8 +185,8 @@ impl AccountIdV0 { // -------------------------------------------------------------------------------------------- /// See [`AccountId::account_type`](super::AccountId::account_type) for details. - pub const fn account_type(&self) -> AccountType { - extract_type(self.prefix.as_int()) + pub fn account_type(&self) -> AccountType { + extract_type(self.prefix.as_canonical_u64()) } /// See [`AccountId::is_faucet`](super::AccountId::is_faucet) for details. @@ -211,7 +229,7 @@ impl AccountIdV0 { // big-endian hex string. Only then can we cut off the last zero byte by truncating. We // cannot use `:014x` padding. let mut hex_string = - format!("0x{:016x}{:016x}", self.prefix().as_u64(), self.suffix().as_int()); + format!("0x{:016x}{:016x}", self.prefix().as_u64(), self.suffix().as_canonical_u64()); hex_string.truncate(32); hex_string } @@ -322,7 +340,7 @@ impl From for [u8; 15] { let mut result = [0_u8; 15]; result[..8].copy_from_slice(&id.prefix().as_u64().to_be_bytes()); // The last byte of the suffix is always zero so we skip it here. 
- result[8..].copy_from_slice(&id.suffix().as_int().to_be_bytes()[..7]); + result[8..].copy_from_slice(&id.suffix().as_canonical_u64().to_be_bytes()[..7]); result } } @@ -330,7 +348,7 @@ impl From for [u8; 15] { impl From for u128 { fn from(id: AccountIdV0) -> Self { let mut le_bytes = [0_u8; 16]; - le_bytes[..8].copy_from_slice(&id.suffix().as_int().to_le_bytes()); + le_bytes[..8].copy_from_slice(&id.suffix().as_canonical_u64().to_le_bytes()); le_bytes[8..].copy_from_slice(&id.prefix().as_u64().to_le_bytes()); u128::from_le_bytes(le_bytes) } @@ -339,16 +357,6 @@ impl From for u128 { // CONVERSIONS TO ACCOUNT ID // ================================================================================================ -impl TryFrom<[Felt; 2]> for AccountIdV0 { - type Error = AccountIdError; - - /// See [`TryFrom<[Felt; 2]> for - /// AccountId`](super::AccountId#impl-TryFrom<%5BFelt;+2%5D>-for-AccountId) for details. - fn try_from(elements: [Felt; 2]) -> Result { - account_id_from_felts(elements) - } -} - impl TryFrom<[u8; 15]> for AccountIdV0 { type Error = AccountIdError; @@ -369,13 +377,22 @@ impl TryFrom<[u8; 15]> for AccountIdV0 { let mut suffix_bytes = [0; 8]; suffix_bytes[1..8].copy_from_slice(suffix_slice); - let prefix = Felt::try_from(prefix_slice) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let prefix = Felt::try_from(u64::from_le_bytes( + prefix_slice.try_into().expect("prefix slice should be 8 bytes"), + )) + .map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; - let suffix = Felt::try_from(suffix_bytes.as_slice()) - .map_err(AccountIdError::AccountIdInvalidSuffixFieldElement)?; + let suffix = Felt::try_from(u64::from_le_bytes(suffix_bytes)).map_err(|err| { + AccountIdError::AccountIdInvalidSuffixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; - Self::try_from([prefix, suffix]) + Self::try_from_elements(suffix, prefix) } } 
@@ -396,7 +413,7 @@ impl TryFrom for AccountIdV0 { // ================================================================================================ impl Serializable for AccountIdV0 { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { let bytes: [u8; 15] = (*self).into(); bytes.write_into(target); } @@ -417,25 +434,12 @@ impl Deserializable for AccountIdV0 { // HELPER FUNCTIONS // ================================================================================================ -/// Returns an [AccountId] instantiated with the provided field elements. -/// -/// # Errors -/// -/// Returns an error if any of the ID constraints are not met. See the [constraints -/// documentation](AccountId#constraints) for details. -fn account_id_from_felts(elements: [Felt; 2]) -> Result { - validate_prefix(elements[0])?; - validate_suffix(elements[1])?; - - Ok(AccountIdV0 { prefix: elements[0], suffix: elements[1] }) -} - /// Checks that the prefix: /// - has known values for metadata (storage mode, type and version). pub(crate) fn validate_prefix( prefix: Felt, ) -> Result<(AccountType, AccountStorageMode, AccountIdVersion), AccountIdError> { - let prefix = prefix.as_int(); + let prefix = prefix.as_canonical_u64(); // Validate storage bits. let storage_mode = extract_storage_mode(prefix)?; @@ -451,8 +455,8 @@ pub(crate) fn validate_prefix( /// Checks that the suffix: /// - has its most significant bit set to zero. /// - has its lower 8 bits set to zero. -const fn validate_suffix(suffix: Felt) -> Result<(), AccountIdError> { - let suffix = suffix.as_int(); +fn validate_suffix(suffix: Felt) -> Result<(), AccountIdError> { + let suffix = suffix.as_canonical_u64(); // Validate most significant bit is zero. if suffix >> 63 != 0 { @@ -503,7 +507,7 @@ pub(crate) const fn extract_type(prefix: u64) -> AccountType { /// Shapes the suffix so it meets the requirements of the account ID, by setting the lower 8 bits to /// zero. 
fn shape_suffix(suffix: Felt) -> Felt { - let mut suffix = suffix.as_int(); + let mut suffix = suffix.as_canonical_u64(); // Clear the lower 8 bits. suffix &= 0xffff_ffff_ffff_ff00; diff --git a/crates/miden-protocol/src/account/account_id/v0/prefix.rs b/crates/miden-protocol/src/account/account_id/v0/prefix.rs index d2b0f6d116..26a5ffb325 100644 --- a/crates/miden-protocol/src/account/account_id/v0/prefix.rs +++ b/crates/miden-protocol/src/account/account_id/v0/prefix.rs @@ -3,12 +3,17 @@ use core::fmt; use core::hash::Hash; use miden_core::Felt; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::account_id::v0::{self, validate_prefix}; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::errors::AccountIdError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT ID PREFIX VERSION 0 // ================================================================================================ @@ -23,7 +28,7 @@ pub struct AccountIdPrefixV0 { impl Hash for AccountIdPrefixV0 { fn hash(&self, state: &mut H) { - self.prefix.inner().hash(state); + self.prefix.as_canonical_u64().hash(state); } } @@ -65,14 +70,14 @@ impl AccountIdPrefixV0 { } /// See [`AccountIdPrefix::as_u64`](crate::account::AccountIdPrefix::as_u64) for details. - pub const fn as_u64(&self) -> u64 { - self.prefix.as_int() + pub fn as_u64(&self) -> u64 { + self.prefix.as_canonical_u64() } /// See [`AccountIdPrefix::account_type`](crate::account::AccountIdPrefix::account_type) for /// details. - pub const fn account_type(&self) -> AccountType { - v0::extract_type(self.prefix.as_int()) + pub fn account_type(&self) -> AccountType { + v0::extract_type(self.prefix.as_canonical_u64()) } /// See [`AccountIdPrefix::is_faucet`](crate::account::AccountIdPrefix::is_faucet) for details. 
@@ -89,7 +94,7 @@ impl AccountIdPrefixV0 { /// See [`AccountIdPrefix::storage_mode`](crate::account::AccountIdPrefix::storage_mode) for /// details. pub fn storage_mode(&self) -> AccountStorageMode { - v0::extract_storage_mode(self.prefix.as_int()) + v0::extract_storage_mode(self.prefix.as_canonical_u64()) .expect("account ID prefix should have been constructed with a valid storage mode") } @@ -100,13 +105,13 @@ impl AccountIdPrefixV0 { /// See [`AccountIdPrefix::version`](crate::account::AccountIdPrefix::version) for details. pub fn version(&self) -> AccountIdVersion { - v0::extract_version(self.prefix.as_int()) + v0::extract_version(self.prefix.as_canonical_u64()) .expect("account ID prefix should have been constructed with a valid version") } /// See [`AccountIdPrefix::to_hex`](crate::account::AccountIdPrefix::to_hex) for details. pub fn to_hex(self) -> String { - format!("0x{:016x}", self.prefix.as_int()) + format!("0x{:016x}", self.prefix.as_canonical_u64()) } } @@ -122,14 +127,14 @@ impl From for Felt { impl From for [u8; 8] { fn from(id: AccountIdPrefixV0) -> Self { let mut result = [0_u8; 8]; - result[..8].copy_from_slice(&id.prefix.as_int().to_be_bytes()); + result[..8].copy_from_slice(&id.prefix.as_canonical_u64().to_be_bytes()); result } } impl From for u64 { fn from(id: AccountIdPrefixV0) -> Self { - id.prefix.as_int() + id.prefix.as_canonical_u64() } } @@ -143,11 +148,16 @@ impl TryFrom<[u8; 8]> for AccountIdPrefixV0 { /// AccountIdPrefix`](crate::account::AccountIdPrefix#impl-TryFrom<%5Bu8;+8% /// 5D>-for-AccountIdPrefix) for details. fn try_from(mut value: [u8; 8]) -> Result { - // Felt::try_from expects little-endian order. + // Reverse to little-endian order. 
value.reverse(); - Felt::try_from(value.as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement) + let num = u64::from_le_bytes(value); + Felt::try_from(num) + .map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement( + DeserializationError::InvalidValue(err.to_string()), + ) + }) .and_then(Self::new) } } @@ -159,8 +169,11 @@ impl TryFrom for AccountIdPrefixV0 { /// AccountIdPrefix`](crate::account::AccountIdPrefix#impl-TryFrom-for-AccountIdPrefix) /// for details. fn try_from(value: u64) -> Result { - let element = Felt::try_from(value.to_le_bytes().as_slice()) - .map_err(AccountIdError::AccountIdInvalidPrefixFieldElement)?; + let element = Felt::try_from(value).map_err(|err| { + AccountIdError::AccountIdInvalidPrefixFieldElement(DeserializationError::InvalidValue( + err.to_string(), + )) + })?; Self::new(element) } } @@ -187,7 +200,7 @@ impl PartialOrd for AccountIdPrefixV0 { impl Ord for AccountIdPrefixV0 { fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.prefix.as_int().cmp(&other.prefix.as_int()) + self.prefix.as_canonical_u64().cmp(&other.prefix.as_canonical_u64()) } } diff --git a/crates/miden-protocol/src/account/auth.rs b/crates/miden-protocol/src/account/auth.rs index 22a747898b..e4947095db 100644 --- a/crates/miden-protocol/src/account/auth.rs +++ b/crates/miden-protocol/src/account/auth.rs @@ -1,8 +1,11 @@ +use alloc::borrow::ToOwned; +use alloc::string::ToString; use alloc::vec::Vec; +use core::str::FromStr; use rand::{CryptoRng, Rng}; -use crate::crypto::dsa::{ecdsa_k256_keccak, falcon512_rpo}; +use crate::crypto::dsa::{ecdsa_k256_keccak, falcon512_poseidon2}; use crate::errors::AuthSchemeError; use crate::utils::serde::{ ByteReader, @@ -11,15 +14,18 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{Felt, Hasher, Word}; +use crate::{Felt, Word}; // AUTH SCHEME // ================================================================================================ /// Identifier of 
signature schemes use for transaction authentication -const FALCON_512_RPO: u8 = 0; +const FALCON512_POSEIDON2: u8 = 2; const ECDSA_K256_KECCAK: u8 = 1; +const FALCON512_POSEIDON2_STR: &str = "Falcon512Poseidon2"; +const ECDSA_K256_KECCAK_STR: &str = "EcdsaK256Keccak"; + /// Defines standard authentication schemes (i.e., signature schemes) available in the Miden /// protocol. #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -28,10 +34,10 @@ const ECDSA_K256_KECCAK: u8 = 1; pub enum AuthScheme { /// A deterministic Falcon512 signature scheme. /// - /// This version differs from the reference Falcon512 implementation in its use of the RPO - /// algebraic hash function in its hash-to-point algorithm to make signatures very efficient - /// to verify inside Miden VM. - Falcon512Rpo = FALCON_512_RPO, + /// This version differs from the reference Falcon512 implementation in its use of the poseidon2 + /// hash function in its hash-to-point algorithm to make signatures very efficient to verify + /// inside Miden VM. + Falcon512Poseidon2 = FALCON512_POSEIDON2, /// ECDSA signature scheme over secp256k1 curve using Keccak to hash the messages when signing. 
EcdsaK256Keccak = ECDSA_K256_KECCAK, @@ -47,8 +53,8 @@ impl AuthScheme { impl core::fmt::Display for AuthScheme { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { - Self::Falcon512Rpo => f.write_str("Falcon512Rpo"), - Self::EcdsaK256Keccak => f.write_str("EcdsaK256Keccak"), + Self::Falcon512Poseidon2 => f.write_str(FALCON512_POSEIDON2_STR), + Self::EcdsaK256Keccak => f.write_str(ECDSA_K256_KECCAK_STR), } } } @@ -58,9 +64,21 @@ impl TryFrom for AuthScheme { fn try_from(value: u8) -> Result { match value { - FALCON_512_RPO => Ok(Self::Falcon512Rpo), + FALCON512_POSEIDON2 => Ok(Self::Falcon512Poseidon2), ECDSA_K256_KECCAK => Ok(Self::EcdsaK256Keccak), - value => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(value)), + value => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(value.to_string())), + } + } +} + +impl FromStr for AuthScheme { + type Err = AuthSchemeError; + + fn from_str(input: &str) -> Result { + match input { + FALCON512_POSEIDON2_STR => Ok(AuthScheme::Falcon512Poseidon2), + ECDSA_K256_KECCAK_STR => Ok(AuthScheme::EcdsaK256Keccak), + other => Err(AuthSchemeError::InvalidAuthSchemeIdentifier(other.to_owned())), } } } @@ -79,7 +97,7 @@ impl Serializable for AuthScheme { impl Deserializable for AuthScheme { fn read_from(source: &mut R) -> Result { match source.read_u8()? 
{ - FALCON_512_RPO => Ok(Self::Falcon512Rpo), + FALCON512_POSEIDON2 => Ok(Self::Falcon512Poseidon2), ECDSA_K256_KECCAK => Ok(Self::EcdsaK256Keccak), value => Err(DeserializationError::InvalidValue(format!( "auth scheme identifier `{value}` is not valid" @@ -96,20 +114,20 @@ impl Deserializable for AuthScheme { #[non_exhaustive] #[repr(u8)] pub enum AuthSecretKey { - Falcon512Rpo(falcon512_rpo::SecretKey) = FALCON_512_RPO, + Falcon512Poseidon2(falcon512_poseidon2::SecretKey) = FALCON512_POSEIDON2, EcdsaK256Keccak(ecdsa_k256_keccak::SecretKey) = ECDSA_K256_KECCAK, } impl AuthSecretKey { - /// Generates an Falcon512Rpo secret key from the OS-provided randomness. + /// Generates a Falcon512Poseidon2 secret key from the OS-provided randomness. #[cfg(feature = "std")] - pub fn new_falcon512_rpo() -> Self { - Self::Falcon512Rpo(falcon512_rpo::SecretKey::new()) + pub fn new_falcon512_poseidon2() -> Self { + Self::Falcon512Poseidon2(falcon512_poseidon2::SecretKey::new()) } - /// Generates an Falcon512Rpo secrete key using the provided random number generator. - pub fn new_falcon512_rpo_with_rng(rng: &mut R) -> Self { - Self::Falcon512Rpo(falcon512_rpo::SecretKey::with_rng(rng)) + /// Generates a Falcon512Poseidon2 secret key using the provided random number generator. + pub fn new_falcon512_poseidon2_with_rng(rng: &mut R) -> Self { + Self::Falcon512Poseidon2(falcon512_poseidon2::SecretKey::with_rng(rng)) } /// Generates an EcdsaK256Keccak secret key from the OS-provided randomness. @@ -123,10 +141,36 @@ impl AuthSecretKey { Self::EcdsaK256Keccak(ecdsa_k256_keccak::SecretKey::with_rng(rng)) } + /// Generates a new secret key for the specified authentication scheme using the provided + /// random number generator. + /// + /// Returns an error if the specified authentication scheme is not supported.
+ pub fn with_scheme_and_rng( + scheme: AuthScheme, + rng: &mut R, + ) -> Result { + match scheme { + AuthScheme::Falcon512Poseidon2 => Ok(Self::new_falcon512_poseidon2_with_rng(rng)), + AuthScheme::EcdsaK256Keccak => Ok(Self::new_ecdsa_k256_keccak_with_rng(rng)), + } + } + + /// Generates a new secret key for the specified authentication scheme from the + /// OS-provided randomness. + /// + /// Returns an error if the specified authentication scheme is not supported. + #[cfg(feature = "std")] + pub fn with_scheme(scheme: AuthScheme) -> Result { + match scheme { + AuthScheme::Falcon512Poseidon2 => Ok(Self::new_falcon512_poseidon2()), + AuthScheme::EcdsaK256Keccak => Ok(Self::new_ecdsa_k256_keccak()), + } + } + /// Returns the authentication scheme of this secret key. pub fn auth_scheme(&self) -> AuthScheme { match self { - AuthSecretKey::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + AuthSecretKey::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, AuthSecretKey::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -134,7 +178,9 @@ impl AuthSecretKey { /// Returns a public key associated with this secret key. pub fn public_key(&self) -> PublicKey { match self { - AuthSecretKey::Falcon512Rpo(key) => PublicKey::Falcon512Rpo(key.public_key()), + AuthSecretKey::Falcon512Poseidon2(key) => { + PublicKey::Falcon512Poseidon2(key.public_key()) + }, AuthSecretKey::EcdsaK256Keccak(key) => PublicKey::EcdsaK256Keccak(key.public_key()), } } @@ -142,7 +188,9 @@ impl AuthSecretKey { /// Signs the provided message with this secret key. 
pub fn sign(&self, message: Word) -> Signature { match self { - AuthSecretKey::Falcon512Rpo(key) => Signature::Falcon512Rpo(key.sign(message)), + AuthSecretKey::Falcon512Poseidon2(key) => { + Signature::Falcon512Poseidon2(key.sign(message)) + }, AuthSecretKey::EcdsaK256Keccak(key) => Signature::EcdsaK256Keccak(key.sign(message)), } } @@ -152,7 +200,7 @@ impl Serializable for AuthSecretKey { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - AuthSecretKey::Falcon512Rpo(key) => key.write_into(target), + AuthSecretKey::Falcon512Poseidon2(key) => key.write_into(target), AuthSecretKey::EcdsaK256Keccak(key) => key.write_into(target), } } @@ -161,9 +209,9 @@ impl Serializable for AuthSecretKey { impl Deserializable for AuthSecretKey { fn read_from(source: &mut R) -> Result { match source.read::()? { - AuthScheme::Falcon512Rpo => { - let secret_key = falcon512_rpo::SecretKey::read_from(source)?; - Ok(AuthSecretKey::Falcon512Rpo(secret_key)) + AuthScheme::Falcon512Poseidon2 => { + let secret_key = falcon512_poseidon2::SecretKey::read_from(source)?; + Ok(AuthSecretKey::Falcon512Poseidon2(secret_key)) }, AuthScheme::EcdsaK256Keccak => { let secret_key = ecdsa_k256_keccak::SecretKey::read_from(source)?; @@ -186,8 +234,8 @@ impl core::fmt::Display for PublicKeyCommitment { } } -impl From for PublicKeyCommitment { - fn from(value: falcon512_rpo::PublicKey) -> Self { +impl From for PublicKeyCommitment { + fn from(value: falcon512_poseidon2::PublicKey) -> Self { Self(value.to_commitment()) } } @@ -208,7 +256,7 @@ impl From for PublicKeyCommitment { #[derive(Clone, Debug)] #[non_exhaustive] pub enum PublicKey { - Falcon512Rpo(falcon512_rpo::PublicKey), + Falcon512Poseidon2(falcon512_poseidon2::PublicKey), EcdsaK256Keccak(ecdsa_k256_keccak::PublicKey), } @@ -216,7 +264,7 @@ impl PublicKey { /// Returns the authentication scheme of this public key. 
pub fn auth_scheme(&self) -> AuthScheme { match self { - PublicKey::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + PublicKey::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, PublicKey::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -224,7 +272,7 @@ impl PublicKey { /// Returns a commitment to this public key. pub fn to_commitment(&self) -> PublicKeyCommitment { match self { - PublicKey::Falcon512Rpo(key) => key.to_commitment().into(), + PublicKey::Falcon512Poseidon2(key) => key.to_commitment().into(), PublicKey::EcdsaK256Keccak(key) => key.to_commitment().into(), } } @@ -232,7 +280,7 @@ impl PublicKey { /// Verifies the provided signature against the provided message and this public key. pub fn verify(&self, message: Word, signature: Signature) -> bool { match (self, signature) { - (PublicKey::Falcon512Rpo(key), Signature::Falcon512Rpo(sig)) => { + (PublicKey::Falcon512Poseidon2(key), Signature::Falcon512Poseidon2(sig)) => { key.verify(message, &sig) }, (PublicKey::EcdsaK256Keccak(key), Signature::EcdsaK256Keccak(sig)) => { @@ -247,7 +295,7 @@ impl Serializable for PublicKey { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - PublicKey::Falcon512Rpo(pub_key) => pub_key.write_into(target), + PublicKey::Falcon512Poseidon2(pub_key) => pub_key.write_into(target), PublicKey::EcdsaK256Keccak(pub_key) => pub_key.write_into(target), } } @@ -256,9 +304,9 @@ impl Serializable for PublicKey { impl Deserializable for PublicKey { fn read_from(source: &mut R) -> Result { match source.read::()? 
{ - AuthScheme::Falcon512Rpo => { - let pub_key = falcon512_rpo::PublicKey::read_from(source)?; - Ok(PublicKey::Falcon512Rpo(pub_key)) + AuthScheme::Falcon512Poseidon2 => { + let pub_key = falcon512_poseidon2::PublicKey::read_from(source)?; + Ok(PublicKey::Falcon512Poseidon2(pub_key)) }, AuthScheme::EcdsaK256Keccak => { let pub_key = ecdsa_k256_keccak::PublicKey::read_from(source)?; @@ -278,7 +326,7 @@ impl Deserializable for PublicKey { /// provider. To prepare the signature, use the provided `to_prepared_signature` method: /// ```rust,no_run /// use miden_protocol::account::auth::Signature; -/// use miden_protocol::crypto::dsa::falcon512_rpo::SecretKey; +/// use miden_protocol::crypto::dsa::falcon512_poseidon2::SecretKey; /// use miden_protocol::{Felt, Word}; /// /// let secret_key = SecretKey::new(); @@ -289,7 +337,7 @@ impl Deserializable for PublicKey { #[derive(Clone, Debug)] #[repr(u8)] pub enum Signature { - Falcon512Rpo(falcon512_rpo::Signature) = FALCON_512_RPO, + Falcon512Poseidon2(falcon512_poseidon2::Signature) = FALCON512_POSEIDON2, EcdsaK256Keccak(ecdsa_k256_keccak::Signature) = ECDSA_K256_KECCAK, } @@ -297,7 +345,7 @@ impl Signature { /// Returns the authentication scheme of this signature. 
pub fn auth_scheme(&self) -> AuthScheme { match self { - Signature::Falcon512Rpo(_) => AuthScheme::Falcon512Rpo, + Signature::Falcon512Poseidon2(_) => AuthScheme::Falcon512Poseidon2, Signature::EcdsaK256Keccak(_) => AuthScheme::EcdsaK256Keccak, } } @@ -310,25 +358,22 @@ impl Signature { pub fn to_prepared_signature(&self, msg: Word) -> Vec { // TODO: the `expect()` should be changed to an error; but that will be a part of a bigger // refactoring - let mut result = match self { - Signature::Falcon512Rpo(sig) => prepare_falcon512_rpo_signature(sig), + match self { + Signature::Falcon512Poseidon2(sig) => { + miden_core_lib::dsa::falcon512_poseidon2::encode_signature(sig.public_key(), sig) + }, Signature::EcdsaK256Keccak(sig) => { let pk = ecdsa_k256_keccak::PublicKey::recover_from(msg, sig) .expect("inferring public key from signature and message should succeed"); miden_core_lib::dsa::ecdsa_k256_keccak::encode_signature(&pk, sig) }, - }; - - // reverse the signature data so that when it is pushed onto the advice stack, the first - // element of the vector is at the top of the stack - result.reverse(); - result + } } } -impl From for Signature { - fn from(signature: falcon512_rpo::Signature) -> Self { - Signature::Falcon512Rpo(signature) +impl From for Signature { + fn from(signature: falcon512_poseidon2::Signature) -> Self { + Signature::Falcon512Poseidon2(signature) } } @@ -336,7 +381,7 @@ impl Serializable for Signature { fn write_into(&self, target: &mut W) { self.auth_scheme().write_into(target); match self { - Signature::Falcon512Rpo(signature) => signature.write_into(target), + Signature::Falcon512Poseidon2(signature) => signature.write_into(target), Signature::EcdsaK256Keccak(signature) => signature.write_into(target), } } @@ -345,9 +390,9 @@ impl Serializable for Signature { impl Deserializable for Signature { fn read_from(source: &mut R) -> Result { match source.read::()? 
{ - AuthScheme::Falcon512Rpo => { - let signature = falcon512_rpo::Signature::read_from(source)?; - Ok(Signature::Falcon512Rpo(signature)) + AuthScheme::Falcon512Poseidon2 => { + let signature = falcon512_poseidon2::Signature::read_from(source)?; + Ok(Signature::Falcon512Poseidon2(signature)) }, AuthScheme::EcdsaK256Keccak => { let signature = ecdsa_k256_keccak::Signature::read_from(source)?; @@ -356,53 +401,3 @@ impl Deserializable for Signature { } } } - -// SIGNATURE PREPARATION -// ================================================================================================ - -/// Converts a Falcon [falcon512_rpo::Signature] to a vector of values to be pushed onto the -/// advice stack. The values are the ones required for a Falcon signature verification inside the VM -/// and they are: -/// -/// 1. The challenge point at which we evaluate the polynomials in the subsequent three bullet -/// points, i.e. `h`, `s2` and `pi`, to check the product relationship. -/// 2. The expanded public key represented as the coefficients of a polynomial `h` of degree < 512. -/// 3. The signature represented as the coefficients of a polynomial `s2` of degree < 512. -/// 4. The product of the above two polynomials `pi` in the ring of polynomials with coefficients in -/// the Miden field. -/// 5. The nonce represented as 8 field elements. -fn prepare_falcon512_rpo_signature(sig: &falcon512_rpo::Signature) -> Vec { - use falcon512_rpo::Polynomial; - - // The signature is composed of a nonce and a polynomial s2 - // The nonce is represented as 8 field elements. 
- let nonce = sig.nonce(); - // We convert the signature to a polynomial - let s2 = sig.sig_poly(); - // We also need in the VM the expanded key corresponding to the public key that was provided - // via the operand stack - let h = sig.public_key(); - // Lastly, for the probabilistic product routine that is part of the verification procedure, - // we need to compute the product of the expanded key and the signature polynomial in - // the ring of polynomials with coefficients in the Miden field. - let pi = Polynomial::mul_modulo_p(h, s2); - - // We now push the expanded key, the signature polynomial, and the product of the - // expanded key and the signature polynomial to the advice stack. We also push - // the challenge point at which the previous polynomials will be evaluated. - // Finally, we push the nonce needed for the hash-to-point algorithm. - - let mut polynomials: Vec = - h.coefficients.iter().map(|a| Felt::from(a.value() as u32)).collect(); - polynomials.extend(s2.coefficients.iter().map(|a| Felt::from(a.value() as u32))); - polynomials.extend(pi.iter().map(|a| Felt::new(*a))); - - let digest_polynomials = Hasher::hash_elements(&polynomials); - let challenge = (digest_polynomials[0], digest_polynomials[1]); - - let mut result: Vec = vec![challenge.0, challenge.1]; - result.extend_from_slice(&polynomials); - result.extend_from_slice(&nonce.to_elements()); - - result -} diff --git a/crates/miden-protocol/src/account/builder/mod.rs b/crates/miden-protocol/src/account/builder/mod.rs index 478a2013d3..7a0e7bec6e 100644 --- a/crates/miden-protocol/src/account/builder/mod.rs +++ b/crates/miden-protocol/src/account/builder/mod.rs @@ -1,8 +1,6 @@ use alloc::boxed::Box; use alloc::vec::Vec; -use miden_core::FieldElement; - use crate::account::component::StorageSchema; use crate::account::{ Account, @@ -51,9 +49,6 @@ use crate::{Felt, Word}; /// that the auth procedure must be at procedure index 0 within an [`AccountCode`]. 
That also /// affects the storage slot order and means the auth component's storage comes first, if it has any /// storage. -/// -/// Faucet accounts have a protocol-reserved storage slot which is at index 0. This means -/// user-defined storage slots start at index 1. #[derive(Debug, Clone)] pub struct AccountBuilder { #[cfg(any(feature = "testing", test))] @@ -118,7 +113,7 @@ impl AccountBuilder { /// Adds a designated authentication [`AccountComponent`] to the builder. /// /// This component may contain multiple procedures, but is expected to contain exactly one - /// authentication procedure (named `auth_*`). + /// authentication procedure (marked with the `@auth_script` attribute). /// Calling this method multiple times will override the previous auth component. /// /// Procedures from this component will be placed at the beginning of the account procedure @@ -128,14 +123,12 @@ impl AccountBuilder { self } - /// Returns an iterator of storage schemas attached to the builder's components, if any. - /// - /// Components constructed without metadata will not contribute a schema. + /// Returns an iterator of storage schemas attached to the builder's components. pub fn storage_schemas(&self) -> impl Iterator + '_ { self.auth_component .iter() .chain(self.components.iter()) - .filter_map(|component| component.storage_schema()) + .map(|component| component.storage_schema()) } /// Builds the common parts of testing and non-testing code. @@ -203,7 +196,8 @@ impl AccountBuilder { /// - Authentication component is missing. /// - Multiple authentication procedures are found. /// - The number of [`StorageSlot`](crate::account::StorageSlot)s of all components exceeds 255. - /// - [`MastForest::merge`](miden_processor::MastForest::merge) fails on the given components. + /// - [`MastForest::merge`](miden_processor::mast::MastForest::merge) fails on the given + /// components. /// - If duplicate assets were added to the builder (only under the `testing` feature). 
/// - If the vault is not empty on new accounts (only under the `testing` feature). pub fn build(mut self) -> Result { @@ -299,10 +293,10 @@ mod tests { use assert_matches::assert_matches; use miden_assembly::{Assembler, Library}; - use miden_core::FieldElement; - use miden_processor::MastNodeExt; + use miden_core::mast::MastNodeExt; use super::*; + use crate::account::component::AccountComponentMetadata; use crate::account::{AccountProcedureRoot, StorageSlot, StorageSlotName}; use crate::testing::noop_auth_component::NoopAuthComponent; @@ -349,12 +343,14 @@ mod tests { let mut value = Word::empty(); value[0] = Felt::new(custom.slot0); + let metadata = + AccountComponentMetadata::new("test::custom_component1", AccountType::all()); AccountComponent::new( CUSTOM_LIBRARY1.clone(), vec![StorageSlot::with_value(CUSTOM_COMPONENT1_SLOT_NAME.clone(), value)], + metadata, ) .expect("component should be valid") - .with_supports_all_types() } } @@ -369,15 +365,17 @@ mod tests { let mut value1 = Word::empty(); value1[3] = Felt::new(custom.slot1); + let metadata = + AccountComponentMetadata::new("test::custom_component2", AccountType::all()); AccountComponent::new( CUSTOM_LIBRARY2.clone(), vec![ StorageSlot::with_value(CUSTOM_COMPONENT2_SLOT_NAME0.clone(), value0), StorageSlot::with_value(CUSTOM_COMPONENT2_SLOT_NAME1.clone(), value1), ], + metadata, ) .expect("component should be valid") - .with_supports_all_types() } } diff --git a/crates/miden-protocol/src/account/code/header.rs b/crates/miden-protocol/src/account/code/header.rs deleted file mode 100644 index aae5b5a3ac..0000000000 --- a/crates/miden-protocol/src/account/code/header.rs +++ /dev/null @@ -1,82 +0,0 @@ -use alloc::vec::Vec; - -use miden_core::{ - Felt, - utils::{Deserializable, Serializable}, -}; -use miden_processor::Digest; - -use super::{AccountCode, build_procedure_commitment, procedures_as_elements}; -use crate::account::AccountProcedureInfo; - -/// A lightweight representation of account code that contains 
only procedure metadata without the -/// actual program instructions. -/// -/// Account code header consists of the following components: -/// - Code commitment, which uniquely identifies the account code. -/// - Procedure information, which contains metadata about each procedure in the account code, -/// including MAST roots, storage access permissions, and other relevant attributes. -/// -/// The header is used to provide verifiable information about account code structure and -/// storage access patterns without the need to include the full program instructions. -/// This is particularly useful for verification purposes and when the actual code execution -/// is not required. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct AccountCodeHeader { - commitment: Digest, - procedures: Vec, -} - -impl AccountCodeHeader { - /// Returns a new instance of account code header with the specified procedures. - /// - /// The code commitment is computed during instantiation based on the provided procedures. - pub fn new(procedures: Vec) -> Self { - let commitment = build_procedure_commitment(&procedures); - AccountCodeHeader { procedures, commitment } - } - - /// Returns the commitment of this account code header. - pub fn commitment(&self) -> Digest { - self.commitment - } - - /// Returns a reference to the procedure information stored in this account code header. - pub fn procedures(&self) -> &[AccountProcedureInfo] { - &self.procedures - } - - /// Converts procedure information in this [AccountCodeHeader] into a vector of field elements. - /// - /// This is done by first converting each procedure into 8 field elements as follows: - /// ```text - /// [PROCEDURE_MAST_ROOT, storage_offset, storage_size, 0, 0] - /// ``` - /// And then concatenating the resulting elements into a single vector. 
- pub fn as_elements(&self) -> Vec { - procedures_as_elements(&self.procedures) - } -} - -impl From for AccountCodeHeader { - fn from(value: AccountCode) -> Self { - AccountCodeHeader::new(value.procedures) - } -} - -impl Serializable for AccountCodeHeader { - fn write_into(&self, target: &mut W) { - target.write(&self.procedures); - } -} - -impl Deserializable for AccountCodeHeader { - fn read_from( - source: &mut R, - ) -> Result { - let procedures: Vec = source.read()?; - let commitment = build_procedure_commitment(&procedures); - - Ok(AccountCodeHeader { procedures, commitment }) - } -} diff --git a/crates/miden-protocol/src/account/code/mod.rs b/crates/miden-protocol/src/account/code/mod.rs index e079b0746c..c8825b9f3c 100644 --- a/crates/miden-protocol/src/account/code/mod.rs +++ b/crates/miden-protocol/src/account/code/mod.rs @@ -283,7 +283,9 @@ impl Deserializable for AccountCode { fn read_from(source: &mut R) -> Result { let module = Arc::new(MastForest::read_from(source)?); let num_procedures = (source.read_u8()? as usize) + 1; - let procedures = source.read_many::(num_procedures)?; + let procedures = source + .read_many_iter(num_procedures)? 
+ .collect::, _>>()?; Ok(Self::from_parts(module, procedures)) } @@ -411,6 +413,7 @@ mod tests { use super::{AccountCode, Deserializable, Serializable}; use crate::account::code::build_procedure_commitment; + use crate::account::component::AccountComponentMetadata; use crate::account::{AccountComponent, AccountType}; use crate::errors::AccountError; use crate::testing::account_code::CODE; @@ -445,7 +448,8 @@ mod tests { #[test] fn test_account_code_no_auth_component() { let library = Assembler::default().assemble_library([CODE]).unwrap(); - let component = AccountComponent::new(library, vec![]).unwrap().with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::no_auth", AccountType::all()); + let component = AccountComponent::new(library, vec![], metadata).unwrap(); let err = AccountCode::from_components(&[component], AccountType::RegularAccountUpdatableCode) @@ -470,17 +474,20 @@ mod tests { use miden_assembly::Assembler; let code_with_multiple_auth = " + @auth_script pub proc auth_basic push.1 drop end + @auth_script pub proc auth_secondary push.0 drop end "; let library = Assembler::default().assemble_library([code_with_multiple_auth]).unwrap(); - let component = AccountComponent::new(library, vec![]).unwrap().with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::multiple_auth", AccountType::all()); + let component = AccountComponent::new(library, vec![], metadata).unwrap(); let err = AccountCode::from_components(&[component], AccountType::RegularAccountUpdatableCode) diff --git a/crates/miden-protocol/src/account/code/procedure.rs b/crates/miden-protocol/src/account/code/procedure.rs index fbef026007..a88fde5b0d 100644 --- a/crates/miden-protocol/src/account/code/procedure.rs +++ b/crates/miden-protocol/src/account/code/procedure.rs @@ -3,7 +3,7 @@ use alloc::sync::Arc; use miden_core::mast::MastForest; use miden_core::prettier::PrettyPrint; -use miden_processor::{MastNode, MastNodeExt, MastNodeId}; +use 
miden_processor::mast::{MastNode, MastNodeExt, MastNodeId}; use miden_protocol_macros::WordWrapper; use super::Felt; diff --git a/crates/miden-protocol/src/account/component/code.rs b/crates/miden-protocol/src/account/component/code.rs index 2ddcdfca53..ee8e752e65 100644 --- a/crates/miden-protocol/src/account/component/code.rs +++ b/crates/miden-protocol/src/account/component/code.rs @@ -1,5 +1,7 @@ use miden_assembly::Library; -use miden_processor::MastForest; +use miden_processor::mast::MastForest; + +use crate::vm::AdviceMap; // ACCOUNT COMPONENT CODE // ================================================================================================ @@ -23,6 +25,19 @@ impl AccountComponentCode { pub fn into_library(self) -> Library { self.0 } + + /// Returns a new [AccountComponentCode] with the provided advice map entries merged into the + /// underlying [Library]'s [MastForest]. + /// + /// This allows adding advice map entries to an already-compiled account component, + /// which is useful when the entries are determined after compilation. 
+ pub fn with_advice_map(self, advice_map: AdviceMap) -> Self { + if advice_map.is_empty() { + return self; + } + + Self(self.0.with_advice_map(advice_map)) + } } impl AsRef for AccountComponentCode { @@ -45,3 +60,43 @@ impl From for Library { value.into_library() } } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_core::{Felt, Word}; + + use super::*; + use crate::assembly::Assembler; + + #[test] + fn test_account_component_code_with_advice_map() { + let assembler = Assembler::default(); + let library = assembler + .assemble_library(["pub proc test nop end"]) + .expect("failed to assemble library"); + let component_code = AccountComponentCode::from(library); + + assert!(component_code.mast_forest().advice_map().is_empty()); + + // Empty advice map should be a no-op (digest stays the same) + let cloned = component_code.clone(); + let original_digest = cloned.as_library().digest(); + let component_code = component_code.with_advice_map(AdviceMap::default()); + assert_eq!(original_digest, component_code.as_library().digest()); + + // Non-empty advice map should add entries + let key = Word::from([10u32, 20, 30, 40]); + let value = vec![Felt::new(200)]; + let mut advice_map = AdviceMap::default(); + advice_map.insert(key, value.clone()); + + let component_code = component_code.with_advice_map(advice_map); + + let mast = component_code.mast_forest(); + let stored = mast.advice_map().get(&key).expect("entry should be present"); + assert_eq!(stored.as_ref(), value.as_slice()); + } +} diff --git a/crates/miden-protocol/src/account/component/metadata/mod.rs b/crates/miden-protocol/src/account/component/metadata/mod.rs index a0f58ae9cd..b02c007014 100644 --- a/crates/miden-protocol/src/account/component/metadata/mod.rs +++ b/crates/miden-protocol/src/account/component/metadata/mod.rs @@ -2,13 +2,18 @@ use alloc::collections::{BTreeMap, BTreeSet}; use 
alloc::string::{String, ToString}; use core::str::FromStr; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_mast_package::{Package, SectionId}; -use miden_processor::DeserializationError; use semver::Version; use super::{AccountType, SchemaRequirement, StorageSchema, StorageValueName}; use crate::errors::AccountError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT COMPONENT METADATA // ================================================================================================ @@ -26,7 +31,6 @@ use crate::errors::AccountError; /// # Guarantees /// /// - The metadata's storage schema does not contain duplicate slot names. -/// - The schema cannot contain protocol-reserved slot names. /// - Each init-time value name uniquely identifies a single value. The expected init-time metadata /// can be retrieved with [AccountComponentMetadata::schema_requirements()], which returns a map /// from keys to [SchemaRequirement] (which indicates the expected value type and optional @@ -35,14 +39,13 @@ use crate::errors::AccountError; /// # Example /// /// ``` -/// use std::collections::{BTreeMap, BTreeSet}; +/// use std::collections::BTreeMap; /// -/// use miden_protocol::account::StorageSlotName; /// use miden_protocol::account::component::{ /// AccountComponentMetadata, /// FeltSchema, /// InitStorageData, -/// SchemaTypeId, +/// SchemaType, /// StorageSchema, /// StorageSlotSchema, /// StorageValueName, @@ -50,7 +53,7 @@ use crate::errors::AccountError; /// WordSchema, /// WordValue, /// }; -/// use semver::Version; +/// use miden_protocol::account::{AccountType, StorageSlotName}; /// /// let slot_name = StorageSlotName::new("demo::test_value")?; /// @@ -58,7 +61,7 @@ use crate::errors::AccountError; /// FeltSchema::new_void(), /// FeltSchema::new_void(), /// FeltSchema::new_void(), -/// FeltSchema::new_typed(SchemaTypeId::native_felt(), "foo"), +/// 
FeltSchema::felt("foo"), /// ]); /// /// let storage_schema = StorageSchema::new([( @@ -66,13 +69,9 @@ use crate::errors::AccountError; /// StorageSlotSchema::Value(ValueSlotSchema::new(Some("demo slot".into()), word)), /// )])?; /// -/// let metadata = AccountComponentMetadata::new( -/// "test name".into(), -/// "description of the component".into(), -/// Version::parse("0.1.0")?, -/// BTreeSet::new(), -/// storage_schema, -/// ); +/// let metadata = AccountComponentMetadata::new("test name", AccountType::all()) +/// .with_description("description of the component") +/// .with_storage_schema(storage_schema); /// /// // Init value keys are derived from slot name: `demo::test_value.foo`. /// let value_name = StorageValueName::from_slot_name_with_suffix(&slot_name, "foo")?; @@ -106,23 +105,45 @@ pub struct AccountComponentMetadata { } impl AccountComponentMetadata { - /// Create a new [AccountComponentMetadata]. + /// Create a new [AccountComponentMetadata] with the given name and supported account types. + /// + /// Other fields are initialized to sensible defaults: + /// - `description`: empty string + /// - `version`: 1.0.0 + /// - `storage_schema`: default (empty) + /// + /// Use the `with_*` mutator methods to customize these fields. pub fn new( - name: String, - description: String, - version: Version, - targets: BTreeSet, - storage_schema: StorageSchema, + name: impl Into, + supported_types: impl IntoIterator, ) -> Self { Self { - name, - description, - version, - supported_types: targets, - storage_schema, + name: name.into(), + description: String::new(), + version: Version::new(1, 0, 0), + supported_types: supported_types.into_iter().collect(), + storage_schema: StorageSchema::default(), } } + /// Sets the description of the component. + pub fn with_description(mut self, description: impl Into) -> Self { + self.description = description.into(); + self + } + + /// Sets the version of the component. 
+ pub fn with_version(mut self, version: Version) -> Self { + self.version = version; + self + } + + /// Sets the storage schema of the component. + pub fn with_storage_schema(mut self, schema: StorageSchema) -> Self { + self.storage_schema = schema; + self + } + /// Returns the init-time values requirements for this schema. /// /// These values are used for initializing storage slot values or storage map entries. For a diff --git a/crates/miden-protocol/src/account/component/mod.rs b/crates/miden-protocol/src/account/component/mod.rs index b77d2c2e0e..b9da72c1f5 100644 --- a/crates/miden-protocol/src/account/component/mod.rs +++ b/crates/miden-protocol/src/account/component/mod.rs @@ -2,6 +2,7 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; use miden_mast_package::{MastArtifact, Package}; +use miden_processor::mast::MastNodeExt; mod metadata; pub use metadata::*; @@ -17,6 +18,9 @@ use crate::assembly::Path; use crate::errors::AccountError; use crate::{MastForest, Word}; +/// The attribute name used to mark the authentication procedure in an account component. +const AUTH_SCRIPT_ATTRIBUTE: &str = "auth_script"; + // ACCOUNT COMPONENT // ================================================================================================ @@ -39,16 +43,15 @@ use crate::{MastForest, Word}; pub struct AccountComponent { pub(super) code: AccountComponentCode, pub(super) storage_slots: Vec, - pub(super) metadata: Option, - pub(super) supported_types: BTreeSet, + pub(super) metadata: AccountComponentMetadata, } impl AccountComponent { // CONSTRUCTORS // -------------------------------------------------------------------------------------------- - /// Returns a new [`AccountComponent`] constructed from the provided `library` and - /// `storage_slots`. + /// Returns a new [`AccountComponent`] constructed from the provided `library`, + /// `storage_slots`, and `metadata`. 
/// /// All procedures exported from the provided code will become members of the account's public /// interface when added to an [`AccountCode`](crate::account::AccountCode). @@ -64,6 +67,7 @@ impl AccountComponent { pub fn new( code: impl Into, storage_slots: Vec, + metadata: AccountComponentMetadata, ) -> Result { // Check that we have less than 256 storage slots. u8::try_from(storage_slots.len()) @@ -72,8 +76,7 @@ impl AccountComponent { Ok(Self { code: code.into(), storage_slots, - metadata: None, - supported_types: BTreeSet::new(), + metadata, }) } @@ -123,8 +126,7 @@ impl AccountComponent { /// # Arguments /// /// * `library` - The component's assembled code - /// * `account_component_metadata` - The component's metadata, which describes the storage - /// layout + /// * `metadata` - The component's metadata, which describes the storage layout /// * `init_storage_data` - The initialization data for storage slots /// /// # Errors @@ -137,18 +139,17 @@ impl AccountComponent { /// - The component creation fails pub fn from_library( library: &AccountComponentCode, - account_component_metadata: &AccountComponentMetadata, + metadata: &AccountComponentMetadata, init_storage_data: &InitStorageData, ) -> Result { - let storage_slots = account_component_metadata + let storage_slots = metadata .storage_schema() .build_storage_slots(init_storage_data) .map_err(|err| { AccountError::other_with_source("failed to instantiate account component", err) })?; - Ok(AccountComponent::new(library.clone(), storage_slots)? - .with_metadata(account_component_metadata.clone())) + AccountComponent::new(library.clone(), storage_slots, metadata.clone()) } // ACCESSORS @@ -175,33 +176,42 @@ impl AccountComponent { self.storage_slots.as_slice() } - /// Returns the component metadata, if any. - pub fn metadata(&self) -> Option<&AccountComponentMetadata> { - self.metadata.as_ref() + /// Returns the component metadata. 
+ pub fn metadata(&self) -> &AccountComponentMetadata { + &self.metadata } - /// Returns the storage schema associated with this component, if any. - pub fn storage_schema(&self) -> Option<&StorageSchema> { - self.metadata.as_ref().map(AccountComponentMetadata::storage_schema) + /// Returns the storage schema associated with this component. + pub fn storage_schema(&self) -> &StorageSchema { + self.metadata.storage_schema() } /// Returns a reference to the supported [`AccountType`]s. pub fn supported_types(&self) -> &BTreeSet { - &self.supported_types + self.metadata.supported_types() } /// Returns `true` if this component supports the given `account_type`, `false` otherwise. pub fn supports_type(&self, account_type: AccountType) -> bool { - self.supported_types.contains(&account_type) + self.metadata.supported_types().contains(&account_type) } /// Returns a vector of tuples (digest, is_auth) for all procedures in this component. + /// + /// A procedure is considered an authentication procedure if it has the `@auth_script` + /// attribute. 
pub fn get_procedures(&self) -> Vec<(Word, bool)> { + let library = self.code.as_library(); let mut procedures = Vec::new(); - for module in self.code.as_library().module_infos() { - for (_, procedure_info) in module.procedures() { - let is_auth = procedure_info.name.starts_with("auth_"); - procedures.push((procedure_info.digest, is_auth)); + for export in library.exports() { + if let Some(proc_export) = export.as_procedure() { + let digest = library + .mast_forest() + .get_node_by_id(proc_export.node) + .expect("export node not in the forest") + .digest(); + let is_auth = proc_export.attributes.has(AUTH_SCRIPT_ATTRIBUTE); + procedures.push((digest, is_auth)); } } procedures @@ -212,45 +222,6 @@ impl AccountComponent { pub fn get_procedure_root_by_path(&self, proc_name: impl AsRef) -> Option { self.code.as_library().get_procedure_root_by_path(proc_name) } - - // MUTATORS - // -------------------------------------------------------------------------------------------- - - /// Adds `supported_type` to the set of [`AccountType`]s supported by this component. - /// - /// This function has the semantics of [`BTreeSet::insert`], i.e. adding a type twice is fine - /// and it can be called multiple times with different account types. - pub fn with_supported_type(mut self, supported_type: AccountType) -> Self { - self.supported_types.insert(supported_type); - self - } - - /// Overwrites any previously set supported types with the given set. - /// - /// This can be used to reset the supported types of a component to a chosen set, which may be - /// useful after cloning an existing component. - pub fn with_supported_types(mut self, supported_types: BTreeSet) -> Self { - self.supported_types = supported_types; - self - } - - /// Attaches metadata to this component for downstream schema commitments and introspection. 
- pub fn with_metadata(mut self, metadata: AccountComponentMetadata) -> Self { - self.supported_types = metadata.supported_types().clone(); - self.metadata = Some(metadata); - self - } - - /// Sets the [`AccountType`]s supported by this component to all account types. - pub fn with_supports_all_types(mut self) -> Self { - self.supported_types.extend([ - AccountType::FungibleFaucet, - AccountType::NonFungibleFaucet, - AccountType::RegularAccountImmutableCode, - AccountType::RegularAccountUpdatableCode, - ]); - self - } } impl From for AccountComponentCode { @@ -261,12 +232,10 @@ impl From for AccountComponentCode { #[cfg(test)] mod tests { - use alloc::collections::BTreeSet; use alloc::string::ToString; use alloc::sync::Arc; use miden_assembly::Assembler; - use miden_core::utils::Serializable; use miden_mast_package::{ MastArtifact, Package, @@ -279,6 +248,7 @@ mod tests { use super::*; use crate::testing::account_code::CODE; + use crate::utils::serde::Serializable; #[test] fn test_extract_metadata_from_package() { @@ -287,12 +257,11 @@ mod tests { // Test with metadata let metadata = AccountComponentMetadata::new( - "test_component".to_string(), - "A test component".to_string(), - Version::new(1, 0, 0), - BTreeSet::from_iter([AccountType::RegularAccountImmutableCode]), - StorageSchema::default(), - ); + "test_component", + [AccountType::RegularAccountImmutableCode], + ) + .with_description("A test component") + .with_version(Version::new(1, 0, 0)); let metadata_bytes = metadata.to_bytes(); let package_with_metadata = Package { @@ -341,16 +310,9 @@ mod tests { let component_code = AccountComponentCode::from(library.clone()); // Create metadata for the component - let metadata = AccountComponentMetadata::new( - "test_component".to_string(), - "A test component".to_string(), - Version::new(1, 0, 0), - BTreeSet::from_iter([ - AccountType::RegularAccountImmutableCode, - AccountType::RegularAccountUpdatableCode, - ]), - StorageSchema::default(), - ); + let metadata = 
AccountComponentMetadata::new("test_component", AccountType::regular()) + .with_description("A test component") + .with_version(Version::new(1, 0, 0)); // Test with empty init data - this tests the complete workflow: // Library + Metadata -> AccountComponent diff --git a/crates/miden-protocol/src/account/component/storage/init_storage_data.rs b/crates/miden-protocol/src/account/component/storage/init_storage_data.rs index 999f552a3c..6a0f560d55 100644 --- a/crates/miden-protocol/src/account/component/storage/init_storage_data.rs +++ b/crates/miden-protocol/src/account/component/storage/init_storage_data.rs @@ -5,8 +5,10 @@ use alloc::vec::Vec; use thiserror::Error; use super::StorageValueName; +use super::value_name::StorageValueNameError; use crate::account::StorageSlotName; -use crate::{Felt, FieldElement, Word}; +use crate::errors::StorageSlotNameError; +use crate::{Felt, Word}; /// A word value provided via [`InitStorageData`]. /// @@ -23,6 +25,9 @@ pub enum WordValue { Elements([String; 4]), } +// CONVERSIONS +// ==================================================================================================== + impl From for WordValue { fn from(value: Word) -> Self { WordValue::FullyTyped(value) @@ -41,9 +46,6 @@ impl From<&str> for WordValue { } } -// CONVERSIONS -// ==================================================================================================== - impl From for WordValue { /// Converts a [`Felt`] to a [`WordValue`] as a Word in the form `[0, 0, 0, felt]`. fn from(value: Felt) -> Self { @@ -57,6 +59,46 @@ impl From<[Felt; 4]> for WordValue { } } +impl From for WordValue { + /// Converts a [`u8`] to a [`WordValue::Atomic`] string representation. + fn from(value: u8) -> Self { + WordValue::Atomic(value.to_string()) + } +} + +impl From for WordValue { + /// Converts a [`u16`] to a [`WordValue::Atomic`] string representation. 
+ fn from(value: u16) -> Self { + WordValue::Atomic(value.to_string()) + } +} + +impl From for WordValue { + /// Converts a [`u32`] to a [`WordValue::Atomic`] string representation. + fn from(value: u32) -> Self { + WordValue::Atomic(value.to_string()) + } +} + +impl From for WordValue { + /// Converts a [`u64`] to a [`WordValue::Atomic`] string representation. + fn from(value: u64) -> Self { + WordValue::Atomic(value.to_string()) + } +} + +impl From<[u32; 4]> for WordValue { + /// Converts a `[u32; 4]` to a [`WordValue`] by converting each element to a [`Felt`]. + fn from(value: [u32; 4]) -> Self { + WordValue::FullyTyped(Word::from([ + Felt::from(value[0]), + Felt::from(value[1]), + Felt::from(value[2]), + Felt::from(value[3]), + ])) + } +} + // INIT STORAGE DATA // ==================================================================================================== @@ -154,18 +196,29 @@ impl InitStorageData { /// Inserts a value entry, returning an error on duplicate or conflicting keys. 
/// + /// The name can be any type that implements `TryInto`, e.g.: + /// + /// - `StorageValueName`: used directly + /// - `&str` or `String`: parsed into a `StorageValueName` + /// /// The value can be any type that implements `Into`, e.g.: /// /// - `Word`: a fully-typed word value /// - `[Felt; 4]`: converted to a Word /// - `Felt`: converted to `[0, 0, 0, felt]` + /// - `u32`, `u64`, `u8`, `u16`: converted to a Felt, then to `[0, 0, 0, felt]` /// - `String` or `&str`: a parseable string value /// - `WordValue`: a word value (fully typed, atomic, or elements) - pub fn insert_value( + pub fn insert_value( &mut self, - name: StorageValueName, + name: N, value: impl Into, - ) -> Result<(), InitStorageDataError> { + ) -> Result<(), InitStorageDataError> + where + N: TryInto, + InitStorageDataError: From, + { + let name = name.try_into().map_err(InitStorageDataError::from)?; if self.value_entries.contains_key(&name) { return Err(InitStorageDataError::DuplicateKey(name.to_string())); } @@ -179,11 +232,18 @@ impl InitStorageData { /// Sets a value entry, overriding any existing entry for the name. /// /// Returns an error if the [`StorageValueName`] has been used for a map slot. - pub fn set_value( + /// + /// See [`Self::insert_value`] for accepted types for `name` and `value`. + pub fn set_value( &mut self, - name: StorageValueName, + name: N, value: impl Into, - ) -> Result<(), InitStorageDataError> { + ) -> Result<(), InitStorageDataError> + where + N: TryInto, + InitStorageDataError: From, + { + let name = name.try_into().map_err(InitStorageDataError::from)?; if self.map_entries.contains_key(name.slot_name()) { return Err(InitStorageDataError::ConflictingEntries(name.slot_name().as_str().into())); } @@ -193,13 +253,23 @@ impl InitStorageData { /// Inserts a single map entry, returning an error on duplicate or conflicting keys. 
/// + /// The slot_name can be any type that implements `TryInto`, e.g.: + /// + /// - `StorageSlotName`: used directly + /// - `&str` or `String`: parsed into a `StorageSlotName` + /// /// See [`Self::insert_value`] for examples of supported types for `key` and `value`. - pub fn insert_map_entry( + pub fn insert_map_entry( &mut self, - slot_name: StorageSlotName, + slot_name: S, key: impl Into, value: impl Into, - ) -> Result<(), InitStorageDataError> { + ) -> Result<(), InitStorageDataError> + where + S: TryInto, + InitStorageDataError: From, + { + let slot_name = slot_name.try_into().map_err(InitStorageDataError::from)?; if self.has_value_entries_for_slot(&slot_name) { return Err(InitStorageDataError::ConflictingEntries(slot_name.as_str().into())); } @@ -221,11 +291,18 @@ impl InitStorageData { /// Sets map entries for the slot, replacing any existing entries. /// /// Returns an error if there are conflicting value entries. - pub fn set_map_values( + /// + /// See [`Self::insert_map_entry`] for accepted types for `slot_name`. + pub fn set_map_values( &mut self, - slot_name: StorageSlotName, + slot_name: S, entries: Vec<(WordValue, WordValue)>, - ) -> Result<(), InitStorageDataError> { + ) -> Result<(), InitStorageDataError> + where + S: TryInto, + InitStorageDataError: From, + { + let slot_name = slot_name.try_into().map_err(InitStorageDataError::from)?; if self.has_value_entries_for_slot(&slot_name) { return Err(InitStorageDataError::ConflictingEntries(slot_name.as_str().into())); } @@ -253,10 +330,20 @@ impl InitStorageData { // ==================================================================================================== /// Error returned when creating [`InitStorageData`] with invalid entries. 
-#[derive(Debug, Error, PartialEq, Eq)] +#[derive(Debug, Error)] pub enum InitStorageDataError { #[error("duplicate init key `{0}`")] DuplicateKey(String), #[error("conflicting init entries for `{0}`")] ConflictingEntries(String), + #[error("invalid storage value name")] + InvalidValueName(#[from] StorageValueNameError), + #[error("invalid storage slot name")] + InvalidSlotName(#[from] StorageSlotNameError), +} + +impl From for InitStorageDataError { + fn from(err: core::convert::Infallible) -> Self { + match err {} + } } diff --git a/crates/miden-protocol/src/account/component/storage/mod.rs b/crates/miden-protocol/src/account/component/storage/mod.rs index 06b24bd768..41745782bf 100644 --- a/crates/miden-protocol/src/account/component/storage/mod.rs +++ b/crates/miden-protocol/src/account/component/storage/mod.rs @@ -5,7 +5,7 @@ mod value_name; pub use value_name::{StorageValueName, StorageValueNameError}; mod type_registry; -pub use type_registry::{SchemaRequirement, SchemaTypeError, SchemaTypeId}; +pub use type_registry::{SchemaRequirement, SchemaType, SchemaTypeError}; mod init_storage_data; pub use init_storage_data::{InitStorageData, InitStorageDataError, WordValue}; diff --git a/crates/miden-protocol/src/account/component/storage/schema.rs b/crates/miden-protocol/src/account/component/storage/schema.rs deleted file mode 100644 index 710e70af94..0000000000 --- a/crates/miden-protocol/src/account/component/storage/schema.rs +++ /dev/null @@ -1,1231 +0,0 @@ -use alloc::boxed::Box; -use alloc::collections::BTreeMap; -use alloc::string::{String, ToString}; -use alloc::vec::Vec; - -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - -use super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaTypeId}; -use super::{InitStorageData, StorageValueName, WordValue}; -use crate::account::storage::is_reserved_slot_name; -use crate::account::{StorageMap, StorageSlot, StorageSlotName}; 
-use crate::crypto::utils::bytes_to_elements_with_padding; -use crate::errors::AccountComponentTemplateError; -use crate::{Felt, FieldElement, Hasher, Word}; - -// STORAGE SCHEMA -// ================================================================================================ - -/// Describes the storage schema of an account component in terms of its named storage slots. -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct StorageSchema { - slots: BTreeMap, -} - -impl StorageSchema { - /// Creates a new [`StorageSchema`]. - /// - /// # Errors - /// - If `fields` contains duplicate slot names. - /// - If `fields` contains the protocol-reserved faucet metadata slot name. - /// - If any slot schema is invalid. - /// - If multiple schema fields map to the same init value name. - pub fn new( - slots: impl IntoIterator, - ) -> Result { - let mut map = BTreeMap::new(); - for (slot_name, schema) in slots { - if map.insert(slot_name.clone(), schema).is_some() { - return Err(AccountComponentTemplateError::DuplicateSlotName(slot_name)); - } - } - - let schema = Self { slots: map }; - schema.validate()?; - Ok(schema) - } - - /// Returns an iterator over `(slot_name, schema)` pairs in slot-id order. - pub fn iter(&self) -> impl Iterator { - self.slots.iter() - } - - /// Returns a reference to the underlying slots map. - pub fn slots(&self) -> &BTreeMap { - &self.slots - } - - /// Builds the initial [`StorageSlot`]s for this schema using the provided initialization data. - pub fn build_storage_slots( - &self, - init_storage_data: &InitStorageData, - ) -> Result, AccountComponentTemplateError> { - self.slots - .iter() - .map(|(slot_name, schema)| schema.try_build_storage_slot(slot_name, init_storage_data)) - .collect() - } - - /// Returns a commitment to this storage schema definition. - /// - /// The commitment is computed over the serialized schema and does not include defaults. 
- pub fn commitment(&self) -> Word { - let mut bytes = Vec::new(); - self.write_into_with_optional_defaults(&mut bytes, false); - let elements = bytes_to_elements_with_padding(&bytes); - Hasher::hash_elements(&elements) - } - - /// Returns init-value requirements for the entire schema. - /// - /// The returned map includes both required values (no `default_value`) and optional values - /// (with `default_value`), and excludes map entries. - pub fn schema_requirements( - &self, - ) -> Result, AccountComponentTemplateError> { - let mut requirements = BTreeMap::new(); - for (slot_name, schema) in self.slots.iter() { - schema.collect_init_value_requirements(slot_name, &mut requirements)?; - } - Ok(requirements) - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - target.write_u16(self.slots.len() as u16); - for (slot_name, schema) in self.slots.iter() { - target.write(slot_name); - schema.write_into_with_optional_defaults(target, include_defaults); - } - } - - /// Validates schema-level invariants across all slots. - fn validate(&self) -> Result<(), AccountComponentTemplateError> { - let mut init_values = BTreeMap::new(); - - for (slot_name, schema) in self.slots.iter() { - if is_reserved_slot_name(slot_name) { - return Err(AccountComponentTemplateError::ReservedSlotName(slot_name.clone())); - } - - schema.validate()?; - schema.collect_init_value_requirements(slot_name, &mut init_values)?; - } - - Ok(()) - } -} - -impl Serializable for StorageSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - -impl Deserializable for StorageSchema { - fn read_from(source: &mut R) -> Result { - let num_entries = source.read_u16()? 
as usize; - let mut fields = BTreeMap::new(); - - for _ in 0..num_entries { - let slot_name = StorageSlotName::read_from(source)?; - let schema = StorageSlotSchema::read_from(source)?; - - if fields.insert(slot_name.clone(), schema).is_some() { - return Err(DeserializationError::InvalidValue(format!( - "duplicate slot name in storage schema: {slot_name}", - ))); - } - } - - let schema = StorageSchema::new(fields) - .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; - Ok(schema) - } -} - -fn validate_description_ascii(description: &str) -> Result<(), AccountComponentTemplateError> { - if description.is_ascii() { - Ok(()) - } else { - Err(AccountComponentTemplateError::InvalidSchema( - "description must contain only ASCII characters".to_string(), - )) - } -} - -// STORAGE SLOT SCHEMA -// ================================================================================================ - -/// Describes the schema for a storage slot. -/// Can describe either a value slot, or a map slot. -#[allow(clippy::large_enum_variant)] -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum StorageSlotSchema { - Value(ValueSlotSchema), - Map(MapSlotSchema), -} - -impl StorageSlotSchema { - fn collect_init_value_requirements( - &self, - slot_name: &StorageSlotName, - requirements: &mut BTreeMap, - ) -> Result<(), AccountComponentTemplateError> { - let slot_name = StorageValueName::from_slot_name(slot_name); - match self { - StorageSlotSchema::Value(slot) => { - slot.collect_init_value_requirements(slot_name, requirements) - }, - StorageSlotSchema::Map(_) => Ok(()), - } - } - - /// Builds a [`StorageSlot`] for the specified `slot_name` using the provided initialization - /// data. 
- pub fn try_build_storage_slot( - &self, - slot_name: &StorageSlotName, - init_storage_data: &InitStorageData, - ) -> Result { - match self { - StorageSlotSchema::Value(slot) => { - let word = slot.try_build_word(init_storage_data, slot_name)?; - Ok(StorageSlot::with_value(slot_name.clone(), word)) - }, - StorageSlotSchema::Map(slot) => { - let storage_map = slot.try_build_map(init_storage_data, slot_name)?; - Ok(StorageSlot::with_map(slot_name.clone(), storage_map)) - }, - } - } - - /// Validates this slot schema's internal invariants. - pub(crate) fn validate(&self) -> Result<(), AccountComponentTemplateError> { - match self { - StorageSlotSchema::Value(slot) => slot.validate()?, - StorageSlotSchema::Map(slot) => slot.validate()?, - } - - Ok(()) - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - match self { - StorageSlotSchema::Value(slot) => { - target.write_u8(0u8); - slot.write_into_with_optional_defaults(target, include_defaults); - }, - StorageSlotSchema::Map(slot) => { - target.write_u8(1u8); - slot.write_into_with_optional_defaults(target, include_defaults); - }, - } - } -} - -impl Serializable for StorageSlotSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - -impl Deserializable for StorageSlotSchema { - fn read_from(source: &mut R) -> Result { - let variant_tag = source.read_u8()?; - match variant_tag { - 0 => Ok(StorageSlotSchema::Value(ValueSlotSchema::read_from(source)?)), - 1 => Ok(StorageSlotSchema::Map(MapSlotSchema::read_from(source)?)), - _ => Err(DeserializationError::InvalidValue(format!( - "unknown variant tag '{variant_tag}' for StorageSlotSchema" - ))), - } - } -} - -// WORDS -// ================================================================================================ - -/// Defines how a word 
slot is described within the component's storage schema. -/// -/// Each word schema can either describe a whole-word typed value supplied at instantiation time -/// (`Simple`) or a composite word that explicitly defines each felt element (`Composite`). -#[derive(Debug, Clone, PartialEq, Eq)] -#[allow(clippy::large_enum_variant)] -pub enum WordSchema { - /// A whole-word typed value supplied at instantiation time. - Simple { - r#type: SchemaTypeId, - default_value: Option, - }, - /// A composed word that may mix defaults and typed fields. - Composite { value: [FeltSchema; 4] }, -} - -impl WordSchema { - pub fn new_simple(r#type: SchemaTypeId) -> Self { - WordSchema::Simple { r#type, default_value: None } - } - - pub fn new_simple_with_default(r#type: SchemaTypeId, default_value: Word) -> Self { - WordSchema::Simple { - r#type, - default_value: Some(default_value), - } - } - - pub fn new_value(value: impl Into<[FeltSchema; 4]>) -> Self { - WordSchema::Composite { value: value.into() } - } - - pub fn value(&self) -> Option<&[FeltSchema; 4]> { - match self { - WordSchema::Composite { value } => Some(value), - WordSchema::Simple { .. } => None, - } - } - - /// Returns the schema type identifier associated with whole-word init-supplied values. - pub fn word_type(&self) -> SchemaTypeId { - match self { - WordSchema::Simple { r#type, .. } => r#type.clone(), - WordSchema::Composite { .. 
} => SchemaTypeId::native_word(), - } - } - - fn collect_init_value_requirements( - &self, - value_name: StorageValueName, - description: Option, - requirements: &mut BTreeMap, - ) -> Result<(), AccountComponentTemplateError> { - match self { - WordSchema::Simple { r#type, default_value } => { - if *r#type == SchemaTypeId::void() { - return Ok(()); - } - - let default_value = default_value.map(|word| { - SCHEMA_TYPE_REGISTRY.display_word(r#type, word).value().to_string() - }); - - if requirements - .insert( - value_name.clone(), - SchemaRequirement { - description, - r#type: r#type.clone(), - default_value, - }, - ) - .is_some() - { - return Err(AccountComponentTemplateError::DuplicateInitValueName(value_name)); - } - - Ok(()) - }, - WordSchema::Composite { value } => { - for felt in value.iter() { - felt.collect_init_value_requirements(value_name.clone(), requirements)?; - } - Ok(()) - }, - } - } - - /// Validates the word schema type, defaults, and inner felts (if any). - fn validate(&self) -> Result<(), AccountComponentTemplateError> { - let type_exists = SCHEMA_TYPE_REGISTRY.contains_word_type(&self.word_type()); - if !type_exists { - return Err(AccountComponentTemplateError::InvalidType( - self.word_type().to_string(), - "Word".into(), - )); - } - - if let WordSchema::Simple { - r#type, - default_value: Some(default_value), - } = self - { - SCHEMA_TYPE_REGISTRY - .validate_word_value(r#type, *default_value) - .map_err(AccountComponentTemplateError::StorageValueParsingError)?; - } - - if let Some(felts) = self.value() { - for felt in felts { - felt.validate()?; - } - } - - Ok(()) - } - - /// Builds a [`Word`] from the provided initialization data according to this schema. - /// - /// For simple schemas, expects a direct slot value (not map or field entries). - /// For composite schemas, either parses a single value or builds the word from individual - /// felt entries. 
- pub(crate) fn try_build_word( - &self, - init_storage_data: &InitStorageData, - slot_name: &StorageSlotName, - ) -> Result { - let slot_prefix = StorageValueName::from_slot_name(slot_name); - let slot_value = init_storage_data.slot_value_entry(slot_name); - let has_fields = init_storage_data.has_field_entries_for_slot(slot_name); - - if init_storage_data.map_entries(slot_name).is_some() { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix, - "expected a value, got a map".into(), - )); - } - - match self { - WordSchema::Simple { r#type, default_value } => { - if has_fields { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix, - "expected a value, got field entries".into(), - )); - } - match slot_value { - Some(value) => parse_storage_value_with_schema(self, value, &slot_prefix), - None => { - if *r#type == SchemaTypeId::void() { - Ok(Word::empty()) - } else { - default_value.as_ref().copied().ok_or_else(|| { - AccountComponentTemplateError::InitValueNotProvided(slot_prefix) - }) - } - }, - } - }, - WordSchema::Composite { value } => { - if let Some(value) = slot_value { - if has_fields { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix, - "expected a single value, got both value and field entries".into(), - )); - } - return parse_storage_value_with_schema(self, value, &slot_prefix); - } - - let mut result = [Felt::ZERO; 4]; - for (index, felt_schema) in value.iter().enumerate() { - result[index] = felt_schema.try_build_felt(init_storage_data, slot_name)?; - } - Ok(Word::from(result)) - }, - } - } - - pub(crate) fn validate_word_value( - &self, - slot_prefix: &StorageValueName, - label: &str, - word: Word, - ) -> Result<(), AccountComponentTemplateError> { - match self { - WordSchema::Simple { r#type, .. 
} => { - SCHEMA_TYPE_REGISTRY.validate_word_value(r#type, word).map_err(|err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("{label} does not match `{}`: {err}", r#type), - ) - }) - }, - WordSchema::Composite { value } => { - for (index, felt_schema) in value.iter().enumerate() { - let felt_type = felt_schema.felt_type(); - SCHEMA_TYPE_REGISTRY.validate_felt_value(&felt_type, word[index]).map_err( - |err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("{label}[{index}] does not match `{felt_type}`: {err}"), - ) - }, - )?; - } - - Ok(()) - }, - } - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - match self { - WordSchema::Simple { r#type, default_value } => { - target.write_u8(0); - target.write(r#type); - let default_value = if include_defaults { *default_value } else { None }; - target.write(default_value); - }, - WordSchema::Composite { value } => { - target.write_u8(1); - for felt in value.iter() { - felt.write_into_with_optional_defaults(target, include_defaults); - } - }, - } - } -} - -impl Serializable for WordSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - -impl Deserializable for WordSchema { - fn read_from(source: &mut R) -> Result { - let tag = source.read_u8()?; - match tag { - 0 => { - let r#type = SchemaTypeId::read_from(source)?; - let default_value = Option::::read_from(source)?; - Ok(WordSchema::Simple { r#type, default_value }) - }, - 1 => { - let value = <[FeltSchema; 4]>::read_from(source)?; - Ok(WordSchema::Composite { value }) - }, - other => Err(DeserializationError::InvalidValue(format!( - "unknown tag '{other}' for WordSchema" - ))), - } - } -} - -impl From<[FeltSchema; 4]> for WordSchema { - fn from(value: 
[FeltSchema; 4]) -> Self { - WordSchema::new_value(value) - } -} - -impl From<[Felt; 4]> for WordSchema { - fn from(value: [Felt; 4]) -> Self { - WordSchema::new_simple_with_default(SchemaTypeId::native_word(), Word::from(value)) - } -} - -// FELT SCHEMA -// ================================================================================================ - -/// Supported element schema descriptors for a component's storage entries. -/// -/// Each felt element in a composed word slot is typed, can have an optional default value, and can -/// optionally be named to allow overriding at instantiation time. -/// -/// To avoid non-overridable constants, unnamed elements are allowed only when `type = "void"`, -/// which always evaluates to `0` and does not require init data. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct FeltSchema { - name: Option, - description: Option, - r#type: SchemaTypeId, - default_value: Option, -} - -impl FeltSchema { - /// Creates a new required typed felt field. - pub fn new_typed(r#type: SchemaTypeId, name: impl Into) -> Self { - FeltSchema { - name: Some(name.into()), - description: None, - r#type, - default_value: None, - } - } - - /// Creates a new typed felt field with a default value. - pub fn new_typed_with_default( - r#type: SchemaTypeId, - name: impl Into, - default_value: Felt, - ) -> Self { - FeltSchema { - name: Some(name.into()), - description: None, - r#type, - default_value: Some(default_value), - } - } - - /// Creates an unnamed `void` felt element. - pub fn new_void() -> Self { - FeltSchema { - name: None, - description: None, - r#type: SchemaTypeId::void(), - default_value: None, - } - } - - /// Sets the description of the [`FeltSchema`] and returns `self`. - pub fn with_description(self, description: impl Into) -> Self { - FeltSchema { - description: Some(description.into()), - ..self - } - } - - /// Returns the felt type. 
- pub fn felt_type(&self) -> SchemaTypeId { - self.r#type.clone() - } - - pub fn name(&self) -> Option<&str> { - self.name.as_deref() - } - - pub fn description(&self) -> Option<&String> { - self.description.as_ref() - } - - pub fn default_value(&self) -> Option { - self.default_value - } - - fn collect_init_value_requirements( - &self, - slot_prefix: StorageValueName, - requirements: &mut BTreeMap, - ) -> Result<(), AccountComponentTemplateError> { - if self.r#type == SchemaTypeId::void() { - return Ok(()); - } - - let Some(name) = self.name.as_deref() else { - return Err(AccountComponentTemplateError::InvalidSchema( - "non-void felt elements must be named".into(), - )); - }; - let value_name = - StorageValueName::from_slot_name_with_suffix(slot_prefix.slot_name(), name) - .map_err(|err| AccountComponentTemplateError::InvalidSchema(err.to_string()))?; - - let default_value = self - .default_value - .map(|felt| SCHEMA_TYPE_REGISTRY.display_felt(&self.r#type, felt)); - - if requirements - .insert( - value_name.clone(), - SchemaRequirement { - description: self.description.clone(), - r#type: self.r#type.clone(), - default_value, - }, - ) - .is_some() - { - return Err(AccountComponentTemplateError::DuplicateInitValueName(value_name)); - } - - Ok(()) - } - - /// Attempts to convert the [`FeltSchema`] into a [`Felt`]. - /// - /// If the schema variant is typed, the value is retrieved from `init_storage_data`, - /// identified by its key. Otherwise, the returned value is just the inner element. 
- pub(crate) fn try_build_felt( - &self, - init_storage_data: &InitStorageData, - slot_name: &StorageSlotName, - ) -> Result { - let value_name = match self.name.as_deref() { - Some(name) => Some( - StorageValueName::from_slot_name_with_suffix(slot_name, name) - .map_err(|err| AccountComponentTemplateError::InvalidSchema(err.to_string()))?, - ), - None => None, - }; - - if let Some(value_name) = value_name.clone() - && let Some(raw_value) = init_storage_data.value_entry(&value_name) - { - match raw_value { - WordValue::Atomic(raw) => { - let felt = SCHEMA_TYPE_REGISTRY - .try_parse_felt(&self.r#type, raw) - .map_err(AccountComponentTemplateError::StorageValueParsingError)?; - return Ok(felt); - }, - WordValue::Elements(_) => { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - value_name, - "expected an atomic value, got a 4-element array".into(), - )); - }, - WordValue::FullyTyped(_) => { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - value_name, - "expected an atomic value, got a word".into(), - )); - }, - } - } - - if self.r#type == SchemaTypeId::void() { - return Ok(Felt::ZERO); - } - - if let Some(default_value) = self.default_value { - return Ok(default_value); - } - - let Some(value_name) = value_name else { - return Err(AccountComponentTemplateError::InvalidSchema( - "non-void felt elements must be named".into(), - )); - }; - - Err(AccountComponentTemplateError::InitValueNotProvided(value_name)) - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - target.write(&self.name); - target.write(&self.description); - target.write(&self.r#type); - let default_value = if include_defaults { self.default_value } else { None }; - target.write(default_value); - } - - /// Validates the felt type, naming rules, and default value (if any). 
- fn validate(&self) -> Result<(), AccountComponentTemplateError> { - if let Some(description) = self.description.as_deref() { - validate_description_ascii(description)?; - } - - let type_exists = SCHEMA_TYPE_REGISTRY.contains_felt_type(&self.felt_type()); - if !type_exists { - return Err(AccountComponentTemplateError::InvalidType( - self.felt_type().to_string(), - "Felt".into(), - )); - } - - if self.r#type == SchemaTypeId::void() { - if self.name.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( - "void felt elements must be unnamed".into(), - )); - } - if self.default_value.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( - "void felt elements cannot define `default-value`".into(), - )); - } - return Ok(()); - } - - if self.name.is_none() { - return Err(AccountComponentTemplateError::InvalidSchema( - "non-void felt elements must be named".into(), - )); - } - - if let Some(value) = self.default_value { - SCHEMA_TYPE_REGISTRY - .validate_felt_value(&self.felt_type(), value) - .map_err(AccountComponentTemplateError::StorageValueParsingError)?; - } - Ok(()) - } -} - -impl Serializable for FeltSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - -impl Deserializable for FeltSchema { - fn read_from(source: &mut R) -> Result { - let name = Option::::read_from(source)?; - let description = Option::::read_from(source)?; - let r#type = SchemaTypeId::read_from(source)?; - let default_value = Option::::read_from(source)?; - Ok(FeltSchema { name, description, r#type, default_value }) - } -} - -/// Describes the schema for a storage value slot. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ValueSlotSchema { - description: Option, - word: WordSchema, -} - -impl ValueSlotSchema { - pub fn new(description: Option, word: WordSchema) -> Self { - Self { description, word } - } - - pub fn description(&self) -> Option<&String> { - self.description.as_ref() - } - - pub fn word(&self) -> &WordSchema { - &self.word - } - - fn collect_init_value_requirements( - &self, - value_name: StorageValueName, - requirements: &mut BTreeMap, - ) -> Result<(), AccountComponentTemplateError> { - self.word.collect_init_value_requirements( - value_name, - self.description.clone(), - requirements, - ) - } - - /// Builds a [Word] from the provided initialization data using the inner word schema. - pub fn try_build_word( - &self, - init_storage_data: &InitStorageData, - slot_name: &StorageSlotName, - ) -> Result { - self.word.try_build_word(init_storage_data, slot_name) - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - target.write(&self.description); - self.word.write_into_with_optional_defaults(target, include_defaults); - } - - /// Validates the slot's word schema. - pub(crate) fn validate(&self) -> Result<(), AccountComponentTemplateError> { - if let Some(description) = self.description.as_deref() { - validate_description_ascii(description)?; - } - self.word.validate()?; - Ok(()) - } -} - -impl Serializable for ValueSlotSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - -impl Deserializable for ValueSlotSchema { - fn read_from(source: &mut R) -> Result { - let description = Option::::read_from(source)?; - let word = WordSchema::read_from(source)?; - Ok(ValueSlotSchema::new(description, word)) - } -} - -/// Describes the schema for a storage map slot. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct MapSlotSchema { - description: Option, - default_values: Option>, - key_schema: WordSchema, - value_schema: WordSchema, -} - -impl MapSlotSchema { - pub fn new( - description: Option, - default_values: Option>, - key_schema: WordSchema, - value_schema: WordSchema, - ) -> Self { - Self { - description, - default_values, - key_schema, - value_schema, - } - } - - pub fn description(&self) -> Option<&String> { - self.description.as_ref() - } - - /// Builds a [`StorageMap`] from the provided initialization data. - /// - /// Merges any default values with entries from the init data, validating that the data - /// contains map entries (not a direct value or field entries). - pub fn try_build_map( - &self, - init_storage_data: &InitStorageData, - slot_name: &StorageSlotName, - ) -> Result { - let mut entries = self.default_values.clone().unwrap_or_default(); - let slot_prefix = StorageValueName::from_slot_name(slot_name); - - if init_storage_data.slot_value_entry(slot_name).is_some() { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix, - "expected a map, got a value".into(), - )); - } - if init_storage_data.has_field_entries_for_slot(slot_name) { - return Err(AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix, - "expected a map, got field entries".into(), - )); - } - if let Some(init_entries) = init_storage_data.map_entries(slot_name) { - let mut parsed_entries = Vec::with_capacity(init_entries.len()); - for (raw_key, raw_value) in init_entries.iter() { - let key = parse_storage_value_with_schema(&self.key_schema, raw_key, &slot_prefix)?; - let value = - parse_storage_value_with_schema(&self.value_schema, raw_value, &slot_prefix)?; - - parsed_entries.push((key, value)); - } - - for (key, value) in parsed_entries.iter() { - entries.insert(*key, *value); - } - } - - if entries.is_empty() { - return Ok(StorageMap::new()); - } - - StorageMap::with_entries(entries) - 
.map_err(|err| AccountComponentTemplateError::StorageMapHasDuplicateKeys(Box::new(err))) - } - - pub fn key_schema(&self) -> &WordSchema { - &self.key_schema - } - - pub fn value_schema(&self) -> &WordSchema { - &self.value_schema - } - - pub fn default_values(&self) -> Option> { - self.default_values.clone() - } - - /// Serializes the schema, optionally ignoring the default values (used for committing to a - /// schema definition). - fn write_into_with_optional_defaults( - &self, - target: &mut W, - include_defaults: bool, - ) { - target.write(&self.description); - let default_values = if include_defaults { - self.default_values.clone() - } else { - None - }; - target.write(&default_values); - self.key_schema.write_into_with_optional_defaults(target, include_defaults); - self.value_schema.write_into_with_optional_defaults(target, include_defaults); - } - - /// Validates key/value word schemas for this map slot. - fn validate(&self) -> Result<(), AccountComponentTemplateError> { - if let Some(description) = self.description.as_deref() { - validate_description_ascii(description)?; - } - self.key_schema.validate()?; - self.value_schema.validate()?; - Ok(()) - } -} - -pub(super) fn parse_storage_value_with_schema( - schema: &WordSchema, - raw_value: &WordValue, - slot_prefix: &StorageValueName, -) -> Result { - let word = match (schema, raw_value) { - (_, WordValue::FullyTyped(word)) => *word, - (WordSchema::Simple { r#type, .. }, raw_value) => { - parse_simple_word_value(r#type, raw_value, slot_prefix)? - }, - (WordSchema::Composite { value }, WordValue::Elements(elements)) => { - parse_composite_elements(value, elements, slot_prefix)? - }, - (WordSchema::Composite { .. 
}, WordValue::Atomic(value)) => SCHEMA_TYPE_REGISTRY - .try_parse_word(&SchemaTypeId::native_word(), value) - .map_err(|err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("failed to parse value as `word`: {err}"), - ) - })?, - }; - - schema.validate_word_value(slot_prefix, "value", word)?; - Ok(word) -} - -fn parse_simple_word_value( - schema_type: &SchemaTypeId, - raw_value: &WordValue, - slot_prefix: &StorageValueName, -) -> Result { - match raw_value { - WordValue::Atomic(value) => { - SCHEMA_TYPE_REGISTRY.try_parse_word(schema_type, value).map_err(|err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("failed to parse value as `{}`: {err}", schema_type), - ) - }) - }, - WordValue::Elements(elements) => { - let felts: Vec = elements - .iter() - .map(|element| { - SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaTypeId::native_felt(), element) - }) - .collect::>() - .map_err(|err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("failed to parse value element as `felt`: {err}"), - ) - })?; - let felts: [Felt; 4] = felts.try_into().expect("length is 4"); - Ok(Word::from(felts)) - }, - WordValue::FullyTyped(word) => Ok(*word), - } -} - -fn parse_composite_elements( - schema: &[FeltSchema; 4], - elements: &[String; 4], - slot_prefix: &StorageValueName, -) -> Result { - let mut felts = [Felt::ZERO; 4]; - for (index, felt_schema) in schema.iter().enumerate() { - let felt_type = felt_schema.felt_type(); - felts[index] = - SCHEMA_TYPE_REGISTRY - .try_parse_felt(&felt_type, &elements[index]) - .map_err(|err| { - AccountComponentTemplateError::InvalidInitStorageValue( - slot_prefix.clone(), - format!("failed to parse value[{index}] as `{felt_type}`: {err}"), - ) - })?; - } - Ok(Word::from(felts)) -} - -impl Serializable for MapSlotSchema { - fn write_into(&self, target: &mut W) { - self.write_into_with_optional_defaults(target, true); - } -} - 
-impl Deserializable for MapSlotSchema { - fn read_from(source: &mut R) -> Result { - let description = Option::::read_from(source)?; - let default_values = Option::>::read_from(source)?; - let key_schema = WordSchema::read_from(source)?; - let value_schema = WordSchema::read_from(source)?; - Ok(MapSlotSchema::new(description, default_values, key_schema, value_schema)) - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use alloc::collections::BTreeMap; - - use super::*; - - #[test] - fn map_slot_schema_default_values_returns_map() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); - let mut default_values = BTreeMap::new(); - default_values.insert( - Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), - Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), - ); - let slot = MapSlotSchema::new( - Some("static map".into()), - Some(default_values), - word_schema.clone(), - word_schema, - ); - - let mut expected = BTreeMap::new(); - expected.insert( - Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), - Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), - ); - - assert_eq!(slot.default_values(), Some(expected)); - } - - #[test] - fn value_slot_schema_exposes_felt_schema_types() { - let felt_values = [ - FeltSchema::new_typed(SchemaTypeId::u8(), "a"), - FeltSchema::new_typed(SchemaTypeId::u16(), "b"), - FeltSchema::new_typed(SchemaTypeId::u32(), "c"), - FeltSchema::new_typed(SchemaTypeId::new("felt").unwrap(), "d"), - ]; - - let slot = ValueSlotSchema::new(None, WordSchema::new_value(felt_values)); - let WordSchema::Composite { value } = slot.word() else { - panic!("expected composite word schema"); - }; - - assert_eq!(value[0].felt_type(), SchemaTypeId::u8()); - assert_eq!(value[1].felt_type(), SchemaTypeId::u16()); - assert_eq!(value[2].felt_type(), 
SchemaTypeId::u32()); - assert_eq!(value[3].felt_type(), SchemaTypeId::new("felt").unwrap()); - } - - #[test] - fn map_slot_schema_key_and_value_types() { - let key_schema = WordSchema::new_simple(SchemaTypeId::new("sampling::Key").unwrap()); - - let value_schema = WordSchema::new_value([ - FeltSchema::new_typed(SchemaTypeId::native_felt(), "a"), - FeltSchema::new_typed(SchemaTypeId::native_felt(), "b"), - FeltSchema::new_typed(SchemaTypeId::native_felt(), "c"), - FeltSchema::new_typed(SchemaTypeId::native_felt(), "d"), - ]); - - let slot = MapSlotSchema::new(None, None, key_schema, value_schema); - - assert_eq!( - slot.key_schema(), - &WordSchema::new_simple(SchemaTypeId::new("sampling::Key").unwrap()) - ); - - let WordSchema::Composite { value } = slot.value_schema() else { - panic!("expected composite word schema for map values"); - }; - for felt in value.iter() { - assert_eq!(felt.felt_type(), SchemaTypeId::native_felt()); - } - } - - #[test] - fn value_slot_schema_accepts_typed_word_init_value() { - let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaTypeId::native_word())); - let slot_name: StorageSlotName = "demo::slot".parse().unwrap(); - - let expected = Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]); - let mut init_data = InitStorageData::default(); - init_data - .set_value(StorageValueName::from_slot_name(&slot_name), expected) - .unwrap(); - - let built = slot.try_build_word(&init_data, &slot_name).unwrap(); - assert_eq!(built, expected); - } - - #[test] - fn value_slot_schema_accepts_felt_typed_word_init_value() { - let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaTypeId::u8())); - let slot_name: StorageSlotName = "demo::u8_word".parse().unwrap(); - - let mut init_data = InitStorageData::default(); - init_data.set_value(StorageValueName::from_slot_name(&slot_name), "6").unwrap(); - - let built = slot.try_build_word(&init_data, &slot_name).unwrap(); - assert_eq!(built, Word::from([Felt::new(0), 
Felt::new(0), Felt::new(0), Felt::new(6)])); - } - - #[test] - fn value_slot_schema_accepts_typed_felt_init_value_in_composed_word() { - let word = WordSchema::new_value([ - FeltSchema::new_typed(SchemaTypeId::u8(), "a"), - FeltSchema::new_typed_with_default(SchemaTypeId::native_felt(), "b", Felt::new(2)), - FeltSchema::new_typed_with_default(SchemaTypeId::native_felt(), "c", Felt::new(3)), - FeltSchema::new_typed_with_default(SchemaTypeId::native_felt(), "d", Felt::new(4)), - ]); - let slot = ValueSlotSchema::new(None, word); - let slot_name: StorageSlotName = "demo::slot".parse().unwrap(); - - let mut init_data = InitStorageData::default(); - init_data - .set_value(StorageValueName::from_slot_name_with_suffix(&slot_name, "a").unwrap(), "1") - .unwrap(); - - let built = slot.try_build_word(&init_data, &slot_name).unwrap(); - assert_eq!(built, Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)])); - } - - #[test] - fn map_slot_schema_accepts_typed_map_init_value() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); - let slot = MapSlotSchema::new(None, None, word_schema.clone(), word_schema); - let slot_name: StorageSlotName = "demo::map".parse().unwrap(); - - let entries = vec![( - WordValue::Elements(["1".into(), "0".into(), "0".into(), "0".into()]), - WordValue::Elements(["10".into(), "11".into(), "12".into(), "13".into()]), - )]; - let mut init_data = InitStorageData::default(); - init_data.set_map_values(slot_name.clone(), entries.clone()).unwrap(); - - let built = slot.try_build_map(&init_data, &slot_name).unwrap(); - let expected = StorageMap::with_entries([( - Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), - Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), - )]) - .unwrap(); - assert_eq!(built, expected); - } - - #[test] - fn map_slot_schema_missing_init_value_defaults_to_empty_map() { - let word_schema = WordSchema::new_simple(SchemaTypeId::native_word()); - let slot = 
MapSlotSchema::new(None, None, word_schema.clone(), word_schema); - let built = slot - .try_build_map(&InitStorageData::default(), &"demo::map".parse().unwrap()) - .unwrap(); - assert_eq!(built, StorageMap::new()); - } -} diff --git a/crates/miden-protocol/src/account/component/storage/schema/felt.rs b/crates/miden-protocol/src/account/component/storage/schema/felt.rs new file mode 100644 index 0000000000..1bf4a4ff9a --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/felt.rs @@ -0,0 +1,301 @@ +use alloc::collections::BTreeMap; +use alloc::string::{String, ToString}; + +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaType}; +use super::super::{InitStorageData, StorageValueName, WordValue}; +use super::validate_description_ascii; +use crate::Felt; +use crate::account::StorageSlotName; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// FELT SCHEMA +// ================================================================================================ + +/// Supported element schema descriptors for a component's storage entries. +/// +/// Each felt element in a composed word slot is typed, can have an optional default value, and can +/// optionally be named to allow overriding at instantiation time. +/// +/// To avoid non-overridable constants, unnamed elements are allowed only when `type = "void"`, +/// which always evaluates to `0` and does not require init data. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FeltSchema { + name: Option, + description: Option, + r#type: SchemaType, + default_value: Option, +} + +impl FeltSchema { + /// Creates a new required typed felt field. 
+ pub fn new_typed(r#type: SchemaType, name: impl Into) -> Self { + FeltSchema { + name: Some(name.into()), + description: None, + r#type, + default_value: None, + } + } + + /// Creates a new typed felt field with a default value. + pub fn new_typed_with_default( + r#type: SchemaType, + name: impl Into, + default_value: Felt, + ) -> Self { + FeltSchema { + name: Some(name.into()), + description: None, + r#type, + default_value: Some(default_value), + } + } + + /// Creates an unnamed `void` felt element. + pub fn new_void() -> Self { + FeltSchema { + name: None, + description: None, + r#type: SchemaType::void(), + default_value: None, + } + } + + /// Creates a new required felt field typed as [`SchemaType::native_felt()`]. + pub fn felt(name: impl Into) -> Self { + Self::new_typed(SchemaType::native_felt(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::native_word()`]. + pub fn word(name: impl Into) -> Self { + Self::new_typed(SchemaType::native_word(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::u8()`]. + pub fn u8(name: impl Into) -> Self { + Self::new_typed(SchemaType::u8(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::u16()`]. + pub fn u16(name: impl Into) -> Self { + Self::new_typed(SchemaType::u16(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::u32()`]. + pub fn u32(name: impl Into) -> Self { + Self::new_typed(SchemaType::u32(), name) + } + + /// Creates a new required felt field typed as [`SchemaType::bool()`]. + pub fn bool(name: impl Into) -> Self { + Self::new_typed(SchemaType::bool(), name) + } + + /// Sets the default value of the [`FeltSchema`] and returns `self`. + pub fn with_default(self, default_value: Felt) -> Self { + FeltSchema { + default_value: Some(default_value), + ..self + } + } + + /// Sets the description of the [`FeltSchema`] and returns `self`. 
+ pub fn with_description(self, description: impl Into) -> Self { + FeltSchema { + description: Some(description.into()), + ..self + } + } + + /// Returns the felt type. + pub fn felt_type(&self) -> SchemaType { + self.r#type.clone() + } + + pub fn name(&self) -> Option<&str> { + self.name.as_deref() + } + + pub fn description(&self) -> Option<&String> { + self.description.as_ref() + } + + pub fn default_value(&self) -> Option { + self.default_value + } + + pub(super) fn collect_init_value_requirements( + &self, + slot_prefix: StorageValueName, + requirements: &mut BTreeMap, + ) -> Result<(), ComponentMetadataError> { + if self.r#type == SchemaType::void() { + return Ok(()); + } + + let Some(name) = self.name.as_deref() else { + return Err(ComponentMetadataError::InvalidSchema( + "non-void felt elements must be named".into(), + )); + }; + let value_name = + StorageValueName::from_slot_name_with_suffix(slot_prefix.slot_name(), name) + .map_err(|err| ComponentMetadataError::InvalidSchema(err.to_string()))?; + + let default_value = self + .default_value + .map(|felt| SCHEMA_TYPE_REGISTRY.display_felt(&self.r#type, felt)); + + if requirements + .insert( + value_name.clone(), + SchemaRequirement { + description: self.description.clone(), + r#type: self.r#type.clone(), + default_value, + }, + ) + .is_some() + { + return Err(ComponentMetadataError::DuplicateInitValueName(value_name)); + } + + Ok(()) + } + + /// Attempts to convert the [`FeltSchema`] into a [`Felt`]. + /// + /// If the schema variant is typed, the value is retrieved from `init_storage_data`, + /// identified by its key. Otherwise, the returned value is just the inner element. 
+ pub(crate) fn try_build_felt( + &self, + init_storage_data: &InitStorageData, + slot_name: &StorageSlotName, + ) -> Result { + let value_name = match self.name.as_deref() { + Some(name) => Some( + StorageValueName::from_slot_name_with_suffix(slot_name, name) + .map_err(|err| ComponentMetadataError::InvalidSchema(err.to_string()))?, + ), + None => None, + }; + + if let Some(value_name) = value_name.clone() + && let Some(raw_value) = init_storage_data.value_entry(&value_name) + { + match raw_value { + WordValue::Atomic(raw) => { + let felt = SCHEMA_TYPE_REGISTRY + .try_parse_felt(&self.r#type, raw) + .map_err(ComponentMetadataError::StorageValueParsingError)?; + return Ok(felt); + }, + WordValue::Elements(_) => { + return Err(ComponentMetadataError::InvalidInitStorageValue( + value_name, + "expected an atomic value, got a 4-element array".into(), + )); + }, + WordValue::FullyTyped(_) => { + return Err(ComponentMetadataError::InvalidInitStorageValue( + value_name, + "expected an atomic value, got a word".into(), + )); + }, + } + } + + if self.r#type == SchemaType::void() { + return Ok(Felt::ZERO); + } + + if let Some(default_value) = self.default_value { + return Ok(default_value); + } + + let Some(value_name) = value_name else { + return Err(ComponentMetadataError::InvalidSchema( + "non-void felt elements must be named".into(), + )); + }; + + Err(ComponentMetadataError::InitValueNotProvided(value_name)) + } + + /// Validates that the defined felt type exists. 
+ pub(super) fn validate(&self) -> Result<(), ComponentMetadataError> { + if let Some(description) = self.description.as_deref() { + validate_description_ascii(description)?; + } + + let type_exists = SCHEMA_TYPE_REGISTRY.contains_felt_type(&self.felt_type()); + if !type_exists { + return Err(ComponentMetadataError::InvalidType( + self.felt_type().to_string(), + "Felt".into(), + )); + } + + if self.r#type == SchemaType::void() { + if self.name.is_some() { + return Err(ComponentMetadataError::InvalidSchema( + "void felt elements must be unnamed".into(), + )); + } + if self.default_value.is_some() { + return Err(ComponentMetadataError::InvalidSchema( + "void felt elements cannot define `default-value`".into(), + )); + } + return Ok(()); + } + + if self.name.is_none() { + return Err(ComponentMetadataError::InvalidSchema( + "non-void felt elements must be named".into(), + )); + } + + if let Some(value) = self.default_value { + SCHEMA_TYPE_REGISTRY + .validate_felt_value(&self.felt_type(), value) + .map_err(ComponentMetadataError::StorageValueParsingError)?; + } + Ok(()) + } + + pub(super) fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + target.write(&self.name); + target.write(&self.description); + target.write(&self.r#type); + let default_value = if include_defaults { self.default_value } else { None }; + target.write(default_value); + } +} + +impl Serializable for FeltSchema { + fn write_into(&self, target: &mut W) { + self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for FeltSchema { + fn read_from(source: &mut R) -> Result { + let name = Option::::read_from(source)?; + let description = Option::::read_from(source)?; + let r#type = SchemaType::read_from(source)?; + let default_value = Option::::read_from(source)?; + Ok(FeltSchema { name, description, r#type, default_value }) + } +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs 
b/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs new file mode 100644 index 0000000000..065d5877ca --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/map_slot.rs @@ -0,0 +1,151 @@ +use alloc::boxed::Box; +use alloc::collections::BTreeMap; +use alloc::string::String; +use alloc::vec::Vec; + +use super::super::{InitStorageData, StorageValueName}; +use super::{WordSchema, parse_storage_value_with_schema, validate_description_ascii}; +use crate::Word; +use crate::account::{StorageMap, StorageMapKey, StorageSlotName}; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// MAP SLOT SCHEMA +// ================================================================================================ + +/// Describes the schema for a storage map slot. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct MapSlotSchema { + description: Option, + default_values: Option>, + key_schema: WordSchema, + value_schema: WordSchema, +} + +impl MapSlotSchema { + pub fn new( + description: Option, + default_values: Option>, + key_schema: WordSchema, + value_schema: WordSchema, + ) -> Self { + Self { + description, + default_values, + key_schema, + value_schema, + } + } + + pub fn description(&self) -> Option<&String> { + self.description.as_ref() + } + + /// Builds a [`StorageMap`] from the provided initialization data. + /// + /// Merges any default values with entries from the init data, validating that the data + /// contains map entries (not a direct value or field entries). 
+ pub fn try_build_map( + &self, + init_storage_data: &InitStorageData, + slot_name: &StorageSlotName, + ) -> Result { + let mut entries = self.default_values.clone().unwrap_or_default(); + let slot_prefix = StorageValueName::from_slot_name(slot_name); + + if init_storage_data.slot_value_entry(slot_name).is_some() { + return Err(ComponentMetadataError::InvalidInitStorageValue( + slot_prefix, + "expected a map, got a value".into(), + )); + } + if init_storage_data.has_field_entries_for_slot(slot_name) { + return Err(ComponentMetadataError::InvalidInitStorageValue( + slot_prefix, + "expected a map, got field entries".into(), + )); + } + if let Some(init_entries) = init_storage_data.map_entries(slot_name) { + let mut parsed_entries = Vec::with_capacity(init_entries.len()); + for (raw_key, raw_value) in init_entries.iter() { + let key = parse_storage_value_with_schema(&self.key_schema, raw_key, &slot_prefix)?; + let value = + parse_storage_value_with_schema(&self.value_schema, raw_value, &slot_prefix)?; + + parsed_entries.push((key, value)); + } + + for (key, value) in parsed_entries.iter() { + entries.insert(*key, *value); + } + } + + if entries.is_empty() { + return Ok(StorageMap::new()); + } + + StorageMap::with_entries( + entries.into_iter().map(|(key, value)| (StorageMapKey::from_raw(key), value)), + ) + .map_err(|err| ComponentMetadataError::StorageMapHasDuplicateKeys(Box::new(err))) + } + + pub fn key_schema(&self) -> &WordSchema { + &self.key_schema + } + + pub fn value_schema(&self) -> &WordSchema { + &self.value_schema + } + + pub fn default_values(&self) -> Option> { + self.default_values.clone() + } + + pub(super) fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + target.write(&self.description); + let default_values = if include_defaults { + self.default_values.clone() + } else { + None + }; + target.write(&default_values); + self.key_schema.write_into_with_optional_defaults(target, include_defaults); + 
self.value_schema.write_into_with_optional_defaults(target, include_defaults); + } + + pub(super) fn validate(&self) -> Result<(), ComponentMetadataError> { + if let Some(description) = self.description.as_deref() { + validate_description_ascii(description)?; + } + self.key_schema.validate()?; + self.value_schema.validate()?; + Ok(()) + } +} + +impl Serializable for MapSlotSchema { + fn write_into(&self, target: &mut W) { + self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for MapSlotSchema { + fn read_from(source: &mut R) -> Result { + let description = Option::::read_from(source)?; + let default_values = Option::>::read_from(source)?; + let key_schema = WordSchema::read_from(source)?; + let value_schema = WordSchema::read_from(source)?; + Ok(MapSlotSchema::new(description, default_values, key_schema, value_schema)) + } +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/mod.rs b/crates/miden-protocol/src/account/component/storage/schema/mod.rs new file mode 100644 index 0000000000..fd93070c9d --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/mod.rs @@ -0,0 +1,179 @@ +use alloc::collections::BTreeMap; +use alloc::string::ToString; +use alloc::vec::Vec; + +use super::type_registry::SchemaRequirement; +use super::{InitStorageData, StorageValueName}; +use crate::account::{StorageSlot, StorageSlotName}; +use crate::crypto::utils::bytes_to_elements_with_padding; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Hasher, Word}; + +mod felt; +pub use felt::FeltSchema; + +mod map_slot; +pub use map_slot::MapSlotSchema; + +mod parse; +pub(crate) use parse::parse_storage_value_with_schema; + +mod slot; +pub use slot::StorageSlotSchema; + +mod value_slot; +pub use value_slot::ValueSlotSchema; + +mod word; +pub use word::WordSchema; + +#[cfg(test)] +mod tests; + +// 
STORAGE SCHEMA +// ================================================================================================ + +/// Describes the storage schema of an account component in terms of its named storage slots. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct StorageSchema { + slots: BTreeMap, +} + +impl StorageSchema { + /// Creates a new [`StorageSchema`]. + /// + /// # Errors + /// - If `fields` contains duplicate slot names. + /// - If any slot schema is invalid. + /// - If multiple schema fields map to the same init value name. + pub fn new( + slots: impl IntoIterator, + ) -> Result { + let mut map = BTreeMap::new(); + for (slot_name, schema) in slots { + if map.insert(slot_name.clone(), schema).is_some() { + return Err(ComponentMetadataError::DuplicateSlotName(slot_name)); + } + } + + let schema = Self { slots: map }; + schema.validate()?; + Ok(schema) + } + + /// Returns an iterator over `(slot_name, schema)` pairs in slot-id order. + pub fn iter(&self) -> impl Iterator { + self.slots.iter() + } + + /// Returns a reference to the underlying slots map. + pub fn slots(&self) -> &BTreeMap { + &self.slots + } + + /// Builds the initial [`StorageSlot`]s for this schema using the provided initialization data. + pub fn build_storage_slots( + &self, + init_storage_data: &InitStorageData, + ) -> Result, ComponentMetadataError> { + self.slots + .iter() + .map(|(slot_name, schema)| schema.try_build_storage_slot(slot_name, init_storage_data)) + .collect() + } + + /// Returns a commitment to this storage schema definition. + /// + /// The commitment is computed over the serialized schema and does not include defaults. + pub fn commitment(&self) -> Word { + let mut bytes = Vec::new(); + self.write_into_with_optional_defaults(&mut bytes, false); + let elements = bytes_to_elements_with_padding(&bytes); + Hasher::hash_elements(&elements) + } + + /// Returns init-value requirements for the entire schema. 
+ /// + /// The returned map includes both required values (no `default_value`) and optional values + /// (with `default_value`), and excludes map entries. + pub fn schema_requirements( + &self, + ) -> Result, ComponentMetadataError> { + let mut requirements = BTreeMap::new(); + for (slot_name, schema) in self.slots.iter() { + schema.collect_init_value_requirements(slot_name, &mut requirements)?; + } + Ok(requirements) + } + + /// Serializes the schema, optionally ignoring the default values (used for committing to a + /// schema definition). + fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + target.write_u16(self.slots.len() as u16); + for (slot_name, schema) in self.slots.iter() { + target.write(slot_name); + schema.write_into_with_optional_defaults(target, include_defaults); + } + } + + /// Validates schema-level invariants across all slots. + fn validate(&self) -> Result<(), ComponentMetadataError> { + let mut init_values = BTreeMap::new(); + + for (slot_name, schema) in self.slots.iter() { + schema.validate()?; + schema.collect_init_value_requirements(slot_name, &mut init_values)?; + } + + Ok(()) + } +} + +impl Serializable for StorageSchema { + fn write_into(&self, target: &mut W) { + self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for StorageSchema { + fn read_from(source: &mut R) -> Result { + let num_entries = source.read_u16()? 
as usize; + let mut fields = BTreeMap::new(); + + for _ in 0..num_entries { + let slot_name = StorageSlotName::read_from(source)?; + let schema = StorageSlotSchema::read_from(source)?; + + if fields.insert(slot_name.clone(), schema).is_some() { + return Err(DeserializationError::InvalidValue(format!( + "duplicate slot name in storage schema: {slot_name}", + ))); + } + } + + let schema = StorageSchema::new(fields) + .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; + Ok(schema) + } +} + +pub(super) fn validate_description_ascii(description: &str) -> Result<(), ComponentMetadataError> { + if description.is_ascii() { + Ok(()) + } else { + Err(ComponentMetadataError::InvalidSchema( + "description must contain only ASCII characters".to_string(), + )) + } +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/parse.rs b/crates/miden-protocol/src/account/component/storage/schema/parse.rs new file mode 100644 index 0000000000..c3e993c6bd --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/parse.rs @@ -0,0 +1,93 @@ +use alloc::string::String; +use alloc::vec::Vec; + +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaType}; +use super::super::{StorageValueName, WordValue}; +use super::{FeltSchema, WordSchema}; +use crate::errors::ComponentMetadataError; +use crate::{Felt, Word}; + +// HELPER FUNCTIONS +// ================================================================================================ + +pub(crate) fn parse_storage_value_with_schema( + schema: &WordSchema, + raw_value: &WordValue, + slot_prefix: &StorageValueName, +) -> Result { + let word = match (schema, raw_value) { + (_, WordValue::FullyTyped(word)) => *word, + (WordSchema::Simple { r#type, .. }, raw_value) => { + parse_simple_word_value(r#type, raw_value, slot_prefix)? + }, + (WordSchema::Composite { value }, WordValue::Elements(elements)) => { + parse_composite_elements(value, elements, slot_prefix)? 
+ }, + (WordSchema::Composite { .. }, WordValue::Atomic(value)) => SCHEMA_TYPE_REGISTRY + .try_parse_word(&SchemaType::native_word(), value) + .map_err(|err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("failed to parse value as `word`: {err}"), + ) + })?, + }; + + schema.validate_word_value(slot_prefix, "value", word)?; + Ok(word) +} + +fn parse_simple_word_value( + schema_type: &SchemaType, + raw_value: &WordValue, + slot_prefix: &StorageValueName, +) -> Result { + match raw_value { + WordValue::Atomic(value) => { + SCHEMA_TYPE_REGISTRY.try_parse_word(schema_type, value).map_err(|err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("failed to parse value as `{}`: {err}", schema_type), + ) + }) + }, + WordValue::Elements(elements) => { + let felts: Vec = elements + .iter() + .map(|element| { + SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaType::native_felt(), element) + }) + .collect::>() + .map_err(|err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("failed to parse value element as `felt`: {err}"), + ) + })?; + let felts: [Felt; 4] = felts.try_into().expect("length is 4"); + Ok(Word::from(felts)) + }, + WordValue::FullyTyped(word) => Ok(*word), + } +} + +fn parse_composite_elements( + schema: &[FeltSchema; 4], + elements: &[String; 4], + slot_prefix: &StorageValueName, +) -> Result { + let mut felts = [Felt::ZERO; 4]; + for (index, felt_schema) in schema.iter().enumerate() { + let felt_type = felt_schema.felt_type(); + felts[index] = + SCHEMA_TYPE_REGISTRY + .try_parse_felt(&felt_type, &elements[index]) + .map_err(|err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("failed to parse value[{index}] as `{felt_type}`: {err}"), + ) + })?; + } + Ok(Word::from(felts)) +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/slot.rs 
b/crates/miden-protocol/src/account/component/storage/schema/slot.rs new file mode 100644 index 0000000000..6677b5eb05 --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/slot.rs @@ -0,0 +1,129 @@ +use alloc::collections::BTreeMap; +use alloc::string::String; + +use super::super::type_registry::{SchemaRequirement, SchemaType}; +use super::super::{InitStorageData, StorageValueName}; +use super::{MapSlotSchema, ValueSlotSchema, WordSchema}; +use crate::account::{StorageSlot, StorageSlotName}; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// STORAGE SLOT SCHEMA +// ================================================================================================ + +/// Describes the schema for a storage slot. +/// Can describe either a value slot, or a map slot. +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum StorageSlotSchema { + Value(ValueSlotSchema), + Map(MapSlotSchema), +} + +impl StorageSlotSchema { + /// Creates a value slot schema with the given description and word schema. + /// + /// Accepts anything convertible to [`WordSchema`]: a [`SchemaType`] for simple typed slots, + /// a `[FeltSchema; 4]` for composite slots, or a [`WordSchema`] directly. + pub fn value(description: impl Into, word: impl Into) -> Self { + Self::Value(ValueSlotSchema::new(Some(description.into()), word.into())) + } + + /// Creates a map slot schema with the given description and simple key/value types. 
+ pub fn map( + description: impl Into, + key_type: SchemaType, + value_type: SchemaType, + ) -> Self { + Self::Map(MapSlotSchema::new( + Some(description.into()), + None, + WordSchema::new_simple(key_type), + WordSchema::new_simple(value_type), + )) + } + + pub(super) fn collect_init_value_requirements( + &self, + slot_name: &StorageSlotName, + requirements: &mut BTreeMap, + ) -> Result<(), ComponentMetadataError> { + let slot_name = StorageValueName::from_slot_name(slot_name); + match self { + StorageSlotSchema::Value(slot) => { + slot.collect_init_value_requirements(slot_name, requirements) + }, + StorageSlotSchema::Map(_) => Ok(()), + } + } + + /// Builds a [`StorageSlot`] for the specified `slot_name` using the provided initialization + /// data. + pub fn try_build_storage_slot( + &self, + slot_name: &StorageSlotName, + init_storage_data: &InitStorageData, + ) -> Result { + match self { + StorageSlotSchema::Value(slot) => { + let word = slot.try_build_word(init_storage_data, slot_name)?; + Ok(StorageSlot::with_value(slot_name.clone(), word)) + }, + StorageSlotSchema::Map(slot) => { + let storage_map = slot.try_build_map(init_storage_data, slot_name)?; + Ok(StorageSlot::with_map(slot_name.clone(), storage_map)) + }, + } + } + + pub(super) fn validate(&self) -> Result<(), ComponentMetadataError> { + match self { + StorageSlotSchema::Value(slot) => slot.validate()?, + StorageSlotSchema::Map(slot) => slot.validate()?, + } + + Ok(()) + } + + pub(super) fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + match self { + StorageSlotSchema::Value(slot) => { + target.write_u8(0u8); + slot.write_into_with_optional_defaults(target, include_defaults); + }, + StorageSlotSchema::Map(slot) => { + target.write_u8(1u8); + slot.write_into_with_optional_defaults(target, include_defaults); + }, + } + } +} + +impl Serializable for StorageSlotSchema { + fn write_into(&self, target: &mut W) { + 
self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for StorageSlotSchema { + fn read_from(source: &mut R) -> Result { + let variant_tag = source.read_u8()?; + match variant_tag { + 0 => Ok(StorageSlotSchema::Value(ValueSlotSchema::read_from(source)?)), + 1 => Ok(StorageSlotSchema::Map(MapSlotSchema::read_from(source)?)), + _ => Err(DeserializationError::InvalidValue(format!( + "unknown variant tag '{variant_tag}' for StorageSlotSchema" + ))), + } + } +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/tests.rs b/crates/miden-protocol/src/account/component/storage/schema/tests.rs new file mode 100644 index 0000000000..99161c5994 --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/tests.rs @@ -0,0 +1,149 @@ +use alloc::collections::BTreeMap; + +use super::super::{InitStorageData, SchemaType}; +use super::{FeltSchema, MapSlotSchema, ValueSlotSchema, WordSchema}; +use crate::account::{StorageMap, StorageMapKey, StorageSlotName}; +use crate::{Felt, Word}; + +#[test] +fn map_slot_schema_default_values_returns_map() { + let word_schema = WordSchema::new_simple(SchemaType::native_word()); + let mut default_values = BTreeMap::new(); + default_values.insert( + Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), + Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), + ); + let slot = MapSlotSchema::new( + Some("static map".into()), + Some(default_values), + word_schema.clone(), + word_schema, + ); + + let mut expected = BTreeMap::new(); + expected.insert( + Word::from([Felt::new(1), Felt::new(0), Felt::new(0), Felt::new(0)]), + Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), + ); + + assert_eq!(slot.default_values(), Some(expected)); +} + +#[test] +fn value_slot_schema_exposes_felt_schema_types() { + let felt_values = [ + FeltSchema::u8("a"), + FeltSchema::u16("b"), + FeltSchema::u32("c"), + 
FeltSchema::new_typed(SchemaType::new("felt").unwrap(), "d"), + ]; + + let slot = ValueSlotSchema::new(None, WordSchema::new_value(felt_values)); + let WordSchema::Composite { value } = slot.word() else { + panic!("expected composite word schema"); + }; + + assert_eq!(value[0].felt_type(), SchemaType::u8()); + assert_eq!(value[1].felt_type(), SchemaType::u16()); + assert_eq!(value[2].felt_type(), SchemaType::u32()); + assert_eq!(value[3].felt_type(), SchemaType::new("felt").unwrap()); +} + +#[test] +fn map_slot_schema_key_and_value_types() { + let key_schema = WordSchema::new_simple(SchemaType::new("sampling::Key").unwrap()); + + let value_schema = WordSchema::new_value([ + FeltSchema::felt("a"), + FeltSchema::felt("b"), + FeltSchema::felt("c"), + FeltSchema::felt("d"), + ]); + + let slot = MapSlotSchema::new(None, None, key_schema, value_schema); + + assert_eq!( + slot.key_schema(), + &WordSchema::new_simple(SchemaType::new("sampling::Key").unwrap()) + ); + + let WordSchema::Composite { value } = slot.value_schema() else { + panic!("expected composite word schema for map values"); + }; + for felt in value.iter() { + assert_eq!(felt.felt_type(), SchemaType::native_felt()); + } +} + +#[test] +fn value_slot_schema_accepts_typed_word_init_value() { + let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaType::native_word())); + let slot_name: StorageSlotName = "demo::slot".parse().unwrap(); + + let mut init_data = InitStorageData::default(); + init_data.set_value("demo::slot", [1u32, 2, 3, 4]).unwrap(); + + let built = slot.try_build_word(&init_data, &slot_name).unwrap(); + let expected = Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]); + assert_eq!(built, expected); +} + +#[test] +fn value_slot_schema_accepts_felt_typed_word_init_value() { + let slot = ValueSlotSchema::new(None, WordSchema::new_simple(SchemaType::u8())); + let slot_name: StorageSlotName = "demo::u8_word".parse().unwrap(); + + let mut init_data = 
InitStorageData::default(); + init_data.set_value("demo::u8_word", 6u8).unwrap(); + + let built = slot.try_build_word(&init_data, &slot_name).unwrap(); + assert_eq!(built, Word::from([Felt::new(0), Felt::new(0), Felt::new(0), Felt::new(6)])); +} + +#[test] +fn value_slot_schema_accepts_typed_felt_init_value_in_composed_word() { + let word = WordSchema::new_value([ + FeltSchema::u8("a"), + FeltSchema::felt("b").with_default(Felt::new(2)), + FeltSchema::felt("c").with_default(Felt::new(3)), + FeltSchema::felt("d").with_default(Felt::new(4)), + ]); + let slot = ValueSlotSchema::new(None, word); + let slot_name: StorageSlotName = "demo::slot".parse().unwrap(); + + let mut init_data = InitStorageData::default(); + init_data.set_value("demo::slot.a", 1u8).unwrap(); + + let built = slot.try_build_word(&init_data, &slot_name).unwrap(); + assert_eq!(built, Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)])); +} + +#[test] +fn map_slot_schema_accepts_typed_map_init_value() { + let word_schema = WordSchema::new_simple(SchemaType::native_word()); + let slot = MapSlotSchema::new(None, None, word_schema.clone(), word_schema); + let slot_name: StorageSlotName = "demo::map".parse().unwrap(); + + let mut init_data = InitStorageData::default(); + init_data + .insert_map_entry("demo::map", [1u32, 0, 0, 0], [10u32, 11, 12, 13]) + .unwrap(); + + let built = slot.try_build_map(&init_data, &slot_name).unwrap(); + let expected = StorageMap::with_entries([( + StorageMapKey::from_array([1, 0, 0, 0]), + Word::from([Felt::new(10), Felt::new(11), Felt::new(12), Felt::new(13)]), + )]) + .unwrap(); + assert_eq!(built, expected); +} + +#[test] +fn map_slot_schema_missing_init_value_defaults_to_empty_map() { + let word_schema = WordSchema::new_simple(SchemaType::native_word()); + let slot = MapSlotSchema::new(None, None, word_schema.clone(), word_schema); + let built = slot + .try_build_map(&InitStorageData::default(), &"demo::map".parse().unwrap()) + .unwrap(); + 
assert_eq!(built, StorageMap::new()); +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs b/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs new file mode 100644 index 0000000000..c472a45778 --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/value_slot.rs @@ -0,0 +1,92 @@ +use alloc::collections::BTreeMap; +use alloc::string::String; + +use super::super::type_registry::SchemaRequirement; +use super::super::{InitStorageData, StorageValueName}; +use super::{WordSchema, validate_description_ascii}; +use crate::Word; +use crate::account::StorageSlotName; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// VALUE SLOT SCHEMA +// ================================================================================================ + +/// Describes the schema for a storage value slot. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct ValueSlotSchema { + description: Option, + word: WordSchema, +} + +impl ValueSlotSchema { + pub fn new(description: Option, word: WordSchema) -> Self { + Self { description, word } + } + + pub fn description(&self) -> Option<&String> { + self.description.as_ref() + } + + pub fn word(&self) -> &WordSchema { + &self.word + } + + pub(super) fn collect_init_value_requirements( + &self, + value_name: StorageValueName, + requirements: &mut BTreeMap, + ) -> Result<(), ComponentMetadataError> { + self.word.collect_init_value_requirements( + value_name, + self.description.clone(), + requirements, + ) + } + + /// Builds a [Word] from the provided initialization data using the inner word schema. 
+ pub fn try_build_word( + &self, + init_storage_data: &InitStorageData, + slot_name: &StorageSlotName, + ) -> Result { + self.word.try_build_word(init_storage_data, slot_name) + } + + pub(super) fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + target.write(&self.description); + self.word.write_into_with_optional_defaults(target, include_defaults); + } + + pub(super) fn validate(&self) -> Result<(), ComponentMetadataError> { + if let Some(description) = self.description.as_deref() { + validate_description_ascii(description)?; + } + self.word.validate()?; + Ok(()) + } +} + +impl Serializable for ValueSlotSchema { + fn write_into(&self, target: &mut W) { + self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for ValueSlotSchema { + fn read_from(source: &mut R) -> Result { + let description = Option::::read_from(source)?; + let word = WordSchema::read_from(source)?; + Ok(ValueSlotSchema::new(description, word)) + } +} diff --git a/crates/miden-protocol/src/account/component/storage/schema/word.rs b/crates/miden-protocol/src/account/component/storage/schema/word.rs new file mode 100644 index 0000000000..de609ebb25 --- /dev/null +++ b/crates/miden-protocol/src/account/component/storage/schema/word.rs @@ -0,0 +1,299 @@ +use alloc::collections::BTreeMap; +use alloc::string::{String, ToString}; + +use super::super::type_registry::{SCHEMA_TYPE_REGISTRY, SchemaRequirement, SchemaType}; +use super::super::{InitStorageData, StorageValueName}; +use super::FeltSchema; +use crate::account::StorageSlotName; +use crate::errors::ComponentMetadataError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; + +// WORD SCHEMA +// ================================================================================================ + +/// Defines how a word slot is described within the component's storage schema. 
+/// +/// Each word schema can either describe a whole-word typed value supplied at instantiation time +/// (`Simple`) or a composite word that explicitly defines each felt element (`Composite`). +#[derive(Debug, Clone, PartialEq, Eq)] +#[allow(clippy::large_enum_variant)] +pub enum WordSchema { + /// A whole-word typed value supplied at instantiation time. + Simple { + r#type: SchemaType, + default_value: Option, + }, + /// A composed word that may mix defaults and typed fields. + Composite { value: [FeltSchema; 4] }, +} + +impl WordSchema { + pub fn new_simple(r#type: SchemaType) -> Self { + WordSchema::Simple { r#type, default_value: None } + } + + pub fn new_simple_with_default(r#type: SchemaType, default_value: Word) -> Self { + WordSchema::Simple { + r#type, + default_value: Some(default_value), + } + } + + pub fn new_value(value: impl Into<[FeltSchema; 4]>) -> Self { + WordSchema::Composite { value: value.into() } + } + + pub fn value(&self) -> Option<&[FeltSchema; 4]> { + match self { + WordSchema::Composite { value } => Some(value), + WordSchema::Simple { .. } => None, + } + } + + /// Returns the schema type associated with whole-word init-supplied values. + pub fn word_type(&self) -> SchemaType { + match self { + WordSchema::Simple { r#type, .. } => r#type.clone(), + WordSchema::Composite { .. 
} => SchemaType::native_word(), + } + } + + pub(super) fn collect_init_value_requirements( + &self, + value_name: StorageValueName, + description: Option, + requirements: &mut BTreeMap, + ) -> Result<(), ComponentMetadataError> { + match self { + WordSchema::Simple { r#type, default_value } => { + if *r#type == SchemaType::void() { + return Ok(()); + } + + let default_value = default_value.map(|word| { + SCHEMA_TYPE_REGISTRY.display_word(r#type, word).value().to_string() + }); + + if requirements + .insert( + value_name.clone(), + SchemaRequirement { + description, + r#type: r#type.clone(), + default_value, + }, + ) + .is_some() + { + return Err(ComponentMetadataError::DuplicateInitValueName(value_name)); + } + + Ok(()) + }, + WordSchema::Composite { value } => { + for felt in value.iter() { + felt.collect_init_value_requirements(value_name.clone(), requirements)?; + } + Ok(()) + }, + } + } + + /// Validates that the defined word type exists and its inner felts (if any) are valid. + pub(super) fn validate(&self) -> Result<(), ComponentMetadataError> { + let type_exists = SCHEMA_TYPE_REGISTRY.contains_word_type(&self.word_type()); + if !type_exists { + return Err(ComponentMetadataError::InvalidType( + self.word_type().to_string(), + "Word".into(), + )); + } + + if let WordSchema::Simple { + r#type, + default_value: Some(default_value), + } = self + { + SCHEMA_TYPE_REGISTRY + .validate_word_value(r#type, *default_value) + .map_err(ComponentMetadataError::StorageValueParsingError)?; + } + + if let Some(felts) = self.value() { + for felt in felts { + felt.validate()?; + } + } + + Ok(()) + } + + /// Builds a [`Word`] from the provided initialization data according to this schema. + /// + /// For simple schemas, expects a direct slot value (not map or field entries). + /// For composite schemas, either parses a single value or builds the word from individual + /// felt entries. 
+ pub(crate) fn try_build_word( + &self, + init_storage_data: &InitStorageData, + slot_name: &StorageSlotName, + ) -> Result { + let slot_prefix = StorageValueName::from_slot_name(slot_name); + let slot_value = init_storage_data.slot_value_entry(slot_name); + let has_fields = init_storage_data.has_field_entries_for_slot(slot_name); + + if init_storage_data.map_entries(slot_name).is_some() { + return Err(ComponentMetadataError::InvalidInitStorageValue( + slot_prefix, + "expected a value, got a map".into(), + )); + } + + match self { + WordSchema::Simple { r#type, default_value } => { + if has_fields { + return Err(ComponentMetadataError::InvalidInitStorageValue( + slot_prefix, + "expected a value, got field entries".into(), + )); + } + match slot_value { + Some(value) => { + super::parse_storage_value_with_schema(self, value, &slot_prefix) + }, + None => { + if *r#type == SchemaType::void() { + Ok(Word::empty()) + } else { + default_value.as_ref().copied().ok_or_else(|| { + ComponentMetadataError::InitValueNotProvided(slot_prefix) + }) + } + }, + } + }, + WordSchema::Composite { value } => { + if let Some(value) = slot_value { + if has_fields { + return Err(ComponentMetadataError::InvalidInitStorageValue( + slot_prefix, + "expected a single value, got both value and field entries".into(), + )); + } + return super::parse_storage_value_with_schema(self, value, &slot_prefix); + } + + let mut result = [Felt::ZERO; 4]; + for (index, felt_schema) in value.iter().enumerate() { + result[index] = felt_schema.try_build_felt(init_storage_data, slot_name)?; + } + Ok(Word::from(result)) + }, + } + } + + pub(crate) fn validate_word_value( + &self, + slot_prefix: &StorageValueName, + label: &str, + word: Word, + ) -> Result<(), ComponentMetadataError> { + match self { + WordSchema::Simple { r#type, .. 
} => { + SCHEMA_TYPE_REGISTRY.validate_word_value(r#type, word).map_err(|err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("{label} does not match `{}`: {err}", r#type), + ) + }) + }, + WordSchema::Composite { value } => { + for (index, felt_schema) in value.iter().enumerate() { + let felt_type = felt_schema.felt_type(); + SCHEMA_TYPE_REGISTRY.validate_felt_value(&felt_type, word[index]).map_err( + |err| { + ComponentMetadataError::InvalidInitStorageValue( + slot_prefix.clone(), + format!("{label}[{index}] does not match `{felt_type}`: {err}"), + ) + }, + )?; + } + + Ok(()) + }, + } + } + + pub(super) fn write_into_with_optional_defaults( + &self, + target: &mut W, + include_defaults: bool, + ) { + match self { + WordSchema::Simple { r#type, default_value } => { + target.write_u8(0); + target.write(r#type); + let default_value = if include_defaults { *default_value } else { None }; + target.write(default_value); + }, + WordSchema::Composite { value } => { + target.write_u8(1); + for felt in value.iter() { + felt.write_into_with_optional_defaults(target, include_defaults); + } + }, + } + } +} + +impl Serializable for WordSchema { + fn write_into(&self, target: &mut W) { + self.write_into_with_optional_defaults(target, true); + } +} + +impl Deserializable for WordSchema { + fn read_from(source: &mut R) -> Result { + let tag = source.read_u8()?; + match tag { + 0 => { + let r#type = SchemaType::read_from(source)?; + let default_value = Option::::read_from(source)?; + Ok(WordSchema::Simple { r#type, default_value }) + }, + 1 => { + let value = <[FeltSchema; 4]>::read_from(source)?; + Ok(WordSchema::Composite { value }) + }, + other => Err(DeserializationError::InvalidValue(format!( + "unknown tag '{other}' for WordSchema" + ))), + } + } +} + +impl From for WordSchema { + fn from(r#type: SchemaType) -> Self { + WordSchema::new_simple(r#type) + } +} + +impl From<[FeltSchema; 4]> for WordSchema { + fn from(value: [FeltSchema; 4]) 
-> Self { + WordSchema::new_value(value) + } +} + +impl From<[Felt; 4]> for WordSchema { + fn from(value: [Felt; 4]) -> Self { + WordSchema::new_simple_with_default(SchemaType::native_word(), Word::from(value)) + } +} diff --git a/crates/miden-protocol/src/account/component/storage/toml/mod.rs b/crates/miden-protocol/src/account/component/storage/toml/mod.rs index 9d40674cb0..a5850d1afb 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/mod.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/mod.rs @@ -18,9 +18,9 @@ use super::super::{ WordValue, }; use crate::account::component::storage::type_registry::SCHEMA_TYPE_REGISTRY; -use crate::account::component::{AccountComponentMetadata, SchemaTypeId}; +use crate::account::component::{AccountComponentMetadata, SchemaType}; use crate::account::{AccountType, StorageSlotName}; -use crate::errors::AccountComponentTemplateError; +use crate::errors::ComponentMetadataError; mod init_storage_data; mod serde_impls; @@ -51,12 +51,12 @@ impl AccountComponentMetadata { /// - If deserialization fails /// - If the schema specifies storage slots with duplicates. /// - If the schema contains invalid slot definitions. 
- pub fn from_toml(toml_string: &str) -> Result { + pub fn from_toml(toml_string: &str) -> Result { let raw: RawAccountComponentMetadata = toml::from_str(toml_string) - .map_err(AccountComponentTemplateError::TomlDeserializationError)?; + .map_err(ComponentMetadataError::TomlDeserializationError)?; if !raw.description.is_ascii() { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "description must contain only ASCII characters".to_string(), )); } @@ -69,19 +69,15 @@ impl AccountComponentMetadata { } let storage_schema = StorageSchema::new(fields)?; - Ok(Self::new( - raw.name, - raw.description, - raw.version, - raw.supported_types, - storage_schema, - )) + Ok(Self::new(raw.name, raw.supported_types) + .with_description(raw.description) + .with_version(raw.version) + .with_storage_schema(storage_schema)) } /// Serializes the account component metadata into a TOML string. - pub fn to_toml(&self) -> Result { - let toml = - toml::to_string(self).map_err(AccountComponentTemplateError::TomlSerializationError)?; + pub fn to_toml(&self) -> Result { + let toml = toml::to_string(self).map_err(ComponentMetadataError::TomlSerializationError)?; Ok(toml) } } @@ -106,8 +102,8 @@ struct RawStorageSchema { /// Storage slot type descriptor. /// /// This field accepts either: -/// - a type identifier (e.g. `"word"`, `"u16"`, `"miden::standards::auth::falcon512_rpo::pub_key"`) -/// for simple word slots, +/// - a type identifier (e.g. `"word"`, `"u16"`, `"miden::standards::auth::pub_key"`) for simple +/// word slots, /// - an array of 4 [`FeltSchema`] descriptors for composite word slots, or /// - a table `{ key = ..., value = ... }` for map slots. 
#[derive(Debug, Clone, Deserialize, Serialize)] @@ -121,7 +117,7 @@ enum RawSlotType { #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(untagged)] enum RawWordType { - TypeIdentifier(SchemaTypeId), + TypeIdentifier(SchemaType), FeltSchemaArray(Vec), } @@ -278,7 +274,7 @@ impl RawStorageSlotSchema { /// Converts the raw representation into a tuple of the storage slot name and its schema. fn try_into_slot_schema( self, - ) -> Result<(StorageSlotName, StorageSlotSchema), AccountComponentTemplateError> { + ) -> Result<(StorageSlotName, StorageSlotSchema), ComponentMetadataError> { let RawStorageSlotSchema { name, description, @@ -289,7 +285,7 @@ impl RawStorageSlotSchema { let slot_name_raw = name; let slot_name = StorageSlotName::new(slot_name_raw.clone()).map_err(|err| { - AccountComponentTemplateError::InvalidSchema(format!( + ComponentMetadataError::InvalidSchema(format!( "invalid storage slot name `{slot_name_raw}`: {err}" )) })?; @@ -300,7 +296,7 @@ impl RawStorageSlotSchema { let slot_prefix = StorageValueName::from_slot_name(&slot_name); if default_value.is_some() && default_values.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "storage slot schema cannot define both `default-value` and `default-values`" .into(), )); @@ -309,7 +305,7 @@ impl RawStorageSlotSchema { match r#type { RawSlotType::Map(map_type) => { if default_value.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "map slots cannot define `default-value`".into(), )); } @@ -342,7 +338,7 @@ impl RawStorageSlotSchema { RawSlotType::Word(word_type) => { if default_values.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "`default-values` can be specified only for map slots (use `type = { ... 
}`)" .into(), )); @@ -351,7 +347,7 @@ impl RawStorageSlotSchema { match word_type { RawWordType::TypeIdentifier(r#type) => { if r#type.as_str() == "map" { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "value slots cannot use `type = \"map\"`; use `type = { key = , value = }` instead" .into(), )); @@ -384,7 +380,7 @@ impl RawStorageSlotSchema { RawWordType::FeltSchemaArray(elements) => { if default_value.is_some() { - return Err(AccountComponentTemplateError::InvalidSchema( + return Err(ComponentMetadataError::InvalidSchema( "composite word slots cannot define `default-value`".into(), )); } @@ -406,7 +402,7 @@ impl RawStorageSlotSchema { fn parse_word_schema( raw: RawWordType, label: &str, - ) -> Result { + ) -> Result { match raw { RawWordType::TypeIdentifier(r#type) => Ok(WordSchema::new_simple(r#type)), RawWordType::FeltSchemaArray(elements) => { @@ -419,9 +415,9 @@ impl RawStorageSlotSchema { fn parse_felt_schema_array( elements: Vec, label: &str, - ) -> Result<[FeltSchema; 4], AccountComponentTemplateError> { + ) -> Result<[FeltSchema; 4], ComponentMetadataError> { if elements.len() != 4 { - return Err(AccountComponentTemplateError::InvalidSchema(format!( + return Err(ComponentMetadataError::InvalidSchema(format!( "{label} must be an array of 4 elements, got {}", elements.len() ))); @@ -434,15 +430,13 @@ impl RawStorageSlotSchema { key_schema: &WordSchema, value_schema: &WordSchema, slot_prefix: &StorageValueName, - ) -> Result, AccountComponentTemplateError> { + ) -> Result, ComponentMetadataError> { let mut map = BTreeMap::new(); let parse = |schema: &WordSchema, raw: &WordValue, label: &str| { super::schema::parse_storage_value_with_schema(schema, raw, slot_prefix).map_err( |err| { - AccountComponentTemplateError::InvalidSchema(format!( - "invalid map `{label}`: {err}" - )) + ComponentMetadataError::InvalidSchema(format!("invalid map `{label}`: {err}")) }, ) }; @@ -455,7 +449,7 @@ impl 
RawStorageSlotSchema { let value = parse(value_schema, &entry.value, &value_label)?; if map.insert(key, value).is_some() { - return Err(AccountComponentTemplateError::InvalidSchema(format!( + return Err(ComponentMetadataError::InvalidSchema(format!( "map storage slot `default-values[{index}]` contains a duplicate key" ))); } @@ -468,23 +462,23 @@ impl RawStorageSlotSchema { impl WordValue { pub(super) fn try_parse_as_typed_word( &self, - schema_type: &SchemaTypeId, + schema_type: &SchemaType, slot_prefix: &StorageValueName, label: &str, - ) -> Result { + ) -> Result { let word = match self { WordValue::FullyTyped(word) => *word, WordValue::Atomic(value) => SCHEMA_TYPE_REGISTRY .try_parse_word(schema_type, value) - .map_err(AccountComponentTemplateError::StorageValueParsingError)?, + .map_err(ComponentMetadataError::StorageValueParsingError)?, WordValue::Elements(elements) => { let felts = elements .iter() .map(|element| { - SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaTypeId::native_felt(), element) + SCHEMA_TYPE_REGISTRY.try_parse_felt(&SchemaType::native_felt(), element) }) .collect::, _>>() - .map_err(AccountComponentTemplateError::StorageValueParsingError)?; + .map_err(ComponentMetadataError::StorageValueParsingError)?; let felts: [Felt; 4] = felts.try_into().expect("length is 4"); Word::from(felts) }, @@ -498,7 +492,7 @@ impl WordValue { Ok(word) } - pub(super) fn from_word(schema_type: &SchemaTypeId, word: Word) -> Self { + pub(super) fn from_word(schema_type: &SchemaType, word: Word) -> Self { WordValue::Atomic(SCHEMA_TYPE_REGISTRY.display_word(schema_type, word).value().to_string()) } } diff --git a/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs b/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs index 3dc8f9033c..b25eb91538 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/serde_impls.rs @@ -5,7 +5,7 @@ use 
serde::ser::{Error as SerError, SerializeStruct}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use super::super::type_registry::SCHEMA_TYPE_REGISTRY; -use super::super::{FeltSchema, SchemaTypeId, WordValue}; +use super::super::{FeltSchema, SchemaType, WordValue}; // FELT SCHEMA SERIALIZATION // ================================================================================================ @@ -15,9 +15,9 @@ impl Serialize for FeltSchema { where S: Serializer, { - if self.felt_type() == SchemaTypeId::void() { + if self.felt_type() == SchemaType::void() { let mut state = serializer.serialize_struct("FeltSchema", 2)?; - state.serialize_field("type", &SchemaTypeId::void())?; + state.serialize_field("type", &SchemaType::void())?; if let Some(description) = self.description() { state.serialize_field("description", description)?; } @@ -33,7 +33,7 @@ impl Serialize for FeltSchema { if let Some(description) = self.description() { state.serialize_field("description", description)?; } - if self.felt_type() != SchemaTypeId::native_felt() { + if self.felt_type() != SchemaType::native_felt() { state.serialize_field("type", &self.felt_type())?; } if let Some(default_value) = self.default_value() { @@ -61,12 +61,12 @@ impl<'de> Deserialize<'de> for FeltSchema { #[serde(default, rename = "default-value")] default_value: Option, #[serde(default, rename = "type")] - r#type: Option, + r#type: Option, } let raw = RawFeltSchema::deserialize(deserializer)?; - let felt_type = raw.r#type.unwrap_or_else(SchemaTypeId::native_felt); + let felt_type = raw.r#type.unwrap_or_else(SchemaType::native_felt); let description = raw.description.and_then(|description| { if description.trim().is_empty() { @@ -76,7 +76,7 @@ impl<'de> Deserialize<'de> for FeltSchema { } }); - if felt_type == SchemaTypeId::void() { + if felt_type == SchemaType::void() { if raw.name.is_some() { return Err(D::Error::custom("`type = \"void\"` elements must omit `name`")); } @@ -108,12 +108,10 @@ impl<'de> 
Deserialize<'de> for FeltSchema { }) .transpose()?; - let schema = match default_value { - Some(default_value) => { - FeltSchema::new_typed_with_default(felt_type, name, default_value) - }, - None => FeltSchema::new_typed(felt_type, name), - }; + let mut schema = FeltSchema::new_typed(felt_type, name); + if let Some(default_value) = default_value { + schema = schema.with_default(default_value); + } Ok(match description { Some(description) => schema.with_description(description), None => schema, diff --git a/crates/miden-protocol/src/account/component/storage/toml/tests.rs b/crates/miden-protocol/src/account/component/storage/toml/tests.rs index dd6ead16cf..221c18c0a6 100644 --- a/crates/miden-protocol/src/account/component/storage/toml/tests.rs +++ b/crates/miden-protocol/src/account/component/storage/toml/tests.rs @@ -1,7 +1,6 @@ use alloc::string::ToString; use core::error::Error; -use miden_air::FieldElement; use miden_core::{Felt, Word}; use crate::account::component::toml::init_storage_data::InitStorageDataError; @@ -9,16 +8,16 @@ use crate::account::component::{ AccountComponentMetadata, InitStorageData, InitStorageDataError as CoreInitStorageDataError, - SchemaTypeId, + SchemaType, StorageSlotSchema, StorageValueName, StorageValueNameError, WordSchema, WordValue, }; -use crate::account::{AccountStorage, StorageSlotContent, StorageSlotName}; +use crate::account::{StorageMapKey, StorageSlotContent, StorageSlotName}; use crate::asset::TokenSymbol; -use crate::errors::AccountComponentTemplateError; +use crate::errors::ComponentMetadataError; #[test] fn from_toml_str_with_nested_table_and_flattened() { @@ -271,7 +270,7 @@ fn metadata_from_toml_rejects_non_ascii_component_description() { assert_matches::assert_matches!( AccountComponentMetadata::from_toml(toml_str), - Err(AccountComponentTemplateError::InvalidSchema(_)) + Err(ComponentMetadataError::InvalidSchema(_)) ); } @@ -291,7 +290,7 @@ fn metadata_from_toml_rejects_non_ascii_slot_description() { 
assert_matches::assert_matches!( AccountComponentMetadata::from_toml(toml_str), - Err(AccountComponentTemplateError::InvalidSchema(_)) + Err(ComponentMetadataError::InvalidSchema(_)) ); } @@ -435,30 +434,7 @@ fn metadata_from_toml_rejects_typed_fields_in_static_map_values() { assert_matches::assert_matches!( AccountComponentMetadata::from_toml(toml_str), - Err(AccountComponentTemplateError::TomlDeserializationError(_)) - ); -} - -#[test] -fn metadata_from_toml_rejects_reserved_slot_names() { - let reserved_slot = AccountStorage::faucet_sysdata_slot().as_str(); - - let toml_str = format!( - r#" - name = "Test Component" - description = "Test description" - version = "0.1.0" - supported-types = [] - - [[storage.slots]] - name = "{reserved_slot}" - type = "word" - "# - ); - - assert_matches::assert_matches!( - AccountComponentMetadata::from_toml(&toml_str), - Err(AccountComponentTemplateError::ReservedSlotName(_)) + Err(ComponentMetadataError::TomlDeserializationError(_)) ); } @@ -521,7 +497,7 @@ fn metadata_toml_round_trip_composed_slot_with_typed_fields() { .remove(&"demo::composed.a".parse::().unwrap()) .unwrap() .r#type, - SchemaTypeId::u16() + SchemaType::u16() ); let round_trip_toml = original.to_toml().expect("serialize to toml"); @@ -545,7 +521,7 @@ fn metadata_toml_round_trip_typed_slots() { [[storage.slots]] name = "demo::typed_map" - type = { key = "miden::standards::auth::falcon512_rpo::pub_key", value = "miden::standards::auth::falcon512_rpo::pub_key" } + type = { key = "miden::standards::auth::pub_key", value = "miden::standards::auth::pub_key" } "#; let metadata = @@ -561,7 +537,7 @@ fn metadata_toml_round_trip_typed_slots() { _ => panic!("expected value slot"), }; - let typed_value = SchemaTypeId::native_word(); + let typed_value = SchemaType::native_word(); assert_eq!(value_slot.word(), &WordSchema::new_simple(typed_value.clone())); let map_slot = schema @@ -573,7 +549,7 @@ fn metadata_toml_round_trip_typed_slots() { _ => panic!("expected map slot"), 
}; - let pub_key_type = SchemaTypeId::new("miden::standards::auth::falcon512_rpo::pub_key").unwrap(); + let pub_key_type = SchemaType::new("miden::standards::auth::pub_key").unwrap(); assert_eq!(map_slot.key_schema(), &WordSchema::new_simple(pub_key_type.clone())); assert_eq!(map_slot.value_schema(), &WordSchema::new_simple(pub_key_type)); @@ -605,11 +581,11 @@ fn metadata_toml_round_trip_typed_slots() { let map_type = typed_map_entry.get("type").unwrap().as_table().unwrap(); assert_eq!( map_type.get("key").unwrap().as_str().unwrap(), - "miden::standards::auth::falcon512_rpo::pub_key" + "miden::standards::auth::pub_key" ); assert_eq!( map_type.get("value").unwrap().as_str().unwrap(), - "miden::standards::auth::falcon512_rpo::pub_key" + "miden::standards::auth::pub_key" ); } @@ -636,7 +612,7 @@ fn extensive_schema_metadata_and_init_toml_example() { [[storage.slots]] name = "demo::owner_pub_key" description = "Owner public key" - type = "miden::standards::auth::falcon512_rpo::pub_key" + type = "miden::standards::auth::pub_key" # simple felt-typed word slot (parsed as felt, stored as [0,0,0,]) [[storage.slots]] @@ -699,8 +675,8 @@ fn extensive_schema_metadata_and_init_toml_example() { else { panic!("expected map slot schema"); }; - assert_eq!(default_map.key_schema(), &WordSchema::new_simple(SchemaTypeId::native_word())); - assert_eq!(default_map.value_schema(), &WordSchema::new_simple(SchemaTypeId::native_word())); + assert_eq!(default_map.key_schema(), &WordSchema::new_simple(SchemaType::native_word())); + assert_eq!(default_map.value_schema(), &WordSchema::new_simple(SchemaType::native_word())); // `type.key`/`type.value` parse as schema/type descriptors (not literal words). 
let typed_map_new_name = StorageSlotName::new("demo::typed_map_new").unwrap(); @@ -709,7 +685,7 @@ fn extensive_schema_metadata_and_init_toml_example() { else { panic!("expected map slot schema"); }; - assert_eq!(typed_map_new.value_schema(), &WordSchema::new_simple(SchemaTypeId::u16())); + assert_eq!(typed_map_new.value_schema(), &WordSchema::new_simple(SchemaType::u16())); assert!(matches!(typed_map_new.key_schema(), WordSchema::Composite { .. })); // used storage slots @@ -731,7 +707,7 @@ fn extensive_schema_metadata_and_init_toml_example() { .expect("symbol should be reported with a default value"); assert_eq!( symbol_requirement.r#type, - SchemaTypeId::new("miden::standards::fungible_faucets::metadata::token_symbol").unwrap() + SchemaType::new("miden::standards::fungible_faucets::metadata::token_symbol").unwrap() ); assert_eq!(symbol_requirement.default_value.as_deref(), Some("TST")); assert!( @@ -802,9 +778,12 @@ fn extensive_schema_metadata_and_init_toml_example() { panic!("expected map slot for static_map"); }; assert_eq!(static_map.num_entries(), 2); - assert_eq!(static_map.get(&Word::parse("0x1").unwrap()), Word::parse("0x10").unwrap()); assert_eq!( - static_map.get(&Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(2)])), + static_map.get(&StorageMapKey::from_raw(Word::parse("0x1").unwrap())), + Word::parse("0x10").unwrap() + ); + assert_eq!( + static_map.get(&StorageMapKey::from_array([0, 0, 0, 2])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(32)]) ); @@ -850,9 +829,8 @@ fn extensive_schema_metadata_and_init_toml_example() { }; assert_eq!(typed_map_new_contents.num_entries(), 2); - let key1 = Word::from([Felt::new(1), Felt::new(2), Felt::ZERO, Felt::ZERO]); assert_eq!( - typed_map_new_contents.get(&key1), + typed_map_new_contents.get(&StorageMapKey::from_array([1, 2, 0, 0])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(16)]) ); @@ -877,12 +855,18 @@ fn extensive_schema_metadata_and_init_toml_example() { 
panic!("expected map slot for static_map"); }; assert_eq!(static_map.num_entries(), 3); - assert_eq!(static_map.get(&Word::parse("0x1").unwrap()), Word::parse("0x99").unwrap()); assert_eq!( - static_map.get(&Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(2)])), + static_map.get(&StorageMapKey::from_raw(Word::parse("0x1").unwrap())), + Word::parse("0x99").unwrap() + ); + assert_eq!( + static_map.get(&StorageMapKey::from_array([0, 0, 0, 2])), Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(32)]) ); - assert_eq!(static_map.get(&Word::parse("0x3").unwrap()), Word::parse("0x30").unwrap()); + assert_eq!( + static_map.get(&StorageMapKey::from_raw(Word::parse("0x3").unwrap())), + Word::parse("0x30").unwrap() + ); } #[test] @@ -917,7 +901,7 @@ fn typed_map_init_entries_are_validated() { assert_matches::assert_matches!( metadata.storage_schema().build_storage_slots(&init_data), - Err(AccountComponentTemplateError::InvalidInitStorageValue(name, msg)) + Err(ComponentMetadataError::InvalidInitStorageValue(name, msg)) if &name.to_string() == "demo::typed_map" && msg.contains("void") ); } @@ -958,5 +942,5 @@ fn typed_map_supports_non_numeric_value_types() { let key = Word::parse("0x1").unwrap(); let symbol_felt: Felt = TokenSymbol::new("BTC").unwrap().into(); let expected_value = Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, symbol_felt]); - assert_eq!(map.get(&key), expected_value); + assert_eq!(map.get(&StorageMapKey::from_raw(key)), expected_value); } diff --git a/crates/miden-protocol/src/account/component/storage/type_registry.rs b/crates/miden-protocol/src/account/component/storage/type_registry.rs index 45512f5e95..6db8c9716c 100644 --- a/crates/miden-protocol/src/account/component/storage/type_registry.rs +++ b/crates/miden-protocol/src/account/component/storage/type_registry.rs @@ -3,14 +3,20 @@ use alloc::collections::BTreeMap; use alloc::string::{String, ToString}; use core::error::Error; use core::fmt::{self, Display}; +use core::str::FromStr; 
-use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_core::{Felt, FieldElement, Word}; -use miden_crypto::dsa::{ecdsa_k256_keccak, falcon512_rpo}; -use miden_processor::DeserializationError; +use miden_core::{Felt, Word}; use thiserror::Error; +use crate::account::auth::{AuthScheme, PublicKey}; use crate::asset::TokenSymbol; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::utils::sync::LazyLock; /// A global registry for schema type converters. @@ -23,11 +29,12 @@ pub static SCHEMA_TYPE_REGISTRY: LazyLock = LazyLock::new(|| registry.register_felt_type::(); registry.register_felt_type::(); registry.register_felt_type::(); + registry.register_felt_type::(); registry.register_felt_type::(); registry.register_felt_type::(); + registry.register_felt_type::(); registry.register_word_type::(); - registry.register_word_type::(); - registry.register_word_type::(); + registry.register_word_type::(); registry }); @@ -43,24 +50,24 @@ pub enum SchemaTypeError { #[error("conversion error: {0}")] ConversionError(String), #[error("felt type ` {0}` not found in the type registry")] - FeltTypeNotFound(SchemaTypeId), + FeltTypeNotFound(SchemaType), #[error("invalid type name `{0}`: {1}")] InvalidTypeName(String, String), #[error("failed to parse input `{input}` as `{schema_type}`")] ParseError { input: String, - schema_type: SchemaTypeId, + schema_type: SchemaType, source: Box, }, #[error("word type ` {0}` not found in the type registry")] - WordTypeNotFound(SchemaTypeId), + WordTypeNotFound(SchemaType), } impl SchemaTypeError { /// Creates a [`SchemaTypeError::ParseError`]. 
pub fn parse( input: impl Into, - schema_type: SchemaTypeId, + schema_type: SchemaType, source: impl Error + Send + Sync + 'static, ) -> Self { SchemaTypeError::ParseError { @@ -74,22 +81,23 @@ impl SchemaTypeError { // SCHEMA TYPE // ================================================================================================ -/// A newtype wrapper around a `String`, representing a schema type identifier. +/// A newtype wrapper around a `String`, representing a schema type. /// -/// A valid schema identifier is a name in the style of Rust namespaces, composed of one or more +/// A valid schema type is a name in the style of Rust namespaces, composed of one or more /// non-empty segments separated by `::`. Each segment can contain only ASCII alphanumerics or `_`. /// /// Some examples: /// - `u32` /// - `felt` -/// - `miden::standards::auth::falcon512_rpo::pub_key` +/// - `miden::standards::auth::pub_key` +/// - `miden::standards::auth::scheme` #[derive(Debug, Clone, PartialEq, Eq, Ord, PartialOrd)] #[cfg_attr(feature = "std", derive(::serde::Deserialize, ::serde::Serialize))] #[cfg_attr(feature = "std", serde(transparent))] -pub struct SchemaTypeId(String); +pub struct SchemaType(String); -impl SchemaTypeId { - /// Creates a new [`SchemaTypeId`] from a `String`. +impl SchemaType { + /// Creates a new [`SchemaType`] from a `String`. /// /// The name must follow a Rust-style namespace format, consisting of one or more segments /// (non-empty, and alphanumerical) separated by double-colon (`::`) delimiters. 
@@ -104,14 +112,14 @@ impl SchemaTypeId { if s.is_empty() { return Err(SchemaTypeError::InvalidTypeName( s.clone(), - "schema type identifier is empty".to_string(), + "schema type is empty".to_string(), )); } for segment in s.split("::") { if segment.is_empty() { return Err(SchemaTypeError::InvalidTypeName( s.clone(), - "empty segment in schema type identifier".to_string(), + "empty segment in schema type".to_string(), )); } if !segment.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') { @@ -124,36 +132,57 @@ impl SchemaTypeId { Ok(Self(s)) } - /// Returns the schema type identifier for the `void` type. + /// Returns the schema type for the `void` type. /// /// The `void` type always parses to `0` and is intended to model reserved or padding felts. - pub fn void() -> SchemaTypeId { - SchemaTypeId::new("void").expect("type is well formed") + pub fn void() -> SchemaType { + SchemaType::new("void").expect("type is well formed") } - /// Returns the schema type identifier for the native [`Felt`] type. - pub fn native_felt() -> SchemaTypeId { - SchemaTypeId::new("felt").expect("type is well formed") + /// Returns the schema type for the native [`Felt`] type. + pub fn native_felt() -> SchemaType { + SchemaType::new("felt").expect("type is well formed") } - /// Returns the schema type identifier for the native [`Word`] type. - pub fn native_word() -> SchemaTypeId { - SchemaTypeId::new("word").expect("type is well formed") + /// Returns the schema type for the native [`Word`] type. + pub fn native_word() -> SchemaType { + SchemaType::new("word").expect("type is well formed") } - /// Returns the schema type identifier for the native `u8` type. - pub fn u8() -> SchemaTypeId { - SchemaTypeId::new("u8").expect("type is well formed") + /// Returns the schema type for the native `u8` type. + pub fn u8() -> SchemaType { + SchemaType::new("u8").expect("type is well formed") } - /// Returns the schema type identifier for the native `u16` type. 
- pub fn u16() -> SchemaTypeId { - SchemaTypeId::new("u16").expect("type is well formed") + /// Returns the schema type for the native `u16` type. + pub fn u16() -> SchemaType { + SchemaType::new("u16").expect("type is well formed") } - /// Returns the schema type identifier for the native `u32` type. - pub fn u32() -> SchemaTypeId { - SchemaTypeId::new("u32").expect("type is well formed") + /// Returns the schema type for the native `u32` type. + pub fn u32() -> SchemaType { + SchemaType::new("u32").expect("type is well formed") + } + + /// Returns the schema type for the native `bool` type. + pub fn bool() -> SchemaType { + SchemaType::new("bool").expect("type is well formed") + } + + /// Returns the schema type for auth scheme identifiers. + pub fn auth_scheme() -> SchemaType { + SchemaType::new("miden::standards::auth::scheme").expect("type is well formed") + } + + /// Returns the schema type for public key commitments. + pub fn pub_key() -> SchemaType { + SchemaType::new("miden::standards::auth::pub_key").expect("type is well formed") + } + + /// Returns the schema type for fungible faucet token symbols. + pub fn token_symbol() -> SchemaType { + SchemaType::new("miden::standards::fungible_faucets::metadata::token_symbol") + .expect("type is well formed") } /// Returns a reference to the inner string. 
@@ -162,23 +191,23 @@ impl SchemaTypeId { } } -impl Display for SchemaTypeId { +impl Display for SchemaType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } -impl Serializable for SchemaTypeId { +impl Serializable for SchemaType { fn write_into(&self, target: &mut W) { target.write(self.0.clone()) } } -impl Deserializable for SchemaTypeId { +impl Deserializable for SchemaType { fn read_from(source: &mut R) -> Result { let id: String = source.read()?; - SchemaTypeId::new(id).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + SchemaType::new(id).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -195,7 +224,7 @@ impl Deserializable for SchemaTypeId { #[derive(Debug, Clone, PartialEq, Eq)] pub struct SchemaRequirement { /// The expected type identifier. - pub r#type: SchemaTypeId, + pub r#type: SchemaType, /// An optional description providing additional context. pub description: Option, /// An optional default value, which can be overridden at component instantiation time. @@ -208,7 +237,7 @@ pub struct SchemaRequirement { /// Trait for converting a string into a single `Felt`. pub trait FeltType: Send + Sync { /// Returns the type identifier. - fn type_name() -> SchemaTypeId + fn type_name() -> SchemaType where Self: Sized; @@ -226,7 +255,7 @@ pub trait FeltType: Send + Sync { /// Trait for converting a string into a single `Word`. pub trait WordType: Send + Sync { /// Returns the type identifier. - fn type_name() -> SchemaTypeId + fn type_name() -> SchemaType where Self: Sized; @@ -245,7 +274,7 @@ impl WordType for T where T: FeltType, { - fn type_name() -> SchemaTypeId { + fn type_name() -> SchemaType { ::type_name() } @@ -268,12 +297,41 @@ where // FELT IMPLS FOR NATIVE TYPES // ================================================================================================ +/// A boolean felt type: `0` (false) or `1` (true). 
+struct Bool; + +impl FeltType for Bool { + fn type_name() -> SchemaType { + SchemaType::bool() + } + + fn parse_str(input: &str) -> Result { + match input { + "true" | "1" => Ok(Felt::new(1)), + "false" | "0" => Ok(Felt::new(0)), + _ => Err(SchemaTypeError::ConversionError(format!( + "invalid bool value `{input}`: expected `true`, `false`, `1`, or `0`" + ))), + } + } + + fn display_felt(value: Felt) -> Result { + match value.as_canonical_u64() { + 0 => Ok("false".into()), + 1 => Ok("true".into()), + other => Err(SchemaTypeError::ConversionError(format!( + "value `{other}` is not a valid bool (expected 0 or 1)" + ))), + } + } +} + /// A felt type that represents irrelevant elements in a storage schema definition. struct Void; impl FeltType for Void { - fn type_name() -> SchemaTypeId { - SchemaTypeId::void() + fn type_name() -> SchemaType { + SchemaType::void() } fn parse_str(input: &str) -> Result { @@ -293,8 +351,8 @@ impl FeltType for Void { } impl FeltType for u8 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u8() + fn type_name() -> SchemaType { + SchemaType::u8() } fn parse_str(input: &str) -> Result { @@ -305,16 +363,50 @@ impl FeltType for u8 { } fn display_felt(value: Felt) -> Result { - let native = u8::try_from(value.as_int()).map_err(|_| { + let native = u8::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u8", value)) })?; Ok(native.to_string()) } } +impl FeltType for AuthScheme { + fn type_name() -> SchemaType { + SchemaType::auth_scheme() + } + + fn parse_str(input: &str) -> Result { + let auth_scheme = if let Ok(scheme_id) = input.parse::() { + AuthScheme::try_from(scheme_id).map_err(|err| { + SchemaTypeError::parse(input.to_string(), ::type_name(), err) + })? + } else { + AuthScheme::from_str(input) + .map_err(|err| SchemaTypeError::ConversionError(err.to_string()))? 
+ }; + + Ok(Felt::from(auth_scheme.as_u8())) + } + + fn display_felt(value: Felt) -> Result { + let scheme_id = u8::try_from(value.as_canonical_u64()).map_err(|_| { + SchemaTypeError::ConversionError(format!( + "value `{}` is out of range for auth scheme id", + value + )) + })?; + + let auth_scheme = AuthScheme::try_from(scheme_id).map_err(|err| { + SchemaTypeError::ConversionError(format!("invalid auth scheme id `{scheme_id}`: {err}")) + })?; + + Ok(auth_scheme.to_string()) + } +} + impl FeltType for u16 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u16() + fn type_name() -> SchemaType { + SchemaType::u16() } fn parse_str(input: &str) -> Result { @@ -325,7 +417,7 @@ impl FeltType for u16 { } fn display_felt(value: Felt) -> Result { - let native = u16::try_from(value.as_int()).map_err(|_| { + let native = u16::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u16", value)) })?; Ok(native.to_string()) @@ -333,8 +425,8 @@ impl FeltType for u16 { } impl FeltType for u32 { - fn type_name() -> SchemaTypeId { - SchemaTypeId::u32() + fn type_name() -> SchemaType { + SchemaType::u32() } fn parse_str(input: &str) -> Result { @@ -345,7 +437,7 @@ impl FeltType for u32 { } fn display_felt(value: Felt) -> Result { - let native = u32::try_from(value.as_int()).map_err(|_| { + let native = u32::try_from(value.as_canonical_u64()).map_err(|_| { SchemaTypeError::ConversionError(format!("value `{}` is out of range for u32", value)) })?; Ok(native.to_string()) @@ -353,8 +445,8 @@ impl FeltType for u32 { } impl FeltType for Felt { - fn type_name() -> SchemaTypeId { - SchemaTypeId::new("felt").expect("type is well formed") + fn type_name() -> SchemaType { + SchemaType::native_felt() } fn parse_str(input: &str) -> Result { @@ -370,14 +462,13 @@ impl FeltType for Felt { } fn display_felt(value: Felt) -> Result { - Ok(format!("0x{:x}", value.as_int())) + Ok(format!("0x{:x}", value.as_canonical_u64())) } } impl 
FeltType for TokenSymbol { - fn type_name() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::fungible_faucets::metadata::token_symbol") - .expect("type is well formed") + fn type_name() -> SchemaType { + SchemaType::token_symbol() } fn parse_str(input: &str) -> Result { let token = TokenSymbol::new(input).map_err(|err| { @@ -390,15 +481,10 @@ impl FeltType for TokenSymbol { let token = TokenSymbol::try_from(value).map_err(|err| { SchemaTypeError::ConversionError(format!( "invalid token_symbol value `{}`: {err}", - value.as_int() + value.as_canonical_u64() )) })?; - token.to_string().map_err(|err| { - SchemaTypeError::ConversionError(format!( - "failed to display token_symbol value `{}`: {err}", - value.as_int() - )) - }) + Ok(token.to_string()) } } @@ -425,8 +511,8 @@ fn pad_hex_string(input: &str) -> String { } impl WordType for Word { - fn type_name() -> SchemaTypeId { - SchemaTypeId::native_word() + fn type_name() -> SchemaType { + SchemaType::native_word() } fn parse_str(input: &str) -> Result { Word::parse(input).map_err(|err| { @@ -443,32 +529,9 @@ impl WordType for Word { } } -impl WordType for falcon512_rpo::PublicKey { - fn type_name() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::auth::falcon512_rpo::pub_key") - .expect("type is well formed") - } - fn parse_str(input: &str) -> Result { - let padded_input = pad_hex_string(input); - - Word::try_from(padded_input.as_str()).map_err(|err| { - SchemaTypeError::parse( - input.to_string(), // Use original input in error - Self::type_name(), - WordParseError(err.to_string()), - ) - }) - } - - fn display_word(value: Word) -> Result { - Ok(value.to_string()) - } -} - -impl WordType for ecdsa_k256_keccak::PublicKey { - fn type_name() -> SchemaTypeId { - SchemaTypeId::new("miden::standards::auth::ecdsa_k256_keccak::pub_key") - .expect("type is well formed") +impl WordType for PublicKey { + fn type_name() -> SchemaType { + SchemaType::pub_key() } fn parse_str(input: &str) -> Result { let padded_input 
= pad_hex_string(input); @@ -536,10 +599,10 @@ enum TypeKind { /// corresponding storage values. #[derive(Clone, Debug, Default)] pub struct SchemaTypeRegistry { - felt: BTreeMap, - word: BTreeMap, - felt_display: BTreeMap, - word_display: BTreeMap, + felt: BTreeMap, + word: BTreeMap, + felt_display: BTreeMap, + word_display: BTreeMap, } impl SchemaTypeRegistry { @@ -578,7 +641,7 @@ impl SchemaTypeRegistry { /// - If the type is not registered or if the conversion fails. pub fn try_parse_felt( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, value: &str, ) -> Result { let converter = self @@ -591,7 +654,7 @@ impl SchemaTypeRegistry { /// Validates that the given [`Felt`] conforms to the specified schema type. pub fn validate_felt_value( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, felt: Felt, ) -> Result<(), SchemaTypeError> { let display = self @@ -607,7 +670,7 @@ impl SchemaTypeRegistry { /// Validates that the given [`Word`] conforms to the specified schema type. pub fn validate_word_value( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, word: Word, ) -> Result<(), SchemaTypeError> { match self.type_kind(type_name) { @@ -628,15 +691,15 @@ impl SchemaTypeRegistry { /// /// This is intended for serializing schemas to TOML (e.g. default values). #[allow(dead_code)] - pub fn display_felt(&self, type_name: &SchemaTypeId, felt: Felt) -> String { + pub fn display_felt(&self, type_name: &SchemaType, felt: Felt) -> String { self.felt_display .get(type_name) .and_then(|display| display(felt).ok()) - .unwrap_or_else(|| format!("0x{:x}", felt.as_int())) + .unwrap_or_else(|| format!("0x{:x}", felt.as_canonical_u64())) } /// Converts a [`Word`] into a canonical string representation and reports how it was produced. 
- pub fn display_word(&self, type_name: &SchemaTypeId, word: Word) -> WordDisplay { + pub fn display_word(&self, type_name: &SchemaType, word: Word) -> WordDisplay { if let Some(display) = self.word_display.get(type_name) { let value = display(word).unwrap_or_else(|_| word.to_string()); return WordDisplay::Word(value); @@ -664,7 +727,7 @@ impl SchemaTypeRegistry { /// - If the type is not registered or if the conversion fails. pub fn try_parse_word( &self, - type_name: &SchemaTypeId, + type_name: &SchemaType, value: &str, ) -> Result { if let Some(converter) = self.word.get(type_name) { @@ -681,11 +744,11 @@ impl SchemaTypeRegistry { } /// Returns `true` if a `FeltType` is registered for the given type. - pub fn contains_felt_type(&self, type_name: &SchemaTypeId) -> bool { + pub fn contains_felt_type(&self, type_name: &SchemaType) -> bool { self.felt.contains_key(type_name) } - fn type_kind(&self, type_name: &SchemaTypeId) -> TypeKind { + fn type_kind(&self, type_name: &SchemaType) -> TypeKind { if self.contains_felt_type(type_name) { TypeKind::Felt } else { @@ -697,7 +760,63 @@ impl SchemaTypeRegistry { /// /// This also returns `true` for any registered felt type (as those can be embedded into a word /// with zero-padding). 
- pub fn contains_word_type(&self, type_name: &SchemaTypeId) -> bool { + pub fn contains_word_type(&self, type_name: &SchemaType) -> bool { self.word.contains_key(type_name) || self.felt.contains_key(type_name) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn auth_scheme_type_supports_named_and_numeric_values() { + let auth_scheme_type = SchemaType::auth_scheme(); + + let numeric_word = SCHEMA_TYPE_REGISTRY + .try_parse_word(&auth_scheme_type, "2") + .expect("numeric auth scheme id should parse"); + assert_eq!(numeric_word, Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::from(2u8)])); + + let named_word = SCHEMA_TYPE_REGISTRY + .try_parse_word(&auth_scheme_type, "EcdsaK256Keccak") + .expect("named auth scheme should parse"); + assert_eq!(named_word, Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::from(1u8)])); + + let displayed = SCHEMA_TYPE_REGISTRY.display_word(&auth_scheme_type, numeric_word); + assert!( + matches!(displayed, WordDisplay::Felt(ref value) if value == "Falcon512Poseidon2"), + "expected canonical auth scheme display, got {displayed:?}" + ); + } + + #[test] + fn schema_types_reject_invalid_values() { + // Auth scheme rejects out-of-range and unknown values. + let auth_scheme_type = SchemaType::auth_scheme(); + + assert!(SCHEMA_TYPE_REGISTRY.try_parse_word(&auth_scheme_type, "9").is_err()); + assert!(SCHEMA_TYPE_REGISTRY.try_parse_word(&auth_scheme_type, "invalid").is_err()); + + let invalid_word = Word::from([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::from(9u8)]); + assert!( + SCHEMA_TYPE_REGISTRY + .validate_word_value(&auth_scheme_type, invalid_word) + .is_err() + ); + + // Bool type parses "true"/"false"/"1"/"0" and rejects everything else. 
+ let bool_type = SchemaType::bool(); + + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "true").unwrap(), Felt::new(1)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "false").unwrap(), Felt::new(0)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "1").unwrap(), Felt::new(1)); + assert_eq!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "0").unwrap(), Felt::new(0)); + assert_eq!(SCHEMA_TYPE_REGISTRY.display_felt(&bool_type, Felt::new(0)), "false"); + assert_eq!(SCHEMA_TYPE_REGISTRY.display_felt(&bool_type, Felt::new(1)), "true"); + + assert!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "yes").is_err()); + assert!(SCHEMA_TYPE_REGISTRY.try_parse_felt(&bool_type, "2").is_err()); + assert!(SCHEMA_TYPE_REGISTRY.validate_felt_value(&bool_type, Felt::new(2)).is_err()); + } +} diff --git a/crates/miden-protocol/src/account/component/storage/value_name.rs b/crates/miden-protocol/src/account/component/storage/value_name.rs index 302916d9de..46568ebef6 100644 --- a/crates/miden-protocol/src/account/component/storage/value_name.rs +++ b/crates/miden-protocol/src/account/component/storage/value_name.rs @@ -3,12 +3,17 @@ use core::cmp::Ordering; use core::fmt::{self, Display}; use core::str::FromStr; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use thiserror::Error; use crate::account::StorageSlotName; use crate::errors::StorageSlotNameError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A simple wrapper type around a string key that identifies init-provided values. 
/// @@ -158,6 +163,14 @@ impl TryFrom for StorageValueName { } } +impl TryFrom<&str> for StorageValueName { + type Error = StorageValueNameError; + + fn try_from(value: &str) -> Result { + value.parse() + } +} + impl From for String { fn from(value: StorageValueName) -> Self { value.to_string() diff --git a/crates/miden-protocol/src/account/delta/mod.rs b/crates/miden-protocol/src/account/delta/mod.rs index 1b8d13d1f5..5fdddc577b 100644 --- a/crates/miden-protocol/src/account/delta/mod.rs +++ b/crates/miden-protocol/src/account/delta/mod.rs @@ -12,7 +12,13 @@ use crate::account::{ use crate::asset::AssetVault; use crate::crypto::SequentialCommit; use crate::errors::{AccountDeltaError, AccountError}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word, ZERO}; mod storage; @@ -96,7 +102,7 @@ impl AccountDelta { pub fn merge(&mut self, other: Self) -> Result<(), AccountDeltaError> { let new_nonce_delta = self.nonce_delta + other.nonce_delta; - if new_nonce_delta.as_int() < self.nonce_delta.as_int() { + if new_nonce_delta.as_canonical_u64() < self.nonce_delta.as_canonical_u64() { return Err(AccountDeltaError::NonceIncrementOverflow { current: self.nonce_delta, increment: other.nonce_delta, @@ -190,22 +196,25 @@ impl AccountDelta { /// [`LexicographicWord`](crate::LexicographicWord). The WORD layout is in memory-order. /// /// - Append `[[nonce_delta, 0, account_id_suffix, account_id_prefix], EMPTY_WORD]`, where - /// account_id_{prefix,suffix} are the prefix and suffix felts of the native account id and - /// nonce_delta is the value by which the nonce was incremented. + /// `account_id_{prefix,suffix}` are the prefix and suffix felts of the native account id and + /// `nonce_delta` is the value by which the nonce was incremented. 
/// - Fungible Asset Delta /// - For each **updated** fungible asset, sorted by its vault key, whose amount delta is /// **non-zero**: - /// - Append `[domain = 1, was_added, 0, 0]`. - /// - Append `[amount, 0, faucet_id_suffix, faucet_id_prefix]` where amount is the delta by - /// which the fungible asset's amount has changed and was_added is a boolean flag - /// indicating whether the amount was added (1) or subtracted (0). + /// - Append `[domain = 1, was_added, faucet_id_suffix_and_metadata, faucet_id_prefix]` + /// where `faucet_id_suffix_and_metadata` is the faucet ID suffix with asset metadata + /// (including the callbacks flag) encoded in the lower 8 bits. + /// - Append `[amount_delta, 0, 0, 0]` where `amount_delta` is the delta by which the + /// fungible asset's amount has changed and `was_added` is a boolean flag indicating + /// whether the amount was added (1) or subtracted (0). /// - Non-Fungible Asset Delta /// - For each **updated** non-fungible asset, sorted by its vault key: - /// - Append `[domain = 1, was_added, 0, 0]` where was_added is a boolean flag indicating - /// whether the asset was added (1) or removed (0). Note that the domain is the same for - /// assets since `faucet_id_prefix` is at the same position in the layout for both assets, - /// and, by design, it is never the same for fungible and non-fungible assets. - /// - Append `[hash0, hash1, hash2, faucet_id_prefix]`, i.e. the non-fungible asset. + /// - Append `[domain = 1, was_added, faucet_id_suffix, faucet_id_prefix]` where `was_added` + /// is a boolean flag indicating whether the asset was added (1) or removed (0). Note that + /// the domain is the same for assets since `faucet_id_suffix` and `faucet_id_prefix` are + /// at the same position in the layout for both assets, and, by design, they are never the + /// same for fungible and non-fungible assets. + /// - Append `[hash0, hash1, hash2, hash3]`, i.e. the non-fungible asset. 
/// - Storage Slots are sorted by slot ID and are iterated in this order. For each slot **whose /// value has changed**, depending on the slot type: /// - Value Slot @@ -269,7 +278,7 @@ impl AccountDelta { /// [ /// ID_AND_NONCE, EMPTY_WORD, /// [/* no fungible asset delta */], - /// [[domain = 1, was_added = 0, 0, 0], NON_FUNGIBLE_ASSET], + /// [[domain = 1, was_added = 0, faucet_id_suffix, faucet_id_prefix], NON_FUNGIBLE_ASSET], /// [/* no storage delta */] /// ] /// ``` @@ -279,14 +288,15 @@ impl AccountDelta { /// ID_AND_NONCE, EMPTY_WORD, /// [/* no fungible asset delta */], /// [/* no non-fungible asset delta */], - /// [[domain = 2, 0, slot_id_suffix = 0, slot_id_prefix = 0], NEW_VALUE] + /// [[domain = 2, 0, slot_id_suffix = faucet_id_suffix, slot_id_prefix = faucet_id_prefix], NEW_VALUE] /// ] /// ``` /// - /// `NEW_VALUE` is user-controllable so it can be crafted to match `NON_FUNGIBLE_ASSET`. The - /// domain separator is then the only value that differentiates these two deltas. This shows the - /// importance of placing the domain separators in the same index within each word's layout - /// which makes it easy to see that this value cannot be crafted to be the same. + /// `NEW_VALUE` is user-controllable so it can be crafted to match `NON_FUNGIBLE_ASSET`. Users + /// would have to choose a slot ID (at account creation time) that is equal to the faucet ID. + /// The domain separator is then the only value that differentiates these two deltas. This shows + /// the importance of placing the domain separators in the same index within each word's layout + /// to ensure users cannot craft an ambiguous delta. 
/// /// ### Number of Changed Entries /// @@ -587,8 +597,7 @@ fn validate_nonce( mod tests { use assert_matches::assert_matches; - use miden_core::utils::Serializable; - use miden_core::{Felt, FieldElement}; + use miden_core::Felt; use super::{AccountDelta, AccountStorageDelta, AccountVaultDelta}; use crate::account::delta::AccountUpdateDetails; @@ -600,6 +609,7 @@ mod tests { AccountStorageMode, AccountType, StorageMapDelta, + StorageMapKey, StorageSlotName, }; use crate::asset::{ @@ -615,6 +625,7 @@ mod tests { ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, AccountIdBuilder, }; + use crate::utils::serde::Serializable; use crate::{ONE, Word, ZERO}; #[test] @@ -684,8 +695,11 @@ mod tests { [( StorageSlotName::mock(4), StorageMapDelta::from_iters( - [Word::from([1, 1, 1, 0u32]), Word::from([0, 1, 1, 1u32])], - [(Word::from([1, 1, 1, 1u32]), Word::from([1, 1, 1, 1u32]))], + [ + StorageMapKey::from_array([1, 1, 1, 0]), + StorageMapKey::from_array([0, 1, 1, 1]), + ], + [(StorageMapKey::from_array([1, 1, 1, 1]), Word::from([1, 1, 1, 1u32]))], ), )], ); @@ -695,8 +709,7 @@ mod tests { AccountIdBuilder::new() .account_type(AccountType::NonFungibleFaucet) .storage_mode(AccountStorageMode::Public) - .build_with_rng(&mut rand::rng()) - .prefix(), + .build_with_rng(&mut rand::rng()), vec![6], ) .unwrap(), @@ -734,13 +747,8 @@ mod tests { let account_code = AccountCode::mock(); assert_eq!(account_code.to_bytes().len(), account_code.get_size_hint()); - let account = Account::new_existing( - account_id, - asset_vault, - account_storage, - account_code, - Felt::ONE, - ); + let account = + Account::new_existing(account_id, asset_vault, account_storage, account_code, ONE); assert_eq!(account.to_bytes().len(), account.get_size_hint()); // AccountUpdateDetails diff --git a/crates/miden-protocol/src/account/delta/storage.rs b/crates/miden-protocol/src/account/delta/storage.rs index bd600ac0e0..dd8249c046 100644 --- a/crates/miden-protocol/src/account/delta/storage.rs +++ 
b/crates/miden-protocol/src/account/delta/storage.rs @@ -11,7 +11,13 @@ use super::{ Serializable, Word, }; -use crate::account::{StorageMap, StorageSlotContent, StorageSlotName, StorageSlotType}; +use crate::account::{ + StorageMap, + StorageMapKey, + StorageSlotContent, + StorageSlotName, + StorageSlotType, +}; use crate::{EMPTY_WORD, Felt, LexicographicWord, ZERO}; // ACCOUNT STORAGE DELTA @@ -103,7 +109,7 @@ impl AccountStorageDelta { pub fn set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, new_value: Word, ) -> Result<(), AccountDeltaError> { match self @@ -194,9 +200,10 @@ impl AccountStorageDelta { elements.extend_from_slice(value.as_elements()); } - let num_changed_entries = Felt::try_from(map_delta.num_entries()).expect( - "number of changed entries should not exceed max representable felt", - ); + let num_changed_entries = Felt::try_from(map_delta.num_entries() as u64) + .expect( + "number of changed entries should not exceed max representable felt", + ); elements.extend_from_slice(&[ DOMAIN_MAP, @@ -288,12 +295,10 @@ impl Deserializable for AccountStorageDelta { } let num_maps = source.read_u8()? as usize; - deltas.extend( - source - .read_many::<(StorageSlotName, StorageMapDelta)>(num_maps)? - .into_iter() - .map(|(slot_name, map_delta)| (slot_name, StorageSlotDelta::Map(map_delta))), - ); + for read_result in source.read_many_iter::<(StorageSlotName, StorageMapDelta)>(num_maps)? { + let (slot_name, map_delta) = read_result?; + deltas.insert(slot_name, StorageSlotDelta::Map(map_delta)); + } Ok(Self::from_raw(deltas)) } @@ -465,11 +470,11 @@ impl Deserializable for StorageSlotDelta { /// The [`LexicographicWord`] wrapper is necessary to order the keys in the same way as the /// in-kernel account delta which uses a link map. 
#[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct StorageMapDelta(BTreeMap); +pub struct StorageMapDelta(BTreeMap, Word>); impl StorageMapDelta { /// Creates a new storage map delta from the provided leaves. - pub fn new(map: BTreeMap) -> Self { + pub fn new(map: BTreeMap, Word>) -> Self { Self(map) } @@ -480,14 +485,14 @@ impl StorageMapDelta { /// Returns a reference to the updated entries in this storage map delta. /// - /// Note that the returned key is the raw map key. - pub fn entries(&self) -> &BTreeMap { + /// Note that the returned key is the [`StorageMapKey`]. + pub fn entries(&self) -> &BTreeMap, Word> { &self.0 } /// Inserts an item into the storage map delta. - pub fn insert(&mut self, raw_key: Word, value: Word) { - self.0.insert(LexicographicWord::new(raw_key), value); + pub fn insert(&mut self, key: StorageMapKey, value: Word) { + self.0.insert(LexicographicWord::new(key), value); } /// Returns true if storage map delta contains no updates. @@ -502,17 +507,17 @@ impl StorageMapDelta { } /// Returns a mutable reference to the underlying map. - pub fn as_map_mut(&mut self) -> &mut BTreeMap { + pub fn as_map_mut(&mut self) -> &mut BTreeMap, Word> { &mut self.0 } /// Returns an iterator of all the cleared keys in the storage map. - fn cleared_keys(&self) -> impl Iterator + '_ { + fn cleared_keys(&self) -> impl Iterator + '_ { self.0.iter().filter(|&(_, value)| value.is_empty()).map(|(key, _)| key.inner()) } /// Returns an iterator of all the updated entries in the storage map. - fn updated_entries(&self) -> impl Iterator + '_ { + fn updated_entries(&self) -> impl Iterator + '_ { self.0.iter().filter_map(|(key, value)| { if !value.is_empty() { Some((key.inner(), value)) @@ -527,8 +532,8 @@ impl StorageMapDelta { impl StorageMapDelta { /// Creates a new [StorageMapDelta] from the provided iterators. 
pub fn from_iters( - cleared_leaves: impl IntoIterator, - updated_leaves: impl IntoIterator, + cleared_leaves: impl IntoIterator, + updated_leaves: impl IntoIterator, ) -> Self { Self(BTreeMap::from_iter( cleared_leaves @@ -543,7 +548,7 @@ impl StorageMapDelta { } /// Consumes self and returns the underlying map. - pub fn into_map(self) -> BTreeMap { + pub fn into_map(self) -> BTreeMap, Word> { self.0 } } @@ -562,8 +567,8 @@ impl From for StorageMapDelta { impl Serializable for StorageMapDelta { fn write_into(&self, target: &mut W) { - let cleared: Vec<&Word> = self.cleared_keys().collect(); - let updated: Vec<(&Word, &Word)> = self.updated_entries().collect(); + let cleared: Vec<&StorageMapKey> = self.cleared_keys().collect(); + let updated: Vec<(&StorageMapKey, &Word)> = self.updated_entries().collect(); target.write_usize(cleared.len()); target.write_many(cleared.iter()); @@ -573,18 +578,16 @@ impl Serializable for StorageMapDelta { } fn get_size_hint(&self) -> usize { - let word_size = EMPTY_WORD.get_size_hint(); - let cleared_keys_count = self.cleared_keys().count(); let updated_entries_count = self.updated_entries().count(); // Cleared Keys cleared_keys_count.get_size_hint() + - cleared_keys_count * Word::SERIALIZED_SIZE + + cleared_keys_count * StorageMapKey::SERIALIZED_SIZE + // Updated Entries updated_entries_count.get_size_hint() + - updated_entries_count * (Word::SERIALIZED_SIZE + word_size) + updated_entries_count * (StorageMapKey::SERIALIZED_SIZE + Word::SERIALIZED_SIZE) } } @@ -617,7 +620,7 @@ mod tests { use assert_matches::assert_matches; use super::{AccountStorageDelta, Deserializable, Serializable}; - use crate::account::{StorageMapDelta, StorageSlotDelta, StorageSlotName}; + use crate::account::{StorageMapDelta, StorageMapKey, StorageSlotDelta, StorageSlotName}; use crate::errors::AccountDeltaError; use crate::{ONE, Word}; @@ -633,7 +636,7 @@ mod tests { ); let err = delta - .set_map_item(value_slot_name.clone(), Word::empty(), Word::empty()) + 
.set_map_item(value_slot_name.clone(), StorageMapKey::empty(), Word::empty()) .unwrap_err(); assert_matches!(err, AccountDeltaError::StorageSlotUsedAsDifferentTypes(slot_name) => { assert_eq!(value_slot_name, slot_name) @@ -674,11 +677,13 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters([StorageSlotName::mock(1)], [], []); let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters( [], @@ -688,6 +693,7 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); let storage_delta = AccountStorageDelta::from_iters( [], @@ -697,6 +703,7 @@ mod tests { let serialized = storage_delta.to_bytes(); let deserialized = AccountStorageDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_delta); + assert_eq!(storage_delta.get_size_hint(), serialized.len()); } #[test] @@ -706,13 +713,16 @@ mod tests { let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); - let storage_map_delta = StorageMapDelta::from_iters([Word::from([ONE, ONE, ONE, ONE])], []); + let storage_map_delta = + StorageMapDelta::from_iters([StorageMapKey::from_array([1, 1, 1, 1])], []); let serialized = storage_map_delta.to_bytes(); let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); - let storage_map_delta = - 
StorageMapDelta::from_iters([], [(Word::empty(), Word::from([ONE, ONE, ONE, ONE]))]); + let storage_map_delta = StorageMapDelta::from_iters( + [], + [(StorageMapKey::empty(), Word::from([ONE, ONE, ONE, ONE]))], + ); let serialized = storage_map_delta.to_bytes(); let deserialized = StorageMapDelta::read_from_bytes(&serialized).unwrap(); assert_eq!(deserialized, storage_map_delta); @@ -739,8 +749,8 @@ mod tests { assert_eq!(deserialized, slot_delta); let map_delta = StorageMapDelta::from_iters( - [Word::from([1, 2, 3, 4u32])], - [(Word::from([5, 6, 7, 8u32]), Word::from([3, 4, 5, 6u32]))], + [StorageMapKey::from_array([1, 2, 3, 4])], + [(StorageMapKey::from_array([5, 6, 7, 8]), Word::from([3, 4, 5, 6u32]))], ); let slot_delta = StorageSlotDelta::Map(map_delta); let serialized = slot_delta.to_bytes(); @@ -786,7 +796,7 @@ mod tests { #[test] fn merge_maps(#[case] x: Option, #[case] y: Option, #[case] expected: Option) { fn create_delta(value: Option) -> StorageMapDelta { - let key = Word::from([10u32, 0, 0, 0]); + let key = StorageMapKey::from_array([10, 0, 0, 0]); match value { Some(value) => { StorageMapDelta::from_iters([], [(key, Word::from([value, 0, 0, 0]))]) diff --git a/crates/miden-protocol/src/account/delta/vault.rs b/crates/miden-protocol/src/account/delta/vault.rs index 959b813764..7586337f54 100644 --- a/crates/miden-protocol/src/account/delta/vault.rs +++ b/crates/miden-protocol/src/account/delta/vault.rs @@ -11,9 +11,9 @@ use super::{ DeserializationError, Serializable, }; -use crate::account::{AccountId, AccountType}; -use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; -use crate::{Felt, LexicographicWord, ONE, Word, ZERO}; +use crate::account::AccountType; +use crate::asset::{Asset, AssetVaultKey, FungibleAsset, NonFungibleAsset}; +use crate::{Felt, ONE, ZERO}; // ACCOUNT VAULT DELTA // ================================================================================================ @@ -100,8 +100,6 @@ impl AccountVaultDelta { added_assets: 
impl IntoIterator, removed_assets: impl IntoIterator, ) -> Self { - use crate::asset::Asset; - let mut fungible = FungibleAssetDelta::default(); let mut non_fungible = NonFungibleAssetDelta::default(); @@ -132,32 +130,42 @@ impl AccountVaultDelta { /// Returns an iterator over the added assets in this delta. pub fn added_assets(&self) -> impl Iterator + '_ { - use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; self.fungible .0 .iter() .filter(|&(_, &value)| value >= 0) - .map(|(&faucet_id, &diff)| { - Asset::Fungible(FungibleAsset::new(faucet_id, diff.unsigned_abs()).unwrap()) + .map(|(vault_key, &diff)| { + Asset::Fungible( + FungibleAsset::new(vault_key.faucet_id(), diff.unsigned_abs()) + .unwrap() + .with_callbacks(vault_key.callback_flag()), + ) }) - .chain(self.non_fungible.filter_by_action(NonFungibleDeltaAction::Add).map(|key| { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(key.into()) }) - })) + .chain( + self.non_fungible + .filter_by_action(NonFungibleDeltaAction::Add) + .map(Asset::NonFungible), + ) } /// Returns an iterator over the removed assets in this delta. 
pub fn removed_assets(&self) -> impl Iterator + '_ { - use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; self.fungible .0 .iter() .filter(|&(_, &value)| value < 0) - .map(|(&faucet_id, &diff)| { - Asset::Fungible(FungibleAsset::new(faucet_id, diff.unsigned_abs()).unwrap()) + .map(|(vault_key, &diff)| { + Asset::Fungible( + FungibleAsset::new(vault_key.faucet_id(), diff.unsigned_abs()) + .unwrap() + .with_callbacks(vault_key.callback_flag()), + ) }) - .chain(self.non_fungible.filter_by_action(NonFungibleDeltaAction::Remove).map(|key| { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(key.into()) }) - })) + .chain( + self.non_fungible + .filter_by_action(NonFungibleDeltaAction::Remove) + .map(Asset::NonFungible), + ) } } @@ -185,15 +193,18 @@ impl Deserializable for AccountVaultDelta { // ================================================================================================ /// A binary tree map of fungible asset balance changes in the account vault. +/// +/// The [`AssetVaultKey`] orders the assets in the same way as the in-kernel account delta which +/// uses a link map. #[derive(Clone, Debug, Default, PartialEq, Eq)] -pub struct FungibleAssetDelta(BTreeMap); +pub struct FungibleAssetDelta(BTreeMap); impl FungibleAssetDelta { /// Validates and creates a new fungible asset delta. /// /// # Errors /// Returns an error if the delta does not pass the validation. - pub fn new(map: BTreeMap) -> Result { + pub fn new(map: BTreeMap) -> Result { let delta = Self(map); delta.validate()?; @@ -206,7 +217,7 @@ impl FungibleAssetDelta { /// Returns an error if the delta would overflow. pub fn add(&mut self, asset: FungibleAsset) -> Result<(), AccountDeltaError> { let amount: i64 = asset.amount().try_into().expect("Amount it too high"); - self.add_delta(asset.faucet_id(), amount) + self.add_delta(asset.vault_key(), amount) } /// Removes a fungible asset from the delta. 
@@ -215,12 +226,12 @@ impl FungibleAssetDelta { /// Returns an error if the delta would overflow. pub fn remove(&mut self, asset: FungibleAsset) -> Result<(), AccountDeltaError> { let amount: i64 = asset.amount().try_into().expect("Amount it too high"); - self.add_delta(asset.faucet_id(), -amount) + self.add_delta(asset.vault_key(), -amount) } - /// Returns the amount of the fungible asset with the given faucet ID. - pub fn amount(&self, faucet_id: &AccountId) -> Option { - self.0.get(faucet_id).copied() + /// Returns the amount of the fungible asset with the given vault key. + pub fn amount(&self, vault_key: &AssetVaultKey) -> Option { + self.0.get(vault_key).copied() } /// Returns the number of fungible assets affected in the delta. @@ -234,7 +245,7 @@ impl FungibleAssetDelta { } /// Returns an iterator over the (key, value) pairs of the map. - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.0.iter() } @@ -250,8 +261,8 @@ impl FungibleAssetDelta { // Track fungible asset amounts - positive and negative. `i64` is not lossy while // fungibles are restricted to 2^63-1. Overflow is still possible but we check for that. - for (&faucet_id, &amount) in other.0.iter() { - self.add_delta(faucet_id, amount)?; + for (&vault_key, &amount) in other.0.iter() { + self.add_delta(vault_key, amount)?; } Ok(()) @@ -265,8 +276,8 @@ impl FungibleAssetDelta { /// /// # Errors /// Returns an error if the delta would overflow. - fn add_delta(&mut self, faucet_id: AccountId, delta: i64) -> Result<(), AccountDeltaError> { - match self.0.entry(faucet_id) { + fn add_delta(&mut self, vault_key: AssetVaultKey, delta: i64) -> Result<(), AccountDeltaError> { + match self.0.entry(vault_key) { Entry::Vacant(entry) => { // Only track non-zero amounts. 
if delta != 0 { @@ -277,7 +288,7 @@ impl FungibleAssetDelta { let old = *entry.get(); let new = old.checked_add(delta).ok_or( AccountDeltaError::FungibleAssetDeltaOverflow { - faucet_id, + faucet_id: vault_key.faucet_id(), current: old, delta, }, @@ -299,9 +310,9 @@ impl FungibleAssetDelta { /// # Errors /// Returns an error if one or more fungible assets' faucet IDs are invalid. fn validate(&self) -> Result<(), AccountDeltaError> { - for faucet_id in self.0.keys() { - if !matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { - return Err(AccountDeltaError::NotAFungibleFaucetId(*faucet_id)); + for vault_key in self.0.keys() { + if !matches!(vault_key.faucet_id().account_type(), AccountType::FungibleFaucet) { + return Err(AccountDeltaError::NotAFungibleFaucetId(vault_key.faucet_id())); } } @@ -314,12 +325,12 @@ impl FungibleAssetDelta { /// Note that the order in which elements are appended should be the link map key ordering. This /// is fulfilled here because the link map key's most significant element takes precedence over /// less significant ones. The most significant element in the fungible asset delta is the - /// account ID prefix and the delta happens to be sorted by account IDs. Since the account ID + /// faucet ID prefix and the delta happens to be sorted by vault keys. Since the faucet ID /// prefix is unique, it will always decide on the ordering of a link map key, so less /// significant elements are unimportant. This implicit sort should therefore always match the /// link map key ordering, however this is subtle and fragile. pub(super) fn append_delta_elements(&self, elements: &mut Vec) { - for (faucet_id, amount_delta) in self.iter() { + for (vault_key, amount_delta) in self.iter() { // Note that this iterator is guaranteed to never yield zero amounts, so we don't have // to exclude those explicitly. 
debug_assert_ne!( @@ -327,12 +338,18 @@ impl FungibleAssetDelta { "fungible asset iterator should never yield amount deltas of 0" ); - let asset = FungibleAsset::new(*faucet_id, amount_delta.unsigned_abs()) - .expect("absolute amount delta should be less than i64::MAX"); let was_added = if *amount_delta > 0 { ONE } else { ZERO }; - - elements.extend_from_slice(&[DOMAIN_ASSET, was_added, ZERO, ZERO]); - elements.extend_from_slice(Word::from(asset).as_elements()); + let amount_delta = Felt::try_from(amount_delta.unsigned_abs()) + .expect("amount delta should be less than i64::MAX"); + + let key_word = vault_key.to_word(); + elements.extend_from_slice(&[ + DOMAIN_ASSET, + was_added, + key_word[2], // faucet_id_suffix_and_metadata + key_word[3], // faucet_id_prefix + ]); + elements.extend_from_slice(&[amount_delta, ZERO, ZERO, ZERO]); } } } @@ -343,11 +360,13 @@ impl Serializable for FungibleAssetDelta { // TODO: We save `i64` as `u64` since winter utils only supports unsigned integers for now. // We should update this code (and deserialization as well) once it supports signed // integers. - target.write_many(self.0.iter().map(|(&faucet_id, &delta)| (faucet_id, delta as u64))); + // TODO: If we keep this code, optimize by not serializing asset ID (which is always 0). + target.write_many(self.0.iter().map(|(vault_key, &delta)| (*vault_key, delta as u64))); } fn get_size_hint(&self) -> usize { - self.0.len().get_size_hint() + self.0.len() * FungibleAsset::SERIALIZED_SIZE + const ENTRY_SIZE: usize = AssetVaultKey::SERIALIZED_SIZE + core::mem::size_of::(); + self.0.len().get_size_hint() + self.0.len() * ENTRY_SIZE } } @@ -355,13 +374,12 @@ impl Deserializable for FungibleAssetDelta { fn read_from(source: &mut R) -> Result { let num_fungible_assets = source.read_usize()?; // TODO: We save `i64` as `u64` since winter utils only supports unsigned integers for now. - // We should update this code (and serialization as well) once it support signeds - // integers. 
+ // We should update this code (and serialization as well) once it supports signed + // integers. let map = source - .read_many::<(AccountId, u64)>(num_fungible_assets)? - .into_iter() - .map(|(account_id, delta_as_u64)| (account_id, delta_as_u64 as i64)) - .collect(); + .read_many_iter::<(AssetVaultKey, u64)>(num_fungible_assets)? + .map(|result| result.map(|(vault_key, delta_as_u64)| (vault_key, delta_as_u64 as i64))) + .collect::>()?; Self::new(map).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -372,17 +390,17 @@ impl Deserializable for FungibleAssetDelta { /// A binary tree map of non-fungible asset changes (addition and removal) in the account vault. /// -/// The [`LexicographicWord`] wrapper is necessary to order the assets in the same way as the -/// in-kernel account delta which uses a link map. +/// The [`AssetVaultKey`] orders the assets in the same way as the in-kernel account delta which +/// uses a link map. #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct NonFungibleAssetDelta( - BTreeMap, NonFungibleDeltaAction>, + BTreeMap, ); impl NonFungibleAssetDelta { /// Creates a new non-fungible asset delta. pub const fn new( - map: BTreeMap, NonFungibleDeltaAction>, + map: BTreeMap, ) -> Self { Self(map) } @@ -415,7 +433,9 @@ impl NonFungibleAssetDelta { /// Returns an iterator over the (key, value) pairs of the map. pub fn iter(&self) -> impl Iterator { - self.0.iter().map(|(key, value)| (key.inner(), value)) + self.0 + .iter() + .map(|(_key, (non_fungible_asset, delta_action))| (non_fungible_asset, delta_action)) } /// Merges another delta into this one, overwriting any existing values. @@ -426,8 +446,8 @@ impl NonFungibleAssetDelta { /// Returns an error if duplicate non-fungible assets are added or removed. pub fn merge(&mut self, other: Self) -> Result<(), AccountDeltaError> { // Merge non-fungible assets. Each non-fungible asset can cancel others out. 
- for (&key, &action) in other.0.iter() { - self.apply_action(key.into_inner(), action)?; + for (&asset, &action) in other.iter() { + self.apply_action(asset, action)?; } Ok(()) @@ -446,13 +466,13 @@ impl NonFungibleAssetDelta { asset: NonFungibleAsset, action: NonFungibleDeltaAction, ) -> Result<(), AccountDeltaError> { - match self.0.entry(LexicographicWord::new(asset)) { + match self.0.entry(asset.vault_key()) { Entry::Vacant(entry) => { - entry.insert(action); + entry.insert((asset, action)); }, Entry::Occupied(entry) => { - let previous = *entry.get(); - if previous == action { + let (_prev_asset, previous_action) = *entry.get(); + if previous_action == action { // Asset cannot be added nor removed twice. return Err(AccountDeltaError::DuplicateNonFungibleVaultUpdate(asset)); } @@ -471,8 +491,8 @@ impl NonFungibleAssetDelta { ) -> impl Iterator + '_ { self.0 .iter() - .filter(move |&(_, cur_action)| cur_action == &action) - .map(|(key, _)| key.into_inner()) + .filter(move |&(_, (_asset, cur_action))| cur_action == &action) + .map(|(_key, (asset, _action))| *asset) } /// Appends the non-fungible asset vault delta to the given `elements` from which the delta @@ -484,8 +504,14 @@ impl NonFungibleAssetDelta { NonFungibleDeltaAction::Add => ONE, }; - elements.extend_from_slice(&[DOMAIN_ASSET, was_added, ZERO, ZERO]); - elements.extend_from_slice(Word::from(*asset).as_elements()); + let key_word = asset.vault_key().to_word(); + elements.extend_from_slice(&[ + DOMAIN_ASSET, + was_added, + key_word[2], // faucet_id_suffix_and_metadata + key_word[3], // faucet_id_prefix + ]); + elements.extend_from_slice(asset.to_value_word().as_elements()); } } } @@ -519,14 +545,14 @@ impl Deserializable for NonFungibleAssetDelta { let num_added = source.read_usize()?; for _ in 0..num_added { - let added_asset = source.read()?; - map.insert(LexicographicWord::new(added_asset), NonFungibleDeltaAction::Add); + let added_asset: NonFungibleAsset = source.read()?; + 
map.insert(added_asset.vault_key(), (added_asset, NonFungibleDeltaAction::Add)); } let num_removed = source.read_usize()?; for _ in 0..num_removed { - let removed_asset = source.read()?; - map.insert(LexicographicWord::new(removed_asset), NonFungibleDeltaAction::Remove); + let removed_asset: NonFungibleAsset = source.read()?; + map.insert(removed_asset.vault_key(), (removed_asset, NonFungibleDeltaAction::Remove)); } Ok(Self::new(map)) @@ -545,7 +571,7 @@ pub enum NonFungibleDeltaAction { #[cfg(test)] mod tests { use super::{AccountVaultDelta, Deserializable, Serializable}; - use crate::account::{AccountId, AccountIdPrefix}; + use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; use crate::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, @@ -627,11 +653,11 @@ mod tests { /// Creates an [AccountVaultDelta] with an optional [NonFungibleAsset] delta. This delta /// will be added if `Some(true)`, removed for `Some(false)` and missing for `None`. 
fn create_delta_with_non_fungible( - account_id_prefix: AccountIdPrefix, + account_id: AccountId, added: Option, ) -> AccountVaultDelta { let asset: Asset = NonFungibleAsset::new( - &NonFungibleAssetDetails::new(account_id_prefix, vec![1, 2, 3]).unwrap(), + &NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(), ) .unwrap() .into(); @@ -643,7 +669,7 @@ mod tests { } } - let account_id = NonFungibleAsset::mock_issuer().prefix(); + let account_id = NonFungibleAsset::mock_issuer(); let mut delta_x = create_delta_with_non_fungible(account_id, x); let delta_y = create_delta_with_non_fungible(account_id, y); diff --git a/crates/miden-protocol/src/account/file.rs b/crates/miden-protocol/src/account/file.rs index 4b6f5d4287..64e979cdb2 100644 --- a/crates/miden-protocol/src/account/file.rs +++ b/crates/miden-protocol/src/account/file.rs @@ -119,8 +119,8 @@ mod tests { let storage = AccountStorage::new(vec![]).unwrap(); let nonce = Felt::new(1); let account = Account::new_existing(id, vault, storage, code, nonce); - let auth_secret_key = AuthSecretKey::new_falcon512_rpo(); - let auth_secret_key_2 = AuthSecretKey::new_falcon512_rpo(); + let auth_secret_key = AuthSecretKey::new_falcon512_poseidon2(); + let auth_secret_key_2 = AuthSecretKey::new_falcon512_poseidon2(); AccountFile::new(account, vec![auth_secret_key, auth_secret_key_2]) } diff --git a/crates/miden-protocol/src/account/header.rs b/crates/miden-protocol/src/account/header.rs index a725394a2f..93635fa3a3 100644 --- a/crates/miden-protocol/src/account/header.rs +++ b/crates/miden-protocol/src/account/header.rs @@ -1,6 +1,7 @@ use alloc::vec::Vec; -use super::{Account, AccountId, Felt, PartialAccount, ZERO, hash_account}; +use super::{Account, AccountId, Felt, PartialAccount}; +use crate::crypto::SequentialCommit; use crate::errors::AccountError; use crate::transaction::memory::{ ACCT_CODE_COMMITMENT_OFFSET, @@ -13,7 +14,13 @@ use crate::transaction::memory::{ ACCT_VAULT_ROOT_OFFSET, MemoryOffset, }; -use 
crate::utils::serde::{Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{WORD_SIZE, Word, WordError}; // ACCOUNT HEADER @@ -68,10 +75,10 @@ impl AccountHeader { }); } - let id = AccountId::try_from([ - elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_PREFIX_IDX], + let id = AccountId::try_from_elements( elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_SUFFIX_IDX], - ]) + elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_ID_PREFIX_IDX], + ) .map_err(AccountError::FinalAccountHeaderIdParsingFailed)?; let nonce = elements[ACCT_ID_AND_NONCE_OFFSET as usize + ACCT_NONCE_IDX]; let vault_root = parse_word(elements, ACCT_VAULT_ROOT_OFFSET) @@ -89,17 +96,11 @@ impl AccountHeader { /// Returns the commitment of this account. /// - /// The commitment of an account is computed as hash(id, nonce, vault_root, storage_commitment, - /// code_commitment). Computing the account commitment requires 2 permutations of the hash - /// function. - pub fn commitment(&self) -> Word { - hash_account( - self.id, - self.nonce, - self.vault_root, - self.storage_commitment, - self.code_commitment, - ) + /// The commitment of an account is computed as a hash over the account header elements returned + /// by [`Self::to_elements`]. Computing the account commitment requires 2 permutations of the + /// hash function. + pub fn to_commitment(&self) -> Word { + ::to_commitment(self) } /// Returns the id of this account. @@ -127,26 +128,19 @@ impl AccountHeader { self.code_commitment } - /// Converts the account header into a vector of field elements. + /// Returns the account header encoded to a vector of field elements. 
/// - /// This is done by first converting the account header data into an array of Words as follows: + /// This is a vector of the following field elements: /// ```text /// [ - /// [account_id_suffix, account_id_prefix, 0, account_nonce] - /// [VAULT_ROOT] - /// [STORAGE_COMMITMENT] - /// [CODE_COMMITMENT] + /// [account_nonce, 0, account_id_suffix, account_id_prefix], + /// VAULT_ROOT, + /// STORAGE_COMMITMENT, + /// CODE_COMMITMENT, /// ] /// ``` - /// And then concatenating the resulting elements into a single vector. - pub fn as_elements(&self) -> Vec { - [ - &[self.id.suffix(), self.id.prefix().as_felt(), ZERO, self.nonce], - self.vault_root.as_elements(), - self.storage_commitment.as_elements(), - self.code_commitment.as_elements(), - ] - .concat() + pub fn to_elements(&self) -> Vec { + ::to_elements(self) } } @@ -186,8 +180,30 @@ impl From<&Account> for AccountHeader { } } +impl SequentialCommit for AccountHeader { + type Commitment = Word; + + fn to_elements(&self) -> Vec { + let mut id_nonce = Word::empty(); + id_nonce[ACCT_NONCE_IDX] = self.nonce; + id_nonce[ACCT_ID_SUFFIX_IDX] = self.id.suffix(); + id_nonce[ACCT_ID_PREFIX_IDX] = self.id.prefix().as_felt(); + + [ + id_nonce.as_elements(), + self.vault_root.as_elements(), + self.storage_commitment.as_elements(), + self.code_commitment.as_elements(), + ] + .concat() + } +} + +// SERIALIZATION +// ================================================================================================ + impl Serializable for AccountHeader { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.id.write_into(target); self.nonce.write_into(target); self.vault_root.write_into(target); @@ -197,9 +213,7 @@ impl Serializable for AccountHeader { } impl Deserializable for AccountHeader { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let id = AccountId::read_from(source)?; let nonce = Felt::read_from(source)?; let vault_root = 
Word::read_from(source)?; @@ -230,13 +244,13 @@ fn parse_word(data: &[Felt], offset: MemoryOffset) -> Result { #[cfg(test)] mod tests { use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use super::AccountHeader; use crate::Word; use crate::account::StorageSlotContent; use crate::account::tests::build_account; use crate::asset::FungibleAsset; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_account_storage() { diff --git a/crates/miden-protocol/src/account/mod.rs b/crates/miden-protocol/src/account/mod.rs index c521c5e46f..0396dbac43 100644 --- a/crates/miden-protocol/src/account/mod.rs +++ b/crates/miden-protocol/src/account/mod.rs @@ -2,6 +2,7 @@ use alloc::string::ToString; use alloc::vec::Vec; use crate::asset::{Asset, AssetVault}; +use crate::crypto::SequentialCommit; use crate::errors::AccountError; use crate::utils::serde::{ ByteReader, @@ -54,6 +55,8 @@ pub use storage::{ PartialStorage, PartialStorageMap, StorageMap, + StorageMapKey, + StorageMapKeyHash, StorageMapWitness, StorageSlot, StorageSlotContent, @@ -158,16 +161,6 @@ impl Account { /// [`AccountId`]. Finally, a new account can then be instantiated from those parts using /// [`Account::new`]. /// - /// If the account type is faucet the reserved slot ([`AccountStorage::faucet_metadata_slot`]) - /// will be initialized as follows: - /// - For [`AccountType::FungibleFaucet`] the value is set to - /// [`StorageSlotContent::empty_value`]. - /// - For [`AccountType::NonFungibleFaucet`] the value is set to - /// [`StorageSlotContent::empty_map`]. - /// - /// If the storage needs to be initialized with certain values in that slot, those must be added - /// after construction with the standard set methods for items and maps. 
- /// /// # Errors /// /// Returns an error if: @@ -186,7 +179,7 @@ impl Account { validate_components_support_account_type(&components, account_type)?; let code = AccountCode::from_components_unchecked(&components)?; - let storage = AccountStorage::from_components(components, account_type)?; + let storage = AccountStorage::from_components(components)?; Ok((code, storage)) } @@ -204,33 +197,25 @@ impl Account { /// Returns the commitment of this account. /// - /// The commitment of an account is computed as hash(id, nonce, vault_root, storage_commitment, - /// code_commitment). Computing the account commitment requires 2 permutations of the hash - /// function. - pub fn commitment(&self) -> Word { - hash_account( - self.id, - self.nonce, - self.vault.root(), - self.storage.to_commitment(), - self.code.commitment(), - ) + /// See [`AccountHeader::to_commitment`] for details on how it is computed. + pub fn to_commitment(&self) -> Word { + AccountHeader::from(self).to_commitment() } /// Returns the commitment of this account as used for the initial account state commitment in /// transaction proofs. /// - /// For existing accounts, this is exactly the same as [Account::commitment()], however, for new - /// accounts this value is set to [crate::EMPTY_WORD]. This is because when a transaction is - /// executed against a new account, public input for the initial account state is set to - /// [crate::EMPTY_WORD] to distinguish new accounts from existing accounts. The actual - /// commitment of the initial account state (and the initial state itself), are provided to - /// the VM via the advice provider. + /// For existing accounts, this is exactly the same as [Account::to_commitment], however, for + /// new accounts this value is set to [crate::EMPTY_WORD]. This is because when a + /// transaction is executed against a new account, public input for the initial account + /// state is set to [crate::EMPTY_WORD] to distinguish new accounts from existing accounts. 
+ /// The actual commitment of the initial account state (and the initial state itself), are + /// provided to the VM via the advice provider. pub fn initial_commitment(&self) -> Word { if self.is_new() { Word::empty() } else { - self.commitment() + self.to_commitment() } } @@ -361,7 +346,7 @@ impl Account { pub fn increment_nonce(&mut self, nonce_delta: Felt) -> Result<(), AccountError> { let new_nonce = self.nonce + nonce_delta; - if new_nonce.as_int() < self.nonce.as_int() { + if new_nonce.as_canonical_u64() < self.nonce.as_canonical_u64() { return Err(AccountError::NonceOverflow { current: self.nonce, increment: nonce_delta, @@ -457,6 +442,18 @@ impl TryFrom for AccountDelta { } } +impl SequentialCommit for Account { + type Commitment = Word; + + fn to_elements(&self) -> Vec { + AccountHeader::from(self).to_elements() + } + + fn to_commitment(&self) -> Self::Commitment { + AccountHeader::from(self).to_commitment() + } +} + // SERIALIZATION // ================================================================================================ @@ -496,31 +493,6 @@ impl Deserializable for Account { } } -// HELPERS -// ================================================================================================ - -/// Returns hash of an account with the specified ID, nonce, vault root, storage commitment, and -/// code commitment. -/// -/// Hash of an account is computed as hash(id, nonce, vault_root, storage_commitment, -/// code_commitment). Computing the account commitment requires 2 permutations of the hash function. 
-pub fn hash_account( - id: AccountId, - nonce: Felt, - vault_root: Word, - storage_commitment: Word, - code_commitment: Word, -) -> Word { - let mut elements = [ZERO; 16]; - elements[0] = id.suffix(); - elements[1] = id.prefix().as_felt(); - elements[3] = nonce; - elements[4..8].copy_from_slice(&*vault_root); - elements[8..12].copy_from_slice(&*storage_commitment); - elements[12..].copy_from_slice(&*code_commitment); - Hasher::hash_elements(&elements) -} - // HELPER FUNCTIONS // ================================================================================================ @@ -581,7 +553,6 @@ mod tests { use assert_matches::assert_matches; use miden_assembly::Assembler; - use miden_core::FieldElement; use miden_crypto::utils::{Deserializable, Serializable}; use miden_crypto::{Felt, Word}; @@ -594,6 +565,7 @@ mod tests { AccountVaultDelta, }; use crate::account::AccountStorageMode::Network; + use crate::account::component::AccountComponentMetadata; use crate::account::{ Account, AccountBuilder, @@ -603,6 +575,7 @@ mod tests { PartialAccount, StorageMap, StorageMapDelta, + StorageMapKey, StorageSlot, StorageSlotContent, StorageSlotName, @@ -664,7 +637,7 @@ mod tests { let storage_slot_value_1 = StorageSlotContent::Value(Word::from([5, 6, 7, 8u32])); let mut storage_map = StorageMap::with_entries([ ( - Word::new([Felt::new(101), Felt::new(102), Felt::new(103), Felt::new(104)]), + StorageMapKey::from_array([101, 102, 103, 104]), Word::from([ Felt::new(1_u64), Felt::new(2_u64), @@ -673,7 +646,7 @@ mod tests { ]), ), ( - Word::new([Felt::new(105), Felt::new(106), Felt::new(107), Felt::new(108)]), + StorageMapKey::from_array([105, 106, 107, 108]), Word::new([Felt::new(5_u64), Felt::new(6_u64), Felt::new(7_u64), Felt::new(8_u64)]), ), ]) @@ -687,14 +660,11 @@ mod tests { ); // update storage map - let new_map_entry = ( - Word::new([Felt::new(101), Felt::new(102), Felt::new(103), Felt::new(104)]), - [Felt::new(9_u64), Felt::new(10_u64), Felt::new(11_u64), 
Felt::new(12_u64)], - ); + let key = StorageMapKey::from_array([101, 102, 103, 104]); + let value = Word::from([9, 10, 11, 12u32]); - let updated_map = - StorageMapDelta::from_iters([], [(new_map_entry.0, new_map_entry.1.into())]); - storage_map.insert(new_map_entry.0, new_map_entry.1.into()).unwrap(); + let updated_map = StorageMapDelta::from_iters([], [(key, value)]); + storage_map.insert(key, value).unwrap(); // build account delta let final_nonce = Felt::new(2); @@ -833,11 +803,15 @@ mod tests { let library1 = Assembler::default().assemble_library([code1]).unwrap(); // This component support all account types except the regular account with updatable code. - let component1 = AccountComponent::new(library1, vec![]) - .unwrap() - .with_supported_type(AccountType::FungibleFaucet) - .with_supported_type(AccountType::NonFungibleFaucet) - .with_supported_type(AccountType::RegularAccountImmutableCode); + let metadata = AccountComponentMetadata::new( + "test::component1", + [ + AccountType::FungibleFaucet, + AccountType::NonFungibleFaucet, + AccountType::RegularAccountImmutableCode, + ], + ); + let component1 = AccountComponent::new(library1, vec![], metadata).unwrap(); let err = Account::initialize_from_components( AccountType::RegularAccountUpdatableCode, diff --git a/crates/miden-protocol/src/account/partial.rs b/crates/miden-protocol/src/account/partial.rs index f7ae354dae..414c2ef067 100644 --- a/crates/miden-protocol/src/account/partial.rs +++ b/crates/miden-protocol/src/account/partial.rs @@ -1,14 +1,21 @@ use alloc::string::ToString; +use alloc::vec::Vec; -use miden_core::utils::{Deserializable, Serializable}; use miden_core::{Felt, ZERO}; use super::{Account, AccountCode, AccountId, PartialStorage}; use crate::Word; -use crate::account::{hash_account, validate_account_seed}; +use crate::account::{AccountHeader, validate_account_seed}; use crate::asset::PartialVault; +use crate::crypto::SequentialCommit; use crate::errors::AccountError; -use 
crate::utils::serde::DeserializationError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an account. /// @@ -115,26 +122,16 @@ impl PartialAccount { /// Returns the commitment of this account. /// - /// The commitment of an account is computed as: - /// - /// ```text - /// hash(id, nonce, vault_root, storage_commitment, code_commitment). - /// ``` - pub fn commitment(&self) -> Word { - hash_account( - self.id, - self.nonce, - self.vault().root(), - self.storage().commitment(), - self.code().commitment(), - ) + /// See [`AccountHeader::to_commitment`] for details on how it is computed. + pub fn to_commitment(&self) -> Word { + AccountHeader::from(self).to_commitment() } /// Returns the commitment of this account as used for the initial account state commitment in /// transaction proofs. /// - /// For existing accounts, this is exactly the same as [Account::commitment()], however, for new - /// accounts this value is set to [`Word::empty`]. This is because when a transaction is + /// For existing accounts, this is exactly the same as [Account::to_commitment], however, for + /// new accounts this value is set to [`Word::empty`]. This is because when a transaction is /// executed against a new account, public input for the initial account state is set to /// [`Word::empty`] to distinguish new accounts from existing accounts. 
The actual /// commitment of the initial account state (and the initial state itself), are provided to @@ -143,7 +140,7 @@ impl PartialAccount { if self.is_new() { Word::empty() } else { - self.commitment() + self.to_commitment() } } @@ -202,8 +199,22 @@ impl From<&Account> for PartialAccount { } } +impl SequentialCommit for PartialAccount { + type Commitment = Word; + + fn to_elements(&self) -> Vec { + AccountHeader::from(self).to_elements() + } + + fn to_commitment(&self) -> Self::Commitment { + AccountHeader::from(self).to_commitment() + } +} +// SERIALIZATION +// ================================================================================================ + impl Serializable for PartialAccount { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(self.id); target.write(self.nonce); target.write(&self.code); @@ -214,9 +225,7 @@ impl Serializable for PartialAccount { } impl Deserializable for PartialAccount { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account_id = source.read()?; let nonce = source.read()?; let account_code = source.read()?; diff --git a/crates/miden-protocol/src/account/storage/header.rs b/crates/miden-protocol/src/account/storage/header.rs index 359e6ff49e..cc809a28d6 100644 --- a/crates/miden-protocol/src/account/storage/header.rs +++ b/crates/miden-protocol/src/account/storage/header.rs @@ -5,6 +5,7 @@ use alloc::vec::Vec; use super::map::EMPTY_STORAGE_MAP_ROOT; use super::{AccountStorage, Felt, StorageSlotType, Word}; +use crate::ZERO; use crate::account::{StorageSlot, StorageSlotId, StorageSlotName}; use crate::crypto::SequentialCommit; use crate::errors::AccountError; @@ -15,7 +16,6 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{FieldElement, ZERO}; // ACCOUNT STORAGE HEADER // ================================================================================================ @@ -233,7 +233,8 @@ impl 
Serializable for AccountStorageHeader { impl Deserializable for AccountStorageHeader { fn read_from(source: &mut R) -> Result { let len = source.read_u8()?; - let slots: Vec = source.read_many(len as usize)?; + let slots: Vec = + source.read_many_iter(len as usize)?.collect::>()?; Self::new(slots).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -351,12 +352,12 @@ mod tests { use alloc::string::ToString; use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use super::AccountStorageHeader; use crate::Word; use crate::account::{AccountStorage, StorageSlotHeader, StorageSlotName, StorageSlotType}; use crate::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0, MOCK_VALUE_SLOT1}; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_from_account_storage() { diff --git a/crates/miden-protocol/src/account/storage/map/key.rs b/crates/miden-protocol/src/account/storage/map/key.rs new file mode 100644 index 0000000000..dda1a09c36 --- /dev/null +++ b/crates/miden-protocol/src/account/storage/map/key.rs @@ -0,0 +1,125 @@ +use alloc::string::String; + +use miden_crypto::merkle::smt::{LeafIndex, SMT_DEPTH}; +use miden_protocol_macros::WordWrapper; + +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, Word}; + +// STORAGE MAP KEY +// ================================================================================================ + +/// A raw, user-chosen key for a [`StorageMap`](super::StorageMap). +/// +/// Storage map keys are user-chosen and thus not necessarily uniformly distributed. To mitigate +/// potential tree imbalance, keys are hashed before being inserted into the underlying SMT. +/// +/// Use [`StorageMapKey::hash`] to produce the corresponding [`StorageMapKeyHash`] that is used +/// in the SMT. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, WordWrapper)] +pub struct StorageMapKey(Word); + +impl StorageMapKey { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// The serialized size of the map key in bytes. + pub const SERIALIZED_SIZE: usize = Word::SERIALIZED_SIZE; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`StorageMapKey`] from the given word. + pub fn new(word: Word) -> Self { + Self::from_raw(word) + } + + /// Returns the storage map key based on an empty word. + pub fn empty() -> Self { + Self::from_raw(Word::empty()) + } + + /// Creates a [`StorageMapKey`] from a `u32` index. + /// + /// This is a convenience constructor for the common pattern of using sequential indices + /// as storage map keys, producing a key of `[idx, 0, 0, 0]`. + pub fn from_index(idx: u32) -> Self { + Self::from_raw(Word::from([idx, 0, 0, 0])) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Hashes this raw map key to produce a [`StorageMapKeyHash`]. + /// + /// Storage map keys are hashed before being inserted into the SMT to ensure a uniform + /// key distribution. 
+ pub fn hash(&self) -> StorageMapKeyHash { + StorageMapKeyHash::from_raw(Hasher::hash_elements(self.0.as_elements())) + } +} + +impl From for Word { + fn from(key: StorageMapKey) -> Self { + key.0 + } +} + +impl core::fmt::Display for StorageMapKey { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_fmt(format_args!("{}", self.as_word())) + } +} + +impl Serializable for StorageMapKey { + fn write_into(&self, target: &mut W) { + target.write_many(self.as_word()); + } + + fn get_size_hint(&self) -> usize { + Self::SERIALIZED_SIZE + } +} + +impl Deserializable for StorageMapKey { + fn read_from(source: &mut R) -> Result { + let key = source.read()?; + Ok(StorageMapKey::from_raw(key)) + } +} + +// STORAGE MAP KEY HASH +// ================================================================================================ + +/// A hashed key for a [`StorageMap`](super::StorageMap). +/// +/// This is produced by hashing a [`StorageMapKey`] and is used as the actual key in the +/// underlying SMT. Wrapping the hashed key in a distinct type prevents accidentally using a raw +/// key where a hashed key is expected and vice-versa. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, WordWrapper)] +pub struct StorageMapKeyHash(Word); + +impl StorageMapKeyHash { + /// Returns the leaf index in the SMT for this hashed key. 
+ pub fn to_leaf_index(&self) -> LeafIndex { + self.0.into() + } +} + +impl From for Word { + fn from(key: StorageMapKeyHash) -> Self { + key.0 + } +} + +impl From for StorageMapKeyHash { + fn from(key: StorageMapKey) -> Self { + key.hash() + } +} diff --git a/crates/miden-protocol/src/account/storage/map/mod.rs b/crates/miden-protocol/src/account/storage/map/mod.rs index 46d489a91c..66df39e909 100644 --- a/crates/miden-protocol/src/account/storage/map/mod.rs +++ b/crates/miden-protocol/src/account/storage/map/mod.rs @@ -4,12 +4,14 @@ use miden_core::EMPTY_WORD; use miden_crypto::merkle::EmptySubtreeRoots; use super::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, Word}; -use crate::Hasher; use crate::account::StorageMapDelta; use crate::crypto::merkle::InnerNodeInfo; use crate::crypto::merkle::smt::{LeafIndex, SMT_DEPTH, Smt, SmtLeaf}; use crate::errors::{AccountError, StorageMapError}; +mod key; +pub use key::{StorageMapKey, StorageMapKeyHash}; + mod partial; pub use partial::PartialStorageMap; @@ -43,11 +45,11 @@ pub const EMPTY_STORAGE_MAP_ROOT: Word = *EmptySubtreeRoots::entry(StorageMap::D pub struct StorageMap { /// The SMT where each key is the hashed original key. smt: Smt, - /// The entries of the map where the key is the raw user-chosen one. + /// The entries of the map that retains the original unhashed keys (i.e. [`StorageMapKey`]). /// /// It is an invariant of this type that the map's entries are always consistent with the SMT's /// entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl StorageMap { @@ -79,8 +81,8 @@ impl StorageMap { /// /// Returns an error if: /// - the provided entries contain multiple values for the same key. - pub fn with_entries>( - entries: impl IntoIterator, + pub fn with_entries>( + entries: impl IntoIterator, ) -> Result { let mut map = BTreeMap::new(); @@ -98,8 +100,8 @@ impl StorageMap { } /// Creates a new [`StorageMap`] from the given map. For internal use. 
- fn from_btree_map(entries: BTreeMap) -> Self { - let hashed_keys_iter = entries.iter().map(|(key, value)| (Self::hash_key(*key), *value)); + fn from_btree_map(entries: BTreeMap) -> Self { + let hashed_keys_iter = entries.iter().map(|(key, value)| (key.hash().as_word(), *value)); let smt = Smt::with_entries(hashed_keys_iter) .expect("btree maps should not contain duplicate keys"); @@ -132,21 +134,20 @@ impl StorageMap { /// Returns the value corresponding to the key or [`Self::EMPTY_VALUE`] if the key is not /// associated with a value. - pub fn get(&self, raw_key: &Word) -> Word { - self.entries.get(raw_key).copied().unwrap_or_default() + pub fn get(&self, key: &StorageMapKey) -> Word { + self.entries.get(key).copied().unwrap_or_default() } - /// Returns an opening of the leaf associated with raw key. + /// Returns an opening of the leaf associated with the given key. /// /// Conceptually, an opening is a Merkle path to the leaf, as well as the leaf itself. - pub fn open(&self, raw_key: &Word) -> StorageMapWitness { - let hashed_map_key = Self::hash_key(*raw_key); - let smt_proof = self.smt.open(&hashed_map_key); - let value = self.entries.get(raw_key).copied().unwrap_or_default(); + pub fn open(&self, key: &StorageMapKey) -> StorageMapWitness { + let smt_proof = self.smt.open(&key.hash().as_word()); + let value = self.entries.get(key).copied().unwrap_or_default(); // SAFETY: The key value pair is guaranteed to be present in the provided proof since we // open its hashed version and because of the guarantees of the storage map. - StorageMapWitness::new_unchecked(smt_proof, [(*raw_key, value)]) + StorageMapWitness::new_unchecked(smt_proof, [(*key, value)]) } // ITERATORS @@ -158,9 +159,7 @@ impl StorageMap { } /// Returns an iterator over the key-value pairs in this storage map. - /// - /// Note that the returned key is the raw map key. 
- pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -176,16 +175,16 @@ impl StorageMap { /// [`Self::EMPTY_VALUE`] if no entry was previously present. /// /// If the provided `value` is [`Self::EMPTY_VALUE`] the entry will be removed. - pub fn insert(&mut self, raw_key: Word, value: Word) -> Result { + pub fn insert(&mut self, key: StorageMapKey, value: Word) -> Result { if value == EMPTY_WORD { - self.entries.remove(&raw_key); + self.entries.remove(&key); } else { - self.entries.insert(raw_key, value); + self.entries.insert(key, value); } - let hashed_key = Self::hash_key(raw_key); + let hashed_key = key.hash(); self.smt - .insert(hashed_key, value) + .insert(hashed_key.into(), value) .map_err(AccountError::MaxNumStorageMapLeavesExceeded) } @@ -200,27 +199,9 @@ impl StorageMap { } /// Consumes the map and returns the underlying map of entries. - pub fn into_entries(self) -> BTreeMap { + pub fn into_entries(self) -> BTreeMap { self.entries } - - // UTILITY FUNCTIONS - // -------------------------------------------------------------------------------------------- - - /// Hashes the given key to get the key of the SMT. - pub fn hash_key(raw_key: Word) -> Word { - Hasher::hash_elements(raw_key.as_elements()) - } - - /// Returns leaf index of a raw map key. - pub fn map_key_to_leaf_index(raw_key: Word) -> LeafIndex { - Self::hash_key(raw_key).into() - } - - /// Returns the leaf index of a map key. 
- pub fn hashed_map_key_to_leaf_index(hashed_map_key: Word) -> LeafIndex { - hashed_map_key.into() - } } impl Default for StorageMap { @@ -253,7 +234,14 @@ impl Deserializable for StorageMap { mod tests { use assert_matches::assert_matches; - use super::{Deserializable, EMPTY_STORAGE_MAP_ROOT, Serializable, StorageMap, Word}; + use super::{ + Deserializable, + EMPTY_STORAGE_MAP_ROOT, + Serializable, + StorageMap, + StorageMapKey, + Word, + }; use crate::errors::StorageMapError; #[test] @@ -264,9 +252,9 @@ mod tests { assert_eq!(storage_map_default, StorageMap::read_from_bytes(&bytes).unwrap()); // StorageMap with values - let storage_map_leaves_2: [(Word, Word); 2] = [ - (Word::from([101, 102, 103, 104u32]), Word::from([1, 2, 3, 4u32])), - (Word::from([105, 106, 107, 108u32]), Word::from([5, 6, 7, 8u32])), + let storage_map_leaves_2 = [ + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([1, 2, 3, 4u32])), + (StorageMapKey::from_array([105, 106, 107, 108]), Word::from([5, 6, 7, 8u32])), ]; let storage_map = StorageMap::with_entries(storage_map_leaves_2).unwrap(); assert_eq!(storage_map.num_entries(), 2); @@ -289,9 +277,9 @@ mod tests { #[test] fn account_storage_map_fails_on_duplicate_entries() { // StorageMap with values - let storage_map_leaves_2: [(Word, Word); 2] = [ - (Word::from([101, 102, 103, 104u32]), Word::from([1, 2, 3, 4u32])), - (Word::from([101, 102, 103, 104u32]), Word::from([5, 6, 7, 8u32])), + let storage_map_leaves_2 = [ + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([1, 2, 3, 4u32])), + (StorageMapKey::from_array([101, 102, 103, 104]), Word::from([5, 6, 7, 8u32])), ]; let error = StorageMap::with_entries(storage_map_leaves_2).unwrap_err(); diff --git a/crates/miden-protocol/src/account/storage/map/partial.rs b/crates/miden-protocol/src/account/storage/map/partial.rs index 55f1f408f7..e120c32d8d 100644 --- a/crates/miden-protocol/src/account/storage/map/partial.rs +++ 
b/crates/miden-protocol/src/account/storage/map/partial.rs @@ -1,12 +1,17 @@ use alloc::collections::BTreeMap; -use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::Word; use miden_crypto::merkle::smt::{LeafIndex, PartialSmt, SMT_DEPTH, SmtLeaf, SmtProof}; use miden_crypto::merkle::{InnerNodeInfo, MerkleError}; -use crate::account::{StorageMap, StorageMapWitness}; -use crate::utils::serde::{ByteReader, DeserializationError}; +use crate::account::{StorageMap, StorageMapKey, StorageMapWitness}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of a [`StorageMap`], containing only proofs for a subset of the /// key-value pairs. @@ -25,11 +30,11 @@ use crate::utils::serde::{ByteReader, DeserializationError}; #[derive(Clone, Debug, PartialEq, Eq, Default)] pub struct PartialStorageMap { partial_smt: PartialSmt, - /// The entries of the map where the key is the raw user-chosen one. + /// The entries of the map that retains the original unhashed keys (i.e. [`StorageMapKey`]). /// /// It is an invariant of this type that the map's entries are always consistent with the /// partial SMT's entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl PartialStorageMap { @@ -97,13 +102,13 @@ impl PartialStorageMap { /// - a non-empty [`Word`] if the key is tracked by this map and exists in it, /// - [`Word::empty`] if the key is tracked by this map and does not exist, /// - `None` if the key is not tracked by this map. - pub fn get(&self, raw_key: &Word) -> Option { - let hashed_key = StorageMap::hash_key(*raw_key); + pub fn get(&self, key: &StorageMapKey) -> Option { + let hash_word = key.hash().as_word(); // This returns an error if the key is not tracked which we map to a `None`. - self.partial_smt.get_value(&hashed_key).ok() + self.partial_smt.get_value(&hash_word).ok() } - /// Returns an opening of the leaf associated with the raw key. 
+ /// Returns an opening of the leaf associated with the given key. /// /// Conceptually, an opening is a Merkle path to the leaf, as well as the leaf itself. /// @@ -111,14 +116,13 @@ impl PartialStorageMap { /// /// Returns an error if: /// - the key is not tracked by this partial storage map. - pub fn open(&self, raw_key: &Word) -> Result { - let hashed_key = StorageMap::hash_key(*raw_key); - let smt_proof = self.partial_smt.open(&hashed_key)?; - let value = self.entries.get(raw_key).copied().unwrap_or_default(); + pub fn open(&self, key: &StorageMapKey) -> Result { + let smt_proof = self.partial_smt.open(&key.hash().as_word())?; + let value = self.entries.get(key).copied().unwrap_or_default(); // SAFETY: The key value pair is guaranteed to be present in the provided proof since we // open its hashed version and because of the guarantees of the partial storage map. - Ok(StorageMapWitness::new_unchecked(smt_proof, [(*raw_key, value)])) + Ok(StorageMapWitness::new_unchecked(smt_proof, [(*key, value)])) } // ITERATORS @@ -130,9 +134,7 @@ impl PartialStorageMap { } /// Returns an iterator over the key-value pairs in this storage map. - /// - /// Note that the returned key is the raw map key. 
- pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -152,7 +154,7 @@ impl PartialStorageMap { } impl Serializable for PartialStorageMap { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(&self.partial_smt); target.write_usize(self.entries.len()); target.write_many(self.entries.keys()); @@ -167,8 +169,8 @@ impl Deserializable for PartialStorageMap { let num_entries: usize = source.read()?; for _ in 0..num_entries { - let key: Word = source.read()?; - let hashed_map_key = StorageMap::hash_key(key); + let key: StorageMapKey = source.read()?; + let hashed_map_key: Word = key.hash().into(); let value = partial_smt.get_value(&hashed_map_key).map_err(|err| { DeserializationError::InvalidValue(format!( "failed to find map key {key} in partial SMT: {err}" diff --git a/crates/miden-protocol/src/account/storage/map/witness.rs b/crates/miden-protocol/src/account/storage/map/witness.rs index f70a8359af..a8566f992e 100644 --- a/crates/miden-protocol/src/account/storage/map/witness.rs +++ b/crates/miden-protocol/src/account/storage/map/witness.rs @@ -4,7 +4,7 @@ use miden_crypto::merkle::InnerNodeInfo; use miden_crypto::merkle::smt::SmtProof; use crate::Word; -use crate::account::StorageMap; +use crate::account::StorageMapKey; use crate::errors::StorageMapError; /// A witness of an asset in a [`StorageMap`](super::StorageMap). @@ -26,7 +26,7 @@ pub struct StorageMapWitness { /// /// It is an invariant of this type that the map's entries are always consistent with the SMT's /// entries and vice-versa. - entries: BTreeMap, + entries: BTreeMap, } impl StorageMapWitness { @@ -41,21 +41,20 @@ impl StorageMapWitness { /// - Any of the map keys is not contained in the proof. 
pub fn new( proof: SmtProof, - raw_keys: impl IntoIterator, + keys: impl IntoIterator, ) -> Result { let mut entries = BTreeMap::new(); - for raw_key in raw_keys.into_iter() { - let hashed_map_key = StorageMap::hash_key(raw_key); - let value = - proof.get(&hashed_map_key).ok_or(StorageMapError::MissingKey { raw_key })?; - entries.insert(raw_key, value); + for key in keys.into_iter() { + let hashed_map_key = key.hash().as_word(); + let value = proof.get(&hashed_map_key).ok_or(StorageMapError::MissingKey { key })?; + entries.insert(key, value); } Ok(Self { proof, entries }) } - /// Creates a new [`StorageMapWitness`] from an SMT proof and a set of raw key value pairs. + /// Creates a new [`StorageMapWitness`] from an SMT proof and a set of key value pairs. /// /// # Warning /// @@ -63,11 +62,11 @@ impl StorageMapWitness { /// details. pub fn new_unchecked( proof: SmtProof, - raw_key_values: impl IntoIterator, + key_values: impl IntoIterator, ) -> Self { Self { proof, - entries: raw_key_values.into_iter().collect(), + entries: key_values.into_iter().collect(), } } @@ -83,15 +82,13 @@ impl StorageMapWitness { /// - a non-empty [`Word`] if the key is tracked by this witness and exists in it, /// - [`Word::empty`] if the key is tracked by this witness and does not exist, /// - `None` if the key is not tracked by this witness. - pub fn get(&self, raw_key: &Word) -> Option { - let hashed_key = StorageMap::hash_key(*raw_key); - self.proof.get(&hashed_key) + pub fn get(&self, key: StorageMapKey) -> Option { + let hash_word = key.hash().as_word(); + self.proof.get(&hash_word) } /// Returns an iterator over the key-value pairs in this witness. - /// - /// Note that the returned key is the raw map key. 
- pub fn entries(&self) -> impl Iterator { + pub fn entries(&self) -> impl Iterator { self.entries.iter() } @@ -120,7 +117,7 @@ mod tests { #[test] fn creating_witness_fails_on_missing_key() { // Create a storage map with one key-value pair - let key1 = Word::from([1, 2, 3, 4u32]); + let key1 = StorageMapKey::from_array([1, 2, 3, 4]); let value1 = Word::from([10, 20, 30, 40u32]); let entries = [(key1, value1)]; let storage_map = StorageMap::with_entries(entries).unwrap(); @@ -129,11 +126,11 @@ mod tests { let proof = storage_map.open(&key1).into(); // Try to create a witness for a different key that's not in the proof - let missing_key = Word::from([5, 6, 7, 8u32]); + let missing_key = StorageMapKey::from_array([5, 6, 7, 8u32]); let result = StorageMapWitness::new(proof, [missing_key]); - assert_matches!(result, Err(StorageMapError::MissingKey { raw_key }) => { - assert_eq!(raw_key, missing_key); + assert_matches!(result, Err(StorageMapError::MissingKey { key }) => { + assert_eq!(key, missing_key); }); } } diff --git a/crates/miden-protocol/src/account/storage/mod.rs b/crates/miden-protocol/src/account/storage/mod.rs index a2e2f44c21..4f3217f905 100644 --- a/crates/miden-protocol/src/account/storage/mod.rs +++ b/crates/miden-protocol/src/account/storage/mod.rs @@ -12,15 +12,14 @@ use super::{ Serializable, Word, }; -use crate::account::{AccountComponent, AccountType}; +use crate::account::AccountComponent; use crate::crypto::SequentialCommit; -use crate::utils::sync::LazyLock; mod slot; pub use slot::{StorageSlot, StorageSlotContent, StorageSlotId, StorageSlotName, StorageSlotType}; mod map; -pub use map::{PartialStorageMap, StorageMap, StorageMapWitness}; +pub use map::{PartialStorageMap, StorageMap, StorageMapKey, StorageMapKeyHash, StorageMapWitness}; mod header; pub use header::{AccountStorageHeader, StorageSlotHeader}; @@ -28,19 +27,6 @@ pub use header::{AccountStorageHeader, StorageSlotHeader}; mod partial; pub use partial::PartialStorage; -static 
FAUCET_SYSDATA_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::protocol::faucet::sysdata") - .expect("storage slot name should be valid") -}); - -static RESERVED_SLOT_NAMES: LazyLock> = - LazyLock::new(|| vec![FAUCET_SYSDATA_SLOT_NAME.clone()]); - -/// Returns `true` if the provided slot name is reserved by the protocol. -pub fn is_reserved_slot_name(slot_name: &StorageSlotName) -> bool { - RESERVED_SLOT_NAMES.iter().any(|reserved| reserved.id() == slot_name.id()) -} - // ACCOUNT STORAGE // ================================================================================================ @@ -90,7 +76,7 @@ impl AccountStorage { } // Unstable sort is fine because we require all names to be unique. - slots.sort_unstable(); + slots.sort_unstable_by(|a, b| a.name().cmp(b.name())); // Check for slot name uniqueness by checking each neighboring slot's IDs. This is // sufficient because the slots are sorted. @@ -105,42 +91,21 @@ impl AccountStorage { /// Creates an [`AccountStorage`] from the provided components' storage slots. /// - /// If the account type is faucet the reserved slot (slot 0) will be initialized. - /// - For Fungible Faucets the value is [`StorageSlot::empty_value`]. - /// - For Non-Fungible Faucets the value is [`StorageSlot::empty_map`]. - /// - /// If the storage needs to be initialized with certain values in that slot, those can be added - /// after construction with the standard set methods for items and maps. - /// /// # Errors /// /// Returns an error if: /// - The number of [`StorageSlot`]s of all components exceeds 255. - /// - Any component accesses [`AccountStorage::faucet_sysdata_slot`]. + /// - There are multiple storage slots with the same [`StorageSlotName`]. 
pub(super) fn from_components( components: Vec, - account_type: AccountType, ) -> Result { - let mut storage_slots = match account_type { - AccountType::FungibleFaucet => { - vec![StorageSlot::with_empty_value(Self::faucet_sysdata_slot().clone())] - }, - AccountType::NonFungibleFaucet => { - vec![StorageSlot::with_empty_map(Self::faucet_sysdata_slot().clone())] - }, - _ => vec![], - }; - - for component_slot in components.into_iter().flat_map(|component| { - let AccountComponent { storage_slots, .. } = component; - storage_slots.into_iter() - }) { - if is_reserved_slot_name(component_slot.name()) { - return Err(AccountError::StorageSlotNameMustNotBeFaucetSysdata); - } - - storage_slots.push(component_slot); - } + let storage_slots = components + .into_iter() + .flat_map(|component| { + let AccountComponent { storage_slots, .. } = component; + storage_slots.into_iter() + }) + .collect(); Self::new(storage_slots) } @@ -148,11 +113,6 @@ impl AccountStorage { // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- - /// Returns the [`StorageSlotName`] of the faucet's protocol system data. - pub fn faucet_sysdata_slot() -> &'static StorageSlotName { - &FAUCET_SYSDATA_SLOT_NAME - } - /// Converts storage slots of this account storage into a vector of field elements. 
/// /// Each storage slot is represented by exactly 8 elements: @@ -231,7 +191,7 @@ impl AccountStorage { self.get(slot_name) .ok_or_else(|| AccountError::StorageSlotNameNotFound { slot_name: slot_name.clone() }) .and_then(|slot| match slot.content() { - StorageSlotContent::Map(map) => Ok(map.get(&key)), + StorageSlotContent::Map(map) => Ok(map.get(&StorageMapKey::from_raw(key))), _ => Err(AccountError::StorageSlotNotMap(slot_name.clone())), }) } @@ -311,7 +271,7 @@ impl AccountStorage { pub fn set_map_item( &mut self, slot_name: &StorageSlotName, - raw_key: Word, + key: StorageMapKey, value: Word, ) -> Result<(Word, Word), AccountError> { let slot = self.get_mut(slot_name).ok_or_else(|| { @@ -324,7 +284,7 @@ impl AccountStorage { let old_root = storage_map.root(); - let old_value = storage_map.insert(raw_key, value)?; + let old_value = storage_map.insert(key, value)?; Ok((old_root, old_value)) } @@ -388,7 +348,7 @@ impl Serializable for AccountStorage { impl Deserializable for AccountStorage { fn read_from(source: &mut R) -> Result { let num_slots = source.read_u8()? 
as usize; - let slots = source.read_many::(num_slots)?; + let slots = source.read_many_iter::(num_slots)?.collect::>()?; Self::new(slots).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -462,7 +422,7 @@ mod tests { assert_eq!(name, slot_name0); }); - slots.sort_unstable(); + slots.sort_unstable_by(|a, b| a.name().cmp(b.name())); let err = AccountStorageHeader::new(slots.iter().map(StorageSlotHeader::from).collect()) .unwrap_err(); diff --git a/crates/miden-protocol/src/account/storage/partial.rs b/crates/miden-protocol/src/account/storage/partial.rs index 41e2500bac..7d5ef2d18d 100644 --- a/crates/miden-protocol/src/account/storage/partial.rs +++ b/crates/miden-protocol/src/account/storage/partial.rs @@ -1,6 +1,5 @@ use alloc::collections::{BTreeMap, BTreeSet}; -use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::Word; use miden_crypto::merkle::InnerNodeInfo; use miden_crypto::merkle::smt::SmtLeaf; @@ -8,6 +7,13 @@ use miden_crypto::merkle::smt::SmtLeaf; use super::{AccountStorage, AccountStorageHeader, StorageSlotContent}; use crate::account::PartialStorageMap; use crate::errors::AccountError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an account storage, containing only a subset of the storage data. 
/// @@ -133,16 +139,14 @@ impl PartialStorage { } impl Serializable for PartialStorage { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { target.write(&self.header); target.write(&self.maps); } } impl Deserializable for PartialStorage { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let header: AccountStorageHeader = source.read()?; let map_smts: BTreeMap = source.read()?; @@ -163,14 +167,15 @@ mod tests { PartialStorage, PartialStorageMap, StorageMap, + StorageMapKey, StorageSlot, StorageSlotName, }; #[test] pub fn new_partial_storage() -> anyhow::Result<()> { - let map_key_present: Word = [1u64, 2, 3, 4].try_into()?; - let map_key_absent: Word = [9u64, 12, 18, 3].try_into()?; + let map_key_present = StorageMapKey::from_array([1, 2, 3, 4]); + let map_key_absent = StorageMapKey::from_array([9, 12, 18, 3]); let mut map_1 = StorageMap::new(); map_1.insert(map_key_absent, Word::try_from([1u64, 2, 3, 2])?).unwrap(); diff --git a/crates/miden-protocol/src/account/storage/slot/slot_content.rs b/crates/miden-protocol/src/account/storage/slot/slot_content.rs index 746391de54..dfc37017dd 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_content.rs +++ b/crates/miden-protocol/src/account/storage/slot/slot_content.rs @@ -1,10 +1,15 @@ use miden_core::EMPTY_WORD; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::StorageSlotType; use crate::account::storage::map::EMPTY_STORAGE_MAP_ROOT; use crate::account::storage::{StorageMap, Word}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // STORAGE SLOT CONTENT // ================================================================================================ @@ -116,9 +121,8 @@ impl Deserializable for StorageSlotContent { #[cfg(test)] mod tests { - use 
miden_core::utils::{Deserializable, Serializable}; - use crate::account::AccountStorage; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_storage_slot_content() { diff --git a/crates/miden-protocol/src/account/storage/slot/slot_id.rs b/crates/miden-protocol/src/account/storage/slot/slot_id.rs index b22b46b2dd..b589cfef6d 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_id.rs +++ b/crates/miden-protocol/src/account/storage/slot/slot_id.rs @@ -61,17 +61,17 @@ impl StorageSlotId { /// Returns the [`StorageSlotId`]'s felts encoded into a u128. fn as_u128(&self) -> u128 { let mut le_bytes = [0_u8; 16]; - le_bytes[..8].copy_from_slice(&self.suffix().as_int().to_le_bytes()); - le_bytes[8..].copy_from_slice(&self.prefix().as_int().to_le_bytes()); + le_bytes[..8].copy_from_slice(&self.suffix().as_canonical_u64().to_le_bytes()); + le_bytes[8..].copy_from_slice(&self.prefix().as_canonical_u64().to_le_bytes()); u128::from_le_bytes(le_bytes) } } impl Ord for StorageSlotId { fn cmp(&self, other: &Self) -> Ordering { - match self.prefix.as_int().cmp(&other.prefix.as_int()) { + match self.prefix.as_canonical_u64().cmp(&other.prefix.as_canonical_u64()) { ord @ Ordering::Less | ord @ Ordering::Greater => ord, - Ordering::Equal => self.suffix.as_int().cmp(&other.suffix.as_int()), + Ordering::Equal => self.suffix.as_canonical_u64().cmp(&other.suffix.as_canonical_u64()), } } } @@ -84,8 +84,8 @@ impl PartialOrd for StorageSlotId { impl Hash for StorageSlotId { fn hash(&self, state: &mut H) { - self.suffix.inner().hash(state); - self.prefix.inner().hash(state); + self.suffix.as_canonical_u64().hash(state); + self.prefix.as_canonical_u64().hash(state); } } diff --git a/crates/miden-protocol/src/account/storage/slot/slot_name.rs b/crates/miden-protocol/src/account/storage/slot/slot_name.rs index 27d0c7ce45..08892d5de1 100644 --- a/crates/miden-protocol/src/account/storage/slot/slot_name.rs +++ 
b/crates/miden-protocol/src/account/storage/slot/slot_name.rs @@ -5,7 +5,13 @@ use core::str::FromStr; use crate::account::storage::slot::StorageSlotId; use crate::errors::StorageSlotNameError; -use crate::utils::serde::{ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The name of an account storage slot. /// @@ -100,7 +106,7 @@ impl StorageSlotName { /// We must check the validity of a slot name against the raw bytes of the UTF-8 string because /// typical character APIs are not available in a const version. We can do this because any byte /// in a UTF-8 string that is an ASCII character never represents anything other than such a - /// character, even though UTF-8 can contain multibyte sequences: + /// character, even though UTF-8 can contain multi-byte sequences: /// /// > UTF-8, the object of this memo, has a one-octet encoding unit. It uses all bits of an /// > octet, but has the quality of preserving the full US-ASCII range: US-ASCII characters @@ -210,6 +216,22 @@ impl FromStr for StorageSlotName { } } +impl TryFrom<&str> for StorageSlotName { + type Error = StorageSlotNameError; + + fn try_from(value: &str) -> Result { + value.parse() + } +} + +impl TryFrom for StorageSlotName { + type Error = StorageSlotNameError; + + fn try_from(value: String) -> Result { + value.parse() + } +} + impl From for String { fn from(slot_name: StorageSlotName) -> Self { slot_name.name.to_string() @@ -229,11 +251,9 @@ impl Serializable for StorageSlotName { } impl Deserializable for StorageSlotName { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let len = source.read_u8()?; - let name = source.read_many(len as usize)?; + let name = source.read_many_iter(len as usize)?.collect::>()?; String::from_utf8(name) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) .and_then(|name| { diff 
--git a/crates/miden-protocol/src/account/storage/slot/storage_slot.rs b/crates/miden-protocol/src/account/storage/slot/storage_slot.rs index 9fce734847..37da4c86a9 100644 --- a/crates/miden-protocol/src/account/storage/slot/storage_slot.rs +++ b/crates/miden-protocol/src/account/storage/slot/storage_slot.rs @@ -1,6 +1,13 @@ use crate::Word; use crate::account::storage::slot::StorageSlotId; use crate::account::{StorageMap, StorageSlotContent, StorageSlotName, StorageSlotType}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// An individual storage slot in [`AccountStorage`](crate::account::AccountStorage). /// @@ -101,23 +108,11 @@ impl StorageSlot { } } -impl Ord for StorageSlot { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.name().cmp(&other.name) - } -} - -impl PartialOrd for StorageSlot { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - // SERIALIZATION // ================================================================================================ -impl crate::utils::serde::Serializable for StorageSlot { - fn write_into(&self, target: &mut W) { +impl Serializable for StorageSlot { + fn write_into(&self, target: &mut W) { target.write(&self.name); target.write(&self.content); } @@ -127,10 +122,8 @@ impl crate::utils::serde::Serializable for StorageSlot { } } -impl crate::utils::serde::Deserializable for StorageSlot { - fn read_from( - source: &mut R, - ) -> Result { +impl Deserializable for StorageSlot { + fn read_from(source: &mut R) -> Result { let name: StorageSlotName = source.read()?; let content: StorageSlotContent = source.read()?; diff --git a/crates/miden-protocol/src/account/storage/slot/type.rs b/crates/miden-protocol/src/account/storage/slot/type.rs index 25bd5a3a60..baa12f204c 100644 --- a/crates/miden-protocol/src/account/storage/slot/type.rs +++ b/crates/miden-protocol/src/account/storage/slot/type.rs @@ -114,10 +114,9 @@ 
impl Deserializable for StorageSlotType { #[cfg(test)] mod tests { - use miden_core::utils::{Deserializable, Serializable}; - + use crate::Felt; use crate::account::StorageSlotType; - use crate::{Felt, FieldElement}; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_serde_account_storage_slot_type() { diff --git a/crates/miden-protocol/src/address/address_id.rs b/crates/miden-protocol/src/address/address_id.rs index e6cdbdddeb..70c8eb14df 100644 --- a/crates/miden-protocol/src/address/address_id.rs +++ b/crates/miden-protocol/src/address/address_id.rs @@ -2,13 +2,17 @@ use alloc::string::ToString; use bech32::Bech32m; use bech32::primitives::decode::CheckedHrpstring; -use miden_processor::DeserializationError; -use crate::account::{AccountId, AccountStorageMode}; +use crate::account::AccountId; use crate::address::{AddressType, NetworkId}; use crate::errors::{AddressError, Bech32Error}; -use crate::note::NoteTag; -use crate::utils::serde::{ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The identifier of an [`Address`](super::Address). /// @@ -27,23 +31,6 @@ impl AddressId { } } - /// Returns the default tag length of the ID. - /// - /// This is guaranteed to be in range `0..=30` (e.g. the maximum of - /// [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`] and - /// [`NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH`]). - pub fn default_note_tag_len(&self) -> u8 { - match self { - AddressId::AccountId(id) => { - if id.storage_mode() == AccountStorageMode::Network { - NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH - } else { - NoteTag::DEFAULT_LOCAL_ACCOUNT_TARGET_TAG_LENGTH - } - }, - } - } - /// Decodes a bech32 string into an identifier. 
pub(crate) fn decode(bech32_string: &str) -> Result<(NetworkId, Self), AddressError> { // We use CheckedHrpString with an explicit checksum algorithm so we don't allow the @@ -100,9 +87,7 @@ impl Serializable for AddressId { } impl Deserializable for AddressId { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let address_type: u8 = source.read_u8()?; let address_type = AddressType::try_from(address_type) .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; diff --git a/crates/miden-protocol/src/address/mod.rs b/crates/miden-protocol/src/address/mod.rs index 91c9438569..8a99b8eb0d 100644 --- a/crates/miden-protocol/src/address/mod.rs +++ b/crates/miden-protocol/src/address/mod.rs @@ -1,5 +1,4 @@ mod r#type; -use alloc::string::ToString; pub use r#type::AddressType; @@ -13,14 +12,18 @@ mod network_id; use alloc::string::String; pub use interface::AddressInterface; -use miden_processor::DeserializationError; pub use network_id::{CustomNetworkId, NetworkId}; -use crate::account::AccountStorageMode; use crate::crypto::ies::SealingKey; use crate::errors::AddressError; use crate::note::NoteTag; -use crate::utils::serde::{ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; mod address_id; pub use address_id::AddressId; @@ -79,35 +82,10 @@ impl Address { Self { id: id.into(), routing_params: None } } - /// For local (both public and private) accounts, up to 30 bits can be encoded into the tag. - /// For network accounts, the tag length must be set to 30 bits. - /// - /// # Errors - /// - /// Returns an error if: - /// - The tag length routing parameter is not - /// [`NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH`] for network accounts. 
- pub fn with_routing_parameters( - mut self, - routing_params: RoutingParameters, - ) -> Result { - if let Some(tag_len) = routing_params.note_tag_len() { - match self.id { - AddressId::AccountId(account_id) => { - if account_id.storage_mode() == AccountStorageMode::Network - && tag_len != NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH - { - return Err(AddressError::CustomTagLengthNotAllowedForNetworkAccounts( - tag_len, - )); - } - }, - } - } - + /// Sets the routing parameters of the address. + pub fn with_routing_parameters(mut self, routing_params: RoutingParameters) -> Self { self.routing_params = Some(routing_params); - - Ok(self) + self } // ACCESSORS @@ -125,14 +103,13 @@ impl Address { /// Returns the preferred tag length. /// - /// This is guaranteed to be in range `0..=30` (e.g. the maximum of - /// [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`] and - /// [`NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH`]). + /// This is guaranteed to be in range `0..=32` (i.e. at most + /// [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`]). pub fn note_tag_len(&self) -> u8 { self.routing_params .as_ref() .and_then(RoutingParameters::note_tag_len) - .unwrap_or(self.id.default_note_tag_len()) + .unwrap_or(NoteTag::DEFAULT_ACCOUNT_TARGET_TAG_LENGTH) } /// Returns a note tag derived from this address. 
@@ -140,15 +117,11 @@ impl Address { let note_tag_len = self.note_tag_len(); match self.id { - AddressId::AccountId(id) => { - match id.storage_mode() { - AccountStorageMode::Network => NoteTag::from_network_account_id(id), - AccountStorageMode::Private | AccountStorageMode::Public => { - NoteTag::with_custom_account_target(id, note_tag_len) - .expect("address should validate that tag len does not exceed MAX_ACCOUNT_TARGET_TAG_LENGTH bits") - } - } - }, + AddressId::AccountId(id) => NoteTag::with_custom_account_target(id, note_tag_len) + .expect( + "address should validate that tag len does not exceed \ + MAX_ACCOUNT_TARGET_TAG_LENGTH bits", + ), } } @@ -201,7 +174,7 @@ impl Address { if let Some(encoded_routing_params) = split.next() { let routing_params = RoutingParameters::decode(encoded_routing_params.to_owned())?; - address = address.with_routing_parameters(routing_params)?; + address = address.with_routing_parameters(routing_params); } Ok((network_id, address)) @@ -216,18 +189,14 @@ impl Serializable for Address { } impl Deserializable for Address { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let identifier: AddressId = source.read()?; let routing_params: Option = source.read()?; let mut address = Self::new(identifier); if let Some(routing_params) = routing_params { - address = address - .with_routing_parameters(routing_params) - .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; + address = address.with_routing_parameters(routing_params); } Ok(address) @@ -246,7 +215,7 @@ mod tests { use bech32::{Bech32, Bech32m, NoChecksum}; use super::*; - use crate::account::{AccountId, AccountType}; + use crate::account::{AccountId, AccountStorageMode, AccountType}; use crate::address::CustomNetworkId; use crate::errors::{AccountIdError, Bech32Error}; use crate::testing::account_id::{ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, AccountIdBuilder}; @@ -302,8 +271,8 @@ mod tests { // Encode/Decode with routing 
parameters should be valid. address = address.with_routing_parameters( RoutingParameters::new(AddressInterface::BasicWallet) - .with_note_tag_len(NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH)?, - )?; + .with_note_tag_len(NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH)?, + ); let bech32_string = address.encode(network_id.clone()); assert!( @@ -346,7 +315,7 @@ mod tests { let account_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; let address = Address::new(account_id).with_routing_parameters( RoutingParameters::new(AddressInterface::BasicWallet).with_note_tag_len(14)?, - )?; + ); let bech32_string = address.encode(network_id); let mut invalid_bech32_1 = bech32_string.clone(); @@ -431,8 +400,8 @@ mod tests { let account_id = AccountIdBuilder::new().account_type(account_type).build_with_rng(rng); let address = Address::new(account_id).with_routing_parameters( RoutingParameters::new(AddressInterface::BasicWallet) - .with_note_tag_len(NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH)?, - )?; + .with_note_tag_len(NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH)?, + ); let serialized = address.to_bytes(); let deserialized = Address::read_from_bytes(&serialized)?; @@ -463,7 +432,7 @@ mod tests { let address = Address::new(account_id).with_routing_parameters( RoutingParameters::new(AddressInterface::BasicWallet) .with_encryption_key(sealing_key.clone()), - )?; + ); // Verify encryption key is present let retrieved_key = @@ -503,7 +472,7 @@ mod tests { let address = Address::new(account_id).with_routing_parameters( RoutingParameters::new(AddressInterface::BasicWallet) .with_encryption_key(sealing_key.clone()), - )?; + ); // Encode and decode let encoded = address.encode(NetworkId::Mainnet); @@ -521,4 +490,20 @@ mod tests { Ok(()) } + + #[test] + fn address_allows_max_note_tag_len() -> anyhow::Result<()> { + let account_id = AccountIdBuilder::new() + .account_type(AccountType::RegularAccountImmutableCode) + .build_with_rng(&mut rand::rng()); + + let address = 
Address::new(account_id).with_routing_parameters( + RoutingParameters::new(AddressInterface::BasicWallet) + .with_note_tag_len(NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH)?, + ); + + assert_eq!(address.note_tag_len(), NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH); + + Ok(()) + } } diff --git a/crates/miden-protocol/src/address/routing_parameters.rs b/crates/miden-protocol/src/address/routing_parameters.rs index 0fb5ec6923..ed0a45fe8a 100644 --- a/crates/miden-protocol/src/address/routing_parameters.rs +++ b/crates/miden-protocol/src/address/routing_parameters.rs @@ -33,12 +33,12 @@ const BECH32_SEPARATOR: &str = "1"; /// The value to encode the absence of a note tag routing parameter (i.e. `None`). /// -/// The note tag length occupies 5 bits (values 0..=31). Valid tag lengths are 0..=30, -/// so we reserve the maximum 5-bit value (31) to represent `None`. +/// The note tag length occupies 6 bits (values 0..=63). Valid tag lengths are 0..=32, +/// so we reserve the maximum 6-bit value (63) to represent `None`. /// /// If the note tag length is absent from routing parameters, the note tag length for the address /// will be set to the default default tag length of the address' ID component. -const ABSENT_NOTE_TAG_LEN: u8 = (1 << 5) - 1; // 31 +const ABSENT_NOTE_TAG_LEN: u8 = 63; /// The routing parameter key for the receiver profile. const RECEIVER_PROFILE_PARAM_KEY: u8 = 0; @@ -55,8 +55,8 @@ const K256_PUBLIC_KEY_LENGTH: usize = 33; /// Discriminants for encryption key variants. const ENCRYPTION_KEY_X25519_XCHACHA20POLY1305: u8 = 0; const ENCRYPTION_KEY_K256_XCHACHA20POLY1305: u8 = 1; -const ENCRYPTION_KEY_X25519_AEAD_RPO: u8 = 2; -const ENCRYPTION_KEY_K256_AEAD_RPO: u8 = 3; +const ENCRYPTION_KEY_X25519_AEAD_POSEIDON2: u8 = 2; +const ENCRYPTION_KEY_K256_AEAD_POSEIDON2: u8 = 3; /// Parameters that define how a sender should route a note to the [`AddressId`](super::AddressId) /// in an [`Address`](super::Address). 
@@ -92,8 +92,7 @@ impl RoutingParameters { /// # Errors /// /// Returns an error if: - /// - The tag length exceeds the maximum of [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`] and - /// [`NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH`]. + /// - The tag length exceeds [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`]. pub fn with_note_tag_len(mut self, note_tag_len: u8) -> Result { if note_tag_len > NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH { return Err(AddressError::TagLengthTooLarge(note_tag_len)); @@ -108,9 +107,8 @@ impl RoutingParameters { /// Returns the note tag length preference. /// - /// This is guaranteed to be in range `0..=30` (e.g. the maximum of - /// [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`] and - /// [`NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH`]). + /// This is guaranteed to be in range `0..=32` (i.e. at most + /// [`NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH`]). pub fn note_tag_len(&self) -> Option { self.note_tag_len } @@ -254,7 +252,8 @@ impl Serializable for RoutingParameters { impl Deserializable for RoutingParameters { fn read_from(source: &mut R) -> Result { let num_bytes = source.read_u16()?; - let bytes: Vec = source.read_many(num_bytes as usize)?; + let bytes: Vec = + source.read_many_iter(num_bytes as usize)?.collect::>()?; Self::decode_from_bytes(bytes.into_iter()) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) @@ -269,11 +268,11 @@ fn encode_receiver_profile(interface: AddressInterface, note_tag_len: Option let note_tag_len = note_tag_len.unwrap_or(ABSENT_NOTE_TAG_LEN); let interface = interface as u16; - debug_assert_eq!(interface >> 11, 0, "address interface should have its upper 5 bits unset"); + debug_assert_eq!(interface >> 10, 0, "address interface should fit into 10 bits"); - // The interface takes up 11 bits and the tag length 5 bits, so we can merge them + // The interface takes up 10 bits and the tag length 6 bits, so we can merge them // together. 
- let tag_len = (note_tag_len as u16) << 11; + let tag_len = (note_tag_len as u16) << 10; let receiver_profile: u16 = tag_len | interface; receiver_profile.to_be_bytes() } @@ -290,14 +289,16 @@ fn decode_receiver_profile( let byte1 = byte_iter.next().expect("byte1 should exist"); let receiver_profile = u16::from_be_bytes([byte0, byte1]); - let tag_len = (receiver_profile >> 11) as u8; - let note_tag_len = if tag_len == ABSENT_NOTE_TAG_LEN { - None - } else { - Some(tag_len) + let tag_len = (receiver_profile >> 10) as u8; + let note_tag_len = match tag_len { + ABSENT_NOTE_TAG_LEN => None, + 0..=32 => Some(tag_len), + _ => { + return Err(AddressError::decode_error(format!("invalid note tag length {}", tag_len))); + }, }; - let addr_interface = receiver_profile & 0b0000_0111_1111_1111; + let addr_interface = receiver_profile & 0b0000_0011_1111_1111; let addr_interface = AddressInterface::try_from(addr_interface).map_err(|err| { AddressError::decode_error_with_source("failed to decode address interface", err) })?; @@ -316,12 +317,12 @@ fn encode_encryption_key(key: &SealingKey, encoded: &mut Vec) { encoded.push(ENCRYPTION_KEY_K256_XCHACHA20POLY1305); encoded.extend(&pk.to_bytes()); }, - SealingKey::X25519AeadRpo(pk) => { - encoded.push(ENCRYPTION_KEY_X25519_AEAD_RPO); + SealingKey::X25519AeadPoseidon2(pk) => { + encoded.push(ENCRYPTION_KEY_X25519_AEAD_POSEIDON2); encoded.extend(&pk.to_bytes()); }, - SealingKey::K256AeadRpo(pk) => { - encoded.push(ENCRYPTION_KEY_K256_AEAD_RPO); + SealingKey::K256AeadPoseidon2(pk) => { + encoded.push(ENCRYPTION_KEY_K256_AEAD_POSEIDON2); encoded.extend(&pk.to_bytes()); }, } @@ -346,10 +347,12 @@ fn decode_encryption_key( ENCRYPTION_KEY_K256_XCHACHA20POLY1305 => { SealingKey::K256XChaCha20Poly1305(read_k256_pub_key(byte_iter)?) }, - ENCRYPTION_KEY_X25519_AEAD_RPO => { - SealingKey::X25519AeadRpo(read_x25519_pub_key(byte_iter)?) + ENCRYPTION_KEY_X25519_AEAD_POSEIDON2 => { + SealingKey::X25519AeadPoseidon2(read_x25519_pub_key(byte_iter)?) 
+ }, + ENCRYPTION_KEY_K256_AEAD_POSEIDON2 => { + SealingKey::K256AeadPoseidon2(read_k256_pub_key(byte_iter)?) }, - ENCRYPTION_KEY_K256_AEAD_RPO => SealingKey::K256AeadRpo(read_k256_pub_key(byte_iter)?), other => { return Err(AddressError::decode_error(format!( "unknown encryption key variant: {}", @@ -554,21 +557,21 @@ mod tests { test_encryption_key_roundtrip(encryption_key)?; } - // Test X25519AeadRpo + // Test X25519AeadPoseidon2 { use crate::crypto::dsa::eddsa_25519_sha512::SecretKey; let secret_key = SecretKey::with_rng(&mut rand::rng()); let public_key = secret_key.public_key(); - let encryption_key = SealingKey::X25519AeadRpo(public_key); + let encryption_key = SealingKey::X25519AeadPoseidon2(public_key); test_encryption_key_roundtrip(encryption_key)?; } - // Test K256AeadRpo + // Test K256AeadPoseidon2 { use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; let secret_key = SecretKey::with_rng(&mut rand::rng()); let public_key = secret_key.public_key(); - let encryption_key = SealingKey::K256AeadRpo(public_key); + let encryption_key = SealingKey::K256AeadPoseidon2(public_key); test_encryption_key_roundtrip(encryption_key)?; } diff --git a/crates/miden-protocol/src/asset/asset_callbacks.rs b/crates/miden-protocol/src/asset/asset_callbacks.rs new file mode 100644 index 0000000000..3edf662f2f --- /dev/null +++ b/crates/miden-protocol/src/asset/asset_callbacks.rs @@ -0,0 +1,105 @@ +use alloc::vec::Vec; + +use crate::Word; +use crate::account::{StorageSlot, StorageSlotName}; +use crate::utils::sync::LazyLock; + +// CONSTANTS +// ================================================================================================ + +static ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_SLOT_NAME: LazyLock = + LazyLock::new(|| { + StorageSlotName::new("miden::protocol::faucet::callback::on_before_asset_added_to_account") + .expect("storage slot name should be valid") + }); + +static ON_BEFORE_ASSET_ADDED_TO_NOTE_SLOT_NAME: LazyLock = LazyLock::new(|| { + 
StorageSlotName::new("miden::protocol::faucet::callback::on_before_asset_added_to_note") + .expect("storage slot name should be valid") +}); + +// ASSET CALLBACKS +// ================================================================================================ + +/// Configures the callback procedure roots for asset callbacks. +/// +/// ## Storage Layout +/// +/// - [`Self::on_before_asset_added_to_account_slot()`]: Stores the procedure root of the +/// `on_before_asset_added_to_account` callback. This storage slot is only added if the callback +/// procedure root is not the empty word. +/// - [`Self::on_before_asset_added_to_note_slot()`]: Stores the procedure root of the +/// `on_before_asset_added_to_note` callback. This storage slot is only added if the callback +/// procedure root is not the empty word. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct AssetCallbacks { + on_before_asset_added_to_account: Word, + on_before_asset_added_to_note: Word, +} + +impl AssetCallbacks { + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`AssetCallbacks`] with all callbacks set to the empty word. + pub fn new() -> Self { + Self::default() + } + + /// Sets the `on_before_asset_added_to_account` callback procedure root. + pub fn on_before_asset_added_to_account(mut self, proc_root: Word) -> Self { + self.on_before_asset_added_to_account = proc_root; + self + } + + /// Sets the `on_before_asset_added_to_note` callback procedure root. + pub fn on_before_asset_added_to_note(mut self, proc_root: Word) -> Self { + self.on_before_asset_added_to_note = proc_root; + self + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the [`StorageSlotName`] where the `on_before_asset_added_to_account` callback + /// procedure root is stored. 
+ pub fn on_before_asset_added_to_account_slot() -> &'static StorageSlotName { + &ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_SLOT_NAME + } + + /// Returns the [`StorageSlotName`] where the `on_before_asset_added_to_note` callback + /// procedure root is stored. + pub fn on_before_asset_added_to_note_slot() -> &'static StorageSlotName { + &ON_BEFORE_ASSET_ADDED_TO_NOTE_SLOT_NAME + } + + /// Returns the procedure root of the `on_before_asset_added_to_account` callback. + pub fn on_before_asset_added_to_account_proc_root(&self) -> Word { + self.on_before_asset_added_to_account + } + + /// Returns the procedure root of the `on_before_asset_added_to_note` callback. + pub fn on_before_asset_added_to_note_proc_root(&self) -> Word { + self.on_before_asset_added_to_note + } + + pub fn into_storage_slots(self) -> Vec { + let mut slots = Vec::new(); + + if !self.on_before_asset_added_to_account.is_empty() { + slots.push(StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + self.on_before_asset_added_to_account, + )); + } + + if !self.on_before_asset_added_to_note.is_empty() { + slots.push(StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_note_slot().clone(), + self.on_before_asset_added_to_note, + )); + } + + slots + } +} diff --git a/crates/miden-protocol/src/asset/asset_callbacks_flag.rs b/crates/miden-protocol/src/asset/asset_callbacks_flag.rs new file mode 100644 index 0000000000..c5dfa620e4 --- /dev/null +++ b/crates/miden-protocol/src/asset/asset_callbacks_flag.rs @@ -0,0 +1,68 @@ +use alloc::string::ToString; + +use crate::errors::AssetError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +/// The flag in an [`AssetVaultKey`](super::AssetVaultKey) that indicates whether +/// [`AssetCallbacks`](super::AssetCallbacks) are enabled for this asset. 
+#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +#[repr(u8)] +pub enum AssetCallbackFlag { + #[default] + Disabled = Self::DISABLED, + + Enabled = Self::ENABLED, +} + +impl AssetCallbackFlag { + const DISABLED: u8 = 0; + const ENABLED: u8 = 1; + + /// The serialized size of an [`AssetCallbackFlag`] in bytes. + pub const SERIALIZED_SIZE: usize = core::mem::size_of::(); + + /// Encodes the callbacks setting as a `u8`. + pub const fn as_u8(&self) -> u8 { + *self as u8 + } +} + +impl TryFrom for AssetCallbackFlag { + type Error = AssetError; + + /// Decodes a callbacks setting from a `u8`. + /// + /// # Errors + /// + /// Returns an error if the value is not a valid callbacks encoding. + fn try_from(value: u8) -> Result { + match value { + Self::DISABLED => Ok(Self::Disabled), + Self::ENABLED => Ok(Self::Enabled), + _ => Err(AssetError::InvalidAssetCallbackFlag(value)), + } + } +} + +impl Serializable for AssetCallbackFlag { + fn write_into(&self, target: &mut W) { + target.write_u8(self.as_u8()); + } + + fn get_size_hint(&self) -> usize { + AssetCallbackFlag::SERIALIZED_SIZE + } +} + +impl Deserializable for AssetCallbackFlag { + fn read_from(source: &mut R) -> Result { + Self::try_from(source.read_u8()?) 
+ .map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} diff --git a/crates/miden-protocol/src/asset/fungible.rs b/crates/miden-protocol/src/asset/fungible.rs index 95e7dfdcb6..58b5754663 100644 --- a/crates/miden-protocol/src/asset/fungible.rs +++ b/crates/miden-protocol/src/asset/fungible.rs @@ -1,10 +1,11 @@ -use alloc::boxed::Box; use alloc::string::ToString; use core::fmt; use super::vault::AssetVaultKey; -use super::{AccountType, Asset, AssetError, Felt, Word, ZERO, is_not_a_non_fungible_asset}; -use crate::account::{AccountId, AccountIdPrefix}; +use super::{AccountType, Asset, AssetCallbackFlag, AssetError, Word}; +use crate::Felt; +use crate::account::AccountId; +use crate::asset::AssetId; use crate::utils::serde::{ ByteReader, ByteWriter, @@ -19,10 +20,14 @@ use crate::utils::serde::{ /// /// A fungible asset consists of a faucet ID of the faucet which issued the asset as well as the /// asset amount. Asset amount is guaranteed to be 2^63 - 1 or smaller. +/// +/// The fungible asset can have callbacks to the faucet enabled or disabled, depending on +/// [`AssetCallbackFlag`]. See [`AssetCallbacks`](crate::asset::AssetCallbacks) for more details. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct FungibleAsset { faucet_id: AccountId, amount: u64, + callbacks: AssetCallbackFlag, } impl FungibleAsset { @@ -36,28 +41,80 @@ impl FungibleAsset { /// The serialized size of a [`FungibleAsset`] in bytes. /// - /// Currently an account ID (15 bytes) plus an amount (u64). - pub const SERIALIZED_SIZE: usize = AccountId::SERIALIZED_SIZE + core::mem::size_of::(); + /// An account ID (15 bytes) plus an amount (u64) plus a callbacks flag (u8). 
+ pub const SERIALIZED_SIZE: usize = AccountId::SERIALIZED_SIZE + + core::mem::size_of::() + + AssetCallbackFlag::SERIALIZED_SIZE; // CONSTRUCTOR // -------------------------------------------------------------------------------------------- + /// Returns a fungible asset instantiated with the provided faucet ID and amount. /// /// # Errors + /// /// Returns an error if: - /// - The faucet_id is not a valid fungible faucet ID. - /// - The provided amount is greater than 2^63 - 1. - pub const fn new(faucet_id: AccountId, amount: u64) -> Result { - let asset = Self { faucet_id, amount }; - asset.validate() + /// - The faucet ID is not a valid fungible faucet ID. + /// - The provided amount is greater than [`FungibleAsset::MAX_AMOUNT`]. + pub fn new(faucet_id: AccountId, amount: u64) -> Result { + if !matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { + return Err(AssetError::FungibleFaucetIdTypeMismatch(faucet_id)); + } + + if amount > Self::MAX_AMOUNT { + return Err(AssetError::FungibleAssetAmountTooBig(amount)); + } + + Ok(Self { + faucet_id, + amount, + callbacks: AssetCallbackFlag::default(), + }) } - /// Creates a new [FungibleAsset] without checking its validity. - pub(crate) fn new_unchecked(value: Word) -> FungibleAsset { - FungibleAsset { - faucet_id: AccountId::new_unchecked([value[3], value[2]]), - amount: value[0].as_int(), + /// Creates a fungible asset from the provided key and value. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - The provided key's asset ID limbs are not zero. + /// - The faucet ID is not a fungible faucet ID. + /// - The provided value's amount is greater than [`FungibleAsset::MAX_AMOUNT`] or its three + /// most significant elements are not zero. 
+ pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if !key.asset_id().is_empty() { + return Err(AssetError::FungibleAssetIdMustBeZero(key.asset_id())); + } + + if value[1] != Felt::ZERO || value[2] != Felt::ZERO || value[3] != Felt::ZERO { + return Err(AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(value)); } + + let mut asset = Self::new(key.faucet_id(), value[0].as_canonical_u64())?; + asset.callbacks = key.callback_flag(); + + Ok(asset) + } + + /// Creates a fungible asset from the provided key and value. + /// + /// Prefer [`Self::from_key_value`] for more type safety. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. + pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) + } + + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(mut self, callbacks: AssetCallbackFlag) -> Self { + self.callbacks = callbacks; + self } // PUBLIC ACCESSORS @@ -68,27 +125,43 @@ impl FungibleAsset { self.faucet_id } - /// Return ID prefix of the faucet which issued this asset. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - self.faucet_id.prefix() - } - /// Returns the amount of this asset. pub fn amount(&self) -> u64 { self.amount } - /// Returns true if this and the other assets were issued from the same faucet. - pub fn is_from_same_faucet(&self, other: &Self) -> bool { - self.faucet_id == other.faucet_id + /// Returns true if this and the other asset have the same vault key (same faucet and callback flag). + pub fn is_same(&self, other: &Self) -> bool { + self.vault_key() == other.vault_key() + } + + /// Returns the [`AssetCallbackFlag`] of this asset. + pub fn callbacks(&self) -> AssetCallbackFlag { + self.callbacks } /// Returns the key which is used to store this asset in the account vault. 
pub fn vault_key(&self) -> AssetVaultKey { - AssetVaultKey::from_account_id(self.faucet_id) + AssetVaultKey::new(AssetId::default(), self.faucet_id, self.callbacks) .expect("faucet ID should be of type fungible") } + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } + + /// Returns the asset's value encoded to a [`Word`]. + pub fn to_value_word(&self) -> Word { + Word::new([ + Felt::try_from(self.amount) + .expect("fungible asset should only allow amounts that fit into a felt"), + Felt::ZERO, + Felt::ZERO, + Felt::ZERO, + ]) + } + // OPERATIONS // -------------------------------------------------------------------------------------------- @@ -96,14 +169,14 @@ impl FungibleAsset { /// /// # Errors /// Returns an error if: - /// - The assets were not issued by the same faucet. + /// - The assets do not have the same vault key (i.e. different faucet or callback flags). /// - The total value of assets is greater than or equal to 2^63. #[allow(clippy::should_implement_trait)] pub fn add(self, other: Self) -> Result { - if self.faucet_id != other.faucet_id { - return Err(AssetError::FungibleAssetInconsistentFaucetIds { - original_issuer: self.faucet_id, - other_issuer: other.faucet_id, + if !self.is_same(&other) { + return Err(AssetError::FungibleAssetInconsistentVaultKeys { + original_key: self.vault_key(), + other_key: other.vault_key(), }); } @@ -115,21 +188,25 @@ impl FungibleAsset { return Err(AssetError::FungibleAssetAmountTooBig(amount)); } - Ok(Self { faucet_id: self.faucet_id, amount }) + Ok(Self { + faucet_id: self.faucet_id, + amount, + callbacks: self.callbacks, + }) } /// Subtracts a fungible asset from another and returns the result. /// /// # Errors /// Returns an error if: - /// - The assets were not issued by the same faucet. + /// - The assets do not have the same vault key (i.e. different faucet or callback flags). /// - The final amount would be negative. 
#[allow(clippy::should_implement_trait)] pub fn sub(self, other: Self) -> Result { - if self.faucet_id != other.faucet_id { - return Err(AssetError::FungibleAssetInconsistentFaucetIds { - original_issuer: self.faucet_id, - other_issuer: other.faucet_id, + if !self.is_same(&other) { + return Err(AssetError::FungibleAssetInconsistentVaultKeys { + original_key: self.vault_key(), + other_key: other.vault_key(), }); } @@ -140,39 +217,11 @@ impl FungibleAsset { }, )?; - Ok(FungibleAsset { faucet_id: self.faucet_id, amount }) - } - - // HELPER FUNCTIONS - // -------------------------------------------------------------------------------------------- - - /// Validates this fungible asset. - /// # Errors - /// Returns an error if: - /// - The faucet_id is not a valid fungible faucet ID. - /// - The provided amount is greater than 2^63 - 1. - const fn validate(self) -> Result { - let account_type = self.faucet_id.account_type(); - if !matches!(account_type, AccountType::FungibleFaucet) { - return Err(AssetError::FungibleFaucetIdTypeMismatch(self.faucet_id)); - } - - if self.amount > Self::MAX_AMOUNT { - return Err(AssetError::FungibleAssetAmountTooBig(self.amount)); - } - - Ok(self) - } -} - -impl From for Word { - fn from(asset: FungibleAsset) -> Self { - let mut result = Word::empty(); - result[0] = Felt::new(asset.amount); - result[2] = asset.faucet_id.suffix(); - result[3] = asset.faucet_id.prefix().as_felt(); - debug_assert!(is_not_a_non_fungible_asset(result)); - result + Ok(FungibleAsset { + faucet_id: self.faucet_id, + amount, + callbacks: self.callbacks, + }) } } @@ -182,22 +231,9 @@ impl From for Asset { } } -impl TryFrom for FungibleAsset { - type Error = AssetError; - - fn try_from(value: Word) -> Result { - if value[1] != ZERO { - return Err(AssetError::FungibleAssetExpectedZero(value)); - } - let faucet_id = AccountId::try_from([value[3], value[2]]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; - let amount = value[0].as_int(); - 
Self::new(faucet_id, amount) - } -} - impl fmt::Display for FungibleAsset { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // TODO: Replace with hex representation? write!(f, "{self:?}") } } @@ -211,42 +247,38 @@ impl Serializable for FungibleAsset { // distinguishable during deserialization. target.write(self.faucet_id); target.write(self.amount); + target.write(self.callbacks); } fn get_size_hint(&self) -> usize { - self.faucet_id.get_size_hint() + self.amount.get_size_hint() + self.faucet_id.get_size_hint() + + self.amount.get_size_hint() + + self.callbacks.get_size_hint() } } impl Deserializable for FungibleAsset { fn read_from(source: &mut R) -> Result { - let faucet_id_prefix: AccountIdPrefix = source.read()?; - FungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) + let faucet_id: AccountId = source.read()?; + FungibleAsset::deserialize_with_faucet_id(faucet_id, source) } } impl FungibleAsset { - /// Deserializes a [`FungibleAsset`] from an [`AccountIdPrefix`] and the remaining data from the - /// given `source`. - pub(super) fn deserialize_with_faucet_id_prefix( - faucet_id_prefix: AccountIdPrefix, + /// Deserializes a [`FungibleAsset`] from an [`AccountId`] and the remaining data from the given + /// `source`. + pub(super) fn deserialize_with_faucet_id( + faucet_id: AccountId, source: &mut R, ) -> Result { - // The 8 bytes of the prefix have already been read, so we only need to read the remaining 7 - // bytes of the account ID's 15 total bytes. - let suffix_bytes: [u8; 7] = source.read()?; - // Convert prefix back to bytes so we can call the TryFrom<[u8; 15]> impl. 
- let prefix_bytes: [u8; 8] = faucet_id_prefix.into(); - let mut id_bytes: [u8; 15] = [0; 15]; - id_bytes[..8].copy_from_slice(&prefix_bytes); - id_bytes[8..].copy_from_slice(&suffix_bytes); - - let faucet_id = AccountId::try_from(id_bytes) - .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; - let amount: u64 = source.read()?; - FungibleAsset::new(faucet_id, amount) - .map_err(|err| DeserializationError::InvalidValue(err.to_string())) + let amount: u64 = source.read()?; + let callbacks = source.read()?; + let asset = FungibleAsset::new(faucet_id, amount) + .map_err(|err| DeserializationError::InvalidValue(err.to_string()))? + .with_callbacks(callbacks); + + Ok(asset) } } @@ -255,6 +287,8 @@ impl FungibleAsset { #[cfg(test)] mod tests { + use assert_matches::assert_matches; + use super::*; use crate::account::AccountId; use crate::testing::account_id::{ @@ -267,7 +301,39 @@ mod tests { }; #[test] - fn test_fungible_asset_serde() { + fn fungible_asset_from_key_value_words_fails_on_invalid_asset_id() -> anyhow::Result<()> { + let faucet_id: AccountId = ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET.try_into()?; + let invalid_key = Word::from([ + Felt::from(1u32), + Felt::from(2u32), + faucet_id.suffix(), + faucet_id.prefix().as_felt(), + ]); + + let err = FungibleAsset::from_key_value_words( + invalid_key, + FungibleAsset::mock(5).to_value_word(), + ) + .unwrap_err(); + assert_matches!(err, AssetError::FungibleAssetIdMustBeZero(_)); + + Ok(()) + } + + #[test] + fn fungible_asset_from_key_value_fails_on_invalid_value() -> anyhow::Result<()> { + let asset = FungibleAsset::mock(42); + let mut invalid_value = asset.to_value_word(); + invalid_value[2] = Felt::from(5u32); + + let err = FungibleAsset::from_key_value(asset.vault_key(), invalid_value).unwrap_err(); + assert_matches!(err, AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(_)); + + Ok(()) + } + + #[test] + fn test_fungible_asset_serde() -> anyhow::Result<()> { for fungible_account_id in [ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, 
ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, @@ -281,6 +347,15 @@ mod tests { fungible_asset, FungibleAsset::read_from_bytes(&fungible_asset.to_bytes()).unwrap() ); + assert_eq!(fungible_asset.to_bytes().len(), fungible_asset.get_size_hint()); + + assert_eq!( + fungible_asset, + FungibleAsset::from_key_value_words( + fungible_asset.to_key_word(), + fungible_asset.to_value_word() + )? + ) } let account_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_3).unwrap(); @@ -296,5 +371,16 @@ mod tests { asset_bytes[0..15].copy_from_slice(&non_fungible_faucet_id.to_bytes()); let err = FungibleAsset::read_from_bytes(&asset_bytes).unwrap_err(); assert!(matches!(err, DeserializationError::InvalidValue(_))); + + Ok(()) + } + + #[test] + fn test_vault_key_for_fungible_asset() { + let asset = FungibleAsset::mock(34); + + assert_eq!(asset.vault_key().faucet_id(), FungibleAsset::mock_issuer()); + assert_eq!(asset.vault_key().asset_id().prefix().as_canonical_u64(), 0); + assert_eq!(asset.vault_key().asset_id().suffix().as_canonical_u64(), 0); } } diff --git a/crates/miden-protocol/src/asset/mod.rs b/crates/miden-protocol/src/asset/mod.rs index 4d14998289..4bdec21c38 100644 --- a/crates/miden-protocol/src/asset/mod.rs +++ b/crates/miden-protocol/src/asset/mod.rs @@ -7,11 +7,10 @@ use super::utils::serde::{ DeserializationError, Serializable, }; -use super::{Felt, Hasher, Word, ZERO}; -use crate::account::AccountIdPrefix; +use super::{Felt, Word}; +use crate::account::AccountId; mod fungible; -use alloc::boxed::Box; pub use fungible::FungibleAsset; @@ -22,70 +21,72 @@ pub use nonfungible::{NonFungibleAsset, NonFungibleAssetDetails}; mod token_symbol; pub use token_symbol::TokenSymbol; +mod asset_callbacks; +pub use asset_callbacks::AssetCallbacks; + +mod asset_callbacks_flag; +pub use asset_callbacks_flag::AssetCallbackFlag; + mod vault; -pub use vault::{AssetVault, AssetVaultKey, AssetWitness, PartialVault}; +pub use vault::{AssetId, AssetVault, AssetVaultKey, AssetWitness, 
PartialVault}; // ASSET // ================================================================================================ /// A fungible or a non-fungible asset. /// -/// All assets are encoded using a single word (4 elements) such that it is easy to determine the -/// type of an asset both inside and outside Miden VM. Specifically: -/// -/// Element 1 of the asset will be: -/// - ZERO for a fungible asset. -/// - non-ZERO for a non-fungible asset. +/// All assets are encoded as the vault key of the asset and its value, each represented as one word +/// (4 elements). This makes it easy to determine the type of an asset both inside and outside +/// Miden VM. Specifically: /// -/// Element 3 of both asset types is an [`AccountIdPrefix`] or equivalently, the prefix of an -/// [`AccountId`](crate::account::AccountId), which can be used to distinguish assets -/// based on [`AccountIdPrefix::account_type`]. +/// The vault key of an asset contains the [`AccountId`] of the faucet that issues the asset. It can +/// be used to distinguish assets based on the encoded [`AccountId::account_type`]. In the vault +/// keys of assets, the account type bits at index 4 and 5 determine whether the asset is fungible +/// or non-fungible. /// -/// For element 3 of the vault keys of assets, the bit at index 5 (referred to as the -/// "fungible bit" will be): -/// - `1` for a fungible asset. -/// - `0` for a non-fungible asset. -/// -/// The above properties guarantee that there can never be a collision between a fungible and a +/// This property guarantees that there can never be a collision between a fungible and a /// non-fungible asset. /// /// The methodology for constructing fungible and non-fungible assets is described below. /// /// # Fungible assets /// -/// - A fungible asset's data layout is: `[amount, 0, faucet_id_suffix, faucet_id_prefix]`. +/// - A fungible asset's value layout is: `[amount, 0, 0, 0]`. 
/// - A fungible asset's vault key layout is: `[0, 0, faucet_id_suffix, faucet_id_prefix]`. /// -/// The most significant elements of a fungible asset are set to the prefix (`faucet_id_prefix`) and -/// suffix (`faucet_id_suffix`) of the ID of the faucet which issues the asset. This guarantees the -/// properties described above (the fungible bit is `1`). -/// -/// The least significant element is set to the amount of the asset. This amount cannot be greater -/// than [`FungibleAsset::MAX_AMOUNT`] and thus fits into a felt. +/// The most significant elements of a fungible asset's key are set to the prefix +/// (`faucet_id_prefix`) and suffix (`faucet_id_suffix`) of the ID of the faucet which issues the +/// asset. The asset ID limbs are set to zero, which means two instances of the same fungible asset +/// have the same asset key and will be merged together when stored in the same account's vault. /// -/// Elements 1 and 2 are set to ZERO. +/// The least significant element of the value is set to the amount of the asset and the remaining +/// felts are zero. This amount cannot be greater than [`FungibleAsset::MAX_AMOUNT`] and thus fits +/// into a felt. /// /// It is impossible to find a collision between two fungible assets issued by different faucets as -/// the faucet_id is included in the description of the asset and this is guaranteed to be different +/// the faucet ID is included in the description of the asset and this is guaranteed to be different /// for each faucet as per the faucet creation logic. /// /// # Non-fungible assets /// -/// - A non-fungible asset's data layout is: `[hash0, hash1, hash2, faucet_id_prefix]`. -/// - A non-fungible asset's vault key layout is: `[faucet_id_prefix, hash1, hash2, hash0']`, where -/// `hash0'` is equivalent to `hash0` with the fungible bit set to `0`. See -/// [`NonFungibleAsset::vault_key`] for more details. +/// - A non-fungible asset's data layout is: `[hash0, hash1, hash2, hash3]`. 
+/// - A non-fungible asset's vault key layout is: `[hash0, hash1, faucet_id_suffix, +/// faucet_id_prefix]`. /// -/// The 4 elements of non-fungible assets are computed as follows: -/// - First the asset data is hashed. This compresses an asset of an arbitrary length to 4 field -/// elements: `[hash0, hash1, hash2, hash3]`. -/// - `hash3` is then replaced with the prefix of the faucet ID (`faucet_id_prefix`) which issues -/// the asset: `[hash0, hash1, hash2, faucet_id_prefix]`. +/// The 4 elements of non-fungible assets are computed by hashing the asset data. This compresses an +/// asset of an arbitrary length to 4 field elements: `[hash0, hash1, hash2, hash3]`. /// /// It is impossible to find a collision between two non-fungible assets issued by different faucets -/// as the faucet_id is included in the description of the non-fungible asset and this is guaranteed -/// to be different as per the faucet creation logic. Collision resistance for non-fungible assets -/// issued by the same faucet is ~2^95. +/// as the faucet ID is included in the description of the non-fungible asset and this is guaranteed +/// to be different as per the faucet creation logic. +/// +/// The most significant elements of a non-fungible asset's key are set to the prefix +/// (`faucet_id_prefix`) and suffix (`faucet_id_suffix`) of the ID of the faucet which issues the +/// asset. The asset ID limbs are set to hashes from the asset's value. This means the collision +/// resistance of non-fungible assets issued by the same faucet is ~2^64, due to the 128-bit asset +/// ID that is unique per non-fungible asset. In other words, two non-fungible assets issued by the +/// same faucet are very unlikely to have the same asset key and thus should not collide when stored +/// in the same account's vault. 
#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Asset { Fungible(FungibleAsset), @@ -93,47 +94,66 @@ pub enum Asset { } impl Asset { - /// Creates a new [Asset] without checking its validity. - pub(crate) fn new_unchecked(value: Word) -> Asset { - if is_not_a_non_fungible_asset(value) { - Asset::Fungible(FungibleAsset::new_unchecked(value)) + /// Creates an asset from the provided key and value. + /// + /// # Errors + /// + /// Returns an error if: + /// - [`FungibleAsset::from_key_value`] or [`NonFungibleAsset::from_key_value`] fails. + pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if matches!(key.faucet_id().account_type(), AccountType::FungibleFaucet) { + FungibleAsset::from_key_value(key, value).map(Asset::Fungible) } else { - Asset::NonFungible(unsafe { NonFungibleAsset::new_unchecked(value) }) + NonFungibleAsset::from_key_value(key, value).map(Asset::NonFungible) + } + } + + /// Creates an asset from the provided key and value. + /// + /// Prefer [`Self::from_key_value`] for more type safety. + /// + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. + pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) + } + + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(self, callbacks: AssetCallbackFlag) -> Self { + match self { + Asset::Fungible(fungible_asset) => fungible_asset.with_callbacks(callbacks).into(), + Asset::NonFungible(non_fungible_asset) => { + non_fungible_asset.with_callbacks(callbacks).into() + }, } } /// Returns true if this asset is the same as the specified asset. /// - /// Two assets are defined to be the same if: - /// - For fungible assets, if they were issued by the same faucet. - /// - For non-fungible assets, if the assets are identical. 
+ /// Two assets are defined to be the same if their vault keys match. pub fn is_same(&self, other: &Self) -> bool { - use Asset::*; - match (self, other) { - (Fungible(l), Fungible(r)) => l.is_from_same_faucet(r), - (NonFungible(l), NonFungible(r)) => l == r, - _ => false, - } + self.vault_key() == other.vault_key() } /// Returns true if this asset is a fungible asset. - pub const fn is_fungible(&self) -> bool { + pub fn is_fungible(&self) -> bool { matches!(self, Self::Fungible(_)) } /// Returns true if this asset is a non fungible asset. - pub const fn is_non_fungible(&self) -> bool { + pub fn is_non_fungible(&self) -> bool { matches!(self, Self::NonFungible(_)) } - /// Returns the prefix of the faucet ID which issued this asset. - /// - /// To get the full [`AccountId`](crate::account::AccountId) of a fungible asset the asset - /// must be matched on. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { + /// Returns the ID of the faucet that issued this asset. + pub fn faucet_id(&self) -> AccountId { match self { - Self::Fungible(asset) => asset.faucet_id_prefix(), - Self::NonFungible(asset) => asset.faucet_id_prefix(), + Self::Fungible(asset) => asset.faucet_id(), + Self::NonFungible(asset) => asset.faucet_id(), } } @@ -145,6 +165,30 @@ impl Asset { } } + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } + + /// Returns the asset's value encoded to a [`Word`]. + pub fn to_value_word(&self) -> Word { + match self { + Asset::Fungible(fungible_asset) => fungible_asset.to_value_word(), + Asset::NonFungible(non_fungible_asset) => non_fungible_asset.to_value_word(), + } + } + + /// Returns the asset encoded as elements. + /// + /// The first four elements contain the asset key and the last four elements contain the asset + /// value. 
+ pub fn as_elements(&self) -> [Felt; 8] { + let mut elements = [Felt::ZERO; 8]; + elements[0..4].copy_from_slice(self.to_key_word().as_elements()); + elements[4..8].copy_from_slice(self.to_value_word().as_elements()); + elements + } + /// Returns the inner [`FungibleAsset`]. /// /// # Panics @@ -170,47 +214,6 @@ impl Asset { } } -impl From for Word { - fn from(asset: Asset) -> Self { - match asset { - Asset::Fungible(asset) => asset.into(), - Asset::NonFungible(asset) => asset.into(), - } - } -} - -impl From<&Asset> for Word { - fn from(value: &Asset) -> Self { - (*value).into() - } -} - -impl TryFrom<&Word> for Asset { - type Error = AssetError; - - fn try_from(value: &Word) -> Result { - (*value).try_into() - } -} - -impl TryFrom for Asset { - type Error = AssetError; - - fn try_from(value: Word) -> Result { - // Return an error if element 3 is not a valid account ID prefix, which cannot be checked by - // is_not_a_non_fungible_asset. - // Keep in mind serialized assets do _not_ carry the suffix required to reconstruct the full - // account identifier. - let prefix = AccountIdPrefix::try_from(value[3]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::from(err)))?; - match prefix.account_type() { - AccountType::FungibleFaucet => FungibleAsset::try_from(value).map(Asset::from), - AccountType::NonFungibleFaucet => NonFungibleAsset::try_from(value).map(Asset::from), - _ => Err(AssetError::InvalidFaucetAccountIdPrefix(prefix)), - } - } -} - // SERIALIZATION // ================================================================================================ @@ -232,58 +235,34 @@ impl Serializable for Asset { impl Deserializable for Asset { fn read_from(source: &mut R) -> Result { - // Both asset types have their faucet ID prefix as the first element, so we can use it to - // inspect what type of asset it is. 
- let faucet_id_prefix: AccountIdPrefix = source.read()?; + // Both asset types have their faucet ID as the first element, so we can use it to inspect + // what type of asset it is. + let faucet_id: AccountId = source.read()?; - match faucet_id_prefix.account_type() { + match faucet_id.account_type() { AccountType::FungibleFaucet => { - FungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) - .map(Asset::from) + FungibleAsset::deserialize_with_faucet_id(faucet_id, source).map(Asset::from) }, AccountType::NonFungibleFaucet => { - NonFungibleAsset::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) - .map(Asset::from) + NonFungibleAsset::deserialize_with_faucet_id(faucet_id, source).map(Asset::from) }, other_type => Err(DeserializationError::InvalidValue(format!( - "failed to deserialize asset: expected an account ID prefix of type faucet, found {other_type:?}" + "failed to deserialize asset: expected an account ID prefix of type faucet, found {other_type}" ))), } } } -// HELPER FUNCTIONS -// ================================================================================================ - -/// Returns `true` if asset in [Word] is not a non-fungible asset. -/// -/// Note: this does not mean that the word is a fungible asset as the word may contain a value -/// which is not a valid asset. 
-fn is_not_a_non_fungible_asset(asset: Word) -> bool { - match AccountIdPrefix::try_from(asset[3]) { - Ok(prefix) => { - matches!(prefix.account_type(), AccountType::FungibleFaucet) - }, - Err(_err) => { - #[cfg(debug_assertions)] - panic!("invalid account ID prefix passed to is_not_a_non_fungible_asset: {_err}"); - #[cfg(not(debug_assertions))] - false - }, - } -} - // TESTS // ================================================================================================ #[cfg(test)] mod tests { - use miden_crypto::Word; use miden_crypto::utils::{Deserializable, Serializable}; use super::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; - use crate::account::{AccountId, AccountIdPrefix}; + use crate::account::AccountId; use crate::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, @@ -295,8 +274,9 @@ mod tests { ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, }; + /// Tests the serialization roundtrip for assets for assets <-> bytes and assets <-> words. 
#[test] - fn test_asset_serde() { + fn test_asset_serde() -> anyhow::Result<()> { for fungible_account_id in [ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, @@ -307,6 +287,13 @@ mod tests { let account_id = AccountId::try_from(fungible_account_id).unwrap(); let fungible_asset: Asset = FungibleAsset::new(account_id, 10).unwrap().into(); assert_eq!(fungible_asset, Asset::read_from_bytes(&fungible_asset.to_bytes()).unwrap()); + assert_eq!( + fungible_asset, + Asset::from_key_value_words( + fungible_asset.to_key_word(), + fungible_asset.to_value_word() + )?, + ); } for non_fungible_account_id in [ @@ -315,50 +302,33 @@ mod tests { ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, ] { let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let non_fungible_asset: Asset = NonFungibleAsset::new(&details).unwrap().into(); assert_eq!( non_fungible_asset, Asset::read_from_bytes(&non_fungible_asset.to_bytes()).unwrap() ); - } - } - - #[test] - fn test_new_unchecked() { - for fungible_account_id in [ - ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_2, - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_3, - ] { - let account_id = AccountId::try_from(fungible_account_id).unwrap(); - let fungible_asset: Asset = FungibleAsset::new(account_id, 10).unwrap().into(); - assert_eq!(fungible_asset, Asset::new_unchecked(Word::from(&fungible_asset))); + assert_eq!( + non_fungible_asset, + Asset::from_key_value_words( + non_fungible_asset.to_key_word(), + non_fungible_asset.to_value_word() + )? 
+ ); } - for non_fungible_account_id in [ - ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, - ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, - ] { - let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); - let non_fungible_asset: Asset = NonFungibleAsset::new(&details).unwrap().into(); - assert_eq!(non_fungible_asset, Asset::new_unchecked(Word::from(non_fungible_asset))); - } + Ok(()) } - /// This test asserts that account ID's prefix is serialized in the first felt of assets. + /// This test asserts that account ID's is serialized in the first felt of assets. /// Asset deserialization relies on that fact and if this changes the serialization must /// be updated. #[test] - fn test_account_id_prefix_is_in_first_serialized_felt() { + fn test_account_id_is_serialized_first() { for asset in [FungibleAsset::mock(300), NonFungibleAsset::mock(&[0xaa, 0xbb])] { let serialized_asset = asset.to_bytes(); - let prefix = AccountIdPrefix::read_from_bytes(&serialized_asset).unwrap(); - assert_eq!(prefix, asset.faucet_id_prefix()); + let prefix = AccountId::read_from_bytes(&serialized_asset).unwrap(); + assert_eq!(prefix, asset.faucet_id()); } } } diff --git a/crates/miden-protocol/src/asset/nonfungible.rs b/crates/miden-protocol/src/asset/nonfungible.rs index d48b26602b..c6fcec2297 100644 --- a/crates/miden-protocol/src/asset/nonfungible.rs +++ b/crates/miden-protocol/src/asset/nonfungible.rs @@ -1,44 +1,37 @@ -use alloc::boxed::Box; use alloc::string::ToString; use alloc::vec::Vec; use core::fmt; use super::vault::AssetVaultKey; -use super::{AccountIdPrefix, AccountType, Asset, AssetError, Felt, Hasher, Word}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; -use crate::{FieldElement, WORD_SIZE}; - -/// Position of the faucet_id inside the [`NonFungibleAsset`] word having fields in BigEndian. 
-const FAUCET_ID_POS_BE: usize = 3; +use super::{AccountType, Asset, AssetCallbackFlag, AssetError, Word}; +use crate::Hasher; +use crate::account::AccountId; +use crate::asset::vault::AssetId; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // NON-FUNGIBLE ASSET // ================================================================================================ /// A commitment to a non-fungible asset. /// -/// The commitment is constructed as follows: -/// -/// - Hash the asset data producing `[hash0, hash1, hash2, hash3]`. -/// - Replace the value of `hash3` with the prefix of the faucet id (`faucet_id_prefix`) producing -/// `[hash0, hash1, hash2, faucet_id_prefix]`. -/// - This layout ensures that fungible and non-fungible assets are distinguishable by interpreting -/// the 3rd element of an asset as an [`AccountIdPrefix`] and checking its type. +/// See [`Asset`] for details on how it is constructed. /// /// [`NonFungibleAsset`] itself does not contain the actual asset data. The container for this data /// is [`NonFungibleAssetDetails`]. +/// +/// The non-fungible asset can have callbacks to the faucet enabled or disabled, depending on +/// [`AssetCallbackFlag`]. See [`AssetCallbacks`](crate::asset::AssetCallbacks) for more details. #[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct NonFungibleAsset(Word); - -impl PartialOrd for NonFungibleAsset { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for NonFungibleAsset { - fn cmp(&self, other: &Self) -> core::cmp::Ordering { - self.0.cmp(&other.0) - } +pub struct NonFungibleAsset { + faucet_id: AccountId, + value: Word, + callbacks: AssetCallbackFlag, } impl NonFungibleAsset { @@ -47,8 +40,9 @@ impl NonFungibleAsset { /// The serialized size of a [`NonFungibleAsset`] in bytes. /// - /// Currently represented as a word. 
- pub const SERIALIZED_SIZE: usize = Felt::ELEMENT_BYTES * WORD_SIZE; + /// An account ID (15 bytes) plus a word (32 bytes) plus a callbacks flag (1 byte). + pub const SERIALIZED_SIZE: usize = + AccountId::SERIALIZED_SIZE + Word::SERIALIZED_SIZE + AssetCallbackFlag::SERIALIZED_SIZE; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -70,110 +64,110 @@ impl NonFungibleAsset { /// /// # Errors /// Returns an error if the provided faucet ID is not for a non-fungible asset faucet. - pub fn from_parts(faucet_id: AccountIdPrefix, mut data_hash: Word) -> Result { + pub fn from_parts(faucet_id: AccountId, value: Word) -> Result { if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); } - data_hash[FAUCET_ID_POS_BE] = Felt::from(faucet_id); - - Ok(Self(data_hash)) + Ok(Self { + faucet_id, + value, + callbacks: AssetCallbackFlag::default(), + }) } - /// Creates a new [NonFungibleAsset] without checking its validity. + /// Creates a non-fungible asset from the provided key and value. /// - /// # Safety - /// This function requires that the provided value is a valid word encoding of a - /// [NonFungibleAsset]. - pub unsafe fn new_unchecked(value: Word) -> NonFungibleAsset { - NonFungibleAsset(value) - } + /// # Errors + /// + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - The provided key's asset ID limbs are not equal to the provided value's first and second + /// element. + /// - The faucet ID is not a non-fungible faucet ID. 
+ pub fn from_key_value(key: AssetVaultKey, value: Word) -> Result { + if key.asset_id().suffix() != value[0] || key.asset_id().prefix() != value[1] { + return Err(AssetError::NonFungibleAssetIdMustMatchValue { + asset_id: key.asset_id(), + value, + }); + } - // ACCESSORS - // -------------------------------------------------------------------------------------------- + let mut asset = Self::from_parts(key.faucet_id(), value)?; + asset.callbacks = key.callback_flag(); - /// Returns the vault key of the [`NonFungibleAsset`]. - /// - /// This is the same as the asset with the following modifications, in this order: - /// - Swaps the faucet ID at index 0 and `hash0` at index 3. - /// - Sets the fungible bit for `hash0` to `0`. + Ok(asset) + } + + /// Creates a non-fungible asset from the provided key and value. /// - /// # Rationale + /// Prefer [`Self::from_key_value`] for more type safety. /// - /// This means `hash0` will be used as the leaf index in the asset SMT which ensures that a - /// non-fungible faucet's assets generally end up in different leaves as the key is not based on - /// the faucet ID. + /// # Errors /// - /// It also ensures that there is never any collision in the leaf index between a non-fungible - /// asset and a fungible asset, as the former's vault key always has the fungible bit set to `0` - /// and the latter's vault key always has the bit set to `1`. - pub fn vault_key(&self) -> AssetVaultKey { - let mut vault_key = self.0; - - // Swap prefix of faucet ID with hash0. - vault_key.swap(0, FAUCET_ID_POS_BE); - - // Set the fungible bit to zero. - vault_key[3] = - AccountIdPrefix::clear_fungible_bit(self.faucet_id_prefix().version(), vault_key[3]); - - AssetVaultKey::new_unchecked(vault_key) + /// Returns an error if: + /// - The provided key does not contain a valid faucet ID. + /// - [`Self::from_key_value`] fails. 
+ pub fn from_key_value_words(key: Word, value: Word) -> Result { + let vault_key = AssetVaultKey::try_from(key)?; + Self::from_key_value(vault_key, value) } - /// Return ID prefix of the faucet which issued this asset. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - AccountIdPrefix::new_unchecked(self.0[FAUCET_ID_POS_BE]) + /// Returns a copy of this asset with the given [`AssetCallbackFlag`]. + pub fn with_callbacks(mut self, callbacks: AssetCallbackFlag) -> Self { + self.callbacks = callbacks; + self } - // HELPER FUNCTIONS + // ACCESSORS // -------------------------------------------------------------------------------------------- - /// Validates this non-fungible asset. - /// # Errors - /// Returns an error if: - /// - The faucet_id is not a valid non-fungible faucet ID. - /// - The most significant bit of the asset is not ZERO. - fn validate(&self) -> Result<(), AssetError> { - let faucet_id = AccountIdPrefix::try_from(self.0[FAUCET_ID_POS_BE]) - .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; - - let account_type = faucet_id.account_type(); - if !matches!(account_type, AccountType::NonFungibleFaucet) { - return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); - } + /// Returns the vault key of the [`NonFungibleAsset`]. + /// + /// See [`Asset`] docs for details on the key. + pub fn vault_key(&self) -> AssetVaultKey { + let asset_id_suffix = self.value[0]; + let asset_id_prefix = self.value[1]; + let asset_id = AssetId::new(asset_id_suffix, asset_id_prefix); - Ok(()) + AssetVaultKey::new(asset_id, self.faucet_id, self.callbacks) + .expect("constructors should ensure account ID is of type non-fungible faucet") } -} -impl From for Word { - fn from(asset: NonFungibleAsset) -> Self { - asset.0 + /// Returns the ID of the faucet which issued this asset. 
+ pub fn faucet_id(&self) -> AccountId { + self.faucet_id } -} -impl From for Asset { - fn from(asset: NonFungibleAsset) -> Self { - Asset::NonFungible(asset) + /// Returns the [`AssetCallbackFlag`] of this asset. + pub fn callbacks(&self) -> AssetCallbackFlag { + self.callbacks } -} -impl TryFrom for NonFungibleAsset { - type Error = AssetError; + /// Returns the asset's key encoded to a [`Word`]. + pub fn to_key_word(&self) -> Word { + self.vault_key().to_word() + } - fn try_from(value: Word) -> Result { - let asset = Self(value); - asset.validate()?; - Ok(asset) + /// Returns the asset's value encoded to a [`Word`]. + pub fn to_value_word(&self) -> Word { + self.value } } impl fmt::Display for NonFungibleAsset { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // TODO: Replace with hex representation? write!(f, "{self:?}") } } +impl From for Asset { + fn from(asset: NonFungibleAsset) -> Self { + Asset::NonFungible(asset) + } +} + // SERIALIZATION // ================================================================================================ @@ -181,44 +175,38 @@ impl Serializable for NonFungibleAsset { fn write_into(&self, target: &mut W) { // All assets should serialize their faucet ID at the first position to allow them to be // easily distinguishable during deserialization. 
- target.write(self.faucet_id_prefix()); - target.write(self.0[2]); - target.write(self.0[1]); - target.write(self.0[0]); + target.write(self.faucet_id()); + target.write(self.value); + target.write(self.callbacks); } fn get_size_hint(&self) -> usize { - Self::SERIALIZED_SIZE + self.faucet_id.get_size_hint() + self.value.get_size_hint() + self.callbacks.get_size_hint() } } impl Deserializable for NonFungibleAsset { fn read_from(source: &mut R) -> Result { - let faucet_id_prefix: AccountIdPrefix = source.read()?; + let faucet_id: AccountId = source.read()?; - Self::deserialize_with_faucet_id_prefix(faucet_id_prefix, source) + Self::deserialize_with_faucet_id(faucet_id, source) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } impl NonFungibleAsset { - /// Deserializes a [`NonFungibleAsset`] from an [`AccountIdPrefix`] and the remaining data from - /// the given `source`. - pub(super) fn deserialize_with_faucet_id_prefix( - faucet_id_prefix: AccountIdPrefix, + /// Deserializes a [`NonFungibleAsset`] from an [`AccountId`] and the remaining data from the + /// given `source`. + pub(super) fn deserialize_with_faucet_id( + faucet_id: AccountId, source: &mut R, ) -> Result { - let hash_2: Felt = source.read()?; - let hash_1: Felt = source.read()?; - let hash_0: Felt = source.read()?; - - // The last felt in the data_hash will be replaced by the faucet id, so we can set it to - // zero here. 
- NonFungibleAsset::from_parts( - faucet_id_prefix, - Word::from([hash_0, hash_1, hash_2, Felt::ZERO]), - ) - .map_err(|err| DeserializationError::InvalidValue(err.to_string())) + let value: Word = source.read()?; + let callbacks: AssetCallbackFlag = source.read()?; + + NonFungibleAsset::from_parts(faucet_id, value) + .map(|asset| asset.with_callbacks(callbacks)) + .map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } @@ -230,7 +218,7 @@ impl NonFungibleAsset { /// Unlike [NonFungibleAsset] struct, this struct contains full details of a non-fungible asset. #[derive(Debug, Clone, PartialEq, Eq)] pub struct NonFungibleAssetDetails { - faucet_id: AccountIdPrefix, + faucet_id: AccountId, asset_data: Vec, } @@ -239,7 +227,7 @@ impl NonFungibleAssetDetails { /// /// # Errors /// Returns an error if the provided faucet ID is not for a non-fungible asset faucet. - pub fn new(faucet_id: AccountIdPrefix, asset_data: Vec) -> Result { + pub fn new(faucet_id: AccountId, asset_data: Vec) -> Result { if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); } @@ -248,7 +236,7 @@ impl NonFungibleAssetDetails { } /// Returns ID of the faucet which issued this asset. 
- pub fn faucet_id(&self) -> AccountIdPrefix { + pub fn faucet_id(&self) -> AccountId { self.faucet_id } @@ -266,6 +254,7 @@ mod tests { use assert_matches::assert_matches; use super::*; + use crate::Felt; use crate::account::AccountId; use crate::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, @@ -275,32 +264,66 @@ mod tests { }; #[test] - fn test_non_fungible_asset_serde() { + fn fungible_asset_from_key_value_fails_on_invalid_asset_id() -> anyhow::Result<()> { + let invalid_key = AssetVaultKey::new_native( + AssetId::new(Felt::from(1u32), Felt::from(2u32)), + ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET.try_into()?, + )?; + let err = + NonFungibleAsset::from_key_value(invalid_key, Word::from([4, 5, 6, 7u32])).unwrap_err(); + + assert_matches!(err, AssetError::NonFungibleAssetIdMustMatchValue { .. }); + + Ok(()) + } + + #[test] + fn test_non_fungible_asset_serde() -> anyhow::Result<()> { for non_fungible_account_id in [ ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, ] { let account_id = AccountId::try_from(non_fungible_account_id).unwrap(); - let details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let non_fungible_asset = NonFungibleAsset::new(&details).unwrap(); assert_eq!( non_fungible_asset, NonFungibleAsset::read_from_bytes(&non_fungible_asset.to_bytes()).unwrap() ); + assert_eq!(non_fungible_asset.to_bytes().len(), non_fungible_asset.get_size_hint()); + + assert_eq!( + non_fungible_asset, + NonFungibleAsset::from_key_value_words( + non_fungible_asset.to_key_word(), + non_fungible_asset.to_value_word() + )? 
+ ) } let account = AccountId::try_from(ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET).unwrap(); - let details = NonFungibleAssetDetails::new(account.prefix(), vec![4, 5, 6, 7]).unwrap(); + let details = NonFungibleAssetDetails::new(account, vec![4, 5, 6, 7]).unwrap(); let asset = NonFungibleAsset::new(&details).unwrap(); let mut asset_bytes = asset.to_bytes(); let fungible_faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); - // Set invalid Faucet ID Prefix. - asset_bytes[0..8].copy_from_slice(&fungible_faucet_id.prefix().to_bytes()); + // Set invalid faucet ID. + asset_bytes[0..AccountId::SERIALIZED_SIZE].copy_from_slice(&fungible_faucet_id.to_bytes()); let err = NonFungibleAsset::read_from_bytes(&asset_bytes).unwrap_err(); assert_matches!(err, DeserializationError::InvalidValue(msg) if msg.contains("must be of type NonFungibleFaucet")); + + Ok(()) + } + + #[test] + fn test_vault_key_for_non_fungible_asset() { + let asset = NonFungibleAsset::mock(&[42]); + + assert_eq!(asset.vault_key().faucet_id(), NonFungibleAsset::mock_issuer()); + assert_eq!(asset.vault_key().asset_id().suffix(), asset.to_value_word()[0]); + assert_eq!(asset.vault_key().asset_id().prefix(), asset.to_value_word()[1]); } } diff --git a/crates/miden-protocol/src/asset/token_symbol.rs b/crates/miden-protocol/src/asset/token_symbol.rs index a1132fd396..7189d6805b 100644 --- a/crates/miden-protocol/src/asset/token_symbol.rs +++ b/crates/miden-protocol/src/asset/token_symbol.rs @@ -1,201 +1,178 @@ +use alloc::fmt; use alloc::string::String; use super::{Felt, TokenSymbolError}; -/// Represents a string token symbol (e.g. "POL", "ETH") as a single [`Felt`] value. +/// Represents a token symbol (e.g. "POL", "ETH"). /// -/// Token Symbols can consists of up to 6 capital Latin characters, e.g. "C", "ETH", "MIDENC". -#[derive(Default, Clone, Copy, Debug, PartialEq)] -pub struct TokenSymbol(Felt); +/// Token Symbols can consist of up to 12 capital Latin characters, e.g. 
"C", "ETH", "MIDEN". +/// +/// The symbol is stored as a [`String`] and can be converted to a [`Felt`] encoding via +/// [`as_element()`](Self::as_element). +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TokenSymbol(String); impl TokenSymbol { /// Maximum allowed length of the token string. - pub const MAX_SYMBOL_LENGTH: usize = 6; + pub const MAX_SYMBOL_LENGTH: usize = 12; /// The length of the set of characters that can be used in a token's name. pub const ALPHABET_LENGTH: u64 = 26; - /// The maximum integer value of an encoded [`TokenSymbol`]. + /// The minimum integer value of an encoded [`TokenSymbol`]. /// - /// This value encodes the "ZZZZZZ" token symbol. - pub const MAX_ENCODED_VALUE: u64 = 8031810156; + /// This value encodes the "A" token symbol. + pub const MIN_ENCODED_VALUE: u64 = 1; - /// Constructs a new [`TokenSymbol`] from a static string. - /// - /// This function is `const` and can be used to define token symbols as constants, e.g.: - /// - /// ```rust - /// # use miden_protocol::asset::TokenSymbol; - /// const TOKEN: TokenSymbol = TokenSymbol::from_static_str("ETH"); - /// ``` + /// The maximum integer value of an encoded [`TokenSymbol`]. /// - /// This is convenient because using a string that is not a valid token symbol fails to - /// compile. + /// This value encodes the "ZZZZZZZZZZZZ" token symbol. + pub const MAX_ENCODED_VALUE: u64 = 2481152873203736562; + + /// Constructs a new [`TokenSymbol`] from a string, panicking on invalid input. /// /// # Panics /// /// Panics if: - /// - The length of the provided string is less than 1 or greater than 6. + /// - The length of the provided string is less than 1 or greater than 12. /// - The provided token string contains characters that are not uppercase ASCII. - pub const fn from_static_str(symbol: &'static str) -> Self { - match encode_symbol_to_felt(symbol) { - Ok(felt) => Self(felt), - // We cannot format the error in a const context. 
- Err(_) => panic!("invalid token symbol"), - } + pub fn new_unchecked(symbol: &str) -> Self { + Self::new(symbol).expect("invalid token symbol") } /// Creates a new [`TokenSymbol`] instance from the provided token name string. /// /// # Errors /// Returns an error if: - /// - The length of the provided string is less than 1 or greater than 6. + /// - The length of the provided string is less than 1 or greater than 12. /// - The provided token string contains characters that are not uppercase ASCII. pub fn new(symbol: &str) -> Result { - let felt = encode_symbol_to_felt(symbol)?; - Ok(Self(felt)) - } + let len = symbol.len(); - /// Returns the token name string from the encoded [`TokenSymbol`] value. - /// - /// # Errors - /// Returns an error if: - /// - The encoded value exceeds the maximum value of [`Self::MAX_ENCODED_VALUE`]. - /// - The encoded token string length is less than 1 or greater than 6. - /// - The encoded token string length is less than the actual string length. - pub fn to_string(&self) -> Result { - decode_felt_to_symbol(self.0) - } -} + if len == 0 || len > Self::MAX_SYMBOL_LENGTH { + return Err(TokenSymbolError::InvalidLength(len)); + } -impl From for Felt { - fn from(symbol: TokenSymbol) -> Self { - symbol.0 + for byte in symbol.as_bytes() { + if !byte.is_ascii_uppercase() { + return Err(TokenSymbolError::InvalidCharacter); + } + } + + Ok(Self(String::from(symbol))) } -} -impl TryFrom<&str> for TokenSymbol { - type Error = TokenSymbolError; + /// Returns the [`Felt`] encoding of this token symbol. + /// + /// The alphabet used in the encoding process consists of the Latin capital letters as defined + /// in the ASCII table, having the length of 26 characters. + /// + /// The encoding is performed by multiplying the intermediate encoded value by the length of + /// the used alphabet and adding the relative index of the character to it. 
At the end of the + /// encoding process the length of the initial token string is added to the encoded value. + /// + /// Relative character index is computed by subtracting the index of the character "A" (65) + /// from the index of the currently processing character, e.g., `A = 65 - 65 = 0`, + /// `B = 66 - 65 = 1`, `...` , `Z = 90 - 65 = 25`. + pub fn as_element(&self) -> Felt { + let bytes = self.0.as_bytes(); + let len = bytes.len(); + + let mut encoded_value: u64 = 0; + let mut idx = 0; + + while idx < len { + let digit = (bytes[idx] - b'A') as u64; + encoded_value = encoded_value * Self::ALPHABET_LENGTH + digit; + idx += 1; + } - fn try_from(symbol: &str) -> Result { - TokenSymbol::new(symbol) + // add token length to the encoded value to be able to decode the exact number of + // characters + encoded_value = encoded_value * Self::ALPHABET_LENGTH + len as u64; + + Felt::new(encoded_value) } } -impl TryFrom for TokenSymbol { - type Error = TokenSymbolError; - - fn try_from(felt: Felt) -> Result { - // Check if the felt value is within the valid range - if felt.as_int() > Self::MAX_ENCODED_VALUE { - return Err(TokenSymbolError::ValueTooLarge(felt.as_int())); - } - Ok(TokenSymbol(felt)) +impl fmt::Display for TokenSymbol { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.0) } } -// HELPER FUNCTIONS -// ================================================================================================ - -/// Encodes the provided token symbol string into a single [`Felt`] value. -/// -/// The alphabet used in the decoding process consists of the Latin capital letters as defined in -/// the ASCII table, having the length of 26 characters. -/// -/// The encoding is performed by multiplying the intermediate encrypted value by the length of the -/// used alphabet and adding the relative index of the character to it. At the end of the encoding -/// process the length of the initial token string is added to the encrypted value. 
-/// -/// Relative character index is computed by subtracting the index of the character "A" (65) from the -/// index of the currently processing character, e.g., `A = 65 - 65 = 0`, `B = 66 - 65 = 1`, `...` , -/// `Z = 90 - 65 = 25`. -/// -/// # Errors -/// Returns an error if: -/// - The length of the provided string is less than 1 or greater than 6. -/// - The provided token string contains characters that are not uppercase ASCII. -const fn encode_symbol_to_felt(s: &str) -> Result { - let bytes = s.as_bytes(); - let len = bytes.len(); - - if len == 0 || len > TokenSymbol::MAX_SYMBOL_LENGTH { - return Err(TokenSymbolError::InvalidLength(len)); +impl From for Felt { + fn from(symbol: TokenSymbol) -> Self { + symbol.as_element() } +} - let mut encoded_value: u64 = 0; - let mut idx = 0; - - while idx < len { - let byte = bytes[idx]; - - if !byte.is_ascii_uppercase() { - return Err(TokenSymbolError::InvalidCharacter); - } - - let digit = (byte - b'A') as u64; - encoded_value = encoded_value * TokenSymbol::ALPHABET_LENGTH + digit; - idx += 1; +impl From<&TokenSymbol> for Felt { + fn from(symbol: &TokenSymbol) -> Self { + symbol.as_element() } +} - // add token length to the encoded value to be able to decode the exact number of characters - encoded_value = encoded_value * TokenSymbol::ALPHABET_LENGTH + len as u64; +impl TryFrom<&str> for TokenSymbol { + type Error = TokenSymbolError; - Ok(Felt::new(encoded_value)) + fn try_from(symbol: &str) -> Result { + TokenSymbol::new(symbol) + } } -/// Decodes a [Felt] representation of the token symbol into a string. +/// Decodes a [`Felt`] representation of the token symbol into a [`TokenSymbol`]. /// /// The alphabet used in the decoding process consists of the Latin capital letters as defined in /// the ASCII table, having the length of 26 characters. 
/// -/// The decoding is performed by getting the modulus of the intermediate encrypted value by the +/// The decoding is performed by getting the modulus of the intermediate encoded value by the /// length of the used alphabet and then dividing the intermediate value by the length of the -/// alphabet to shift to the next character. At the beginning of the decoding process the length of -/// the initial token string is obtained from the encrypted value. After that the value obtained +/// alphabet to shift to the next character. At the beginning of the decoding process the length +/// of the initial token string is obtained from the encoded value. After that the value obtained /// after taking the modulus represents the relative character index, which then gets converted to /// the ASCII index. /// -/// Final ASCII character idex is computed by adding the index of the character "A" (65) to the -/// index of the currently processing character, e.g., `A = 0 + 65 = 65`, `B = 1 + 65 = 66`, `...` , -/// `Z = 25 + 65 = 90`. -/// -/// # Errors -/// Returns an error if: -/// - The encoded value exceeds the maximum value of [`TokenSymbol::MAX_ENCODED_VALUE`]. -/// - The encoded token string length is less than 1 or greater than 6. -/// - The encoded token string length is less than the actual string length. -fn decode_felt_to_symbol(encoded_felt: Felt) -> Result { - let encoded_value = encoded_felt.as_int(); - - // Check if the encoded value is within the valid range - if encoded_value > TokenSymbol::MAX_ENCODED_VALUE { - return Err(TokenSymbolError::ValueTooLarge(encoded_value)); - } +/// Final ASCII character index is computed by adding the index of the character "A" (65) to the +/// index of the currently processing character, e.g., `A = 0 + 65 = 65`, `B = 1 + 65 = 66`, +/// `...` , `Z = 25 + 65 = 90`. 
+impl TryFrom for TokenSymbol { + type Error = TokenSymbolError; - let mut decoded_string = String::new(); - let mut remaining_value = encoded_value; + fn try_from(felt: Felt) -> Result { + let encoded_value = felt.as_canonical_u64(); + if encoded_value < Self::MIN_ENCODED_VALUE { + return Err(TokenSymbolError::ValueTooSmall(encoded_value)); + } + if encoded_value > Self::MAX_ENCODED_VALUE { + return Err(TokenSymbolError::ValueTooLarge(encoded_value)); + } - // get the token symbol length - let token_len = (remaining_value % TokenSymbol::ALPHABET_LENGTH) as usize; - if token_len == 0 || token_len > TokenSymbol::MAX_SYMBOL_LENGTH { - return Err(TokenSymbolError::InvalidLength(token_len)); - } - remaining_value /= TokenSymbol::ALPHABET_LENGTH; + let mut decoded_string = String::new(); + let mut remaining_value = encoded_value; - for _ in 0..token_len { - let digit = (remaining_value % TokenSymbol::ALPHABET_LENGTH) as u8; - let char = (digit + b'A') as char; - decoded_string.insert(0, char); - remaining_value /= TokenSymbol::ALPHABET_LENGTH; - } + // get the token symbol length + let token_len = (remaining_value % Self::ALPHABET_LENGTH) as usize; + if token_len == 0 || token_len > Self::MAX_SYMBOL_LENGTH { + return Err(TokenSymbolError::InvalidLength(token_len)); + } + remaining_value /= Self::ALPHABET_LENGTH; - // return an error if some data still remains after specified number of characters have been - // decoded. - if remaining_value != 0 { - return Err(TokenSymbolError::DataNotFullyDecoded); - } + for _ in 0..token_len { + let digit = (remaining_value % Self::ALPHABET_LENGTH) as u8; + let char = (digit + b'A') as char; + decoded_string.insert(0, char); + remaining_value /= Self::ALPHABET_LENGTH; + } + + // return an error if some data still remains after specified number of characters have + // been decoded. 
+ if remaining_value != 0 { + return Err(TokenSymbolError::DataNotFullyDecoded); + } - Ok(decoded_string) + Ok(TokenSymbol(decoded_string)) + } } // TESTS @@ -203,58 +180,60 @@ fn decode_felt_to_symbol(encoded_felt: Felt) -> Result #[cfg(test)] mod test { + use alloc::string::ToString; + use assert_matches::assert_matches; - use super::{ - Felt, - TokenSymbol, - TokenSymbolError, - decode_felt_to_symbol, - encode_symbol_to_felt, - }; + use super::{Felt, TokenSymbol, TokenSymbolError}; #[test] fn test_token_symbol_decoding_encoding() { - let symbols = vec!["AAAAAA", "AAAAB", "AAAC", "ABC", "BC", "A", "B", "ZZZZZZ"]; + let symbols = vec![ + "AAAAAA", + "AAAAB", + "AAAC", + "ABC", + "BC", + "A", + "B", + "ZZZZZZ", + "ABCDEFGH", + "MIDENCRYPTO", + "ZZZZZZZZZZZZ", + ]; for symbol in symbols { let token_symbol = TokenSymbol::try_from(symbol).unwrap(); - let decoded_symbol = TokenSymbol::to_string(&token_symbol).unwrap(); + let decoded_symbol = token_symbol.to_string(); assert_eq!(symbol, decoded_symbol); } - let symbol = ""; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidLength(0)); + let err = TokenSymbol::new("").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidLength(0)); - let symbol = "ABCDEFG"; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidLength(7)); + let err = TokenSymbol::new("ABCDEFGHIJKLM").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidLength(13)); - let symbol = "$$$"; - let felt = encode_symbol_to_felt(symbol); - assert_matches!(felt.unwrap_err(), TokenSymbolError::InvalidCharacter); + let err = TokenSymbol::new("$$$").unwrap_err(); + assert_matches!(err, TokenSymbolError::InvalidCharacter); - let symbol = "ABCDEF"; - let token_symbol = TokenSymbol::try_from(symbol); - assert!(token_symbol.is_ok()); - let token_symbol_felt: Felt = token_symbol.unwrap().into(); - assert_eq!(token_symbol_felt, 
encode_symbol_to_felt(symbol).unwrap()); + let symbol = "ABCDEFGHIJKL"; + let token_symbol = TokenSymbol::new(symbol).unwrap(); + let token_symbol_felt: Felt = token_symbol.into(); + assert_eq!(token_symbol_felt, TokenSymbol::new(symbol).unwrap().as_element()); } /// Checks that if the encoded length of the token is less than the actual number of token - /// characters, [decode_felt_to_symbol] procedure should return the - /// [TokenSymbolError::DataNotFullyDecoded] error. + /// characters, decoding should return the [TokenSymbolError::DataNotFullyDecoded] error. #[test] fn test_invalid_token_len() { // encoded value of this token has `6` as the length of the initial token string let encoded_symbol = TokenSymbol::try_from("ABCDEF").unwrap(); // decrease encoded length by, for example, `3` - let invalid_encoded_symbol_u64 = Felt::from(encoded_symbol).as_int() - 3; + let invalid_encoded_symbol_u64 = Felt::from(encoded_symbol).as_canonical_u64() - 3; - // check that `decode_felt_to_symbol()` procedure returns an error in attempt to create a - // token from encoded token with invalid length - let err = decode_felt_to_symbol(Felt::new(invalid_encoded_symbol_u64)).unwrap_err(); + // check that decoding returns an error for a token with invalid length + let err = TokenSymbol::try_from(Felt::new(invalid_encoded_symbol_u64)).unwrap_err(); assert_matches!(err, TokenSymbolError::DataNotFullyDecoded); } @@ -262,61 +241,66 @@ mod test { /// represents the maximum possible encoded value. 
#[test] fn test_token_symbol_max_value() { - let token_symbol = TokenSymbol::try_from("ZZZZZZ").unwrap(); - assert_eq!(Felt::from(token_symbol).as_int(), TokenSymbol::MAX_ENCODED_VALUE); + let token_symbol = TokenSymbol::try_from("ZZZZZZZZZZZZ").unwrap(); + assert_eq!(Felt::from(token_symbol).as_canonical_u64(), TokenSymbol::MAX_ENCODED_VALUE); } - // Const function tests - // -------------------------------------------------------------------------------------------- + /// Utility test to make sure that the [TokenSymbol::MIN_ENCODED_VALUE] constant still + /// represents the minimum possible encoded value. + #[test] + fn test_token_symbol_min_value() { + let token_symbol = TokenSymbol::try_from("A").unwrap(); + assert_eq!(Felt::from(token_symbol).as_canonical_u64(), TokenSymbol::MIN_ENCODED_VALUE); + } - const _TOKEN0: TokenSymbol = TokenSymbol::from_static_str("A"); - const _TOKEN1: TokenSymbol = TokenSymbol::from_static_str("ETH"); - const _TOKEN2: TokenSymbol = TokenSymbol::from_static_str("MIDEN"); - const _TOKEN3: TokenSymbol = TokenSymbol::from_static_str("ZZZZZZ"); + /// Checks that [TokenSymbol::try_from(Felt)] returns an error for values below the minimum. 
+ #[test] + fn test_token_symbol_underflow() { + let err = TokenSymbol::try_from(Felt::ZERO).unwrap_err(); + assert_matches!(err, TokenSymbolError::ValueTooSmall(0)); + } + + // new_unchecked tests + // -------------------------------------------------------------------------------------------- #[test] - fn test_from_static_str_matches_new() { - // Test that from_static_str produces the same result as new - let symbols = ["A", "BC", "ETH", "MIDEN", "ZZZZZZ"]; + fn test_new_unchecked_matches_new() { + // Test that new_unchecked produces the same result as new + let symbols = ["A", "BC", "ETH", "MIDEN", "ZZZZZZ", "ABCDEFGH", "ZZZZZZZZZZZZ"]; for symbol in symbols { let from_new = TokenSymbol::new(symbol).unwrap(); - let from_static = TokenSymbol::from_static_str(symbol); - assert_eq!( - Felt::from(from_new), - Felt::from(from_static), - "Mismatch for symbol: {}", - symbol - ); + let from_static = TokenSymbol::new_unchecked(symbol); + assert_eq!(from_new, from_static, "Mismatch for symbol: {}", symbol); } } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_empty_string() { - TokenSymbol::from_static_str(""); + TokenSymbol::new_unchecked(""); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_too_long_string() { - TokenSymbol::from_static_str("ABCDEFG"); + TokenSymbol::new_unchecked("ABCDEFGHIJKLM"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_lowercase() { - TokenSymbol::from_static_str("eth"); + TokenSymbol::new_unchecked("eth"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_invalid_character() { - TokenSymbol::from_static_str("ET$"); + TokenSymbol::new_unchecked("ET$"); } #[test] #[should_panic(expected = "invalid token symbol")] fn token_symbol_panics_on_number() { - TokenSymbol::from_static_str("ETH1"); + TokenSymbol::new_unchecked("ETH1"); } } diff --git a/crates/miden-protocol/src/asset/vault/asset_id.rs 
b/crates/miden-protocol/src/asset/vault/asset_id.rs new file mode 100644 index 0000000000..5976ca4df9 --- /dev/null +++ b/crates/miden-protocol/src/asset/vault/asset_id.rs @@ -0,0 +1,43 @@ +use core::fmt::Display; + +use crate::Felt; + +/// The [`AssetId`] in an [`AssetVaultKey`](crate::asset::AssetVaultKey) distinguishes different +/// assets issued by the same faucet. +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +pub struct AssetId { + suffix: Felt, + prefix: Felt, +} + +impl AssetId { + /// Constructs an asset ID from its parts. + pub fn new(suffix: Felt, prefix: Felt) -> Self { + Self { suffix, prefix } + } + + /// Returns the suffix of the asset ID. + pub fn suffix(&self) -> Felt { + self.suffix + } + + /// Returns the prefix of the asset ID. + pub fn prefix(&self) -> Felt { + self.prefix + } + + /// Returns `true` if both prefix and suffix are zero, `false` otherwise. + pub fn is_empty(&self) -> bool { + self.prefix == Felt::ZERO && self.suffix == Felt::ZERO + } +} + +impl Display for AssetId { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.write_fmt(format_args!( + "0x{:016x}{:016x}", + self.prefix().as_canonical_u64(), + self.suffix().as_canonical_u64() + )) + } +} diff --git a/crates/miden-protocol/src/asset/vault/asset_witness.rs b/crates/miden-protocol/src/asset/vault/asset_witness.rs index 503b468d41..4c0cffbdc5 100644 --- a/crates/miden-protocol/src/asset/vault/asset_witness.rs +++ b/crates/miden-protocol/src/asset/vault/asset_witness.rs @@ -1,3 +1,4 @@ +use alloc::boxed::Box; use alloc::string::ToString; use miden_crypto::merkle::InnerNodeInfo; @@ -6,7 +7,13 @@ use miden_crypto::merkle::smt::{SmtLeaf, SmtProof}; use super::vault_key::AssetVaultKey; use crate::asset::Asset; use crate::errors::AssetError; -use crate::utils::serde::{Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A witness 
of an asset in an [`AssetVault`](super::AssetVault). /// @@ -23,17 +30,12 @@ impl AssetWitness { /// # Errors /// /// Returns an error if: - /// - any of the entries in the SMT leaf is not a valid asset. - /// - any of the entries' vault keys does not match the expected vault key of the asset. + /// - any of the key value pairs in the SMT leaf do not form a valid asset. pub fn new(smt_proof: SmtProof) -> Result { - for (vault_key, asset) in smt_proof.leaf().entries() { - let asset = Asset::try_from(asset)?; - if *vault_key != asset.vault_key().into() { - return Err(AssetError::AssetVaultKeyMismatch { - actual: *vault_key, - expected: asset.vault_key().into(), - }); - } + for (vault_key, asset_value) in smt_proof.leaf().entries() { + // This ensures that vault key and value are consistent. + Asset::from_key_value_words(*vault_key, *asset_value) + .map_err(|err| AssetError::AssetWitnessInvalid(Box::new(err)))?; } Ok(Self(smt_proof)) @@ -72,8 +74,9 @@ impl AssetWitness { SmtLeaf::Multiple(kv_pairs) => kv_pairs, }; - entries.iter().map(|(_key, value)| { - Asset::try_from(value).expect("asset witness should track valid assets") + entries.iter().map(|(key, value)| { + Asset::from_key_value_words(*key, *value) + .expect("asset witness should track valid assets") }) } @@ -93,15 +96,13 @@ impl From for SmtProof { } impl Serializable for AssetWitness { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.0.write_into(target); } } impl Deserializable for AssetWitness { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let proof = SmtProof::read_from(source)?; Self::new(proof).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } @@ -132,7 +133,9 @@ mod tests { let err = AssetWitness::new(proof).unwrap_err(); - assert_matches!(err, AssetError::InvalidFaucetAccountId(_)); + assert_matches!(err, AssetError::AssetWitnessInvalid(source) => { + assert_matches!(*source, 
AssetError::InvalidFaucetAccountId(_)); + }); Ok(()) } @@ -144,15 +147,16 @@ mod tests { let fungible_asset = FungibleAsset::mock(500); let non_fungible_asset = NonFungibleAsset::mock(&[1]); - let smt = - Smt::with_entries([(fungible_asset.vault_key().into(), non_fungible_asset.into())])?; + let smt = Smt::with_entries([( + fungible_asset.vault_key().into(), + non_fungible_asset.to_value_word(), + )])?; let proof = smt.open(&fungible_asset.vault_key().into()); let err = AssetWitness::new(proof).unwrap_err(); - assert_matches!(err, AssetError::AssetVaultKeyMismatch { actual, expected } => { - assert_eq!(actual, fungible_asset.vault_key().into()); - assert_eq!(expected, non_fungible_asset.vault_key().into()); + assert_matches!(err, AssetError::AssetWitnessInvalid(source) => { + assert_matches!(*source, AssetError::FungibleAssetValueMostSignificantElementsMustBeZero(_)); }); Ok(()) diff --git a/crates/miden-protocol/src/asset/vault/mod.rs b/crates/miden-protocol/src/asset/vault/mod.rs index 94bb7ab81a..35c7f1c643 100644 --- a/crates/miden-protocol/src/asset/vault/mod.rs +++ b/crates/miden-protocol/src/asset/vault/mod.rs @@ -1,7 +1,7 @@ use alloc::string::ToString; +use alloc::vec::Vec; use miden_crypto::merkle::InnerNodeInfo; -use miden_processor::SMT_DEPTH; use super::{ AccountType, @@ -16,7 +16,7 @@ use super::{ }; use crate::Word; use crate::account::{AccountId, AccountVaultDelta, NonFungibleDeltaAction}; -use crate::crypto::merkle::smt::Smt; +use crate::crypto::merkle::smt::{SMT_DEPTH, Smt}; use crate::errors::AssetVaultError; mod partial; @@ -28,6 +28,9 @@ pub use asset_witness::AssetWitness; mod vault_key; pub use vault_key::AssetVaultKey; +mod asset_id; +pub use asset_id::AssetId; + // ASSET VAULT // ================================================================================================ @@ -61,7 +64,7 @@ impl AssetVault { pub fn new(assets: &[Asset]) -> Result { Ok(Self { asset_tree: Smt::with_entries( - assets.iter().map(|asset| 
(asset.vault_key().into(), (*asset).into())), + assets.iter().map(|asset| (asset.vault_key().to_word(), asset.to_value_word())), ) .map_err(AssetVaultError::DuplicateAsset)?, }) @@ -75,10 +78,25 @@ impl AssetVault { self.asset_tree.root() } + /// Returns the asset corresponding to the provided asset vault key, or `None` if the asset + /// doesn't exist. + pub fn get(&self, asset_vault_key: AssetVaultKey) -> Option { + let asset_value = self.asset_tree.get_value(&asset_vault_key.to_word()); + + if asset_value.is_empty() { + None + } else { + Some( + Asset::from_key_value(asset_vault_key, asset_value) + .expect("asset vault should only store valid assets"), + ) + } + } + /// Returns true if the specified non-fungible asset is stored in this vault. pub fn has_non_fungible_asset(&self, asset: NonFungibleAsset) -> Result { // check if the asset is stored in the vault - match self.asset_tree.get_value(&asset.vault_key().into()) { + match self.asset_tree.get_value(&asset.vault_key().to_word()) { asset if asset == Smt::EMPTY_VALUE => Ok(false), _ => Ok(true), } @@ -94,21 +112,22 @@ impl AssetVault { return Err(AssetVaultError::NotAFungibleFaucetId(faucet_id)); } - // if the tree value is [0, 0, 0, 0], the asset is not stored in the vault - match self.asset_tree.get_value( - &AssetVaultKey::from_account_id(faucet_id) - .expect("faucet ID should be of type fungible") - .into(), - ) { - asset if asset == Smt::EMPTY_VALUE => Ok(0), - asset => Ok(FungibleAsset::new_unchecked(asset).amount()), - } + let vault_key = + AssetVaultKey::new_fungible(faucet_id).expect("faucet ID should be of type fungible"); + let asset_value = self.asset_tree.get_value(&vault_key.to_word()); + let asset = FungibleAsset::from_key_value(vault_key, asset_value) + .expect("asset vault should only store valid assets"); + + Ok(asset.amount()) } /// Returns an iterator over the assets stored in the vault. pub fn assets(&self) -> impl Iterator + '_ { // SAFETY: The asset tree tracks only valid assets. 
- self.asset_tree.entries().map(|(_key, value)| Asset::new_unchecked(*value)) + self.asset_tree.entries().map(|(key, value)| { + Asset::from_key_value_words(*key, *value) + .expect("asset vault should only store valid assets") + }) } /// Returns an iterator over the inner nodes of the underlying [`Smt`]. @@ -120,7 +139,7 @@ impl AssetVault { /// /// The `vault_key` can be obtained with [`Asset::vault_key`]. pub fn open(&self, vault_key: AssetVaultKey) -> AssetWitness { - let smt_proof = self.asset_tree.open(&vault_key.into()); + let smt_proof = self.asset_tree.open(&vault_key.to_word()); // SAFETY: The asset vault should only contain valid assets. AssetWitness::new_unchecked(smt_proof) } @@ -153,16 +172,19 @@ impl AssetVault { /// /// # Errors /// Returns an error: - /// - If the total value of assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - If the delta contains an addition/subtraction for a fungible asset that is not stored in /// the vault. /// - If the delta contains a non-fungible asset removal that is not stored in the vault. /// - If the delta contains a non-fungible asset addition that is already stored in the vault. /// - The maximum number of leaves per asset is exceeded. 
pub fn apply_delta(&mut self, delta: &AccountVaultDelta) -> Result<(), AssetVaultError> { - for (&faucet_id, &delta) in delta.fungible().iter() { - let asset = FungibleAsset::new(faucet_id, delta.unsigned_abs()) - .expect("Not a fungible faucet ID or delta is too large"); + for (vault_key, &delta) in delta.fungible().iter() { + // SAFETY: fungible asset delta should only contain fungible faucet IDs and delta amount + // should be in bounds + let asset = FungibleAsset::new(vault_key.faucet_id(), delta.unsigned_abs()) + .expect("fungible asset delta should be valid") + .with_callbacks(vault_key.callback_flag()); match delta >= 0 { true => self.add_fungible_asset(asset), false => self.remove_fungible_asset(asset), @@ -184,7 +206,7 @@ impl AssetVault { /// Add the specified asset to the vault. /// /// # Errors - /// - If the total value of two fungible assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - If the vault already contains the same non-fungible asset. /// - The maximum number of leaves per asset is exceeded. pub fn add_asset(&mut self, asset: Asset) -> Result { @@ -198,26 +220,26 @@ impl AssetVault { /// issued by the same faucet, the amounts are added together. /// /// # Errors - /// - If the total value of assets is greater than or equal to 2^63. + /// - If the total value of the added assets is greater than [`FungibleAsset::MAX_AMOUNT`]. /// - The maximum number of leaves per asset is exceeded. fn add_fungible_asset( &mut self, - asset: FungibleAsset, + other_asset: FungibleAsset, ) -> Result { - // fetch current asset value from the tree and add the new asset to it. - let new: FungibleAsset = match self.asset_tree.get_value(&asset.vault_key().into()) { - current if current == Smt::EMPTY_VALUE => asset, - current => { - let current = FungibleAsset::new_unchecked(current); - current.add(asset).map_err(AssetVaultError::AddFungibleAssetBalanceError)? 
- }, - }; + let current_asset_value = self.asset_tree.get_value(&other_asset.vault_key().to_word()); + let current_asset = + FungibleAsset::from_key_value(other_asset.vault_key(), current_asset_value) + .expect("asset vault should store valid assets"); + + let new_asset = current_asset + .add(other_asset) + .map_err(AssetVaultError::AddFungibleAssetBalanceError)?; + self.asset_tree - .insert(new.vault_key().into(), new.into()) + .insert(new_asset.vault_key().to_word(), new_asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; - // return the new asset - Ok(new) + Ok(new_asset) } /// Add the specified non-fungible asset to the vault. @@ -232,7 +254,7 @@ impl AssetVault { // add non-fungible asset to the vault let old = self .asset_tree - .insert(asset.vault_key().into(), asset.into()) + .insert(asset.vault_key().to_word(), asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; // if the asset already exists, return an error @@ -273,30 +295,37 @@ impl AssetVault { /// - The maximum number of leaves per asset is exceeded. fn remove_fungible_asset( &mut self, - asset: FungibleAsset, + other_asset: FungibleAsset, ) -> Result { - // fetch the asset from the vault. - let new: FungibleAsset = match self.asset_tree.get_value(&asset.vault_key().into()) { - current if current == Smt::EMPTY_VALUE => { - return Err(AssetVaultError::FungibleAssetNotFound(asset)); - }, - current => { - let current = FungibleAsset::new_unchecked(current); - current.sub(asset).map_err(AssetVaultError::SubtractFungibleAssetBalanceError)? - }, - }; + let current_asset_value = self.asset_tree.get_value(&other_asset.vault_key().to_word()); + let current_asset = + FungibleAsset::from_key_value(other_asset.vault_key(), current_asset_value) + .expect("asset vault should store valid assets"); + + // If the asset's amount is 0, we consider it absent from the vault. 
+ if current_asset.amount() == 0 { + return Err(AssetVaultError::FungibleAssetNotFound(other_asset)); + } + + let new_asset = current_asset + .sub(other_asset) + .map_err(AssetVaultError::SubtractFungibleAssetBalanceError)?; + + // Note that if new_asset's amount is 0, its value's word representation is equal to + // the empty word, which results in the removal of the entire entry from the corresponding + // leaf. + #[cfg(debug_assertions)] + { + if new_asset.amount() == 0 { + assert!(new_asset.to_value_word().is_empty()) + } + } - // if the amount of the asset is zero, remove the asset from the vault. - let value = match new.amount() { - 0 => Smt::EMPTY_VALUE, - _ => new.into(), - }; self.asset_tree - .insert(new.vault_key().into(), value) + .insert(new_asset.vault_key().to_word(), new_asset.to_value_word()) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; - // return the asset that was removed. - Ok(asset) + Ok(other_asset) } /// Remove the specified non-fungible asset from the vault and returns the asset that was just @@ -312,7 +341,7 @@ impl AssetVault { // remove the asset from the vault. let old = self .asset_tree - .insert(asset.vault_key().into(), Smt::EMPTY_VALUE) + .insert(asset.vault_key().to_word(), Smt::EMPTY_VALUE) .map_err(AssetVaultError::MaxLeafEntriesExceeded)?; // return an error if the asset did not exist in the vault. 
@@ -353,7 +382,24 @@ impl Serializable for AssetVault { impl Deserializable for AssetVault { fn read_from(source: &mut R) -> Result { let num_assets = source.read_usize()?; - let assets = source.read_many::(num_assets)?; + let assets = source.read_many_iter::(num_assets)?.collect::, _>>()?; Self::new(&assets).map_err(|err| DeserializationError::InvalidValue(err.to_string())) } } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use assert_matches::assert_matches; + + use super::*; + + #[test] + fn vault_fails_on_absent_fungible_asset() { + let mut vault = AssetVault::default(); + let err = vault.remove_asset(FungibleAsset::mock(50)).unwrap_err(); + assert_matches!(err, AssetVaultError::FungibleAssetNotFound(_)); + } +} diff --git a/crates/miden-protocol/src/asset/vault/partial.rs b/crates/miden-protocol/src/asset/vault/partial.rs index 1427a8902c..970d3c8508 100644 --- a/crates/miden-protocol/src/asset/vault/partial.rs +++ b/crates/miden-protocol/src/asset/vault/partial.rs @@ -7,7 +7,13 @@ use super::{AssetVault, AssetVaultKey}; use crate::Word; use crate::asset::{Asset, AssetWitness}; use crate::errors::PartialAssetVaultError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A partial representation of an [`AssetVault`], containing only proofs for a subset of assets. /// @@ -98,13 +104,16 @@ impl PartialVault { /// Returns an error if: /// - the key is not tracked by this partial SMT. 
pub fn get(&self, vault_key: AssetVaultKey) -> Result, MerkleError> { - self.partial_smt.get_value(&vault_key.into()).map(|word| { - if word.is_empty() { + self.partial_smt.get_value(&vault_key.into()).map(|asset_value| { + if asset_value.is_empty() { None } else { // SAFETY: If this returned a non-empty word, then it should be a valid asset, // because the vault should only track valid ones. - Some(Asset::try_from(word).expect("partial vault should only track valid assets")) + Some( + Asset::from_key_value(vault_key, asset_value) + .expect("partial vault should only track valid assets"), + ) } }) } @@ -136,17 +145,11 @@ impl PartialVault { fn validate_entries<'a>( entries: impl IntoIterator, ) -> Result<(), PartialAssetVaultError> { - for (vault_key, asset) in entries { - let asset = Asset::try_from(asset).map_err(|source| { - PartialAssetVaultError::InvalidAssetInSmt { entry: *asset, source } + for (vault_key, asset_value) in entries { + // This ensures that vault key and value are consistent. 
+ Asset::from_key_value_words(*vault_key, *asset_value).map_err(|source| { + PartialAssetVaultError::InvalidAssetInSmt { entry: *asset_value, source } })?; - - if *vault_key != asset.vault_key().into() { - return Err(PartialAssetVaultError::AssetVaultKeyMismatch { - expected: asset.vault_key(), - actual: *vault_key, - }); - } } Ok(()) @@ -216,15 +219,12 @@ mod tests { fn partial_vault_ensures_asset_vault_key_matches() -> anyhow::Result<()> { let asset = FungibleAsset::mock(500); let invalid_vault_key = Word::from([0, 1, 2, 3u32]); - let smt = Smt::with_entries([(invalid_vault_key, asset.into())])?; + let smt = Smt::with_entries([(invalid_vault_key, asset.to_value_word())])?; let proof = smt.open(&invalid_vault_key); let partial_smt = PartialSmt::from_proofs([proof.clone()])?; let err = PartialVault::try_from(partial_smt).unwrap_err(); - assert_matches!(err, PartialAssetVaultError::AssetVaultKeyMismatch { expected, actual } => { - assert_eq!(actual, invalid_vault_key); - assert_eq!(expected, asset.vault_key()); - }); + assert_matches!(err, PartialAssetVaultError::InvalidAssetInSmt { .. 
}); Ok(()) } diff --git a/crates/miden-protocol/src/asset/vault/vault_key.rs b/crates/miden-protocol/src/asset/vault/vault_key.rs index 2cff63d04d..290d7d6ba3 100644 --- a/crates/miden-protocol/src/asset/vault/vault_key.rs +++ b/crates/miden-protocol/src/asset/vault/vault_key.rs @@ -1,96 +1,158 @@ +use alloc::boxed::Box; +use alloc::string::ToString; use core::fmt; +use miden_core::LexicographicWord; use miden_crypto::merkle::smt::LeafIndex; -use miden_processor::SMT_DEPTH; -use crate::Word; -use crate::account::AccountType::FungibleFaucet; -use crate::account::{AccountId, AccountIdPrefix}; -use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; +use crate::account::AccountId; +use crate::account::AccountType::{self}; +use crate::asset::vault::AssetId; +use crate::asset::{Asset, AssetCallbackFlag, FungibleAsset, NonFungibleAsset}; +use crate::crypto::merkle::smt::SMT_DEPTH; +use crate::errors::AssetError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; -/// The key of an [`Asset`] in the asset vault. +/// The unique identifier of an [`Asset`] in the [`AssetVault`](crate::asset::AssetVault). /// -/// The layout of an asset key is: -/// - Fungible asset key: `[0, 0, faucet_id_suffix, faucet_id_prefix]`. -/// - Non-fungible asset key: `[faucet_id_prefix, hash1, hash2, hash0']`, where `hash0'` is -/// equivalent to `hash0` with the fungible bit set to `0`. See [`NonFungibleAsset::vault_key`] -/// for more details. -/// -/// For details on the layout of an asset, see the documentation of [`Asset`]. -/// -/// ## Guarantees -/// -/// This type guarantees that it contains a valid fungible or non-fungible asset key: -/// - For fungible assets -/// - The felt at index 3 has the fungible bit set to 1 and it is a valid account ID prefix. -/// - The felt at index 2 is a valid account ID suffix. 
-/// - For non-fungible assets -/// - The felt at index 3 has the fungible bit set to 0. -/// - The felt at index 0 is a valid account ID prefix. -/// -/// The fungible bit is the bit in the [`AccountId`] that encodes whether the ID is a faucet. -#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)] -pub struct AssetVaultKey(Word); +/// Its [`Word`] layout is: +/// ```text +/// [ +/// asset_id_suffix (64 bits), +/// asset_id_prefix (64 bits), +/// [faucet_id_suffix (56 bits) | 7 zero bits | callbacks_enabled (1 bit)], +/// faucet_id_prefix (64 bits) +/// ] +/// ``` +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub struct AssetVaultKey { + /// The asset ID of the vault key. + asset_id: AssetId, + + /// The ID of the faucet that issued the asset. + faucet_id: AccountId, + + /// Determines whether callbacks are enabled. + callback_flag: AssetCallbackFlag, +} impl AssetVaultKey { - /// Creates a new [`AssetVaultKey`] from the given [`Word`] **without performing validation**. + /// The serialized size of an [`AssetVaultKey`] in bytes. + /// + /// Serialized as its [`Word`] representation (4 field elements). + pub const SERIALIZED_SIZE: usize = Word::SERIALIZED_SIZE; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates an [`AssetVaultKey`] for a native asset with callbacks disabled. /// - /// ## Warning + /// # Errors /// - /// This function **does not check** whether the provided `Word` represents a valid - /// fungible or non-fungible asset key. 
- pub fn new_unchecked(value: Word) -> Self { - Self(value) + /// Returns an error if: + /// - the provided ID is not of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) or + /// [`AccountType::NonFungibleFaucet`](crate::account::AccountType::NonFungibleFaucet) + /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + pub fn new_native(asset_id: AssetId, faucet_id: AccountId) -> Result { + Self::new(asset_id, faucet_id, AssetCallbackFlag::Disabled) } - /// Returns an [`AccountIdPrefix`] from the asset key. - pub fn faucet_id_prefix(&self) -> AccountIdPrefix { - if self.is_fungible() { - AccountIdPrefix::new_unchecked(self.0[3]) - } else { - AccountIdPrefix::new_unchecked(self.0[0]) + /// Creates an [`AssetVaultKey`] from its parts with the given [`AssetCallbackFlag`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the provided ID is not of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) or + /// [`AccountType::NonFungibleFaucet`](crate::account::AccountType::NonFungibleFaucet) + /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + pub fn new( + asset_id: AssetId, + faucet_id: AccountId, + callback_flag: AssetCallbackFlag, + ) -> Result { + if !faucet_id.is_faucet() { + return Err(AssetError::InvalidFaucetAccountId(Box::from(format!( + "expected account ID of type faucet, found account type {}", + faucet_id.account_type() + )))); } - } - /// Returns the [`AccountId`] from the asset key if it is a fungible asset, `None` otherwise. 
- pub fn faucet_id(&self) -> Option { - if self.is_fungible() { - Some(AccountId::new_unchecked([self.0[3], self.0[2]])) - } else { - None + if matches!(faucet_id.account_type(), AccountType::FungibleFaucet) && !asset_id.is_empty() { + return Err(AssetError::FungibleAssetIdMustBeZero(asset_id)); } + + Ok(Self { asset_id, faucet_id, callback_flag }) } - /// Returns the leaf index of a vault key. - pub fn to_leaf_index(&self) -> LeafIndex { - LeafIndex::::from(self.0) + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the word representation of the vault key. + /// + /// See the type-level documentation for details. + pub fn to_word(&self) -> Word { + let faucet_suffix = self.faucet_id.suffix().as_canonical_u64(); + // The lower 8 bits of the faucet suffix are guaranteed to be zero and so it is used to + // encode the asset metadata. + debug_assert!(faucet_suffix & 0xff == 0, "lower 8 bits of faucet suffix must be zero"); + let faucet_id_suffix_and_metadata = faucet_suffix | self.callback_flag.as_u8() as u64; + let faucet_id_suffix_and_metadata = Felt::try_from(faucet_id_suffix_and_metadata) + .expect("highest bit should still be zero resulting in a valid felt"); + + Word::new([ + self.asset_id.suffix(), + self.asset_id.prefix(), + faucet_id_suffix_and_metadata, + self.faucet_id.prefix().as_felt(), + ]) + } + + /// Returns the [`AssetId`] of the vault key that distinguishes different assets issued by the + /// same faucet. + pub fn asset_id(&self) -> AssetId { + self.asset_id + } + + /// Returns the [`AccountId`] of the faucet that issued the asset. + pub fn faucet_id(&self) -> AccountId { + self.faucet_id + } + + /// Returns the [`AssetCallbackFlag`] flag of the vault key. + pub fn callback_flag(&self) -> AssetCallbackFlag { + self.callback_flag } /// Constructs a fungible asset's key from a faucet ID. 
/// /// Returns `None` if the provided ID is not of type /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet) - pub fn from_account_id(faucet_id: AccountId) -> Option { - match faucet_id.account_type() { - FungibleFaucet => { - let mut key = Word::empty(); - key[2] = faucet_id.suffix(); - key[3] = faucet_id.prefix().as_felt(); - Some(AssetVaultKey::new_unchecked(key)) - }, - _ => None, + pub fn new_fungible(faucet_id: AccountId) -> Option { + if matches!(faucet_id.account_type(), AccountType::FungibleFaucet) { + let asset_id = AssetId::new(Felt::ZERO, Felt::ZERO); + Some( + Self::new_native(asset_id, faucet_id) + .expect("we should have account type fungible faucet"), + ) + } else { + None } } - /// Returns `true` if the asset key is for a fungible asset, `false` otherwise. - fn is_fungible(&self) -> bool { - self.0[0].as_int() == 0 && self.0[1].as_int() == 0 - } -} - -impl fmt::Display for AssetVaultKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", self.0) + /// Returns the leaf index of a vault key. + pub fn to_leaf_index(&self) -> LeafIndex { + LeafIndex::::from(self.to_word()) } } @@ -99,7 +161,56 @@ impl fmt::Display for AssetVaultKey { impl From for Word { fn from(vault_key: AssetVaultKey) -> Self { - vault_key.0 + vault_key.to_word() + } +} + +impl Ord for AssetVaultKey { + /// Implements comparison based on [`LexicographicWord`]. + fn cmp(&self, other: &Self) -> core::cmp::Ordering { + LexicographicWord::new(self.to_word()).cmp(&LexicographicWord::new(other.to_word())) + } +} + +impl PartialOrd for AssetVaultKey { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl TryFrom for AssetVaultKey { + type Error = AssetError; + + /// Attempts to convert the provided [`Word`] into an [`AssetVaultKey`]. + /// + /// # Errors + /// + /// Returns an error if: + /// - the faucet ID in the key is invalid or not of a faucet type. 
+ /// - the asset ID limbs are not zero when `faucet_id` is of type + /// [`AccountType::FungibleFaucet`](crate::account::AccountType::FungibleFaucet). + fn try_from(key: Word) -> Result { + let asset_id_suffix = key[0]; + let asset_id_prefix = key[1]; + let faucet_id_suffix_and_metadata = key[2]; + let faucet_id_prefix = key[3]; + + let raw = faucet_id_suffix_and_metadata.as_canonical_u64(); + let callback_flag = AssetCallbackFlag::try_from((raw & 0xff) as u8)?; + let faucet_id_suffix = Felt::try_from(raw & 0xffff_ffff_ffff_ff00) + .expect("clearing lower bits should not produce an invalid felt"); + + let asset_id = AssetId::new(asset_id_suffix, asset_id_prefix); + let faucet_id = AccountId::try_from_elements(faucet_id_suffix, faucet_id_prefix) + .map_err(|err| AssetError::InvalidFaucetAccountId(Box::new(err)))?; + + Self::new(asset_id, faucet_id, callback_flag) + } +} + +impl fmt::Display for AssetVaultKey { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(&self.to_word().to_hex()) } } @@ -121,56 +232,63 @@ impl From for AssetVaultKey { } } +// SERIALIZATION +// ================================================================================================ + +impl Serializable for AssetVaultKey { + fn write_into(&self, target: &mut W) { + self.to_word().write_into(target); + } + + fn get_size_hint(&self) -> usize { + Self::SERIALIZED_SIZE + } +} + +impl Deserializable for AssetVaultKey { + fn read_from(source: &mut R) -> Result { + let word: Word = source.read()?; + Self::try_from(word).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + // TESTS // ================================================================================================ #[cfg(test)] mod tests { - use miden_core::Felt; - use super::*; - use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; - - fn make_non_fungible_key(prefix: u64) -> AssetVaultKey { - let word = [Felt::new(prefix), Felt::new(11), Felt::new(22), 
Felt::new(33)].into(); - AssetVaultKey::new_unchecked(word) - } + use crate::asset::AssetCallbackFlag; + use crate::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, + }; #[test] - fn test_faucet_id_for_fungible_asset() { - let id = AccountId::dummy( - [0xff; 15], - AccountIdVersion::Version0, - AccountType::FungibleFaucet, - AccountStorageMode::Public, - ); + fn asset_vault_key_word_roundtrip() -> anyhow::Result<()> { + let fungible_faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; + let nonfungible_faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET)?; - let key = - AssetVaultKey::from_account_id(id).expect("Expected AssetVaultKey for FungibleFaucet"); + for callback_flag in [AssetCallbackFlag::Disabled, AssetCallbackFlag::Enabled] { + // Fungible: asset_id must be zero. + let key = AssetVaultKey::new(AssetId::default(), fungible_faucet, callback_flag)?; - // faucet_id_prefix() should match AccountId prefix - assert_eq!(key.faucet_id_prefix(), id.prefix()); + let roundtripped = AssetVaultKey::try_from(key.to_word())?; + assert_eq!(key, roundtripped); + assert_eq!(key, AssetVaultKey::read_from_bytes(&key.to_bytes())?); - // faucet_id() should return the same account id - assert_eq!(key.faucet_id().unwrap(), id); - } + // Non-fungible: asset_id can be non-zero. 
+ let key = AssetVaultKey::new( + AssetId::new(Felt::from(42u32), Felt::from(99u32)), + nonfungible_faucet, + callback_flag, + )?; - #[test] - fn test_faucet_id_for_non_fungible_asset() { - let id = AccountId::dummy( - [0xff; 15], - AccountIdVersion::Version0, - AccountType::NonFungibleFaucet, - AccountStorageMode::Public, - ); - - let prefix_value = id.prefix().as_u64(); - let key = make_non_fungible_key(prefix_value); - - // faucet_id_prefix() should match AccountId prefix - assert_eq!(key.faucet_id_prefix(), id.prefix()); - - // faucet_id() should return the None - assert_eq!(key.faucet_id(), None); + let roundtripped = AssetVaultKey::try_from(key.to_word())?; + assert_eq!(key, roundtripped); + assert_eq!(key, AssetVaultKey::read_from_bytes(&key.to_bytes())?); + } + + Ok(()) } } diff --git a/crates/miden-protocol/src/batch/batch_id.rs b/crates/miden-protocol/src/batch/batch_id.rs index bcbb50e16a..b84769cbc8 100644 --- a/crates/miden-protocol/src/batch/batch_id.rs +++ b/crates/miden-protocol/src/batch/batch_id.rs @@ -5,7 +5,13 @@ use miden_protocol_macros::WordWrapper; use crate::account::AccountId; use crate::transaction::{ProvenTransaction, TransactionId}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word, ZERO}; // BATCH ID diff --git a/crates/miden-protocol/src/batch/input_output_note_tracker.rs b/crates/miden-protocol/src/batch/input_output_note_tracker.rs index 296cf021e6..3b90259899 100644 --- a/crates/miden-protocol/src/batch/input_output_note_tracker.rs +++ b/crates/miden-protocol/src/batch/input_output_note_tracker.rs @@ -250,7 +250,7 @@ impl InputOutputNoteTracker { // This could happen if the metadata of the notes is different, which we consider an // error. 
let input_commitment = input_note_header.commitment(); - let output_commitment = output_note.commitment(); + let output_commitment = output_note.to_commitment(); if output_commitment != input_commitment { return Err(InputOutputNoteTrackerError::NoteCommitmentMismatch { id, diff --git a/crates/miden-protocol/src/batch/note_tree.rs b/crates/miden-protocol/src/batch/note_tree.rs index 7897856389..e0aa847f01 100644 --- a/crates/miden-protocol/src/batch/note_tree.rs +++ b/crates/miden-protocol/src/batch/note_tree.rs @@ -3,7 +3,13 @@ use alloc::vec::Vec; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{LeafIndex, SimpleSmt}; use crate::note::{NoteId, NoteMetadata, compute_note_commitment}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{BATCH_NOTE_TREE_DEPTH, EMPTY_WORD, Word}; /// Wrapper over [SimpleSmt] for batch note tree. 
diff --git a/crates/miden-protocol/src/batch/ordered_batches.rs b/crates/miden-protocol/src/batch/ordered_batches.rs index 00b04fcf9a..749707480c 100644 --- a/crates/miden-protocol/src/batch/ordered_batches.rs +++ b/crates/miden-protocol/src/batch/ordered_batches.rs @@ -2,7 +2,13 @@ use alloc::vec::Vec; use crate::batch::ProvenBatch; use crate::transaction::OrderedTransactionHeaders; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ORDERED BATCHES // ================================================================================================ diff --git a/crates/miden-protocol/src/batch/proposed_batch.rs b/crates/miden-protocol/src/batch/proposed_batch.rs index 5d5cb0f97b..b0a96439ba 100644 --- a/crates/miden-protocol/src/batch/proposed_batch.rs +++ b/crates/miden-protocol/src/batch/proposed_batch.rs @@ -17,7 +17,13 @@ use crate::transaction::{ ProvenTransaction, TransactionHeader, }; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_OUTPUT_NOTES_PER_BATCH}; /// A proposed batch of transactions with all necessary data to validate it. @@ -52,7 +58,8 @@ pub struct ProposedBatch { /// [`InputNoteCommitment::nullifier`]. input_notes: InputNotes, /// The output notes of this batch. This consists of all notes created by transactions in the - /// batch that are not consumed within the same batch. These are sorted by [`OutputNote::id`]. + /// batch that are not consumed within the same batch. These are sorted by + /// [`OutputNote::id`]. 
output_notes: Vec, } @@ -426,14 +433,15 @@ impl Deserializable for ProposedBatch { mod tests { use anyhow::Context; use miden_crypto::merkle::mmr::{Mmr, PartialMmr}; + use miden_crypto::rand::test_utils::rand_value; use miden_verifier::ExecutionProof; - use winter_rand_utils::rand_value; use super::*; use crate::Word; + use crate::account::delta::AccountUpdateDetails; use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; use crate::asset::FungibleAsset; - use crate::transaction::ProvenTransactionBuilder; + use crate::transaction::{InputNoteCommitment, OutputNote, ProvenTransaction, TxAccountUpdate}; #[test] fn proposed_batch_serialization() -> anyhow::Result<()> { @@ -474,18 +482,25 @@ mod tests { let expiration_block_num = reference_block_header.block_num() + 1; let proof = ExecutionProof::new_dummy(); - let tx = ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( account_id, initial_account_commitment, final_account_commitment, account_delta_commitment, + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + let tx = ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), block_num, block_ref, FungibleAsset::mock(100).unwrap_fungible(), expiration_block_num, proof, ) - .build() .context("failed to build proven transaction")?; let batch = ProposedBatch::new( diff --git a/crates/miden-protocol/src/batch/proven_batch.rs b/crates/miden-protocol/src/batch/proven_batch.rs index 97075a8736..eb8aae5495 100644 --- a/crates/miden-protocol/src/batch/proven_batch.rs +++ b/crates/miden-protocol/src/batch/proven_batch.rs @@ -8,7 +8,13 @@ use crate::block::BlockNumber; use crate::errors::ProvenBatchError; use crate::note::Nullifier; use crate::transaction::{InputNoteCommitment, InputNotes, OrderedTransactionHeaders, OutputNote}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + 
Deserializable, + DeserializationError, + Serializable, +}; use crate::{MIN_PROOF_SECURITY_LEVEL, Word}; /// A transaction batch with an execution proof. diff --git a/crates/miden-protocol/src/block/account_tree/account_id_key.rs b/crates/miden-protocol/src/block/account_tree/account_id_key.rs new file mode 100644 index 0000000000..1974e866b5 --- /dev/null +++ b/crates/miden-protocol/src/block/account_tree/account_id_key.rs @@ -0,0 +1,156 @@ +use miden_crypto::merkle::smt::LeafIndex; + +use super::AccountId; +use crate::Word; +use crate::crypto::merkle::smt::SMT_DEPTH; +use crate::errors::AccountIdError; + +/// The account ID encoded as a key for use in AccountTree and advice maps in +/// `TransactionAdviceInputs`. +/// +/// Canonical word layout: +/// +/// [0, 0, suffix, prefix] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AccountIdKey(AccountId); + +impl AccountIdKey { + // Indices in the word layout where the prefix and suffix are stored. + const KEY_SUFFIX_IDX: usize = 2; + const KEY_PREFIX_IDX: usize = 3; + + /// Create from AccountId + pub fn new(id: AccountId) -> Self { + Self(id) + } + + /// Returns the underlying AccountId + pub fn account_id(&self) -> AccountId { + self.0 + } + + // SMT WORD REPRESENTATION + //--------------------------------------------------------------------------------------------------- + + /// Returns `[0, 0, suffix, prefix]` + pub fn as_word(&self) -> Word { + let mut key = Word::empty(); + + key[Self::KEY_SUFFIX_IDX] = self.0.suffix(); + key[Self::KEY_PREFIX_IDX] = self.0.prefix().as_felt(); + + key + } + + /// Construct from SMT word representation. + /// + /// Validates structure before converting. 
+ pub fn try_from_word(word: Word) -> Result { + AccountId::try_from_elements(word[Self::KEY_SUFFIX_IDX], word[Self::KEY_PREFIX_IDX]) + } + + // LEAF INDEX + //--------------------------------------------------------------------------------------------------- + + /// Converts to SMT leaf index used by AccountTree + pub fn to_leaf_index(&self) -> LeafIndex { + LeafIndex::from(self.as_word()) + } +} + +impl From for AccountIdKey { + fn from(id: AccountId) -> Self { + Self(id) + } +} + +// TESTS +//--------------------------------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + + use miden_core::ZERO; + + use super::{AccountId, *}; + use crate::account::{AccountIdVersion, AccountStorageMode, AccountType}; + #[test] + fn test_as_word_layout() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + let word = key.as_word(); + + assert_eq!(word[0], ZERO); + assert_eq!(word[1], ZERO); + assert_eq!(word[2], id.suffix()); + assert_eq!(word[3], id.prefix().as_felt()); + } + + #[test] + fn test_roundtrip_word_conversion() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + let key = AccountIdKey::from(id); + let recovered = + AccountIdKey::try_from_word(key.as_word()).expect("valid account id conversion"); + + assert_eq!(id, recovered); + } + + #[test] + fn test_leaf_index_consistency() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + + let idx1 = key.to_leaf_index(); + let idx2 = key.to_leaf_index(); + + assert_eq!(idx1, idx2); + } + + #[test] + fn test_from_conversion() { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + 
AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key: AccountIdKey = id.into(); + + assert_eq!(key.account_id(), id); + } + + #[test] + fn test_multiple_roundtrips() { + for _ in 0..100 { + let id = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + let key = AccountIdKey::from(id); + + let recovered = + AccountIdKey::try_from_word(key.as_word()).expect("valid account id conversion"); + + assert_eq!(id, recovered); + } + } +} diff --git a/crates/miden-protocol/src/block/account_tree/backend.rs b/crates/miden-protocol/src/block/account_tree/backend.rs index 78dc989786..58963f0e44 100644 --- a/crates/miden-protocol/src/block/account_tree/backend.rs +++ b/crates/miden-protocol/src/block/account_tree/backend.rs @@ -1,7 +1,7 @@ use alloc::boxed::Box; use alloc::vec::Vec; -use super::{AccountId, AccountIdPrefix, AccountTree, AccountTreeError, account_id_to_smt_key}; +use super::{AccountId, AccountIdKey, AccountIdPrefix, AccountTree, AccountTreeError}; use crate::Word; use crate::crypto::merkle::MerkleError; #[cfg(feature = "std")] @@ -129,9 +129,7 @@ where type Error = MerkleError; fn num_leaves(&self) -> usize { - // LargeSmt::num_leaves returns Result - // We'll unwrap or return 0 on error - LargeSmt::num_leaves(self).map_err(large_smt_error_to_merkle_error).unwrap_or(0) + LargeSmt::num_leaves(self) } fn leaves<'a>(&'a self) -> Box, SmtLeaf)>> { @@ -205,7 +203,7 @@ impl AccountTree { let smt = Smt::with_entries( entries .into_iter() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), ) .map_err(|err| { let MerkleError::DuplicateValuesForIndex(leaf_idx) = err else { @@ -234,6 +232,13 @@ fn large_smt_error_to_merkle_error(err: LargeSmtError) -> MerkleError { LargeSmtError::Storage(storage_err) => { panic!("Storage error encountered: {:?}", storage_err) }, + 
LargeSmtError::StorageNotEmpty => { + panic!("StorageNotEmpty error encountered: {:?}", err) + }, LargeSmtError::Merkle(merkle_err) => merkle_err, + LargeSmtError::RootMismatch { expected, actual } => MerkleError::ConflictingRoots { + expected_root: expected, + actual_root: actual, + }, } } diff --git a/crates/miden-protocol/src/block/account_tree/mod.rs b/crates/miden-protocol/src/block/account_tree/mod.rs index 2bff2b5f44..e594684be1 100644 --- a/crates/miden-protocol/src/block/account_tree/mod.rs +++ b/crates/miden-protocol/src/block/account_tree/mod.rs @@ -1,14 +1,18 @@ use alloc::string::ToString; use alloc::vec::Vec; -use miden_crypto::merkle::smt::LeafIndex; - use crate::Word; use crate::account::{AccountId, AccountIdPrefix}; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{MutationSet, SMT_DEPTH, Smt, SmtLeaf}; use crate::errors::AccountTreeError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; mod partial; pub use partial::PartialAccountTree; @@ -19,39 +23,8 @@ pub use witness::AccountWitness; mod backend; pub use backend::AccountTreeBackend; -// FREE HELPER FUNCTIONS -// ================================================================================================ -// These module-level functions provide conversions between AccountIds and SMT keys. -// They avoid the need for awkward syntax like account_id_to_smt_key(). - -const KEY_PREFIX_IDX: usize = 3; -const KEY_SUFFIX_IDX: usize = 2; - -/// Converts an [`AccountId`] to an SMT key for use in account trees. -/// -/// The key is constructed with the account ID suffix at index 2 and prefix at index 3. 
-pub fn account_id_to_smt_key(account_id: AccountId) -> Word { - let mut key = Word::empty(); - key[KEY_SUFFIX_IDX] = account_id.suffix(); - key[KEY_PREFIX_IDX] = account_id.prefix().as_felt(); - key -} - -/// Recovers an [`AccountId`] from an SMT key. -/// -/// # Panics -/// -/// Panics if the key does not represent a valid account ID. This should never happen when used -/// with keys from account trees, as the tree only stores valid IDs. -pub fn smt_key_to_account_id(key: Word) -> AccountId { - AccountId::try_from([key[KEY_PREFIX_IDX], key[KEY_SUFFIX_IDX]]) - .expect("account tree should only contain valid IDs") -} - -/// Converts an AccountId to an SMT leaf index for use with MerkleStore operations. -pub fn account_id_to_smt_index(account_id: AccountId) -> LeafIndex { - account_id_to_smt_key(account_id).into() -} +mod account_id_key; +pub use account_id_key::AccountIdKey; // ACCOUNT TREE // ================================================================================================ @@ -59,8 +32,8 @@ pub fn account_id_to_smt_index(account_id: AccountId) -> LeafIndex { /// The sparse merkle tree of all accounts in the blockchain. /// /// The key is the [`AccountId`] while the value is the current state commitment of the account, -/// i.e. [`Account::commitment`](crate::account::Account::commitment). If the account is new, then -/// the commitment is the [`EMPTY_WORD`](crate::EMPTY_WORD). +/// i.e. [`Account::to_commitment`](crate::account::Account::to_commitment). If the account is new, +/// then the commitment is the [`EMPTY_WORD`](crate::EMPTY_WORD). /// /// Each account ID occupies exactly one leaf in the tree, which is identified by its /// [`AccountId::prefix`]. In other words, account ID prefixes are unique in the blockchain. @@ -104,7 +77,8 @@ where /// # Errors /// /// Returns an error if: - /// - The SMT contains duplicate account ID prefixes + /// - The SMT contains invalid account IDs. + /// - The SMT contains duplicate account ID prefixes. 
pub fn new(smt: S) -> Result { for (_leaf_idx, leaf) in smt.leaves() { match leaf { @@ -114,13 +88,19 @@ where }, SmtLeaf::Single((key, _)) => { // Single entry is good - verify it's a valid account ID - smt_key_to_account_id(key); + AccountIdKey::try_from_word(key).map_err(|err| { + AccountTreeError::InvalidAccountIdKey { key, source: err } + })?; }, SmtLeaf::Multiple(entries) => { // Multiple entries means duplicate prefixes // Extract one of the keys to identify the duplicate prefix if let Some((key, _)) = entries.first() { - let account_id = smt_key_to_account_id(*key); + let key = *key; + let account_id = AccountIdKey::try_from_word(key).map_err(|err| { + AccountTreeError::InvalidAccountIdKey { key, source: err } + })?; + return Err(AccountTreeError::DuplicateIdPrefix { duplicate_prefix: account_id.prefix(), }); @@ -158,7 +138,7 @@ where /// /// Panics if the SMT backend fails to open the leaf (only possible with `LargeSmt` backend). pub fn open(&self, account_id: AccountId) -> AccountWitness { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); let proof = self.smt.open(&key); AccountWitness::from_smt_proof(account_id, proof) @@ -166,7 +146,7 @@ where /// Returns the current state commitment of the given account ID. pub fn get(&self, account_id: AccountId) -> Word { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt.get_value(&key) } @@ -201,7 +181,7 @@ where ( // SAFETY: By construction, the tree only contains valid IDs. 
- AccountId::try_from([key[Self::KEY_PREFIX_IDX], key[Self::KEY_SUFFIX_IDX]]) + AccountId::try_from_elements(key[Self::KEY_SUFFIX_IDX], key[Self::KEY_PREFIX_IDX]) .expect("account tree should only contain valid IDs"), commitment, ) @@ -234,7 +214,7 @@ where .compute_mutations(Vec::from_iter( account_commitments .into_iter() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), )) .map_err(AccountTreeError::ComputeMutations)?; @@ -248,7 +228,9 @@ where // valid. If it does not match, then we would insert a duplicate. if existing_key != *id_key { return Err(AccountTreeError::DuplicateIdPrefix { - duplicate_prefix: smt_key_to_account_id(*id_key).prefix(), + duplicate_prefix: AccountIdKey::try_from_word(*id_key) + .expect("account tree should only contain valid IDs") + .prefix(), }); } }, @@ -281,7 +263,7 @@ where account_id: AccountId, state_commitment: Word, ) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); // SAFETY: account tree should not contain multi-entry leaves and so the maximum number // of entries per leaf should never be exceeded. 
let prev_value = self.smt.insert(key, state_commitment) @@ -372,9 +354,10 @@ impl Deserializable for AccountTree { } // Create the SMT with validated entries - let smt = - Smt::with_entries(entries.into_iter().map(|(k, v)| (account_id_to_smt_key(k), v))) - .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; + let smt = Smt::with_entries( + entries.into_iter().map(|(k, v)| (AccountIdKey::from(k).as_word(), v)), + ) + .map_err(|err| DeserializationError::InvalidValue(err.to_string()))?; Ok(Self::new_unchecked(smt)) } } @@ -556,7 +539,7 @@ pub(super) mod tests { assert_eq!(tree.num_accounts(), 2); for id in [id0, id1] { - let proof = tree.smt.open(&account_id_to_smt_key(id)); + let proof = tree.smt.open(&AccountIdKey::from(id).as_word()); let (control_path, control_leaf) = proof.into_parts(); let witness = tree.open(id); @@ -600,7 +583,10 @@ pub(super) mod tests { // Create AccountTree with LargeSmt backend let tree = LargeSmt::::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -617,7 +603,10 @@ pub(super) mod tests { // Test mutations let mut tree_mut = LargeSmt::::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -666,7 +655,10 @@ pub(super) mod tests { let mut tree = LargeSmt::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); @@ -697,7 +689,10 @@ pub(super) mod tests { // Create tree 
with LargeSmt backend let large_tree = LargeSmt::with_entries( MemoryStorage::default(), - [(account_id_to_smt_key(id0), digest0), (account_id_to_smt_key(id1), digest1)], + [ + (AccountIdKey::from(id0).as_word(), digest0), + (AccountIdKey::from(id1).as_word(), digest1), + ], ) .map(AccountTree::new_unchecked) .unwrap(); diff --git a/crates/miden-protocol/src/block/account_tree/partial.rs b/crates/miden-protocol/src/block/account_tree/partial.rs index af84f725ae..f663bf5405 100644 --- a/crates/miden-protocol/src/block/account_tree/partial.rs +++ b/crates/miden-protocol/src/block/account_tree/partial.rs @@ -1,6 +1,6 @@ use miden_crypto::merkle::smt::{PartialSmt, SmtLeaf}; -use super::{AccountWitness, account_id_to_smt_key}; +use super::{AccountIdKey, AccountWitness}; use crate::Word; use crate::account::AccountId; use crate::errors::AccountTreeError; @@ -68,7 +68,7 @@ impl PartialAccountTree { /// Returns an error if: /// - the account ID is not tracked by this account tree. pub fn open(&self, account_id: AccountId) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt .open(&key) @@ -83,7 +83,7 @@ impl PartialAccountTree { /// Returns an error if: /// - the account ID is not tracked by this account tree. pub fn get(&self, account_id: AccountId) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); self.smt .get_value(&key) .map_err(|source| AccountTreeError::UntrackedAccountId { id: account_id, source }) @@ -109,7 +109,7 @@ impl PartialAccountTree { /// witness. pub fn track_account(&mut self, witness: AccountWitness) -> Result<(), AccountTreeError> { let id_prefix = witness.id().prefix(); - let id_key = account_id_to_smt_key(witness.id()); + let id_key = AccountIdKey::from(witness.id()).as_word(); // If a leaf with the same prefix is already tracked by this partial tree, consider it an // error. 
@@ -165,7 +165,7 @@ impl PartialAccountTree { account_id: AccountId, state_commitment: Word, ) -> Result { - let key = account_id_to_smt_key(account_id); + let key = AccountIdKey::from(account_id).as_word(); // If there exists a tracked leaf whose key is _not_ the one we're about to overwrite, then // we would insert the new commitment next to an existing account ID with the same prefix, @@ -195,6 +195,7 @@ mod tests { use super::*; use crate::block::account_tree::AccountTree; use crate::block::account_tree::tests::setup_duplicate_prefix_ids; + use crate::testing::account_id::AccountIdBuilder; #[test] fn insert_fails_on_duplicate_prefix() -> anyhow::Result<()> { @@ -252,15 +253,26 @@ mod tests { assert_eq!(partial_tree.get(id0).unwrap(), commitment1); } + /// Check that updating an account ID in the partial account tree fails if that ID is not + /// tracked. #[test] - fn upsert_state_commitments_fails_on_untracked_key() { - let mut partial_tree = PartialAccountTree::default(); - let [update, _] = setup_duplicate_prefix_ids(); + fn upsert_state_commitments_fails_on_untracked_key() -> anyhow::Result<()> { + let id0 = AccountIdBuilder::default().build_with_seed([5; 32]); + let id2 = AccountIdBuilder::default().build_with_seed([6; 32]); - let err = partial_tree.upsert_state_commitments([update]).unwrap_err(); + let commitment0 = Word::from([1, 2, 3, 4u32]); + let commitment2 = Word::from([2, 3, 4, 5u32]); + + let account_tree = AccountTree::with_entries([(id0, commitment0), (id2, commitment2)])?; + // Let the partial account tree only track id0, not id2. + let mut partial_tree = PartialAccountTree::with_witnesses([account_tree.open(id0)])?; + + let err = partial_tree.upsert_state_commitments([(id2, commitment0)]).unwrap_err(); assert_matches!(err, AccountTreeError::UntrackedAccountId { id, .. } - if id == update.0 - ) + if id == id2 + ); + + Ok(()) } #[test] @@ -269,14 +281,14 @@ mod tests { // account IDs with the same prefix. 
let full_tree = Smt::with_entries( setup_duplicate_prefix_ids() - .map(|(id, commitment)| (account_id_to_smt_key(id), commitment)), + .map(|(id, commitment)| (AccountIdKey::from(id).as_word(), commitment)), ) .unwrap(); let [(id0, _), (id1, _)] = setup_duplicate_prefix_ids(); - let key0 = account_id_to_smt_key(id0); - let key1 = account_id_to_smt_key(id1); + let key0 = AccountIdKey::from(id0).as_word(); + let key1 = AccountIdKey::from(id1).as_word(); let proof0 = full_tree.open(&key0); let proof1 = full_tree.open(&key1); assert_eq!(proof0.leaf(), proof1.leaf()); diff --git a/crates/miden-protocol/src/block/account_tree/witness.rs b/crates/miden-protocol/src/block/account_tree/witness.rs index 9c5c81d745..98be6b8310 100644 --- a/crates/miden-protocol/src/block/account_tree/witness.rs +++ b/crates/miden-protocol/src/block/account_tree/witness.rs @@ -1,21 +1,27 @@ use alloc::string::ToString; -use miden_crypto::merkle::smt::{LeafIndex, SMT_DEPTH, SmtLeaf, SmtProof, SmtProofError}; +use miden_crypto::merkle::smt::{SMT_DEPTH, SmtLeaf, SmtProof, SmtProofError}; use miden_crypto::merkle::{InnerNodeInfo, SparseMerklePath}; use crate::Word; use crate::account::AccountId; -use crate::block::account_tree::{account_id_to_smt_key, smt_key_to_account_id}; +use crate::block::account_tree::AccountIdKey; use crate::errors::AccountTreeError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT WITNESS // ================================================================================================ /// A specialized version of an [`SmtProof`] for use in [`AccountTree`](super::AccountTree) and /// [`PartialAccountTree`](super::PartialAccountTree). It proves the inclusion of an account -/// ID at a certain state (i.e. 
[`Account::commitment`](crate::account::Account::commitment)) in the -/// [`AccountTree`](super::AccountTree). +/// ID at a certain state (i.e. [`Account::to_commitment`](crate::account::Account::to_commitment)) +/// in the [`AccountTree`](super::AccountTree). /// /// By construction the witness can only represent the equivalent of an [`SmtLeaf`] with zero or one /// entries, which guarantees that the account ID prefix it represents is unique in the tree. @@ -68,6 +74,7 @@ impl AccountWitness { /// # Panics /// /// Panics if: + /// - the proof contains an entry whose key contains an invalid account ID. /// - the merkle path in the proof does not have depth equal to [`SMT_DEPTH`]. /// - the proof contains an SmtLeaf::Multiple. pub(super) fn from_smt_proof(requested_account_id: AccountId, proof: SmtProof) -> Self { @@ -83,7 +90,8 @@ impl AccountWitness { SmtLeaf::Empty(_) => requested_account_id, SmtLeaf::Single((key_in_leaf, _)) => { // SAFETY: By construction, the tree only contains valid IDs. - smt_key_to_account_id(*key_in_leaf) + AccountIdKey::try_from_word(*key_in_leaf) + .expect("account tree should only contain valid IDs") }, SmtLeaf::Multiple(_) => { unreachable!("account tree should only contain zero or one entry per ID prefix") @@ -91,7 +99,7 @@ impl AccountWitness { }; let commitment = proof - .get(&account_id_to_smt_key(witness_id)) + .get(&AccountIdKey::from(witness_id).as_word()) .expect("we should have received a proof for the witness key"); // SAFETY: The proof is guaranteed to have depth SMT_DEPTH if it comes from one of @@ -132,10 +140,10 @@ impl AccountWitness { /// Returns the [`SmtLeaf`] of the account witness. 
pub fn leaf(&self) -> SmtLeaf { if self.commitment == Word::empty() { - let leaf_idx = LeafIndex::from(account_id_to_smt_key(self.id)); + let leaf_idx = AccountIdKey::from(self.id).to_leaf_index(); SmtLeaf::new_empty(leaf_idx) } else { - let key = account_id_to_smt_key(self.id); + let key = AccountIdKey::from(self.id).as_word(); SmtLeaf::new_single(key, self.commitment) } } diff --git a/crates/miden-protocol/src/block/block_account_update.rs b/crates/miden-protocol/src/block/block_account_update.rs index d3e2541613..8b809151d6 100644 --- a/crates/miden-protocol/src/block/block_account_update.rs +++ b/crates/miden-protocol/src/block/block_account_update.rs @@ -1,7 +1,13 @@ use crate::Word; use crate::account::AccountId; use crate::account::delta::AccountUpdateDetails; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK ACCOUNT UPDATE // ================================================================================================ diff --git a/crates/miden-protocol/src/block/block_body.rs b/crates/miden-protocol/src/block/block_body.rs index 53b86741cb..4b10460edd 100644 --- a/crates/miden-protocol/src/block/block_body.rs +++ b/crates/miden-protocol/src/block/block_body.rs @@ -1,13 +1,6 @@ use alloc::vec::Vec; use miden_core::Word; -use miden_core::utils::{ - ByteReader, - ByteWriter, - Deserializable, - DeserializationError, - Serializable, -}; use crate::block::{ BlockAccountUpdate, @@ -18,6 +11,13 @@ use crate::block::{ }; use crate::note::Nullifier; use crate::transaction::{OrderedTransactionHeaders, OutputNote}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK BODY // ================================================================================================ diff --git 
a/crates/miden-protocol/src/block/block_inputs.rs b/crates/miden-protocol/src/block/block_inputs.rs index e67f4e0bba..5e531f9e82 100644 --- a/crates/miden-protocol/src/block/block_inputs.rs +++ b/crates/miden-protocol/src/block/block_inputs.rs @@ -6,7 +6,13 @@ use crate::block::account_tree::AccountWitness; use crate::block::nullifier_tree::NullifierWitness; use crate::note::{NoteId, NoteInclusionProof, Nullifier}; use crate::transaction::PartialBlockchain; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // BLOCK INPUTS // ================================================================================================ diff --git a/crates/miden-protocol/src/block/block_number.rs b/crates/miden-protocol/src/block/block_number.rs index 33c65c82c3..aec6613a48 100644 --- a/crates/miden-protocol/src/block/block_number.rs +++ b/crates/miden-protocol/src/block/block_number.rs @@ -30,6 +30,9 @@ impl BlockNumber { /// The block height of the genesis block. pub const GENESIS: Self = Self(0); + /// The maximum block number. 
+ pub const MAX: Self = Self(u32::MAX); + /// Returns the previous block number pub fn parent(self) -> Option { self.checked_sub(1) @@ -96,8 +99,8 @@ impl Deserializable for BlockNumber { } impl From for Felt { - fn from(value: BlockNumber) -> Self { - Felt::from(value.as_u32()) + fn from(block_num: BlockNumber) -> Self { + Felt::from(block_num.as_u32()) } } diff --git a/crates/miden-protocol/src/block/block_proof.rs b/crates/miden-protocol/src/block/block_proof.rs index 710a77cb03..999b9ccd3a 100644 --- a/crates/miden-protocol/src/block/block_proof.rs +++ b/crates/miden-protocol/src/block/block_proof.rs @@ -1,4 +1,10 @@ -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// Represents a proof of a block in the chain. /// diff --git a/crates/miden-protocol/src/block/blockchain.rs b/crates/miden-protocol/src/block/blockchain.rs index a70159fbd0..17c96bbad9 100644 --- a/crates/miden-protocol/src/block/blockchain.rs +++ b/crates/miden-protocol/src/block/blockchain.rs @@ -1,11 +1,16 @@ use alloc::collections::BTreeSet; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_crypto::merkle::mmr::{Forest, Mmr, MmrError, MmrPeaks, MmrProof, PartialMmr}; -use miden_processor::DeserializationError; use crate::Word; use crate::block::BlockNumber; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// The [Merkle Mountain Range](Mmr) defining the Miden blockchain. 
/// @@ -140,13 +145,13 @@ impl Blockchain { let mut partial_mmr = PartialMmr::from_peaks(peaks); for block_num in blocks.iter() { let leaf = self.mmr.get(block_num.as_usize())?; - let path = self.open_at(*block_num, checkpoint)?.merkle_path; + let proof = self.open_at(*block_num, checkpoint)?; // SAFETY: We should be able to fill the partial MMR with data from the partial // blockchain without errors, otherwise it indicates the blockchain is // invalid. partial_mmr - .track(block_num.as_usize(), leaf, &path) + .track(block_num.as_usize(), leaf, proof.merkle_path()) .expect("filling partial mmr with data from mmr should succeed"); } diff --git a/crates/miden-protocol/src/block/header.rs b/crates/miden-protocol/src/block/header.rs index 33b5f0f8dd..fc13578258 100644 --- a/crates/miden-protocol/src/block/header.rs +++ b/crates/miden-protocol/src/block/header.rs @@ -241,12 +241,12 @@ impl BlockHeader { elements.extend_from_slice(tx_commitment.as_elements()); elements.extend_from_slice(tx_kernel_commitment.as_elements()); elements.extend(validator_key.to_commitment()); - elements.extend([block_num.into(), version.into(), timestamp.into(), ZERO]); + elements.extend([block_num.into(), Felt::from(version), Felt::from(timestamp), ZERO]); elements.extend([ + ZERO, + Felt::from(fee_parameters.verification_base_fee()), fee_parameters.native_asset_id().suffix(), fee_parameters.native_asset_id().prefix().as_felt(), - fee_parameters.verification_base_fee().into(), - ZERO, ]); elements.extend([ZERO, ZERO, ZERO, ZERO]); Hasher::hash_elements(&elements) @@ -400,7 +400,7 @@ impl Deserializable for FeeParameters { mod tests { use assert_matches::assert_matches; use miden_core::Word; - use winter_rand_utils::rand_value; + use miden_crypto::rand::test_utils::rand_value; use super::*; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET; diff --git a/crates/miden-protocol/src/block/mod.rs b/crates/miden-protocol/src/block/mod.rs index e829aacdb6..cb2df1a489 100644 --- 
a/crates/miden-protocol/src/block/mod.rs +++ b/crates/miden-protocol/src/block/mod.rs @@ -13,6 +13,9 @@ pub use block_proof::BlockProof; mod proposed_block; pub use proposed_block::ProposedBlock; +mod signed_block; +pub use signed_block::SignedBlock; + mod proven_block; pub use proven_block::ProvenBlock; @@ -34,9 +37,6 @@ pub use block_inputs::BlockInputs; mod note_tree; pub use note_tree::{BlockNoteIndex, BlockNoteTree}; -mod signer; -pub use signer::BlockSigner; - /// The set of notes created in a transaction batch with their index in the batch. /// /// The index is included as some notes may be erased at the block level that were part of the diff --git a/crates/miden-protocol/src/block/note_tree.rs b/crates/miden-protocol/src/block/note_tree.rs index 497aab12ba..e0e9feb1ae 100644 --- a/crates/miden-protocol/src/block/note_tree.rs +++ b/crates/miden-protocol/src/block/note_tree.rs @@ -1,4 +1,5 @@ use alloc::string::ToString; +use alloc::vec::Vec; use miden_crypto::merkle::SparseMerklePath; @@ -6,7 +7,13 @@ use crate::batch::BatchNoteTree; use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{LeafIndex, SimpleSmt}; use crate::note::{NoteId, NoteMetadata, compute_note_commitment}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{ BLOCK_NOTE_TREE_DEPTH, MAX_BATCHES_PER_BLOCK, @@ -167,7 +174,7 @@ impl Serializable for BlockNoteTree { impl Deserializable for BlockNoteTree { fn read_from(source: &mut R) -> Result { let count = source.read_u32()?; - let leaves = source.read_many(count as usize)?; + let leaves = source.read_many_iter(count as usize)?.collect::, _>>()?; SimpleSmt::with_leaves(leaves) .map(Self) diff --git a/crates/miden-protocol/src/block/nullifier_tree/backend.rs b/crates/miden-protocol/src/block/nullifier_tree/backend.rs index 603258ea0a..90f0955046 100644 --- 
a/crates/miden-protocol/src/block/nullifier_tree/backend.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/backend.rs @@ -114,12 +114,7 @@ where type Error = MerkleError; fn num_entries(&self) -> usize { - // SAFETY: We panic on storage errors here as they represent unrecoverable I/O failures. - // This maintains API compatibility with the non-fallible Smt::num_entries(). - // See issue #2010 for future improvements to error handling. LargeSmt::num_entries(self) - .map_err(large_smt_error_to_merkle_error) - .expect("Storage I/O error accessing num_entries") } fn entries(&self) -> Box + '_> { @@ -230,6 +225,13 @@ pub(super) fn large_smt_error_to_merkle_error(err: LargeSmtError) -> MerkleError LargeSmtError::Storage(storage_err) => { panic!("Storage error encountered: {:?}", storage_err) }, + LargeSmtError::StorageNotEmpty => { + panic!("StorageNotEmpty error encountered: {:?}", err) + }, LargeSmtError::Merkle(merkle_err) => merkle_err, + LargeSmtError::RootMismatch { expected, actual } => MerkleError::ConflictingRoots { + expected_root: expected, + actual_root: actual, + }, } } diff --git a/crates/miden-protocol/src/block/nullifier_tree/mod.rs b/crates/miden-protocol/src/block/nullifier_tree/mod.rs index 18332812ea..b85a4aebd7 100644 --- a/crates/miden-protocol/src/block/nullifier_tree/mod.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/mod.rs @@ -6,8 +6,14 @@ use crate::crypto::merkle::MerkleError; use crate::crypto::merkle::smt::{MutationSet, SMT_DEPTH, Smt}; use crate::errors::NullifierTreeError; use crate::note::Nullifier; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; -use crate::{Felt, FieldElement, Word}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Word}; mod backend; pub use backend::NullifierTreeBackend; @@ -273,7 +279,7 @@ impl NullifierBlock { /// - The 0th element in the word is not a valid 
[BlockNumber]. /// - Any of the remaining elements is non-zero. pub fn new(word: Word) -> Result { - let block_num = u32::try_from(word[0].as_int()) + let block_num = u32::try_from(word[0].as_canonical_u64()) .map(BlockNumber::from) .map_err(|_| NullifierTreeError::InvalidNullifierBlockNumber(word))?; diff --git a/crates/miden-protocol/src/block/nullifier_tree/partial.rs b/crates/miden-protocol/src/block/nullifier_tree/partial.rs index 0bcb90c80d..9897525196 100644 --- a/crates/miden-protocol/src/block/nullifier_tree/partial.rs +++ b/crates/miden-protocol/src/block/nullifier_tree/partial.rs @@ -111,7 +111,7 @@ impl PartialNullifierTree { mod tests { use assert_matches::assert_matches; use miden_crypto::merkle::smt::Smt; - use winter_rand_utils::rand_value; + use miden_crypto::rand::test_utils::rand_value; use super::*; use crate::block::nullifier_tree::NullifierTree; diff --git a/crates/miden-protocol/src/block/proposed_block.rs b/crates/miden-protocol/src/block/proposed_block.rs index 129f94912b..c828278f60 100644 --- a/crates/miden-protocol/src/block/proposed_block.rs +++ b/crates/miden-protocol/src/block/proposed_block.rs @@ -495,7 +495,7 @@ impl ProposedBlock { let fee_parameters = prev_block_header.fee_parameters().clone(); // Currently undefined and reserved for future use. - // See miden-base/1155. + // See https://github.com/0xMiden/protocol/issues/1155. 
let version = 0; let tx_kernel_commitment = TransactionKernel.to_commitment(); let header = BlockHeader::new( diff --git a/crates/miden-protocol/src/block/proven_block.rs b/crates/miden-protocol/src/block/proven_block.rs index 8e1c23c6c8..68abc97d23 100644 --- a/crates/miden-protocol/src/block/proven_block.rs +++ b/crates/miden-protocol/src/block/proven_block.rs @@ -1,8 +1,37 @@ +use miden_core::Word; use miden_crypto::dsa::ecdsa_k256_keccak::Signature; use crate::MIN_PROOF_SECURITY_LEVEL; use crate::block::{BlockBody, BlockHeader, BlockProof}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// PROVEN BLOCK ERROR +// ================================================================================================ + +#[derive(Debug, thiserror::Error)] +pub enum ProvenBlockError { + #[error( + "ECDSA signature verification failed based on the proven block's header commitment, validator public key and signature" + )] + InvalidSignature, + #[error( + "header tx commitment ({header_tx_commitment}) does not match body tx commitment ({body_tx_commitment})" + )] + TxCommitmentMismatch { + header_tx_commitment: Word, + body_tx_commitment: Word, + }, + #[error( + "proven block header note root ({header_root}) does not match the corresponding body's note root ({body_root})" + )] + NoteRootMismatch { header_root: Word, body_root: Word }, +} // PROVEN BLOCK // ================================================================================================ @@ -21,7 +50,7 @@ pub struct ProvenBlock { /// The body of the proven block. body: BlockBody, - /// The validator's signature over the block header. + /// The Validator's signature over the block header. signature: Signature, /// The proof of the block. 
@@ -29,12 +58,50 @@ pub struct ProvenBlock { } impl ProvenBlock { + /// Returns a new [`ProvenBlock`] instantiated from the provided components. + /// + /// Validates that the provided components correspond to each other by verifying the signature, + /// and checking for matching transaction commitments and note roots. + /// + /// Involves non-trivial computation. Use [`Self::new_unchecked`] if the validation is not + /// necessary. + /// + /// Note: this does not fully validate the consistency of provided components. Specifically, + /// we cannot validate that: + /// - That applying the account updates in the block body to the account tree represented by the + /// root from the previous block header would actually result in the account root in the + /// provided header. + /// - That inserting the created nullifiers in the block body to the nullifier tree represented + /// by the root from the previous block header would actually result in the nullifier root in + /// the provided header. + /// + /// # Errors + /// Returns an error if: + /// - If the validator signature does not verify against the block header commitment and the + /// validator key. + /// - If the transaction commitment in the block header is inconsistent with the transactions + /// included in the block body. + /// - If the note root in the block header is inconsistent with the notes included in the block + /// body. + pub fn new( + header: BlockHeader, + body: BlockBody, + signature: Signature, + proof: BlockProof, + ) -> Result { + let proven_block = Self { header, signature, body, proof }; + + proven_block.validate()?; + + Ok(proven_block) + } + /// Returns a new [`ProvenBlock`] instantiated from the provided components. /// /// # Warning /// - /// This constructor does not do any validation, so passing incorrect values may lead to later - /// panics. 
+ /// This constructor does not do any validation as to whether the arguments correctly correspond + /// to each other, which could cause errors downstream. pub fn new_unchecked( header: BlockHeader, body: BlockBody, @@ -44,6 +111,42 @@ impl ProvenBlock { Self { header, signature, body, proof } } + /// Validates that the components of the proven block correspond to each other by verifying the + /// signature, and checking for matching transaction commitments and note roots. + /// + /// Validation involves non-trivial computation, and depending on the size of the block may + /// take non-negligible amount of time. + /// + /// Note: this does not fully validate the consistency of internal components. Specifically, + /// we cannot validate that: + /// - That applying the account updates in the block body to the account tree represented by the + /// root from the previous block header would actually result in the account root in the + /// provided header. + /// - That inserting the created nullifiers in the block body to the nullifier tree represented + /// by the root from the previous block header would actually result in the nullifier root in + /// the provided header. + /// + /// # Errors + /// Returns an error if: + /// - If the validator signature does not verify against the block header commitment and the + /// validator key. + /// - If the transaction commitment in the block header is inconsistent with the transactions + /// included in the block body. + /// - If the note root in the block header is inconsistent with the notes included in the block + /// body. + pub fn validate(&self) -> Result<(), ProvenBlockError> { + // Verify signature. + self.validate_signature()?; + + // Validate that header / body transaction commitments match. + self.validate_tx_commitment()?; + + // Validate that header / body note roots match. + self.validate_note_root()?; + + Ok(()) + } + /// Returns the proof security level of the block. 
pub fn proof_security_level(&self) -> u32 { MIN_PROOF_SECURITY_LEVEL @@ -54,20 +157,64 @@ impl ProvenBlock { &self.header } - /// Returns the validator's signature over the block header. - pub fn signature(&self) -> &Signature { - &self.signature - } - /// Returns the body of the block. pub fn body(&self) -> &BlockBody { &self.body } + /// Returns the Validator's signature over the block header. + pub fn signature(&self) -> &Signature { + &self.signature + } + /// Returns the proof of the block. pub fn proof(&self) -> &BlockProof { &self.proof } + + /// Destructures this proven block into individual parts. + pub fn into_parts(self) -> (BlockHeader, BlockBody, Signature, BlockProof) { + (self.header, self.body, self.signature, self.proof) + } + + // HELPER METHODS + // -------------------------------------------------------------------------------------------- + + /// Performs ECDSA signature verification against the header commitment and validator key. + fn validate_signature(&self) -> Result<(), ProvenBlockError> { + if !self.signature.verify(self.header.commitment(), self.header.validator_key()) { + Err(ProvenBlockError::InvalidSignature) + } else { + Ok(()) + } + } + + /// Validates that the transaction commitments between the header and body match for this proven + /// block. + /// + /// Involves non-trivial computation of the body's transaction commitment. + fn validate_tx_commitment(&self) -> Result<(), ProvenBlockError> { + let header_tx_commitment = self.header.tx_commitment(); + let body_tx_commitment = self.body.transactions().commitment(); + if header_tx_commitment != body_tx_commitment { + Err(ProvenBlockError::TxCommitmentMismatch { header_tx_commitment, body_tx_commitment }) + } else { + Ok(()) + } + } + + /// Validates that the header's note tree root matches that of the body. + /// + /// Involves non-trivial computation of the body's note tree. 
+ fn validate_note_root(&self) -> Result<(), ProvenBlockError> { + let header_root = self.header.note_root(); + let body_root = self.body.compute_block_note_tree().root(); + if header_root != body_root { + Err(ProvenBlockError::NoteRootMismatch { header_root, body_root }) + } else { + Ok(()) + } + } } // SERIALIZATION diff --git a/crates/miden-protocol/src/block/signed_block.rs b/crates/miden-protocol/src/block/signed_block.rs new file mode 100644 index 0000000000..12aead9af5 --- /dev/null +++ b/crates/miden-protocol/src/block/signed_block.rs @@ -0,0 +1,212 @@ +use miden_core::Word; +use miden_crypto::dsa::ecdsa_k256_keccak::Signature; + +use crate::block::{BlockBody, BlockHeader, BlockNumber}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +// SIGNED BLOCK ERROR +// ================================================================================================ + +#[derive(Debug, thiserror::Error)] +pub enum SignedBlockError { + #[error( + "ECDSA signature verification failed based on the signed block's header commitment, validator public key and signature" + )] + InvalidSignature, + #[error( + "header tx commitment ({header_tx_commitment}) does not match body tx commitment ({body_tx_commitment})" + )] + TxCommitmentMismatch { + header_tx_commitment: Word, + body_tx_commitment: Word, + }, + #[error( + "signed block previous block commitment ({expected}) does not match expected parent's block commitment ({parent})" + )] + ParentCommitmentMismatch { expected: Word, parent: Word }, + #[error("parent block number ({parent}) is not signed block number - 1 ({expected})")] + ParentNumberMismatch { + expected: BlockNumber, + parent: BlockNumber, + }, + #[error( + "signed block header note root ({header_root}) does not match the corresponding body's note root ({body_root})" + )] + NoteRootMismatch { header_root: Word, body_root: Word }, + #[error("supplied parent block ({parent}) cannot be parent to 
genesis block")] + GenesisBlockHasNoParent { parent: BlockNumber }, +} + +// SIGNED BLOCK +// ================================================================================================ + +/// Represents a block in the Miden blockchain that has been signed by the Validator. +/// +/// Signed blocks are applied to the chain's state before they are proven. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SignedBlock { + /// The header of the Signed block. + header: BlockHeader, + + /// The body of the Signed block. + body: BlockBody, + + /// The Validator's signature over the block header. + signature: Signature, +} + +impl SignedBlock { + /// Returns a new [`SignedBlock`] instantiated from the provided components. + /// + /// Validates that the provided components correspond to each other by verifying the signature, + /// and checking for matching commitments and note roots. + /// + /// Involves non-trivial computation. Use [`Self::new_unchecked`] if the validation is not + /// necessary. + pub fn new( + header: BlockHeader, + body: BlockBody, + signature: Signature, + ) -> Result { + let signed_block = Self { header, body, signature }; + + // Verify signature. + signed_block.validate_signature()?; + + // Validate that header / body transaction commitments match. + signed_block.validate_tx_commitment()?; + + // Validate that header / body note roots match. + signed_block.validate_note_root()?; + + Ok(signed_block) + } + + /// Returns a new [`SignedBlock`] instantiated from the provided components. + /// + /// # Warning + /// + /// This constructor does not do any validation as to whether the arguments correctly correspond + /// to each other, which could cause errors downstream. + pub fn new_unchecked(header: BlockHeader, body: BlockBody, signature: Signature) -> Self { + Self { header, signature, body } + } + + /// Returns the header of the block. + pub fn header(&self) -> &BlockHeader { + &self.header + } + + /// Returns the body of the block. 
+ pub fn body(&self) -> &BlockBody { + &self.body + } + + /// Returns the Validator's signature over the block header. + pub fn signature(&self) -> &Signature { + &self.signature + } + + /// Destructures this signed block into individual parts. + pub fn into_parts(self) -> (BlockHeader, BlockBody, Signature) { + (self.header, self.body, self.signature) + } + + /// Performs ECDSA signature verification against the header commitment and validator key. + fn validate_signature(&self) -> Result<(), SignedBlockError> { + if !self.signature.verify(self.header.commitment(), self.header.validator_key()) { + Err(SignedBlockError::InvalidSignature) + } else { + Ok(()) + } + } + + /// Validates that the transaction commitments between the header and body match for this signed + /// block. + /// + /// Involves non-trivial computation of the body's transaction commitment. + fn validate_tx_commitment(&self) -> Result<(), SignedBlockError> { + let header_tx_commitment = self.header.tx_commitment(); + let body_tx_commitment = self.body.transactions().commitment(); + if header_tx_commitment != body_tx_commitment { + Err(SignedBlockError::TxCommitmentMismatch { header_tx_commitment, body_tx_commitment }) + } else { + Ok(()) + } + } + + /// Validates that the header's note tree root matches that of the body. + /// + /// Involves non-trivial computation of the body's note tree. + fn validate_note_root(&self) -> Result<(), SignedBlockError> { + let header_root = self.header.note_root(); + let body_root = self.body.compute_block_note_tree().root(); + if header_root != body_root { + Err(SignedBlockError::NoteRootMismatch { header_root, body_root }) + } else { + Ok(()) + } + } + + /// Validates that the provided parent block's commitment and number correctly corresponds to + /// the signed block. + /// + /// # Errors + /// + /// Returns an error if: + /// - The signed block is the genesis block. + /// - The parent block number is not the signed block number - 1. 
+ /// - The parent block's commitment is not equal to the signed block's previous block + /// commitment. + pub fn validate_parent(&self, parent_block: &BlockHeader) -> Result<(), SignedBlockError> { + // Check block numbers. + if let Some(expected) = self.header.block_num().checked_sub(1) { + let parent = parent_block.block_num(); + if expected != parent { + return Err(SignedBlockError::ParentNumberMismatch { expected, parent }); + } + + // Check commitments. + let expected = self.header.prev_block_commitment(); + let parent = parent_block.commitment(); + if expected != parent { + return Err(SignedBlockError::ParentCommitmentMismatch { expected, parent }); + } + + Ok(()) + } else { + // Block 0 does not have a parent. + let parent = parent_block.block_num(); + Err(SignedBlockError::GenesisBlockHasNoParent { parent }) + } + } +} + +// SERIALIZATION +// ================================================================================================ + +impl Serializable for SignedBlock { + fn write_into<W: ByteWriter>(&self, target: &mut W) { + self.header.write_into(target); + self.body.write_into(target); + self.signature.write_into(target); + } +} + +impl Deserializable for SignedBlock { + fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { + let block = Self { + header: BlockHeader::read_from(source)?, + body: BlockBody::read_from(source)?, + signature: Signature::read_from(source)?, + }; + + Ok(block) + } +} diff --git a/crates/miden-protocol/src/block/signer.rs b/crates/miden-protocol/src/block/signer.rs deleted file mode 100644 index 2007e58146..0000000000 --- a/crates/miden-protocol/src/block/signer.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::block::BlockHeader; -use crate::crypto::dsa::ecdsa_k256_keccak as ecdsa; -use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; - -// BLOCK SIGNER -// ================================================================================================ - -/// Trait which abstracts the signing of block headers with ECDSA signatures.
-/// -/// Production-level implementations will involve some sort of secure remote backend. The trait also -/// allows for testing with local and ephemeral signers. -pub trait BlockSigner { - fn sign(&self, header: &BlockHeader) -> ecdsa::Signature; - fn public_key(&self) -> ecdsa::PublicKey; -} - -// SECRET KEY BLOCK SIGNER -// ================================================================================================ - -impl BlockSigner for SecretKey { - fn sign(&self, header: &BlockHeader) -> ecdsa::Signature { - self.sign(header.commitment()) - } - - fn public_key(&self) -> ecdsa::PublicKey { - self.public_key() - } -} diff --git a/crates/miden-protocol/src/constants.rs b/crates/miden-protocol/src/constants.rs index bda8e339cc..14950b8fd1 100644 --- a/crates/miden-protocol/src/constants.rs +++ b/crates/miden-protocol/src/constants.rs @@ -6,13 +6,16 @@ pub const ACCOUNT_TREE_DEPTH: u8 = 64; /// The maximum allowed size of an account update is 256 KiB. pub const ACCOUNT_UPDATE_MAX_SIZE: u32 = 2u32.pow(18); +/// The maximum allowed size of a serialized note in bytes (32 KiB). +pub const NOTE_MAX_SIZE: u32 = 2u32.pow(15); + /// The maximum number of assets that can be stored in a single note. pub const MAX_ASSETS_PER_NOTE: usize = 255; -/// The maximum number of inputs that can accompany a single note. +/// The maximum number of storage items that can accompany a single note. /// /// The value is set to 1024 so that it is evenly divisible by 8. -pub const MAX_INPUTS_PER_NOTE: usize = 1024; +pub const MAX_NOTE_STORAGE_ITEMS: usize = 1024; /// The maximum number of notes that can be consumed by a single transaction. 
pub const MAX_INPUT_NOTES_PER_TX: usize = 1024; diff --git a/crates/miden-protocol/src/errors/mod.rs b/crates/miden-protocol/src/errors/mod.rs index 19236a647f..6d5a19e68b 100644 --- a/crates/miden-protocol/src/errors/mod.rs +++ b/crates/miden-protocol/src/errors/mod.rs @@ -5,16 +5,15 @@ use core::error::Error; use miden_assembly::Report; use miden_assembly::diagnostics::reporting::PrintDiagnostic; +use miden_core::Felt; use miden_core::mast::MastForestError; -use miden_core::{EventId, Felt}; use miden_crypto::merkle::mmr::MmrError; use miden_crypto::merkle::smt::{SmtLeafError, SmtProofError}; use miden_crypto::utils::HexParseError; -use miden_processor::DeserializationError; use thiserror::Error; use super::account::AccountId; -use super::asset::{FungibleAsset, NonFungibleAsset, TokenSymbol}; +use super::asset::{AssetVaultKey, FungibleAsset, NonFungibleAsset, TokenSymbol}; use super::crypto::merkle::MerkleError; use super::note::NoteId; use super::{MAX_BATCHES_PER_BLOCK, MAX_OUTPUT_NOTES_PER_BATCH, Word}; @@ -24,32 +23,34 @@ use crate::account::{ AccountIdPrefix, AccountStorage, AccountType, + StorageMapKey, StorageSlotId, - // StorageValueName, - // StorageValueNameError, - // TemplateTypeError, StorageSlotName, }; use crate::address::AddressType; -use crate::asset::AssetVaultKey; +use crate::asset::AssetId; use crate::batch::BatchId; use crate::block::BlockNumber; use crate::note::{ NoteAssets, NoteAttachmentArray, - NoteExecutionHint, + NoteAttachmentKind, + NoteAttachmentScheme, NoteTag, NoteType, Nullifier, }; use crate::transaction::{TransactionEventId, TransactionId}; +use crate::utils::serde::DeserializationError; +use crate::vm::EventId; use crate::{ ACCOUNT_UPDATE_MAX_SIZE, MAX_ACCOUNTS_PER_BATCH, MAX_INPUT_NOTES_PER_BATCH, MAX_INPUT_NOTES_PER_TX, - MAX_INPUTS_PER_NOTE, + MAX_NOTE_STORAGE_ITEMS, MAX_OUTPUT_NOTES_PER_TX, + NOTE_MAX_SIZE, }; #[cfg(any(feature = "testing", test))] @@ -59,19 +60,21 @@ pub use masm_error::MasmError; /// The errors from the 
MASM code of the transaction kernel. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod tx_kernel; +pub mod tx_kernel { + include!(concat!(env!("OUT_DIR"), "/tx_kernel_errors.rs")); +} /// The errors from the MASM code of the Miden protocol library. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod protocol; +pub mod protocol { + include!(concat!(env!("OUT_DIR"), "/protocol_errors.rs")); +} // ACCOUNT COMPONENT TEMPLATE ERROR // ================================================================================================ #[derive(Debug, Error)] -pub enum AccountComponentTemplateError { +pub enum ComponentMetadataError { #[error("storage slot name `{0}` is duplicate")] DuplicateSlotName(StorageSlotName), #[error("storage init value name `{0}` is duplicate")] @@ -88,10 +91,6 @@ pub enum AccountComponentTemplateError { InitValueNotProvided(StorageValueName), #[error("invalid init storage value for `{0}`: {1}")] InvalidInitStorageValue(StorageValueName, String), - #[error( - "account component storage schema cannot contain a slot with name `{0}` as it is reserved by the protocol" - )] - ReservedSlotName(StorageSlotName), #[error("error converting value into expected type: {0}")] StorageValueParsingError(#[source] SchemaTypeError), #[error("storage map contains duplicate keys")] @@ -123,8 +122,6 @@ pub enum AccountError { AccountComponentAssemblyError(Report), #[error("failed to merge components into one account code mast forest")] AccountComponentMastForestMergeError(#[source] MastForestError), - // #[error("failed to create account component")] - // AccountComponentTemplateInstantiationError(#[source] AccountComponentTemplateError), #[error("account component contains multiple authentication procedures")] AccountComponentMultipleAuthProcedures, #[error("failed to update asset vault")] @@ -165,11 +162,6 @@ pub enum AccountError { StorageSlotNotValue(StorageSlotName), #[error("storage slot name {0} is assigned to more than one slot")] 
DuplicateStorageSlotName(StorageSlotName), - #[error( - "account storage cannot contain a user-provided slot with name {} as it is reserved by the protocol", - AccountStorage::faucet_sysdata_slot() - )] - StorageSlotNameMustNotBeFaucetSysdata, #[error("storage does not contain a slot with name {slot_name}")] StorageSlotNameNotFound { slot_name: StorageSlotName }, #[error("storage does not contain a slot with ID {slot_id}")] @@ -290,6 +282,8 @@ pub enum AccountTreeError { ApplyMutations(#[source] MerkleError), #[error("failed to compute account tree mutations")] ComputeMutations(#[source] MerkleError), + #[error("provided smt contains an invalid account ID in key {key}")] + InvalidAccountIdKey { key: Word, source: AccountIdError }, #[error("smt leaf's index is not a valid account ID prefix")] InvalidAccountIdPrefix(#[source] AccountIdError), #[error("account witness merkle path depth {0} does not match AccountTree::DEPTH")] @@ -301,10 +295,6 @@ pub enum AccountTreeError { #[derive(Debug, Error)] pub enum AddressError { - #[error("tag length {0} should be {expected} bits for network accounts", - expected = NoteTag::DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH - )] - CustomTagLengthNotAllowedForNetworkAccounts(u8), #[error("tag length {0} is too large, must be less than or equal to {max}", max = NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH )] @@ -320,7 +310,7 @@ pub enum AddressError { #[error("{error_msg}")] DecodeError { error_msg: Box, - // thiserror will return this when calling Error::source on NoteError. + // thiserror will return this when calling Error::source on AddressError. 
source: Option>, }, #[error("found unknown routing parameter key {0}")] @@ -423,9 +413,13 @@ pub enum AccountDeltaError { #[derive(Debug, Error)] pub enum StorageMapError { #[error("map entries contain key {key} twice with values {value0} and {value1}")] - DuplicateKey { key: Word, value0: Word, value1: Word }, - #[error("map key {raw_key} is not present in provided SMT proof")] - MissingKey { raw_key: Word }, + DuplicateKey { + key: StorageMapKey, + value0: Word, + value1: Word, + }, + #[error("map key {key} is not present in provided SMT proof")] + MissingKey { key: StorageMapKey }, } // BATCH ACCOUNT UPDATE ERROR @@ -461,33 +455,41 @@ pub enum AssetError { FungibleAssetAmountTooBig(u64), #[error("subtracting {subtrahend} from fungible asset amount {minuend} would underflow")] FungibleAssetAmountNotSufficient { minuend: u64, subtrahend: u64 }, - #[error("fungible asset word {0} does not contain expected ZERO at word index 1")] - FungibleAssetExpectedZero(Word), #[error( - "cannot add fungible asset with issuer {other_issuer} to fungible asset with issuer {original_issuer}" + "cannot combine fungible assets with different vault keys: {original_key} and {other_key}" )] - FungibleAssetInconsistentFaucetIds { - original_issuer: AccountId, - other_issuer: AccountId, + FungibleAssetInconsistentVaultKeys { + original_key: AssetVaultKey, + other_key: AssetVaultKey, }, #[error("faucet account ID in asset is invalid")] InvalidFaucetAccountId(#[source] Box), - #[error("faucet account ID in asset has a non-faucet prefix: {}", .0)] - InvalidFaucetAccountIdPrefix(AccountIdPrefix), #[error( "faucet id {0} of type {id_type} must be of type {expected_ty} for fungible assets", id_type = .0.account_type(), expected_ty = AccountType::FungibleFaucet )] FungibleFaucetIdTypeMismatch(AccountId), + #[error( + "asset ID prefix and suffix in a non-fungible asset's vault key must match indices 0 and 1 in the value, but asset ID was {asset_id} and value was {value}" + )] + 
NonFungibleAssetIdMustMatchValue { asset_id: AssetId, value: Word }, + #[error("asset ID prefix and suffix in a fungible asset's vault key must be zero but was {0}")] + FungibleAssetIdMustBeZero(AssetId), + #[error( + "the three most significant elements in a fungible asset's value must be zero but provided value was {0}" + )] + FungibleAssetValueMostSignificantElementsMustBeZero(Word), #[error( "faucet id {0} of type {id_type} must be of type {expected_ty} for non fungible assets", id_type = .0.account_type(), expected_ty = AccountType::NonFungibleFaucet )] - NonFungibleFaucetIdTypeMismatch(AccountIdPrefix), - #[error("asset vault key {actual} does not match expected asset vault key {expected}")] - AssetVaultKeyMismatch { actual: Word, expected: Word }, + NonFungibleFaucetIdTypeMismatch(AccountId), + #[error("smt proof in asset witness contains invalid key or value")] + AssetWitnessInvalid(#[source] Box), + #[error("invalid native asset callbacks encoding: {0}")] + InvalidAssetCallbackFlag(u8), } // TOKEN SYMBOL ERROR @@ -497,7 +499,12 @@ pub enum AssetError { pub enum TokenSymbolError { #[error("token symbol value {0} cannot exceed {max}", max = TokenSymbol::MAX_ENCODED_VALUE)] ValueTooLarge(u64), - #[error("token symbol should have length between 1 and 6 characters, but {0} was provided")] + #[error( + "token symbol value {0} cannot be less than {min}", + min = TokenSymbol::MIN_ENCODED_VALUE + )] + ValueTooSmall(u64), + #[error("token symbol should have length between 1 and 12 characters, but {0} was provided")] InvalidLength(usize), #[error("token symbol contains a character that is not uppercase ASCII")] InvalidCharacter, @@ -535,8 +542,6 @@ pub enum AssetVaultError { pub enum PartialAssetVaultError { #[error("provided SMT entry {entry} is not a valid asset")] InvalidAssetInSmt { entry: Word, source: AssetError }, - #[error("expected asset vault key to be {expected} but it was {actual}")] - AssetVaultKeyMismatch { expected: AssetVaultKey, actual: Word }, 
#[error("failed to add asset proof")] FailedToAddProof(#[source] MerkleError), #[error("asset is not tracked in the partial vault")] @@ -548,6 +553,14 @@ pub enum PartialAssetVaultError { #[derive(Debug, Error)] pub enum NoteError { + #[error("library does not contain a procedure with @note_script attribute")] + NoteScriptNoProcedureWithAttribute, + #[error("library contains multiple procedures with @note_script attribute")] + NoteScriptMultipleProceduresWithAttribute, + #[error("procedure at path '{0}' not found in library")] + NoteScriptProcedureNotFound(Box), + #[error("procedure at path '{0}' does not have @note_script attribute")] + NoteScriptProcedureMissingAttribute(Box), #[error("note tag length {0} exceeds the maximum of {max}", max = NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH)] NoteTagLengthTooLarge(u8), #[error("duplicate fungible asset from issuer {0} in note")] @@ -560,20 +573,14 @@ pub enum NoteError { AddFungibleAssetBalanceError(#[source] AssetError), #[error("note sender is not a valid account ID")] NoteSenderInvalidAccountId(#[source] AccountIdError), - #[error( - "note execution hint tag {0} must be in range {from}..={to}", - from = NoteExecutionHint::NONE_TAG, - to = NoteExecutionHint::ON_BLOCK_SLOT_TAG, - )] - NoteExecutionHintTagOutOfRange(u8), #[error("note execution hint after block variant cannot contain u32::MAX")] NoteExecutionHintAfterBlockCannotBeU32Max, #[error("invalid note execution hint payload {1} for tag {0}")] InvalidNoteExecutionHintPayload(u8, u32), - #[error("note type {0} does not match any of the valid note types {public}, {private} or {encrypted}", - public = NoteType::Public, - private = NoteType::Private, - encrypted = NoteType::Encrypted, + #[error( + "note type {0} does not match any of the valid note types {public} or {private}", + public = NoteType::Public, + private = NoteType::Private, )] UnknownNoteType(Box), #[error("note location index {node_index_in_block} is out of bounds 0..={highest_index}")] @@ -589,8 +596,10 @@ 
pub enum NoteError { NoteScriptDeserializationError(#[source] DeserializationError), #[error("note contains {0} assets which exceeds the maximum of {max}", max = NoteAssets::MAX_NUM_ASSETS)] TooManyAssets(usize), - #[error("note contains {0} inputs which exceeds the maximum of {max}", max = MAX_INPUTS_PER_NOTE)] - TooManyInputs(usize), + #[error("note contains {0} storage items which exceeds the maximum of {max}", max = MAX_NOTE_STORAGE_ITEMS)] + TooManyStorageItems(usize), + #[error("invalid note storage length: expected {expected} items, got {actual}")] + InvalidNoteStorageLength { expected: usize, actual: usize }, #[error("note tag requires a public note but the note is of type {0}")] PublicNoteRequired(NoteType), #[error( @@ -602,6 +611,20 @@ pub enum NoteError { UnknownNoteAttachmentKind(u8), #[error("note attachment of kind None must have attachment scheme None")] AttachmentKindNoneMustHaveAttachmentSchemeNone, + #[error( + "note attachment kind mismatch: header has {header_kind:?} but attachment has {attachment_kind:?}" + )] + AttachmentKindMismatch { + header_kind: NoteAttachmentKind, + attachment_kind: NoteAttachmentKind, + }, + #[error( + "note attachment scheme mismatch: header has {header_scheme:?} but attachment has {attachment_scheme:?}" + )] + AttachmentSchemeMismatch { + header_scheme: NoteAttachmentScheme, + attachment_scheme: NoteAttachmentScheme, + }, #[error("{error_msg}")] Other { error_msg: Box, @@ -734,7 +757,7 @@ pub enum TransactionInputsExtractionError { MissingMapRoot, #[error("failed to construct SMT proof")] SmtProofError(#[from] SmtProofError), - #[error("failed to construct asset witness")] + #[error("failed to construct an asset")] AssetError(#[from] AssetError), #[error("failed to handle storage map data")] StorageMapError(#[from] StorageMapError), @@ -771,6 +794,24 @@ pub enum TransactionOutputError { AccountUpdateCommitment(Box), } +// OUTPUT NOTE ERROR +// 
================================================================================================ + +/// Errors that can occur when creating a +/// [`PublicOutputNote`](crate::transaction::PublicOutputNote) or +/// [`PrivateNoteHeader`](crate::transaction::PrivateNoteHeader). +#[derive(Debug, Error)] +pub enum OutputNoteError { + #[error("note with id {0} is private but expected a public note")] + NoteIsPrivate(NoteId), + #[error("note with id {0} is public but expected a private note")] + NoteIsPublic(NoteId), + #[error( + "public note with id {note_id} has size {note_size} bytes which exceeds maximum note size of {NOTE_MAX_SIZE}" + )] + NoteSizeLimitExceeded { note_id: NoteId, note_size: usize }, +} + // TRANSACTION EVENT PARSING ERROR // ================================================================================================ @@ -825,7 +866,7 @@ pub enum ProvenTransactionError { )] ExistingPublicStateAccountRequiresDeltaDetails(AccountId), #[error("failed to construct output notes for proven transaction")] - OutputNotesError(TransactionOutputError), + OutputNotesError(#[source] TransactionOutputError), #[error( "account update of size {update_size} for account {account_id} exceeds maximum update size of {ACCOUNT_UPDATE_MAX_SIZE}" )] @@ -1176,5 +1217,5 @@ pub enum NullifierTreeError { #[derive(Debug, Error)] pub enum AuthSchemeError { #[error("auth scheme identifier `{0}` is not valid")] - InvalidAuthSchemeIdentifier(u8), + InvalidAuthSchemeIdentifier(String), } diff --git a/crates/miden-protocol/src/errors/protocol.rs b/crates/miden-protocol/src/errors/protocol.rs deleted file mode 100644 index 73b7085d33..0000000000 --- a/crates/miden-protocol/src/errors/protocol.rs +++ /dev/null @@ -1,37 +0,0 @@ -use crate::errors::MasmError; - -// This file is generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. 
-// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). - -// PROTOCOL LIB ERRORS -// ================================================================================================ - -/// Error Message: "the account ID must have storage mode public if the network flag is set" -pub const ERR_ACCOUNT_ID_NON_PUBLIC_NETWORK_ACCOUNT: MasmError = MasmError::from_static_str("the account ID must have storage mode public if the network flag is set"); -/// Error Message: "least significant byte of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("least significant byte of the account ID suffix must be zero"); -/// Error Message: "most significant bit of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("most significant bit of the account ID suffix must be zero"); -/// Error Message: "unknown account storage mode in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE: MasmError = MasmError::from_static_str("unknown account storage mode in account ID"); -/// Error Message: "unknown version in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_VERSION: MasmError = MasmError::from_static_str("unknown version in account ID"); - -/// Error Message: "fungible asset build operation called with amount that exceeds the maximum allowed asset amount" -pub const ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_ALLOWED_AMOUNT: MasmError = MasmError::from_static_str("fungible asset build operation called with amount that exceeds the maximum allowed asset amount"); -/// Error Message: "failed to build the fungible asset because the provided faucet id is not from a fungible faucet" -pub const ERR_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID: MasmError = MasmError::from_static_str("failed to 
build the fungible asset because the provided faucet id is not from a fungible faucet"); - -/// Error Message: "failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet" -pub const ERR_NON_FUNGIBLE_ASSET_PROVIDED_FAUCET_ID_IS_INVALID: MasmError = MasmError::from_static_str("failed to build the non-fungible asset because the provided faucet id is not from a non-fungible faucet"); - -/// Error Message: "note data does not match the commitment" -pub const ERR_NOTE_DATA_DOES_NOT_MATCH_COMMITMENT: MasmError = MasmError::from_static_str("note data does not match the commitment"); -/// Error Message: "the specified number of note inputs does not match the actual number" -pub const ERR_NOTE_INVALID_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("the specified number of note inputs does not match the actual number"); - -/// Error Message: "number of note inputs exceeded the maximum limit of 1024" -pub const ERR_PROLOGUE_NOTE_INPUTS_LEN_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("number of note inputs exceeded the maximum limit of 1024"); diff --git a/crates/miden-protocol/src/errors/tx_kernel.rs b/crates/miden-protocol/src/errors/tx_kernel.rs deleted file mode 100644 index d267df0216..0000000000 --- a/crates/miden-protocol/src/errors/tx_kernel.rs +++ /dev/null @@ -1,244 +0,0 @@ -use crate::errors::MasmError; - -// This file is generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. -// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). 
- -// TX KERNEL ERRORS -// ================================================================================================ - -/// Error Message: "computed account code commitment does not match recorded account code commitment" -pub const ERR_ACCOUNT_CODE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("computed account code commitment does not match recorded account code commitment"); -/// Error Message: "account code must be updatable for it to be possible to set new code" -pub const ERR_ACCOUNT_CODE_IS_NOT_UPDATABLE: MasmError = MasmError::from_static_str("account code must be updatable for it to be possible to set new code"); -/// Error Message: "nonce must be incremented if account vault or account storage changed" -pub const ERR_ACCOUNT_DELTA_NONCE_MUST_BE_INCREMENTED_IF_VAULT_OR_STORAGE_CHANGED: MasmError = MasmError::from_static_str("nonce must be incremented if account vault or account storage changed"); -/// Error Message: "the account ID must have storage mode public if the network flag is set" -pub const ERR_ACCOUNT_ID_NON_PUBLIC_NETWORK_ACCOUNT: MasmError = MasmError::from_static_str("the account ID must have storage mode public if the network flag is set"); -/// Error Message: "least significant byte of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("least significant byte of the account ID suffix must be zero"); -/// Error Message: "most significant bit of the account ID suffix must be zero" -pub const ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("most significant bit of the account ID suffix must be zero"); -/// Error Message: "unknown account storage mode in account ID" -pub const ERR_ACCOUNT_ID_UNKNOWN_STORAGE_MODE: MasmError = MasmError::from_static_str("unknown account storage mode in account ID"); -/// Error Message: "unknown version in account ID" -pub const 
ERR_ACCOUNT_ID_UNKNOWN_VERSION: MasmError = MasmError::from_static_str("unknown version in account ID"); -/// Error Message: "the active account is not native" -pub const ERR_ACCOUNT_IS_NOT_NATIVE: MasmError = MasmError::from_static_str("the active account is not native"); -/// Error Message: "account nonce is already at its maximum possible value" -pub const ERR_ACCOUNT_NONCE_AT_MAX: MasmError = MasmError::from_static_str("account nonce is already at its maximum possible value"); -/// Error Message: "account nonce can only be incremented once" -pub const ERR_ACCOUNT_NONCE_CAN_ONLY_BE_INCREMENTED_ONCE: MasmError = MasmError::from_static_str("account nonce can only be incremented once"); -/// Error Message: "number of account procedures must be at least 2" -pub const ERR_ACCOUNT_NOT_ENOUGH_PROCEDURES: MasmError = MasmError::from_static_str("number of account procedures must be at least 2"); -/// Error Message: "provided procedure index is out of bounds" -pub const ERR_ACCOUNT_PROC_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("provided procedure index is out of bounds"); -/// Error Message: "account procedure is not the authentication procedure; some procedures (e.g. `incr_nonce`) can be called only from the authentication procedure" -pub const ERR_ACCOUNT_PROC_NOT_AUTH_PROC: MasmError = MasmError::from_static_str("account procedure is not the authentication procedure; some procedures (e.g. 
`incr_nonce`) can be called only from the authentication procedure"); -/// Error Message: "procedure is not part of the account code" -pub const ERR_ACCOUNT_PROC_NOT_PART_OF_ACCOUNT_CODE: MasmError = MasmError::from_static_str("procedure is not part of the account code"); -/// Error Message: "failed to read an account map item from a non-map storage slot" -pub const ERR_ACCOUNT_READING_MAP_VALUE_FROM_NON_MAP_SLOT: MasmError = MasmError::from_static_str("failed to read an account map item from a non-map storage slot"); -/// Error Message: "ID of the new account does not match the ID computed from the seed and commitments" -pub const ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH: MasmError = MasmError::from_static_str("ID of the new account does not match the ID computed from the seed and commitments"); -/// Error Message: "failed to write an account map item to a non-map storage slot" -pub const ERR_ACCOUNT_SETTING_MAP_ITEM_ON_NON_MAP_SLOT: MasmError = MasmError::from_static_str("failed to write an account map item to a non-map storage slot"); -/// Error Message: "failed to write an account value item to a non-value storage slot" -pub const ERR_ACCOUNT_SETTING_VALUE_ITEM_ON_NON_VALUE_SLOT: MasmError = MasmError::from_static_str("failed to write an account value item to a non-value storage slot"); -/// Error Message: "depth of the nested FPI calls exceeded 64" -pub const ERR_ACCOUNT_STACK_OVERFLOW: MasmError = MasmError::from_static_str("depth of the nested FPI calls exceeded 64"); -/// Error Message: "failed to end foreign context because the active account is the native account" -pub const ERR_ACCOUNT_STACK_UNDERFLOW: MasmError = MasmError::from_static_str("failed to end foreign context because the active account is the native account"); -/// Error Message: "computed account storage commitment does not match recorded account storage commitment" -pub const ERR_ACCOUNT_STORAGE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("computed account storage 
commitment does not match recorded account storage commitment"); -/// Error Message: "storage map entries provided as advice inputs do not have the same storage map root as the root of the map the new account commits to" -pub const ERR_ACCOUNT_STORAGE_MAP_ENTRIES_DO_NOT_MATCH_MAP_ROOT: MasmError = MasmError::from_static_str("storage map entries provided as advice inputs do not have the same storage map root as the root of the map the new account commits to"); -/// Error Message: "slot IDs must be unique and sorted in ascending order" -pub const ERR_ACCOUNT_STORAGE_SLOTS_MUST_BE_SORTED_AND_UNIQUE: MasmError = MasmError::from_static_str("slot IDs must be unique and sorted in ascending order"); -/// Error Message: "number of account procedures exceeds the maximum limit of 256" -pub const ERR_ACCOUNT_TOO_MANY_PROCEDURES: MasmError = MasmError::from_static_str("number of account procedures exceeds the maximum limit of 256"); -/// Error Message: "number of account storage slots exceeds the maximum limit of 255" -pub const ERR_ACCOUNT_TOO_MANY_STORAGE_SLOTS: MasmError = MasmError::from_static_str("number of account storage slots exceeds the maximum limit of 255"); -/// Error Message: "storage slot with the provided name does not exist" -pub const ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME: MasmError = MasmError::from_static_str("storage slot with the provided name does not exist"); - -/// Error Message: "auth procedure has been called from outside the epilogue" -pub const ERR_EPILOGUE_AUTH_PROCEDURE_CALLED_FROM_WRONG_CONTEXT: MasmError = MasmError::from_static_str("auth procedure has been called from outside the epilogue"); -/// Error Message: "executed transaction neither changed the account state, nor consumed any notes" -pub const ERR_EPILOGUE_EXECUTED_TRANSACTION_IS_EMPTY: MasmError = MasmError::from_static_str("executed transaction neither changed the account state, nor consumed any notes"); -/// Error Message: "nonce cannot be 0 after an account-creating transaction" 
-pub const ERR_EPILOGUE_NONCE_CANNOT_BE_0: MasmError = MasmError::from_static_str("nonce cannot be 0 after an account-creating transaction"); -/// Error Message: "total number of assets in the account and all involved notes must stay the same" -pub const ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME: MasmError = MasmError::from_static_str("total number of assets in the account and all involved notes must stay the same"); - -/// Error Message: "asset amount to burn can not exceed the existing total supply" -pub const ERR_FAUCET_BURN_CANNOT_EXCEED_EXISTING_TOTAL_SUPPLY: MasmError = MasmError::from_static_str("asset amount to burn can not exceed the existing total supply"); -/// Error Message: "the burn_non_fungible_asset procedure can only be called on a non-fungible faucet" -pub const ERR_FAUCET_BURN_NON_FUNGIBLE_ASSET_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET: MasmError = MasmError::from_static_str("the burn_non_fungible_asset procedure can only be called on a non-fungible faucet"); -/// Error Message: "the faucet_is_non_fungible_asset_issued procedure can only be called on a non-fungible faucet" -pub const ERR_FAUCET_IS_NF_ASSET_ISSUED_PROC_CAN_ONLY_BE_CALLED_ON_NON_FUNGIBLE_FAUCET: MasmError = MasmError::from_static_str("the faucet_is_non_fungible_asset_issued procedure can only be called on a non-fungible faucet"); -/// Error Message: "asset mint operation would cause the new total supply to exceed the maximum allowed asset amount" -pub const ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT: MasmError = MasmError::from_static_str("asset mint operation would cause the new total supply to exceed the maximum allowed asset amount"); -/// Error Message: "failed to mint new non-fungible asset because it was already issued" -pub const ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED: MasmError = MasmError::from_static_str("failed to mint new non-fungible asset because it was already issued"); -/// Error Message: "failed to burn non-existent non-fungible 
asset in the vault" -pub const ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND: MasmError = MasmError::from_static_str("failed to burn non-existent non-fungible asset in the vault"); -/// Error Message: "for faucets the FAUCET_STORAGE_DATA_SLOT storage slot is reserved and can not be used with set_account_item" -pub const ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED: MasmError = MasmError::from_static_str("for faucets the FAUCET_STORAGE_DATA_SLOT storage slot is reserved and can not be used with set_account_item"); -/// Error Message: "the faucet_get_total_fungible_asset_issuance procedure can only be called on a fungible faucet" -pub const ERR_FAUCET_TOTAL_ISSUANCE_PROC_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_FAUCET: MasmError = MasmError::from_static_str("the faucet_get_total_fungible_asset_issuance procedure can only be called on a fungible faucet"); - -/// Error Message: "creation of a foreign context against the native account is forbidden" -pub const ERR_FOREIGN_ACCOUNT_CONTEXT_AGAINST_NATIVE_ACCOUNT: MasmError = MasmError::from_static_str("creation of a foreign context against the native account is forbidden"); -/// Error Message: "ID of the provided foreign account equals zero" -pub const ERR_FOREIGN_ACCOUNT_ID_IS_ZERO: MasmError = MasmError::from_static_str("ID of the provided foreign account equals zero"); -/// Error Message: "commitment of the foreign account in the advice provider does not match the commitment in the account tree" -pub const ERR_FOREIGN_ACCOUNT_INVALID_COMMITMENT: MasmError = MasmError::from_static_str("commitment of the foreign account in the advice provider does not match the commitment in the account tree"); -/// Error Message: "maximum allowed number of foreign account to be loaded (64) was exceeded" -pub const ERR_FOREIGN_ACCOUNT_MAX_NUMBER_EXCEEDED: MasmError = MasmError::from_static_str("maximum allowed number of foreign account to be loaded (64) was exceeded"); - -/// Error Message: "the origin of the fungible asset is not this faucet" -pub 
const ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN: MasmError = MasmError::from_static_str("the origin of the fungible asset is not this faucet"); -/// Error Message: "malformed fungible asset: `ASSET[1]` must be 0" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ONE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[1]` must be 0"); -/// Error Message: "malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_TWO_AND_THREE_MUST_BE_FUNGIBLE_FAUCET_ID: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[2]` and `ASSET[3]` must be a valid fungible faucet id"); -/// Error Message: "malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount" -pub const ERR_FUNGIBLE_ASSET_FORMAT_ELEMENT_ZERO_MUST_BE_WITHIN_LIMITS: MasmError = MasmError::from_static_str("malformed fungible asset: `ASSET[0]` exceeds the maximum allowed amount"); - -/// Error Message: "requested input note index should be less than the total number of input notes" -pub const ERR_INPUT_NOTE_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("requested input note index should be less than the total number of input notes"); - -/// Error Message: "provided kernel procedure offset is out of bounds" -pub const ERR_KERNEL_PROCEDURE_OFFSET_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("provided kernel procedure offset is out of bounds"); - -/// Error Message: "map cannot be empty when proving absence after an entry" -pub const ERR_LINK_MAP_CANNOT_BE_EMPTY_ON_ABSENCE_AFTER_ENTRY: MasmError = MasmError::from_static_str("map cannot be empty when proving absence after an entry"); -/// Error Message: "host-provided entry ptr is not 'link map entry'-aligned" -pub const ERR_LINK_MAP_ENTRY_PTR_IS_NOT_ENTRY_ALIGNED: MasmError = MasmError::from_static_str("host-provided entry ptr is not 'link map entry'-aligned"); -/// Error Message: "host-provided entry ptr is outside 
the valid memory region" -pub const ERR_LINK_MAP_ENTRY_PTR_IS_OUTSIDE_VALID_MEMORY_REGION: MasmError = MasmError::from_static_str("host-provided entry ptr is outside the valid memory region"); -/// Error Message: "map ptr stored in host-provided entry does not match actual pointer of the map" -pub const ERR_LINK_MAP_MAP_PTR_IN_ENTRY_DOES_NOT_MATCH_EXPECTED_MAP_PTR: MasmError = MasmError::from_static_str("map ptr stored in host-provided entry does not match actual pointer of the map"); -/// Error Message: "number of link map entries exceeds maximum" -pub const ERR_LINK_MAP_MAX_ENTRIES_EXCEEDED: MasmError = MasmError::from_static_str("number of link map entries exceeds maximum"); -/// Error Message: "provided key does not match key in map entry" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_EQUAL_TO_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key does not match key in map entry"); -/// Error Message: "provided key is not greater than the entry key" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_GREATER_THAN_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key is not greater than the entry key"); -/// Error Message: "provided key is not less than the entry key" -pub const ERR_LINK_MAP_PROVIDED_KEY_NOT_LESS_THAN_ENTRY_KEY: MasmError = MasmError::from_static_str("provided key is not less than the entry key"); - -/// Error Message: "non-fungible asset that already exists in the note cannot be added again" -pub const ERR_NON_FUNGIBLE_ASSET_ALREADY_EXISTS: MasmError = MasmError::from_static_str("non-fungible asset that already exists in the note cannot be added again"); -/// Error Message: "the origin of the non-fungible asset is not this faucet" -pub const ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN: MasmError = MasmError::from_static_str("the origin of the non-fungible asset is not this faucet"); -/// Error Message: "malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id" -pub const 
ERR_NON_FUNGIBLE_ASSET_FORMAT_ELEMENT_THREE_MUST_BE_FUNGIBLE_FAUCET_ID: MasmError = MasmError::from_static_str("malformed non-fungible asset: `ASSET[3]` is not a valid non-fungible faucet id"); -/// Error Message: "malformed non-fungible asset: the most significant bit must be 0" -pub const ERR_NON_FUNGIBLE_ASSET_FORMAT_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO: MasmError = MasmError::from_static_str("malformed non-fungible asset: the most significant bit must be 0"); - -/// Error Message: "failed to access note assets of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_ASSETS_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note assets of active note because no note is currently being processed"); -/// Error Message: "failed to access note inputs of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_INPUTS_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note inputs of active note because no note is currently being processed"); -/// Error Message: "failed to access note metadata of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_METADATA_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note metadata of active note because no note is currently being processed"); -/// Error Message: "failed to access note recipient of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_RECIPIENT_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note recipient of active note because no note is currently being processed"); -/// Error Message: "failed to access note script root of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SCRIPT_ROOT_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = 
MasmError::from_static_str("failed to access note script root of active note because no note is currently being processed"); -/// Error Message: "failed to access note serial number of active note because no note is currently being processed" -pub const ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_SERIAL_NUMBER_WHILE_NO_NOTE_BEING_PROCESSED: MasmError = MasmError::from_static_str("failed to access note serial number of active note because no note is currently being processed"); -/// Error Message: "adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807" -pub const ERR_NOTE_FUNGIBLE_MAX_AMOUNT_EXCEEDED: MasmError = MasmError::from_static_str("adding a fungible asset to a note cannot exceed the max_amount of 9223372036854775807"); -/// Error Message: "failed to find note at the given index; index must be within [0, num_of_notes]" -pub const ERR_NOTE_INVALID_INDEX: MasmError = MasmError::from_static_str("failed to find note at the given index; index must be within [0, num_of_notes]"); -/// Error Message: "invalid note type" -pub const ERR_NOTE_INVALID_TYPE: MasmError = MasmError::from_static_str("invalid note type"); -/// Error Message: "number of assets in a note exceed 255" -pub const ERR_NOTE_NUM_OF_ASSETS_EXCEED_LIMIT: MasmError = MasmError::from_static_str("number of assets in a note exceed 255"); -/// Error Message: "the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero" -pub const ERR_NOTE_TAG_MUST_BE_U32: MasmError = MasmError::from_static_str("the note's tag must fit into a u32 so the 32 most significant bits of the felt must be zero"); - -/// Error Message: "attachment kind None requires ATTACHMENT to be set to an empty word" -pub const ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_BE_EMPTY_WORD: MasmError = MasmError::from_static_str("attachment kind None requires ATTACHMENT to be set to an empty word"); -/// Error Message: "attachment kind none must have attachment scheme none" -pub const 
ERR_OUTPUT_NOTE_ATTACHMENT_KIND_NONE_MUST_HAVE_ATTACHMENT_SCHEME_NONE: MasmError = MasmError::from_static_str("attachment kind none must have attachment scheme none"); -/// Error Message: "requested output note index should be less than the total number of created output notes" -pub const ERR_OUTPUT_NOTE_INDEX_OUT_OF_BOUNDS: MasmError = MasmError::from_static_str("requested output note index should be less than the total number of created output notes"); -/// Error Message: "attachment scheme and attachment kind must fit into u32s" -pub const ERR_OUTPUT_NOTE_INVALID_ATTACHMENT_SCHEMES: MasmError = MasmError::from_static_str("attachment scheme and attachment kind must fit into u32s"); -/// Error Message: "attachment kind variant must be between 0 and 2" -pub const ERR_OUTPUT_NOTE_UNKNOWN_ATTACHMENT_KIND: MasmError = MasmError::from_static_str("attachment kind variant must be between 0 and 2"); - -/// Error Message: "existing accounts must have a non-zero nonce" -pub const ERR_PROLOGUE_EXISTING_ACCOUNT_MUST_HAVE_NON_ZERO_NONCE: MasmError = MasmError::from_static_str("existing accounts must have a non-zero nonce"); -/// Error Message: "the provided global inputs do not match the block commitment" -pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_COMMITMENT: MasmError = MasmError::from_static_str("the provided global inputs do not match the block commitment"); -/// Error Message: "the provided global inputs do not match the block number commitment" -pub const ERR_PROLOGUE_GLOBAL_INPUTS_PROVIDED_DO_NOT_MATCH_BLOCK_NUMBER_COMMITMENT: MasmError = MasmError::from_static_str("the provided global inputs do not match the block number commitment"); -/// Error Message: "note commitment computed from the input note data does not match given note commitment" -pub const ERR_PROLOGUE_INPUT_NOTES_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("note commitment computed from the input note data does not match given note commitment"); -/// Error Message: 
"sequential hash over kernel procedures does not match kernel commitment from block" -pub const ERR_PROLOGUE_KERNEL_PROCEDURE_COMMITMENT_MISMATCH: MasmError = MasmError::from_static_str("sequential hash over kernel procedures does not match kernel commitment from block"); -/// Error Message: "account IDs provided via global inputs and advice provider do not match" -pub const ERR_PROLOGUE_MISMATCH_OF_ACCOUNT_IDS_FROM_GLOBAL_INPUTS_AND_ADVICE_PROVIDER: MasmError = MasmError::from_static_str("account IDs provided via global inputs and advice provider do not match"); -/// Error Message: "reference block MMR and note's authentication MMR must match" -pub const ERR_PROLOGUE_MISMATCH_OF_REFERENCE_BLOCK_MMR_AND_NOTE_AUTHENTICATION_MMR: MasmError = MasmError::from_static_str("reference block MMR and note's authentication MMR must match"); -/// Error Message: "native asset account ID in reference block is not of type fungible faucet" -pub const ERR_PROLOGUE_NATIVE_ASSET_ID_IS_NOT_FUNGIBLE: MasmError = MasmError::from_static_str("native asset account ID in reference block is not of type fungible faucet"); -/// Error Message: "new account must have a zero nonce" -pub const ERR_PROLOGUE_NEW_ACCOUNT_NONCE_MUST_BE_ZERO: MasmError = MasmError::from_static_str("new account must have a zero nonce"); -/// Error Message: "new account must have an empty vault" -pub const ERR_PROLOGUE_NEW_ACCOUNT_VAULT_MUST_BE_EMPTY: MasmError = MasmError::from_static_str("new account must have an empty vault"); -/// Error Message: "reserved slot for new fungible faucet has an invalid type" -pub const ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE: MasmError = MasmError::from_static_str("reserved slot for new fungible faucet has an invalid type"); -/// Error Message: "reserved slot for new fungible faucet is not empty" -pub const ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY: MasmError = MasmError::from_static_str("reserved slot for new fungible faucet is not empty"); -/// 
Error Message: "reserved slot for new non-fungible faucet has an invalid type" -pub const ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_INVALID_TYPE: MasmError = MasmError::from_static_str("reserved slot for new non-fungible faucet has an invalid type"); -/// Error Message: "reserved slot for non-fungible faucet is not a valid empty SMT" -pub const ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPTY_SMT: MasmError = MasmError::from_static_str("reserved slot for non-fungible faucet is not a valid empty SMT"); -/// Error Message: "failed to authenticate note inclusion in block" -pub const ERR_PROLOGUE_NOTE_AUTHENTICATION_FAILED: MasmError = MasmError::from_static_str("failed to authenticate note inclusion in block"); -/// Error Message: "number of input notes exceeds the kernel's maximum limit of 1024" -pub const ERR_PROLOGUE_NUMBER_OF_INPUT_NOTES_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of input notes exceeds the kernel's maximum limit of 1024"); -/// Error Message: "number of note assets exceeds the maximum limit of 256" -pub const ERR_PROLOGUE_NUMBER_OF_NOTE_ASSETS_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of note assets exceeds the maximum limit of 256"); -/// Error Message: "number of note inputs exceeded the maximum limit of 1024" -pub const ERR_PROLOGUE_NUMBER_OF_NOTE_INPUTS_EXCEEDED_LIMIT: MasmError = MasmError::from_static_str("number of note inputs exceeded the maximum limit of 1024"); -/// Error Message: "account data provided does not match the commitment recorded on-chain" -pub const ERR_PROLOGUE_PROVIDED_ACCOUNT_DATA_DOES_NOT_MATCH_ON_CHAIN_COMMITMENT: MasmError = MasmError::from_static_str("account data provided does not match the commitment recorded on-chain"); -/// Error Message: "provided info about assets of an input does not match its commitment" -pub const ERR_PROLOGUE_PROVIDED_INPUT_ASSETS_INFO_DOES_NOT_MATCH_ITS_COMMITMENT: MasmError = MasmError::from_static_str("provided info 
about assets of an input does not match its commitment"); -/// Error Message: "verification base fee must fit into a u32" -pub const ERR_PROLOGUE_VERIFICATION_BASE_FEE_MUST_BE_U32: MasmError = MasmError::from_static_str("verification base fee must fit into a u32"); - -/// Error Message: "transaction expiration block delta must be within 0x1 and 0xFFFF" -pub const ERR_TX_INVALID_EXPIRATION_DELTA: MasmError = MasmError::from_static_str("transaction expiration block delta must be within 0x1 and 0xFFFF"); -/// Error Message: "number of output notes in the transaction exceeds the maximum limit of 1024" -pub const ERR_TX_NUMBER_OF_OUTPUT_NOTES_EXCEEDS_LIMIT: MasmError = MasmError::from_static_str("number of output notes in the transaction exceeds the maximum limit of 1024"); -/// Error Message: "the transaction script is missing" -pub const ERR_TX_TRANSACTION_SCRIPT_IS_MISSING: MasmError = MasmError::from_static_str("the transaction script is missing"); - -/// Error Message: "failed to add fungible asset to the asset vault due to the initial value being invalid" -pub const ERR_VAULT_ADD_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID: MasmError = MasmError::from_static_str("failed to add fungible asset to the asset vault due to the initial value being invalid"); -/// Error Message: "failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove" -pub const ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW: MasmError = MasmError::from_static_str("failed to remove the fungible asset from the vault since the amount of the asset in the vault is less than the amount to remove"); -/// Error Message: "adding the fungible asset to the vault would exceed the max amount of 9223372036854775807" -pub const ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED: MasmError = MasmError::from_static_str("adding the fungible asset to the vault would exceed the max amount of 9223372036854775807"); -/// Error Message: "get_balance can 
only be called on a fungible asset" -pub const ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET: MasmError = MasmError::from_static_str("get_balance can only be called on a fungible asset"); -/// Error Message: "the has_non_fungible_asset procedure can only be called on a non-fungible faucet" -pub const ERR_VAULT_HAS_NON_FUNGIBLE_ASSET_PROC_CAN_BE_CALLED_ONLY_WITH_NON_FUNGIBLE_ASSET: MasmError = MasmError::from_static_str("the has_non_fungible_asset procedure can only be called on a non-fungible faucet"); -/// Error Message: "the non-fungible asset already exists in the asset vault" -pub const ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS: MasmError = MasmError::from_static_str("the non-fungible asset already exists in the asset vault"); -/// Error Message: "failed to remove non-existent non-fungible asset from the vault" -pub const ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND: MasmError = MasmError::from_static_str("failed to remove non-existent non-fungible asset from the vault"); -/// Error Message: "peek_balance can only be called on a fungible asset" -pub const ERR_VAULT_PEEK_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET: MasmError = MasmError::from_static_str("peek_balance can only be called on a fungible asset"); -/// Error Message: "failed to remove fungible asset from the asset vault due to the initial value being invalid" -pub const ERR_VAULT_REMOVE_FUNGIBLE_ASSET_FAILED_INITIAL_VALUE_INVALID: MasmError = MasmError::from_static_str("failed to remove fungible asset from the asset vault due to the initial value being invalid"); diff --git a/crates/miden-protocol/src/lib.rs b/crates/miden-protocol/src/lib.rs index 0b4b8be503..e04f7b7ca1 100644 --- a/crates/miden-protocol/src/lib.rs +++ b/crates/miden-protocol/src/lib.rs @@ -27,9 +27,9 @@ mod constants; pub use constants::*; pub use miden_core::mast::{MastForest, MastNodeId}; pub use miden_core::prettier::PrettyPrint; -pub use miden_core::{EMPTY_WORD, Felt, FieldElement, ONE, StarkField, WORD_SIZE, 
ZERO}; +pub use miden_core::{EMPTY_WORD, Felt, ONE, WORD_SIZE, ZERO, field}; pub use miden_core_lib::CoreLibrary; -pub use miden_crypto::hash::rpo::Rpo256 as Hasher; +pub use miden_crypto::hash::poseidon2::Poseidon2 as Hasher; pub use miden_crypto::word; pub use miden_crypto::word::{LexicographicWord, Word, WordError}; pub use protocol::ProtocolLib; @@ -66,28 +66,34 @@ pub mod utils { pub use miden_utils_sync as sync; pub mod serde { - pub use miden_core::utils::{ + pub use miden_crypto::utils::{ + BudgetedReader, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, + SliceReader, }; } } pub mod vm { pub use miden_assembly_syntax::ast::{AttributeSet, QualifiedProcedureName}; - pub use miden_core::sys_events::SystemEvent; - pub use miden_core::{AdviceMap, EventId, Program, ProgramInfo}; + pub use miden_core::advice::{AdviceInputs, AdviceMap}; + pub use miden_core::events::{EventId, SystemEvent}; + pub use miden_core::program::{Program, ProgramInfo}; pub use miden_mast_package::{ MastArtifact, Package, PackageExport, + PackageKind, PackageManifest, + ProcedureExport, Section, SectionId, }; - pub use miden_processor::{AdviceInputs, FutureMaybeSend, RowIndex, StackInputs, StackOutputs}; + pub use miden_processor::trace::RowIndex; + pub use miden_processor::{FutureMaybeSend, StackInputs, StackOutputs}; pub use miden_verifier::ExecutionProof; } diff --git a/crates/miden-protocol/src/note/assets.rs b/crates/miden-protocol/src/note/assets.rs index 260274eeaa..d8f0ec17f9 100644 --- a/crates/miden-protocol/src/note/assets.rs +++ b/crates/miden-protocol/src/note/assets.rs @@ -1,5 +1,7 @@ use alloc::vec::Vec; +use miden_crypto::SequentialCommit; + use crate::asset::{Asset, FungibleAsset, NonFungibleAsset}; use crate::errors::NoteError; use crate::utils::serde::{ @@ -9,13 +11,14 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::{Felt, Hasher, MAX_ASSETS_PER_NOTE, WORD_SIZE, Word, ZERO}; +use crate::{Felt, Hasher, 
MAX_ASSETS_PER_NOTE, WORD_SIZE, Word}; // NOTE ASSETS // ================================================================================================ + /// An asset container for a note. /// -/// A note can contain between 0 and 256 assets. No duplicates are allowed, but the order of assets +/// A note can contain between 0 and 255 assets. No duplicates are allowed, but the order of assets /// is unspecified. /// /// All the assets in a note can be reduced to a single commitment which is computed by @@ -24,7 +27,7 @@ use crate::{Felt, Hasher, MAX_ASSETS_PER_NOTE, WORD_SIZE, Word, ZERO}; #[derive(Debug, Default, Clone)] pub struct NoteAssets { assets: Vec, - hash: Word, + commitment: Word, } impl NoteAssets { @@ -60,8 +63,9 @@ impl NoteAssets { } } - let hash = compute_asset_commitment(&assets); - Ok(Self { assets, hash }) + let commitment = to_commitment(&assets); + + Ok(Self { assets, commitment }) } // PUBLIC ACCESSORS @@ -69,7 +73,7 @@ impl NoteAssets { /// Returns a commitment to the note's assets. pub fn commitment(&self) -> Word { - self.hash + self.commitment } /// Returns the number of assets. @@ -88,27 +92,8 @@ impl NoteAssets { } /// Returns all assets represented as a vector of field elements. - /// - /// The vector is padded with ZEROs so that its length is a multiple of 8. This is useful - /// because hashing the returned elements results in the note asset commitment. - pub fn to_padded_assets(&self) -> Vec { - // if we have an odd number of assets with pad with a single word. 
- let padded_len = if self.assets.len().is_multiple_of(2) { - self.assets.len() * WORD_SIZE - } else { - (self.assets.len() + 1) * WORD_SIZE - }; - - // allocate a vector to hold the padded assets - let mut padded_assets = Vec::with_capacity(padded_len * WORD_SIZE); - - // populate the vector with the assets - padded_assets.extend(self.assets.iter().flat_map(|asset| Word::from(*asset))); - - // pad with an empty word if we have an odd number of assets - padded_assets.resize(padded_len, ZERO); - - padded_assets + pub fn to_elements(&self) -> Vec { + ::to_elements(self) } /// Returns an iterator over all [`FungibleAsset`]. @@ -127,45 +112,9 @@ impl NoteAssets { }) } - // STATE MUTATORS - // -------------------------------------------------------------------------------------------- - - /// Adds the provided asset to this list of note assets. - /// - /// # Errors - /// Returns an error if: - /// - The same non-fungible asset is already in the list. - /// - A fungible asset issued by the same faucet exists in the list and adding both assets - /// together results in an invalid asset. - /// - Adding the asset to the list will push the list beyond the [Self::MAX_NUM_ASSETS] limit. 
- pub fn add_asset(&mut self, asset: Asset) -> Result<(), NoteError> { - // check if the asset issued by the faucet as the provided asset already exists in the - // list of assets - if let Some(own_asset) = self.assets.iter_mut().find(|a| a.is_same(&asset)) { - match own_asset { - Asset::Fungible(f_own_asset) => { - // if a fungible asset issued by the same faucet is found, try to add the - // the provided asset to it - let new_asset = f_own_asset - .add(asset.unwrap_fungible()) - .map_err(NoteError::AddFungibleAssetBalanceError)?; - *own_asset = Asset::Fungible(new_asset); - }, - Asset::NonFungible(nf_asset) => { - return Err(NoteError::DuplicateNonFungibleAsset(*nf_asset)); - }, - } - } else { - // if the asset is not in the list, add it to the list - self.assets.push(asset); - if self.assets.len() > Self::MAX_NUM_ASSETS { - return Err(NoteError::TooManyAssets(self.assets.len())); - } - } - - self.hash = compute_asset_commitment(&self.assets); - - Ok(()) + /// Consumes self and returns the underlying vector of assets. + pub fn into_vec(self) -> Vec { + self.assets } } @@ -177,42 +126,28 @@ impl PartialEq for NoteAssets { impl Eq for NoteAssets {} -// HELPER FUNCTIONS -// ================================================================================================ +impl SequentialCommit for NoteAssets { + type Commitment = Word; -/// Returns a commitment to a note's assets. -/// -/// The commitment is computed as a sequential hash of all assets (each asset represented by 4 -/// field elements), padded to the next multiple of 2. If the asset list is empty, a default digest -/// is returned. -fn compute_asset_commitment(assets: &[Asset]) -> Word { - if assets.is_empty() { - return Word::empty(); + /// Returns all assets represented as a vector of field elements. + fn to_elements(&self) -> Vec { + to_elements(&self.assets) } - // If we have an odd number of assets we pad the vector with 4 zero elements. 
This is to - // ensure the number of elements is a multiple of 8 - the size of the hasher rate. - let word_capacity = if assets.len().is_multiple_of(2) { - assets.len() - } else { - assets.len() + 1 - }; - let mut asset_elements = Vec::with_capacity(word_capacity * WORD_SIZE); - - for asset in assets.iter() { - // convert the asset into field elements and add them to the list elements - let asset_word: Word = (*asset).into(); - asset_elements.extend_from_slice(asset_word.as_elements()); + /// Computes the commitment to the assets. + fn to_commitment(&self) -> Self::Commitment { + to_commitment(&self.assets) } +} - // If we have an odd number of assets we pad the vector with 4 zero elements. This is to - // ensure the number of elements is a multiple of 8 - the size of the hasher rate. This - // simplifies hashing inside of the virtual machine when ingesting assets from a note. - if assets.len() % 2 == 1 { - asset_elements.extend_from_slice(Word::empty().as_elements()); - } +fn to_elements(assets: &[Asset]) -> Vec { + let mut elements = Vec::with_capacity(assets.len() * 2 * WORD_SIZE); + elements.extend(assets.iter().flat_map(Asset::as_elements)); + elements +} - Hasher::hash_elements(&asset_elements) +fn to_commitment(assets: &[Asset]) -> Word { + Hasher::hash_elements(&to_elements(assets)) } // SERIALIZATION @@ -225,12 +160,21 @@ impl Serializable for NoteAssets { target.write_u8(self.assets.len().try_into().expect("Asset number must fit into `u8`")); target.write_many(&self.assets); } + + fn get_size_hint(&self) -> usize { + // Size of the serialized asset count prefix. 
+ let u8_size = 0u8.get_size_hint(); + + let assets_size: usize = self.assets.iter().map(|asset| asset.get_size_hint()).sum(); + + u8_size + assets_size + } } impl Deserializable for NoteAssets { fn read_from(source: &mut R) -> Result { let count = source.read_u8()?; - let assets = source.read_many::(count.into())?; + let assets = source.read_many_iter::(count.into())?.collect::>()?; Self::new(assets).map_err(|e| DeserializationError::InvalidValue(format!("{e:?}"))) } } @@ -240,8 +184,7 @@ impl Deserializable for NoteAssets { #[cfg(test)] mod tests { - use super::{NoteAssets, compute_asset_commitment}; - use crate::Word; + use super::NoteAssets; use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; use crate::testing::account_id::{ @@ -250,35 +193,12 @@ mod tests { ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, }; - #[test] - fn add_asset() { - let faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); - - let asset1 = Asset::Fungible(FungibleAsset::new(faucet_id, 100).unwrap()); - let asset2 = Asset::Fungible(FungibleAsset::new(faucet_id, 50).unwrap()); - - // create empty assets - let mut assets = NoteAssets::default(); - - assert_eq!(assets.hash, Word::empty()); - - // add asset1 - assert!(assets.add_asset(asset1).is_ok()); - assert_eq!(assets.assets, vec![asset1]); - assert_eq!(assets.hash, compute_asset_commitment(&[asset1])); - - // add asset2 - assert!(assets.add_asset(asset2).is_ok()); - let expected_asset = Asset::Fungible(FungibleAsset::new(faucet_id, 150).unwrap()); - assert_eq!(assets.assets, vec![expected_asset]); - assert_eq!(assets.hash, compute_asset_commitment(&[expected_asset])); - } #[test] fn iter_fungible_asset() { let faucet_id_1 = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); let faucet_id_2 = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); let account_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET).unwrap(); - let 
details = NonFungibleAssetDetails::new(account_id.prefix(), vec![1, 2, 3]).unwrap(); + let details = NonFungibleAssetDetails::new(account_id, vec![1, 2, 3]).unwrap(); let asset1 = Asset::Fungible(FungibleAsset::new(faucet_id_1, 100).unwrap()); let asset2 = Asset::Fungible(FungibleAsset::new(faucet_id_2, 50).unwrap()); diff --git a/crates/miden-protocol/src/note/attachment.rs b/crates/miden-protocol/src/note/attachment.rs index 56a46e4b7d..fa8e567341 100644 --- a/crates/miden-protocol/src/note/attachment.rs +++ b/crates/miden-protocol/src/note/attachment.rs @@ -3,7 +3,13 @@ use alloc::vec::Vec; use crate::crypto::SequentialCommit; use crate::errors::NoteError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word}; // NOTE ATTACHMENT @@ -110,6 +116,10 @@ impl Serializable for NoteAttachment { self.attachment_scheme().write_into(target); self.content().write_into(target); } + + fn get_size_hint(&self) -> usize { + self.attachment_scheme().get_size_hint() + self.content().get_size_hint() + } } impl Deserializable for NoteAttachment { @@ -211,6 +221,19 @@ impl Serializable for NoteAttachmentContent { }, } } + + fn get_size_hint(&self) -> usize { + let kind_size = self.attachment_kind().get_size_hint(); + match self { + NoteAttachmentContent::None => kind_size, + NoteAttachmentContent::Word(word) => kind_size + word.get_size_hint(), + NoteAttachmentContent::Array(attachment_commitment) => { + kind_size + + attachment_commitment.num_elements().get_size_hint() + + attachment_commitment.elements.len() * crate::ZERO.get_size_hint() + }, + } + } } impl Deserializable for NoteAttachmentContent { @@ -225,7 +248,8 @@ impl Deserializable for NoteAttachmentContent { }, NoteAttachmentKind::Array => { let num_elements = u16::read_from(source)?; - let elements = source.read_many(num_elements as 
usize)?; + let elements = + source.read_many_iter(num_elements as usize)?.collect::>()?; Self::new_array(elements) .map_err(|err| DeserializationError::InvalidValue(err.to_string())) }, @@ -374,6 +398,10 @@ impl Serializable for NoteAttachmentScheme { fn write_into(&self, target: &mut W) { self.as_u32().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteAttachmentScheme { @@ -464,6 +492,10 @@ impl Serializable for NoteAttachmentKind { fn write_into(&self, target: &mut W) { self.as_u8().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteAttachmentKind { diff --git a/crates/miden-protocol/src/note/details.rs b/crates/miden-protocol/src/note/details.rs index f6ede73dab..14bad33b70 100644 --- a/crates/miden-protocol/src/note/details.rs +++ b/crates/miden-protocol/src/note/details.rs @@ -1,13 +1,17 @@ -use miden_processor::DeserializationError; - -use super::{NoteAssets, NoteId, NoteInputs, NoteRecipient, NoteScript, Nullifier}; +use super::{NoteAssets, NoteId, NoteRecipient, NoteScript, NoteStorage, Nullifier}; use crate::Word; -use crate::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // NOTE DETAILS // ================================================================================================ -/// Details of a note consisting of assets, script, inputs, and a serial number. +/// Details of a note consisting of assets, script, storage, and a serial number. /// /// See [super::Note] for more details. #[derive(Clone, Debug, PartialEq, Eq)] @@ -50,9 +54,9 @@ impl NoteDetails { self.recipient.script() } - /// Returns the note's recipient inputs which customizes the script's behavior. 
- pub fn inputs(&self) -> &NoteInputs { - self.recipient.inputs() + /// Returns the note's recipient storage which customizes the script's behavior. + pub fn storage(&self) -> &NoteStorage { + self.recipient.storage() } /// Returns the note's recipient. @@ -67,6 +71,14 @@ impl NoteDetails { Nullifier::from(self) } + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.recipient.minify_script(); + } + /// Decomposes note details into underlying assets and recipient. pub fn into_parts(self) -> (NoteAssets, NoteRecipient) { (self.assets, self.recipient) @@ -92,6 +104,10 @@ impl Serializable for NoteDetails { assets.write_into(target); recipient.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.assets.get_size_hint() + self.recipient.get_size_hint() + } } impl Deserializable for NoteDetails { diff --git a/crates/miden-protocol/src/note/file.rs b/crates/miden-protocol/src/note/file.rs index 62cd7ba863..44aac4ddfe 100644 --- a/crates/miden-protocol/src/note/file.rs +++ b/crates/miden-protocol/src/note/file.rs @@ -6,13 +6,17 @@ use std::{ vec::Vec, }; -#[cfg(feature = "std")] -use miden_core::utils::SliceReader; -use miden_core::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; - use super::{Note, NoteDetails, NoteId, NoteInclusionProof, NoteTag}; use crate::block::BlockNumber; +#[cfg(feature = "std")] +use crate::utils::serde::SliceReader; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; const MAGIC: &str = "note"; @@ -137,8 +141,6 @@ impl Deserializable for NoteFile { mod tests { use alloc::vec::Vec; - use miden_core::utils::{Deserializable, Serializable}; - use crate::Word; use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset}; @@ 
-148,10 +150,10 @@ mod tests { NoteAssets, NoteFile, NoteInclusionProof, - NoteInputs, NoteMetadata, NoteRecipient, NoteScript, + NoteStorage, NoteTag, NoteType, }; @@ -159,6 +161,7 @@ mod tests { ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, }; + use crate::utils::serde::{Deserializable, Serializable}; fn create_example_note() -> Note { let faucet = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); @@ -167,11 +170,11 @@ mod tests { let serial_num = Word::from([0, 1, 2, 3u32]); let script = NoteScript::mock(); - let note_inputs = NoteInputs::new(vec![target.prefix().into()]).unwrap(); - let recipient = NoteRecipient::new(serial_num, script, note_inputs); + let note_storage = NoteStorage::new(vec![target.prefix().into()]).unwrap(); + let recipient = NoteRecipient::new(serial_num, script, note_storage); let asset = Asset::Fungible(FungibleAsset::new(faucet, 100).unwrap()); - let metadata = NoteMetadata::new(faucet, NoteType::Public, NoteTag::from(123)); + let metadata = NoteMetadata::new(faucet, NoteType::Public).with_tag(NoteTag::from(123)); Note::new(NoteAssets::new(vec![asset]).unwrap(), metadata, recipient) } diff --git a/crates/miden-protocol/src/note/header.rs b/crates/miden-protocol/src/note/header.rs index 04aac21de4..f0ca1c1265 100644 --- a/crates/miden-protocol/src/note/header.rs +++ b/crates/miden-protocol/src/note/header.rs @@ -77,6 +77,10 @@ impl Serializable for NoteHeader { self.note_id.write_into(target); self.note_metadata.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.note_id.get_size_hint() + self.note_metadata.get_size_hint() + } } impl Deserializable for NoteHeader { diff --git a/crates/miden-protocol/src/note/inputs.rs b/crates/miden-protocol/src/note/inputs.rs deleted file mode 100644 index 7d7b04a0c6..0000000000 --- a/crates/miden-protocol/src/note/inputs.rs +++ /dev/null @@ -1,158 +0,0 @@ -use alloc::vec::Vec; - -use crate::errors::NoteError; -use 
crate::utils::serde::{ - ByteReader, - ByteWriter, - Deserializable, - DeserializationError, - Serializable, -}; -use crate::{Felt, Hasher, MAX_INPUTS_PER_NOTE, Word}; - -// NOTE INPUTS -// ================================================================================================ - -/// A container for note inputs. -/// -/// A note can be associated with up to 1024 input values. Each value is represented by a single -/// field element. Thus, note input values can contain up to ~8 KB of data. -/// -/// All inputs associated with a note can be reduced to a single commitment which is computed as an -/// RPO256 hash over the input elements. -#[derive(Clone, Debug)] -pub struct NoteInputs { - values: Vec, - commitment: Word, -} - -impl NoteInputs { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns [NoteInputs] instantiated from the provided values. - /// - /// # Errors - /// Returns an error if the number of provided inputs is greater than 1024. - pub fn new(values: Vec) -> Result { - if values.len() > MAX_INPUTS_PER_NOTE { - return Err(NoteError::TooManyInputs(values.len())); - } - - let commitment = Hasher::hash_elements(&values); - - Ok(Self { values, commitment }) - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns a commitment to these inputs. - pub fn commitment(&self) -> Word { - self.commitment - } - - /// Returns the number of input values. - /// - /// The returned value is guaranteed to be smaller than or equal to 1024. - pub fn num_values(&self) -> u16 { - const _: () = assert!(MAX_INPUTS_PER_NOTE <= u16::MAX as usize); - debug_assert!( - self.values.len() <= MAX_INPUTS_PER_NOTE, - "The constructor should have checked the number of inputs" - ); - self.values.len() as u16 - } - - /// Returns a reference to the input values. 
- pub fn values(&self) -> &[Felt] { - &self.values - } - - /// Returns the note's input as a vector of field elements. - pub fn to_elements(&self) -> Vec { - self.values.to_vec() - } -} - -impl Default for NoteInputs { - fn default() -> Self { - Self::new(vec![]).expect("empty values should be valid") - } -} - -impl PartialEq for NoteInputs { - fn eq(&self, other: &Self) -> bool { - let NoteInputs { values: inputs, commitment: _ } = self; - inputs == &other.values - } -} - -impl Eq for NoteInputs {} - -// CONVERSION -// ================================================================================================ - -impl From for Vec { - fn from(value: NoteInputs) -> Self { - value.values - } -} - -impl TryFrom> for NoteInputs { - type Error = NoteError; - - fn try_from(value: Vec) -> Result { - NoteInputs::new(value) - } -} - -// SERIALIZATION -// ================================================================================================ - -impl Serializable for NoteInputs { - fn write_into(&self, target: &mut W) { - let NoteInputs { values, commitment: _commitment } = self; - target.write_u16(values.len().try_into().expect("inputs len is not a u16 value")); - target.write_many(values); - } -} - -impl Deserializable for NoteInputs { - fn read_from(source: &mut R) -> Result { - let num_values = source.read_u16()? as usize; - let values = source.read_many::(num_values)?; - Self::new(values).map_err(|v| DeserializationError::InvalidValue(format!("{v}"))) - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod tests { - use miden_crypto::utils::Deserializable; - - use super::{Felt, NoteInputs, Serializable}; - - #[test] - fn test_input_ordering() { - // inputs are provided in reverse stack order - let inputs = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; - // we expect the inputs to remain in reverse stack order. 
- let expected_ordering = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; - - let note_inputs = NoteInputs::new(inputs).expect("note created should succeed"); - assert_eq!(&expected_ordering, ¬e_inputs.values); - } - - #[test] - fn test_input_serialization() { - let inputs = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; - let note_inputs = NoteInputs::new(inputs).unwrap(); - - let bytes = note_inputs.to_bytes(); - let parsed_note_inputs = NoteInputs::read_from_bytes(&bytes).unwrap(); - assert_eq!(note_inputs, parsed_note_inputs); - } -} diff --git a/crates/miden-protocol/src/note/metadata.rs b/crates/miden-protocol/src/note/metadata.rs index d1a58ceace..04c36b9c08 100644 --- a/crates/miden-protocol/src/note/metadata.rs +++ b/crates/miden-protocol/src/note/metadata.rs @@ -43,8 +43,8 @@ use crate::note::{NoteAttachment, NoteAttachmentKind, NoteAttachmentScheme}; /// The felt validity of each part of the layout is guaranteed: /// - 1st felt: The lower 8 bits of the account ID suffix are `0` by construction, so that they can /// be overwritten with other data. The suffix' most significant bit must be zero such that the -/// entire felt retains its validity even if all of its lower 8 bits are be set to `1`. So the -/// note type can be comfortably encoded. +/// entire felt retains its validity even if all of its lower 8 bits are set to `1`. So the note +/// type can be comfortably encoded. /// - 2nd felt: Is equivalent to the prefix of the account ID so it inherits its validity. /// - 3rd felt: The upper 32 bits are always zero. /// - 4th felt: The upper 30 bits are always zero. @@ -77,15 +77,50 @@ impl NoteMetadata { // -------------------------------------------------------------------------------------------- /// Returns a new [`NoteMetadata`] instantiated with the specified parameters. - pub fn new(sender: AccountId, note_type: NoteType, tag: NoteTag) -> Self { + /// + /// The tag defaults to [`NoteTag::default()`]. 
Use [`NoteMetadata::with_tag`] to set a + /// specific tag if needed. + pub fn new(sender: AccountId, note_type: NoteType) -> Self { Self { sender, note_type, - tag, + tag: NoteTag::default(), attachment: NoteAttachment::default(), } } + /// Reconstructs a [`NoteMetadata`] from a [`NoteMetadataHeader`] and a + /// [`NoteAttachment`]. + /// + /// # Errors + /// + /// Returns an error if the attachment's kind or scheme do not match those in the header. + pub fn try_from_header( + header: NoteMetadataHeader, + attachment: NoteAttachment, + ) -> Result { + if header.attachment_kind != attachment.attachment_kind() { + return Err(NoteError::AttachmentKindMismatch { + header_kind: header.attachment_kind, + attachment_kind: attachment.attachment_kind(), + }); + } + + if header.attachment_scheme != attachment.attachment_scheme() { + return Err(NoteError::AttachmentSchemeMismatch { + header_scheme: header.attachment_scheme, + attachment_scheme: attachment.attachment_scheme(), + }); + } + + Ok(Self { + sender: header.sender, + note_type: header.note_type, + tag: header.tag, + attachment, + }) + } + // ACCESSORS // -------------------------------------------------------------------------------------------- @@ -117,7 +152,7 @@ impl NoteMetadata { /// Returns the header of a [`NoteMetadata`] as a [`Word`]. /// /// See [`NoteMetadata`] docs for more details. - fn to_header(&self) -> NoteMetadataHeader { + pub fn to_header(&self) -> NoteMetadataHeader { NoteMetadataHeader { sender: self.sender, note_type: self.note_type, @@ -153,12 +188,27 @@ impl NoteMetadata { // MUTATORS // -------------------------------------------------------------------------------------------- - /// Overwrites the note's attachment with the provided one. + /// Mutates the note's tag by setting it to the provided value. + pub fn set_tag(&mut self, tag: NoteTag) { + self.tag = tag; + } + + /// Returns a new [`NoteMetadata`] with the tag set to the provided value. 
+ /// + /// This is a builder method that consumes self and returns a new instance for method chaining. + pub fn with_tag(mut self, tag: NoteTag) -> Self { + self.tag = tag; + self + } + + /// Mutates the note's attachment by setting it to the provided value. pub fn set_attachment(&mut self, attachment: NoteAttachment) { self.attachment = attachment; } - /// Overwrites the note's attachment with the provided one. + /// Returns a new [`NoteMetadata`] with the attachment set to the provided value. + /// + /// This is a builder method that consumes self and returns a new instance for method chaining. pub fn with_attachment(mut self, attachment: NoteAttachment) -> Self { self.attachment = attachment; self @@ -175,6 +225,13 @@ impl Serializable for NoteMetadata { self.tag().write_into(target); self.attachment().write_into(target); } + + fn get_size_hint(&self) -> usize { + self.note_type().get_size_hint() + + self.sender().get_size_hint() + + self.tag().get_size_hint() + + self.attachment().get_size_hint() + } } impl Deserializable for NoteMetadata { @@ -184,7 +241,7 @@ impl Deserializable for NoteMetadata { let tag = NoteTag::read_from(source)?; let attachment = NoteAttachment::read_from(source)?; - Ok(NoteMetadata::new(sender, note_type, tag).with_attachment(attachment)) + Ok(NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment)) } } @@ -194,10 +251,8 @@ impl Deserializable for NoteMetadata { /// The header representation of [`NoteMetadata`]. /// /// See the metadata's type for details on this type's [`Word`] layout. -/// -/// This is intended to be a private type meant for encapsulating the conversion from and to words. 
#[derive(Clone, Copy, Debug, Eq, PartialEq)] -struct NoteMetadataHeader { +pub struct NoteMetadataHeader { sender: AccountId, note_type: NoteType, tag: NoteTag, @@ -205,6 +260,36 @@ struct NoteMetadataHeader { attachment_scheme: NoteAttachmentScheme, } +impl NoteMetadataHeader { + // ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the account which created the note. + pub fn sender(&self) -> AccountId { + self.sender + } + + /// Returns the note's type. + pub fn note_type(&self) -> NoteType { + self.note_type + } + + /// Returns the tag associated with the note. + pub fn tag(&self) -> NoteTag { + self.tag + } + + /// Returns the attachment kind. + pub fn attachment_kind(&self) -> NoteAttachmentKind { + self.attachment_kind + } + + /// Returns the attachment scheme. + pub fn attachment_scheme(&self) -> NoteAttachmentScheme { + self.attachment_scheme + } +} + impl From for Word { fn from(header: NoteMetadataHeader) -> Self { let mut metadata = Word::empty(); @@ -226,14 +311,18 @@ impl TryFrom for NoteMetadataHeader { fn try_from(word: Word) -> Result { let (sender_suffix, note_type) = unmerge_sender_suffix_and_note_type(word[0])?; let sender_prefix = word[1]; - let tag = u32::try_from(word[2]).map(NoteTag::new).map_err(|_| { + let tag = u32::try_from(word[2].as_canonical_u64()).map(NoteTag::new).map_err(|_| { NoteError::other("failed to convert note tag from metadata header to u32") })?; let (attachment_kind, attachment_scheme) = unmerge_attachment_kind_scheme(word[3])?; - let sender = AccountId::try_from([sender_prefix, sender_suffix]).map_err(|source| { - NoteError::other_with_source("failed to decode account ID from metadata header", source) - })?; + let sender = + AccountId::try_from_elements(sender_suffix, sender_prefix).map_err(|source| { + NoteError::other_with_source( + "failed to decode account ID from metadata header", + source, + ) + })?; Ok(Self { sender, @@ -261,7 +350,7 @@ impl 
TryFrom for NoteMetadataHeader { /// /// The `sender_id_suffix` is the suffix of the sender's account ID. fn merge_sender_suffix_and_note_type(sender_id_suffix: Felt, note_type: NoteType) -> Felt { - let mut merged = sender_id_suffix.as_int(); + let mut merged = sender_id_suffix.as_canonical_u64(); let note_type_byte = note_type as u8; debug_assert!(note_type_byte < 4, "note type must not contain values >= 4"); @@ -278,14 +367,14 @@ fn unmerge_sender_suffix_and_note_type(element: Felt) -> Result<(Felt, NoteType) // Inverts the note type mask. const SENDER_SUFFIX_MASK: u64 = !(NOTE_TYPE_MASK as u64); - let note_type_byte = element.as_int() as u8 & NOTE_TYPE_MASK; + let note_type_byte = element.as_canonical_u64() as u8 & NOTE_TYPE_MASK; let note_type = NoteType::try_from(note_type_byte).map_err(|source| { NoteError::other_with_source("failed to decode note type from metadata header", source) })?; // No bits were set so felt should still be valid. - let sender_suffix = - Felt::try_from(element.as_int() & SENDER_SUFFIX_MASK).expect("felt should still be valid"); + let sender_suffix = Felt::try_from(element.as_canonical_u64() & SENDER_SUFFIX_MASK) + .expect("felt should still be valid"); Ok((sender_suffix, note_type)) } @@ -313,8 +402,8 @@ fn merge_attachment_kind_scheme( fn unmerge_attachment_kind_scheme( element: Felt, ) -> Result<(NoteAttachmentKind, NoteAttachmentScheme), NoteError> { - let attachment_scheme = element.as_int() as u32; - let attachment_kind = (element.as_int() >> 32) as u8; + let attachment_scheme = element.as_canonical_u64() as u32; + let attachment_kind = (element.as_canonical_u64() >> 32) as u8; let attachment_scheme = NoteAttachmentScheme::new(attachment_scheme); let attachment_kind = NoteAttachmentKind::try_from(attachment_kind).map_err(|source| { @@ -351,7 +440,8 @@ mod tests { let sender = AccountId::try_from(ACCOUNT_ID_MAX_ONES).unwrap(); let note_type = NoteType::Public; let tag = NoteTag::new(u32::MAX); - let metadata = 
NoteMetadata::new(sender, note_type, tag).with_attachment(attachment); + let metadata = + NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment); // Serialization Roundtrip let deserialized = NoteMetadata::read_from_bytes(&metadata.to_bytes())?; diff --git a/crates/miden-protocol/src/note/mod.rs b/crates/miden-protocol/src/note/mod.rs index 27aeda9a54..f209a07a71 100644 --- a/crates/miden-protocol/src/note/mod.rs +++ b/crates/miden-protocol/src/note/mod.rs @@ -1,10 +1,15 @@ use miden_crypto::Word; -use miden_crypto::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; -use miden_processor::DeserializationError; use crate::account::AccountId; use crate::errors::NoteError; -use crate::{Felt, Hasher, WORD_SIZE, ZERO}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, ZERO}; mod assets; pub use assets::NoteAssets; @@ -15,11 +20,11 @@ pub use details::NoteDetails; mod header; pub use header::{NoteHeader, compute_note_commitment}; -mod inputs; -pub use inputs::NoteInputs; +mod storage; +pub use storage::NoteStorage; mod metadata; -pub use metadata::NoteMetadata; +pub use metadata::{NoteMetadata, NoteMetadataHeader}; mod attachment; pub use attachment::{ @@ -30,9 +35,6 @@ pub use attachment::{ NoteAttachmentScheme, }; -mod execution_hint; -pub use execution_hint::NoteExecutionHint; - mod note_id; pub use note_id::NoteId; @@ -68,7 +70,7 @@ pub use file::NoteFile; /// /// Notes consist of note metadata and details. Note metadata is always public, but details may be /// either public, encrypted, or private, depending on the note type. Note details consist of note -/// assets, script, inputs, and a serial number, the three latter grouped into a recipient object. +/// assets, script, storage, and a serial number, the three latter grouped into a recipient object. /// /// Note details can be reduced to two unique identifiers: [NoteId] and [Nullifier]. 
The former is /// publicly associated with a note, while the latter is known only to entities which have access @@ -78,10 +80,10 @@ pub use file::NoteFile; /// note's script determines the conditions required for the note consumption, i.e. the target /// account of a P2ID or conditions of a SWAP, and the effects of the note. The serial number has /// a double duty of preventing double spend, and providing unlikability to the consumer of a note. -/// The note's inputs allow for customization of its script. +/// The note's storage allows for customization of its script. /// /// To create a note, the kernel does not require all the information above, a user can create a -/// note only with the commitment to the script, inputs, the serial number (i.e., the recipient), +/// note only with the commitment to the script, storage, the serial number (i.e., the recipient), /// and the kernel only verifies the source account has the assets necessary for the note creation. /// See [NoteRecipient] for more details. #[derive(Clone, Debug, PartialEq, Eq)] @@ -140,9 +142,9 @@ impl Note { self.details.script() } - /// Returns the note's recipient inputs which customizes the script's behavior. - pub fn inputs(&self) -> &NoteInputs { - self.details.inputs() + /// Returns the note's recipient storage which customizes the script's behavior. + pub fn storage(&self) -> &NoteStorage { + self.details.storage() } /// Returns the note's recipient. @@ -166,6 +168,21 @@ impl Note { pub fn commitment(&self) -> Word { self.header.commitment() } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.details.minify_script(); + } + + /// Consumes self and returns the underlying parts of the [`Note`]. 
+ pub fn into_parts(self) -> (NoteAssets, NoteMetadata, NoteRecipient) { + let (assets, recipient) = self.details.into_parts(); + let metadata = self.header.into_metadata(); + (assets, metadata, recipient) + } } // AS REF @@ -222,6 +239,10 @@ impl Serializable for Note { header.metadata().write_into(target); details.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.header.metadata().get_size_hint() + self.details.get_size_hint() + } } impl Deserializable for Note { diff --git a/crates/miden-protocol/src/note/note_id.rs b/crates/miden-protocol/src/note/note_id.rs index 054ca5a564..343285a81e 100644 --- a/crates/miden-protocol/src/note/note_id.rs +++ b/crates/miden-protocol/src/note/note_id.rs @@ -24,7 +24,7 @@ use crate::utils::serde::{ /// /// where `recipient` is defined as: /// -/// > hash(hash(hash(serial_num, ZERO), script_root), input_commitment) +/// > hash(hash(hash(serial_num, ZERO), script_root), storage_commitment) /// /// This achieves the following properties: /// - Every note can be reduced to a single unique ID. @@ -71,6 +71,10 @@ impl Serializable for NoteId { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0.to_bytes()); } + + fn get_size_hint(&self) -> usize { + Word::SERIALIZED_SIZE + } } impl Deserializable for NoteId { diff --git a/crates/miden-protocol/src/note/note_tag.rs b/crates/miden-protocol/src/note/note_tag.rs index 3c1ee2242a..2611f6988b 100644 --- a/crates/miden-protocol/src/note/note_tag.rs +++ b/crates/miden-protocol/src/note/note_tag.rs @@ -11,8 +11,6 @@ use super::{ NoteError, Serializable, }; -use crate::account::AccountStorageMode; - // NOTE TAG // ================================================================================================ @@ -66,11 +64,9 @@ impl NoteTag { // -------------------------------------------------------------------------------------------- /// The default note tag length for an account ID with local execution. 
- pub const DEFAULT_LOCAL_ACCOUNT_TARGET_TAG_LENGTH: u8 = 14; - /// The default note tag length for an account ID with network execution. - pub const DEFAULT_NETWORK_ACCOUNT_TARGET_TAG_LENGTH: u8 = 30; + pub const DEFAULT_ACCOUNT_TARGET_TAG_LENGTH: u8 = 14; /// The maximum number of bits that can be encoded into the tag for local accounts. - pub const MAX_ACCOUNT_TARGET_TAG_LENGTH: u8 = 30; + pub const MAX_ACCOUNT_TARGET_TAG_LENGTH: u8 = 32; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -82,35 +78,17 @@ impl NoteTag { /// Constructs a note tag that targets the given `account_id`. /// - /// The tag is constructed as follows: - /// - /// - For local execution ([`AccountStorageMode::Private`] or [`AccountStorageMode::Public`]), - /// the two most significant bits are set to `0b00`. The following 14 bits are set to the most - /// significant bits of the account ID, and the remaining 16 bits are set to 0. - /// - For network execution ([`AccountStorageMode::Network`]), the most significant bits are set - /// to `0b00` and the remaining bits are set to the 30 most significant bits of the account - /// ID. + /// The tag is a u32 constructed by taking the [`NoteTag::DEFAULT_ACCOUNT_TARGET_TAG_LENGTH`] + /// most significant bits of the account ID prefix and setting the remaining bits to zero. 
pub fn with_account_target(account_id: AccountId) -> Self { - match account_id.storage_mode() { - AccountStorageMode::Network => Self::from_network_account_id(account_id), - AccountStorageMode::Private | AccountStorageMode::Public => { - // safe to unwrap since DEFAULT_LOCAL_ACCOUNT_TARGET_TAG_LENGTH < - // MAX_ACCOUNT_TARGET_TAG_LENGTH - Self::with_custom_account_target( - account_id, - Self::DEFAULT_LOCAL_ACCOUNT_TARGET_TAG_LENGTH, - ) - .unwrap() - }, - } + Self::with_custom_account_target(account_id, Self::DEFAULT_ACCOUNT_TARGET_TAG_LENGTH) + .expect("default account target tag length must be valid") } /// Constructs a note tag that targets the given `account_id` with a custom `tag_len`. /// - /// The tag is constructed by: - /// - Setting the two most significant bits to zero. - /// - The next `tag_len` bits are set to the most significant bits of the account ID prefix. - /// - The remaining bits are set to zero. + /// The tag is a u32 constructed by taking the `tag_len` most significant bits of the account ID + /// prefix and setting the remaining bits to zero. /// /// # Errors /// @@ -123,39 +101,13 @@ impl NoteTag { return Err(NoteError::NoteTagLengthTooLarge(tag_len)); } - let prefix_id: u64 = account_id.prefix().into(); - - // Shift the high bits of the account ID such that they are laid out as: - // [34 zero bits | remaining high bits (30 bits)]. - let high_bits = prefix_id >> 34; - - // This is equivalent to the following layout, interpreted as a u32: - // [2 zero bits | remaining high bits (30 bits)]. - let high_bits = high_bits as u32; - - // Select the top `tag_len` bits of the account ID, i.e.: - // [2 zero bits | remaining high bits (tag_len bits) | (30 - tag_len) zero bits]. - let high_bits = high_bits & (u32::MAX << (32 - 2 - tag_len)); - - Ok(Self(high_bits)) - } - - /// Constructs a network account note tag from the specified `account_id`. 
- /// - /// The tag is constructed as follows: - /// - /// - The two most significant bits are set to `0b00`. - /// - The remaining bits are set to the 30 most significant bits of the account ID. - pub(crate) fn from_network_account_id(account_id: AccountId) -> Self { - let prefix_id: u64 = account_id.prefix().into(); - - // Shift the high bits of the account ID such that they are laid out as: - // [34 zero bits | remaining high bits (30 bits)]. - let high_bits = prefix_id >> 34; - - // This is equivalent to the following layout, interpreted as a u32: - // [2 zero bits | remaining high bits (30 bits)]. - Self(high_bits as u32) + let prefix = account_id.prefix().as_u64(); + // Get the high bits as a u32. + let high_bits = (prefix >> 32) as u32; + // Create a mask that zeros out the lower 32 - len bits. + let mask = u32::MAX.checked_shl(u32::BITS - tag_len as u32).unwrap_or(0); + let tag = high_bits & mask; + Ok(Self(tag)) } // PUBLIC ACCESSORS @@ -204,6 +156,10 @@ impl Serializable for NoteTag { fn write_into(&self, target: &mut W) { self.as_u32().write_into(target); } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteTag { @@ -220,7 +176,7 @@ impl Deserializable for NoteTag { mod tests { use super::NoteTag; - use crate::account::AccountId; + use crate::account::{AccountId, AccountStorageMode}; use crate::testing::account_id::{ ACCOUNT_ID_NETWORK_FUNGIBLE_FAUCET, ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET, @@ -240,6 +196,7 @@ mod tests { ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE_ON_CHAIN_2, ACCOUNT_ID_SENDER, + AccountIdBuilder, }; #[test] @@ -250,6 +207,9 @@ mod tests { AccountId::try_from(ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE).unwrap(), AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(), AccountId::try_from(ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET).unwrap(), + AccountIdBuilder::new() + .storage_mode(AccountStorageMode::Private) + 
.build_with_seed([2; 32]), ]; let public_accounts = [ AccountId::try_from(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE).unwrap(), @@ -263,48 +223,46 @@ mod tests { AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_3).unwrap(), AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET).unwrap(), AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1).unwrap(), + AccountIdBuilder::new() + .storage_mode(AccountStorageMode::Public) + .build_with_seed([3; 32]), ]; let network_accounts = [ AccountId::try_from(ACCOUNT_ID_REGULAR_NETWORK_ACCOUNT_IMMUTABLE_CODE).unwrap(), AccountId::try_from(ACCOUNT_ID_NETWORK_FUNGIBLE_FAUCET).unwrap(), AccountId::try_from(ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET).unwrap(), + AccountIdBuilder::new() + .storage_mode(AccountStorageMode::Network) + .build_with_seed([4; 32]), ]; - for account_id in private_accounts.iter().chain(public_accounts.iter()) { + for account_id in private_accounts + .iter() + .chain(public_accounts.iter()) + .chain(network_accounts.iter()) + { let tag = NoteTag::with_account_target(*account_id); - assert_eq!(tag.as_u32() >> 30, 0, "two most significant bits should be zero"); assert_eq!(tag.as_u32() << 16, 0, "16 least significant bits should be zero"); - assert_eq!( - (account_id.prefix().as_u64() >> 50) as u32, - tag.as_u32() >> 16, - "14 most significant bits should match" - ); - } + let expected = ((account_id.prefix().as_u64() >> 32) as u32) >> 16; + let actual = tag.as_u32() >> 16; - for account_id in network_accounts { - let tag = NoteTag::with_account_target(account_id); - assert_eq!(tag.as_u32() >> 30, 0, "two most significant bits should be zero"); - assert_eq!( - account_id.prefix().as_u64() >> 34, - tag.as_u32() as u64, - "30 most significant bits should match" - ); + assert_eq!(actual, expected, "14 most significant bits should match"); } } #[test] fn from_custom_account_target() -> anyhow::Result<()> { let account_id = AccountId::try_from(ACCOUNT_ID_SENDER)?; + let tag = 
NoteTag::with_custom_account_target( account_id, NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH, )?; - assert_eq!(tag.as_u32() >> 30, 0, "two most significant bits should be zero"); assert_eq!( - (account_id.prefix().as_u64() >> 34) as u32, + (account_id.prefix().as_u64() >> 32) as u32, tag.as_u32(), - "30 most significant bits should match" + "32 most significant bits should match" ); Ok(()) diff --git a/crates/miden-protocol/src/note/note_type.rs b/crates/miden-protocol/src/note/note_type.rs index f426ea58ab..d72b953f86 100644 --- a/crates/miden-protocol/src/note/note_type.rs +++ b/crates/miden-protocol/src/note/note_type.rs @@ -11,14 +11,6 @@ use crate::utils::serde::{ Serializable, }; -// CONSTANTS -// ================================================================================================ - -// Keep these masks in sync with `miden-lib/asm/miden/kernels/tx/tx.masm` -const PUBLIC: u8 = 0b01; -const PRIVATE: u8 = 0b10; -const ENCRYPTED: u8 = 0b11; - // NOTE TYPE // ================================================================================================ @@ -26,13 +18,16 @@ const ENCRYPTED: u8 = 0b11; #[repr(u8)] pub enum NoteType { /// Notes with this type have only their hash published to the network. - Private = PRIVATE, - - /// Notes with this type are shared with the network encrypted. - Encrypted = ENCRYPTED, + Private = Self::PRIVATE, /// Notes with this type are fully shared with the network. 
- Public = PUBLIC, + Public = Self::PUBLIC, +} + +impl NoteType { + // Keep these masks in sync with `miden-lib/asm/miden/kernels/tx/tx.masm` + pub const PUBLIC: u8 = 0b01; + pub const PRIVATE: u8 = 0b10; } // CONVERSIONS FROM NOTE TYPE @@ -52,9 +47,8 @@ impl TryFrom for NoteType { fn try_from(value: u8) -> Result { match value { - PRIVATE => Ok(NoteType::Private), - ENCRYPTED => Ok(NoteType::Encrypted), - PUBLIC => Ok(NoteType::Public), + Self::PRIVATE => Ok(NoteType::Private), + Self::PUBLIC => Ok(NoteType::Public), _ => Err(NoteError::UnknownNoteType(format!("0b{value:b}").into())), } } @@ -91,7 +85,7 @@ impl TryFrom for NoteType { type Error = NoteError; fn try_from(value: Felt) -> Result { - value.as_int().try_into() + value.as_canonical_u64().try_into() } } @@ -101,7 +95,6 @@ impl FromStr for NoteType { fn from_str(s: &str) -> Result { match s { "private" => Ok(NoteType::Private), - "encrypted" => Ok(NoteType::Encrypted), "public" => Ok(NoteType::Public), _ => Err(NoteError::UnknownNoteType(s.into())), } @@ -115,6 +108,10 @@ impl Serializable for NoteType { fn write_into(&self, target: &mut W) { (*self as u8).write_into(target) } + + fn get_size_hint(&self) -> usize { + core::mem::size_of::() + } } impl Deserializable for NoteType { @@ -122,9 +119,8 @@ impl Deserializable for NoteType { let discriminant = u8::read_from(source)?; let note_type = match discriminant { - PRIVATE => NoteType::Private, - ENCRYPTED => NoteType::Encrypted, - PUBLIC => NoteType::Public, + NoteType::PRIVATE => NoteType::Private, + NoteType::PUBLIC => NoteType::Public, discriminant => { return Err(DeserializationError::InvalidValue(format!( "discriminant {discriminant} is not a valid NoteType" @@ -143,7 +139,6 @@ impl Display for NoteType { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { match self { NoteType::Private => write!(f, "private"), - NoteType::Encrypted => write!(f, "encrypted"), NoteType::Public => write!(f, "public"), } } @@ -155,7 +150,7 @@ fn 
test_from_str_note_type() { use crate::alloc::string::ToString; - for string in ["private", "public", "encrypted"] { + for string in ["private", "public"] { let parsed_note_type = NoteType::from_str(string).unwrap(); assert_eq!(parsed_note_type.to_string(), string); } @@ -163,9 +158,6 @@ fn test_from_str_note_type() { let public_type_invalid_err = NoteType::from_str("puBlIc").unwrap_err(); assert_matches!(public_type_invalid_err, NoteError::UnknownNoteType(_)); - let encrypted_type_invalid = NoteType::from_str("eNcrYptEd").unwrap_err(); - assert_matches!(encrypted_type_invalid, NoteError::UnknownNoteType(_)); - let invalid_type = NoteType::from_str("invalid").unwrap_err(); assert_matches!(invalid_type, NoteError::UnknownNoteType(_)); } diff --git a/crates/miden-protocol/src/note/nullifier.rs b/crates/miden-protocol/src/note/nullifier.rs index 26377caa8a..2f728b4123 100644 --- a/crates/miden-protocol/src/note/nullifier.rs +++ b/crates/miden-protocol/src/note/nullifier.rs @@ -1,6 +1,7 @@ use alloc::string::String; use core::fmt::{Debug, Display, Formatter}; +use miden_core::WORD_SIZE; use miden_crypto::WordError; use miden_protocol_macros::WordWrapper; @@ -13,7 +14,6 @@ use super::{ Hasher, NoteDetails, Serializable, - WORD_SIZE, Word, ZERO, }; @@ -30,13 +30,13 @@ const NULLIFIER_PREFIX_SHIFT: u8 = 48; /// /// A note's nullifier is computed as: /// -/// > hash(serial_num, script_root, input_commitment, asset_commitment). +/// > hash(serial_num, script_root, storage_commitment, asset_commitment). /// /// This achieves the following properties: /// - Every note can be reduced to a single unique nullifier. /// - We cannot derive a note's commitment from its nullifier, or a note's nullifier from its hash. /// - To compute the nullifier we must know all components of the note: serial_num, script_root, -/// input_commitment and asset_commitment. +/// storage_commitment and asset_commitment. 
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, WordWrapper)] pub struct Nullifier(Word); @@ -44,14 +44,14 @@ impl Nullifier { /// Returns a new note [Nullifier] instantiated from the provided digest. pub fn new( script_root: Word, - inputs_commitment: Word, + storage_commitment: Word, asset_commitment: Word, serial_num: Word, ) -> Self { let mut elements = [ZERO; 4 * WORD_SIZE]; elements[..4].copy_from_slice(serial_num.as_elements()); elements[4..8].copy_from_slice(script_root.as_elements()); - elements[8..12].copy_from_slice(inputs_commitment.as_elements()); + elements[8..12].copy_from_slice(storage_commitment.as_elements()); elements[12..].copy_from_slice(asset_commitment.as_elements()); Self(Hasher::hash_elements(&elements)) } @@ -65,7 +65,7 @@ impl Nullifier { /// /// Nullifier prefix is defined as the 16 most significant bits of the nullifier value. pub fn prefix(&self) -> u16 { - (self.as_word()[3].as_int() >> NULLIFIER_PREFIX_SHIFT) as u16 + (self.as_word()[3].as_canonical_u64() >> NULLIFIER_PREFIX_SHIFT) as u16 } /// Creates a Nullifier from a hex string. 
Assumes that the string starts with "0x" and @@ -78,8 +78,6 @@ impl Nullifier { #[cfg(any(feature = "testing", test))] pub fn dummy(n: u64) -> Self { - use miden_core::FieldElement; - Self(Word::new([Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::new(n)])) } } @@ -103,7 +101,7 @@ impl From<&NoteDetails> for Nullifier { fn from(note: &NoteDetails) -> Self { Self::new( note.script().root(), - note.inputs().commitment(), + note.storage().commitment(), note.assets().commitment(), note.serial_num(), ) @@ -117,6 +115,10 @@ impl Serializable for Nullifier { fn write_into(&self, target: &mut W) { target.write_bytes(&self.0.to_bytes()); } + + fn get_size_hint(&self) -> usize { + Word::SERIALIZED_SIZE + } } impl Deserializable for Nullifier { diff --git a/crates/miden-protocol/src/note/partial.rs b/crates/miden-protocol/src/note/partial.rs index 03553c3d2e..2cfd911c6a 100644 --- a/crates/miden-protocol/src/note/partial.rs +++ b/crates/miden-protocol/src/note/partial.rs @@ -18,7 +18,7 @@ use crate::Word; /// /// Partial note consists of [NoteMetadata], [NoteAssets], and a recipient digest (see /// [super::NoteRecipient]). However, it does not contain detailed recipient info, including -/// note script, note inputs, and note's serial number. This means that a partial note is +/// note script, note storage, and note's serial number. This means that a partial note is /// sufficient to compute note ID and note header, but not sufficient to compute note nullifier, /// and generally does not have enough info to execute the note. 
#[derive(Debug, Clone, PartialEq, Eq)] @@ -75,6 +75,10 @@ impl Serializable for PartialNote { self.recipient_digest.write_into(target); self.assets.write_into(target) } + + fn get_size_hint(&self) -> usize { + self.metadata().get_size_hint() + Word::SERIALIZED_SIZE + self.assets.get_size_hint() + } } impl Deserializable for PartialNote { diff --git a/crates/miden-protocol/src/note/recipient.rs b/crates/miden-protocol/src/note/recipient.rs index 78cc247bf3..36e20db762 100644 --- a/crates/miden-protocol/src/note/recipient.rs +++ b/crates/miden-protocol/src/note/recipient.rs @@ -6,8 +6,8 @@ use super::{ Deserializable, DeserializationError, Hasher, - NoteInputs, NoteScript, + NoteStorage, Serializable, Word, }; @@ -16,24 +16,24 @@ use super::{ /// /// The recipient is not an account address, instead it is a value that describes when a note /// can be consumed. Because not all notes have predetermined consumer addresses, e.g. swap -/// notes can be consumed by anyone, the recipient is defined as the code and its inputs, that +/// notes can be consumed by anyone, the recipient is defined as the code and its storage, that /// when successfully executed results in the note's consumption. 
/// /// Recipient is computed as: /// -/// > hash(hash(hash(serial_num, [0; 4]), script_root), input_commitment) +/// > hash(hash(hash(serial_num, [0; 4]), script_root), storage_commitment) #[derive(Clone, Debug, PartialEq, Eq)] pub struct NoteRecipient { serial_num: Word, script: NoteScript, - inputs: NoteInputs, + storage: NoteStorage, digest: Word, } impl NoteRecipient { - pub fn new(serial_num: Word, script: NoteScript, inputs: NoteInputs) -> Self { - let digest = compute_recipient_digest(serial_num, &script, &inputs); - Self { serial_num, script, inputs, digest } + pub fn new(serial_num: Word, script: NoteScript, storage: NoteStorage) -> Self { + let digest = compute_recipient_digest(serial_num, &script, &storage); + Self { serial_num, script, storage, digest } } // PUBLIC ACCESSORS @@ -49,9 +49,9 @@ impl NoteRecipient { &self.script } - /// The recipient's inputs which customizes the script's behavior. - pub fn inputs(&self) -> &NoteInputs { - &self.inputs + /// The recipient's storage which customizes the script's behavior. + pub fn storage(&self) -> &NoteStorage { + &self.storage } /// The recipient's digest, which commits to its details. @@ -60,12 +60,25 @@ impl NoteRecipient { pub fn digest(&self) -> Word { self.digest } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Reduces the size of the note script by stripping all debug info from it. + pub fn minify_script(&mut self) { + self.script.clear_debug_info(); + } + + /// Consumes self and returns the underlying parts of the [`NoteRecipient`]. 
+ pub fn into_parts(self) -> (Word, NoteScript, NoteStorage) { + (self.serial_num, self.script, self.storage) + } } -fn compute_recipient_digest(serial_num: Word, script: &NoteScript, inputs: &NoteInputs) -> Word { +fn compute_recipient_digest(serial_num: Word, script: &NoteScript, storage: &NoteStorage) -> Word { let serial_num_hash = Hasher::merge(&[serial_num, Word::empty()]); let merge_script = Hasher::merge(&[serial_num_hash, script.root()]); - Hasher::merge(&[merge_script, inputs.commitment()]) + Hasher::merge(&[merge_script, storage.commitment()]) } // SERIALIZATION @@ -75,7 +88,7 @@ impl Serializable for NoteRecipient { fn write_into(&self, target: &mut W) { let Self { script, - inputs, + storage, serial_num, // These attributes don't have to be serialized, they can be re-computed from the rest @@ -84,17 +97,21 @@ impl Serializable for NoteRecipient { } = self; script.write_into(target); - inputs.write_into(target); + storage.write_into(target); serial_num.write_into(target); } + + fn get_size_hint(&self) -> usize { + self.script.get_size_hint() + self.storage.get_size_hint() + Word::SERIALIZED_SIZE + } } impl Deserializable for NoteRecipient { fn read_from(source: &mut R) -> Result { let script = NoteScript::read_from(source)?; - let inputs = NoteInputs::read_from(source)?; + let storage = NoteStorage::read_from(source)?; let serial_num = Word::read_from(source)?; - Ok(Self::new(serial_num, script, inputs)) + Ok(Self::new(serial_num, script, storage)) } } diff --git a/crates/miden-protocol/src/note/script.rs b/crates/miden-protocol/src/note/script.rs index eb11c82e0b..5e0266b08e 100644 --- a/crates/miden-protocol/src/note/script.rs +++ b/crates/miden-protocol/src/note/script.rs @@ -1,11 +1,14 @@ +use alloc::string::ToString; use alloc::sync::Arc; use alloc::vec::Vec; use core::fmt::Display; +use core::num::TryFromIntError; -use miden_processor::MastNodeExt; +use miden_core::mast::MastNodeExt; use super::Felt; -use crate::assembly::mast::{MastForest, 
MastNodeId}; +use crate::assembly::mast::{ExternalNodeBuilder, MastForest, MastForestContributor, MastNodeId}; +use crate::assembly::{Library, Path}; use crate::errors::NoteError; use crate::utils::serde::{ ByteReader, @@ -14,9 +17,12 @@ use crate::utils::serde::{ DeserializationError, Serializable, }; -use crate::vm::Program; +use crate::vm::{AdviceMap, Program}; use crate::{PrettyPrint, Word}; +/// The attribute name used to mark the entrypoint procedure in a note script library. +const NOTE_SCRIPT_ATTRIBUTE: &str = "note_script"; + // NOTE SCRIPT // ================================================================================================ @@ -59,6 +65,80 @@ impl NoteScript { Self { mast, entrypoint } } + /// Returns a new [NoteScript] instantiated from the provided library. + /// + /// The library must contain exactly one procedure with the `@note_script` attribute, + /// which will be used as the entrypoint. + /// + /// # Errors + /// Returns an error if: + /// - The library does not contain a procedure with the `@note_script` attribute. + /// - The library contains multiple procedures with the `@note_script` attribute. + pub fn from_library(library: &Library) -> Result { + let mut entrypoint = None; + + for export in library.exports() { + if let Some(proc_export) = export.as_procedure() { + // Check for @note_script attribute + if proc_export.attributes.has(NOTE_SCRIPT_ATTRIBUTE) { + if entrypoint.is_some() { + return Err(NoteError::NoteScriptMultipleProceduresWithAttribute); + } + entrypoint = Some(proc_export.node); + } + } + } + + let entrypoint = entrypoint.ok_or(NoteError::NoteScriptNoProcedureWithAttribute)?; + + Ok(Self { + mast: library.mast_forest().clone(), + entrypoint, + }) + } + + /// Returns a new [NoteScript] containing only a reference to a procedure in the provided + /// library. 
+ /// + /// This method is useful when a library contains multiple note scripts and you need to + /// extract a specific one by its fully qualified path (e.g., + /// `miden::standards::notes::burn::main`). + /// + /// The procedure at the specified path must have the `@note_script` attribute. + /// + /// Note: This method creates a minimal [MastForest] containing only an external node + /// referencing the procedure's digest, rather than copying the entire library. The actual + /// procedure code will be resolved at runtime via the `MastForestStore`. + /// + /// # Errors + /// Returns an error if: + /// - The library does not contain a procedure at the specified path. + /// - The procedure at the specified path does not have the `@note_script` attribute. + pub fn from_library_reference(library: &Library, path: &Path) -> Result { + // Find the export matching the path + let export = library + .exports() + .find(|e| e.path().as_ref() == path) + .ok_or_else(|| NoteError::NoteScriptProcedureNotFound(path.to_string().into()))?; + + // Get the procedure export and verify it has the @note_script attribute + let proc_export = export + .as_procedure() + .ok_or_else(|| NoteError::NoteScriptProcedureNotFound(path.to_string().into()))?; + + if !proc_export.attributes.has(NOTE_SCRIPT_ATTRIBUTE) { + return Err(NoteError::NoteScriptProcedureMissingAttribute(path.to_string().into())); + } + + // Get the digest of the procedure from the library + let digest = library.mast_forest()[proc_export.node].digest(); + + // Create a minimal MastForest with just an external node referencing the digest + let (mast, entrypoint) = create_external_node_forest(digest); + + Ok(Self { mast: Arc::new(mast), entrypoint }) + } + // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- @@ -76,6 +156,34 @@ impl NoteScript { pub fn entrypoint(&self) -> MastNodeId { self.entrypoint } + + /// Clears all debug info from this script's [`MastForest`]: 
decorators, error codes, and + /// procedure names. + /// + /// See [`MastForest::clear_debug_info`] for more details. + pub fn clear_debug_info(&mut self) { + let mut mast = self.mast.clone(); + Arc::make_mut(&mut mast).clear_debug_info(); + self.mast = mast; + } + + /// Returns a new [NoteScript] with the provided advice map entries merged into the + /// underlying [MastForest]. + /// + /// This allows adding advice map entries to an already-compiled note script, + /// which is useful when the entries are determined after script compilation. + pub fn with_advice_map(self, advice_map: AdviceMap) -> Self { + if advice_map.is_empty() { + return self; + } + + let mut mast = (*self.mast).clone(); + mast.advice_map_mut().extend(advice_map); + Self { + mast: Arc::new(mast), + entrypoint: self.entrypoint, + } + } } // CONVERSIONS INTO NOTE SCRIPT @@ -135,16 +243,23 @@ impl TryFrom<&[Felt]> for NoteScript { return Err(DeserializationError::UnexpectedEOF); } - let entrypoint: u32 = elements[0].try_into().map_err(DeserializationError::InvalidValue)?; - let len = elements[1].as_int(); + let entrypoint: u32 = elements[0] + .as_canonical_u64() + .try_into() + .map_err(|err: TryFromIntError| DeserializationError::InvalidValue(err.to_string()))?; + let len = elements[1].as_canonical_u64(); let mut data = Vec::with_capacity(elements.len() * 4); for &felt in &elements[2..] { - let v: u32 = felt.try_into().map_err(DeserializationError::InvalidValue)?; - data.extend(v.to_le_bytes()) + let element: u32 = + felt.as_canonical_u64().try_into().map_err(|err: TryFromIntError| { + DeserializationError::InvalidValue(err.to_string()) + })?; + data.extend(element.to_le_bytes()) } data.shrink_to(len as usize); + // TODO: Use UntrustedMastForest and check where else we deserialize mast forests. 
let mast = MastForest::read_from_bytes(&data)?; let entrypoint = MastNodeId::from_u32_safe(entrypoint, &mast)?; Ok(NoteScript::from_parts(Arc::new(mast), entrypoint)) @@ -167,6 +282,16 @@ impl Serializable for NoteScript { self.mast.write_into(target); target.write_u32(u32::from(self.entrypoint)); } + + fn get_size_hint(&self) -> usize { + // TODO: this is a temporary workaround. Replace mast.to_bytes().len() with + // MastForest::get_size_hint() (or a similar size-hint API) once it becomes + // available. + let mast_size = self.mast.to_bytes().len(); + let u32_size = 0u32.get_size_hint(); + + mast_size + u32_size + } } impl Deserializable for NoteScript { @@ -196,6 +321,23 @@ impl Display for NoteScript { } } +// HELPER FUNCTIONS +// ================================================================================================ + +/// Creates a minimal [MastForest] containing only an external node referencing the given digest. +/// +/// This is useful for creating lightweight references to procedures without copying entire +/// libraries. The external reference will be resolved at runtime, assuming the source library +/// is loaded into the VM's MastForestStore. 
+fn create_external_node_forest(digest: Word) -> (MastForest, MastNodeId) { + let mut mast = MastForest::new(); + let node_id = ExternalNodeBuilder::new(digest) + .add_to_forest(&mut mast) + .expect("adding external node to empty forest should not fail"); + mast.make_root(node_id); + (mast, node_id) +} + // TESTS // ================================================================================================ @@ -208,8 +350,8 @@ mod tests { #[test] fn test_note_script_to_from_felt() { let assembler = Assembler::default(); - let tx_script_src = DEFAULT_NOTE_CODE; - let program = assembler.assemble_program(tx_script_src).unwrap(); + let script_src = DEFAULT_NOTE_CODE; + let program = assembler.assemble_program(script_src).unwrap(); let note_script = NoteScript::new(program); let encoded: Vec = (¬e_script).into(); @@ -217,4 +359,34 @@ mod tests { assert_eq!(note_script, decoded); } + + #[test] + fn test_note_script_with_advice_map() { + use miden_core::advice::AdviceMap; + + use crate::Word; + + let assembler = Assembler::default(); + let program = assembler.assemble_program("begin nop end").unwrap(); + let script = NoteScript::new(program); + + assert!(script.mast().advice_map().is_empty()); + + // Empty advice map should be a no-op + let original_root = script.root(); + let script = script.with_advice_map(AdviceMap::default()); + assert_eq!(original_root, script.root()); + + // Non-empty advice map should add entries + let key = Word::from([5u32, 6, 7, 8]); + let value = vec![Felt::new(100)]; + let mut advice_map = AdviceMap::default(); + advice_map.insert(key, value.clone()); + + let script = script.with_advice_map(advice_map); + + let mast = script.mast(); + let stored = mast.advice_map().get(&key).expect("entry should be present"); + assert_eq!(stored.as_ref(), value.as_slice()); + } } diff --git a/crates/miden-protocol/src/note/storage.rs b/crates/miden-protocol/src/note/storage.rs new file mode 100644 index 0000000000..0b8b73a976 --- /dev/null +++ 
b/crates/miden-protocol/src/note/storage.rs @@ -0,0 +1,168 @@ +use alloc::vec::Vec; + +use crate::errors::NoteError; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, MAX_NOTE_STORAGE_ITEMS, Word}; + +// NOTE STORAGE +// ================================================================================================ + +/// A container for note storage items. +/// +/// A note can be associated with up to 1024 storage items. Each item is represented by a single +/// field element. Thus, note storage can contain up to ~8 KB of data. +/// +/// All storage items associated with a note can be reduced to a single commitment which is +/// computed as sequential hash over the storage elements. +#[derive(Clone, Debug)] +pub struct NoteStorage { + items: Vec, + commitment: Word, +} + +impl NoteStorage { + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- + + /// Returns [NoteStorage] instantiated from the provided items. + /// + /// # Errors + /// Returns an error if the number of provided storage items is greater than 1024. + pub fn new(items: Vec) -> Result { + if items.len() > MAX_NOTE_STORAGE_ITEMS { + return Err(NoteError::TooManyStorageItems(items.len())); + } + + let commitment = Hasher::hash_elements(&items); + + Ok(Self { items, commitment }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns a commitment to this storage. + pub fn commitment(&self) -> Word { + self.commitment + } + + /// Returns the number of storage items. + /// + /// The returned value is guaranteed to be smaller than or equal to [`MAX_NOTE_STORAGE_ITEMS`]. 
+ pub fn num_items(&self) -> u16 { + const _: () = assert!(MAX_NOTE_STORAGE_ITEMS <= u16::MAX as usize); + debug_assert!( + self.items.len() <= MAX_NOTE_STORAGE_ITEMS, + "The constructor should have checked the number of storage items" + ); + self.items.len() as u16 + } + + /// Returns `true` if the storage has no items. + pub fn is_empty(&self) -> bool { + self.items.is_empty() + } + + /// Returns a reference to the storage items. + pub fn items(&self) -> &[Felt] { + &self.items + } + + /// Returns the note's storage as a vector of field elements. + pub fn to_elements(&self) -> Vec { + self.items.to_vec() + } +} + +impl Default for NoteStorage { + fn default() -> Self { + Self::new(vec![]).expect("empty storage should be valid") + } +} + +impl PartialEq for NoteStorage { + fn eq(&self, other: &Self) -> bool { + let NoteStorage { items, commitment: _ } = self; + items == &other.items + } +} + +impl Eq for NoteStorage {} + +// CONVERSION +// ================================================================================================ + +impl From for Vec { + fn from(value: NoteStorage) -> Self { + value.items + } +} + +impl TryFrom> for NoteStorage { + type Error = NoteError; + + fn try_from(value: Vec) -> Result { + NoteStorage::new(value) + } +} + +// SERIALIZATION +// ================================================================================================ + +impl Serializable for NoteStorage { + fn write_into(&self, target: &mut W) { + let NoteStorage { items, commitment: _commitment } = self; + target.write_u16(items.len().try_into().expect("storage items len is not a u16 value")); + target.write_many(items); + } + + fn get_size_hint(&self) -> usize { + // 2 bytes for u16 length + 8 bytes per Felt + 2 + self.items.len() * 8 + } +} + +impl Deserializable for NoteStorage { + fn read_from(source: &mut R) -> Result { + let len = source.read_u16()? 
as usize; + let items = source.read_many_iter(len)?.collect::, _>>()?; + Self::new(items).map_err(|v| DeserializationError::InvalidValue(format!("{v}"))) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_crypto::utils::Deserializable; + + use super::{Felt, NoteStorage, Serializable}; + + #[test] + fn test_storage_item_ordering() { + // storage items are provided in reverse stack order + let storage_items = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; + // we expect the storage items to remain in reverse stack order. + let expected_ordering = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; + + let note_storage = NoteStorage::new(storage_items).expect("note created should succeed"); + assert_eq!(&expected_ordering, note_storage.items()); + } + + #[test] + fn test_storage_serialization() { + let storage_items = vec![Felt::new(1), Felt::new(2), Felt::new(3)]; + let note_storage = NoteStorage::new(storage_items).unwrap(); + + let bytes = note_storage.to_bytes(); + let parsed_note_storage = NoteStorage::read_from_bytes(&bytes).unwrap(); + assert_eq!(note_storage, parsed_note_storage); + } +} diff --git a/crates/miden-protocol/src/testing/account_code.rs b/crates/miden-protocol/src/testing/account_code.rs index bd1df53194..ea98aeb1cf 100644 --- a/crates/miden-protocol/src/testing/account_code.rs +++ b/crates/miden-protocol/src/testing/account_code.rs @@ -3,6 +3,7 @@ use miden_assembly::Assembler; +use crate::account::component::AccountComponentMetadata; use crate::account::{AccountCode, AccountComponent, AccountType}; use crate::testing::noop_auth_component::NoopAuthComponent; @@ -22,7 +23,8 @@ impl AccountCode { let library = Assembler::default() .assemble_library([CODE]) .expect("mock account component should assemble"); - let component = AccountComponent::new(library, vec![]).unwrap().with_supports_all_types(); + let metadata = 
AccountComponentMetadata::new("miden::testing::mock", AccountType::all()); + let component = AccountComponent::new(library, vec![], metadata).unwrap(); Self::from_components( &[NoopAuthComponent.into(), component], diff --git a/crates/miden-protocol/src/testing/add_component.rs b/crates/miden-protocol/src/testing/add_component.rs index 3415a70a16..98dd2b8629 100644 --- a/crates/miden-protocol/src/testing/add_component.rs +++ b/crates/miden-protocol/src/testing/add_component.rs @@ -1,4 +1,5 @@ -use crate::account::AccountComponent; +use crate::account::component::AccountComponentMetadata; +use crate::account::{AccountComponent, AccountType}; use crate::assembly::{Assembler, Library}; use crate::utils::sync::LazyLock; @@ -24,8 +25,10 @@ pub struct AddComponent; impl From for AccountComponent { fn from(_: AddComponent) -> Self { - AccountComponent::new(ADD_LIBRARY.clone(), vec![]) + let metadata = AccountComponentMetadata::new("miden::testing::add", AccountType::all()) + .with_description("Add component for testing"); + + AccountComponent::new(ADD_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") - .with_supports_all_types() } } diff --git a/crates/miden-protocol/src/testing/asset.rs b/crates/miden-protocol/src/testing/asset.rs index b1b12223bd..a89be85545 100644 --- a/crates/miden-protocol/src/testing/asset.rs +++ b/crates/miden-protocol/src/testing/asset.rs @@ -1,97 +1,15 @@ -use rand::Rng; -use rand::distr::StandardUniform; - -use crate::account::{AccountId, AccountIdPrefix, AccountType}; +use crate::account::AccountId; use crate::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; -use crate::errors::AssetError; use crate::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, }; -/// Builder for an `NonFungibleAssetDetails`, the builder can be configured and used multiplied -/// times. 
-#[derive(Debug, Clone)] -pub struct NonFungibleAssetDetailsBuilder { - faucet_id: AccountIdPrefix, - rng: T, -} - -/// Builder for an `FungibleAsset`, the builder can be configured and used multiplied times. -#[derive(Debug, Clone)] -pub struct FungibleAssetBuilder { - faucet_id: AccountId, - amount: u64, -} - -impl NonFungibleAssetDetailsBuilder { - pub fn new(faucet_id: AccountIdPrefix, rng: T) -> Result { - if !matches!(faucet_id.account_type(), AccountType::NonFungibleFaucet) { - return Err(AssetError::NonFungibleFaucetIdTypeMismatch(faucet_id)); - } - - Ok(Self { faucet_id, rng }) - } - - pub fn build(&mut self) -> Result { - let data = (&mut self.rng).sample_iter(StandardUniform).take(5).collect(); - NonFungibleAssetDetails::new(self.faucet_id, data) - } -} - -/// Builder for an `NonFungibleAsset`, the builder can be configured and used multiplied times. -#[derive(Debug, Clone)] -pub struct NonFungibleAssetBuilder { - details_builder: NonFungibleAssetDetailsBuilder, -} - -impl NonFungibleAssetBuilder { - pub fn new(faucet_id: AccountIdPrefix, rng: T) -> Result { - let details_builder = NonFungibleAssetDetailsBuilder::new(faucet_id, rng)?; - Ok(Self { details_builder }) - } - - pub fn build(&mut self) -> Result { - let details = self.details_builder.build()?; - NonFungibleAsset::new(&details) - } -} - -impl FungibleAssetBuilder { - pub const DEFAULT_AMOUNT: u64 = 10; - - pub fn new(faucet_id: AccountId) -> Result { - let account_type = faucet_id.account_type(); - if !matches!(account_type, AccountType::FungibleFaucet) { - return Err(AssetError::FungibleFaucetIdTypeMismatch(faucet_id)); - } - - Ok(Self { faucet_id, amount: Self::DEFAULT_AMOUNT }) - } - - pub fn amount(&mut self, amount: u64) -> Result<&mut Self, AssetError> { - if amount > FungibleAsset::MAX_AMOUNT { - return Err(AssetError::FungibleAssetAmountTooBig(amount)); - } - - self.amount = amount; - Ok(self) - } - - pub fn with_amount(&self, amount: u64) -> Result { - 
FungibleAsset::new(self.faucet_id, amount) - } - - pub fn build(&self) -> Result { - FungibleAsset::new(self.faucet_id, self.amount) - } -} - impl NonFungibleAsset { /// Returns a mocked non-fungible asset, issued by [ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET]. pub fn mock(asset_data: &[u8]) -> Asset { let non_fungible_asset_details = NonFungibleAssetDetails::new( - AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET).unwrap().prefix(), + AccountId::try_from(ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET).unwrap(), asset_data.to_vec(), ) .unwrap(); diff --git a/crates/miden-protocol/src/testing/block.rs b/crates/miden-protocol/src/testing/block.rs index 3ef1a2c624..cb4fbc446f 100644 --- a/crates/miden-protocol/src/testing/block.rs +++ b/crates/miden-protocol/src/testing/block.rs @@ -1,14 +1,13 @@ use miden_crypto::merkle::smt::Smt; #[cfg(not(target_family = "wasm"))] -use winter_rand_utils::rand_value; +use miden_crypto::rand::test_utils::rand_value; use crate::Word; use crate::account::Account; -use crate::block::account_tree::{AccountTree, account_id_to_smt_key}; +use crate::block::account_tree::{AccountIdKey, AccountTree}; use crate::block::{BlockHeader, BlockNumber, FeeParameters}; -use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; -use crate::testing::random_signer::RandomBlockSigner; +use crate::testing::random_secret_key::random_secret_key; impl BlockHeader { /// Creates a mock block. 
The account tree is formed from the provided `accounts`, @@ -27,7 +26,7 @@ impl BlockHeader { let smt = Smt::with_entries( accounts .iter() - .map(|acct| (account_id_to_smt_key(acct.id()), acct.commitment())), + .map(|acct| (AccountIdKey::from(acct.id()).as_word(), acct.to_commitment())), ) .expect("failed to create account db"); let acct_db = AccountTree::new(smt).expect("failed to create account tree"); @@ -35,7 +34,7 @@ impl BlockHeader { let fee_parameters = FeeParameters::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(), 500) .expect("native asset ID should be a fungible faucet ID"); - let validator_key = SecretKey::random().public_key(); + let validator_key = random_secret_key(); #[cfg(not(target_family = "wasm"))] let ( @@ -92,7 +91,7 @@ impl BlockHeader { note_root, tx_commitment, tx_kernel_commitment, - validator_key, + validator_key.public_key(), fee_parameters, timestamp, ) diff --git a/crates/miden-protocol/src/testing/component_metadata.rs b/crates/miden-protocol/src/testing/component_metadata.rs new file mode 100644 index 0000000000..2586d5e1dd --- /dev/null +++ b/crates/miden-protocol/src/testing/component_metadata.rs @@ -0,0 +1,10 @@ +use crate::account::AccountType; +use crate::account::component::AccountComponentMetadata; + +impl AccountComponentMetadata { + /// Creates a mock [`AccountComponentMetadata`] with the given name that supports all account + /// types. 
+ pub fn mock(name: &str) -> Self { + AccountComponentMetadata::new(name, AccountType::all()) + } +} diff --git a/crates/miden-protocol/src/testing/constants.rs b/crates/miden-protocol/src/testing/constants.rs index 84c7fb3b01..8ff709f854 100644 --- a/crates/miden-protocol/src/testing/constants.rs +++ b/crates/miden-protocol/src/testing/constants.rs @@ -1,5 +1,4 @@ pub const FUNGIBLE_ASSET_AMOUNT: u64 = 100; -pub const FUNGIBLE_FAUCET_INITIAL_BALANCE: u64 = 50000; pub const CONSUMED_ASSET_1_AMOUNT: u64 = 100; pub const CONSUMED_ASSET_2_AMOUNT: u64 = 200; diff --git a/crates/miden-protocol/src/testing/mock_util_lib.rs b/crates/miden-protocol/src/testing/mock_util_lib.rs deleted file mode 100644 index f9d454f5ee..0000000000 --- a/crates/miden-protocol/src/testing/mock_util_lib.rs +++ /dev/null @@ -1,47 +0,0 @@ -use miden_assembly::diagnostics::NamedSource; - -use crate::assembly::Library; -use crate::transaction::TransactionKernel; -use crate::utils::sync::LazyLock; - -const MOCK_UTIL_LIBRARY_CODE: &str = " - use miden::protocol::output_note - - #! Inputs: [] - #! Outputs: [note_idx] - pub proc create_default_note - push.1.2.3.4 # = RECIPIENT - push.2 # = NoteType::Private - push.0 # = NoteTag - # => [tag, note_type, RECIPIENT] - - exec.output_note::create - # => [note_idx] - end - - #! Inputs: [ASSET] - #! Outputs: [] - pub proc create_default_note_with_asset - exec.create_default_note - # => [note_idx, ASSET] - - movdn.4 - # => [ASSET, note_idx] - - exec.output_note::add_asset - # => [] - end -"; - -static MOCK_UTIL_LIBRARY: LazyLock = LazyLock::new(|| { - TransactionKernel::assembler() - .assemble_library([NamedSource::new("mock::util", MOCK_UTIL_LIBRARY_CODE)]) - .expect("mock util library should be valid") -}); - -/// Returns the mock test [`Library`] under the `mock::util` namespace. -/// -/// This provides convenient wrappers for testing purposes. 
-pub fn mock_util_library() -> Library { - MOCK_UTIL_LIBRARY.clone() -} diff --git a/crates/miden-protocol/src/testing/mod.rs b/crates/miden-protocol/src/testing/mod.rs index 9b04908a64..80dda62a68 100644 --- a/crates/miden-protocol/src/testing/mod.rs +++ b/crates/miden-protocol/src/testing/mod.rs @@ -5,12 +5,13 @@ pub mod add_component; pub mod asset; pub mod block; pub mod block_note_tree; +pub mod component_metadata; pub mod constants; -pub mod mock_util_lib; pub mod noop_auth_component; pub mod note; pub mod partial_blockchain; -pub mod random_signer; +pub mod random_secret_key; pub mod slot_name; pub mod storage; +pub mod storage_map_key; pub mod tx; diff --git a/crates/miden-protocol/src/testing/noop_auth_component.rs b/crates/miden-protocol/src/testing/noop_auth_component.rs index 35b7a79126..5a7880e7f8 100644 --- a/crates/miden-protocol/src/testing/noop_auth_component.rs +++ b/crates/miden-protocol/src/testing/noop_auth_component.rs @@ -1,4 +1,5 @@ -use crate::account::AccountComponent; +use crate::account::component::AccountComponentMetadata; +use crate::account::{AccountComponent, AccountType}; use crate::assembly::{Assembler, Library}; use crate::utils::sync::LazyLock; @@ -6,6 +7,7 @@ use crate::utils::sync::LazyLock; // ================================================================================================ const NOOP_AUTH_CODE: &str = " + @auth_script pub proc auth_noop push.0 drop end @@ -24,8 +26,11 @@ pub struct NoopAuthComponent; impl From for AccountComponent { fn from(_: NoopAuthComponent) -> Self { - AccountComponent::new(NOOP_AUTH_LIBRARY.clone(), vec![]) + let metadata = + AccountComponentMetadata::new("miden::testing::noop_auth", AccountType::all()) + .with_description("No-op auth component for testing"); + + AccountComponent::new(NOOP_AUTH_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") - .with_supports_all_types() } } diff --git a/crates/miden-protocol/src/testing/note.rs 
b/crates/miden-protocol/src/testing/note.rs index 56f2bedf84..913fd0f7ee 100644 --- a/crates/miden-protocol/src/testing/note.rs +++ b/crates/miden-protocol/src/testing/note.rs @@ -6,10 +6,10 @@ use crate::asset::FungibleAsset; use crate::note::{ Note, NoteAssets, - NoteInputs, NoteMetadata, NoteRecipient, NoteScript, + NoteStorage, NoteTag, NoteType, }; @@ -24,12 +24,9 @@ impl Note { let note_script = NoteScript::mock(); let assets = NoteAssets::new(vec![FungibleAsset::mock(200)]).expect("note assets should be valid"); - let metadata = NoteMetadata::new( - sender_id, - NoteType::Private, - NoteTag::with_account_target(sender_id), - ); - let inputs = NoteInputs::new(Vec::new()).unwrap(); + let metadata = NoteMetadata::new(sender_id, NoteType::Private) + .with_tag(NoteTag::with_account_target(sender_id)); + let inputs = NoteStorage::new(Vec::new()).unwrap(); let recipient = NoteRecipient::new(serial_num, note_script, inputs); Note::new(assets, metadata, recipient) diff --git a/crates/miden-protocol/src/testing/random_secret_key.rs b/crates/miden-protocol/src/testing/random_secret_key.rs new file mode 100644 index 0000000000..ab5fea909a --- /dev/null +++ b/crates/miden-protocol/src/testing/random_secret_key.rs @@ -0,0 +1,14 @@ +// NO STD ECDSA SECRET KEY +// ================================================================================================ + +use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; + +// NO STD SECRET KEY +// ================================================================================================ + +pub fn random_secret_key() -> SecretKey { + use rand::SeedableRng; + use rand_chacha::ChaCha20Rng; + let mut rng = ChaCha20Rng::from_os_rng(); + SecretKey::with_rng(&mut rng) +} diff --git a/crates/miden-protocol/src/testing/random_signer.rs b/crates/miden-protocol/src/testing/random_signer.rs deleted file mode 100644 index 4d104e2f21..0000000000 --- a/crates/miden-protocol/src/testing/random_signer.rs +++ /dev/null @@ -1,22 +0,0 @@ -// 
NO STD ECDSA SIGNER -// ================================================================================================ - -use crate::block::BlockSigner; -use crate::crypto::dsa::ecdsa_k256_keccak::SecretKey; - -/// An insecure, random block signer for testing purposes. -pub trait RandomBlockSigner: BlockSigner { - fn random() -> Self; -} - -// NO STD SECRET KEY BLOCK SIGNER -// ================================================================================================ - -impl RandomBlockSigner for SecretKey { - fn random() -> Self { - use rand::SeedableRng; - use rand_chacha::ChaCha20Rng; - let mut rng = ChaCha20Rng::from_os_rng(); - SecretKey::with_rng(&mut rng) - } -} diff --git a/crates/miden-protocol/src/testing/storage.rs b/crates/miden-protocol/src/testing/storage.rs index c4c0fc47cb..bf32740a72 100644 --- a/crates/miden-protocol/src/testing/storage.rs +++ b/crates/miden-protocol/src/testing/storage.rs @@ -1,4 +1,3 @@ -use alloc::string::{String, ToString}; use alloc::vec::Vec; use miden_core::{Felt, Word}; @@ -8,11 +7,11 @@ use crate::account::{ AccountStorageDelta, StorageMap, StorageMapDelta, + StorageMapKey, StorageSlot, StorageSlotDelta, StorageSlotName, }; -use crate::note::NoteAssets; use crate::utils::sync::LazyLock; // ACCOUNT STORAGE DELTA @@ -132,19 +131,9 @@ impl AccountStorage { } pub fn mock_map() -> StorageMap { - StorageMap::with_entries(STORAGE_LEAVES_2).unwrap() + StorageMap::with_entries( + STORAGE_LEAVES_2.map(|(key, value)| (StorageMapKey::from_raw(key), value)), + ) + .unwrap() } } - -// UTILITIES -// -------------------------------------------------------------------------------------------- - -/// Returns a list of strings, one for each note asset. 
-pub fn prepare_assets(note_assets: &NoteAssets) -> Vec { - let mut assets = Vec::new(); - for &asset in note_assets.iter() { - let asset_word = Word::from(asset); - assets.push(asset_word.to_string()); - } - assets -} diff --git a/crates/miden-protocol/src/testing/storage_map_key.rs b/crates/miden-protocol/src/testing/storage_map_key.rs new file mode 100644 index 0000000000..36da9f1e93 --- /dev/null +++ b/crates/miden-protocol/src/testing/storage_map_key.rs @@ -0,0 +1,9 @@ +use crate::Word; +use crate::account::StorageMapKey; + +impl StorageMapKey { + /// Creates a [`StorageMapKey`] from an array of u32s for testing purposes. + pub fn from_array(array: [u32; 4]) -> Self { + Self::from_raw(Word::from(array)) + } +} diff --git a/crates/miden-protocol/src/transaction/executed_tx.rs b/crates/miden-protocol/src/transaction/executed_tx.rs index 27bd067a81..19613a48e8 100644 --- a/crates/miden-protocol/src/transaction/executed_tx.rs +++ b/crates/miden-protocol/src/transaction/executed_tx.rs @@ -8,7 +8,7 @@ use super::{ InputNote, InputNotes, NoteId, - OutputNotes, + RawOutputNotes, TransactionArgs, TransactionId, TransactionOutputs, @@ -68,9 +68,10 @@ impl ExecutedTransaction { // `id` value after construction `Self {..}` without moving let id = TransactionId::new( tx_inputs.account().initial_commitment(), - tx_outputs.account.commitment(), + tx_outputs.account.to_commitment(), tx_inputs.input_notes().commitment(), tx_outputs.output_notes.commitment(), + tx_outputs.fee, ); Self { @@ -111,7 +112,7 @@ impl ExecutedTransaction { } /// Returns the notes created in this transaction. 
- pub fn output_notes(&self) -> &OutputNotes { + pub fn output_notes(&self) -> &RawOutputNotes { &self.tx_outputs.output_notes } diff --git a/crates/miden-protocol/src/transaction/inputs/account.rs b/crates/miden-protocol/src/transaction/inputs/account.rs index f0c2296323..9f06662d8a 100644 --- a/crates/miden-protocol/src/transaction/inputs/account.rs +++ b/crates/miden-protocol/src/transaction/inputs/account.rs @@ -3,7 +3,13 @@ use crate::account::{AccountCode, AccountId, PartialAccount, PartialStorage}; use crate::asset::PartialVault; use crate::block::account_tree::AccountWitness; use crate::crypto::merkle::smt::{SmtProof, SmtProofError}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // ACCOUNT INPUTS // ================================================================================================ @@ -97,15 +103,15 @@ mod tests { use alloc::vec::Vec; use miden_core::Felt; - use miden_core::utils::{Deserializable, Serializable}; use miden_crypto::merkle::SparseMerklePath; - use miden_processor::SMT_DEPTH; use crate::account::{Account, AccountCode, AccountId, AccountStorage, PartialAccount}; use crate::asset::AssetVault; use crate::block::account_tree::AccountWitness; + use crate::crypto::merkle::smt::SMT_DEPTH; use crate::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE; use crate::transaction::AccountInputs; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn serde_roundtrip() { @@ -115,7 +121,7 @@ mod tests { let storage = AccountStorage::new(vec![]).unwrap(); let account = Account::new_existing(id, vault, storage, code, Felt::new(10)); - let commitment = account.commitment(); + let commitment = account.to_commitment(); let mut merkle_nodes = Vec::with_capacity(SMT_DEPTH as usize); for _ in 0..(SMT_DEPTH as usize) { diff --git 
a/crates/miden-protocol/src/transaction/inputs/mod.rs b/crates/miden-protocol/src/transaction/inputs/mod.rs index a8122e0908..c8e8d75777 100644 --- a/crates/miden-protocol/src/transaction/inputs/mod.rs +++ b/crates/miden-protocol/src/transaction/inputs/mod.rs @@ -1,10 +1,10 @@ use alloc::collections::{BTreeMap, BTreeSet}; +use alloc::sync::Arc; use alloc::vec::Vec; use core::fmt::Debug; -use miden_core::utils::{Deserializable, Serializable}; -use miden_crypto::merkle::NodeIndex; -use miden_crypto::merkle::smt::{LeafIndex, SmtLeaf, SmtProof}; +use miden_crypto::merkle::smt::{SmtLeaf, SmtProof}; +use miden_crypto::merkle::{MerkleError, NodeIndex}; use super::PartialBlockchain; use crate::account::{ @@ -14,18 +14,25 @@ use crate::account::{ AccountStorageHeader, PartialAccount, PartialStorage, - StorageMap, + StorageMapKey, StorageMapWitness, StorageSlotId, StorageSlotName, }; -use crate::asset::{AssetVaultKey, AssetWitness, PartialVault}; -use crate::block::account_tree::{AccountWitness, account_id_to_smt_index}; +use crate::asset::{Asset, AssetVaultKey, AssetWitness, PartialVault}; +use crate::block::account_tree::{AccountIdKey, AccountWitness}; use crate::block::{BlockHeader, BlockNumber}; use crate::crypto::merkle::SparseMerklePath; use crate::errors::{TransactionInputError, TransactionInputsExtractionError}; use crate::note::{Note, NoteInclusionProof}; -use crate::transaction::{TransactionAdviceInputs, TransactionArgs, TransactionScript}; +use crate::transaction::{TransactionArgs, TransactionScript}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word}; #[cfg(test)] @@ -35,9 +42,10 @@ mod account; pub use account::AccountInputs; mod notes; -use miden_processor::{AdviceInputs, SMT_DEPTH}; pub use notes::{InputNote, InputNotes, ToInputNoteCommitments}; +use crate::vm::AdviceInputs; + // TRANSACTION INPUTS // 
================================================================================================ @@ -51,8 +59,6 @@ pub struct TransactionInputs { tx_args: TransactionArgs, advice_inputs: AdviceInputs, foreign_account_code: Vec, - /// Pre-fetched asset witnesses for note assets and the fee asset. - asset_witnesses: Vec, /// Storage slot names for foreign accounts. foreign_account_slot_names: BTreeMap, } @@ -110,14 +116,21 @@ impl TransactionInputs { tx_args: TransactionArgs::default(), advice_inputs: AdviceInputs::default(), foreign_account_code: Vec::new(), - asset_witnesses: Vec::new(), foreign_account_slot_names: BTreeMap::new(), }) } /// Replaces the transaction inputs and assigns the given asset witnesses. pub fn with_asset_witnesses(mut self, witnesses: Vec) -> Self { - self.asset_witnesses = witnesses; + for witness in witnesses { + self.advice_inputs.store.extend(witness.authenticated_nodes()); + let smt_proof = SmtProof::from(witness); + self.advice_inputs.map.extend([( + smt_proof.leaf().hash(), + smt_proof.leaf().to_elements().collect::>(), + )]); + } + self } @@ -210,11 +223,6 @@ impl TransactionInputs { &self.foreign_account_code } - /// Returns the pre-fetched witnesses for note and fee assets. - pub fn asset_witnesses(&self) -> &[AssetWitness] { - &self.asset_witnesses - } - /// Returns the foreign account storage slot names. pub fn foreign_account_slot_names(&self) -> &BTreeMap { &self.foreign_account_slot_names @@ -237,10 +245,10 @@ impl TransactionInputs { pub fn read_storage_map_witness( &self, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> Result { // Convert map key into the index at which the key-value pair for this key is stored - let leaf_index = StorageMap::map_key_to_leaf_index(map_key); + let leaf_index = map_key.hash().to_leaf_index(); // Construct sparse Merkle path. 
let merkle_path = self.advice_inputs.store.get_path(map_root, leaf_index.into())?; @@ -253,7 +261,7 @@ impl TransactionInputs { .map .get(&merkle_node) .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; - let smt_leaf = smt_leaf_from_elements(smt_leaf_elements, leaf_index)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, leaf_index)?; // Construct SMT proof and witness. let smt_proof = SmtProof::new(sparse_path, smt_leaf)?; @@ -263,6 +271,12 @@ impl TransactionInputs { } /// Reads the vault asset witnesses for the given account and vault keys. + /// + /// # Errors + /// Returns an error if: + /// - A Merkle tree with the specified root is not present in the advice data of these inputs. + /// - Witnesses for any of the requested assets are not in the specified Merkle tree. + /// - Construction of the Merkle path or the leaf node for the witness fails. pub fn read_vault_asset_witnesses( &self, vault_root: Word, @@ -282,7 +296,7 @@ impl TransactionInputs { .map .get(&merkle_node) .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; - let smt_leaf = smt_leaf_from_elements(smt_leaf_elements, smt_index)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, smt_index)?; // Construct SMT proof and witness. let smt_proof = SmtProof::new(sparse_path, smt_leaf)?; @@ -292,12 +306,70 @@ impl TransactionInputs { Ok(asset_witnesses) } - /// Reads AccountInputs for a foreign account from the advice inputs. + /// Returns true if the witness for the specified asset key is present in these inputs. + /// + /// Note that this does not verify the witness' validity (i.e., that the witness is for a valid + /// asset). 
+ pub fn has_vault_asset_witness(&self, vault_root: Word, asset_key: &AssetVaultKey) -> bool { + let smt_index: NodeIndex = asset_key.to_leaf_index().into(); + + // make sure the path is in the Merkle store + if !self.advice_inputs.store.has_path(vault_root, smt_index) { + return false; + } + + // make sure the node pre-image is in the Merkle store + match self.advice_inputs.store.get_node(vault_root, smt_index) { + Ok(node) => self.advice_inputs.map.contains_key(&node), + Err(_) => false, + } + } + + /// Reads the asset from the specified vault under the specified key; returns `None` if the + /// specified asset is not present in these inputs. + /// + /// # Errors + /// Returns an error if: + /// - A Merkle tree with the specified root is not present in the advice data of these inputs. + /// - Construction of the leaf node or the asset fails. + pub fn read_vault_asset( + &self, + vault_root: Word, + asset_key: AssetVaultKey, + ) -> Result, TransactionInputsExtractionError> { + // Get the node corresponding to the asset_key; if not found return None + let smt_index = asset_key.to_leaf_index(); + let merkle_node = match self.advice_inputs.store.get_node(vault_root, smt_index.into()) { + Ok(node) => node, + Err(MerkleError::NodeIndexNotFoundInStore(..)) => return Ok(None), + Err(err) => return Err(err.into()), + }; + + // Construct SMT leaf for this asset key + let smt_leaf_elements = self + .advice_inputs + .map + .get(&merkle_node) + .ok_or(TransactionInputsExtractionError::MissingVaultRoot)?; + let smt_leaf = SmtLeaf::try_from_elements(smt_leaf_elements, smt_index)?; + + // Find the asset in the SMT leaf + let asset = smt_leaf + .entries() + .iter() + .find(|(key, _value)| key == &asset_key.to_word()) + .map(|(_key, value)| Asset::from_key_value(asset_key, *value)) + .transpose()?; + + Ok(asset) + } + + /// Reads `AccountInputs` for a foreign account from the advice inputs. 
/// - /// This function reverses the process of [`TransactionAdviceInputs::add_foreign_accounts`] by: + /// This function reverses the process of `TransactionAdviceInputs::add_foreign_accounts` by: /// 1. Reading the account header from the advice map using the account_id_key. - /// 2. Building a PartialAccount from the header and foreign account code. - /// 3. Creating an AccountWitness. + /// 2. Building a `PartialAccount` from the header and foreign account code. + /// 3. Creating an `AccountWitness`. pub fn read_foreign_account_inputs( &self, account_id: AccountId, @@ -307,11 +379,11 @@ impl TransactionInputs { } // Read the account header elements from the advice map. - let account_id_key = TransactionAdviceInputs::account_id_map_key(account_id); + let account_id_key = AccountIdKey::from(account_id); let header_elements = self .advice_inputs .map - .get(&account_id_key) + .get(&account_id_key.as_word()) .ok_or(TransactionInputsExtractionError::ForeignAccountNotFound(account_id))?; // Parse the header from elements. @@ -377,7 +449,7 @@ impl TransactionInputs { ) -> Result { // Get the account tree root from the block header. let account_tree_root = self.block_header.account_root(); - let leaf_index: NodeIndex = account_id_to_smt_index(header.id()).into(); + let leaf_index = AccountIdKey::from(header.id()).to_leaf_index().into(); // Get the Merkle path from the merkle store. let merkle_path = self.advice_inputs.store.get_path(account_tree_root, leaf_index)?; @@ -386,7 +458,7 @@ impl TransactionInputs { let sparse_path = SparseMerklePath::from_sized_iter(merkle_path.path)?; // Create the account witness. 
- let witness = AccountWitness::new(header.id(), header.commitment(), sparse_path)?; + let witness = AccountWitness::new(header.id(), header.to_commitment(), sparse_path)?; Ok(witness) } @@ -424,7 +496,7 @@ impl TransactionInputs { // ================================================================================================ impl Serializable for TransactionInputs { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.account.write_into(target); self.block_header.write_into(target); self.blockchain.write_into(target); @@ -432,15 +504,12 @@ impl Serializable for TransactionInputs { self.tx_args.write_into(target); self.advice_inputs.write_into(target); self.foreign_account_code.write_into(target); - self.asset_witnesses.write_into(target); self.foreign_account_slot_names.write_into(target); } } impl Deserializable for TransactionInputs { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account = PartialAccount::read_from(source)?; let block_header = BlockHeader::read_from(source)?; let blockchain = PartialBlockchain::read_from(source)?; @@ -448,7 +517,6 @@ impl Deserializable for TransactionInputs { let tx_args = TransactionArgs::read_from(source)?; let advice_inputs = AdviceInputs::read_from(source)?; let foreign_account_code = Vec::::read_from(source)?; - let asset_witnesses = Vec::::read_from(source)?; let foreign_account_slot_names = BTreeMap::::read_from(source)?; @@ -460,7 +528,6 @@ impl Deserializable for TransactionInputs { tx_args, advice_inputs, foreign_account_code, - asset_witnesses, foreign_account_slot_names, }) } @@ -469,58 +536,6 @@ impl Deserializable for TransactionInputs { // HELPER FUNCTIONS // ================================================================================================ -// TODO(sergerad): Move this fn to crypto SmtLeaf::try_from_elements. 
-pub fn smt_leaf_from_elements( - elements: &[Felt], - leaf_index: LeafIndex, -) -> Result { - use miden_crypto::merkle::smt::SmtLeaf; - - // Based on the miden-crypto SMT leaf serialization format. - - if elements.is_empty() { - return Ok(SmtLeaf::new_empty(leaf_index)); - } - - // Elements should be organized into a contiguous array of K/V Words (4 Felts each). - if !elements.len().is_multiple_of(8) { - return Err(TransactionInputsExtractionError::LeafConversionError( - "invalid SMT leaf format: elements length must be divisible by 8".into(), - )); - } - - let num_entries = elements.len() / 8; - - if num_entries == 1 { - // Single entry. - let key = Word::new([elements[0], elements[1], elements[2], elements[3]]); - let value = Word::new([elements[4], elements[5], elements[6], elements[7]]); - Ok(SmtLeaf::new_single(key, value)) - } else { - // Multiple entries. - let mut entries = Vec::with_capacity(num_entries); - // Read k/v pairs from each entry. - for i in 0..num_entries { - let base_idx = i * 8; - let key = Word::new([ - elements[base_idx], - elements[base_idx + 1], - elements[base_idx + 2], - elements[base_idx + 3], - ]); - let value = Word::new([ - elements[base_idx + 4], - elements[base_idx + 5], - elements[base_idx + 6], - elements[base_idx + 7], - ]); - entries.push((key, value)); - } - let leaf = SmtLeaf::new_multiple(entries)?; - Ok(leaf) - } -} - /// Validates whether the provided note belongs to the note tree of the specified block. 
fn validate_is_in_block( note: &Note, diff --git a/crates/miden-protocol/src/transaction/inputs/notes.rs b/crates/miden-protocol/src/transaction/inputs/notes.rs index 638ae56eba..b48fe8741b 100644 --- a/crates/miden-protocol/src/transaction/inputs/notes.rs +++ b/crates/miden-protocol/src/transaction/inputs/notes.rs @@ -204,7 +204,7 @@ impl Serializable for InputNotes { impl Deserializable for InputNotes { fn read_from(source: &mut R) -> Result { let num_notes = source.read_u16()?; - let notes = source.read_many::(num_notes.into())?; + let notes = source.read_many_iter(num_notes.into())?.collect::, _>>()?; Self::new(notes).map_err(|err| DeserializationError::InvalidValue(format!("{err}"))) } } diff --git a/crates/miden-protocol/src/transaction/inputs/tests.rs b/crates/miden-protocol/src/transaction/inputs/tests.rs index 09500ef25b..cc2fcee7a2 100644 --- a/crates/miden-protocol/src/transaction/inputs/tests.rs +++ b/crates/miden-protocol/src/transaction/inputs/tests.rs @@ -1,10 +1,9 @@ use alloc::string::ToString; +use alloc::sync::Arc; use alloc::vec; use std::collections::BTreeMap; use std::vec::Vec; -use miden_core::utils::{Deserializable, Serializable}; - use crate::account::{ AccountCode, AccountHeader, @@ -17,12 +16,14 @@ use crate::account::{ StorageSlotType, }; use crate::asset::PartialVault; +use crate::block::account_tree::AccountIdKey; use crate::errors::TransactionInputsExtractionError; use crate::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2, }; use crate::transaction::TransactionInputs; +use crate::utils::serde::{Deserializable, Serializable}; use crate::{Felt, Word}; #[test] @@ -55,7 +56,6 @@ fn test_read_foreign_account_inputs_missing_data() { tx_args: crate::transaction::TransactionArgs::default(), advice_inputs: crate::vm::AdviceInputs::default(), foreign_account_code: Vec::new(), - asset_witnesses: Vec::new(), foreign_account_slot_names: BTreeMap::new(), }; @@ -120,9 
+120,10 @@ fn test_read_foreign_account_inputs_with_storage_data() { // Create advice inputs with both account header and storage header. let mut advice_inputs = crate::vm::AdviceInputs::default(); - let account_id_key = - crate::transaction::TransactionAdviceInputs::account_id_map_key(foreign_account_id); - advice_inputs.map.insert(account_id_key, foreign_header.as_elements().to_vec()); + let account_id_key = AccountIdKey::from(foreign_account_id); + advice_inputs + .map + .insert(account_id_key.as_word(), foreign_header.to_elements().to_vec()); advice_inputs .map .insert(foreign_header.storage_commitment(), foreign_storage_header.to_elements()); @@ -139,7 +140,6 @@ fn test_read_foreign_account_inputs_with_storage_data() { tx_args: crate::transaction::TransactionArgs::default(), advice_inputs, foreign_account_code: vec![code], - asset_witnesses: Vec::new(), foreign_account_slot_names, }; @@ -211,7 +211,7 @@ fn test_read_foreign_account_inputs_with_proper_witness() { let mut account_tree = AccountTree::::default(); // Insert native account. - let native_commitment = AccountHeader::from(&native_account).commitment(); + let native_commitment = AccountHeader::from(&native_account).to_commitment(); account_tree.insert(native_account_id, native_commitment).unwrap(); // Insert foreign account. @@ -224,7 +224,7 @@ fn test_read_foreign_account_inputs_with_proper_witness() { None, ) .unwrap(); - account_tree.insert(foreign_account_id, foreign_header.commitment()).unwrap(); + account_tree.insert(foreign_account_id, foreign_header.to_commitment()).unwrap(); // Get the account tree root and create witness. let account_tree_root = account_tree.root(); @@ -234,10 +234,10 @@ fn test_read_foreign_account_inputs_with_proper_witness() { let mut advice_inputs = crate::vm::AdviceInputs::default(); // Add account header to advice map. 
- let account_id_key = - crate::transaction::TransactionAdviceInputs::account_id_map_key(foreign_account_id); - advice_inputs.map.insert(account_id_key, foreign_header.as_elements().to_vec()); - + let account_id_key = AccountIdKey::from(foreign_account_id); + advice_inputs + .map + .insert(account_id_key.as_word(), foreign_header.to_elements().to_vec()); // Add storage header to advice map. advice_inputs .map @@ -248,7 +248,9 @@ fn test_read_foreign_account_inputs_with_proper_witness() { // Add the account leaf to the advice map (needed for witness verification). let leaf = foreign_witness.leaf(); - advice_inputs.map.insert(leaf.hash(), leaf.to_elements()); + advice_inputs + .map + .insert(leaf.hash(), leaf.to_elements().collect::>()); // Create block header with the account tree root. let block_header = crate::block::BlockHeader::mock(0, None, None, &[], account_tree_root); @@ -261,7 +263,6 @@ fn test_read_foreign_account_inputs_with_proper_witness() { tx_args: crate::transaction::TransactionArgs::default(), advice_inputs, foreign_account_code: vec![code], - asset_witnesses: Vec::new(), foreign_account_slot_names: BTreeMap::new(), }; @@ -348,7 +349,6 @@ fn test_transaction_inputs_serialization_with_foreign_slot_names() { tx_args: crate::transaction::TransactionArgs::default(), advice_inputs: crate::vm::AdviceInputs::default(), foreign_account_code: Vec::new(), - asset_witnesses: Vec::new(), foreign_account_slot_names, }; diff --git a/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs b/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs index aec2c04229..295a3ef03b 100644 --- a/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs +++ b/crates/miden-protocol/src/transaction/kernel/advice_inputs.rs @@ -1,13 +1,11 @@ use alloc::vec::Vec; -use miden_processor::AdviceMutation; +use miden_processor::advice::AdviceMutation; -use crate::account::{AccountHeader, AccountId, PartialAccount}; -use crate::asset::AssetWitness; -use 
crate::block::account_tree::AccountWitness; +use crate::account::{AccountHeader, PartialAccount}; +use crate::block::account_tree::{AccountIdKey, AccountWitness}; use crate::crypto::SequentialCommit; use crate::crypto::merkle::InnerNodeInfo; -use crate::crypto::merkle::smt::SmtProof; use crate::note::NoteAttachmentContent; use crate::transaction::{ AccountInputs, @@ -17,7 +15,7 @@ use crate::transaction::{ TransactionKernel, }; use crate::vm::AdviceInputs; -use crate::{EMPTY_WORD, Felt, FieldElement, Word, ZERO}; +use crate::{EMPTY_WORD, Felt, Word, ZERO}; // TRANSACTION ADVICE INPUTS // ================================================================================================ @@ -58,8 +56,8 @@ impl TransactionAdviceInputs { // If a seed was provided, extend the map appropriately. if let Some(seed) = tx_inputs.account().seed() { // ACCOUNT_ID |-> ACCOUNT_SEED - let account_id_key = Self::account_id_map_key(partial_native_acc.id()); - inputs.add_map_entry(account_id_key, seed.to_vec()); + let account_id_key = AccountIdKey::from(partial_native_acc.id()); + inputs.add_map_entry(account_id_key.as_word(), seed.to_vec()); } // if the account is new, insert the storage map entries into the advice provider. @@ -75,10 +73,6 @@ impl TransactionAdviceInputs { } } - tx_inputs.asset_witnesses().iter().for_each(|asset_witness| { - inputs.add_asset_witness(asset_witness.clone()); - }); - // Extend with extra user-supplied advice. inputs.extend(tx_inputs.tx_args().advice_inputs().clone()); @@ -110,13 +104,6 @@ impl TransactionAdviceInputs { // PUBLIC UTILITIES // -------------------------------------------------------------------------------------------- - /// Returns the advice map key where: - /// - the seed for native accounts is stored. - /// - the account header for foreign accounts is stored. 
- pub fn account_id_map_key(id: AccountId) -> Word { - Word::from([id.suffix(), id.prefix().as_felt(), ZERO, ZERO]) - } - // MUTATORS // -------------------------------------------------------------------------------------------- @@ -136,17 +123,17 @@ impl TransactionAdviceInputs { // for foreign accounts, we need to insert the id to state mapping // NOTE: keep this in sync with the account::load_from_advice procedure - let account_id_key = Self::account_id_map_key(foreign_acc.id()); + let account_id_key = AccountIdKey::from(foreign_acc.id()); let header = AccountHeader::from(foreign_acc.account()); // ACCOUNT_ID |-> [ID_AND_NONCE, VAULT_ROOT, STORAGE_COMMITMENT, CODE_COMMITMENT] - self.add_map_entry(account_id_key, header.as_elements()); + self.add_map_entry(account_id_key.as_word(), header.to_elements()); } } /// Extend the advice stack with the transaction inputs. /// - /// The following data is pushed to the advice stack: + /// The following data is pushed to the advice stack (words shown in memory-order): /// /// [ /// PARENT_BLOCK_COMMITMENT, @@ -157,11 +144,11 @@ impl TransactionAdviceInputs { /// TX_KERNEL_COMMITMENT /// VALIDATOR_KEY_COMMITMENT, /// [block_num, version, timestamp, 0], - /// [native_asset_id_suffix, native_asset_id_prefix, verification_base_fee, 0] + /// [0, verification_base_fee, native_asset_id_suffix, native_asset_id_prefix] /// [0, 0, 0, 0] /// NOTE_ROOT, /// kernel_version - /// [account_id, 0, 0, account_nonce], + /// [account_nonce, 0, account_id_suffix, account_id_prefix], /// ACCOUNT_VAULT_ROOT, /// ACCOUNT_STORAGE_COMMITMENT, /// ACCOUNT_CODE_COMMITMENT, @@ -183,15 +170,15 @@ impl TransactionAdviceInputs { self.extend_stack(header.validator_key().to_commitment()); self.extend_stack([ header.block_num().into(), - header.version().into(), - header.timestamp().into(), + Felt::from(header.version()), + Felt::from(header.timestamp()), ZERO, ]); self.extend_stack([ + ZERO, + Felt::from(header.fee_parameters().verification_base_fee()), 
header.fee_parameters().native_asset_id().suffix(), header.fee_parameters().native_asset_id().prefix().as_felt(), - header.fee_parameters().verification_base_fee().into(), - ZERO, ]); self.extend_stack([ZERO, ZERO, ZERO, ZERO]); self.extend_stack(header.note_root()); @@ -199,10 +186,10 @@ impl TransactionAdviceInputs { // --- core account items (keep in sync with process_account_data) ---- let account = tx_inputs.account(); self.extend_stack([ + account.nonce(), + ZERO, account.id().suffix(), account.id().prefix().as_felt(), - ZERO, - account.nonce(), ]); self.extend_stack(account.vault().root()); self.extend_stack(account.storage().commitment()); @@ -287,13 +274,20 @@ impl TransactionAdviceInputs { // populate Merkle store and advice map with nodes info needed to access storage map entries self.extend_merkle_store(account.storage().inner_nodes()); - self.extend_map(account.storage().leaves().map(|leaf| (leaf.hash(), leaf.to_elements()))); + self.extend_map( + account + .storage() + .leaves() + .map(|leaf| (leaf.hash(), leaf.to_elements().collect())), + ); // --- account vault ------------------------------------------------------ // populate Merkle store and advice map with nodes info needed to access vault assets self.extend_merkle_store(account.vault().inner_nodes()); - self.extend_map(account.vault().leaves().map(|leaf| (leaf.hash(), leaf.to_elements()))); + self.extend_map( + account.vault().leaves().map(|leaf| (leaf.hash(), leaf.to_elements().collect())), + ); } /// Adds an account witness to the advice inputs. 
@@ -303,20 +297,12 @@ impl TransactionAdviceInputs { fn add_account_witness(&mut self, witness: &AccountWitness) { // populate advice map with the account's leaf let leaf = witness.leaf(); - self.add_map_entry(leaf.hash(), leaf.to_elements()); + self.add_map_entry(leaf.hash(), leaf.to_elements().collect()); // extend the merkle store and map with account witnesses merkle path self.extend_merkle_store(witness.authenticated_nodes()); } - /// Adds an asset witness to the advice inputs. - fn add_asset_witness(&mut self, witness: AssetWitness) { - self.extend_merkle_store(witness.authenticated_nodes()); - - let smt_proof = SmtProof::from(witness); - self.extend_map([(smt_proof.leaf().hash(), smt_proof.leaf().to_elements())]); - } - // NOTE INJECTION // -------------------------------------------------------------------------------------------- @@ -325,13 +311,12 @@ impl TransactionAdviceInputs { /// The advice provider is populated with: /// /// - For each note: - /// - The note's details (serial number, script root, and its input / assets commitment). + /// - The note's details (serial number, script root, and its storage / assets commitment). /// - The note's private arguments. - /// - The note's public metadata. - /// - The note's public inputs data. Prefixed by its length and padded to an even word - /// length. - /// - The note's asset padded. Prefixed by its length and padded to an even word length. - /// - The note's script MAST forest's advice map inputs + /// - The note's public metadata (sender account ID, note type, note tag, attachment kind / + /// scheme and the attachment content). + /// - The note's storage (unpadded). + /// - The note's assets (key and value words). /// - For authenticated notes (determined by the `is_authenticated` flag): /// - The note's authentication path against its block's note tree. /// - The block number, sub commitment, note root. 
@@ -350,10 +335,10 @@ impl TransactionAdviceInputs { let recipient = note.recipient(); let note_arg = tx_inputs.tx_args().get_note_args(note.id()).unwrap_or(&EMPTY_WORD); - // recipient inputs - self.add_map_entry(recipient.inputs().commitment(), recipient.inputs().to_elements()); + // recipient storage + self.add_map_entry(recipient.storage().commitment(), recipient.storage().to_elements()); // assets commitments - self.add_map_entry(assets.commitment(), assets.to_padded_assets()); + self.add_map_entry(assets.commitment(), assets.to_elements()); // array attachments if let NoteAttachmentContent::Array(array_attachment) = note.metadata().attachment().content() @@ -367,14 +352,14 @@ impl TransactionAdviceInputs { // note details / metadata note_data.extend(recipient.serial_num()); note_data.extend(*recipient.script().root()); - note_data.extend(*recipient.inputs().commitment()); + note_data.extend(*recipient.storage().commitment()); note_data.extend(*assets.commitment()); note_data.extend(*note_arg); - note_data.extend(note.metadata().to_header_word()); note_data.extend(note.metadata().to_attachment_word()); - note_data.push(recipient.inputs().num_values().into()); - note_data.push((assets.num_assets() as u32).into()); - note_data.extend(assets.to_padded_assets()); + note_data.extend(note.metadata().to_header_word()); + note_data.push(Felt::from(recipient.storage().num_items())); + note_data.push(Felt::from(assets.num_assets() as u32)); + note_data.extend(assets.to_elements()); // authentication vs unauthenticated match input_note { @@ -398,7 +383,7 @@ impl TransactionAdviceInputs { note_data.push(block_num.into()); note_data.extend(block_header.sub_commitment()); note_data.extend(block_header.note_root()); - note_data.push(proof.location().node_index_in_block().into()); + note_data.push(Felt::from(proof.location().node_index_in_block())); }, InputNote::Unauthenticated { .. 
} => { // push the `is_authenticated` flag diff --git a/crates/miden-protocol/src/transaction/kernel/memory.rs b/crates/miden-protocol/src/transaction/kernel/memory.rs index 8b33b214ae..87e03ac88b 100644 --- a/crates/miden-protocol/src/transaction/kernel/memory.rs +++ b/crates/miden-protocol/src/transaction/kernel/memory.rs @@ -14,16 +14,16 @@ pub type StorageSlot = u8; // // | Section | Start address | Size in elements | Comment | // | ------------------ | ------------- | ---------------- | ------------------------------------------ | -// | Bookkeeping | 0 | 89 | | +// | Bookkeeping | 0 | 85 | | // | Global inputs | 400 | 40 | | // | Block header | 800 | 44 | | // | Partial blockchain | 1_200 | 132 | | // | Kernel data | 1_600 | 140 | 34 procedures in total, 4 elements each | // | Accounts data | 8_192 | 524_288 | 64 accounts max, 8192 elements each | // | Account delta | 532_480 | 263 | | -// | Input notes | 4_194_304 | 2_162_688 | nullifiers data segment + 1024 input notes | -// | | | | max, 2048 elements each | -// | Output notes | 16_777_216 | 2_097_152 | 1024 output notes max, 2048 elements each | +// | Input notes | 4_194_304 | 3_211_264 | nullifiers data segment (2^16 elements) | +// | | | | + 1024 input notes max, 3072 elements each | +// | Output notes | 16_777_216 | 3_145_728 | 1024 output notes max, 3072 elements each | // | Link Map Memory | 33_554_432 | 33_554_432 | Enough for 2_097_151 key-value pairs | // Relative layout of one account @@ -63,23 +63,39 @@ pub type StorageSlot = u8; pub const ACTIVE_INPUT_NOTE_PTR: MemoryAddress = 0; /// The memory address at which the number of output notes is stored. -pub const NUM_OUTPUT_NOTES_PTR: MemoryAddress = 4; +pub const NUM_OUTPUT_NOTES_PTR: MemoryAddress = 1; -/// The memory address at which the input vault root is stored. -pub const INPUT_VAULT_ROOT_PTR: MemoryAddress = 8; - -/// The memory address at which the output vault root is stored. 
-pub const OUTPUT_VAULT_ROOT_PTR: MemoryAddress = 12; +/// The memory address at which the transaction expiration block number is stored. +pub const TX_EXPIRATION_BLOCK_NUM_PTR: MemoryAddress = 2; /// The memory address at which the dirty flag of the storage commitment of the native account is /// stored. /// /// This binary flag specifies whether the commitment is outdated: it holds 1 if some changes were /// made to the account storage since the last re-computation, and 0 otherwise. -pub const NATIVE_ACCT_STORAGE_COMMITMENT_DIRTY_FLAG_PTR: MemoryAddress = 16; +pub const NATIVE_ACCT_STORAGE_COMMITMENT_DIRTY_FLAG_PTR: MemoryAddress = 3; -/// The memory address at which the transaction expiration block number is stored. -pub const TX_EXPIRATION_BLOCK_NUM_PTR: MemoryAddress = 20; +/// The memory address at which the input vault root is stored. +pub const INPUT_VAULT_ROOT_PTR: MemoryAddress = 4; + +/// The memory address at which the output vault root is stored. +pub const OUTPUT_VAULT_ROOT_PTR: MemoryAddress = 8; + +// Pointer to the suffix and prefix of the ID of the foreign account which will be loaded during the +// upcoming FPI call. This ID is updated during the `prepare_fpi_call` kernel procedure. +pub const UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR: MemoryAddress = 12; +pub const UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR: MemoryAddress = + UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR + 1; + +// Pointer to the 16th input value of the foreign procedure which will be loaded during the upcoming +// FPI call. This "buffer" value helps to work around the 15 value limitation of the +// `exec_kernel_proc` kernel procedure, so that any account procedure, even if it has 16 input +// values, could be executed as foreign. +pub const UPCOMING_FOREIGN_PROC_INPUT_VALUE_15_PTR: MemoryAddress = 14; + +// Pointer to the root of the foreign procedure which will be executed during the upcoming FPI call. +// This root is updated during the `prepare_fpi_call` kernel procedure. 
+pub const UPCOMING_FOREIGN_PROCEDURE_PTR: MemoryAddress = 16; /// The memory address at which the pointer to the stack element containing the pointer to the /// active account data is stored. @@ -92,9 +108,9 @@ pub const TX_EXPIRATION_BLOCK_NUM_PTR: MemoryAddress = 20; /// ┌───────────────┬────────────────┬───────────────────┬─────┬────────────────────┐ /// │ STACK TOP PTR │ NATIVE ACCOUNT │ FOREIGN ACCOUNT 1 │ ... │ FOREIGN ACCOUNT 63 │ /// ├───────────────┼────────────────┼───────────────────┼─────┼────────────────────┤ -/// 24 25 30 88 +/// 20 21 22 84 /// ``` -pub const ACCOUNT_STACK_TOP_PTR: MemoryAddress = 24; +pub const ACCOUNT_STACK_TOP_PTR: MemoryAddress = 20; // GLOBAL INPUTS // ------------------------------------------------------------------------------------------------ @@ -105,8 +121,12 @@ pub const GLOBAL_INPUTS_SECTION_OFFSET: MemoryOffset = 400; /// The memory address at which the commitment of the transaction's reference block is stored. pub const BLOCK_COMMITMENT_PTR: MemoryAddress = 400; -/// The memory address at which the native account ID is stored. -pub const NATIVE_ACCT_ID_PTR: MemoryAddress = 404; +/// The memory address at which the native account ID suffix provided as a global transaction input +/// is stored. +pub const GLOBAL_ACCOUNT_ID_SUFFIX_PTR: MemoryAddress = 404; +/// The memory address at which the native account ID prefix provided as a global transaction input +/// is stored. +pub const GLOBAL_ACCOUNT_ID_PREFIX_PTR: MemoryAddress = GLOBAL_ACCOUNT_ID_SUFFIX_PTR + 1; /// The memory address at which the initial account commitment is stored. pub const INIT_ACCT_COMMITMENT_PTR: MemoryAddress = 408; @@ -174,14 +194,14 @@ pub const TIMESTAMP_IDX: DataIndex = 2; /// The memory address at which the fee parameters are stored. These occupy a double word. pub const FEE_PARAMETERS_PTR: MemoryAddress = 832; +/// The index of the verification base fee within the block fee parameters. 
+pub const VERIFICATION_BASE_FEE_IDX: DataIndex = 1; + /// The index of the native asset ID suffix within the block fee parameters. -pub const NATIVE_ASSET_ID_SUFFIX_IDX: DataIndex = 0; +pub const NATIVE_ASSET_ID_SUFFIX_IDX: DataIndex = 2; /// The index of the native asset ID prefix within the block fee parameters. -pub const NATIVE_ASSET_ID_PREFIX_IDX: DataIndex = 1; - -/// The index of the verification base fee within the block fee parameters. -pub const VERIFICATION_BASE_FEE_IDX: DataIndex = 2; +pub const NATIVE_ASSET_ID_PREFIX_IDX: DataIndex = 3; /// The memory address at which the note root is stored. pub const NOTE_ROOT_PTR: MemoryAddress = 840; @@ -228,12 +248,12 @@ pub const ACCT_ID_AND_NONCE_OFFSET: MemoryOffset = 0; pub const NATIVE_ACCT_ID_AND_NONCE_PTR: MemoryAddress = NATIVE_ACCOUNT_DATA_PTR + ACCT_ID_AND_NONCE_OFFSET; -/// The index of the account ID within the account ID and nonce data. -pub const ACCT_ID_SUFFIX_IDX: DataIndex = 0; -pub const ACCT_ID_PREFIX_IDX: DataIndex = 1; - /// The index of the account nonce within the account ID and nonce data. -pub const ACCT_NONCE_IDX: DataIndex = 3; +pub const ACCT_NONCE_IDX: DataIndex = 0; + +/// The index of the account ID within the account ID and nonce data. +pub const ACCT_ID_SUFFIX_IDX: DataIndex = 2; +pub const ACCT_ID_PREFIX_IDX: DataIndex = 3; /// The offset at which the account vault root is stored relative to the start of the account /// data segment. @@ -324,7 +344,7 @@ pub const NATIVE_ACCT_STORAGE_SLOTS_SECTION_PTR: MemoryAddress = // ================================================================================================ /// The size of the memory segment allocated to each note. 
-pub const NOTE_MEM_SIZE: MemoryAddress = 2048; +pub const NOTE_MEM_SIZE: MemoryAddress = 3072; #[allow(clippy::empty_line_after_outer_attr)] #[rustfmt::skip] @@ -334,33 +354,39 @@ pub const NOTE_MEM_SIZE: MemoryAddress = 2048; // memory offset 4_194_304 with a word containing the total number of input notes and is followed // by note nullifiers and note data like so: // -// ┌─────────┬───────────┬───────────┬─────┬───────────┬─────────┬────────┬────────┬───────┬────────┐ -// │ NUM │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ PADDING │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ -// │ NOTES │ NULLIFIER │ NULLIFIER │ │ NULLIFIER │ │ DATA │ DATA │ │ DATA │ -// └─────────┴───────────┴───────────┴─────┴───────────┴─────────┴────────┴────────┴───────┴────────┘ -// 4_194_304 4_194_308 4_194_312 4_194_304+4(n+1) 4_259_840 +2048 +4096 +2048n +// ┌──────────┬───────────┬───────────┬─────┬────────────────┬─────────┬──────────┬────────┬───────┬────────┐ +// │ NUM │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ PADDING │ NOTE 0 │ NOTE 1 │ ... │ NOTE n │ +// │ NOTES │ NULLIFIER │ NULLIFIER │ │ NULLIFIER │ │ DATA │ DATA │ │ DATA │ +// ├──────────┼───────────┼───────────┼─────┼────────────────┼─────────┼──────────┼────────┼───────┼────────┤ +// 4_194_304 4_194_308 4_194_312 4_194_304+4(n+1) 4_259_840 +3072 +6144 +3072n // // Here `n` represents number of input notes. // -// Each nullifier occupies a single word. A data section for each note consists of exactly 2048 +// Each nullifier occupies a single word. A data section for each note consists of exactly 3072 // elements and is laid out like so: // -// ┌──────┬────────┬────────┬────────┬────────────┬───────────┬──────────┬────────────┬───────┬────────┬────────┬───────┬─────┬───────┬─────────┬ -// │ NOTE │ SERIAL │ SCRIPT │ INPUTS │ ASSETS | RECIPIENT │ METADATA │ ATTACHMENT │ NOTE │ NUM │ NUM │ ASSET │ ... 
│ ASSET │ PADDING │ -// │ ID │ NUM │ ROOT │ HASH │ COMMITMENT | │ HEADER │ │ ARGS │ INPUTS │ ASSETS │ 0 │ │ n │ │ -// ├──────┼────────┼────────┼────────┼────────────┼───────────┼──────────┼────────────┼───────┼────────┼────────┼───────┼─────┼───────┼─────────┤ -// 0 4 8 12 16 20 24 28 32 36 40 44 + 4n +// ┌──────┬────────┬────────┬─────────┬────────────┬───────────┬──────────┬────────────┬───────┬ +// │ NOTE │ SERIAL │ SCRIPT │ STORAGE │ ASSETS │ RECIPIENT │ METADATA │ ATTACHMENT │ NOTE │ +// │ ID │ NUM │ ROOT │ COMM │ COMMITMENT │ │ HEADER │ │ ARGS │ +// ├──────┼────────┼────────┼─────────┼────────────┼───────────┼──────────┼────────────┼───────┼ +// 0 4 8 12 16 20 24 28 32 +// +// ┬─────────┬────────┬───────┬─────────┬─────┬────────┬─────────┬─────────┐ +// │ STORAGE │ NUM │ ASSET │ ASSET │ ... │ ASSET │ ASSET │ PADDING │ +// │ LENGTH │ ASSETS │ KEY 0 │ VALUE 0 │ │ KEY n │ VALUE n │ │ +// ┼─────────┼────────┼───────┼─────────┼─────┼────────┼─────────┼─────────┘ +// 36 40 44 48 44 + 8n 48 + 8n // -// - NUM_INPUTS is encoded as [num_inputs, 0, 0, 0]. +// - NUM_STORAGE_ITEMS is encoded as [num_storage_items, 0, 0, 0]. // - NUM_ASSETS is encoded as [num_assets, 0, 0, 0]. -// - INPUTS_COMMITMENT is the key to look up note inputs in the advice map. +// - STORAGE_COMMITMENT is the key to look up note storage in the advice map. // - ASSETS_COMMITMENT is the key to look up note assets in the advice map. // -// Notice that note input values are not loaded to the memory, only their length. In order to obtain -// the input values the advice map should be used: they are stored there as -// `INPUTS_COMMITMENT -> INPUTS`. +// Notice that note storage items are not loaded to the memory, only their length. In order to obtain +// the storage values the advice map should be used: they are stored there as +// `STORAGE_COMMITMENT -> STORAGE`.
// -// As opposed to the asset values, input values are never used in kernel memory, so their presence +// As opposed to the asset values, storage items are never used in kernel memory, so their presence // there is unnecessary. /// The memory address at which the input note section begins. @@ -379,13 +405,13 @@ pub const NUM_INPUT_NOTES_PTR: MemoryAddress = INPUT_NOTE_SECTION_PTR; pub const INPUT_NOTE_ID_OFFSET: MemoryOffset = 0; pub const INPUT_NOTE_SERIAL_NUM_OFFSET: MemoryOffset = 4; pub const INPUT_NOTE_SCRIPT_ROOT_OFFSET: MemoryOffset = 8; -pub const INPUT_NOTE_INPUTS_COMMITMENT_OFFSET: MemoryOffset = 12; +pub const INPUT_NOTE_STORAGE_COMMITMENT_OFFSET: MemoryOffset = 12; pub const INPUT_NOTE_ASSETS_COMMITMENT_OFFSET: MemoryOffset = 16; pub const INPUT_NOTE_RECIPIENT_OFFSET: MemoryOffset = 20; pub const INPUT_NOTE_METADATA_HEADER_OFFSET: MemoryOffset = 24; pub const INPUT_NOTE_ATTACHMENT_OFFSET: MemoryOffset = 28; pub const INPUT_NOTE_ARGS_OFFSET: MemoryOffset = 32; -pub const INPUT_NOTE_NUM_INPUTS_OFFSET: MemoryOffset = 36; +pub const INPUT_NOTE_NUM_STORAGE_ITEMS_OFFSET: MemoryOffset = 36; pub const INPUT_NOTE_NUM_ASSETS_OFFSET: MemoryOffset = 40; pub const INPUT_NOTE_ASSETS_OFFSET: MemoryOffset = 44; @@ -399,27 +425,29 @@ pub const INPUT_NOTE_ASSETS_OFFSET: MemoryOffset = 44; // ┌─────────────┬─────────────┬───────────────┬─────────────┐ // │ NOTE 0 DATA │ NOTE 1 DATA │ ... │ NOTE n DATA │ // └─────────────┴─────────────┴───────────────┴─────────────┘ -// 16_777_216 +2048 +4096 +2048n +// 16_777_216 +3072 +6144 +3072n // // The total number of output notes for a transaction is stored in the bookkeeping section of the // memory. Data section of each note is laid out like so: // -// ┌──────┬──────────┬────────────┬───────────┬────────────┬────────────────┬─────────┬─────┬─────────┬─────────┐ -// │ NOTE │ METADATA │ METADATA │ RECIPIENT │ ASSETS │ NUM ASSETS │ ASSET 0 │ ... 
│ ASSET n │ PADDING │ -// | ID | HEADER | ATTACHMENT | | COMMITMENT | AND DIRTY FLAG | | | | | -// ├──────┼──────────┼────────────┼───────────┼────────────┼────────────────┼─────────┼─────┼─────────┼─────────┤ -// 0 1 2 3 4 5 6 6 + n +// ┌──────┬──────────┬────────────┬───────────┬────────────┬────────┬───────┬ +// │ NOTE │ METADATA │ METADATA │ RECIPIENT │ ASSETS │ NUM │ DIRTY │ +// │ ID │ HEADER │ ATTACHMENT │ │ COMMITMENT │ ASSETS │ FLAG │ +// ├──────┼──────────┼────────────┼───────────┼────────────┼────────┼───────┼ +// 0 4 8 12 16 20 21 // -// The NUM_ASSETS_AND_DIRTY_FLAG word has the following layout: -// `[num_assets, assets_commitment_dirty_flag, 0, 0]`, where: -// - `num_assets` is the number of assets in this output note. -// - `assets_commitment_dirty_flag` is the binary flag which specifies whether the assets commitment -// stored in this note is outdated. It holds 1 if some changes were made to the note assets since -// the last re-computation, and 0 otherwise. +// ┬───────┬─────────┬─────┬────────┬─────────┬─────────┐ +// │ ASSET │ ASSET │ ... │ ASSET │ ASSET │ PADDING │ +// │ KEY 0 │ VALUE 0 │ │ KEY n │ VALUE n │ │ +// ┼───────┼─────────┼─────┼────────┼─────────┼─────────┘ +// 24 28 24 + 8n 28 + 8n // -// Dirty flag is set to 0 after every recomputation of the assets commitment in the -// `kernel::note::compute_output_note_assets_commitment` procedure. It is set to 1 in the -// `kernel::output_note::add_asset` procedure after any change was made to the assets data. +// The DIRTY_FLAG is the binary flag which specifies whether the assets commitment stored in this +// note is outdated. It holds 1 if some changes were made to the note assets since the last +// re-computation, and 0 otherwise. +// It is set to 0 after every recomputation of the assets commitment in the +// `$kernel::note::compute_output_note_assets_commitment` procedure. It is set to 1 in the +// `$kernel::output_note::add_asset` procedure after any change was made to the assets data. 
/// The memory address at which the output notes section begins. pub const OUTPUT_NOTE_SECTION_OFFSET: MemoryOffset = 16_777_216; @@ -434,6 +462,17 @@ pub const OUTPUT_NOTE_NUM_ASSETS_OFFSET: MemoryOffset = 20; pub const OUTPUT_NOTE_DIRTY_FLAG_OFFSET: MemoryOffset = 21; pub const OUTPUT_NOTE_ASSETS_OFFSET: MemoryOffset = 24; +// ASSETS +// ------------------------------------------------------------------------------------------------ + +/// The size of an asset's memory representation. +#[cfg(any(feature = "testing", test))] +pub const ASSET_SIZE: MemoryOffset = 8; + +/// The offset of the asset value in an asset's memory representation. +#[cfg(any(feature = "testing", test))] +pub const ASSET_VALUE_OFFSET: MemoryOffset = 4; + // LINK MAP // ------------------------------------------------------------------------------------------------ diff --git a/crates/miden-protocol/src/transaction/kernel/mod.rs b/crates/miden-protocol/src/transaction/kernel/mod.rs index 334376165a..adda2421b6 100644 --- a/crates/miden-protocol/src/transaction/kernel/mod.rs +++ b/crates/miden-protocol/src/transaction/kernel/mod.rs @@ -1,4 +1,3 @@ -use alloc::string::ToString; use alloc::sync::Arc; use alloc::vec::Vec; @@ -14,13 +13,15 @@ use crate::block::BlockNumber; use crate::crypto::SequentialCommit; use crate::errors::TransactionOutputError; use crate::protocol::ProtocolLib; -use crate::transaction::{OutputNote, OutputNotes, TransactionInputs, TransactionOutputs}; +use crate::transaction::{RawOutputNote, RawOutputNotes, TransactionInputs, TransactionOutputs}; use crate::utils::serde::Deserializable; use crate::utils::sync::LazyLock; use crate::vm::{AdviceInputs, Program, ProgramInfo, StackInputs, StackOutputs}; use crate::{Felt, Hasher, Word}; -mod procedures; +mod procedures { + include!(concat!(env!("OUT_DIR"), "/procedures.rs")); +} pub mod memory; @@ -160,7 +161,7 @@ impl TransactionKernel { /// BLOCK_COMMITMENT, /// INITIAL_ACCOUNT_COMMITMENT, /// INPUT_NOTES_COMMITMENT, - /// 
account_id_prefix, account_id_suffix, block_num + /// account_id_suffix, account_id_prefix, block_num /// ] /// ``` /// @@ -181,15 +182,14 @@ impl TransactionKernel { ) -> StackInputs { // Note: Must be kept in sync with the transaction's kernel prepare_transaction procedure let mut inputs: Vec = Vec::with_capacity(14); - inputs.push(Felt::from(block_num)); + inputs.extend_from_slice(block_commitment.as_elements()); + inputs.extend_from_slice(initial_account_commitment.as_elements()); + inputs.extend(input_notes_commitment); inputs.push(account_id.suffix()); inputs.push(account_id.prefix().as_felt()); - inputs.extend(input_notes_commitment); - inputs.extend_from_slice(initial_account_commitment.as_elements()); - inputs.extend_from_slice(block_commitment.as_elements()); - StackInputs::new(inputs) - .map_err(|e| e.to_string()) - .expect("Invalid stack input") + inputs.push(Felt::from(block_num)); + + StackInputs::new(&inputs).expect("number of stack inputs should be <= 16") } /// Builds the stack for expected transaction execution outputs. 
@@ -199,8 +199,7 @@ impl TransactionKernel { /// [ /// OUTPUT_NOTES_COMMITMENT, /// ACCOUNT_UPDATE_COMMITMENT, - /// FEE_ASSET, - /// expiration_block_num, + /// native_asset_id_suffix, native_asset_id_prefix, fee_amount, expiration_block_num /// ] /// ``` /// @@ -219,15 +218,16 @@ impl TransactionKernel { ) -> StackOutputs { let account_update_commitment = Hasher::merge(&[final_account_commitment, account_delta_commitment]); - let mut outputs: Vec = Vec::with_capacity(9); - outputs.push(Felt::from(expiration_block_num)); - outputs.extend(Word::from(fee)); - outputs.extend(account_update_commitment); + + let mut outputs: Vec = Vec::with_capacity(12); outputs.extend(output_notes_commitment); - outputs.reverse(); - StackOutputs::new(outputs) - .map_err(|e| e.to_string()) - .expect("Invalid stack output") + outputs.extend(account_update_commitment); + outputs.push(fee.faucet_id().suffix()); + outputs.push(fee.faucet_id().prefix().as_felt()); + outputs.push(Felt::try_from(fee.amount()).expect("amount should fit into felt")); + outputs.push(Felt::from(expiration_block_num)); + + StackOutputs::new(&outputs).expect("number of stack inputs should be <= 16") } /// Extracts transaction output data from the provided stack outputs. 
@@ -261,22 +261,28 @@ impl TransactionKernel { stack: &StackOutputs, // FIXME TODO add an extension trait for this one ) -> Result<(Word, Word, FungibleAsset, BlockNumber), TransactionOutputError> { let output_notes_commitment = stack - .get_stack_word_be(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX * 4) + .get_word(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX) .expect("output_notes_commitment (first word) missing"); let account_update_commitment = stack - .get_stack_word_be(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX * 4) + .get_word(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX) .expect("account_update_commitment (second word) missing"); - let fee = stack - .get_stack_word_be(TransactionOutputs::FEE_ASSET_WORD_IDX * 4) - .expect("fee_asset (third word) missing"); + let native_asset_id_prefix = stack + .get_element(TransactionOutputs::NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX) + .expect("native_asset_id_prefix missing"); + let native_asset_id_suffix = stack + .get_element(TransactionOutputs::NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX) + .expect("native_asset_id_suffix missing"); + let fee_amount = stack + .get_element(TransactionOutputs::FEE_AMOUNT_ELEMENT_IDX) + .expect("fee_amount missing"); let expiration_block_num = stack - .get_stack_item(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .expect("tx_expiration_block_num (element on index 12) missing"); + .get_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) + .expect("tx_expiration_block_num missing"); - let expiration_block_num = u32::try_from(expiration_block_num.as_int()) + let expiration_block_num = u32::try_from(expiration_block_num.as_canonical_u64()) .map_err(|_| { TransactionOutputError::OutputStackInvalid( "expiration block number should be smaller than u32::MAX".into(), @@ -286,7 +292,7 @@ impl TransactionKernel { // Make sure that indices 13, 14 and 15 are zeroes (i.e. the fourth word without the // expiration block number). 
- if stack.get_stack_word_be(12).expect("fourth word missing").as_elements()[..3] + if stack.get_word(12).expect("fourth word missing").as_elements()[..3] != Word::empty().as_elements()[..3] { return Err(TransactionOutputError::OutputStackInvalid( @@ -294,7 +300,10 @@ impl TransactionKernel { )); } - let fee = FungibleAsset::try_from(fee) + let native_asset_id = + AccountId::try_from_elements(native_asset_id_suffix, native_asset_id_prefix) + .expect("native asset ID should be validated by the tx kernel"); + let fee = FungibleAsset::new(native_asset_id, fee_amount.as_canonical_u64()) .map_err(TransactionOutputError::FeeAssetNotFungibleAsset)?; Ok((output_notes_commitment, account_update_commitment, fee, expiration_block_num)) @@ -332,7 +341,7 @@ impl TransactionKernel { pub fn from_transaction_parts( stack: &StackOutputs, advice_inputs: &AdviceInputs, - output_notes: Vec, + output_notes: Vec, ) -> Result { let (output_notes_commitment, account_update_commitment, fee, expiration_block_num) = Self::parse_output_stack(stack)?; @@ -350,7 +359,7 @@ impl TransactionKernel { .map_err(TransactionOutputError::FinalAccountHeaderParseFailure)?; // validate output notes - let output_notes = OutputNotes::new(output_notes)?; + let output_notes = RawOutputNotes::new(output_notes)?; if output_notes_commitment != output_notes.commitment() { return Err(TransactionOutputError::OutputNotesCommitmentInconsistent { actual: output_notes.commitment(), diff --git a/crates/miden-protocol/src/transaction/kernel/procedures.rs b/crates/miden-protocol/src/transaction/kernel/procedures.rs deleted file mode 100644 index 60c5fb4349..0000000000 --- a/crates/miden-protocol/src/transaction/kernel/procedures.rs +++ /dev/null @@ -1,116 +0,0 @@ -// This file is generated by build.rs, do not modify - -use crate::{Word, word}; - -// KERNEL PROCEDURES -// ================================================================================================ - -/// Hashes of all dynamically executed kernel 
procedures. -pub const KERNEL_PROCEDURES: [Word; 53] = [ - // account_get_initial_commitment - word!("0x1de52b747e823a098f3e146cf2e2b7c3f585a4424ec54c9022414d9ca2574375"), - // account_compute_commitment - word!("0xdcb7c06bc7617d49bcda33d1753e327cc744b9f83e2019d61eca016b33c527a7"), - // account_get_id - word!("0xd76288f2e94b9e6a8f7eeee45c4ee0a23997d78496f6132e3f55681efea809c4"), - // account_get_nonce - word!("0x4a1f11db21ddb1f0ebf7c9fd244f896a95e99bb136008185da3e7d6aa85827a3"), - // account_incr_nonce - word!("0xd1316d21f95e20385283ac72914585a4a3281ff69935540d130fd1ace25ec9ae"), - // account_get_code_commitment - word!("0x8c044d53df35c71425c806b8997c6541e0aac70d7880657e9354b9854626ce3e"), - // account_get_initial_storage_commitment - word!("0x91377c2852feb7a2798e54d7dbaa2d97000270ec4c0d0888b26d720a25ae0e84"), - // account_compute_storage_commitment - word!("0x8732c9765d2b35f0d9f26dcec349f18a7234c9e988057a14e358f97ea123cb5f"), - // account_get_item - word!("0xdc6917a6d797c0717a56255e3e94e6d4f1317e92862c7331c3070d82402828ec"), - // account_get_initial_item - word!("0x1853416c007dc75de04c25aaf2376fa7e98d6c010a46bb90e504491f5634ee12"), - // account_set_item - word!("0x29392c01f8953e4e4f6dd8eba69c53bd5f4ff7f54beeaab2e86d8ef7c8d982a0"), - // account_get_map_item - word!("0x41e4f17b24281fbb05279fbba5ece3a5181055217be0d1d33cc44b71b6d19a23"), - // account_get_initial_map_item - word!("0x30f13f40cd4de71c1fae252218e27138e0b406a26e9c22cfb0219633fef9de23"), - // account_set_map_item - word!("0x722b97307928cda6600f5ed17a5c55c8130200520ff076b08f0f77706f6e81ea"), - // account_get_initial_vault_root - word!("0x46297d9ac95afd60c7ef1a065e024ad49aa4c019f6b3924191905449b244d4ec"), - // account_get_vault_root - word!("0x42a2bfb8eac4fce9bbf75ea15215b00729faeeaf7fff784692948d3f618a9bb7"), - // account_add_asset - word!("0x9a63b5385a8ac6404306852c13b1f7c2625449ce6afde6c14d27701e9e3fb550"), - // account_remove_asset - 
word!("0x2ac6fa0e12e581a59edbf7ab0995033aa308dbf77ecaf9b9215ebcf27da5a942"), - // account_get_balance - word!("0x6a5eb788fd2beec7555874f978a4dd2f2c4f5d8088cd33e148c61450e4510fe1"), - // account_get_initial_balance - word!("0x2e0decbc35a10c15ba72c14ed3e32dc9d4a3866f66114c073b3fc7b127362b74"), - // account_has_non_fungible_asset - word!("0xffe57961158c8e5f8a3aaa773943ee208fac7ed4786a7c8b6fed04ba54f39111"), - // account_compute_delta_commitment - word!("0x09767ee5e29aeca91a57f3af3871bbfb3037681e193444b3f7af878894c1aaa3"), - // account_get_num_procedures - word!("0x53b5ec38b7841948762c258010e6e07ad93963bcaac2d83813f8edb6710dc720"), - // account_get_procedure_root - word!("0xa45796077477599813ea54d88a83dfca3e64b1c112117970d63a1c24f7cfef5a"), - // account_was_procedure_called - word!("0xd9e08c27f3f1e7d01d257cbb4583ecf14f5e362d9c19e39ada8bc19dcc820d45"), - // account_has_procedure - word!("0xb0b63fdd01af0bcb4aacb2412e934cdc7691308647152d416c7ae4fc909da076"), - // faucet_mint_asset - word!("0x3d533697caf8b2522507c33b98af7652f3eb2e4f5d29d64f3d147af07ed2c494"), - // faucet_burn_asset - word!("0xcad46a403b78da4082d3e256025fd427e19b3fbc261e062cd5b2f853617311a7"), - // faucet_get_total_fungible_asset_issuance - word!("0x0953a2f2ec88ad0b7008c3d71aca46ebfcbb58a8ffdf59390616497c6693e8ab"), - // faucet_is_non_fungible_asset_issued - word!("0x9d9d5ec39551a1ab1c063a6dae9f3633195c89e8e31bbb6b217957f0ea8323c2"), - // input_note_get_metadata - word!("0x447b342e38855a9402cde0ea52ecb5e4c1fe542b535a5364cb5caa8e94c82442"), - // input_note_get_assets_info - word!("0xe0817bed99fb61180e705b2c9e5ca8c8f0c62864953247a56acbc65b7d58c2d5"), - // input_note_get_script_root - word!("0x527036257e58c3a84cf0aa170fb3f219a4553db17d269279355ad164a2b90ac5"), - // input_note_get_inputs_info - word!("0xb7f45ec34f7708355551dcf1f82c9c40e2c19252f8d5c98dcf9ef1aa0a3eb878"), - // input_note_get_serial_number - word!("0x25815e02b7976d8e5c297dde60d372cc142c81f702f424ac0920190528c547ee"), - // input_note_get_recipient 
- word!("0xd3c255177f9243bb1a523a87615bbe76dd5a3605fcae87eb9d3a626d4ecce33c"), - // output_note_create - word!("0xf6d7790691427c5d54ac9dd3b7ed1bb587fa6e864bdf7ba372f022a45c7caa47"), - // output_note_get_metadata - word!("0x3db8427f20eb70603b72aa574a986506eb7216312004aeaf8b2a7e55d0049a48"), - // output_note_get_assets_info - word!("0xbbf90ac2a7e16ee6d2336f7389d5b972bf0c1fa9938405945b85f948bf24fc4f"), - // output_note_get_recipient - word!("0x1ce137f0c5be72832970e6c818968a789f65b97db34515bfebb767705f28db67"), - // output_note_add_asset - word!("0xaf22383e4390f4f15a429768f79aa445f8a535bb21b0807172b9ef2de063d9d1"), - // output_note_set_attachment - word!("0x800ab6457b20be22a721d61770ab493334004d2b5d01a6bbd245e49554c31a2c"), - // tx_get_num_input_notes - word!("0xfcc186d4b65c584f3126dda1460b01eef977efd76f9e36f972554af28e33c685"), - // tx_get_input_notes_commitment - word!("0xc3a334434daa7d4ea15e1b2cb1a8000ad757f9348560a7246336662b77b0d89a"), - // tx_get_num_output_notes - word!("0x2511fca9c078cd96e526fd488d1362cbfd597eb3db8452aedb00beffee9782b4"), - // tx_get_output_notes_commitment - word!("0x8b9b29c837b5d0834f550d7f32703b35e2ff014b523dd581a09a0b94a925fcec"), - // tx_get_block_commitment - word!("0xe474b491a64d222397fcf83ee5db7b048061988e5e83ce99b91bae6fd75a3522"), - // tx_get_block_number - word!("0x297797dff54b8108dd2df254b95d43895d3f917ab10399efc62adaf861c905ae"), - // tx_get_block_timestamp - word!("0x7903185b847517debb6c2072364e3e757b99ee623e97c2bd0a4661316c5c5418"), - // tx_start_foreign_context - word!("0x753c9c0afcbd36fc7b7ba1e8dc7b668c8aa6036ae8cf5c891587f10117669d76"), - // tx_end_foreign_context - word!("0xaa0018aa8da890b73511879487f65553753fb7df22de380dd84c11e6f77eec6f"), - // tx_get_expiration_delta - word!("0xa60286e820a755128b2269db5057b0e2d9b79fef6f813bf3fe3337553a8fbb53"), - // tx_update_expiration_block_delta - word!("0xa16440a9a8cd2a6d0ff7f5c3bcce2958976e5d3e6e8a6935ff40ae1863c324f0"), -]; diff --git 
a/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs b/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs index 1b6373bbf4..3de0025779 100644 --- a/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs +++ b/crates/miden-protocol/src/transaction/kernel/tx_event_id.rs @@ -1,8 +1,7 @@ use core::fmt; -use miden_core::EventId; - use crate::errors::TransactionEventError; +use crate::vm::EventId; // CONSTANTS // ================================================================================================ @@ -28,9 +27,7 @@ pub enum TransactionEventId { AccountVaultBeforeRemoveAsset = ACCOUNT_VAULT_BEFORE_REMOVE_ASSET, AccountVaultAfterRemoveAsset = ACCOUNT_VAULT_AFTER_REMOVE_ASSET, - AccountVaultBeforeGetBalance = ACCOUNT_VAULT_BEFORE_GET_BALANCE, - - AccountVaultBeforeHasNonFungibleAsset = ACCOUNT_VAULT_BEFORE_HAS_NON_FUNGIBLE_ASSET, + AccountVaultBeforeGetAsset = ACCOUNT_VAULT_BEFORE_GET_ASSET, AccountStorageBeforeSetItem = ACCOUNT_STORAGE_BEFORE_SET_ITEM, AccountStorageAfterSetItem = ACCOUNT_STORAGE_AFTER_SET_ITEM, @@ -106,7 +103,7 @@ impl TryFrom for TransactionEventId { type Error = TransactionEventError; fn try_from(event_id: EventId) -> Result { - let raw = event_id.as_felt().as_int(); + let raw = event_id.as_felt().as_canonical_u64(); let name = EVENT_NAME_LUT.get(&raw).copied(); @@ -123,13 +120,7 @@ impl TryFrom for TransactionEventId { Ok(TransactionEventId::AccountVaultAfterRemoveAsset) }, - ACCOUNT_VAULT_BEFORE_GET_BALANCE => { - Ok(TransactionEventId::AccountVaultBeforeGetBalance) - }, - - ACCOUNT_VAULT_BEFORE_HAS_NON_FUNGIBLE_ASSET => { - Ok(TransactionEventId::AccountVaultBeforeHasNonFungibleAsset) - }, + ACCOUNT_VAULT_BEFORE_GET_ASSET => Ok(TransactionEventId::AccountVaultBeforeGetAsset), ACCOUNT_STORAGE_BEFORE_SET_ITEM => Ok(TransactionEventId::AccountStorageBeforeSetItem), ACCOUNT_STORAGE_AFTER_SET_ITEM => Ok(TransactionEventId::AccountStorageAfterSetItem), diff --git a/crates/miden-protocol/src/transaction/mod.rs 
b/crates/miden-protocol/src/transaction/mod.rs index feef2fc878..977155e755 100644 --- a/crates/miden-protocol/src/transaction/mod.rs +++ b/crates/miden-protocol/src/transaction/mod.rs @@ -19,14 +19,18 @@ pub use executed_tx::{ExecutedTransaction, TransactionMeasurements}; pub use inputs::{AccountInputs, InputNote, InputNotes, ToInputNoteCommitments, TransactionInputs}; pub use kernel::{TransactionAdviceInputs, TransactionEventId, TransactionKernel, memory}; pub use ordered_transactions::OrderedTransactionHeaders; -pub use outputs::{OutputNote, OutputNotes, TransactionOutputs}; -pub use partial_blockchain::PartialBlockchain; -pub use proven_tx::{ - InputNoteCommitment, - ProvenTransaction, - ProvenTransactionBuilder, - TxAccountUpdate, +pub use outputs::{ + OutputNote, + OutputNoteCollection, + OutputNotes, + PrivateNoteHeader, + PublicOutputNote, + RawOutputNote, + RawOutputNotes, + TransactionOutputs, }; +pub use partial_blockchain::PartialBlockchain; +pub use proven_tx::{InputNoteCommitment, ProvenTransaction, TxAccountUpdate}; pub use transaction_id::TransactionId; pub use tx_args::{TransactionArgs, TransactionScript}; pub use tx_header::TransactionHeader; diff --git a/crates/miden-protocol/src/transaction/ordered_transactions.rs b/crates/miden-protocol/src/transaction/ordered_transactions.rs index 7f8f805e16..fbf32c8e72 100644 --- a/crates/miden-protocol/src/transaction/ordered_transactions.rs +++ b/crates/miden-protocol/src/transaction/ordered_transactions.rs @@ -2,7 +2,13 @@ use alloc::vec::Vec; use crate::account::AccountId; use crate::transaction::{TransactionHeader, TransactionId}; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Hasher, Word, ZERO}; // ORDERED TRANSACTION HEADERS diff --git a/crates/miden-protocol/src/transaction/outputs.rs 
b/crates/miden-protocol/src/transaction/outputs.rs deleted file mode 100644 index 179da30367..0000000000 --- a/crates/miden-protocol/src/transaction/outputs.rs +++ /dev/null @@ -1,371 +0,0 @@ -use alloc::collections::BTreeSet; -use alloc::string::ToString; -use alloc::vec::Vec; -use core::fmt::Debug; - -use crate::account::AccountHeader; -use crate::asset::FungibleAsset; -use crate::block::BlockNumber; -use crate::errors::TransactionOutputError; -use crate::note::{ - Note, - NoteAssets, - NoteHeader, - NoteId, - NoteMetadata, - NoteRecipient, - PartialNote, - compute_note_commitment, -}; -use crate::utils::serde::{ - ByteReader, - ByteWriter, - Deserializable, - DeserializationError, - Serializable, -}; -use crate::{Felt, Hasher, MAX_OUTPUT_NOTES_PER_TX, Word}; - -// TRANSACTION OUTPUTS -// ================================================================================================ - -/// Describes the result of executing a transaction. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct TransactionOutputs { - /// Information related to the account's final state. - pub account: AccountHeader, - /// The commitment to the delta computed by the transaction kernel. - pub account_delta_commitment: Word, - /// Set of output notes created by the transaction. - pub output_notes: OutputNotes, - /// The fee of the transaction. - pub fee: FungibleAsset, - /// Defines up to which block the transaction is considered valid. - pub expiration_block_num: BlockNumber, -} - -impl TransactionOutputs { - // CONSTANTS - // -------------------------------------------------------------------------------------------- - - /// The index of the word at which the final account nonce is stored on the output stack. - pub const OUTPUT_NOTES_COMMITMENT_WORD_IDX: usize = 0; - - /// The index of the word at which the account update commitment is stored on the output stack. 
- pub const ACCOUNT_UPDATE_COMMITMENT_WORD_IDX: usize = 1; - - /// The index of the word at which the fee asset is stored on the output stack. - pub const FEE_ASSET_WORD_IDX: usize = 2; - - /// The index of the item at which the expiration block height is stored on the output stack. - pub const EXPIRATION_BLOCK_ELEMENT_IDX: usize = 12; -} - -impl Serializable for TransactionOutputs { - fn write_into(&self, target: &mut W) { - self.account.write_into(target); - self.account_delta_commitment.write_into(target); - self.output_notes.write_into(target); - self.fee.write_into(target); - self.expiration_block_num.write_into(target); - } -} - -impl Deserializable for TransactionOutputs { - fn read_from(source: &mut R) -> Result { - let account = AccountHeader::read_from(source)?; - let account_delta_commitment = Word::read_from(source)?; - let output_notes = OutputNotes::read_from(source)?; - let fee = FungibleAsset::read_from(source)?; - let expiration_block_num = BlockNumber::read_from(source)?; - - Ok(Self { - account, - account_delta_commitment, - output_notes, - fee, - expiration_block_num, - }) - } -} - -// OUTPUT NOTES -// ================================================================================================ - -/// Contains a list of output notes of a transaction. The list can be empty if the transaction does -/// not produce any notes. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OutputNotes { - notes: Vec, - commitment: Word, -} - -impl OutputNotes { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns new [OutputNotes] instantiated from the provide vector of notes. - /// - /// # Errors - /// Returns an error if: - /// - The total number of notes is greater than [`MAX_OUTPUT_NOTES_PER_TX`]. - /// - The vector of notes contains duplicates. 
- pub fn new(notes: Vec) -> Result { - if notes.len() > MAX_OUTPUT_NOTES_PER_TX { - return Err(TransactionOutputError::TooManyOutputNotes(notes.len())); - } - - let mut seen_notes = BTreeSet::new(); - for note in notes.iter() { - if !seen_notes.insert(note.id()) { - return Err(TransactionOutputError::DuplicateOutputNote(note.id())); - } - } - - let commitment = Self::compute_commitment(notes.iter().map(OutputNote::header)); - - Ok(Self { notes, commitment }) - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns the commitment to the output notes. - /// - /// The commitment is computed as a sequential hash of (hash, metadata) tuples for the notes - /// created in a transaction. - pub fn commitment(&self) -> Word { - self.commitment - } - /// Returns total number of output notes. - pub fn num_notes(&self) -> usize { - self.notes.len() - } - - /// Returns true if this [OutputNotes] does not contain any notes. - pub fn is_empty(&self) -> bool { - self.notes.is_empty() - } - - /// Returns a reference to the note located at the specified index. - pub fn get_note(&self, idx: usize) -> &OutputNote { - &self.notes[idx] - } - - // ITERATORS - // -------------------------------------------------------------------------------------------- - - /// Returns an iterator over notes in this [OutputNotes]. - pub fn iter(&self) -> impl Iterator { - self.notes.iter() - } - - // HELPERS - // -------------------------------------------------------------------------------------------- - - /// Computes a commitment to output notes. - /// - /// - For an empty list, [`Word::empty`] is returned. - /// - For a non-empty list of notes, this is a sequential hash of (note_id, metadata_commitment) - /// tuples for the notes created in a transaction, where `metadata_commitment` is the return - /// value of [`NoteMetadata::to_commitment`]. 
- pub(crate) fn compute_commitment<'header>( - notes: impl ExactSizeIterator, - ) -> Word { - if notes.len() == 0 { - return Word::empty(); - } - - let mut elements: Vec = Vec::with_capacity(notes.len() * 8); - for note_header in notes { - elements.extend_from_slice(note_header.id().as_elements()); - elements.extend_from_slice(note_header.metadata().to_commitment().as_elements()); - } - - Hasher::hash_elements(&elements) - } -} - -// SERIALIZATION -// ------------------------------------------------------------------------------------------------ - -impl Serializable for OutputNotes { - fn write_into(&self, target: &mut W) { - // assert is OK here because we enforce max number of notes in the constructor - assert!(self.notes.len() <= u16::MAX.into()); - target.write_u16(self.notes.len() as u16); - target.write_many(&self.notes); - } -} - -impl Deserializable for OutputNotes { - fn read_from(source: &mut R) -> Result { - let num_notes = source.read_u16()?; - let notes = source.read_many::(num_notes.into())?; - Self::new(notes).map_err(|err| DeserializationError::InvalidValue(err.to_string())) - } -} - -// OUTPUT NOTE -// ================================================================================================ - -const FULL: u8 = 0; -const PARTIAL: u8 = 1; -const HEADER: u8 = 2; - -/// The types of note outputs supported by the transaction kernel. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum OutputNote { - Full(Note), - Partial(PartialNote), - Header(NoteHeader), -} - -impl OutputNote { - /// The assets contained in the note. - pub fn assets(&self) -> Option<&NoteAssets> { - match self { - OutputNote::Full(note) => Some(note.assets()), - OutputNote::Partial(note) => Some(note.assets()), - OutputNote::Header(_) => None, - } - } - - /// Unique note identifier. - /// - /// This value is both an unique identifier and a commitment to the note. 
- pub fn id(&self) -> NoteId { - match self { - OutputNote::Full(note) => note.id(), - OutputNote::Partial(note) => note.id(), - OutputNote::Header(note) => note.id(), - } - } - - /// Returns the recipient of the processed [`Full`](OutputNote::Full) output note, [`None`] if - /// the note type is not [`Full`](OutputNote::Full). - /// - /// See [crate::note::NoteRecipient] for more details. - pub fn recipient(&self) -> Option<&NoteRecipient> { - match self { - OutputNote::Full(note) => Some(note.recipient()), - OutputNote::Partial(_) => None, - OutputNote::Header(_) => None, - } - } - - /// Returns the recipient digest of the processed [`Full`](OutputNote::Full) or - /// [`Partial`](OutputNote::Partial) output note. Returns [`None`] if the note type is - /// [`Header`](OutputNote::Header). - /// - /// See [crate::note::NoteRecipient] for more details. - pub fn recipient_digest(&self) -> Option { - match self { - OutputNote::Full(note) => Some(note.recipient().digest()), - OutputNote::Partial(note) => Some(note.recipient_digest()), - OutputNote::Header(_) => None, - } - } - - /// Note's metadata. - pub fn metadata(&self) -> &NoteMetadata { - match self { - OutputNote::Full(note) => note.metadata(), - OutputNote::Partial(note) => note.metadata(), - OutputNote::Header(note) => note.metadata(), - } - } - - /// Erase private note information. - /// - /// Specifically: - /// - Full private notes are converted into note headers. - /// - All partial notes are converted into note headers. - pub fn shrink(&self) -> Self { - match self { - OutputNote::Full(note) if note.metadata().is_private() => { - OutputNote::Header(note.header().clone()) - }, - OutputNote::Partial(note) => OutputNote::Header(note.header().clone()), - _ => self.clone(), - } - } - - /// Returns a reference to the [`NoteHeader`] of this note. 
- pub fn header(&self) -> &NoteHeader { - match self { - OutputNote::Full(note) => note.header(), - OutputNote::Partial(note) => note.header(), - OutputNote::Header(header) => header, - } - } - - /// Returns a commitment to the note and its metadata. - /// - /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) - pub fn commitment(&self) -> Word { - compute_note_commitment(self.id(), self.metadata()) - } -} - -// SERIALIZATION -// ------------------------------------------------------------------------------------------------ - -impl Serializable for OutputNote { - fn write_into(&self, target: &mut W) { - match self { - OutputNote::Full(note) => { - target.write(FULL); - target.write(note); - }, - OutputNote::Partial(note) => { - target.write(PARTIAL); - target.write(note); - }, - OutputNote::Header(note) => { - target.write(HEADER); - target.write(note); - }, - } - } -} - -impl Deserializable for OutputNote { - fn read_from(source: &mut R) -> Result { - match source.read_u8()? { - FULL => Ok(OutputNote::Full(Note::read_from(source)?)), - PARTIAL => Ok(OutputNote::Partial(PartialNote::read_from(source)?)), - HEADER => Ok(OutputNote::Header(NoteHeader::read_from(source)?)), - v => Err(DeserializationError::InvalidValue(format!("invalid note type: {v}"))), - } - } -} - -// TESTS -// ================================================================================================ - -#[cfg(test)] -mod output_notes_tests { - use assert_matches::assert_matches; - - use super::OutputNotes; - use crate::Word; - use crate::errors::TransactionOutputError; - use crate::note::Note; - use crate::transaction::OutputNote; - - #[test] - fn test_duplicate_output_notes() -> anyhow::Result<()> { - let mock_note = Note::mock_noop(Word::empty()); - let mock_note_id = mock_note.id(); - let mock_note_clone = mock_note.clone(); - - let error = - OutputNotes::new(vec![OutputNote::Full(mock_note), OutputNote::Full(mock_note_clone)]) - .expect_err("input notes creation should fail"); - - 
assert_matches!(error, TransactionOutputError::DuplicateOutputNote(note_id) if note_id == mock_note_id); - - Ok(()) - } -} diff --git a/crates/miden-protocol/src/transaction/outputs/mod.rs b/crates/miden-protocol/src/transaction/outputs/mod.rs new file mode 100644 index 0000000000..da4e7f2cac --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/mod.rs @@ -0,0 +1,100 @@ +use core::fmt::Debug; + +use crate::Word; +use crate::account::AccountHeader; +use crate::asset::FungibleAsset; +use crate::block::BlockNumber; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; + +mod notes; +pub use notes::{ + OutputNote, + OutputNoteCollection, + OutputNotes, + PrivateNoteHeader, + PublicOutputNote, + RawOutputNote, + RawOutputNotes, +}; + +#[cfg(test)] +mod tests; + +// TRANSACTION OUTPUTS +// ================================================================================================ + +/// Describes the result of executing a transaction. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TransactionOutputs { + /// Information related to the account's final state. + pub account: AccountHeader, + /// The commitment to the delta computed by the transaction kernel. + pub account_delta_commitment: Word, + /// Set of output notes created by the transaction. + pub output_notes: RawOutputNotes, + /// The fee of the transaction. + pub fee: FungibleAsset, + /// Defines up to which block the transaction is considered valid. + pub expiration_block_num: BlockNumber, +} + +impl TransactionOutputs { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// The element index starting from which the output notes commitment is stored on the output + /// stack. + pub const OUTPUT_NOTES_COMMITMENT_WORD_IDX: usize = 0; + + /// The element index starting from which the account update commitment word is stored on the + /// output stack. 
+ pub const ACCOUNT_UPDATE_COMMITMENT_WORD_IDX: usize = 4; + + /// The index of the element at which the ID suffix of the faucet that issues the native asset + /// is stored on the output stack. + pub const NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX: usize = 8; + + /// The index of the element at which the ID prefix of the faucet that issues the native asset + /// is stored on the output stack. + pub const NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX: usize = 9; + + /// The index of the element at which the fee amount is stored on the output stack. + pub const FEE_AMOUNT_ELEMENT_IDX: usize = 10; + + /// The index of the item at which the expiration block height is stored on the output stack. + pub const EXPIRATION_BLOCK_ELEMENT_IDX: usize = 11; +} + +impl Serializable for TransactionOutputs { + fn write_into(&self, target: &mut W) { + self.account.write_into(target); + self.account_delta_commitment.write_into(target); + self.output_notes.write_into(target); + self.fee.write_into(target); + self.expiration_block_num.write_into(target); + } +} + +impl Deserializable for TransactionOutputs { + fn read_from(source: &mut R) -> Result { + let account = AccountHeader::read_from(source)?; + let account_delta_commitment = Word::read_from(source)?; + let output_notes = RawOutputNotes::read_from(source)?; + let fee = FungibleAsset::read_from(source)?; + let expiration_block_num = BlockNumber::read_from(source)?; + + Ok(Self { + account, + account_delta_commitment, + output_notes, + fee, + expiration_block_num, + }) + } +} diff --git a/crates/miden-protocol/src/transaction/outputs/notes.rs b/crates/miden-protocol/src/transaction/outputs/notes.rs new file mode 100644 index 0000000000..fa20c3cfd6 --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/notes.rs @@ -0,0 +1,614 @@ +use alloc::collections::BTreeSet; +use alloc::string::ToString; +use alloc::vec::Vec; +use core::fmt::Debug; + +use crate::constants::NOTE_MAX_SIZE; +use crate::errors::{OutputNoteError, TransactionOutputError}; 
+use crate::note::{ + Note, + NoteAssets, + NoteHeader, + NoteId, + NoteMetadata, + NoteRecipient, + PartialNote, + compute_note_commitment, +}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; +use crate::{Felt, Hasher, MAX_OUTPUT_NOTES_PER_TX, Word}; + +// OUTPUT NOTE COLLECTION +// ================================================================================================ + +/// Contains a list of output notes of a transaction. The list can be empty if the transaction does +/// not produce any notes. +/// +/// This struct is generic over the note type `N`, allowing it to be used with both +/// [`RawOutputNote`] (in [`ExecutedTransaction`](crate::transaction::ExecutedTransaction)) and +/// [`OutputNote`] (in [`ProvenTransaction`](crate::transaction::ProvenTransaction)). +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OutputNoteCollection { + notes: Vec, + commitment: Word, +} + +impl OutputNoteCollection +where + for<'a> &'a NoteHeader: From<&'a N>, + for<'a> NoteId: From<&'a N>, +{ + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- + + /// Returns new [OutputNoteCollection] instantiated from the provided vector of notes. + /// + /// # Errors + /// Returns an error if: + /// - The total number of notes is greater than [`MAX_OUTPUT_NOTES_PER_TX`]. + /// - The vector of notes contains duplicates. 
+ pub fn new(notes: Vec) -> Result { + if notes.len() > MAX_OUTPUT_NOTES_PER_TX { + return Err(TransactionOutputError::TooManyOutputNotes(notes.len())); + } + + let mut seen_notes = BTreeSet::new(); + for note in notes.iter() { + let note_id = NoteId::from(note); + if !seen_notes.insert(note_id) { + return Err(TransactionOutputError::DuplicateOutputNote(note_id)); + } + } + + let commitment = Self::compute_commitment(notes.iter().map(<&NoteHeader>::from)); + + Ok(Self { notes, commitment }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the commitment to the output notes. + /// + /// The commitment is computed as a sequential hash of (note ID, metadata) tuples for the notes + /// created in a transaction. + pub fn commitment(&self) -> Word { + self.commitment + } + + /// Returns total number of output notes. + pub fn num_notes(&self) -> usize { + self.notes.len() + } + + /// Returns true if this [OutputNoteCollection] does not contain any notes. + pub fn is_empty(&self) -> bool { + self.notes.is_empty() + } + + /// Returns a reference to the note located at the specified index. + pub fn get_note(&self, idx: usize) -> &N { + &self.notes[idx] + } + + // ITERATORS + // -------------------------------------------------------------------------------------------- + + /// Returns an iterator over notes in this [OutputNoteCollection]. + pub fn iter(&self) -> impl Iterator { + self.notes.iter() + } + + // HELPERS + // -------------------------------------------------------------------------------------------- + + /// Computes a commitment to output notes. + /// + /// - For an empty list, [`Word::empty`] is returned. + /// - For a non-empty list of notes, this is a sequential hash of (note_id, metadata_commitment) + /// tuples for the notes created in a transaction, where `metadata_commitment` is the return + /// value of [`NoteMetadata::to_commitment`]. 
+ pub(crate) fn compute_commitment<'header>( + notes: impl ExactSizeIterator, + ) -> Word { + if notes.len() == 0 { + return Word::empty(); + } + + let mut elements: Vec = Vec::with_capacity(notes.len() * 8); + for note_header in notes { + elements.extend_from_slice(note_header.id().as_elements()); + elements.extend_from_slice(note_header.metadata().to_commitment().as_elements()); + } + + Hasher::hash_elements(&elements) + } +} + +// SERIALIZATION +// ------------------------------------------------------------------------------------------------ + +impl Serializable for OutputNoteCollection { + fn write_into(&self, target: &mut W) { + // assert is OK here because we enforce max number of notes in the constructor + assert!(self.notes.len() <= u16::MAX.into()); + target.write_u16(self.notes.len() as u16); + target.write_many(&self.notes); + } +} + +impl Deserializable for OutputNoteCollection +where + N: Deserializable, + for<'a> &'a NoteHeader: From<&'a N>, + for<'a> NoteId: From<&'a N>, +{ + fn read_from(source: &mut R) -> Result { + let num_notes = source.read_u16()?; + let notes = source.read_many_iter::(num_notes.into())?.collect::>()?; + Self::new(notes).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + +// RAW OUTPUT NOTES +// ================================================================================================ + +/// Output notes produced during transaction execution (before proving). +/// +/// Contains [`RawOutputNote`] instances which represent notes as they exist immediately after +/// transaction execution. +pub type RawOutputNotes = OutputNoteCollection; + +/// The types of note outputs produced during transaction execution (before proving). +/// +/// This enum represents notes as they exist immediately after transaction execution, +/// before they are processed for inclusion in a proven transaction. 
It includes: +/// - Full notes with all details (public or private) +/// - Partial notes (notes created with only recipient digest, not full recipient details) +/// +/// During proving, these are converted to [`OutputNote`] via the +/// [`to_output_note`](Self::to_output_note) method, which enforces size limits on public notes and +/// converts private/partial notes to headers. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RawOutputNote { + Full(Note), + Partial(PartialNote), +} + +impl RawOutputNote { + const FULL: u8 = 0; + const PARTIAL: u8 = 1; + + /// The assets contained in the note. + pub fn assets(&self) -> &NoteAssets { + match self { + Self::Full(note) => note.assets(), + Self::Partial(note) => note.assets(), + } + } + + /// Unique note identifier. + /// + /// This value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + match self { + Self::Full(note) => note.id(), + Self::Partial(note) => note.id(), + } + } + + /// Returns the recipient of the processed [`Full`](RawOutputNote::Full) output note, [`None`] + /// if the note type is not [`Full`](RawOutputNote::Full). + /// + /// See [crate::note::NoteRecipient] for more details. + pub fn recipient(&self) -> Option<&NoteRecipient> { + match self { + Self::Full(note) => Some(note.recipient()), + Self::Partial(_) => None, + } + } + + /// Returns the recipient digest of the output note. + /// + /// See [crate::note::NoteRecipient] for more details. + pub fn recipient_digest(&self) -> Word { + match self { + RawOutputNote::Full(note) => note.recipient().digest(), + RawOutputNote::Partial(note) => note.recipient_digest(), + } + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + match self { + Self::Full(note) => note.metadata(), + Self::Partial(note) => note.metadata(), + } + } + + /// Converts this output note to a proven output note. 
+ /// + /// This method performs the following transformations: + /// - Private notes (full or partial) are converted into note headers (only public info + /// retained). + /// - Full public notes are wrapped in [`PublicOutputNote`], which enforces size limits + /// + /// # Errors + /// Returns an error if a public note exceeds the maximum allowed size ([`NOTE_MAX_SIZE`]). + pub fn to_output_note(&self) -> Result { + match self { + Self::Full(note) if note.metadata().is_private() => { + Ok(OutputNote::Private(PrivateNoteHeader::new(note.header().clone())?)) + }, + Self::Full(note) => Ok(OutputNote::Public(PublicOutputNote::new(note.clone())?)), + Self::Partial(note) => { + Ok(OutputNote::Private(PrivateNoteHeader::new(note.header().clone())?)) + }, + } + } + + /// Returns a reference to the [`NoteHeader`] of this note. + pub fn header(&self) -> &NoteHeader { + match self { + Self::Full(note) => note.header(), + Self::Partial(note) => note.header(), + } + } + + /// Returns a commitment to the note and its metadata. + /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + pub fn commitment(&self) -> Word { + compute_note_commitment(self.id(), self.metadata()) + } +} + +impl From<&RawOutputNote> for NoteId { + fn from(note: &RawOutputNote) -> Self { + note.id() + } +} + +impl<'note> From<&'note RawOutputNote> for &'note NoteHeader { + fn from(note: &'note RawOutputNote) -> Self { + note.header() + } +} + +impl Serializable for RawOutputNote { + fn write_into(&self, target: &mut W) { + match self { + Self::Full(note) => { + target.write(Self::FULL); + target.write(note); + }, + Self::Partial(note) => { + target.write(Self::PARTIAL); + target.write(note); + }, + } + } + + fn get_size_hint(&self) -> usize { + // Serialized size of the enum tag. 
+ let tag_size = 0u8.get_size_hint(); + + match self { + Self::Full(note) => tag_size + note.get_size_hint(), + Self::Partial(note) => tag_size + note.get_size_hint(), + } + } +} + +impl Deserializable for RawOutputNote { + fn read_from(source: &mut R) -> Result { + match source.read_u8()? { + Self::FULL => Ok(Self::Full(Note::read_from(source)?)), + Self::PARTIAL => Ok(Self::Partial(PartialNote::read_from(source)?)), + v => Err(DeserializationError::InvalidValue(format!("invalid output note type: {v}"))), + } + } +} + +// OUTPUT NOTES +// ================================================================================================ + +/// Output notes in a proven transaction. +/// +/// Contains [`OutputNote`] instances which have been processed for inclusion in proven +/// transactions, with size limits enforced on public notes. +pub type OutputNotes = OutputNoteCollection; + +/// Output note types that can appear in a proven transaction. +/// +/// This enum represents the final form of output notes after proving. Unlike [`RawOutputNote`], +/// this enum: +/// - Does not include partial notes (they are converted to headers). +/// - Wraps public notes in [`PublicOutputNote`] which enforces size limits. +/// - Contains only the minimal information needed for verification. +#[allow(clippy::large_enum_variant)] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum OutputNote { + /// A public note with full details, size-validated. + Public(PublicOutputNote), + /// A note private header (for private notes). + Private(PrivateNoteHeader), +} + +impl OutputNote { + const PUBLIC: u8 = 0; + const PRIVATE: u8 = 1; + + /// Unique note identifier. + /// + /// This value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + match self { + Self::Public(note) => note.id(), + Self::Private(header) => header.id(), + } + } + + /// Note's metadata. 
+ pub fn metadata(&self) -> &NoteMetadata { + match self { + Self::Public(note) => note.metadata(), + Self::Private(header) => header.metadata(), + } + } + + /// The assets contained in the note, if available. + /// + /// Returns `Some` for public notes, `None` for private notes. + pub fn assets(&self) -> Option<&NoteAssets> { + match self { + Self::Public(note) => Some(note.assets()), + Self::Private(_) => None, + } + } + + /// Returns a commitment to the note and its metadata. + /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + pub fn to_commitment(&self) -> Word { + compute_note_commitment(self.id(), self.metadata()) + } + + /// Returns the recipient of the public note, if this is a public note. + pub fn recipient(&self) -> Option<&NoteRecipient> { + match self { + Self::Public(note) => Some(note.recipient()), + Self::Private(_) => None, + } + } +} + +// CONVERSIONS +// ------------------------------------------------------------------------------------------------ + +impl<'note> From<&'note OutputNote> for &'note NoteHeader { + fn from(value: &'note OutputNote) -> Self { + match value { + OutputNote::Public(note) => note.header(), + OutputNote::Private(header) => &header.0, + } + } +} + +impl From<&OutputNote> for NoteId { + fn from(value: &OutputNote) -> Self { + value.id() + } +} + +// SERIALIZATION +// ------------------------------------------------------------------------------------------------ + +impl Serializable for OutputNote { + fn write_into(&self, target: &mut W) { + match self { + Self::Public(note) => { + target.write(Self::PUBLIC); + target.write(note); + }, + Self::Private(header) => { + target.write(Self::PRIVATE); + target.write(header); + }, + } + } + + fn get_size_hint(&self) -> usize { + let tag_size = 0u8.get_size_hint(); + match self { + Self::Public(note) => tag_size + note.get_size_hint(), + Self::Private(header) => tag_size + header.get_size_hint(), + } + } +} + +impl Deserializable for OutputNote { + fn read_from(source: &mut 
R) -> Result { + match source.read_u8()? { + Self::PUBLIC => Ok(Self::Public(PublicOutputNote::read_from(source)?)), + Self::PRIVATE => Ok(Self::Private(PrivateNoteHeader::read_from(source)?)), + v => Err(DeserializationError::InvalidValue(format!( + "invalid proven output note type: {v}" + ))), + } + } +} + +// PUBLIC OUTPUT NOTE +// ================================================================================================ + +/// A public output note with enforced size limits. +/// +/// This struct wraps a [`Note`] and guarantees that: +/// - The note is public (not private). +/// - The serialized size does not exceed [`NOTE_MAX_SIZE`]. +/// +/// This type is used in [`OutputNote::Public`] to ensure that all public notes in proven +/// transactions meet the protocol's size requirements. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PublicOutputNote(Note); + +impl PublicOutputNote { + /// Creates a new [`PublicOutputNote`] from the given note. + /// + /// # Errors + /// Returns an error if: + /// - The note is private. + /// - The serialized size exceeds [`NOTE_MAX_SIZE`]. + pub fn new(mut note: Note) -> Result { + // Ensure the note is public + if note.metadata().is_private() { + return Err(OutputNoteError::NoteIsPrivate(note.id())); + } + + // Strip decorators from the note script + note.minify_script(); + + // Check the size limit after stripping decorators + let note_size = note.get_size_hint(); + if note_size > NOTE_MAX_SIZE as usize { + return Err(OutputNoteError::NoteSizeLimitExceeded { note_id: note.id(), note_size }); + } + + Ok(Self(note)) + } + + /// Returns the unique identifier of this note. + pub fn id(&self) -> NoteId { + self.0.id() + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + self.0.metadata() + } + + /// Returns the note's assets. + pub fn assets(&self) -> &NoteAssets { + self.0.assets() + } + + /// Returns the note's recipient. 
+ pub fn recipient(&self) -> &NoteRecipient { + self.0.recipient() + } + + /// Returns the note's header. + pub fn header(&self) -> &NoteHeader { + self.0.header() + } + + /// Returns a reference to the underlying note. + pub fn as_note(&self) -> &Note { + &self.0 + } + + /// Consumes this wrapper and returns the underlying note. + pub fn into_note(self) -> Note { + self.0 + } +} + +impl Serializable for PublicOutputNote { + fn write_into(&self, target: &mut W) { + self.0.write_into(target); + } + + fn get_size_hint(&self) -> usize { + self.0.get_size_hint() + } +} + +impl Deserializable for PublicOutputNote { + fn read_from(source: &mut R) -> Result { + let note = Note::read_from(source)?; + Self::new(note).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} + +// PRIVATE NOTE HEADER +// ================================================================================================ + +/// A [NoteHeader] of a private note. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PrivateNoteHeader(NoteHeader); + +impl PrivateNoteHeader { + /// Creates a new [`PrivateNoteHeader`] from the given note header. + /// + /// # Errors + /// Returns an error if: + /// - The provided header is for a public note. + pub fn new(header: NoteHeader) -> Result { + if !header.metadata().is_private() { + return Err(OutputNoteError::NoteIsPublic(header.id())); + } + + Ok(Self(header)) + } + + /// Returns the note's identifier. + /// + /// The [NoteId] value is both an unique identifier and a commitment to the note. + pub fn id(&self) -> NoteId { + self.0.id() + } + + /// Returns the note's metadata. + pub fn metadata(&self) -> &NoteMetadata { + self.0.metadata() + } + + /// Consumes self and returns the note header's metadata. + pub fn into_metadata(self) -> NoteMetadata { + self.0.into_metadata() + } + + /// Returns a commitment to the note and its metadata. 
+ /// + /// > hash(NOTE_ID || NOTE_METADATA_COMMITMENT) + /// + /// This value is used primarily for authenticating notes consumed when they are consumed + /// in a transaction. + pub fn commitment(&self) -> Word { + self.0.commitment() + } + + /// Returns a reference to the underlying note header. + pub fn as_header(&self) -> &NoteHeader { + &self.0 + } + + /// Consumes this wrapper and returns the underlying note header. + pub fn into_header(self) -> NoteHeader { + self.0 + } +} + +impl Serializable for PrivateNoteHeader { + fn write_into(&self, target: &mut W) { + self.0.write_into(target); + } + + fn get_size_hint(&self) -> usize { + self.0.get_size_hint() + } +} + +impl Deserializable for PrivateNoteHeader { + fn read_from(source: &mut R) -> Result { + let header = NoteHeader::read_from(source)?; + Self::new(header).map_err(|err| DeserializationError::InvalidValue(err.to_string())) + } +} diff --git a/crates/miden-protocol/src/transaction/outputs/tests.rs b/crates/miden-protocol/src/transaction/outputs/tests.rs new file mode 100644 index 0000000000..24340550db --- /dev/null +++ b/crates/miden-protocol/src/transaction/outputs/tests.rs @@ -0,0 +1,138 @@ +use assert_matches::assert_matches; + +use super::{PublicOutputNote, RawOutputNote, RawOutputNotes}; +use crate::account::AccountId; +use crate::assembly::Assembler; +use crate::asset::FungibleAsset; +use crate::constants::NOTE_MAX_SIZE; +use crate::errors::{OutputNoteError, TransactionOutputError}; +use crate::note::{ + Note, + NoteAssets, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use crate::testing::account_id::{ + ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_SENDER, +}; +use crate::utils::serde::Serializable; +use crate::{Felt, Word}; + +#[test] +fn test_duplicate_output_notes() -> anyhow::Result<()> { + let mock_note = Note::mock_noop(Word::empty()); + let mock_note_id = mock_note.id(); + let mock_note_clone = 
mock_note.clone(); + + let error = RawOutputNotes::new(vec![ + RawOutputNote::Full(mock_note), + RawOutputNote::Full(mock_note_clone), + ]) + .expect_err("input notes creation should fail"); + + assert_matches!(error, TransactionOutputError::DuplicateOutputNote(note_id) if note_id == mock_note_id); + + Ok(()) +} + +#[test] +fn output_note_size_hint_matches_serialized_length() -> anyhow::Result<()> { + let sender_id = ACCOUNT_ID_SENDER.try_into().unwrap(); + + // Build a note with at least two assets. + let faucet_id_1 = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); + let faucet_id_2 = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); + + let asset_1 = FungibleAsset::new(faucet_id_1, 100)?.into(); + let asset_2 = FungibleAsset::new(faucet_id_2, 200)?.into(); + + let assets = NoteAssets::new(vec![asset_1, asset_2])?; + + // Build metadata similarly to how mock notes are constructed. + let metadata = NoteMetadata::new(sender_id, NoteType::Private) + .with_tag(NoteTag::with_account_target(sender_id)); + + // Build storage with at least two values. + let storage = NoteStorage::new(vec![Felt::new(1), Felt::new(2)])?; + + let serial_num = Word::empty(); + let script = NoteScript::mock(); + let recipient = NoteRecipient::new(serial_num, script, storage); + + let note = Note::new(assets, metadata, recipient); + let output_note = RawOutputNote::Full(note); + + let bytes = output_note.to_bytes(); + + assert_eq!(bytes.len(), output_note.get_size_hint()); + + Ok(()) +} + +#[test] +fn oversized_public_note_triggers_size_limit_error() -> anyhow::Result<()> { + // Construct a public note whose serialized size exceeds NOTE_MAX_SIZE by creating + // a very large note script so that the script's serialized MAST alone is larger + // than the configured limit. + + let sender_id = ACCOUNT_ID_SENDER.try_into().unwrap(); + + // Build a large MASM program with many `nop` instructions. 
+ let mut src = alloc::string::String::from("begin\n"); + // The exact threshold is not critical as long as we clearly exceed NOTE_MAX_SIZE. + // After strip_decorators(), the size is reduced, so we need more nops. + for _ in 0..50000 { + src.push_str(" nop\n"); + } + src.push_str("end\n"); + + let assembler = Assembler::default(); + let program = assembler.assemble_program(&src).unwrap(); + let script = NoteScript::new(program); + + let serial_num = Word::empty(); + let storage = NoteStorage::new(alloc::vec::Vec::new())?; + + // Create a public note (NoteType::Public is required for PublicOutputNote) + let faucet_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET).unwrap(); + let asset = FungibleAsset::new(faucet_id, 100)?.into(); + let assets = NoteAssets::new(vec![asset])?; + + let metadata = NoteMetadata::new(sender_id, NoteType::Public) + .with_tag(NoteTag::with_account_target(sender_id)); + + let recipient = NoteRecipient::new(serial_num, script, storage); + let oversized_note = Note::new(assets, metadata, recipient); + + // Sanity-check that our constructed note is indeed larger than the configured + // maximum. 
+ let computed_note_size = oversized_note.get_size_hint(); + assert!(computed_note_size > NOTE_MAX_SIZE as usize); + + // Creating a PublicOutputNote should fail with size limit error + let result = PublicOutputNote::new(oversized_note.clone()); + + assert_matches!( + result, + Err(OutputNoteError::NoteSizeLimitExceeded { note_id: _, note_size }) + if note_size > NOTE_MAX_SIZE as usize + ); + + // to_output_note() should also fail + let output_note = RawOutputNote::Full(oversized_note); + let result = output_note.to_output_note(); + + assert_matches!( + result, + Err(OutputNoteError::NoteSizeLimitExceeded { note_id: _, note_size }) + if note_size > NOTE_MAX_SIZE as usize + ); + + Ok(()) +} diff --git a/crates/miden-protocol/src/transaction/partial_blockchain.rs b/crates/miden-protocol/src/transaction/partial_blockchain.rs index a7b2cbf491..75a6e98fc0 100644 --- a/crates/miden-protocol/src/transaction/partial_blockchain.rs +++ b/crates/miden-protocol/src/transaction/partial_blockchain.rs @@ -2,6 +2,8 @@ use alloc::collections::BTreeMap; use alloc::vec::Vec; use core::ops::RangeTo; +use miden_crypto::merkle::mmr::MmrProof; + use crate::block::{BlockHeader, BlockNumber}; use crate::crypto::merkle::InnerNodeInfo; use crate::crypto::merkle::mmr::{MmrPeaks, PartialMmr}; @@ -67,11 +69,13 @@ impl PartialBlockchain { for (block_num, block) in partial_chain.blocks.iter() { // SAFETY: new_unchecked returns an error if a block is not tracked in the MMR, so // retrieving a proof here should succeed. - let proof = partial_chain + let path = partial_chain .mmr .open(block_num.as_usize()) .expect("block should not exceed chain length") .expect("block should be tracked in the partial MMR"); + // This should go away again once https://github.com/0xMiden/crypto/pull/787 is propagated here. 
+ let proof = MmrProof::new(path, block.commitment()); partial_chain.mmr.peaks().verify(block.commitment(), proof).map_err(|source| { PartialBlockchainError::BlockHeaderCommitmentMismatch { @@ -281,7 +285,6 @@ impl Default for PartialBlockchain { #[cfg(test)] mod tests { use assert_matches::assert_matches; - use miden_core::utils::{Deserializable, Serializable}; use rand::SeedableRng; use rand_chacha::ChaCha20Rng; @@ -293,6 +296,7 @@ mod tests { use crate::crypto::merkle::mmr::{Mmr, PartialMmr}; use crate::errors::PartialBlockchainError; use crate::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_partial_blockchain_add() { @@ -312,7 +316,7 @@ mod tests { partial_blockchain.add_block(&block_header, true); assert_eq!( - mmr.open(block_num as usize).unwrap(), + *mmr.open(block_num as usize).unwrap().path(), partial_blockchain.mmr.open(block_num as usize).unwrap().unwrap() ); @@ -323,7 +327,7 @@ mod tests { partial_blockchain.add_block(&block_header, true); assert_eq!( - mmr.open(block_num as usize).unwrap(), + *mmr.open(block_num as usize).unwrap().path(), partial_blockchain.mmr.open(block_num as usize).unwrap().unwrap() ); @@ -334,7 +338,7 @@ mod tests { partial_blockchain.add_block(&block_header, true); assert_eq!( - mmr.open(block_num as usize).unwrap(), + *mmr.open(block_num as usize).unwrap().path(), partial_blockchain.mmr.open(block_num as usize).unwrap().unwrap() ); } @@ -353,7 +357,7 @@ mod tests { let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks()); for i in 0..3 { partial_mmr - .track(i, mmr.get(i).unwrap(), &mmr.open(i).unwrap().merkle_path) + .track(i, mmr.get(i).unwrap(), mmr.open(i).unwrap().merkle_path()) .unwrap(); } @@ -403,7 +407,7 @@ mod tests { let mut partial_mmr = PartialMmr::from_peaks(mmr.peaks()); partial_mmr - .track(1, block_header1.commitment(), &mmr.open(1).unwrap().merkle_path) + .track(1, block_header1.commitment(), mmr.open(1).unwrap().merkle_path()) 
.unwrap(); let error = @@ -470,7 +474,7 @@ mod tests { for i in 0..total_blocks { let i: usize = i as usize; partial_mmr - .track(i, full_mmr.get(i).unwrap(), &full_mmr.open(i).unwrap().merkle_path) + .track(i, full_mmr.get(i).unwrap(), full_mmr.open(i).unwrap().merkle_path()) .unwrap(); } let mut chain = PartialBlockchain::new(partial_mmr, headers).unwrap(); diff --git a/crates/miden-protocol/src/transaction/proven_tx.rs b/crates/miden-protocol/src/transaction/proven_tx.rs index ab679b9ffc..5775902cc7 100644 --- a/crates/miden-protocol/src/transaction/proven_tx.rs +++ b/crates/miden-protocol/src/transaction/proven_tx.rs @@ -71,6 +71,69 @@ pub struct ProvenTransaction { } impl ProvenTransaction { + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- + + /// Creates a new [ProvenTransaction] from the specified components. + /// + /// # Errors + /// + /// Returns an error if: + /// - The total number of input notes is greater than + /// [`MAX_INPUT_NOTES_PER_TX`](crate::constants::MAX_INPUT_NOTES_PER_TX). + /// - The vector of input notes contains duplicates. + /// - The total number of output notes is greater than + /// [`MAX_OUTPUT_NOTES_PER_TX`](crate::constants::MAX_OUTPUT_NOTES_PER_TX). + /// - The vector of output notes contains duplicates. + /// - The transaction is empty, which is the case if the account state is unchanged or the + /// number of input notes is zero. + /// - The commitment computed on the actual account delta contained in [`TxAccountUpdate`] does + /// not match its declared account delta commitment. 
+ pub fn new( + account_update: TxAccountUpdate, + input_notes: impl IntoIterator>, + output_notes: impl IntoIterator>, + ref_block_num: BlockNumber, + ref_block_commitment: Word, + fee: FungibleAsset, + expiration_block_num: BlockNumber, + proof: ExecutionProof, + ) -> Result { + let input_notes: Vec = + input_notes.into_iter().map(Into::into).collect(); + let output_notes: Vec = output_notes.into_iter().map(Into::into).collect(); + + let input_notes = + InputNotes::new(input_notes).map_err(ProvenTransactionError::InputNotesError)?; + let output_notes = + OutputNotes::new(output_notes).map_err(ProvenTransactionError::OutputNotesError)?; + + let id = TransactionId::new( + account_update.initial_state_commitment(), + account_update.final_state_commitment(), + input_notes.commitment(), + output_notes.commitment(), + fee, + ); + + let proven_transaction = Self { + id, + account_update, + input_notes, + output_notes, + ref_block_num, + ref_block_commitment, + fee, + expiration_block_num, + proof, + }; + + proven_transaction.validate() + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + /// Returns unique identifier of this transaction. pub fn id(&self) -> TransactionId { self.id @@ -218,6 +281,7 @@ impl Deserializable for ProvenTransaction { account_update.final_state_commitment(), input_notes.commitment(), output_notes.commitment(), + fee, ); let proven_transaction = Self { @@ -238,171 +302,6 @@ impl Deserializable for ProvenTransaction { } } -// PROVEN TRANSACTION BUILDER -// ================================================================================================ - -/// Builder for a proven transaction. -#[derive(Clone, Debug)] -pub struct ProvenTransactionBuilder { - /// ID of the account that the transaction was executed against. - account_id: AccountId, - - /// The commitment of the account before the transaction was executed. 
- initial_account_commitment: Word, - - /// The commitment of the account after the transaction was executed. - final_account_commitment: Word, - - /// The commitment of the account delta produced by the transaction. - account_delta_commitment: Word, - - /// State changes to the account due to the transaction. - account_update_details: AccountUpdateDetails, - - /// List of [InputNoteCommitment]s of all consumed notes by the transaction. - input_notes: Vec, - - /// List of [OutputNote]s of all notes created by the transaction. - output_notes: Vec, - - /// [`BlockNumber`] of the transaction's reference block. - ref_block_num: BlockNumber, - - /// Block digest of the transaction's reference block. - ref_block_commitment: Word, - - /// The fee of the transaction. - fee: FungibleAsset, - - /// The block number by which the transaction will expire, as defined by the executed scripts. - expiration_block_num: BlockNumber, - - /// A STARK proof that attests to the correct execution of the transaction. - proof: ExecutionProof, -} - -impl ProvenTransactionBuilder { - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns a [ProvenTransactionBuilder] used to build a [ProvenTransaction]. 
- #[allow(clippy::too_many_arguments)] - pub fn new( - account_id: AccountId, - initial_account_commitment: Word, - final_account_commitment: Word, - account_delta_commitment: Word, - ref_block_num: BlockNumber, - ref_block_commitment: Word, - fee: FungibleAsset, - expiration_block_num: BlockNumber, - proof: ExecutionProof, - ) -> Self { - Self { - account_id, - initial_account_commitment, - final_account_commitment, - account_delta_commitment, - account_update_details: AccountUpdateDetails::Private, - input_notes: Vec::new(), - output_notes: Vec::new(), - ref_block_num, - ref_block_commitment, - fee, - expiration_block_num, - proof, - } - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Sets the account's update details. - pub fn account_update_details(mut self, details: AccountUpdateDetails) -> Self { - self.account_update_details = details; - self - } - - /// Add notes consumed by the transaction. - pub fn add_input_notes(mut self, notes: I) -> Self - where - I: IntoIterator, - T: Into, - { - self.input_notes.extend(notes.into_iter().map(|note| note.into())); - self - } - - /// Add notes produced by the transaction. - pub fn add_output_notes(mut self, notes: T) -> Self - where - T: IntoIterator, - { - self.output_notes.extend(notes); - self - } - - /// Builds the [`ProvenTransaction`]. - /// - /// # Errors - /// - /// Returns an error if: - /// - The total number of input notes is greater than - /// [`MAX_INPUT_NOTES_PER_TX`](crate::constants::MAX_INPUT_NOTES_PER_TX). - /// - The vector of input notes contains duplicates. - /// - The total number of output notes is greater than - /// [`MAX_OUTPUT_NOTES_PER_TX`](crate::constants::MAX_OUTPUT_NOTES_PER_TX). - /// - The vector of output notes contains duplicates. - /// - The transaction is empty, which is the case if the account state is unchanged or the - /// number of input notes is zero. 
- /// - The commitment computed on the actual account delta contained in [`TxAccountUpdate`] does - /// not match its declared account delta commitment. - /// - The size of the serialized account update exceeds [`ACCOUNT_UPDATE_MAX_SIZE`]. - /// - The transaction was executed against a _new_ account with public state and its account ID - /// does not match the ID in the account update. - /// - The transaction was executed against a _new_ account with public state and its commitment - /// does not match the final state commitment of the account update. - /// - The transaction creates a _new_ account with public state and the update is of type - /// [`AccountUpdateDetails::Delta`] but the account delta is not a full state delta. - /// - The transaction was executed against a private account and the account update is _not_ of - /// type [`AccountUpdateDetails::Private`]. - /// - The transaction was executed against an account with public state and the update is of - /// type [`AccountUpdateDetails::Private`]. 
- pub fn build(self) -> Result { - let input_notes = - InputNotes::new(self.input_notes).map_err(ProvenTransactionError::InputNotesError)?; - let output_notes = OutputNotes::new(self.output_notes) - .map_err(ProvenTransactionError::OutputNotesError)?; - let id = TransactionId::new( - self.initial_account_commitment, - self.final_account_commitment, - input_notes.commitment(), - output_notes.commitment(), - ); - let account_update = TxAccountUpdate::new( - self.account_id, - self.initial_account_commitment, - self.final_account_commitment, - self.account_delta_commitment, - self.account_update_details, - )?; - - let proven_transaction = ProvenTransaction { - id, - account_update, - input_notes, - output_notes, - ref_block_num: self.ref_block_num, - ref_block_commitment: self.ref_block_commitment, - fee: self.fee, - expiration_block_num: self.expiration_block_num, - proof: self.proof, - }; - - proven_transaction.validate() - } -} - // TRANSACTION ACCOUNT UPDATE // ================================================================================================ @@ -508,10 +407,10 @@ impl TxAccountUpdate { }); } - if account.commitment() != account_update.final_state_commitment { + if account.to_commitment() != account_update.final_state_commitment { return Err(ProvenTransactionError::AccountFinalCommitmentMismatch { tx_final_commitment: account_update.final_state_commitment, - details_commitment: account.commitment(), + details_commitment: account.to_commitment(), }); } } @@ -599,6 +498,16 @@ pub struct InputNoteCommitment { } impl InputNoteCommitment { + /// Returns a new [InputNoteCommitment] instantiated from the provided nullifier and optional + /// note header. + /// + /// Note: this method does not validate that the provided nullifier and header are consistent + /// with each other (i.e., it does not check that the nullifier was derived from the note + /// referenced by the header). 
+ pub fn from_parts_unchecked(nullifier: Nullifier, header: Option) -> Self { + Self { nullifier, header } + } + /// Returns the nullifier of the input note committed to by this commitment. pub fn nullifier(&self) -> Nullifier { self.nullifier @@ -684,11 +593,11 @@ impl Deserializable for InputNoteCommitment { #[cfg(test)] mod tests { use alloc::collections::BTreeMap; + use alloc::vec::Vec; use anyhow::Context; - use miden_core::utils::Deserializable; + use miden_crypto::rand::test_utils::rand_value; use miden_verifier::ExecutionProof; - use winter_rand_utils::rand_value; use super::ProvenTransaction; use crate::account::delta::AccountUpdateDetails; @@ -702,6 +611,7 @@ mod tests { AccountType, AccountVaultDelta, StorageMapDelta, + StorageMapKey, StorageSlotName, }; use crate::asset::FungibleAsset; @@ -713,8 +623,8 @@ mod tests { }; use crate::testing::add_component::AddComponent; use crate::testing::noop_auth_component::NoopAuthComponent; - use crate::transaction::{ProvenTransactionBuilder, TxAccountUpdate}; - use crate::utils::Serializable; + use crate::transaction::{InputNoteCommitment, OutputNote, TxAccountUpdate}; + use crate::utils::serde::{Deserializable, Serializable}; use crate::{ACCOUNT_UPDATE_MAX_SIZE, EMPTY_WORD, LexicographicWord, ONE, Word}; fn check_if_sync() {} @@ -749,8 +659,8 @@ mod tests { TxAccountUpdate::new( account.id(), - account.commitment(), - account.commitment(), + account.to_commitment(), + account.to_commitment(), Word::empty(), details, )?; @@ -767,7 +677,10 @@ mod tests { // 32 bytes in size. 
let required_entries = ACCOUNT_UPDATE_MAX_SIZE / (2 * 32); for _ in 0..required_entries { - map.insert(LexicographicWord::new(rand_value::()), rand_value::()); + map.insert( + LexicographicWord::new(StorageMapKey::from_raw(rand_value())), + rand_value::(), + ); } let storage_delta = StorageMapDelta::new(map); @@ -812,18 +725,25 @@ mod tests { let expiration_block_num = BlockNumber::from(2); let proof = ExecutionProof::new_dummy(); - let tx = ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( account_id, initial_account_commitment, final_account_commitment, account_delta_commitment, + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + let tx = ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), ref_block_num, ref_block_commitment, FungibleAsset::mock(42).unwrap_fungible(), expiration_block_num, proof, ) - .build() .context("failed to build proven transaction")?; let deserialized = ProvenTransaction::read_from_bytes(&tx.to_bytes()).unwrap(); diff --git a/crates/miden-protocol/src/transaction/transaction_id.rs b/crates/miden-protocol/src/transaction/transaction_id.rs index e75f7662f7..ddf14c7532 100644 --- a/crates/miden-protocol/src/transaction/transaction_id.rs +++ b/crates/miden-protocol/src/transaction/transaction_id.rs @@ -4,6 +4,7 @@ use core::fmt::{Debug, Display}; use miden_protocol_macros::WordWrapper; use super::{Felt, Hasher, ProvenTransaction, WORD_SIZE, Word, ZERO}; +use crate::asset::{Asset, FungibleAsset}; use crate::utils::serde::{ ByteReader, ByteWriter, @@ -19,8 +20,13 @@ use crate::utils::serde::{ /// /// Transaction ID is computed as: /// -/// hash(init_account_commitment, final_account_commitment, input_notes_commitment, -/// output_notes_commitment) +/// hash( +/// INIT_ACCOUNT_COMMITMENT, +/// FINAL_ACCOUNT_COMMITMENT, +/// INPUT_NOTES_COMMITMENT, +/// OUTPUT_NOTES_COMMITMENT, +/// FEE_ASSET, +/// ) /// /// This achieves the following properties: /// - Transactions 
are identical if and only if they have the same ID. @@ -35,12 +41,14 @@ impl TransactionId { final_account_commitment: Word, input_notes_commitment: Word, output_notes_commitment: Word, + fee_asset: FungibleAsset, ) -> Self { - let mut elements = [ZERO; 4 * WORD_SIZE]; + let mut elements = [ZERO; 6 * WORD_SIZE]; elements[..4].copy_from_slice(init_account_commitment.as_elements()); elements[4..8].copy_from_slice(final_account_commitment.as_elements()); elements[8..12].copy_from_slice(input_notes_commitment.as_elements()); - elements[12..].copy_from_slice(output_notes_commitment.as_elements()); + elements[12..16].copy_from_slice(output_notes_commitment.as_elements()); + elements[16..].copy_from_slice(&Asset::from(fee_asset).as_elements()); Self(Hasher::hash_elements(&elements)) } } @@ -67,6 +75,7 @@ impl From<&ProvenTransaction> for TransactionId { tx.account_update().final_state_commitment(), tx.input_notes().commitment(), tx.output_notes().commitment(), + tx.fee(), ) } } diff --git a/crates/miden-protocol/src/transaction/tx_args.rs b/crates/miden-protocol/src/transaction/tx_args.rs index 598ce1b94b..1e6657bfaf 100644 --- a/crates/miden-protocol/src/transaction/tx_args.rs +++ b/crates/miden-protocol/src/transaction/tx_args.rs @@ -2,8 +2,8 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use alloc::vec::Vec; +use miden_core::mast::MastNodeExt; use miden_crypto::merkle::InnerNodeInfo; -use miden_processor::MastNodeExt; use super::{Felt, Hasher, Word}; use crate::account::auth::{PublicKeyCommitment, Signature}; @@ -30,7 +30,7 @@ use crate::{EMPTY_WORD, MastForest, MastNodeId}; /// be used as a default value. If the [AdviceInputs] are propagated with some user defined map /// entries, this script arguments word could be used as a key to access the corresponding value. /// - Note arguments: data put onto the stack right before a note script is executed. 
These are -/// different from note inputs, as the user executing the transaction can specify arbitrary note +/// different from note storage, as the user executing the transaction can specify arbitrary note /// args. /// - Advice inputs: provides data needed by the runtime, like the details of public output notes. /// - Foreign account inputs: provides foreign account data that will be used during the foreign @@ -155,14 +155,14 @@ impl TransactionArgs { /// Populates the advice inputs with the expected recipient data for creating output notes. /// /// The advice inputs' map is extended with the following entries: - /// - RECIPIENT: [SERIAL_SCRIPT_HASH, INPUTS_COMMITMENT] + /// - RECIPIENT: [SERIAL_SCRIPT_HASH, STORAGE_COMMITMENT] /// - SERIAL_SCRIPT_HASH: [SERIAL_HASH, SCRIPT_ROOT] /// - SERIAL_HASH: [SERIAL_NUM, EMPTY_WORD] - /// - inputs_commitment |-> inputs. + /// - storage_commitment |-> storage_items. /// - script_root |-> script. pub fn add_output_note_recipient>(&mut self, note_recipient: T) { let note_recipient = note_recipient.as_ref(); - let inputs = note_recipient.inputs(); + let storage = note_recipient.storage(); let script = note_recipient.script(); let script_encoded: Vec = script.into(); @@ -173,12 +173,8 @@ impl TransactionArgs { let new_elements = vec![ (sn_hash, concat_words(note_recipient.serial_num(), Word::empty())), (sn_script_hash, concat_words(sn_hash, script.root())), - (note_recipient.digest(), concat_words(sn_script_hash, inputs.commitment())), - (inputs.commitment(), inputs.to_elements()), - ( - Hasher::hash_elements(inputs.commitment().as_elements()), - vec![Felt::from(inputs.num_values())], - ), + (note_recipient.digest(), concat_words(sn_script_hash, storage.commitment())), + (storage.commitment(), storage.to_elements()), (script.root(), script_encoded), ]; @@ -207,7 +203,7 @@ impl TransactionArgs { /// The advice inputs' map is extended with the following keys: /// /// - recipient |-> recipient details (inputs_hash, script_root, 
serial_num). - /// - inputs_commitment |-> inputs. + /// - storage_commitment |-> storage_items. /// - script_root |-> script. pub fn extend_output_note_recipients(&mut self, notes: L) where @@ -324,6 +320,24 @@ impl TransactionScript { pub fn root(&self) -> Word { self.mast[self.entrypoint].digest() } + + /// Returns a new [TransactionScript] with the provided advice map entries merged into the + /// underlying [MastForest]. + /// + /// This allows adding advice map entries to an already-compiled transaction script, + /// which is useful when the entries are determined after script compilation. + pub fn with_advice_map(self, advice_map: AdviceMap) -> Self { + if advice_map.is_empty() { + return self; + } + + let mut mast = (*self.mast).clone(); + mast.advice_map_mut().extend(advice_map); + Self { + mast: Arc::new(mast), + entrypoint: self.entrypoint, + } + } } // SERIALIZATION @@ -347,10 +361,10 @@ impl Deserializable for TransactionScript { #[cfg(test)] mod tests { - use miden_core::AdviceMap; - use miden_core::utils::{Deserializable, Serializable}; + use miden_core::advice::AdviceMap; use crate::transaction::TransactionArgs; + use crate::utils::serde::{Deserializable, Serializable}; #[test] fn test_tx_args_serialization() { @@ -360,4 +374,35 @@ mod tests { assert_eq!(tx_args, decoded); } + + #[test] + fn test_transaction_script_with_advice_map() { + use miden_core::{Felt, Word}; + + use super::TransactionScript; + use crate::assembly::Assembler; + + let assembler = Assembler::default(); + let program = assembler.assemble_program("begin nop end").unwrap(); + let script = TransactionScript::new(program); + + assert!(script.mast().advice_map().is_empty()); + + // Empty advice map should be a no-op + let original_root = script.root(); + let script = script.with_advice_map(AdviceMap::default()); + assert_eq!(original_root, script.root()); + + // Non-empty advice map should add entries + let key = Word::from([1u32, 2, 3, 4]); + let value = vec![Felt::new(42), 
Felt::new(43)]; + let mut advice_map = AdviceMap::default(); + advice_map.insert(key, value.clone()); + + let script = script.with_advice_map(advice_map); + + let mast = script.mast(); + let stored = mast.advice_map().get(&key).expect("entry should be present"); + assert_eq!(stored.as_ref(), value.as_slice()); + } } diff --git a/crates/miden-protocol/src/transaction/tx_header.rs b/crates/miden-protocol/src/transaction/tx_header.rs index 210d722e4b..23a2721e88 100644 --- a/crates/miden-protocol/src/transaction/tx_header.rs +++ b/crates/miden-protocol/src/transaction/tx_header.rs @@ -1,7 +1,5 @@ use alloc::vec::Vec; -use miden_processor::DeserializationError; - use crate::Word; use crate::asset::FungibleAsset; use crate::note::NoteHeader; @@ -10,20 +8,26 @@ use crate::transaction::{ ExecutedTransaction, InputNoteCommitment, InputNotes, - OutputNote, - OutputNotes, ProvenTransaction, + RawOutputNotes, TransactionId, }; -use crate::utils::{ByteReader, ByteWriter, Deserializable, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; /// A transaction header derived from a /// [`ProvenTransaction`](crate::transaction::ProvenTransaction). /// -/// The header is essentially a direct copy of the transaction's commitments, in particular the -/// initial and final account state commitment as well as all nullifiers of consumed notes and all -/// note IDs of created notes. While account updates may be aggregated and notes may be erased as -/// part of batch and block building, the header retains the original transaction's data. +/// The header is essentially a direct copy of the transaction's public commitments, in particular +/// the initial and final account state commitment as well as all nullifiers of consumed notes and +/// all note IDs of created notes together with the fee asset. 
While account updates may be +/// aggregated and notes may be erased as part of batch and block building, the header retains the +/// original transaction's data. #[derive(Debug, Clone, PartialEq, Eq)] pub struct TransactionHeader { id: TransactionId, @@ -41,7 +45,8 @@ impl TransactionHeader { /// Constructs a new [`TransactionHeader`] from the provided parameters. /// - /// The [`TransactionId`] is computed from the provided parameters. + /// The [`TransactionId`] is computed from the provided parameters, committing to the initial + /// and final account commitments, input and output note commitments, and the fee asset. /// /// The input notes and output notes must be in the same order as they appeared in the /// transaction that this header represents, otherwise an incorrect ID will be computed. @@ -57,13 +62,14 @@ impl TransactionHeader { fee: FungibleAsset, ) -> Self { let input_notes_commitment = input_notes.commitment(); - let output_notes_commitment = OutputNotes::compute_commitment(output_notes.iter()); + let output_notes_commitment = RawOutputNotes::compute_commitment(output_notes.iter()); let id = TransactionId::new( initial_state_commitment, final_state_commitment, input_notes_commitment, output_notes_commitment, + fee, ); Self { @@ -167,7 +173,7 @@ impl From<&ProvenTransaction> for TransactionHeader { tx.account_update().initial_state_commitment(), tx.account_update().final_state_commitment(), tx.input_notes().clone(), - tx.output_notes().iter().map(OutputNote::header).cloned().collect(), + tx.output_notes().iter().map(<&NoteHeader>::from).cloned().collect(), tx.fee(), ) } @@ -180,9 +186,9 @@ impl From<&ExecutedTransaction> for TransactionHeader { tx.id(), tx.account_id(), tx.initial_account().initial_commitment(), - tx.final_account().commitment(), + tx.final_account().to_commitment(), tx.input_notes().to_commitments(), - tx.output_notes().iter().map(OutputNote::header).cloned().collect(), + tx.output_notes().iter().map(|n| 
n.header().clone()).collect(), tx.fee(), ) } diff --git a/crates/miden-protocol/src/transaction/tx_summary.rs b/crates/miden-protocol/src/transaction/tx_summary.rs index b2fcf0c0af..f02342e456 100644 --- a/crates/miden-protocol/src/transaction/tx_summary.rs +++ b/crates/miden-protocol/src/transaction/tx_summary.rs @@ -2,8 +2,14 @@ use alloc::vec::Vec; use crate::account::AccountDelta; use crate::crypto::SequentialCommit; -use crate::transaction::{InputNote, InputNotes, OutputNotes}; -use crate::utils::{Deserializable, Serializable}; +use crate::transaction::{InputNote, InputNotes, RawOutputNotes}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; use crate::{Felt, Word}; /// The summary of the changes that result from executing a transaction. @@ -14,7 +20,7 @@ use crate::{Felt, Word}; pub struct TransactionSummary { account_delta: AccountDelta, input_notes: InputNotes, - output_notes: OutputNotes, + output_notes: RawOutputNotes, salt: Word, } @@ -26,7 +32,7 @@ impl TransactionSummary { pub fn new( account_delta: AccountDelta, input_notes: InputNotes, - output_notes: OutputNotes, + output_notes: RawOutputNotes, salt: Word, ) -> Self { Self { @@ -51,7 +57,7 @@ impl TransactionSummary { } /// Returns the output notes of this transaction summary. 
- pub fn output_notes(&self) -> &OutputNotes { + pub fn output_notes(&self) -> &RawOutputNotes { &self.output_notes } @@ -82,7 +88,7 @@ impl SequentialCommit for TransactionSummary { } impl Serializable for TransactionSummary { - fn write_into(&self, target: &mut W) { + fn write_into(&self, target: &mut W) { self.account_delta.write_into(target); self.input_notes.write_into(target); self.output_notes.write_into(target); @@ -91,9 +97,7 @@ impl Serializable for TransactionSummary { } impl Deserializable for TransactionSummary { - fn read_from( - source: &mut R, - ) -> Result { + fn read_from(source: &mut R) -> Result { let account_delta = source.read()?; let input_notes = source.read()?; let output_notes = source.read()?; diff --git a/crates/miden-standards/Cargo.toml b/crates/miden-standards/Cargo.toml index a57fb2814e..d4876b5cd9 100644 --- a/crates/miden-standards/Cargo.toml +++ b/crates/miden-standards/Cargo.toml @@ -29,7 +29,7 @@ rand = { optional = true, workspace = true } thiserror = { workspace = true } [build-dependencies] -fs-err = { version = "3" } +fs-err = { workspace = true } miden-assembly = { workspace = true } miden-core = { workspace = true } miden-core-lib = { workspace = true } diff --git a/crates/miden-standards/asm/account_components/access/ownable2step.masm b/crates/miden-standards/asm/account_components/access/ownable2step.masm new file mode 100644 index 0000000000..0f7b7dd2bd --- /dev/null +++ b/crates/miden-standards/asm/account_components/access/ownable2step.masm @@ -0,0 +1,9 @@ +# The MASM code of the Ownable2Step Account Component. +# +# See the `Ownable2Step` Rust type's documentation for more details. 
+ +pub use ::miden::standards::access::ownable2step::get_owner +pub use ::miden::standards::access::ownable2step::get_nominated_owner +pub use ::miden::standards::access::ownable2step::transfer_ownership +pub use ::miden::standards::access::ownable2step::accept_ownership +pub use ::miden::standards::access::ownable2step::renounce_ownership diff --git a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak.masm b/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak.masm deleted file mode 100644 index e226eeee00..0000000000 --- a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak.masm +++ /dev/null @@ -1,43 +0,0 @@ -# The MASM code of the ECDSA K256 Keccak authentication Account Component. -# -# See the `AuthEcdsaK256Keccak` Rust type's documentation for more details. - -use miden::standards::auth::ecdsa_k256_keccak -use miden::protocol::active_account - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - -# CONSTANTS -# ================================================================================================= - -# The slot in this component's storage layout where the public key is stored. -const PUBLIC_KEY_SLOT = word("miden::standards::auth::ecdsa_k256_keccak::public_key") - -#! Authenticate a transaction using the ECDSA signature scheme. -#! -#! It first increments the nonce of the account, independent of whether the account's state has -#! changed or not. Then it computes and signs the following message (in memory order): -#! [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, -#! OUTPUT_NOTES_COMMITMENT, [0, 0, ref_block_num, final_nonce]] -#! -#! Including the final_nonce is necessary for replay protection. The reference block number is -#! included to commit to the transaction creator's intended reference block of the transaction -#! which determines the fee parameters and therefore the fee amount that is deducted. -#! -#! Inputs: [AUTH_ARGS, pad(12)] -#! Outputs: [pad(16)] -#! -#! 
Invocation: call -pub proc auth_tx_ecdsa_k256_keccak(auth_args: BeWord) - dropw - # => [pad(16)] - - # Fetch public key from storage. - # --------------------------------------------------------------------------------------------- - - push.PUBLIC_KEY_SLOT[0..2] exec.active_account::get_item - # => [PUB_KEY, pad(16)] - - exec.ecdsa_k256_keccak::authenticate_transaction - # => [pad(16)] -end diff --git a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_multisig.masm b/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_multisig.masm deleted file mode 100644 index b03b896d6a..0000000000 --- a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_multisig.masm +++ /dev/null @@ -1,417 +0,0 @@ -# The MASM code of the Multi-Signature ECDSA K256 Keccak Authentication Component. -# -# See the `AuthEcdsaK256KeccakMultisig` Rust type's documentation for more details. - -use miden::protocol::active_account -use miden::protocol::native_account -use miden::standards::auth - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - -# CONSTANTS -# ================================================================================================= - -# Auth Request Constants - -# The event emitted when a signature is not found for a required signer. -const AUTH_UNAUTHORIZED_EVENT = event("miden::auth::unauthorized") - -# Storage Layout Constants -# -# ┌───────────────────────────────┬──────────┬──────────────┬───────────────────┐ -# │ THRESHOLD & APPROVERS CONFIG │ PUB KEYS │ EXECUTED TXS │ PROC THRESHOLDS │ -# │ (slot) │ (map) │ (map) │ (map) │ -# ├───────────────────────────────┼──────────┼──────────────┼───────────────────┤ -# │ 0 │ 1 │ 2 │ 3 │ -# └───────────────────────────────┴──────────┴──────────────┴───────────────────┘ - -# The slot in this component's storage layout where the default signature threshold and -# number of approvers are stored as: -# [default_threshold, num_approvers, 0, 0]. 
-# The threshold is guaranteed to be less than or equal to num_approvers. -const THRESHOLD_CONFIG_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_multisig::threshold_config") - -# The slot in this component's storage layout where the public keys map is stored. -# Map entries: [key_index, 0, 0, 0] => APPROVER_PUBLIC_KEY -const APPROVER_PUBLIC_KEYS_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_multisig::approver_public_keys") - -# The slot in this component's storage layout where executed transactions are stored. -# Map entries: transaction_message => [is_executed, 0, 0, 0] -const EXECUTED_TXS_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_multisig::executed_transactions") - -# The slot in this component's storage layout where procedure thresholds are stored. -# Map entries: PROC_ROOT => [proc_threshold, 0, 0, 0] -const PROC_THRESHOLD_ROOTS_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_multisig::procedure_thresholds") - -# Executed Transaction Flag Constant -const IS_EXECUTED_FLAG = [1, 0, 0, 0] - -# ERRORS -# ================================================================================================= - -const ERR_TX_ALREADY_EXECUTED = "failed to approve multisig transaction as it was already executed" - -const ERR_MALFORMED_MULTISIG_CONFIG = "number of approvers must be equal to or greater than threshold" - -const ERR_ZERO_IN_MULTISIG_CONFIG = "number of approvers or threshold must not be zero" - -#! Check if transaction has already been executed and add it to executed transactions for replay protection. -#! -#! Inputs: [MSG] -#! Outputs: [] -#! -#! Panics if: -#! 
- the same transaction has already been executed -proc assert_new_tx(msg: BeWord) - push.IS_EXECUTED_FLAG - # => [[0, 0, 0, is_executed], MSG] - - swapw - # => [MSG, IS_EXECUTED_FLAG] - - push.EXECUTED_TXS_SLOT[0..2] - # => [txs_slot_prefix, txs_slot_suffix, MSG, IS_EXECUTED_FLAG] - - # Set the key value pair in the map to mark transaction as executed - exec.native_account::set_map_item - # => [[0, 0, 0, is_executed]] - - drop drop drop - # => [is_executed] - - assertz.err=ERR_TX_ALREADY_EXECUTED - # => [] -end - -#! Remove old approver public keys from the approver public key mapping. -#! -#! This procedure cleans up the storage by removing public keys of approvers that are no longer -#! part of the multisig configuration. This procedure assumes that init_num_of_approvers and -#! new_num_of_approvers are u32 values. -#! -#! Inputs: [init_num_of_approvers, new_num_of_approvers] -#! Outputs: [] -#! -#! Where: -#! - init_num_of_approvers is the original number of approvers before the update -#! - new_num_of_approvers is the new number of approvers after the update -proc cleanup_pubkey_mapping(init_num_of_approvers: u32, new_num_of_approvers: u32) - dup.1 dup.1 - u32assert2 u32lt - # => [should_loop, i = init_num_of_approvers, new_num_of_approvers] - - while.true - # => [i, new_num_of_approvers] - - sub.1 - # => [i-1, new_num_of_approvers] - - dup - # => [i-1, i-1, new_num_of_approvers] - - push.0.0.0 - # => [[0, 0, 0, i-1], i-1, new_num_of_approvers] - - padw swapw - # => [[0, 0, 0, i-1], EMPTY_WORD, i-1, new_num_of_approvers] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, [0, 0, 0, i-1], EMPTY_WORD, i-1, new_num_of_approvers] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, new_num_of_approvers] - - dropw - # => [i-1, new_num_of_approvers] - - dup.1 dup.1 - u32lt - # => [should_loop, i-1, new_num_of_approvers] - end - - drop drop - # => [] -end - -#! Update threshold config and add / remove approvers -#! -#! 
Inputs: -#! Operand stack: [MULTISIG_CONFIG_HASH, pad(12)] -#! Advice map: { -#! MULTISIG_CONFIG_HASH => [CONFIG, PUB_KEY_N, PUB_KEY_N-1, ..., PUB_KEY_0] -#! } -#! Outputs: -#! Operand stack: [] -#! -#! Where: -#! - MULTISIG_CONFIG_HASH is the hash of the threshold and new public key vector -#! - MULTISIG_CONFIG is [threshold, num_approvers, 0, 0] -#! - PUB_KEY_i is the public key of the i-th signer -#! -#! Locals: -#! 0: new_num_of_approvers -#! 1: init_num_of_approvers -@locals(2) -pub proc update_signers_and_threshold(multisig_config_hash: BeWord) - adv.push_mapval - # => [MULTISIG_CONFIG_HASH, pad(12)] - - adv_loadw - # => [MULTISIG_CONFIG, pad(12)] - - # store new_num_of_approvers for later - dup.2 loc_store.0 - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that the threshold is smaller than the number of approvers - u32assert2.err=ERR_MALFORMED_MULTISIG_CONFIG - u32gt assertz.err=ERR_MALFORMED_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that threshold or num_approvers are not zero - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, MULTISIG_CONFIG, pad(12)] - - exec.native_account::set_item - # => [OLD_THRESHOLD_CONFIG, pad(12)] - - # store init_num_of_approvers for later - drop drop loc_store.1 drop - # => [pad(12)] - - loc_load.0 - # => [num_approvers] - - dup neq.0 - while.true - sub.1 - # => [i-1, pad(12)] - - dup push.0.0.0 - # => [[0, 0, 0, i-1], i-1, pad(12)] - - padw adv_loadw - # => [PUB_KEY, [0, 0, 0, i-1], i-1, pad(12)] - - swapw - # => [[0, 0, 0, i-1], PUB_KEY, i-1, pad(12)] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, [0, 0, 0, i-1], PUB_KEY, i-1, pad(12)] - - 
exec.native_account::set_map_item - # => [OLD_VALUE, i-1, pad(12)] - - dropw - # => [i-1, pad(12)] - - dup neq.0 - # => [is_non_zero, i-1, pad(12)] - end - # => [pad(13)] - - drop - # => [pad(12)] - - # compare initial vs current multisig config - - # load init_num_of_approvers & new_num_of_approvers - loc_load.0 loc_load.1 - # => [init_num_of_approvers, new_num_of_approvers, pad(12)] - - exec.cleanup_pubkey_mapping - # => [pad(12)] -end - -# Computes the effective transaction threshold based on called procedures and per-procedure -# overrides stored in PROC_THRESHOLD_ROOTS_SLOT. Falls back to default_threshold if no -# overrides apply. -# -#! Inputs: [default_threshold] -#! Outputs: [transaction_threshold] -@locals(1) -proc compute_transaction_threshold(default_threshold: u32) -> u32 - # 1. initialize transaction_threshold = 0 - # 2. iterate through all account procedures - # a. check if the procedure was called during the transaction - # b. if called, get the override threshold of that procedure from the config map - # c. if proc_threshold > transaction_threshold, set transaction_threshold = proc_threshold - # 3. if transaction_threshold == 0 at the end, revert to using default_threshold - - # store default_threshold for later - loc_store.0 - # => [] - - # 1. initialize transaction_threshold = 0 - push.0 - # => [transaction_threshold] - - # get the number of account procedures - exec.active_account::get_num_procedures - # => [num_procedures, transaction_threshold] - - # 2. iterate through all account procedures - dup neq.0 - # => [should_continue, num_procedures, transaction_threshold] - while.true - sub.1 dup - # => [num_procedures-1, num_procedures-1, transaction_threshold] - - # get procedure root of the procedure with index i - exec.active_account::get_procedure_root dupw - # => [PROC_ROOT, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2a. 
check if this procedure has been called in the transaction - exec.native_account::was_procedure_called - # => [was_called, PROC_ROOT, num_procedures-1, transaction_threshold] - - # if it has been called, get the override threshold of that procedure - if.true - # => [PROC_ROOT, num_procedures-1, transaction_threshold] - - push.PROC_THRESHOLD_ROOTS_SLOT[0..2] - # => [proc_roots_slot_prefix, proc_roots_slot_suffix, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2b. get the override proc_threshold of that procedure - # if the procedure has no override threshold, the returned map item will be [0, 0, 0, 0] - exec.active_account::get_initial_map_item - # => [[0, 0, 0, proc_threshold], num_procedures-1, transaction_threshold] - - drop drop drop dup dup.3 - # => [transaction_threshold, proc_threshold, proc_threshold, num_procedures-1, transaction_threshold] - - u32assert2.err="transaction threshold or procedure threshold are not u32" - u32gt - # => [is_gt, proc_threshold, num_procedures-1, transaction_threshold] - # 2c. if proc_threshold > transaction_threshold, update transaction_threshold - movup.2 movdn.3 - # => [is_gt, proc_threshold, transaction_threshold, num_procedures-1] - cdrop - # => [updated_transaction_threshold, num_procedures-1] - swap - # => [num_procedures-1, updated_transaction_threshold] - # if it has not been called during this transaction, nothing to do, move to the next procedure - else - dropw - # => [num_procedures-1, transaction_threshold] - end - - dup neq.0 - # => [should_continue, num_procedures-1, transaction_threshold] - end - - drop - # => [transaction_threshold] - - loc_load.0 - # => [default_threshold, transaction_threshold] - - # 3. if transaction_threshold == 0 at the end, revert to using default_threshold - dup.1 eq.0 - # => [is_zero, default_threshold, transaction_threshold] - - cdrop - # => [effective_transaction_threshold] -end - -#! Authenticate a transaction using the ECDSA signature scheme with multi-signature support. -#! 
-#! This procedure implements multi-signature authentication by: -#! 1. Computing the transaction summary message that needs to be signed -#! 2. Verifying signatures from multiple required signers against their public keys -#! 3. Ensuring the minimum threshold of valid signatures is met -#! 4. Implementing replay protection by tracking executed transactions -#! -#! Inputs: -#! Operand stack: [SALT] -#! Advice map: { -#! h(SIG_0, MSG): SIG_0, -#! h(SIG_1, MSG): SIG_1, -#! h(SIG_n, MSG): SIG_n -#! } -#! Outputs: -#! Operand stack: [] -#! -#! Where: -#! - SALT is a cryptographically random nonce that enables multiple concurrent -#! multisig transactions while maintaining replay protection. Each transaction -#! must use a unique SALT value to ensure transaction uniqueness. -#! - SIG_i is the signature from the i-th signer. -#! - MSG is the transaction message being signed. -#! - h(SIG_i, MSG) is the hash of the signature and message used as the advice map key. -#! -#! Panics if: -#! - insufficient number of valid signatures (below threshold). -#! - the same transaction has already been executed (replay protection). -#! -#! 
Invocation: call -@locals(1) -pub proc auth_tx_ecdsa_k256_keccak_multisig(salt: BeWord) - exec.native_account::incr_nonce drop - # => [SALT] - - # ------ Computing transaction summary ------ - - exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # to build a tx_summary in the host, we need these four words in the advice provider - exec.auth::adv_insert_hqword - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # the commitment to the tx summary is the message that is signed - exec.auth::hash_tx_summary - # => [TX_SUMMARY_COMMITMENT] - - # ------ Verifying approver signatures ------ - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, TX_SUMMARY_COMMITMENT] - - exec.active_account::get_initial_item - # => [0, 0, num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - drop drop - # => [num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - swap movdn.5 - # => [num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - exec.::miden::standards::auth::ecdsa_k256_keccak::verify_signatures - # => [num_verified_signatures, TX_SUMMARY_COMMITMENT, default_threshold] - - # ------ Checking threshold is >= num_verified_signatures ------ - - movup.5 - # => [default_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - exec.compute_transaction_threshold - # => [transaction_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - u32assert2 u32lt - # => [is_unauthorized, TX_SUMMARY_COMMITMENT] - - # If signatures are non-existent the tx will fail here. 
- if.true - emit.AUTH_UNAUTHORIZED_EVENT - push.0 assert.err="insufficient number of signatures" - end - - # ------ Writing executed transaction MSG to map ------ - - exec.assert_new_tx - # => [TX_SUMMARY_COMMITMENT] -end diff --git a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_acl.masm b/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_acl.masm deleted file mode 100644 index 80081ab6c6..0000000000 --- a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_acl.masm +++ /dev/null @@ -1,160 +0,0 @@ -# The MASM code of the Falcon 512 RPO authentication Account Component with ACL. -# -# See the `AuthFalcon512RpoAcl` Rust type's documentation for more details. - -use miden::protocol::active_account -use miden::protocol::native_account -use miden::protocol::tx -use miden::core::word - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - -# CONSTANTS -# ================================================================================================ - -# The slot in this component's storage layout where the public key is stored. -const PUBLIC_KEY_SLOT = word("miden::standards::auth::falcon512_rpo_acl::public_key") - -# The slot where the authentication configuration is stored. -const AUTH_CONFIG_SLOT = word("miden::standards::auth::falcon512_rpo_acl::config") - -# The slot where the map of auth trigger procedure roots is stored. -const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::falcon512_rpo_acl::trigger_procedure_roots") - -#! Authenticate a transaction using the Falcon signature scheme based on procedure calls and note usage. -#! -#! This authentication procedure checks: -#! 1. If any of the trigger procedures were called during the transaction -#! 2. If input notes were consumed and allow_unauthorized_input_notes is false -#! 3. If output notes were created and allow_unauthorized_output_notes is false -#! -#! 
If any of these conditions are true, standard Falcon512Rpo signature verification is performed. -#! Otherwise, only the nonce is incremented. -#! -#! Inputs: [AUTH_ARGS, pad(12)] -#! Outputs: [pad(16)] -#! -#! Invocation: call -@locals(2) -pub proc auth_tx_falcon512_rpo_acl(auth_args: BeWord) - dropw - # => [pad(16)] - - # Get the authentication configuration - push.AUTH_CONFIG_SLOT[0..2] exec.active_account::get_item - # => [0, allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] - - drop - # => [allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] - - loc_store.1 loc_store.0 - # => [num_auth_trigger_procs, pad(16)] - - # ------ Check if any trigger procedure was called ------ - - # Counter `i` starts at `num_auth_trigger_procs` and flag `require_acl_auth` starts at 0 - # `require_acl_auth` is true if any ACL procedures were called - push.0 - # => [require_acl_auth, i, pad(16)] - - # Loop through trigger procedures - dup.1 neq.0 - while.true - # => [require_acl_auth, i, pad(16)] - - # Get the procedure root from storage - dup.1 sub.1 push.0.0.0 push.AUTH_TRIGGER_PROCS_MAP_SLOT[0..2] - # => [trigger_proc_slot_prefix, trigger_proc_slot_suffix, [0, 0, 0, i-1], require_acl_auth, i, pad(16)] - - exec.active_account::get_map_item - # => [AUTH_TRIGGER_PROC_ROOT, require_acl_auth, i, pad(16)] - - exec.native_account::was_procedure_called - # => [was_called, require_acl_auth, i, pad(16)] - - # Update require_acl_auth - or - # => [require_acl_auth', i, pad(16)] - - swap sub.1 swap - # => [require_acl_auth', i-1, pad(16)] - - # Check if we should continue looping - dup.1 neq.0 - # => [should_continue, require_acl_auth', i-1, pad(16)] - end - # => [require_acl_auth, i-1, pad(16)] - - swap drop - # => [require_acl_auth, pad(16)] - - # ------ Check if output notes were created ------ - - exec.tx::get_num_output_notes - # => [num_output_notes, require_acl_auth, pad(16)] - - neq.0 - # => 
[were_output_notes_created, require_acl_auth, pad(16)] - - loc_load.0 not - # => [!allow_unauthorized_output_notes, were_output_notes_created, require_acl_auth, pad(16)] - - and - # => [require_output_note_auth, require_acl_auth, pad(16)] - - or - # => [auth_required, pad(16)] - - # ------ Check if input notes were consumed ------ - - exec.tx::get_num_input_notes - # => [INPUT_NOTES_COMMITMENT, auth_required, pad(16)] - - neq.0 - # => [were_input_notes_consumed, auth_required, pad(16)] - - loc_load.1 not - # => [!allow_unauthorized_input_notes, were_input_notes_consumed, auth_required, pad(16)] - - and - # => [require_input_note_auth, auth_required, pad(16)] - - or - # => [auth_required, pad(16)] - - # If authentication is required, perform signature verification - if.true - # Fetch public key from storage. - push.PUBLIC_KEY_SLOT[0..2] exec.active_account::get_item - # => [PUB_KEY, pad(16)] - - exec.::miden::standards::auth::falcon512_rpo::authenticate_transaction - else - # ------ Check if initial account commitment differs from current commitment ------ - - exec.active_account::get_initial_commitment - # => [INITIAL_COMMITMENT, pad(16)] - - exec.active_account::compute_commitment - # => [CURRENT_COMMITMENT, INITIAL_COMMITMENT, pad(16)] - - exec.word::eq not - # => [has_account_state_changed, pad(16)] - - # check if this is a new account (i.e., nonce == 0); this check is needed because new - # accounts are initialized with a non-empty state, and thus, unless the account was - # modified during the transaction, the initial and current state commitments will be - # the same - - exec.active_account::get_nonce eq.0 - # => [is_new_account, has_account_state_changed, pad(16)] - - or - # => [should_increment_nonce, pad(16)] - - if.true - exec.native_account::incr_nonce drop - end - end - # => [pad(16)] -end diff --git a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_multisig.masm 
b/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_multisig.masm deleted file mode 100644 index e3f3e2d6e7..0000000000 --- a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo_multisig.masm +++ /dev/null @@ -1,417 +0,0 @@ -# The MASM code of the Multi-Signature Falcon 512 RPO Authentication Component. -# -# See the `AuthFalcon512RpoMultisig` Rust type's documentation for more details. - -use miden::protocol::active_account -use miden::protocol::native_account -use miden::standards::auth - -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - -# CONSTANTS -# ================================================================================================= - -# Auth Request Constants - -# The event emitted when a signature is not found for a required signer. -const AUTH_UNAUTHORIZED_EVENT = event("miden::auth::unauthorized") - -# Storage Layout Constants -# -# ┌───────────────────────────────┬──────────┬──────────────┬───────────────────┐ -# │ THRESHOLD & APPROVERS CONFIG │ PUB KEYS │ EXECUTED TXS │ PROC THRESHOLDS │ -# │ (slot) │ (map) │ (map) │ (map) │ -# ├───────────────────────────────┼──────────┼──────────────┼───────────────────┤ -# │ 0 │ 1 │ 2 │ 3 │ -# └───────────────────────────────┴──────────┴──────────────┴───────────────────┘ - -# The slot in this component's storage layout where the default signature threshold and -# number of approvers are stored as: -# [default_threshold, num_approvers, 0, 0]. -# The threshold is guaranteed to be less than or equal to num_approvers. -const THRESHOLD_CONFIG_SLOT = word("miden::standards::auth::falcon512_rpo_multisig::threshold_config") - -# The slot in this component's storage layout where the public keys map is stored. 
-# Map entries: [key_index, 0, 0, 0] => APPROVER_PUBLIC_KEY -const APPROVER_PUBLIC_KEYS_SLOT = word("miden::standards::auth::falcon512_rpo_multisig::approver_public_keys") - -# The slot in this component's storage layout where executed transactions are stored. -# Map entries: transaction_message => [is_executed, 0, 0, 0] -const EXECUTED_TXS_SLOT = word("miden::standards::auth::falcon512_rpo_multisig::executed_transactions") - -# The slot in this component's storage layout where procedure thresholds are stored. -# Map entries: PROC_ROOT => [proc_threshold, 0, 0, 0] -const PROC_THRESHOLD_ROOTS_SLOT = word("miden::standards::auth::falcon512_rpo_multisig::procedure_thresholds") - -# Executed Transaction Flag Constant -const IS_EXECUTED_FLAG = [1, 0, 0, 0] - -# ERRORS -# ================================================================================================= - -const ERR_TX_ALREADY_EXECUTED = "failed to approve multisig transaction as it was already executed" - -const ERR_MALFORMED_MULTISIG_CONFIG = "number of approvers must be equal to or greater than threshold" - -const ERR_ZERO_IN_MULTISIG_CONFIG = "number of approvers or threshold must not be zero" - -#! Check if transaction has already been executed and add it to executed transactions for replay protection. -#! -#! Inputs: [MSG] -#! Outputs: [] -#! -#! Panics if: -#! - the same transaction has already been executed -proc assert_new_tx(msg: BeWord) - push.IS_EXECUTED_FLAG - # => [[0, 0, 0, is_executed], MSG] - - swapw - # => [MSG, IS_EXECUTED_FLAG] - - push.EXECUTED_TXS_SLOT[0..2] - # => [txs_slot_prefix, txs_slot_suffix, MSG, IS_EXECUTED_FLAG] - - # Set the key value pair in the map to mark transaction as executed - exec.native_account::set_map_item - # => [[0, 0, 0, is_executed]] - - drop drop drop - # => [is_executed] - - assertz.err=ERR_TX_ALREADY_EXECUTED - # => [] -end - -#! Remove old approver public keys from the approver public key mapping. -#! -#! 
This procedure cleans up the storage by removing public keys of approvers that are no longer -#! part of the multisig configuration. This procedure assumes that init_num_of_approvers and -#! new_num_of_approvers are u32 values. -#! -#! Inputs: [init_num_of_approvers, new_num_of_approvers] -#! Outputs: [] -#! -#! Where: -#! - init_num_of_approvers is the original number of approvers before the update -#! - new_num_of_approvers is the new number of approvers after the update -proc cleanup_pubkey_mapping(init_num_of_approvers: u32, new_num_of_approvers: u32) - dup.1 dup.1 - u32assert2 u32lt - # => [should_loop, i = init_num_of_approvers, new_num_of_approvers] - - while.true - # => [i, new_num_of_approvers] - - sub.1 - # => [i-1, new_num_of_approvers] - - dup - # => [i-1, i-1, new_num_of_approvers] - - push.0.0.0 - # => [[0, 0, 0, i-1], i-1, new_num_of_approvers] - - padw swapw - # => [[0, 0, 0, i-1], EMPTY_WORD, i-1, new_num_of_approvers] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, [0, 0, 0, i-1], EMPTY_WORD, i-1, new_num_of_approvers] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, new_num_of_approvers] - - dropw - # => [i-1, new_num_of_approvers] - - dup.1 dup.1 - u32lt - # => [should_loop, i-1, new_num_of_approvers] - end - - drop drop - # => [] -end - -#! Update threshold config and add / remove approvers -#! -#! Inputs: -#! Operand stack: [MULTISIG_CONFIG_HASH, pad(12)] -#! Advice map: { -#! MULTISIG_CONFIG_HASH => [CONFIG, PUB_KEY_N, PUB_KEY_N-1, ..., PUB_KEY_0] -#! } -#! Outputs: -#! Operand stack: [] -#! -#! Where: -#! - MULTISIG_CONFIG_HASH is the hash of the threshold and new public key vector -#! - MULTISIG_CONFIG is [threshold, num_approvers, 0, 0] -#! - PUB_KEY_i is the public key of the i-th signer -#! -#! Locals: -#! 0: new_num_of_approvers -#! 
1: init_num_of_approvers -@locals(2) -pub proc update_signers_and_threshold(multisig_config_hash: BeWord) - adv.push_mapval - # => [MULTISIG_CONFIG_HASH, pad(12)] - - adv_loadw - # => [MULTISIG_CONFIG, pad(12)] - - # store new_num_of_approvers for later - dup.2 loc_store.0 - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that the threshold is smaller than the number of approvers - u32assert2.err=ERR_MALFORMED_MULTISIG_CONFIG - u32gt assertz.err=ERR_MALFORMED_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - dup.3 dup.3 - # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] - - # make sure that threshold or num_approvers are not zero - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG - # => [MULTISIG_CONFIG, pad(12)] - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, MULTISIG_CONFIG, pad(12)] - - exec.native_account::set_item - # => [OLD_THRESHOLD_CONFIG, pad(12)] - - # store init_num_of_approvers for later - drop drop loc_store.1 drop - # => [pad(12)] - - loc_load.0 - # => [num_approvers] - - dup neq.0 - while.true - sub.1 - # => [i-1, pad(12)] - - dup push.0.0.0 - # => [[0, 0, 0, i-1], i-1, pad(12)] - - padw adv_loadw - # => [PUB_KEY, [0, 0, 0, i-1], i-1, pad(12)] - - swapw - # => [[0, 0, 0, i-1], PUB_KEY, i-1, pad(12)] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, [0, 0, 0, i-1], PUB_KEY, i-1, pad(12)] - - exec.native_account::set_map_item - # => [OLD_VALUE, i-1, pad(12)] - - dropw - # => [i-1, pad(12)] - - dup neq.0 - # => [is_non_zero, i-1, pad(12)] - end - # => [pad(13)] - - drop - # => [pad(12)] - - # compare initial vs current multisig config - - # load init_num_of_approvers & new_num_of_approvers - loc_load.0 loc_load.1 - # => [init_num_of_approvers, new_num_of_approvers, pad(12)] - - exec.cleanup_pubkey_mapping - # => [pad(12)] -end - -# Computes the 
effective transaction threshold based on called procedures and per-procedure -# overrides stored in PROC_THRESHOLD_ROOTS_SLOT. Falls back to default_threshold if no -# overrides apply. -# -#! Inputs: [default_threshold] -#! Outputs: [transaction_threshold] -@locals(1) -proc compute_transaction_threshold(default_threshold: u32) -> u32 - # 1. initialize transaction_threshold = 0 - # 2. iterate through all account procedures - # a. check if the procedure was called during the transaction - # b. if called, get the override threshold of that procedure from the config map - # c. if proc_threshold > transaction_threshold, set transaction_threshold = proc_threshold - # 3. if transaction_threshold == 0 at the end, revert to using default_threshold - - # store default_threshold for later - loc_store.0 - # => [] - - # 1. initialize transaction_threshold = 0 - push.0 - # => [transaction_threshold] - - # get the number of account procedures - exec.active_account::get_num_procedures - # => [num_procedures, transaction_threshold] - - # 2. iterate through all account procedures - dup neq.0 - # => [should_continue, num_procedures, transaction_threshold] - while.true - sub.1 dup - # => [num_procedures-1, num_procedures-1, transaction_threshold] - - # get procedure root of the procedure with index i - exec.active_account::get_procedure_root dupw - # => [PROC_ROOT, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2a. check if this procedure has been called in the transaction - exec.native_account::was_procedure_called - # => [was_called, PROC_ROOT, num_procedures-1, transaction_threshold] - - # if it has been called, get the override threshold of that procedure - if.true - # => [PROC_ROOT, num_procedures-1, transaction_threshold] - - push.PROC_THRESHOLD_ROOTS_SLOT[0..2] - # => [proc_roots_slot_prefix, proc_roots_slot_suffix, PROC_ROOT, num_procedures-1, transaction_threshold] - - # 2b. 
get the override proc_threshold of that procedure - # if the procedure has no override threshold, the returned map item will be [0, 0, 0, 0] - exec.active_account::get_initial_map_item - # => [[0, 0, 0, proc_threshold], num_procedures-1, transaction_threshold] - - drop drop drop dup dup.3 - # => [transaction_threshold, proc_threshold, proc_threshold, num_procedures-1, transaction_threshold] - - u32assert2.err="transaction threshold or procedure threshold are not u32" - u32gt - # => [is_gt, proc_threshold, num_procedures-1, transaction_threshold] - # 2c. if proc_threshold > transaction_threshold, update transaction_threshold - movup.2 movdn.3 - # => [is_gt, proc_threshold, transaction_threshold, num_procedures-1] - cdrop - # => [updated_transaction_threshold, num_procedures-1] - swap - # => [num_procedures-1, updated_transaction_threshold] - # if it has not been called during this transaction, nothing to do, move to the next procedure - else - dropw - # => [num_procedures-1, transaction_threshold] - end - - dup neq.0 - # => [should_continue, num_procedures-1, transaction_threshold] - end - - drop - # => [transaction_threshold] - - loc_load.0 - # => [default_threshold, transaction_threshold] - - # 3. if transaction_threshold == 0 at the end, revert to using default_threshold - dup.1 eq.0 - # => [is_zero, default_threshold, transaction_threshold] - - cdrop - # => [effective_transaction_threshold] -end - -#! Authenticate a transaction using the Falcon signature scheme with multi-signature support. -#! -#! This procedure implements multi-signature authentication by: -#! 1. Computing the transaction summary message that needs to be signed -#! 2. Verifying signatures from multiple required signers against their public keys -#! 3. Ensuring the minimum threshold of valid signatures is met -#! 4. Implementing replay protection by tracking executed transactions -#! -#! Inputs: -#! Operand stack: [SALT] -#! Advice map: { -#! h(SIG_0, MSG): SIG_0, -#! h(SIG_1, MSG): SIG_1, -#! 
h(SIG_n, MSG): SIG_n -#! } -#! Outputs: -#! Operand stack: [] -#! -#! Where: -#! - SALT is a cryptographically random nonce that enables multiple concurrent -#! multisig transactions while maintaining replay protection. Each transaction -#! must use a unique SALT value to ensure transaction uniqueness. -#! - SIG_i is the signature from the i-th signer. -#! - MSG is the transaction message being signed. -#! - h(SIG_i, MSG) is the hash of the signature and message used as the advice map key. -#! -#! Panics if: -#! - insufficient number of valid signatures (below threshold). -#! - the same transaction has already been executed (replay protection). -#! -#! Invocation: call -@locals(1) -pub proc auth_tx_falcon512_rpo_multisig(salt: BeWord) - exec.native_account::incr_nonce drop - # => [SALT] - - # ------ Computing transaction summary ------ - - exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # to build a tx_summary in the host, we need these four words in the advice provider - exec.auth::adv_insert_hqword - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - # the commitment to the tx summary is the message that is signed - exec.auth::hash_tx_summary - # => [TX_SUMMARY_COMMITMENT] - - # ------ Verifying approver signatures ------ - - push.THRESHOLD_CONFIG_SLOT[0..2] - # => [config_slot_prefix, config_slot_suffix, TX_SUMMARY_COMMITMENT] - - exec.active_account::get_initial_item - # => [0, 0, num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - drop drop - # => [num_of_approvers, default_threshold, TX_SUMMARY_COMMITMENT] - - swap movdn.5 - # => [num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - push.APPROVER_PUBLIC_KEYS_SLOT[0..2] - # => [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] - - exec.::miden::standards::auth::falcon512_rpo::verify_signatures - # => 
[num_verified_signatures, TX_SUMMARY_COMMITMENT, default_threshold] - - # ------ Checking threshold is >= num_verified_signatures ------ - - movup.5 - # => [default_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - exec.compute_transaction_threshold - # => [transaction_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] - - u32assert2 u32lt - # => [is_unauthorized, TX_SUMMARY_COMMITMENT] - - # If signatures are non-existent the tx will fail here. - if.true - emit.AUTH_UNAUTHORIZED_EVENT - push.0 assert.err="insufficient number of signatures" - end - - # ------ Writing executed transaction MSG to map ------ - - exec.assert_new_tx - # => [TX_SUMMARY_COMMITMENT] -end diff --git a/crates/miden-standards/asm/account_components/auth/multisig.masm b/crates/miden-standards/asm/account_components/auth/multisig.masm new file mode 100644 index 0000000000..5e698bc886 --- /dev/null +++ b/crates/miden-standards/asm/account_components/auth/multisig.masm @@ -0,0 +1,28 @@ +# The MASM code of the Multi-Signature Authentication Component. +# +# See the `AuthMultisig` Rust type's documentation for more details. + +use miden::standards::auth::multisig + +pub use multisig::update_signers_and_threshold +pub use multisig::get_threshold_and_num_approvers +pub use multisig::set_procedure_threshold +pub use multisig::get_signer_at +pub use multisig::is_signer + +#! Authenticate a transaction with multi-signature support. +#! +#! Inputs: +#! Operand stack: [SALT] +#! Outputs: +#! Operand stack: [] +#! +#! 
Invocation: call +@auth_script +pub proc auth_tx_multisig(salt: word) + exec.multisig::auth_tx + # => [TX_SUMMARY_COMMITMENT] + + exec.multisig::assert_new_tx + # => [] +end diff --git a/crates/miden-standards/asm/account_components/auth/multisig_psm.masm b/crates/miden-standards/asm/account_components/auth/multisig_psm.masm new file mode 100644 index 0000000000..591ba376ab --- /dev/null +++ b/crates/miden-standards/asm/account_components/auth/multisig_psm.masm @@ -0,0 +1,37 @@ +# The MASM code of the Multi-Signature Authentication Component with Private State Manager. +# +# See the `AuthMultisigPsm` Rust type's documentation for more details. + +use miden::standards::auth::multisig +use miden::standards::auth::psm + +pub use multisig::update_signers_and_threshold +pub use multisig::get_threshold_and_num_approvers +pub use multisig::set_procedure_threshold +pub use multisig::get_signer_at +pub use multisig::is_signer + +pub use psm::update_psm_public_key + +#! Authenticate a transaction with multi-signature support and optional PSM verification. +#! +#! Inputs: +#! Operand stack: [SALT] +#! Outputs: +#! Operand stack: [] +#! +#! Invocation: call +@auth_script +pub proc auth_tx_multisig_psm(salt: word) + exec.multisig::auth_tx + # => [TX_SUMMARY_COMMITMENT] + + dupw + # => [TX_SUMMARY_COMMITMENT, TX_SUMMARY_COMMITMENT] + + exec.psm::verify_signature + # => [TX_SUMMARY_COMMITMENT] + + exec.multisig::assert_new_tx + # => [] +end diff --git a/crates/miden-standards/asm/account_components/auth/no_auth.masm b/crates/miden-standards/asm/account_components/auth/no_auth.masm index 4ced08325f..d0b2c86f26 100644 --- a/crates/miden-standards/asm/account_components/auth/no_auth.masm +++ b/crates/miden-standards/asm/account_components/auth/no_auth.masm @@ -12,6 +12,7 @@ use miden::core::word #! #! Inputs: [pad(16)] #! 
Outputs: [pad(16)] +@auth_script pub proc auth_no_auth # check if the account state has changed by comparing initial and final commitments diff --git a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo.masm b/crates/miden-standards/asm/account_components/auth/singlesig.masm similarity index 58% rename from crates/miden-standards/asm/account_components/auth/falcon_512_rpo.masm rename to crates/miden-standards/asm/account_components/auth/singlesig.masm index bee640eab6..ab9b587f48 100644 --- a/crates/miden-standards/asm/account_components/auth/falcon_512_rpo.masm +++ b/crates/miden-standards/asm/account_components/auth/singlesig.masm @@ -1,19 +1,24 @@ -# The MASM code of the Falcon 512 RPO authentication Account Component. +# The MASM code of the BasicSignature Authentication Account Component. # -# See the `AuthFalcon512Rpo` Rust type's documentation for more details. +# See the `AuthBasicSignature` Rust type's documentation for more details. -use miden::standards::auth::falcon512_rpo +use miden::standards::auth::signature use miden::protocol::active_account -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # CONSTANTS # ================================================================================================= # The slot in this component's storage layout where the public key is stored. -const PUBLIC_KEY_SLOT = word("miden::standards::auth::falcon512_rpo::public_key") +const PUBLIC_KEY_SLOT = word("miden::standards::auth::singlesig::pub_key") + +# The slot in this component's storage layout where the corresponding signature scheme id is stored. +const SCHEME_ID_SLOT = word("miden::standards::auth::singlesig::scheme") -#! Authenticate a transaction using the Falcon signature scheme. +#! Authenticate a transaction using the signature scheme specified by scheme_id. +#! +#! Supported schemes: +#! - 1 => ecdsa_k256_keccak +#! - 2 => falcon512_poseidon2 #! #! 
It first increments the nonce of the account, independent of whether the account's state has #! changed or not. Then it computes and signs the following message (in memory order): @@ -28,7 +33,8 @@ const PUBLIC_KEY_SLOT = word("miden::standards::auth::falcon512_rpo::public_key" #! Outputs: [pad(16)] #! #! Invocation: call -pub proc auth_tx_falcon512_rpo(auth_args: BeWord) +@auth_script +pub proc auth_tx(auth_args: word) dropw # => [pad(16)] @@ -38,6 +44,12 @@ pub proc auth_tx_falcon512_rpo(auth_args: BeWord) push.PUBLIC_KEY_SLOT[0..2] exec.active_account::get_item # => [PUB_KEY, pad(16)] - exec.falcon512_rpo::authenticate_transaction + push.SCHEME_ID_SLOT[0..2] exec.active_account::get_item + # => [scheme_id, 0, 0, 0, PUB_KEY, pad(16)] + + movdn.7 drop drop drop + # => [PUB_KEY, scheme_id, pad(16)] + + exec.signature::authenticate_transaction # => [pad(16)] end diff --git a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_acl.masm b/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm similarity index 72% rename from crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_acl.masm rename to crates/miden-standards/asm/account_components/auth/singlesig_acl.masm index 173b811386..b3484554a5 100644 --- a/crates/miden-standards/asm/account_components/auth/ecdsa_k256_keccak_acl.masm +++ b/crates/miden-standards/asm/account_components/auth/singlesig_acl.masm @@ -1,27 +1,37 @@ # The MASM code of the ECDSA K256 Keccak authentication Account Component with ACL. # -# See the `AuthEcdsaK256KeccakAcl` Rust type's documentation for more details. +# See the `AuthSingleSigAcl` Rust type's documentation for more details. 
use miden::protocol::active_account use miden::protocol::native_account use miden::protocol::tx +use miden::standards::auth::signature use miden::core::word -type BeWord = struct @bigendian { a: felt, b: felt, c: felt, d: felt } - # CONSTANTS # ================================================================================================ # The slot in this component's storage layout where the public key is stored. -const PUBLIC_KEY_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_acl::public_key") +const PUBLIC_KEY_SLOT = word("miden::standards::auth::singlesig_acl::pub_key") + +# The slot in this component's storage layout where the corresponding signature scheme id is stored. +const SCHEME_ID_SLOT = word("miden::standards::auth::singlesig_acl::scheme") # The slot where the authentication configuration is stored. -const AUTH_CONFIG_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_acl::config") +const AUTH_CONFIG_SLOT = word("miden::standards::auth::singlesig_acl::config") # The slot where the map of auth trigger procedure roots is stored. -const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::ecdsa_k256_keccak_acl::trigger_procedure_roots") +const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::singlesig_acl::trigger_procedure_roots") + +const ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC = 0 +const ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC = 1 -#! Authenticate a transaction using the ECDSA signature scheme based on procedure calls and note usage. +#! Authenticate a transaction using the signature scheme specified by scheme_id +#! based on procedure calls and note usage. +#! +#! Supported schemes: +#! - 1 => ecdsa_k256_keccak +#! - 2 => falcon512_poseidon2 #! #! This authentication procedure checks: #! 1. If any of the trigger procedures were called during the transaction @@ -35,19 +45,21 @@ const AUTH_TRIGGER_PROCS_MAP_SLOT = word("miden::standards::auth::ecdsa_k256_kec #! Outputs: [pad(16)] #! #! 
Invocation: call +@auth_script @locals(2) -pub proc auth_tx_ecdsa_k256_keccak_acl(auth_args: BeWord) +pub proc auth_tx_acl(auth_args: word) dropw # => [pad(16)] # Get the authentication configuration push.AUTH_CONFIG_SLOT[0..2] exec.active_account::get_item - # => [0, allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] + # => [num_auth_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, 0, pad(16)] - drop - # => [allow_unauthorized_input_notes, allow_unauthorized_output_notes, num_auth_trigger_procs, pad(16)] + movup.3 drop + # => [num_auth_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, pad(16)] - loc_store.1 loc_store.0 + swap loc_store.ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC + swap loc_store.ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC # => [num_auth_trigger_procs, pad(16)] # ------ Check if any trigger procedure was called ------ @@ -63,8 +75,8 @@ pub proc auth_tx_ecdsa_k256_keccak_acl(auth_args: BeWord) # => [require_acl_auth, i, pad(16)] # Get the procedure root from storage - dup.1 sub.1 push.0.0.0 push.AUTH_TRIGGER_PROCS_MAP_SLOT[0..2] - # => [trigger_proc_slot_prefix, trigger_proc_slot_suffix, [0, 0, 0, i-1], require_acl_auth, i, pad(16)] + push.0.0.0 dup.4 sub.1 push.AUTH_TRIGGER_PROCS_MAP_SLOT[0..2] + # => [trigger_proc_slot_prefix, trigger_proc_slot_suffix, [i-1, 0, 0, 0], require_acl_auth, i, pad(16)] exec.active_account::get_map_item # => [AUTH_TRIGGER_PROC_ROOT, require_acl_auth, i, pad(16)] @@ -96,7 +108,7 @@ pub proc auth_tx_ecdsa_k256_keccak_acl(auth_args: BeWord) neq.0 # => [were_output_notes_created, require_acl_auth, pad(16)] - loc_load.0 not + loc_load.ALLOW_UNAUTHORIZED_OUTPUT_NOTES_LOC not # => [!allow_unauthorized_output_notes, were_output_notes_created, require_acl_auth, pad(16)] and @@ -113,7 +125,7 @@ pub proc auth_tx_ecdsa_k256_keccak_acl(auth_args: BeWord) neq.0 # => [were_input_notes_consumed, auth_required, pad(16)] - loc_load.1 not + 
loc_load.ALLOW_UNAUTHORIZED_INPUT_NOTES_LOC not # => [!allow_unauthorized_input_notes, were_input_notes_consumed, auth_required, pad(16)] and @@ -128,7 +140,14 @@ pub proc auth_tx_ecdsa_k256_keccak_acl(auth_args: BeWord) push.PUBLIC_KEY_SLOT[0..2] exec.active_account::get_item # => [PUB_KEY, pad(16)] - exec.::miden::standards::auth::ecdsa_k256_keccak::authenticate_transaction + # Fetch scheme_id from storage + push.SCHEME_ID_SLOT[0..2] exec.active_account::get_item + # => [[scheme_id, 0, 0, 0], PUB_KEY, pad(16)] + + movdn.7 drop drop drop + # => [PUB_KEY, scheme_id, pad(16)] + + exec.signature::authenticate_transaction else # ------ Check if initial account commitment differs from current commitment ------ diff --git a/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm b/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm index 7d350a4224..604239c7fd 100644 --- a/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm +++ b/crates/miden-standards/asm/account_components/faucets/network_fungible_faucet.masm @@ -4,5 +4,3 @@ pub use ::miden::standards::faucets::network_fungible::distribute pub use ::miden::standards::faucets::network_fungible::burn -pub use ::miden::standards::faucets::network_fungible::transfer_ownership -pub use ::miden::standards::faucets::network_fungible::renounce_ownership diff --git a/crates/miden-standards/asm/note_scripts/BURN.masm b/crates/miden-standards/asm/note_scripts/BURN.masm deleted file mode 100644 index 07426b1bc5..0000000000 --- a/crates/miden-standards/asm/note_scripts/BURN.masm +++ /dev/null @@ -1,5 +0,0 @@ -use miden::standards::notes::burn - -begin - exec.burn::main -end diff --git a/crates/miden-standards/asm/note_scripts/MINT.masm b/crates/miden-standards/asm/note_scripts/MINT.masm deleted file mode 100644 index 272fc4ab9a..0000000000 --- a/crates/miden-standards/asm/note_scripts/MINT.masm +++ /dev/null @@ -1,5 +0,0 @@ -use 
miden::standards::notes::mint - -begin - exec.mint::main -end diff --git a/crates/miden-standards/asm/note_scripts/P2ID.masm b/crates/miden-standards/asm/note_scripts/P2ID.masm deleted file mode 100644 index d3050ea305..0000000000 --- a/crates/miden-standards/asm/note_scripts/P2ID.masm +++ /dev/null @@ -1,5 +0,0 @@ -use miden::standards::notes::p2id - -begin - exec.p2id::main -end diff --git a/crates/miden-standards/asm/note_scripts/P2IDE.masm b/crates/miden-standards/asm/note_scripts/P2IDE.masm deleted file mode 100644 index 6d65869eef..0000000000 --- a/crates/miden-standards/asm/note_scripts/P2IDE.masm +++ /dev/null @@ -1,5 +0,0 @@ -use miden::standards::notes::p2ide - -begin - exec.p2ide::main -end diff --git a/crates/miden-standards/asm/note_scripts/SWAP.masm b/crates/miden-standards/asm/note_scripts/SWAP.masm deleted file mode 100644 index 7d4f95b31b..0000000000 --- a/crates/miden-standards/asm/note_scripts/SWAP.masm +++ /dev/null @@ -1,5 +0,0 @@ -use miden::standards::notes::swap - -begin - exec.swap::main -end diff --git a/crates/miden-standards/asm/standards/access/ownable.masm b/crates/miden-standards/asm/standards/access/ownable.masm index 79702c1945..b0591e71a5 100644 --- a/crates/miden-standards/asm/standards/access/ownable.masm +++ b/crates/miden-standards/asm/standards/access/ownable.masm @@ -15,7 +15,7 @@ use miden::protocol::native_account const OWNER_CONFIG_SLOT = word("miden::standards::access::ownable::owner_config") # ZERO_ADDRESS word (all zeros) used to represent no owner -# Format: [prefix=0, suffix=0, 0, 0] as stored in account storage +# Layout: [suffix=0, prefix=0, 0, 0] as stored in account storage const ZERO_ADDRESS = [0, 0, 0, 0] # ERRORS @@ -29,45 +29,32 @@ const ERR_SENDER_NOT_OWNER = "note sender is not the owner" #! Returns the owner AccountId from storage. #! #! Inputs: [] -#! Outputs: [owner_prefix, owner_suffix] +#! Outputs: [owner_suffix, owner_prefix] #! #! Where: -#! 
- owner_{prefix, suffix} are the prefix and suffix felts of the owner AccountId. +#! - owner_{suffix, prefix} are the suffix and prefix felts of the owner AccountId. proc owner push.OWNER_CONFIG_SLOT[0..2] exec.active_account::get_item - # => [owner_prefix, owner_suffix, 0, 0] - - # Storage format in memory: [0, 0, suffix, prefix] (word[0], word[1], word[2], word[3]) - # mem_loadw_be loads big-endian (reversed), so stack gets: [prefix, suffix, 0, 0] - # Stack: [owner_prefix (pos 0), owner_suffix (pos 1), 0 (pos 2), 0 (pos 3)] - # We want: [owner_prefix, owner_suffix] - # Move zeros to top using movup, then drop them - movup.2 - # => [0, owner_prefix, owner_suffix, 0] (moves element at pos 2 to pos 0) - - movup.3 - # => [0, 0, owner_prefix, owner_suffix] (moves element at pos 3 to pos 0) + # => [0, 0, owner_suffix, owner_prefix] drop drop - # => [owner_prefix, owner_suffix] + # => [owner_suffix, owner_prefix] end #! Checks if the given account ID is the owner of this component. #! -#! Inputs: [account_id_prefix, account_id_suffix] +#! Inputs: [account_id_suffix, account_id_prefix] #! Outputs: [is_owner] #! #! Where: -#! - account_id_{prefix, suffix} are the prefix and suffix felts of the AccountId to check. +#! - account_id_{suffix, prefix} are the suffix and prefix felts of the AccountId to check. #! - is_owner is 1 if the account is the owner, 0 otherwise. proc is_owner - exec.owner - # => [owner_prefix, owner_suffix, account_id_prefix, account_id_suffix] + # => [owner_suffix, owner_prefix, account_id_suffix, account_id_prefix] exec.account_id::is_equal # => [is_owner] - end # PUBLIC INTERFACE @@ -82,7 +69,7 @@ end #! - the note sender is not the owner. pub proc verify_owner exec.active_note::get_sender - # => [sender_prefix, sender_suffix] + # => [sender_suffix, sender_prefix] exec.is_owner # => [is_owner] @@ -94,26 +81,26 @@ end #! Returns the owner AccountId. #! #! Inputs: [pad(16)] -#! Outputs: [owner_prefix, owner_suffix, pad(14)] +#! 
Outputs: [owner_suffix, owner_prefix, pad(14)] #! #! Where: -#! - owner_{prefix, suffix} are the prefix and suffix felts of the owner AccountId. +#! - owner_{suffix, prefix} are the suffix and prefix felts of the owner AccountId. #! #! Invocation: call pub proc get_owner exec.owner - # => [owner_prefix, owner_suffix, pad(14)] + # => [owner_suffix, owner_prefix, pad(14)] end #! Transfers ownership to a new account. #! #! Can only be called by the current owner. #! -#! Inputs: [new_owner_prefix, new_owner_suffix, pad(14)] +#! Inputs: [new_owner_suffix, new_owner_prefix, pad(14)] #! Outputs: [pad(16)] #! #! Where: -#! - new_owner_{prefix, suffix} are the prefix and suffix felts of the new owner AccountId. +#! - new_owner_{suffix, prefix} are the suffix and prefix felts of the new owner AccountId. #! #! Panics if: #! - the note sender is not the owner. @@ -122,13 +109,13 @@ end pub proc transfer_ownership # Check that the caller is the owner exec.verify_owner - # => [new_owner_prefix, new_owner_suffix, pad(14)] + # => [new_owner_suffix, new_owner_prefix, pad(14)] - push.0 movdn.2 push.0 movdn.2 - # => [new_owner_prefix, new_owner_suffix, 0, 0, pad(14)] + push.0.0 + # => [0, 0, new_owner_suffix, new_owner_prefix, pad(14)] push.OWNER_CONFIG_SLOT[0..2] - # => [slot_prefix, slot_suffix, new_owner_prefix, new_owner_suffix, 0, 0, pad(14)] + # => [slot_suffix, slot_prefix, 0, 0, new_owner_suffix, new_owner_prefix, pad(14)] exec.native_account::set_item # => [OLD_OWNER_WORD, pad(14)] @@ -164,7 +151,7 @@ pub proc renounce_ownership # => [0, 0, 0, 0, pad(16)] push.OWNER_CONFIG_SLOT[0..2] - # => [slot_prefix, slot_suffix, 0, 0, 0, 0, pad(16)] + # => [slot_suffix, slot_prefix, 0, 0, 0, 0, pad(16)] exec.native_account::set_item # => [OLD_OWNER_WORD, pad(16)] diff --git a/crates/miden-standards/asm/standards/access/ownable2step.masm b/crates/miden-standards/asm/standards/access/ownable2step.masm new file mode 100644 index 0000000000..d4b7bcffbd --- /dev/null +++ 
b/crates/miden-standards/asm/standards/access/ownable2step.masm @@ -0,0 +1,369 @@ +# miden::standards::access::ownable2step +# +# Provides two-step ownership management functionality for account components. +# This module can be imported and used by any component that needs owner controls. +# +# Unlike a single-step ownership transfer, this module requires the new owner to explicitly +# accept the transfer before it takes effect. This prevents accidental transfers to incorrect +# addresses, which would permanently lock the component. +# +# The transfer flow is: +# 1. The current owner calls `transfer_ownership` to nominate a new owner. +# 2. The nominated account calls `accept_ownership` to complete the transfer. +# 3. Optionally, the current owner can call `transfer_ownership` with their own address +# to cancel the nominated transfer. +# +# Storage layout (single slot): +# Word: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +# word[0] word[1] word[2] word[3] + +use miden::protocol::active_account +use miden::protocol::account_id +use miden::protocol::active_note +use miden::protocol::native_account + +# CONSTANTS +# ================================================================================================ + +# Ownership config value representing renounced ownership (all zeros). +const RENOUNCED_OWNERSHIP_CONFIG = [0, 0, 0, 0] + +# The slot in this component's storage layout where the owner configuration is stored. +# Contains both the current owner and the nominated owner in a single word. 
+const OWNER_CONFIG_SLOT = word("miden::standards::access::ownable2step::owner_config") + +# ERRORS +# ================================================================================================ + +const ERR_SENDER_NOT_OWNER = "note sender is not the owner" +const ERR_SENDER_NOT_NOMINATED_OWNER = "note sender is not the nominated owner" +const ERR_NO_NOMINATED_OWNER = "no nominated ownership transfer exists" + +# LOCAL MEMORY ADDRESSES +# ================================================================================================ + +# transfer_ownership locals +const NEW_OWNER_SUFFIX_LOC = 0 +const NEW_OWNER_PREFIX_LOC = 1 +const OWNER_SUFFIX_LOC = 2 +const OWNER_PREFIX_LOC = 3 + +# INTERNAL PROCEDURES +# ================================================================================================ + +#! Returns the full ownership word from storage. +#! +#! Inputs: [] +#! Outputs: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +#! +#! Where: +#! - owner_{suffix, prefix} are the suffix and prefix felts of the current owner account ID. +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. +proc load_ownership_info + push.OWNER_CONFIG_SLOT[0..2] exec.active_account::get_item + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +end + +#! Writes the ownership word to storage and drops the old value. +#! +#! Inputs: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] +#! Outputs: [] +proc save_ownership_info + push.OWNER_CONFIG_SLOT[0..2] + # => [slot_suffix, slot_prefix, owner_suffix, owner_prefix, + # nominated_owner_suffix, nominated_owner_prefix] + + exec.native_account::set_item + # => [OLD_OWNERSHIP_WORD] + + dropw + # => [] +end + +#! Returns the owner account ID from storage. +#! +#! Inputs: [] +#! Outputs: [owner_suffix, owner_prefix] +#! +#! Where: +#! 
- owner_{suffix, prefix} are the suffix and prefix felts of the owner account ID. +proc get_owner_internal + exec.load_ownership_info + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] + + movup.2 drop + # => [owner_suffix, owner_prefix, nominated_owner_prefix] + + movup.2 drop + # => [owner_suffix, owner_prefix] +end + +#! Returns the nominated owner account ID from storage. +#! +#! Inputs: [] +#! Outputs: [nominated_owner_suffix, nominated_owner_prefix] +#! +#! Where: +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. +proc get_nominated_owner_internal + exec.load_ownership_info + # => [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix] + + drop drop + # => [nominated_owner_suffix, nominated_owner_prefix] +end + +#! Checks if the given account ID is the owner of this component. +#! +#! Inputs: [account_id_suffix, account_id_prefix] +#! Outputs: [is_owner] +#! +#! Where: +#! - is_owner is 1 if the account is the owner, 0 otherwise. +proc is_owner_internal + exec.get_owner_internal + # => [owner_suffix, owner_prefix, account_id_suffix, account_id_prefix] + + exec.account_id::is_equal + # => [is_owner] +end + +#! Checks if the given account ID is the nominated owner of this component. +#! +#! Inputs: [account_id_suffix, account_id_prefix] +#! Outputs: [is_nominated_owner] +#! +#! Where: +#! - account_id_{suffix, prefix} are the suffix and prefix felts of the account ID to check. +#! - is_nominated_owner is 1 if the account is the nominated owner, 0 otherwise. +proc is_nominated_owner_internal + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, account_id_suffix, account_id_prefix] + + exec.account_id::is_equal + # => [is_nominated_owner] +end + +#! Checks if the note sender is the owner and panics if not. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! 
Invocation: exec +proc assert_sender_is_owner_internal + exec.active_note::get_sender + # => [sender_suffix, sender_prefix] + + exec.is_owner_internal + # => [is_owner] + + assert.err=ERR_SENDER_NOT_OWNER + # => [] +end + +# PUBLIC INTERFACE +# ================================================================================================ + +#! Checks if the note sender is the owner and panics if not. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Invocation: call +pub proc assert_sender_is_owner + exec.assert_sender_is_owner_internal + # => [pad(16)] +end + +#! Returns the owner account ID. +#! +#! Inputs: [pad(16)] +#! Outputs: [owner_suffix, owner_prefix, pad(14)] +#! +#! Where: +#! - owner_{suffix, prefix} are the suffix and prefix felts of the owner account ID. +#! +#! Invocation: call +pub proc get_owner + exec.get_owner_internal + # => [owner_suffix, owner_prefix, pad(16)] + + movup.2 drop movup.2 drop + # => [owner_suffix, owner_prefix, pad(14)] +end + +#! Returns the nominated owner account ID. +#! +#! Inputs: [pad(16)] +#! Outputs: [nominated_owner_suffix, nominated_owner_prefix, pad(14)] +#! +#! Where: +#! - nominated_owner_{suffix, prefix} are the suffix and prefix felts of the nominated +#! owner account ID. Both are zero if no nominated transfer exists. +#! +#! Invocation: call +pub proc get_nominated_owner + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + movup.2 drop movup.2 drop + # => [nominated_owner_suffix, nominated_owner_prefix, pad(14)] +end + +#! Initiates a two-step ownership transfer by setting the nominated owner. +#! +#! The current owner remains in control until the nominated owner calls `accept_ownership`. +#! Can only be called by the current owner. +#! +#! If the new owner is the current owner, any nominated transfer is cancelled and the +#! nominated owner field is cleared. +#! +#! 
Inputs: [new_owner_suffix, new_owner_prefix, pad(14)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Locals: +#! 0: new_owner_suffix +#! 1: new_owner_prefix +#! 2: owner_suffix +#! 3: owner_prefix +#! +#! Invocation: call +@locals(4) +pub proc transfer_ownership + exec.assert_sender_is_owner_internal + # => [new_owner_suffix, new_owner_prefix, pad(14)] + + dup.1 dup.1 exec.account_id::validate + # => [new_owner_suffix, new_owner_prefix, pad(14)] + + loc_store.NEW_OWNER_SUFFIX_LOC + # => [new_owner_prefix, pad(14)] + + loc_store.NEW_OWNER_PREFIX_LOC + # => [pad(14)] + + exec.get_owner_internal + # => [owner_suffix, owner_prefix, pad(14)] + + loc_store.OWNER_SUFFIX_LOC + # => [owner_prefix, pad(13)] + + loc_store.OWNER_PREFIX_LOC + # => [pad(12)] + + # Check if new_owner == owner (cancel case). + loc_load.NEW_OWNER_PREFIX_LOC loc_load.NEW_OWNER_SUFFIX_LOC + # => [new_owner_suffix, new_owner_prefix, pad(12)] + + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix, pad(12)] + + exec.account_id::is_equal + # => [is_self_transfer, pad(12)] + + if.true + # Cancel ownership transfer and clear nominated owner. + # Stack for save: [owner_suffix, owner_prefix, nominated_suffix=0, nominated_prefix=0] + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, pad(12)] + + push.0.0 movup.3 movup.3 + # => [owner_suffix, owner_prefix, 0, 0, pad(12)] + else + # Transfer ownership by setting nominated = new_owner. + # Stack for save: [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix] + loc_load.NEW_OWNER_PREFIX_LOC loc_load.NEW_OWNER_SUFFIX_LOC + # => [new_owner_suffix, new_owner_prefix, pad(12)] + + loc_load.OWNER_PREFIX_LOC loc_load.OWNER_SUFFIX_LOC + # => [owner_suffix, owner_prefix, new_owner_suffix, new_owner_prefix, pad(12)] + end + + exec.save_ownership_info + # => [pad(12)] +end + +#! 
Accepts a nominated ownership transfer. The nominated owner becomes the new owner +#! and the nominated owner field is cleared. +#! +#! Can only be called by the nominated owner. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - there is no nominated ownership transfer (nominated owner is zero). +#! - the note sender is not the nominated owner. +#! +#! Invocation: call +pub proc accept_ownership + exec.get_nominated_owner_internal + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Check that a nominated transfer exists (nominated owner is not zero). + dup.1 eq.0 dup.1 eq.0 and + # => [is_zero, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + assertz.err=ERR_NO_NOMINATED_OWNER + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + exec.active_note::get_sender + # => [sender_suffix, sender_prefix, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + dup.3 dup.3 + exec.account_id::is_equal + # => [is_sender_nominated_owner, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + assert.err=ERR_SENDER_NOT_NOMINATED_OWNER + # => [nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Build new ownership word: nominated becomes owner, clear nominated. + # Stack for save: [owner_suffix, owner_prefix, nominated_suffix=0, nominated_prefix=0] + push.0.0 + # => [0, 0, nominated_owner_suffix, nominated_owner_prefix, pad(16)] + + # Reorder: move nominated (now new owner) to owner position + movup.3 movup.3 + # => [nominated_owner_suffix, nominated_owner_prefix, 0, 0, pad(16)] + + exec.save_ownership_info + # => [pad(16)] +end + +#! Renounces ownership, leaving the component without an owner. +#! +#! Can only be called by the current owner. Clears both the owner and any nominated owner. +#! +#! Important Note! +#! This feature allows the owner to relinquish administrative privileges, a common pattern +#! after an initial stage with centralized administration is over. 
Once ownership is renounced, +#! the component becomes permanently ownerless and cannot be managed by any account. +#! +#! Inputs: [pad(16)] +#! Outputs: [pad(16)] +#! +#! Panics if: +#! - the note sender is not the owner. +#! +#! Invocation: call +pub proc renounce_ownership + exec.assert_sender_is_owner_internal + # => [pad(16)] + + push.RENOUNCED_OWNERSHIP_CONFIG + # => [0, 0, 0, 0, pad(16)] + + exec.save_ownership_info + # => [pad(16)] +end diff --git a/crates/miden-standards/asm/standards/attachments/network_account_target.masm b/crates/miden-standards/asm/standards/attachments/network_account_target.masm index 9c097162bc..a5ee0bde40 100644 --- a/crates/miden-standards/asm/standards/attachments/network_account_target.masm +++ b/crates/miden-standards/asm/standards/attachments/network_account_target.masm @@ -2,6 +2,8 @@ #! #! Provides a standardized way to work with network account targets. +use miden::protocol::account_id +use miden::protocol::active_account use miden::protocol::active_note use miden::protocol::note @@ -18,55 +20,110 @@ pub const NETWORK_ACCOUNT_TARGET_ATTACHMENT_KIND = 1 # ERRORS # ================================================================================================ -const ERR_ATTACHMENT_SCHEME_MISMATCH = "expected network account target attachment scheme" -const ERR_ATTACHMENT_KIND_MISMATCH = "expected attachment kind to be Word for network account target" +const ERR_NOT_NETWORK_ACCOUNT_TARGET = "attachment is not a valid network account target" + +#! Returns a boolean indicating whether the attachment scheme and kind match the expected +#! values for a NetworkAccountTarget attachment. +#! +#! Inputs: [attachment_scheme, attachment_kind] +#! Outputs: [is_network_account_target] +#! +#! 
Invocation: exec +pub proc is_network_account_target + eq.NETWORK_ACCOUNT_TARGET_ATTACHMENT_SCHEME + # => [is_scheme_valid, attachment_kind] + + swap eq.NETWORK_ACCOUNT_TARGET_ATTACHMENT_KIND + # => [is_kind_valid, is_scheme_valid] + + and + # => [is_network_account_target] +end #! Returns the account ID encoded in the attachment. #! #! The attachment is expected to have the following layout: -#! [0, exec_hint_tag, account_id_prefix, account_id_suffix] +#! [account_id_suffix, account_id_prefix, exec_hint_tag, 0] +#! +#! WARNING: This procedure does not validate the attachment scheme or kind. The caller +#! should validate these using `is_network_account_target` before calling this procedure. #! #! WARNING: This procedure does not validate that the returned account ID is well-formed. #! The caller should validate the account ID if needed using `account_id::validate`. #! -#! Inputs: [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] -#! Outputs: [account_id_prefix, account_id_suffix] +#! Inputs: [NOTE_ATTACHMENT] +#! Outputs: [account_id_suffix, account_id_prefix] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of an account ID. -#! -#! Panics if: -#! - the attachment scheme does not match NETWORK_ACCOUNT_TARGET_ATTACHMENT_SCHEME. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. #! #! 
Invocation: exec pub proc get_id - # verify that the attachment scheme and kind are correct - # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] - eq.NETWORK_ACCOUNT_TARGET_ATTACHMENT_SCHEME assert.err=ERR_ATTACHMENT_SCHEME_MISMATCH - eq.NETWORK_ACCOUNT_TARGET_ATTACHMENT_KIND assert.err=ERR_ATTACHMENT_KIND_MISMATCH - # => [NOTE_ATTACHMENT] = [0, exec_hint_tag, account_id_prefix, account_id_suffix] + # => [NOTE_ATTACHMENT] = [account_id_suffix, account_id_prefix, exec_hint_tag, 0] - drop drop - # => [account_id_prefix, account_id_suffix] + movup.2 drop movup.2 drop + # => [account_id_suffix, account_id_prefix] end #! Creates a new attachment of type NetworkAccountTarget with the following layout: -#! [0, exec_hint_tag, account_id_prefix, account_id_suffix] +#! [account_id_suffix, account_id_prefix, exec_hint_tag, 0] #! -#! Inputs: [account_id_prefix, account_id_suffix, exec_hint] +#! Inputs: [account_id_suffix, account_id_prefix, exec_hint_tag] #! Outputs: [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] #! #! Where: -#! - account_id_{prefix,suffix} are the prefix and suffix felts of an account ID. -#! - exec_hint is the execution hint for the note. +#! - account_id_{suffix,prefix} are the suffix and prefix felts of an account ID. +#! - exec_hint_tag is the encoded execution hint for the note with its tag. #! - attachment_kind is the attachment kind (Word = 1) for use with `output_note::set_attachment`. #! - attachment_scheme is the attachment scheme (1) for use with `output_note::set_attachment`. #! #! Invocation: exec pub proc new - movup.2 - push.0 + # => [account_id_suffix, account_id_prefix, exec_hint_tag] + push.0 movdn.3 + # => [NOTE_ATTACHMENT] = [account_id_suffix, account_id_prefix, exec_hint_tag, 0] push.NETWORK_ACCOUNT_TARGET_ATTACHMENT_KIND push.NETWORK_ACCOUNT_TARGET_ATTACHMENT_SCHEME - # => [attachment_scheme, attachment_kind, ATTACHMENT] + # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] +end + +#! 
Returns a boolean indicating whether the active account matches the target account +#! encoded in the active note's attachment. +#! +#! Inputs: [] +#! Outputs: [is_equal] +#! +#! Where: +#! - is_equal is a boolean indicating whether the active account matches the target account. +#! +#! Panics if: +#! - the attachment is not a valid network account target. +#! +#! Invocation: exec +pub proc active_account_matches_target_account + # ensure note attachment targets the consuming bridge account + exec.active_note::get_metadata + # => [NOTE_ATTACHMENT, METADATA_HEADER] + + swapw + # => [METADATA_HEADER, NOTE_ATTACHMENT] + + exec.note::extract_attachment_info_from_metadata + # => [attachment_kind, attachment_scheme, NOTE_ATTACHMENT] + + swap + # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] + + # ensure the attachment is a network account target + exec.is_network_account_target assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET + # => [NOTE_ATTACHMENT] = [target_id_suffix, target_id_prefix, exec_hint_tag, 0] + + exec.get_id + # => [target_id_suffix, target_id_prefix] + + exec.active_account::get_id + # => [active_account_id_suffix, active_account_id_prefix, target_id_suffix, target_id_prefix] + + exec.account_id::is_equal + # => [is_equal] end diff --git a/crates/miden-standards/asm/standards/auth/falcon512_rpo.masm b/crates/miden-standards/asm/standards/auth/falcon512_rpo.masm deleted file mode 100644 index 7c9ccbf757..0000000000 --- a/crates/miden-standards/asm/standards/auth/falcon512_rpo.masm +++ /dev/null @@ -1,189 +0,0 @@ -use miden::core::crypto::dsa::falcon512rpo -use miden::core::crypto::hashes::rpo256 -use miden::protocol::active_account -use miden::protocol::native_account -use miden::protocol::tx -use miden::standards::auth - -# CONSTANTS -# ================================================================================================= - -# The event to request an authentication signature. 
-const AUTH_REQUEST_EVENT=event("miden::auth::request") - -# Local Memory Addresses for multisig operations -const NUM_OF_APPROVERS_LOC=0 -const PUB_KEY_SLOT_SUFFIX_LOC=4 -const PUB_KEY_SLOT_PREFIX_LOC=5 -const CURRENT_PK_LOC=8 -const SUCCESSFUL_VERIFICATIONS_LOC=12 - -#! Authenticate a transaction using the Falcon signature scheme. -#! -#! It first increments the nonce of the account, independent of whether the account's state has -#! changed or not. Then it computes and signs the following message (in memory order): -#! [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, -#! OUTPUT_NOTES_COMMITMENT, [0, 0, ref_block_num, final_nonce]] -#! -#! Including the final_nonce is necessary for replay protection. The reference block number is -#! included to commit to the transaction creator's intended reference block of the transaction -#! which determines the fee parameters and therefore the fee amount that is deducted. -#! -#! Inputs: [PUB_KEY] -#! Outputs: [] -#! -#! Invocation: exec -pub proc authenticate_transaction - # Increment the account's nonce. - # --------------------------------------------------------------------------------------------- - # This has to happen before computing the delta commitment, otherwise that procedure will abort - push.0.0 exec.tx::get_block_number - exec.native_account::incr_nonce - # => [[final_nonce, ref_block_num, 0, 0], PUB_KEY] - - # Compute the message that is signed. - # --------------------------------------------------------------------------------------------- - exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, PUB_KEY] - - exec.auth::adv_insert_hqword - - # The commitment to the tx summary is the message that is signed - exec.auth::hash_tx_summary - # OS => [MESSAGE, PUB_KEY] - # AS => [] - - # Fetch signature from advice provider and verify. 
- # --------------------------------------------------------------------------------------------- - # Emit the authentication request event that pushes a signature for the message to the advice stack - emit.AUTH_REQUEST_EVENT - swapw - # OS => [PUB_KEY, MESSAGE] - # AS => [SIGNATURE] - - # Verify the signature against the public key and the message. The procedure gets as inputs the - # hash of the public key and the message via the operand stack. The signature is provided via - # the advice stack. The signature is valid if and only if the procedure returns. - exec.falcon512rpo::verify - # OS => [] - # AS => [] -end - -#! Verify signatures for all required signers in a loop. -#! -#! This procedure iterates through the required number of signers, fetches their public keys from -#! the provided account storage map slot, verifies their signatures against the transaction message, -#! and returns the number of successfully verified signatures. -#! -#! Note: Calls `active_account::get_initial_map_item` to access the transaction's initial storage -#! state rather than the current state. This is crucial when validating transactions that update -#! the owner public key mapping - the previous signers must authorize the change to the new signers, -#! not the new signers authorizing themselves. -#! -#! Inputs: [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, MSG] -#! Outputs: [num_verified_signatures, MSG] -@locals(16) -pub proc verify_signatures - loc_store.PUB_KEY_SLOT_PREFIX_LOC - loc_store.PUB_KEY_SLOT_SUFFIX_LOC - # => [num_of_approvers, MSG] - - # Initializing SUCCESSFUL_VERIFICATIONS local memory address to 0 - push.0 loc_store.SUCCESSFUL_VERIFICATIONS_LOC - # => [num_of_approvers, MSG] - - # Counter `i` starts at `num_of_approvers` and counts down to 0 - # => [i, MSG] - - # Loop through required signers and verify signatures. 
- # --------------------------------------------------------------------------------------------- - - dup neq.0 - while.true - # => [i, MSG] - - # Fetch public key from storage map. - # ----------------------------------------------------------------------------------------- - - sub.1 dup push.0.0.0 - loc_load.PUB_KEY_SLOT_SUFFIX_LOC loc_load.PUB_KEY_SLOT_PREFIX_LOC - # => [owner_key_slot_prefix, owner_key_slot_suffix, [0, 0, 0, i-1], i-1, MSG] - - # Get public key from initial storage state - exec.active_account::get_initial_map_item - # => [OWNER_PUB_KEY, i-1, MSG] - - loc_storew_be.CURRENT_PK_LOC - # => [OWNER_PUB_KEY, i-1, MSG] - - # Check if signature exists for this signer. - # ----------------------------------------------------------------------------------------- - - movup.4 movdn.8 swapw dupw movdnw.2 - # => [MSG, OWNER_PUB_KEY, MSG, i-1] - - exec.rpo256::merge - # => [SIG_KEY, MSG, i-1] - - adv.has_mapkey - # => [SIG_KEY, MSG, i-1] - - adv_push.1 - # => [has_signature, SIG_KEY, MSG, i-1] - - # if SIG_KEY => SIGNATURE exists in AdviceMap check the signature - if.true - # => [SIG_KEY, MSG, i-1] - - dupw.1 - # => [MSG, SIG_KEY, MSG, i-1] - - swapw - # => [SIG_KEY, MSG, MSG, i-1] - - # Verify the signature against the public key and message. - # ----------------------------------------------------------------------------------------- - - loc_loadw_be.CURRENT_PK_LOC - # => [PK, MSG, MSG, i-1] - - swapw - # => [MSG, PK, MSG, i-1] - - # Emit the authentication request event that pushes a signature for the message to the advice stack. 
- emit.AUTH_REQUEST_EVENT - - swapw - # OS => [PUB_KEY, MSG, MSG, i-1] - # AS => [SIGNATURE] - - exec.falcon512rpo::verify - # => [MSG, i-1] - - loc_load.SUCCESSFUL_VERIFICATIONS_LOC - add.1 - loc_store.SUCCESSFUL_VERIFICATIONS_LOC - # => [MSG, i-1] - else - dropw - # => [MSG, i-1] - end - # => [MSG, i-1] - - movup.4 - # => [i-1, MSG] - - dup neq.0 - # => [should_continue, i-1, MSG] - end - # => [i-1, MSG] - - # Return successful signature verifications along with MSG - # ----------------------------------------------------------------------------------------- - - drop - # => [MSG] - - loc_load.SUCCESSFUL_VERIFICATIONS_LOC - # => [num_verified_signatures, MSG] -end diff --git a/crates/miden-standards/asm/standards/auth/mod.masm b/crates/miden-standards/asm/standards/auth/mod.masm index ec213ee4b8..9d6503aad1 100644 --- a/crates/miden-standards/asm/standards/auth/mod.masm +++ b/crates/miden-standards/asm/standards/auth/mod.masm @@ -1,46 +1,11 @@ use miden::protocol::native_account use miden::protocol::tx -use miden::core::crypto::hashes::rpo256 - -#! Inputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -#! Outputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -@locals(16) -pub proc adv_insert_hqword - loc_storew_be.0 - movdnw.3 - loc_storew_be.4 - movdnw.3 - loc_storew_be.8 - movdnw.3 - loc_storew_be.12 - movdnw.3 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] - - exec.hash_tx_summary - # => [MESSAGE] - - locaddr.0 - dup add.16 - # => [mem_addr_end, mem_addr_start, MESSAGE] - - movdn.5 movdn.4 - # => [MESSAGE, mem_addr_start, mem_addr_end] - - adv.insert_mem - drop drop - # => [<4 stack elements>] - - loc_loadw_be.12 - padw loc_loadw_be.8 - padw loc_loadw_be.4 - padw loc_loadw_be.0 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] -end +use miden::core::crypto::hashes::poseidon2 #! 
Creates the transaction summary and returns it in the order in which it will be hashed. #! #! Inputs: [SALT] -#! Outputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] +#! Outputs: [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] #! #! Where: #! - SALT is a user-defined input recommended to use as replay protection. @@ -48,22 +13,19 @@ end #! - INPUT_NOTES_COMMITMENT is the commitment to the transaction's inputs notes. #! - ACCOUNT_DELTA_COMMITMENT is the commitment to the transaction's account delta. pub proc create_tx_summary - exec.native_account::compute_delta_commitment - # => [ACCOUNT_DELTA_COMMITMENT, SALT] + exec.tx::get_output_notes_commitment + # => [OUTPUT_NOTES_COMMITMENT, SALT] exec.tx::get_input_notes_commitment - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT] - - exec.tx::get_output_notes_commitment - # => [OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT] + # => [INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] - movupw.3 - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] + exec.native_account::compute_delta_commitment + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] end #! Hashes the provided transaction summary and returns its commitment. #! -#! Inputs: [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] +#! Inputs: [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] #! Outputs: [TX_SUMMARY_COMMITMENT] #! #! Where: @@ -72,26 +34,21 @@ end #! - INPUT_NOTES_COMMITMENT is the commitment to the transaction's inputs notes. #! - ACCOUNT_DELTA_COMMITMENT is the commitment to the transaction's account delta. 
pub proc hash_tx_summary - swapdw - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, SALT, OUTPUT_NOTES_COMMITMENT] - # pad capacity element of the hasher padw movdnw.2 - # => [INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, CAPACITY, SALT, OUTPUT_NOTES_COMMITMENT] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] - exec.rpo256::permute - # => [RATE, RATE, PERM, SALT, OUTPUT_NOTES_COMMITMENT] + exec.poseidon2::permute + # => [RATE0, RATE1, CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] # drop rate words dropw dropw - # => [PERM, SALT, OUTPUT_NOTES_COMMITMENT] + # => [CAPACITY, OUTPUT_NOTES_COMMITMENT, SALT] movdnw.2 - # => [SALT, OUTPUT_NOTES_COMMITMENT, PERM] - - exec.rpo256::permute - # => [RATE, RATE, PERM] + # => [OUTPUT_NOTES_COMMITMENT, SALT, CAPACITY] - exec.rpo256::squeeze_digest + exec.poseidon2::permute + exec.poseidon2::squeeze_digest # => [TX_SUMMARY_COMMITMENT] end diff --git a/crates/miden-standards/asm/standards/auth/multisig.masm b/crates/miden-standards/asm/standards/auth/multisig.masm new file mode 100644 index 0000000000..ed20ff2325 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/multisig.masm @@ -0,0 +1,732 @@ +# The MASM code of the Multi-Signature Authentication Component. +# +# See the `AuthMultisig` Rust type's documentation for more details. + +use miden::protocol::active_account +use miden::protocol::auth::AUTH_UNAUTHORIZED_EVENT +use miden::protocol::native_account +use miden::standards::auth +use miden::core::word + +# Local Memory Addresses +const IS_SIGNER_FOUND_LOC=0 +const CURRENT_SIGNER_INDEX_LOC=1 + +const NEW_NUM_OF_APPROVERS_LOC=0 +const INIT_NUM_OF_APPROVERS_LOC=1 + +const DEFAULT_THRESHOLD_LOC=0 + +# CONSTANTS +# ================================================================================================= + +# Storage Slots +# +# This authentication component uses named storage slots. 
+# - THRESHOLD_CONFIG_SLOT: +# [default_threshold, num_approvers, 0, 0] +# +# - APPROVER_PUBLIC_KEYS_SLOT (map): +# APPROVER_MAP_KEY => APPROVER_PUBLIC_KEY +# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] +# +# - APPROVER_SCHEME_ID_SLOT (map): +# APPROVER_MAP_KEY => [scheme_id, 0, 0, 0] +# where APPROVER_MAP_KEY = [key_index, 0, 0, 0] +# +# - EXECUTED_TXS_SLOT (map): +# TRANSACTION_MESSAGE => [is_executed, 0, 0, 0] +# +# - PROC_THRESHOLD_ROOTS_SLOT (map): +# PROC_ROOT => [proc_threshold, 0, 0, 0] + + +# The slot in this component's storage layout where the default signature threshold and +# number of approvers are stored as: +# [default_threshold, num_approvers, 0, 0]. +# The threshold is guaranteed to be less than or equal to num_approvers. +const THRESHOLD_CONFIG_SLOT = word("miden::standards::auth::multisig::threshold_config") + +# The slot in this component's storage layout where the public keys map is stored. +# Map entries: [key_index, 0, 0, 0] => APPROVER_PUBLIC_KEY +const APPROVER_PUBLIC_KEYS_SLOT = word("miden::standards::auth::multisig::approver_public_keys") + +# The slot in this component's storage layout where the scheme id for the corresponding public keys map is stored. +# Map entries: [key_index, 0, 0, 0] => [scheme_id, 0, 0, 0] +const APPROVER_SCHEME_ID_SLOT = word("miden::standards::auth::multisig::approver_schemes") + +# The slot in this component's storage layout where executed transactions are stored. +# Map entries: transaction_message => [is_executed, 0, 0, 0] +const EXECUTED_TXS_SLOT = word("miden::standards::auth::multisig::executed_transactions") + +# The slot in this component's storage layout where procedure thresholds are stored. 
+# Map entries: PROC_ROOT => [proc_threshold, 0, 0, 0] +const PROC_THRESHOLD_ROOTS_SLOT = word("miden::standards::auth::multisig::procedure_thresholds") + +# Executed Transaction Flag Constant +const IS_EXECUTED_FLAG = [1, 0, 0, 0] + +# ERRORS +# ================================================================================================= + +const ERR_TX_ALREADY_EXECUTED = "failed to approve multisig transaction as it was already executed" + +const ERR_MALFORMED_MULTISIG_CONFIG = "number of approvers must be equal to or greater than threshold" + +const ERR_ZERO_IN_MULTISIG_CONFIG = "number of approvers or threshold must not be zero" + +const ERR_APPROVER_COUNTS_NOT_U32 = "initial and new number of approvers must be u32" + +const ERR_SIGNER_INDEX_NOT_U32 = "signer index must be u32" + +const ERR_PROC_THRESHOLD_NOT_U32 = "procedure threshold must be u32" + +const ERR_NUM_APPROVERS_OR_PROC_THRESHOLD_NOT_U32 = "number of approvers and procedure threshold must be u32" + +const ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS = "procedure threshold exceeds new number of approvers" + +#! Remove old approver public keys and the corresponding scheme ids +#! from the approver public key and scheme id mappings. +#! +#! This procedure cleans up the storage by removing public keys and signature scheme ids of approvers +#! that are no longer part of the multisig configuration. +#! +#! Inputs: [init_num_of_approvers, new_num_of_approvers] +#! Outputs: [] +#! +#! Where: +#! - init_num_of_approvers is the original number of approvers before the update +#! - new_num_of_approvers is the new number of approvers after the update +#! +#! Panics if: +#! - init_num_of_approvers is not a u32 value. +#! - new_num_of_approvers is not a u32 value. 
+proc cleanup_pubkey_and_scheme_id_mapping(init_num_of_approvers: u32, new_num_of_approvers: u32) + dup.1 dup.1 + u32assert2.err=ERR_APPROVER_COUNTS_NOT_U32 + u32lt + # => [should_loop, i = init_num_of_approvers, new_num_of_approvers] + + while.true + # => [i, new_num_of_approvers] + + sub.1 + # => [i-1, new_num_of_approvers] + + # clear scheme id at APPROVER_MAP_KEY(i-1) + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] + + padw swapw + # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_suffix, scheme_id_slot_prefix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, new_num_of_approvers] + + dropw + # => [i-1, new_num_of_approvers] + + # clear public key at APPROVER_MAP_KEY(i-1) + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, new_num_of_approvers] + + padw swapw + # => [APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, EMPTY_WORD, i-1, new_num_of_approvers] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, new_num_of_approvers] + + dropw + # => [i-1, new_num_of_approvers] + + dup.1 dup.1 + u32lt + # => [should_loop, i-1, new_num_of_approvers] + end + + drop drop +end + +#! Builds the storage map key for a signer index. +#! +#! Inputs: [key_index] +#! Outputs: [APPROVER_MAP_KEY] +proc create_approver_map_key + push.0.0.0 movup.3 + # => [[key_index, 0, 0, 0]] + # => [APPROVER_MAP_KEY] +end + +#! Asserts that all configured per-procedure threshold overrides are less than or equal to +#! number of approvers +#! +#! Inputs: [num_approvers] +#! Outputs: [] +#! Panics if: +#! - any configured procedure threshold is not a u32 value. +#! - any configured procedure threshold exceeds num_approvers. 
+proc assert_proc_thresholds_lte_num_approvers(num_approvers: u32) + exec.active_account::get_num_procedures + # => [num_procedures, num_approvers] + + dup neq.0 + # => [should_continue, num_procedures, num_approvers] + while.true + sub.1 dup + # => [proc_index, proc_index, num_approvers] + + exec.active_account::get_procedure_root + # => [PROC_ROOT, proc_index, num_approvers] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, proc_index, num_approvers] + + exec.active_account::get_map_item + # => [[proc_threshold, 0, 0, 0], proc_index, num_approvers] + + movdn.3 drop drop drop + # => [proc_threshold, proc_index, num_approvers] + + dup.2 + # => [num_approvers, proc_threshold, proc_index, num_approvers] + + u32assert2.err=ERR_PROC_THRESHOLD_NOT_U32 + u32gt assertz.err=ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS + # => [proc_index, num_approvers] + + dup neq.0 + # => [should_continue, proc_index, num_approvers] + end + # => [proc_index, num_approvers] + + drop drop + # => [] +end + +#! Update threshold config, add & remove approvers, and update the approver scheme ids +#! +#! Inputs: +#! Operand stack: [MULTISIG_CONFIG_HASH, pad(12)] +#! Advice map: { +#! MULTISIG_CONFIG_HASH => +#! [ +#! CONFIG, +#! PUB_KEY_N, PUB_KEY_N-1, ..., PUB_KEY_0, +#! SCHEME_ID_N, SCHEME_ID_N-1, ..., SCHEME_ID_0 +#! ] +#! } +#! Outputs: +#! Operand stack: [] +#! +#! Where: +#! - MULTISIG_CONFIG_HASH is the hash of the threshold, +#! new public key vector, and the corresponding scheme identifiers +#! - MULTISIG_CONFIG is [threshold, num_approvers, 0, 0] +#! - PUB_KEY_i is the public key of the i-th signer +#! - SCHEME_ID_i is the signature scheme id of the i-th signer +#! +#! Locals: +#! 0: new_num_of_approvers +#! 1: init_num_of_approvers +#! +#! 
Invocation: call +@locals(2) +pub proc update_signers_and_threshold(multisig_config_hash: word) + adv.push_mapval + # => [MULTISIG_CONFIG_HASH, pad(12)] + + adv_loadw + # => [MULTISIG_CONFIG, pad(12)] + + # store new_num_of_approvers for later + dup.1 loc_store.NEW_NUM_OF_APPROVERS_LOC + # => [MULTISIG_CONFIG, pad(12)] + + dup dup.2 + # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] + + # make sure that the threshold is smaller than the number of approvers + u32assert2.err=ERR_MALFORMED_MULTISIG_CONFIG + u32gt assertz.err=ERR_MALFORMED_MULTISIG_CONFIG + # => [MULTISIG_CONFIG, pad(12)] + + dup dup.2 + # => [num_approvers, threshold, MULTISIG_CONFIG, pad(12)] + + # make sure that threshold or num_approvers are not zero + eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG + eq.0 assertz.err=ERR_ZERO_IN_MULTISIG_CONFIG + # => [MULTISIG_CONFIG, pad(12)] + + loc_load.NEW_NUM_OF_APPROVERS_LOC + # => [num_approvers, MULTISIG_CONFIG, pad(12)] + + # make sure that all existing procedure threshold overrides remain reachable + exec.assert_proc_thresholds_lte_num_approvers + # => [MULTISIG_CONFIG, pad(12)] + + push.THRESHOLD_CONFIG_SLOT[0..2] + # => [config_slot_suffix, config_slot_prefix, MULTISIG_CONFIG, pad(12)] + + exec.native_account::set_item + # => [OLD_THRESHOLD_CONFIG, pad(12)] + + # store init_num_of_approvers for later + drop loc_store.INIT_NUM_OF_APPROVERS_LOC drop drop + # => [pad(12)] + + loc_load.NEW_NUM_OF_APPROVERS_LOC + # => [num_approvers] + + dup neq.0 + while.true + sub.1 + # => [i-1, pad(12)] + + dup exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, pad(12)] + + padw adv_loadw + # => [PUB_KEY, APPROVER_MAP_KEY, i-1, pad(12)] + + swapw + # => [APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, PUB_KEY, i-1, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, pad(12)] + + # override OLD_VALUE with SCHEME_ID_WORD + adv_loadw + # 
=> [SCHEME_ID_WORD, i-1, pad(12)] + + # validate the scheme id word is in a correct form + exec.auth::signature::assert_supported_scheme_word + # => [SCHEME_ID_WORD, i-1, pad(12)] + + dup.4 exec.create_approver_map_key + # => [APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_id_suffix, scheme_id_slot_id_prefix, APPROVER_MAP_KEY, SCHEME_ID_WORD, i-1, pad(12)] + + exec.native_account::set_map_item + # => [OLD_VALUE, i-1, pad(12)] + + dropw + # => [i-1, pad(12)] + + dup neq.0 + # => [is_non_zero, i-1, pad(12)] + end + # => [pad(13)] + + drop + # => [pad(12)] + + # compare initial vs current multisig config + + # load init_num_of_approvers & new_num_of_approvers + loc_load.NEW_NUM_OF_APPROVERS_LOC loc_load.INIT_NUM_OF_APPROVERS_LOC + # => [init_num_of_approvers, new_num_of_approvers, pad(12)] + + exec.cleanup_pubkey_and_scheme_id_mapping + # => [pad(12)] +end + +# Computes the effective transaction threshold based on called procedures and per-procedure +# overrides stored in PROC_THRESHOLD_ROOTS_SLOT. Falls back to default_threshold if no +# overrides apply. +# +#! Inputs: [default_threshold] +#! Outputs: [transaction_threshold] +@locals(1) +proc compute_transaction_threshold(default_threshold: u32) -> u32 + # 1. initialize transaction_threshold = 0 + # 2. iterate through all account procedures + # a. check if the procedure was called during the transaction + # b. if called, get the override threshold of that procedure from the config map + # c. if proc_threshold > transaction_threshold, set transaction_threshold = proc_threshold + # 3. if transaction_threshold == 0 at the end, revert to using default_threshold + + # store default_threshold for later + loc_store.DEFAULT_THRESHOLD_LOC + # => [] + + # 1. 
initialize transaction_threshold = 0 + push.0 + # => [transaction_threshold] + + # get the number of account procedures + exec.active_account::get_num_procedures + # => [num_procedures, transaction_threshold] + + # 2. iterate through all account procedures + dup neq.0 + # => [should_continue, num_procedures, transaction_threshold] + while.true + sub.1 dup + # => [num_procedures-1, num_procedures-1, transaction_threshold] + + # get procedure root of the procedure with index i + exec.active_account::get_procedure_root dupw + # => [PROC_ROOT, PROC_ROOT, num_procedures-1, transaction_threshold] + + # 2a. check if this procedure has been called in the transaction + exec.native_account::was_procedure_called + # => [was_called, PROC_ROOT, num_procedures-1, transaction_threshold] + + # if it has been called, get the override threshold of that procedure + if.true + # => [PROC_ROOT, num_procedures-1, transaction_threshold] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, num_procedures-1, transaction_threshold] + + # 2b. get the override proc_threshold of that procedure + # if the procedure has no override threshold, the returned map item will be [0, 0, 0, 0] + exec.active_account::get_initial_map_item + # => [[proc_threshold, 0, 0, 0], num_procedures-1, transaction_threshold] + + movdn.3 drop drop drop dup dup.3 + # => [transaction_threshold, proc_threshold, proc_threshold, num_procedures-1, transaction_threshold] + + u32assert2.err="transaction threshold or procedure threshold are not u32" + u32gt + # => [is_gt, proc_threshold, num_procedures-1, transaction_threshold] + # 2c. 
if proc_threshold > transaction_threshold, update transaction_threshold + movup.2 movdn.3 + # => [is_gt, proc_threshold, transaction_threshold, num_procedures-1] + cdrop + # => [updated_transaction_threshold, num_procedures-1] + swap + # => [num_procedures-1, updated_transaction_threshold] + # if it has not been called during this transaction, nothing to do, move to the next procedure + else + dropw + # => [num_procedures-1, transaction_threshold] + end + + dup neq.0 + # => [should_continue, num_procedures-1, transaction_threshold] + end + + drop + # => [transaction_threshold] + + loc_load.DEFAULT_THRESHOLD_LOC + # => [default_threshold, transaction_threshold] + + # 3. if transaction_threshold == 0 at the end, revert to using default_threshold + dup.1 eq.0 + # => [is_zero, default_threshold, transaction_threshold] + + cdrop + # => [effective_transaction_threshold] +end + +#! Returns current num_approvers and the threshold `THRESHOLD_CONFIG_SLOT` +#! +#! Inputs: [] +#! Outputs: [threshold, num_approvers] +#! +#! Invocation: call +pub proc get_threshold_and_num_approvers + push.THRESHOLD_CONFIG_SLOT[0..2] + exec.active_account::get_initial_item + # => [threshold, num_approvers, 0, 0] + + movup.2 drop movup.2 drop + # => [threshold, num_approvers] +end + +#! Sets or clears a per-procedure threshold override. +#! +#! Inputs: [proc_threshold, PROC_ROOT] +#! Outputs: [] +#! +#! Where: +#! - PROC_ROOT is the root of the account procedure whose override is being updated. +#! - proc_threshold is the override threshold to set. +#! - if proc_threshold == 0, override is cleared and the default threshold applies. +#! - if proc_threshold > 0, it must be <= current num_approvers. +#! +#! Panics if: +#! - proc_threshold is not a u32 value. +#! - current num_approvers is not a u32 value. +#! - proc_threshold > current num_approvers. +#! +#! 
Invocation: call +pub proc set_procedure_threshold + exec.get_threshold_and_num_approvers + # => [default_threshold, num_approvers, proc_threshold, PROC_ROOT] + + drop + # => [num_approvers, proc_threshold, PROC_ROOT] + + dup.1 swap + # => [num_approvers, proc_threshold, proc_threshold, PROC_ROOT] + + u32assert2.err=ERR_NUM_APPROVERS_OR_PROC_THRESHOLD_NOT_U32 + u32gt assertz.err=ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS + # => [proc_threshold, PROC_ROOT] + + # Store [proc_threshold, 0, 0, 0] = PROC_THRESHOLD_WORD, where proc_threshold == 0 acts as clear. + push.0.0.0 + movup.3 + swapw + # => [PROC_ROOT, PROC_THRESHOLD_WORD] + + push.PROC_THRESHOLD_ROOTS_SLOT[0..2] + # => [proc_roots_slot_suffix, proc_roots_slot_prefix, PROC_ROOT, PROC_THRESHOLD_WORD] + + exec.native_account::set_map_item + # => [OLD_PROC_THRESHOLD_WORD] + + dropw + # => [] +end + +#! Returns signer public key at index i +#! +#! Inputs: [index] +#! Outputs: [PUB_KEY, scheme_id] +#! +#! Panics if: +#! - index is not a u32 value. +#! +#! Invocation: call +pub proc get_signer_at + u32assert.err=ERR_SIGNER_INDEX_NOT_U32 + # => [index] + + dup + # => [index, index] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, index] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [APPROVER_PUBLIC_KEYS_SLOT, APPROVER_MAP_KEY, index] + + exec.active_account::get_initial_map_item + # => [PUB_KEY, index] + + movup.4 + # => [index, PUB_KEY] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [APPROVER_SCHEME_ID_SLOT, APPROVER_MAP_KEY, PUB_KEY] + + exec.active_account::get_initial_map_item + # => [SCHEME_ID_WORD, PUB_KEY] + + movdn.3 drop drop drop + # => [scheme_id, PUB_KEY] + + movdn.4 + # => [PUB_KEY, scheme_id] +end + + +#! Returns 1 if PUB_KEY is a current signer, else 0. +#! +#! Inputs: [PUB_KEY] +#! Outputs: [is_signer] +#! Locals: +#! 0: is_signer_found +#! 1: current_signer_index +#! +#! 
Invocation: call +@locals(2) +pub proc is_signer(pub_key: word) -> felt + # initialize is_signer_found = false + push.0 loc_store.IS_SIGNER_FOUND_LOC + # => [PUB_KEY] + + exec.get_threshold_and_num_approvers + # => [threshold, num_approvers, PUB_KEY] + + drop + # => [num_approvers, PUB_KEY] + + dup neq.0 + # => [has_remaining_signers, num_approvers, PUB_KEY] + + while.true + # => [i, PUB_KEY] + + sub.1 + # => [i-1, PUB_KEY] + + # store i-1 for this loop iteration before map lookup + dup loc_store.CURRENT_SIGNER_INDEX_LOC + # => [i-1, PUB_KEY] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, APPROVER_MAP_KEY, PUB_KEY] + + exec.active_account::get_initial_map_item + # => [CURRENT_PUB_KEY, PUB_KEY] + + dupw.1 exec.word::eq + # => [is_pub_key_match, PUB_KEY] + + loc_store.IS_SIGNER_FOUND_LOC + # => [PUB_KEY] + + loc_load.CURRENT_SIGNER_INDEX_LOC + # => [i-1, PUB_KEY] + + dup neq.0 + # => [has_remaining_signers, i-1, PUB_KEY] + + loc_load.IS_SIGNER_FOUND_LOC not + # => [!is_signer_found, has_remaining_signers, i-1, PUB_KEY] + + and + # => [should_loop, i-1, PUB_KEY] + end + + drop dropw + # => [] + + loc_load.IS_SIGNER_FOUND_LOC + # => [is_signer] +end + +#! Check if transaction has already been executed and add it to executed transactions for replay protection, and +#! finalizes multisig authentication. +#! +#! Inputs: [MSG] +#! Outputs: [] +#! +#! Panics if: +#! - the same transaction has already been executed +#! +#! 
Invocation: exec +pub proc assert_new_tx(msg: word) + push.IS_EXECUTED_FLAG + # => [[0, 0, 0, is_executed], MSG] + + swapw + # => [TX_SUMMARY_COMMITMENT, IS_EXECUTED_FLAG] + + push.EXECUTED_TXS_SLOT[0..2] + # => [txs_slot_suffix, txs_slot_prefix, MSG, IS_EXECUTED_FLAG] + + # Set the key value pair in the map to mark transaction as executed + exec.native_account::set_map_item + # => [[0, 0, 0, is_executed]] + + movdn.3 drop drop drop + # => [is_executed] + + assertz.err=ERR_TX_ALREADY_EXECUTED + # => [] +end + +#! Authenticate a transaction using the signature scheme specified by scheme_id +#! with multi-signature support +#! +#! Supported schemes: +#! - 1 => ecdsa_k256_keccak +#! - 2 => falcon512_poseidon2 +#! +#! This procedure implements multi-signature authentication by: +#! 1. Computing the transaction summary message that needs to be signed +#! 2. Verifying signatures from multiple required signers against their public keys +#! 3. Ensuring the minimum threshold of valid signatures is met +#! +#! Inputs: +#! Operand stack: [SALT] +#! Advice map: { +#! h(SIG_0, MSG): SIG_0, +#! h(SIG_1, MSG): SIG_1, +#! h(SIG_n, MSG): SIG_n +#! } +#! Outputs: +#! Operand stack: [TX_SUMMARY_COMMITMENT] +#! +#! Where: +#! - SALT is a cryptographically random nonce that enables multiple concurrent +#! multisig transactions while maintaining replay protection. Each transaction +#! must use a unique SALT value to ensure transaction uniqueness. +#! - SIG_i is the signature from the i-th signer. +#! - MSG is the transaction message being signed. +#! - h(SIG_i, MSG) is the hash of the signature and message used as the advice map key. +#! +#! Panics if: +#! - insufficient number of valid signatures (below threshold). +#! +#! 
Invocation: call +@locals(1) +pub proc auth_tx(salt: word) + exec.native_account::incr_nonce drop + # => [SALT] + + # ------ Computing transaction summary ------ + + exec.auth::create_tx_summary + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] + + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] + + # the commitment to the tx summary is the message that is signed + exec.auth::hash_tx_summary + # => [TX_SUMMARY_COMMITMENT] + + # ------ Verifying approver signatures ------ + + exec.get_threshold_and_num_approvers + # => [default_threshold, num_of_approvers, TX_SUMMARY_COMMITMENT] + + movdn.5 + # => [num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + push.APPROVER_PUBLIC_KEYS_SLOT[0..2] + # => [pub_key_slot_suffix, pub_key_slot_prefix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + push.APPROVER_SCHEME_ID_SLOT[0..2] + # => [scheme_id_slot_suffix, scheme_id_slot_prefix, pub_key_slot_suffix, pub_key_slot_prefix, num_of_approvers, TX_SUMMARY_COMMITMENT, default_threshold] + + exec.::miden::standards::auth::signature::verify_signatures + # => [num_verified_signatures, TX_SUMMARY_COMMITMENT, default_threshold] + + # ------ Checking threshold is >= num_verified_signatures ------ + + movup.5 + # => [default_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] + + exec.compute_transaction_threshold + # => [transaction_threshold, num_verified_signatures, TX_SUMMARY_COMMITMENT] + + u32assert2 u32lt + # => [is_unauthorized, TX_SUMMARY_COMMITMENT] + + # If signatures are non-existent the tx will fail here. + if.true + emit.AUTH_UNAUTHORIZED_EVENT + push.0 assert.err="insufficient number of signatures" + end + + # TX_SUMMARY_COMMITMENT is returned so wrappers can run optional checks + # (e.g. PSM) before replay-protection finalization. 
+ # => [TX_SUMMARY_COMMITMENT] +end diff --git a/crates/miden-standards/asm/standards/auth/psm.masm b/crates/miden-standards/asm/standards/auth/psm.masm new file mode 100644 index 0000000000..d778cafb14 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/psm.masm @@ -0,0 +1,158 @@ +# Private State Manager (PSM) account component. +# This component is composed into account auth flows especially for multisig and adds +# an extra signature check by a dedicated private state manager signer. +# +# Private State Manager (PSM) is a cloud backup and synchronization layer for Miden private accounts +# See: https://github.com/OpenZeppelin/private-state-manager + +use miden::protocol::auth::AUTH_UNAUTHORIZED_EVENT +use miden::protocol::native_account +use miden::standards::auth::tx_policy +use miden::standards::auth::signature + +# IMPORTANT SECURITY NOTES +# -------------------------------------------------------------------------------- +# - By default, exactly one valid PSM signature is required. +# - If `update_psm_public_key` is the only non-auth account procedure called in the current +# transaction, `verify_signature` skips the PSM signature check so key rotation can proceed +# without the old PSM signer. +# - `update_psm_public_key` rotates the PSM public key and corresponding scheme id using the fixed +# map key `PSM_MAP_KEY`. + + +# CONSTANTS +# ================================================================================================= + +# Storage Slots +# +# This authentication component uses named storage slots. +# - PSM_PUBLIC_KEYS_SLOT (map): +# PSM_MAP_KEY => PSM_PUBLIC_KEY +# where: PSM_MAP_KEY = [0, 0, 0, 0] +# +# - PSM_SCHEME_ID_SLOT (map): +# PSM_MAP_KEY => [scheme_id, 0, 0, 0] +# where: PSM_MAP_KEY = [0, 0, 0, 0] + +# The slot in this component's storage layout where the PSM public key map is stored. 
+# Map entries: [PSM_MAP_KEY] => [PSM_PUBLIC_KEY] +const PSM_PUBLIC_KEYS_SLOT = word("miden::standards::auth::psm::pub_key") + +# The slot in this component's storage layout where the scheme id for the corresponding PSM public key map is stored. +# Map entries: [PSM_MAP_KEY] => [scheme_id, 0, 0, 0] +const PSM_SCHEME_ID_SLOT = word("miden::standards::auth::psm::scheme") + +# Single-entry storage map key where private state manager signer data is stored. +const PSM_MAP_KEY = [0, 0, 0, 0] + +# ERRORS +# ------------------------------------------------------------------------------------------------- +const ERR_INVALID_PSM_SIGNATURE = "invalid private state manager signature" + +# PUBLIC INTERFACE +# ================================================================================================ + +#! Updates the private state manager public key. +#! +#! Inputs: [new_psm_scheme_id, NEW_PSM_PUBLIC_KEY] +#! Outputs: [] +#! +#! Notes: +#! - This procedure only updates the PSM public key and corresponding scheme id. +#! - `verify_signature` skips PSM verification only when this is the only non-auth account +#! procedure called in the transaction. +#! +#! Invocation: call +@locals(1) +pub proc update_psm_public_key(new_psm_scheme_id: felt, new_psm_public_key: word) + # Validate supported signature scheme before committing it to storage. + dup exec.signature::assert_supported_scheme + # => [new_psm_scheme_id, NEW_PSM_PUBLIC_KEY] + + loc_store.0 + # => [NEW_PSM_PUBLIC_KEY] + + push.PSM_MAP_KEY + # => [PSM_MAP_KEY, NEW_PSM_PUBLIC_KEY] + + push.PSM_PUBLIC_KEYS_SLOT[0..2] + # => [psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, PSM_MAP_KEY, NEW_PSM_PUBLIC_KEY] + + exec.native_account::set_map_item + # => [OLD_PSM_PUBLIC_KEY] + + dropw + # => [] + + # Store new scheme id as [scheme_id, 0, 0, 0] in the single-entry map. 
+ loc_load.0 + # => [scheme_id] + + push.0.0.0 movup.3 + # => [NEW_PSM_SCHEME_ID_WORD] + + push.PSM_MAP_KEY + # => [PSM_MAP_KEY, NEW_PSM_SCHEME_ID_WORD] + + push.PSM_SCHEME_ID_SLOT[0..2] + # => [psm_scheme_slot_prefix, psm_scheme_slot_suffix, PSM_MAP_KEY, NEW_PSM_SCHEME_ID_WORD] + + exec.native_account::set_map_item + # => [OLD_PSM_SCHEME_ID_WORD] + + dropw + # => [] +end + +#! Conditionally verifies a private state manager signature. +#! +#! Inputs: [MSG] +#! Outputs: [] +#! +#! Panics if: +#! - `update_psm_public_key` is called together with another non-auth account procedure. +#! - `update_psm_public_key` was not called and a valid PSM signature is missing or invalid. +#! +#! Invocation: exec +pub proc verify_signature(msg: word) + procref.update_psm_public_key + # => [UPDATE_PSM_PUBLIC_KEY_ROOT, MSG] + + exec.native_account::was_procedure_called + # => [was_update_psm_public_key_called, MSG] + + if.true + exec.tx_policy::assert_only_one_non_auth_procedure_called + # => [MSG] + + exec.tx_policy::assert_no_input_or_output_notes + # => [MSG] + + dropw + # => [] + else + push.1 + # => [1, MSG] + + push.PSM_PUBLIC_KEYS_SLOT[0..2] + # => [psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, 1, MSG] + + push.PSM_SCHEME_ID_SLOT[0..2] + # => [psm_scheme_slot_prefix, psm_scheme_slot_suffix, psm_pubkeys_slot_prefix, psm_pubkeys_slot_suffix, 1, MSG] + + exec.signature::verify_signatures + # => [num_verified_signatures, MSG] + + neq.1 + # => [is_not_exactly_one, MSG] + + if.true + emit.AUTH_UNAUTHORIZED_EVENT + push.0 assert.err=ERR_INVALID_PSM_SIGNATURE + end + # => [MSG] + + dropw + # => [] + end +end diff --git a/crates/miden-standards/asm/standards/auth/ecdsa_k256_keccak.masm b/crates/miden-standards/asm/standards/auth/signature.masm similarity index 50% rename from crates/miden-standards/asm/standards/auth/ecdsa_k256_keccak.masm rename to crates/miden-standards/asm/standards/auth/signature.masm index a0be086829..c0ceb29788 100644 --- 
a/crates/miden-standards/asm/standards/auth/ecdsa_k256_keccak.masm +++ b/crates/miden-standards/asm/standards/auth/signature.masm @@ -1,6 +1,8 @@ +use miden::core::crypto::dsa::falcon512poseidon2 +use miden::core::crypto::hashes::poseidon2 use miden::core::crypto::dsa::ecdsa_k256_keccak -use miden::core::crypto::hashes::rpo256 use miden::protocol::active_account +use miden::protocol::auth::AUTH_REQUEST_EVENT use miden::protocol::native_account use miden::protocol::tx use miden::standards::auth @@ -8,17 +10,30 @@ use miden::standards::auth # CONSTANTS # ================================================================================================= -# The event to request an authentication signature. -const AUTH_REQUEST_EVENT=event("miden::auth::request") +# Auth Scheme ID Structure +const ECDSA_K256_KECCAK_SCHEME_ID=1 +const FALCON_512_POSEIDON2_SCHEME_ID=2 # Local Memory Addresses for multisig operations const NUM_OF_APPROVERS_LOC=0 -const PUB_KEY_SLOT_SUFFIX_LOC=4 -const PUB_KEY_SLOT_PREFIX_LOC=5 +const SIGNER_INDEX_LOC=1 +const APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC=4 +const APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC=5 const CURRENT_PK_LOC=8 const SUCCESSFUL_VERIFICATIONS_LOC=12 +const APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC=16 +const APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC=17 -#! Authenticate a transaction using the ECDSA signature scheme. +# ERRORS +# ================================================================================================= +const ERR_INVALID_SCHEME_ID = "invalid signature scheme id: expected 2 for falcon512_poseidon2, 1 for ecdsa_k256_keccak" +const ERR_INVALID_SCHEME_ID_WORD = "invalid scheme ID word format expected three zero values followed by the scheme ID" + +#! Authenticate a transaction using the signature scheme specified by scheme_id. +#! +#! Supported schemes: +#! - 1 => ecdsa_k256_keccak +#! - 2 => falcon512_poseidon2 #! #! It first increments the nonce of the account, independent of whether the account's state has #! changed or not. 
Then it computes and signs the following message (in memory order): @@ -29,7 +44,7 @@ const SUCCESSFUL_VERIFICATIONS_LOC=12 #! included to commit to the transaction creator's intended reference block of the transaction #! which determines the fee parameters and therefore the fee amount that is deducted. #! -#! Inputs: [PUB_KEY] +#! Inputs: [PUB_KEY, scheme_id] #! Outputs: [] #! #! Invocation: exec @@ -37,20 +52,23 @@ pub proc authenticate_transaction # Increment the account's nonce. # --------------------------------------------------------------------------------------------- # This has to happen before computing the delta commitment, otherwise that procedure will abort - push.0.0 exec.tx::get_block_number exec.native_account::incr_nonce - # => [[final_nonce, ref_block_num, 0, 0], PUB_KEY] + exec.tx::get_block_number + push.0.0 + # => [[0, 0, ref_block_num, final_nonce], PUB_KEY, scheme_id] # Compute the message that is signed. # --------------------------------------------------------------------------------------------- exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT, PUB_KEY] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT, PUB_KEY, scheme_id] - exec.auth::adv_insert_hqword + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT, PUB_KEY, scheme_id] # The commitment to the tx summary is the message that is signed exec.auth::hash_tx_summary - # OS => [MESSAGE, PUB_KEY] + # OS => [MESSAGE, PUB_KEY, scheme_id] # AS => [] # Fetch signature from advice provider and verify. 
@@ -58,17 +76,100 @@ pub proc authenticate_transaction # Emit the authentication request event that pushes a signature for the message to the advice stack emit.AUTH_REQUEST_EVENT swapw - # OS => [PUB_KEY, MESSAGE] + # OS => [PUB_KEY, MESSAGE, scheme_id] + # AS => [SIGNATURE] + + movup.8 + # OS => [scheme_id, PUB_KEY, MESSAGE] + # AS => [SIGNATURE] + + dup.0 exec.assert_supported_scheme + # OS => [scheme_id, PUB_KEY, MESSAGE] # AS => [SIGNATURE] # Verify the signature against the public key and the message. The procedure gets as inputs the # hash of the public key and the message via the operand stack. The signature is provided via # the advice stack. The signature is valid if and only if the procedure returns. - exec.ecdsa_k256_keccak::verify + exec.verify_signature_by_scheme # OS => [] # AS => [] end +# Verify signature using scheme_id: +# 1 => ECDSA (ecdsa_k256_keccak) +# 2 => Falcon (falcon512_poseidon2) +# +# Inputs: [scheme_id, PUB_KEY, MSG] +# Outputs: [] +proc verify_signature_by_scheme + dup eq.ECDSA_K256_KECCAK_SCHEME_ID + # => [is_one, scheme_id PUB_KEY, MESSAGE] + + if.true + drop + # OS => [PUB_KEY, MESSAGE] + + exec.ecdsa_k256_keccak::verify + # OS => [] + # AS => [] + else + dup eq.FALCON_512_POSEIDON2_SCHEME_ID + # => [is_2, scheme_id, PUB_KEY, MESSAGE] + + if.true + drop + # OS => [PUB_KEY, MESSAGE] + + exec.falcon512poseidon2::verify + # OS => [] + # AS => [] + else + # ------ Invalid Scheme ID ------ + push.0 assert.err=ERR_INVALID_SCHEME_ID + end + end +end + +#! Returns 1 if scheme_id is supported, else 0. +#! Inputs: [scheme_id] +#! Outputs: [is_supported] +pub proc is_supported_scheme + dup eq.ECDSA_K256_KECCAK_SCHEME_ID + # => [is_1, scheme_id] + + swap eq.FALCON_512_POSEIDON2_SCHEME_ID + # => [is_2, is_1] + + or + # => [is_supported] +end + +#! Reverts if scheme_id is not supported. +#! Inputs: [scheme_id] +#! 
Outputs: [] +pub proc assert_supported_scheme + exec.is_supported_scheme + # => [is_supported] + + assert.err=ERR_INVALID_SCHEME_ID + # => [] +end + + +#! Validates scheme id word shape: [scheme_id, 0, 0, 0] +#! Inputs: [SCHEME_ID_WORD] +#! Outputs: [SCHEME_ID_WORD] +pub proc assert_supported_scheme_word + dupw exec.assert_supported_scheme + # => [0, 0, 0, SCHEME_ID_WORD] + + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + neq.0 assertz.err=ERR_INVALID_SCHEME_ID_WORD + # => [SCHEME_ID_WORD] +end + + #! Verify signatures for all required signers in a loop. #! #! This procedure iterates through the required number of signers, fetches their public keys from @@ -80,12 +181,18 @@ end #! the owner public key mapping - the previous signers must authorize the change to the new signers, #! not the new signers authorizing themselves. #! -#! Inputs: [pub_key_slot_prefix, pub_key_slot_suffix, num_of_approvers, MSG] +#! Inputs: [approver_scheme_id_slot_id_suffix, approver_scheme_id_slot_id_prefix, +#! approver_pub_key_slot_id_suffix, approver_pub_key_slot_id_prefix, +#! num_of_approvers, MSG] #! Outputs: [num_verified_signatures, MSG] -@locals(16) +@locals(18) pub proc verify_signatures - loc_store.PUB_KEY_SLOT_PREFIX_LOC - loc_store.PUB_KEY_SLOT_SUFFIX_LOC + loc_store.APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC + loc_store.APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC + # => [approver_pub_key_slot_id_suffix, approver_pub_key_slot_id_prefix, num_of_approvers, MSG] + + loc_store.APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC + loc_store.APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC # => [num_of_approvers, MSG] # Initializing SUCCESSFUL_VERIFICATIONS local memory address to 0 @@ -105,24 +212,31 @@ pub proc verify_signatures # Fetch public key from storage map. 
# ----------------------------------------------------------------------------------------- - sub.1 dup push.0.0.0 - loc_load.PUB_KEY_SLOT_SUFFIX_LOC loc_load.PUB_KEY_SLOT_PREFIX_LOC - # => [owner_key_slot_prefix, owner_key_slot_suffix, [0, 0, 0, i-1], i-1, MSG] + sub.1 dup dup loc_store.SIGNER_INDEX_LOC + # => [i-1, i-1, MSG] + + exec.create_approver_map_key + # => [APPROVER_MAP_KEY, i-1, MSG] # Get public key from initial storage state + loc_load.APPROVER_PUB_KEY_SLOT_ID_PREFIX_LOC + loc_load.APPROVER_PUB_KEY_SLOT_ID_SUFFIX_LOC exec.active_account::get_initial_map_item # => [OWNER_PUB_KEY, i-1, MSG] - loc_storew_be.CURRENT_PK_LOC + loc_storew_le.CURRENT_PK_LOC # => [OWNER_PUB_KEY, i-1, MSG] # Check if signature exists for this signer. # ----------------------------------------------------------------------------------------- - movup.4 movdn.8 swapw dupw movdnw.2 - # => [MSG, OWNER_PUB_KEY, MSG, i-1] + movup.4 movdn.8 + # => [OWNER_PUB_KEY, MSG, i-1] + + dupw.1 swapw + # => [OWNER_PUB_KEY, MSG, MSG, i-1] - exec.rpo256::merge + exec.poseidon2::merge # => [SIG_KEY, MSG, i-1] adv.has_mapkey @@ -144,7 +258,7 @@ pub proc verify_signatures # Verify the signature against the public key and message. # ----------------------------------------------------------------------------------------- - loc_loadw_be.CURRENT_PK_LOC + loc_loadw_le.CURRENT_PK_LOC # => [PK, MSG, MSG, i-1] swapw @@ -154,10 +268,24 @@ pub proc verify_signatures emit.AUTH_REQUEST_EVENT swapw - # OS => [PUB_KEY, MSG, MSG, i-1] + # => [PUB_KEY, MSG, MSG, i-1] + + # Build map key from the current signer index. + loc_load.SIGNER_INDEX_LOC exec.create_approver_map_key + # => [APPROVER_MAP_KEY, PUB_KEY, MSG, MSG, i-1] + + loc_load.APPROVER_SCHEME_ID_SLOT_ID_PREFIX_LOC loc_load.APPROVER_SCHEME_ID_SLOT_ID_SUFFIX_LOC + # => [scheme_slot_id_suffix, scheme_slot_id_prefix, APPROVER_MAP_KEY, PUB_KEY, MSG, MSG, i-1] + + # Get scheme_id for signer index i-1 from initial storage state. 
+ exec.active_account::get_initial_map_item + # => [[scheme_id, 0, 0, 0], PUB_KEY, MSG, MSG, i-1] + + movdn.3 drop drop drop + # OS => [scheme_id, PUB_KEY, MSG, MSG, i-1] # AS => [SIGNATURE] - exec.ecdsa_k256_keccak::verify + exec.verify_signature_by_scheme # => [MSG, i-1] loc_load.SUCCESSFUL_VERIFICATIONS_LOC @@ -187,3 +315,13 @@ pub proc verify_signatures loc_load.SUCCESSFUL_VERIFICATIONS_LOC # => [num_verified_signatures, MSG] end + +#! Builds the storage map key for a signer index. +#! +#! Inputs: [key_index] +#! Outputs: [APPROVER_MAP_KEY] +proc create_approver_map_key + push.0.0.0 movup.3 + # => [[key_index, 0, 0, 0]] + # => [APPROVER_MAP_KEY] +end diff --git a/crates/miden-standards/asm/standards/auth/tx_policy.masm b/crates/miden-standards/asm/standards/auth/tx_policy.masm new file mode 100644 index 0000000000..76da300070 --- /dev/null +++ b/crates/miden-standards/asm/standards/auth/tx_policy.masm @@ -0,0 +1,80 @@ +use miden::protocol::active_account +use miden::protocol::native_account +use miden::protocol::tx + +const ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE = "procedure must be called alone" +const ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES = "transaction must not include input or output notes" + +#! Asserts that exactly one non-auth account procedure was called in the current transaction. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Invocation: exec +@locals(1) # non-auth called proc count +pub proc assert_only_one_non_auth_procedure_called + push.0 + loc_store.0 + # => [] + + exec.active_account::get_num_procedures + # => [num_procedures] + + dup neq.0 + # => [should_continue, num_procedures] + while.true + sub.1 dup + exec.active_account::get_procedure_root dupw + # => [PROC_ROOT, PROC_ROOT] + + exec.native_account::was_procedure_called + # => [was_called, PROC_ROOT] + + if.true + dropw + # => [proc_index] + + # The auth procedure is always at procedure index 0. 
+ dup neq.0 + # => [is_not_auth_proc, proc_index] + + if.true + loc_load.0 add.1 loc_store.0 + # => [proc_index] + end + else + dropw + # => [proc_index] + end + + dup neq.0 + # => [should_continue, proc_index] + end + + drop + # => [] + + loc_load.0 eq.1 + assert.err=ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE + # => [] +end + +#! Asserts that the current transaction does not consume input notes or create output notes. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Invocation: exec +pub proc assert_no_input_or_output_notes + exec.tx::get_num_input_notes + # => [num_input_notes] + + assertz.err=ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + # => [] + + exec.tx::get_num_output_notes + # => [num_output_notes] + + assertz.err=ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + # => [] +end diff --git a/crates/miden-standards/asm/standards/data_structures/array.masm b/crates/miden-standards/asm/standards/data_structures/array.masm new file mode 100644 index 0000000000..438c76b9e4 --- /dev/null +++ b/crates/miden-standards/asm/standards/data_structures/array.masm @@ -0,0 +1,60 @@ +# The MASM code for the Array abstraction. +# +# It provides an abstraction layer over a storage map, treating it as an array, +# with "set" and "get" for storing and retrieving words by (slot_id, index). +# The array can store up to 2^64 - 2^32 + 1 elements (indices 0 to 2^64 - 2^32). +# +# Using this Array utility requires that the underlying storage map is already created and +# initialized as part of an account component, under the given slot ID. + +use miden::protocol::active_account +use miden::protocol::native_account + +# PROCEDURES +# ================================================================================================= + +#! Sets a word in the array at the specified index. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, index, VALUE] +#! Outputs: [OLD_VALUE] +#! +#! Where: +#! 
- slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. +#! - index is the index at which to store the value (0 to 2^64 - 2^32). +#! - VALUE is the word to store at the specified index. +#! +#! Invocation: exec +pub proc set(slot_id_suffix: felt, slot_id_prefix: felt, index: felt, value: word) -> word + # Build KEY = [0, 0, 0, index] + movup.2 push.0.0.0 + # => [0, 0, 0, index, slot_id_suffix, slot_id_prefix, VALUE] + + movup.5 movup.5 + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] + + exec.native_account::set_map_item + # => [OLD_VALUE] +end + +#! Gets a word from the array at the specified index. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, index] +#! Outputs: [VALUE] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. +#! - index is the index of the element to retrieve (0 to 2^64 - 2^32). +#! - VALUE is the word stored at the specified index (zero if not set). +#! +#! Invocation: exec +pub proc get(slot_id_suffix: felt, slot_id_prefix: felt, index: felt) -> word + # Build KEY = [0, 0, 0, index] + movup.2 push.0.0.0 + # => [0, 0, 0, index, slot_id_suffix, slot_id_prefix] + + movup.5 movup.5 + # => [slot_id_suffix, slot_id_prefix, KEY] + + exec.active_account::get_map_item + # => [VALUE] +end diff --git a/crates/miden-standards/asm/standards/data_structures/double_word_array.masm b/crates/miden-standards/asm/standards/data_structures/double_word_array.masm new file mode 100644 index 0000000000..a2cf4f23cc --- /dev/null +++ b/crates/miden-standards/asm/standards/data_structures/double_word_array.masm @@ -0,0 +1,133 @@ +# The MASM code for the Double-Word Array abstraction. +# +# It provides an abstraction layer over a storage map, treating it as an array +# of double-words, with "set" and "get" for storing and retrieving values by +# (slot_id, index). +# The array can store up to 2^64 - 2^32 + 1 elements (indices 0 to 2^64 - 2^32). 
+# +# Using this Double-Word Array utility requires that the underlying storage map is already created and +# initialized as part of an account component, under the given slot ID. + + +use miden::protocol::active_account +use miden::protocol::native_account + +# Local Memory Addresses +const SLOT_ID_PREFIX_LOC=0 +const SLOT_ID_SUFFIX_LOC=1 +const INDEX_LOC=2 + +type DoubleWord = struct { + a: felt, + b: felt, + c: felt, + d: felt, + e: felt, + f: felt, + g: felt, + h: felt +} + +# PROCEDURES +# ================================================================================================= + +#! Sets a double-word in the array at the specified index. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, index, VALUE_0, VALUE_1] +#! Outputs: [OLD_VALUE_0, OLD_VALUE_1] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. +#! - index is the index at which to store the value (0 to 2^64 - 2^32). +#! - VALUE_0 is the first word to store at the specified index. +#! - VALUE_1 is the second word to store at the specified index. +#! +#! Internally, the words are stored under keys [index, 0, 0, 0] and [index, 1, 0, 0], +#! for the first and second word, respectively. +#! +#! Invocation: exec +@locals(3) +pub proc set( + slot_id_suffix: felt, + slot_id_prefix: felt, + index: felt, + value: DoubleWord +) -> DoubleWord + # save inputs to locals for reuse + loc_store.SLOT_ID_SUFFIX_LOC + loc_store.SLOT_ID_PREFIX_LOC + loc_store.INDEX_LOC + # => [VALUE_0, VALUE_1] + + # Set the first word under key [0, 0, 0, index]. + loc_load.INDEX_LOC + push.0.0.0 + # => [0, 0, 0, index, VALUE_0, VALUE_1] + + loc_load.SLOT_ID_PREFIX_LOC + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_0, VALUE_0, VALUE_1] + + exec.native_account::set_map_item + # => [OLD_VALUE_0, VALUE_1] + swapw + + # Set the second word under key [0, 0, 1, index]. 
+ loc_load.INDEX_LOC + push.1.0.0 + # => [0, 0, 1, index, VALUE_1, OLD_VALUE_0] + + loc_load.SLOT_ID_PREFIX_LOC + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_1, VALUE_1, OLD_VALUE_0] + + exec.native_account::set_map_item + # => [OLD_VALUE_1, OLD_VALUE_0] + swapw +end + +#! Gets a double-word from the array at the specified index. +#! +#! Inputs: [slot_id_suffix, slot_id_prefix, index] +#! Outputs: [VALUE_0, VALUE_1] +#! +#! Where: +#! - slot_id_{suffix, prefix} are the suffix and prefix felts of the slot identifier. +#! - index is the index of the element to retrieve (0 to 2^64 - 2^32). +#! - VALUE_0 is the first word stored at the specified index (zero if not set). +#! - VALUE_1 is the second word stored at the specified index (zero if not set). +#! +#! Invocation: exec +@locals(3) +pub proc get(slot_id_suffix: felt, slot_id_prefix: felt, index: felt) -> DoubleWord + # Save inputs to locals for reuse. + loc_store.SLOT_ID_SUFFIX_LOC + loc_store.SLOT_ID_PREFIX_LOC + loc_store.INDEX_LOC + # => [] + + # Get the first word from key [0, 0, 0, index]. + loc_load.INDEX_LOC + push.0.0.0 + # => [0, 0, 0, index] + + loc_load.SLOT_ID_PREFIX_LOC + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_0] + + exec.active_account::get_map_item + # => [VALUE_0] + + # Get the second word from key [0, 0, 1, index]. 
+ loc_load.INDEX_LOC + push.1.0.0 + # => [0, 0, 1, index, VALUE_0] + + loc_load.SLOT_ID_PREFIX_LOC + loc_load.SLOT_ID_SUFFIX_LOC + # => [slot_id_suffix, slot_id_prefix, KEY_1, VALUE_0] + + exec.active_account::get_map_item + swapw + # => [VALUE_0, VALUE_1] +end diff --git a/crates/miden-standards/asm/standards/faucets/basic_fungible.masm b/crates/miden-standards/asm/standards/faucets/basic_fungible.masm index 11fbb4353b..b1229ff40e 100644 --- a/crates/miden-standards/asm/standards/faucets/basic_fungible.masm +++ b/crates/miden-standards/asm/standards/faucets/basic_fungible.masm @@ -1,47 +1,28 @@ # BASIC FUNGIBLE FAUCET CONTRACT -# ================================================================================================= -# This is a basic fungible faucet smart contract. # -# It allows the owner of the faucet to mint, distribute, and burn tokens. Token metadata is stored -# in account storage at position 1 as [max_supply, decimals, token_symbol, 0], where: -# - max_supply is the maximum supply of the token. -# - decimals are the decimals of the token. -# - token_symbol as three chars encoded in a Felt. - -use miden::standards::faucets - -# CONSTANTS +# See the `BasicFungibleFaucet` documentation for details. # ================================================================================================= -const PRIVATE_NOTE=2 - -# ERRORS -# ================================================================================================= -const ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED="distribute would cause the maximum supply to be exceeded" - -const ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS="burn requires exactly 1 note asset" +use miden::standards::faucets -# CONSTANTS +# PROCEDURES # ================================================================================================= #! Distributes freshly minted fungible assets to the provided recipient by creating a note. #! -#! 
Inputs: [amount, tag, aux, note_type, execution_hint, RECIPIENT, pad(7)] +#! Inputs: [amount, tag, note_type, RECIPIENT, pad(9)] #! Outputs: [note_idx, pad(15)] #! #! Where: #! - amount is the amount to be minted and sent. #! - tag is the tag to be included in the note. -#! - aux is the auxiliary data to be included in the note. #! - note_type is the type of the note that holds the asset. -#! - execution_hint is the execution hint of the note that holds the asset. #! - RECIPIENT is the recipient of the asset, i.e., -#! hash(hash(hash(serial_num, [0; 4]), script_root), input_commitment). +#! hash(hash(hash(serial_num, [0; 4]), script_root), storage_commitment). #! - note_idx is the index of the created note. #! #! Panics if: -#! - the transaction is being executed against an account that is not a fungible asset faucet. -#! - the total issuance after minting is greater than the maximum allowed supply. +#! - any of the validations in faucets::distribute fail. #! #! Invocation: call pub proc distribute diff --git a/crates/miden-standards/asm/standards/faucets/mod.masm b/crates/miden-standards/asm/standards/faucets/mod.masm index 68a7d0652e..bee1948a2c 100644 --- a/crates/miden-standards/asm/standards/faucets/mod.masm +++ b/crates/miden-standards/asm/standards/faucets/mod.masm @@ -1,23 +1,38 @@ use miden::protocol::active_account use miden::protocol::active_note use miden::protocol::faucet +use miden::protocol::native_account use miden::protocol::output_note +use miden::protocol::asset +use ::miden::protocol::asset::FUNGIBLE_ASSET_MAX_AMOUNT # CONSTANTS # ================================================================================================= +const ASSET_PTR=0 const PRIVATE_NOTE=2 # ERRORS # ================================================================================================= -const ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED="distribute would cause the maximum supply to be exceeded" + +const 
ERR_FUNGIBLE_ASSET_TOKEN_SUPPLY_EXCEEDS_MAX_SUPPLY="token supply exceeds max supply" + +const ERR_FUNGIBLE_ASSET_MAX_SUPPLY_EXCEEDS_FUNGIBLE_ASSET_MAX_AMOUNT="max supply exceeds maximum representable fungible asset amount" + +const ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY="token_supply plus the amount passed to distribute would exceed the maximum supply" + +const ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY="asset amount to burn exceeds the existing token supply" const ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS="burn requires exactly 1 note asset" # CONSTANTS # ================================================================================================= +# The local memory address at which the metadata slot content is stored. +const METADATA_SLOT_LOCAL=0 + # The standard slot where fungible faucet metadata like token symbol or decimals are stored. +# Layout: [token_supply, max_supply, decimals, token_symbol] const METADATA_SLOT=word("miden::standards::fungible_faucets::metadata") #! Distributes freshly minted fungible assets to the provided recipient by creating a note. @@ -30,49 +45,112 @@ const METADATA_SLOT=word("miden::standards::fungible_faucets::metadata") #! - tag is the tag to be included in the note. #! - note_type is the type of the note that holds the asset. #! - RECIPIENT is the recipient of the asset, i.e., -#! hash(hash(hash(serial_num, [0; 4]), script_root), input_commitment). +#! hash(hash(hash(serial_num, [0; 4]), script_root), storage_commitment). #! - note_idx is the index of the created note. #! #! Panics if: #! - the transaction is being executed against an account that is not a fungible asset faucet. -#! - the total issuance after minting is greater than the maximum allowed supply. +#! - the token supply exceeds the maximum supply. +#! - the maximum supply exceeds the maximum representable fungible asset amount. +#! - the token supply after minting is greater than the maximum allowed supply. #! #! 
Invocation: exec +@locals(4) pub proc distribute - # get max supply of this faucet. We assume it is stored at pos 3 of slot 0 - push.METADATA_SLOT[0..2] exec.active_account::get_item drop drop drop - # => [max_supply, amount, tag, note_type, RECIPIENT] + # Get the configured max supply and the token supply (= current supply). + # --------------------------------------------------------------------------------------------- + + push.METADATA_SLOT[0..2] exec.active_account::get_item + # => [token_supply, max_supply, decimals, token_symbol, amount, tag, note_type, RECIPIENT] + + # store a copy of the current slot content for the token_supply update later + loc_storew_le.METADATA_SLOT_LOCAL + swap movup.2 drop movup.2 drop + # => [max_supply, token_supply, amount, tag, note_type, RECIPIENT] + + # Assert that minting does not violate any supply constraints. + # + # To make sure we cannot mint more than intended, we need to check: + # 1) (max_supply - token_supply) <= max_supply, i.e. the subtraction does not wrap around + # 2) amount + token_supply does not exceed max_supply + # 3) amount + token_supply is less than FUNGIBLE_ASSET_MAX_AMOUNT + # + # This is done with the following concrete assertions: + # - assert token_supply <= max_supply which ensures 1) + # - assert max_supply <= FUNGIBLE_ASSET_MAX_AMOUNT to help ensure 3) + # - assert amount <= max_mint_amount to ensure 2) as well as 3) + # - this ensures 3) because token_supply + max_mint_amount at most ends up being equal to + # max_supply and we already asserted that max_supply does not exceed + # FUNGIBLE_ASSET_MAX_AMOUNT + # --------------------------------------------------------------------------------------------- + + dup.1 dup.1 + # => [max_supply, token_supply, max_supply, token_supply, amount, tag, note_type, RECIPIENT] + + # assert that token_supply <= max_supply + lte assert.err=ERR_FUNGIBLE_ASSET_TOKEN_SUPPLY_EXCEEDS_MAX_SUPPLY + # => [max_supply, token_supply, amount, tag, note_type, RECIPIENT] + + # 
assert max_supply <= FUNGIBLE_ASSET_MAX_AMOUNT + dup lte.FUNGIBLE_ASSET_MAX_AMOUNT + assert.err=ERR_FUNGIBLE_ASSET_MAX_SUPPLY_EXCEEDS_FUNGIBLE_ASSET_MAX_AMOUNT + # => [max_supply, token_supply, amount, tag, note_type, RECIPIENT] + + dup.2 swap dup.2 + # => [token_supply, max_supply, amount, token_supply, amount, tag, note_type, RECIPIENT] + + # compute maximum amount that can be minted, max_mint_amount = max_supply - token_supply + sub + # => [max_mint_amount, amount, token_supply, amount, tag, note_type, RECIPIENT] - # get total issuance of this faucet so far and add amount to be minted - exec.faucet::get_total_issuance - # => [total_issuance, max_supply, amount, tag, note_type, RECIPIENT] + # assert amount <= max_mint_amount + lte assert.err=ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY + # => [token_supply, amount, tag, note_type, RECIPIENT] - # compute maximum amount that can be minted, max_mint_amount = max_supply - total_issuance - sub - # => [max_supply - total_issuance, amount, tag, note_type, RECIPIENT] + # Compute the new token_supply and update in storage. + # --------------------------------------------------------------------------------------------- + + dup.1 add + # => [new_token_supply, amount, tag, note_type, RECIPIENT] - # check that amount =< max_supply - total_issuance, fails if otherwise - dup.1 gte assert.err=ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED + padw loc_loadw_le.METADATA_SLOT_LOCAL + # => [[token_supply, max_supply, decimals, token_symbol], new_token_supply, amount, tag, note_type, RECIPIENT] + + drop movup.3 + # => [[new_token_supply, max_supply, decimals, token_symbol], amount, tag, note_type, RECIPIENT] + + # update the metadata slot with the new supply + push.METADATA_SLOT[0..2] exec.native_account::set_item dropw # => [amount, tag, note_type, RECIPIENT] + # Create a new note. 
+ # --------------------------------------------------------------------------------------------- + + movdn.6 exec.output_note::create + # => [note_idx, amount] + + dup movup.2 + # => [amount, note_idx, note_idx] + + # Mint the asset. + # --------------------------------------------------------------------------------------------- + # creating the asset exec.faucet::create_fungible_asset - # => [ASSET, tag, note_type, RECIPIENT] + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] + + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE, note_idx, note_idx] # mint the asset; this is needed to satisfy asset preservation logic. + # this ensures that the asset's faucet ID matches the native account's ID. + # this is ensured because create_fungible_asset creates the asset with the native account's ID exec.faucet::mint - # => [ASSET, tag, note_type, RECIPIENT] - - movdn.9 movdn.9 movdn.9 movdn.9 - # => [tag, note_type, RECIPIENT, ASSET] - - # create a note - exec.output_note::create - # => [note_idx, ASSET] + dropw + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] - # load the ASSET and add it to the note - dup movdn.5 movdn.5 - # => [ASSET, note_idx, note_idx] + # Add the asset to the note. + # --------------------------------------------------------------------------------------------- exec.output_note::add_asset # => [note_idx] @@ -80,39 +158,70 @@ end #! Burns the fungible asset from the active note. #! +#! Burning the asset removes it from circulation and reduces the token_supply by the asset's amount. +#! #! This procedure retrieves the asset from the active note and burns it. The note must contain #! exactly one asset, which must be a fungible asset issued by this faucet. #! #! Inputs: [pad(16)] #! Outputs: [pad(16)] #! -#! Where: -#! - ASSET is the fungible asset that was burned. -#! #! Panics if: #! - the procedure is not called from a note context (active_note::get_assets will fail). #! - the note does not contain exactly one asset. #! 
- the transaction is executed against an account which is not a fungible asset faucet. #! - the transaction is executed against a faucet which is not the origin of the specified asset. -#! - the amount about to be burned is greater than the outstanding supply of the asset. +#! - the amount about to be burned is greater than the token_supply of the faucet. #! #! Invocation: call pub proc burn - # Get the assets from the note. This will fail if not called from a note context. - push.0 exec.active_note::get_assets + # Get the asset from the note. + # --------------------------------------------------------------------------------------------- + + # this will fail if not called from a note context. + push.ASSET_PTR exec.active_note::get_assets # => [num_assets, dest_ptr, pad(16)] # Verify we have exactly one asset assert.err=ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS # => [dest_ptr, pad(16)] - mem_loadw_be - # => [ASSET, pad(16)] + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(16)] - # burning the asset - exec.faucet::burn - # => [ASSET, pad(16)] + # Burn the asset from the transaction vault + # --------------------------------------------------------------------------------------------- - dropw + exec.asset::fungible_to_amount movdn.8 + # => [ASSET_KEY, ASSET_VALUE, amount, pad(16)] + + # burn the asset + # this ensures we only burn assets that were issued by this faucet (which implies they are + # fungible) + exec.faucet::burn dropw + # => [amount, pad(16)] + + # Subtract burnt amount from current token_supply in storage. 
+ # --------------------------------------------------------------------------------------------- + + push.METADATA_SLOT[0..2] exec.active_account::get_item + # => [token_supply, max_supply, decimals, token_symbol, amount, pad(16)] + + dup.4 dup.1 + # => [token_supply, amount, token_supply, max_supply, decimals, token_symbol, amount, pad(16)] + + # assert that amount <= token_supply + lte assert.err=ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY + # => [token_supply, max_supply, decimals, token_symbol, amount, pad(16)] + + movup.4 + # => [amount, token_supply, max_supply, decimals, token_symbol, pad(16)] + + # compute new_token_supply = token_supply - amount + sub + # => [new_token_supply, max_supply, decimals, token_symbol, pad(16)] + + # update the metadata slot with the new supply + push.METADATA_SLOT[0..2] exec.native_account::set_item dropw # => [pad(16)] end diff --git a/crates/miden-standards/asm/standards/faucets/network_fungible.masm b/crates/miden-standards/asm/standards/faucets/network_fungible.masm index 5f405db8fe..6d23f55f8b 100644 --- a/crates/miden-standards/asm/standards/faucets/network_fungible.masm +++ b/crates/miden-standards/asm/standards/faucets/network_fungible.masm @@ -1,50 +1,9 @@ -use miden::protocol::active_note use miden::standards::faucets -use miden::standards::access::ownable +use miden::standards::access::ownable2step # PUBLIC INTERFACE # ================================================================================================ -# OWNER MANAGEMENT -# ------------------------------------------------------------------------------------------------ - -#! Returns the owner AccountId. -#! -#! Inputs: [] -#! Outputs: [owner_prefix, owner_suffix, pad(14)] -#! -#! Invocation: call -pub use ownable::get_owner - -#! Transfers ownership to a new account. -#! -#! Can only be called by the current owner. -#! -#! Inputs: [new_owner_prefix, new_owner_suffix, pad(14)] -#! Outputs: [pad(16)] -#! -#! Where: -#! 
- new_owner_{prefix, suffix} are the prefix and suffix felts of the new owner AccountId. -#! -#! Panics if: -#! - the note sender is not the owner. -#! -#! Invocation: call -pub use ownable::transfer_ownership - -#! Renounces ownership, leaving the component without an owner. -#! -#! Can only be called by the current owner. -#! -#! Inputs: [pad(16)] -#! Outputs: [pad(16)] -#! -#! Panics if: -#! - the note sender is not the owner. -#! -#! Invocation: call -pub use ownable::renounce_ownership - # ASSET DISTRIBUTION # ------------------------------------------------------------------------------------------------ @@ -69,7 +28,7 @@ pub use ownable::renounce_ownership #! #! Invocation: call pub proc distribute - exec.ownable::verify_owner + exec.ownable2step::assert_sender_is_owner # => [amount, tag, aux, note_type, execution_hint, RECIPIENT, pad(7)] exec.faucets::distribute diff --git a/crates/miden-standards/asm/standards/note/execution_hint.masm b/crates/miden-standards/asm/standards/note/execution_hint.masm new file mode 100644 index 0000000000..f3d2fb3a03 --- /dev/null +++ b/crates/miden-standards/asm/standards/note/execution_hint.masm @@ -0,0 +1,22 @@ +# CONSTANTS +# ================================================================================================= + +#! Unspecified execution hint. The conditions under which the note is consumable are not known. +#! +#! Felt encoding: `0` +pub const NONE = 0 + +#! The note's script can be executed at any time. +#! +#! Felt encoding: `1` +pub const ALWAYS = 1 + +#! The note's script can be executed after the specified block number. +#! +#! This constant encodes only the tag bits. The block number must be encoded in the payload. +pub const AFTER_BLOCK = 2 + +#! The note's script can be executed in a specified slot within a specified round. +#! +#! This constant encodes only the tag bits. The slot parameters must be encoded in the payload. 
+pub const ON_BLOCK_SLOT = 3 diff --git a/crates/miden-standards/asm/standards/note_tag/mod.masm b/crates/miden-standards/asm/standards/note_tag/mod.masm new file mode 100644 index 0000000000..83568c66ed --- /dev/null +++ b/crates/miden-standards/asm/standards/note_tag/mod.masm @@ -0,0 +1,98 @@ +use miden::core::math::u64 + +# ERRORS +# ================================================================================================= + +const ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED="note tag length can be at most 32" + +# CONSTANTS +# ================================================================================================= + +# The maximum account target tag length. +const MAX_ACCOUNT_TARGET_TAG_LENGTH = 32 + +# The default account target tag length. +const DEFAULT_ACCOUNT_TARGET_TAG_LENGTH = 14 + +#! The default note tag value (zero). Used for notes that rely on attachments +#! rather than tags for targeting. +pub const DEFAULT_TAG = 0 + +# PROCEDURES +# ================================================================================================= + +#! Constructs a note tag that targets the given account_id_prefix with the default tag_len of 14. +#! +#! The tag is a u32 constructed by taking the 14 most significant bits of the account ID prefix and +#! setting the remaining bits to zero. +#! +#! Inputs: [account_id_prefix] +#! Outputs: [note_tag] +#! +#! Where: +#! - account_id_prefix is the account id prefix to compute the note tag for. +#! - note_tag is the created note tag. +#! +#! Invocation: exec +pub proc create_account_target + push.DEFAULT_ACCOUNT_TARGET_TAG_LENGTH + exec.create_custom_account_target + # => [note_tag] +end + +#! Constructs a note tag that targets the given account_id_prefix with the provided tag_len. +#! +#! The tag is a u32 constructed by taking the `tag_len` most significant bits of the account ID +#! prefix and setting the remaining bits to zero. +#! +#! 
See the Rust `NoteTag` documentation for what changing the tag length means. +#! +#! Inputs: [tag_len, account_id_prefix] +#! Outputs: [note_tag] +#! +#! Where: +#! - account_id_prefix is the account id prefix to compute the note tag for. +#! - note_tag is the created note tag. +#! - tag_len is the number of most significant bits from the account ID prefix that should be used +#! for the tag. +#! +#! Panics if: +#! - the tag_len exceeds 32. +#! +#! Invocation: exec +pub proc create_custom_account_target + u32assert.err=ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED + # => [tag_len, account_id_prefix] + + dup u32lte.MAX_ACCOUNT_TARGET_TAG_LENGTH + assert.err=ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED + # => [tag_len, account_id_prefix] + + # create a bit mask that zeros out the lower (32 - tag_len) bits. + # since u32shl panics for a 32 shift, we need to use u64::shl in case tag_len is 0 + + # push u32::MAX as a u64 (hi limb set to zero) + push.0 push.0xffffffff + # => [u32::MAX, 0, tag_len, account_id_prefix] + + # compute "number of bits in u32" - tag_len + push.32 movup.3 sub + # => [shift_by, u32::MAX, 0, account_id_prefix] + + exec.u64::shl + # => [bit_mask_lo, bit_mask_hi, account_id_prefix] + + # the mask we need is the lo limb so discard the hi limb + swap drop + # => [bit_mask, account_id_prefix] + + swap u32split + # => [account_id_prefix_lo, account_id_prefix_hi, bit_mask] + + # discard the lo part of the ID prefix + drop + # => [account_id_prefix_hi, bit_mask] + + u32and + # => [note_tag] +end diff --git a/crates/miden-standards/asm/standards/notes/burn.masm b/crates/miden-standards/asm/standards/notes/burn.masm index 20696cc7bc..71f485784c 100644 --- a/crates/miden-standards/asm/standards/notes/burn.masm +++ b/crates/miden-standards/asm/standards/notes/burn.masm @@ -18,6 +18,7 @@ use miden::standards::faucets #! Panics if: #! - account does not expose burn procedure. #! - any of the validations in the burn procedure fail. 
+@note_script pub proc main dropw # => [pad(16)] diff --git a/crates/miden-standards/asm/standards/notes/mint.masm b/crates/miden-standards/asm/standards/notes/mint.masm index 7934306e33..655c496b77 100644 --- a/crates/miden-standards/asm/standards/notes/mint.masm +++ b/crates/miden-standards/asm/standards/notes/mint.masm @@ -6,23 +6,23 @@ use miden::standards::faucets::network_fungible->network_faucet # CONSTANTS # ================================================================================================= -const MINT_NOTE_NUM_INPUTS_PRIVATE=12 -const MINT_NOTE_MIN_NUM_INPUTS_PUBLIC=16 +const MINT_NOTE_NUM_STORAGE_ITEMS_PRIVATE=12 +const MINT_NOTE_MIN_NUM_STORAGE_ITEMS_PUBLIC=16 const OUTPUT_NOTE_TYPE_PUBLIC=1 const OUTPUT_NOTE_TYPE_PRIVATE=2 -# Memory Addresses of MINT note inputs -# The attachment is at the same memory address for both private and public inputs. +# Memory Addresses of MINT note storage +# The attachment is at the same memory address for both private and public storage. const ATTACHMENT_KIND_ADDRESS=2 const ATTACHMENT_SCHEME_ADDRESS=3 const ATTACHMENT_ADDRESS=4 -const OUTPUT_PUBLIC_NOTE_INPUTS_ADDR=16 +const OUTPUT_PUBLIC_NOTE_STORAGE_ADDR=16 # ERRORS # ================================================================================================= -const ERR_MINT_WRONG_NUMBER_OF_INPUTS="MINT script expects exactly 12 inputs for private or 16+ inputs for public output notes" +const ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="MINT script expects exactly 12 storage items for private or 16+ storage items for public output notes" #! Network Faucet MINT script: mints assets by calling the network faucet's distribute function. #! This note is intended to be executed against a network fungible faucet account. @@ -33,10 +33,10 @@ const ERR_MINT_WRONG_NUMBER_OF_INPUTS="MINT script expects exactly 12 inputs for #! Inputs: [ARGS, pad(12)] #! Outputs: [pad(16)] #! -#! Note inputs support two modes. Depending on the number of note inputs, +#! 
Note storage supports two modes. Depending on the number of note storage items, #! a private or public note is created on consumption of the MINT note: #! -#! Private mode (12 inputs) - creates a private note: +#! Private mode (12 storage items) - creates a private note: #! - tag: Note tag for the output note #! - amount: The amount to mint #! - attachment_scheme: The user-defined type of the attachment. @@ -44,7 +44,7 @@ const ERR_MINT_WRONG_NUMBER_OF_INPUTS="MINT script expects exactly 12 inputs for #! - ATTACHMENT: The attachment to be set. #! - RECIPIENT: The recipient digest (4 elements) #! -#! Public mode (16+ inputs) - creates a public note with variable-length inputs: +#! Public mode (16+ storage items) - creates a public note with variable-length storage: #! - tag: Note tag for the output note #! - amount: The amount to mint #! - attachment_scheme: The user-defined type of the attachment. @@ -52,45 +52,46 @@ const ERR_MINT_WRONG_NUMBER_OF_INPUTS="MINT script expects exactly 12 inputs for #! - ATTACHMENT: The attachment to be set. #! - SCRIPT_ROOT: Script root of the output note (4 elements) #! - SERIAL_NUM: Serial number of the output note (4 elements) -#! - [INPUTS]: Variable-length inputs for the output note (Vec) -#! The number of output note inputs = num_mint_note_inputs - 16 +#! - [STORAGE]: Variable-length storage for the output note (Vec) +#! The number of output note storage items = num_mint_note_storage_items - 16 #! #! Panics if: #! - account does not expose distribute procedure. -#! - the number of inputs is not exactly 12 for private or less than 16 for public output notes. +#! - the number of storage items is not exactly 12 for private or less than 16 for public output notes. 
+@note_script pub proc main dropw # => [pad(16)] - # Load note inputs into memory starting at address 0 - push.0 exec.active_note::get_inputs - # => [total_inputs, inputs_ptr, pad(16)] + # Load note storage into memory starting at address 0 + push.0 exec.active_note::get_storage + # => [num_storage_items, storage_ptr, pad(16)] dup - # => [num_inputs, num_inputs, inputs_ptr, pad(16)] + # => [num_storage_items, num_storage_items, storage_ptr, pad(16)] - u32assert2.err=ERR_MINT_WRONG_NUMBER_OF_INPUTS - u32gte.MINT_NOTE_MIN_NUM_INPUTS_PUBLIC - # => [is_public_output_note, total_inputs, inputs_ptr, pad(16)] + u32assert2.err=ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS + u32gte.MINT_NOTE_MIN_NUM_STORAGE_ITEMS_PUBLIC + # => [is_public_output_note, num_storage_items, storage_ptr, pad(16)] if.true # public output note creation - # => [total_inputs, inputs_ptr, pad(16)] + # => [num_storage_items, storage_ptr, pad(16)] movdn.9 drop - # => [EMPTY_WORD, EMPTY_WORD, total_inputs, pad(8)] + # => [EMPTY_WORD, EMPTY_WORD, num_storage_items, pad(8)] - mem_loadw_be.8 - # => [SCRIPT_ROOT, EMPTY_WORD, total_inputs, pad(8)] + mem_loadw_le.8 + # => [SCRIPT_ROOT, EMPTY_WORD, num_storage_items, pad(8)] - swapw mem_loadw_be.12 - # => [SERIAL_NUM, SCRIPT_ROOT, total_inputs, pad(8)] + swapw mem_loadw_le.12 + # => [SERIAL_NUM, SCRIPT_ROOT, num_storage_items, pad(8)] - # compute variable length note inputs for the output note - movup.8 sub.MINT_NOTE_MIN_NUM_INPUTS_PUBLIC - # => [num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT, pad(8)] + # compute variable length note storage for the output note + movup.8 sub.MINT_NOTE_MIN_NUM_STORAGE_ITEMS_PUBLIC + # => [num_output_note_storage, SERIAL_NUM, SCRIPT_ROOT, pad(8)] - push.OUTPUT_PUBLIC_NOTE_INPUTS_ADDR - # => [inputs_ptr, num_output_note_inputs, SERIAL_NUM, SCRIPT_ROOT, pad(8)] + push.OUTPUT_PUBLIC_NOTE_STORAGE_ADDR + # => [storage_ptr, num_output_note_storage, SERIAL_NUM, SCRIPT_ROOT, pad(8)] exec.note::build_recipient # => [RECIPIENT, pad(12)] @@ 
-102,13 +103,13 @@ pub proc main else # private output note creation - eq.MINT_NOTE_NUM_INPUTS_PRIVATE assert.err=ERR_MINT_WRONG_NUMBER_OF_INPUTS drop - # => [inputs_ptr, pad(16)] + eq.MINT_NOTE_NUM_STORAGE_ITEMS_PRIVATE assert.err=ERR_MINT_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS drop + # => [storage_ptr, pad(16)] drop # => [pad(16)] - mem_loadw_be.8 + mem_loadw_le.8 # => [RECIPIENT, pad(12)] # push note_type, and load tag and amount @@ -123,7 +124,7 @@ pub proc main call.network_faucet::distribute # => [note_idx, pad(18))] - padw mem_loadw_be.ATTACHMENT_ADDRESS + padw mem_loadw_le.ATTACHMENT_ADDRESS # => [ATTACHMENT, note_idx, pad(18))] mem_load.ATTACHMENT_KIND_ADDRESS diff --git a/crates/miden-standards/asm/standards/notes/p2id.masm b/crates/miden-standards/asm/standards/notes/p2id.masm index b315ad4e5a..99abab6204 100644 --- a/crates/miden-standards/asm/standards/notes/p2id.masm +++ b/crates/miden-standards/asm/standards/notes/p2id.masm @@ -1,17 +1,29 @@ use miden::protocol::active_account use miden::protocol::account_id use miden::protocol::active_note +use miden::protocol::note +use miden::protocol::output_note use miden::standards::wallets::basic->basic_wallet # ERRORS # ================================================================================================= -const ERR_P2ID_WRONG_NUMBER_OF_INPUTS="P2ID note expects exactly 2 note inputs" +const ERR_P2ID_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="P2ID note expects exactly 2 note storage items" const ERR_P2ID_TARGET_ACCT_MISMATCH="P2ID's target account address and transaction address do not match" +# CONSTANTS +# ================================================================================================= + +const STORAGE_PTR = 0 +const TARGET_ACCOUNT_ID_SUFFIX_PTR = STORAGE_PTR +const TARGET_ACCOUNT_ID_PREFIX_PTR = STORAGE_PTR + 1 + +# PROCEDURES +# ================================================================================================= + #! 
Pay-to-ID script: adds all assets from the note to the account, assuming ID of the account -#! matches target account ID specified by the note inputs. +#! matches target account ID specified by the note storage. #! #! Requires that the account exposes: #! - miden::standards::wallets::basic::receive_asset procedure. @@ -19,30 +31,33 @@ const ERR_P2ID_TARGET_ACCT_MISMATCH="P2ID's target account address and transacti #! Inputs: [] #! Outputs: [] #! -#! Note inputs are assumed to be as follows: +#! Note storage is assumed to be as follows: #! - target_account_id is the ID of the account for which the note is intended. #! #! Panics if: #! - Account does not expose miden::standards::wallets::basic::receive_asset procedure. -#! - Account ID of executing account is not equal to the Account ID specified via note inputs. +#! - Account ID of executing account is not equal to the Account ID specified via note storage. #! - The same non-fungible asset already exists in the account. #! - Adding a fungible asset would result in amount overflow, i.e., the total amount would be #! greater than 2^63. 
+@note_script pub proc main - # store the note inputs to memory starting at address 0 - padw push.0 exec.active_note::get_inputs - # => [num_inputs, inputs_ptr, EMPTY_WORD] + # store the note storage to memory starting at address 0 + push.STORAGE_PTR exec.active_note::get_storage + # => [num_storage_items, storage_ptr] - # make sure the number of inputs is 2 - eq.2 assert.err=ERR_P2ID_WRONG_NUMBER_OF_INPUTS - # => [inputs_ptr, EMPTY_WORD] + # make sure the number of storage items is 2 + eq.2 assert.err=ERR_P2ID_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS + # => [storage_ptr] - # read the target account ID from the note inputs - mem_loadw_be drop drop - # => [target_account_id_prefix, target_account_id_suffix] + # read the target account ID from the note storage + drop + mem_load.TARGET_ACCOUNT_ID_PREFIX_PTR + mem_load.TARGET_ACCOUNT_ID_SUFFIX_PTR + # => [target_account_id_suffix, target_account_id_prefix] exec.active_account::get_id - # => [account_id_prefix, account_id_suffix, target_account_id_prefix, target_account_id_suffix, ...] + # => [account_id_suffix, account_id_prefix, target_account_id_suffix, target_account_id_prefix] # ensure account_id = target_account_id, fails otherwise exec.account_id::is_equal assert.err=ERR_P2ID_TARGET_ACCT_MISMATCH @@ -51,3 +66,52 @@ pub proc main exec.basic_wallet::add_assets_to_account # => [] end + +#! Creates a new P2ID output note from the given inputs. +#! +#! This procedure handles: +#! - Writing note storage to memory in the expected layout ([suffix, prefix] to match +#! existing P2ID storage format) +#! - Obtaining the note script root via procref +#! - Building the recipient and creating the note +#! +#! Inputs: [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] +#! Outputs: [note_idx] +#! +#! Where: +#! - target_id_suffix is the suffix felt of the target account ID. +#! - target_id_prefix is the prefix felt of the target account ID. +#! - tag is the note tag to be included in the note. +#! 
- note_type is the storage type of the note (1 = public, 2 = private). +#! - SERIAL_NUM is the serial number of the note (4 elements). +#! - note_idx is the index of the created note. +#! +#! Invocation: exec +@locals(2) +pub proc new + # => [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] + + loc_store.TARGET_ACCOUNT_ID_SUFFIX_PTR loc_store.TARGET_ACCOUNT_ID_PREFIX_PTR + # => [tag, note_type, SERIAL_NUM] + + movdn.5 movdn.5 + # => [SERIAL_NUM, tag, note_type] + + procref.main + # => [SCRIPT_ROOT, SERIAL_NUM, tag, note_type] + + swapw + # => [SERIAL_NUM, SCRIPT_ROOT, tag, note_type] + + push.2 locaddr.STORAGE_PTR + # => [storage_ptr, num_storage_items=2, SERIAL_NUM, SCRIPT_ROOT, tag, note_type] + + exec.note::build_recipient + # => [RECIPIENT, tag, note_type] + + movup.5 movup.5 + # => [tag, note_type, RECIPIENT] + + exec.output_note::create + # => [note_idx] +end diff --git a/crates/miden-standards/asm/standards/notes/p2ide.masm b/crates/miden-standards/asm/standards/notes/p2ide.masm index 1345bc4acb..f476232e06 100644 --- a/crates/miden-standards/asm/standards/notes/p2ide.masm +++ b/crates/miden-standards/asm/standards/notes/p2ide.masm @@ -7,7 +7,7 @@ use miden::standards::wallets::basic->basic_wallet # ERRORS # ================================================================================================= -const ERR_P2IDE_WRONG_NUMBER_OF_INPUTS="P2IDE note expects exactly 4 note inputs" +const ERR_P2IDE_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="P2IDE note expects exactly 4 note storage items" const ERR_P2IDE_RECLAIM_ACCT_IS_NOT_SENDER="failed to reclaim P2IDE note because the reclaiming account is not the sender" @@ -24,7 +24,7 @@ const ERR_P2IDE_TIMELOCK_HEIGHT_NOT_REACHED="failed to consume P2IDE note becaus #! #! Inputs: [current_block_height, timelock_block_height] #! 
Outputs: [current_block_height] -proc verify_unlocked +proc assert_unlocked dup movdn.2 # => [current_block_height, timelock_block_height, current_block_height] @@ -37,7 +37,7 @@ end #! #! Checks if P2IDE reclaim is enabled and if true, if reclaim height has been reached. #! -#! Inputs: [account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height] +#! Inputs: [account_id_suffix, account_id_prefix, current_block_height, reclaim_block_height] #! Outputs: [] #! #! Panics if: @@ -47,18 +47,18 @@ end proc reclaim_note # check that the reclaim of the active note is enabled movup.3 dup neq.0 assert.err=ERR_P2IDE_RECLAIM_DISABLED - # => [reclaim_block_height, account_id_prefix, account_id_suffix, current_block_height] + # => [reclaim_block_height, account_id_suffix, account_id_prefix, current_block_height] # now check that sender is allowed to reclaim, reclaim block height <= current block height movup.3 - # => [current_block_height, reclaim_block_height, account_id_prefix, account_id_suffix] + # => [current_block_height, reclaim_block_height, account_id_suffix, account_id_prefix] lte assert.err=ERR_P2IDE_RECLAIM_HEIGHT_NOT_REACHED - # => [account_id_prefix, account_id_suffix] + # => [account_id_suffix, account_id_prefix] # if active account is not the target, we need to ensure it is the sender exec.active_note::get_sender - # => [sender_account_id_prefix, sender_account_id_suffix, account_id_prefix, account_id_suffix] + # => [sender_account_id_suffix, sender_account_id_prefix, account_id_suffix, account_id_prefix] # ensure active account ID = sender account ID exec.account_id::is_equal assert.err=ERR_P2IDE_RECLAIM_ACCT_IS_NOT_SENDER @@ -84,7 +84,7 @@ end #! Inputs: [] #! Outputs: [] #! -#! Note inputs are assumed to be as follows: +#! Note storage is assumed to be as follows: #! - target_account_id is the ID of the account for which the note is intended. #! - reclaim_block_height is the block height at which the note can be reclaimed by the sender. #! 
- timelock_block_height is the block height at which the note can be consumed by the target. @@ -100,34 +100,38 @@ end #! - The same non-fungible asset already exists in the account. #! - Adding a fungible asset would result in an amount overflow, i.e., the total amount would be #! greater than 2^63. +@note_script pub proc main - # store the note inputs to memory starting at address 0 - push.0 exec.active_note::get_inputs - # => [num_inputs, inputs_ptr] + # store the note storage to memory starting at address 0 + push.0 exec.active_note::get_storage + # => [num_storage_items, storage_ptr] - # make sure the number of inputs is 4 - eq.4 assert.err=ERR_P2IDE_WRONG_NUMBER_OF_INPUTS - # => [inputs_ptr] + # make sure the number of storage items is 4 + eq.4 assert.err=ERR_P2IDE_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS + # => [storage_ptr] - # read the reclaim block height, timelock_block_height, and target account ID from the note inputs - mem_loadw_be - # => [timelock_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # read the target account ID, reclaim block height, and timelock_block_height from the note storage + mem_loadw_le + # => [target_account_id_suffix, target_account_id_prefix, reclaim_block_height, timelock_block_height] + + movup.3 + # => [timelock_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] # read the current block number exec.tx::get_block_number - # => [current_block_height, timelock_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # => [current_block_height, timelock_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] - # fails if note is locked - exec.verify_unlocked - # => [current_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # assert note is unlocked + exec.assert_unlocked + # => [current_block_height, target_account_id_suffix, target_account_id_prefix, 
reclaim_block_height] # get active account id exec.active_account::get_id dup.1 dup.1 - # => [account_id_prefix, account_id_suffix, account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height, target_account_id_prefix, target_account_id_suffix] + # => [account_id_suffix, account_id_prefix, account_id_suffix, account_id_prefix, current_block_height, target_account_id_suffix, target_account_id_prefix, reclaim_block_height] # determine if the active account is the target account - movup.7 movup.7 exec.account_id::is_equal - # => [is_target, account_id_prefix, account_id_suffix, current_block_height, reclaim_block_height] + movup.6 movup.6 exec.account_id::is_equal + # => [is_target, account_id_suffix, account_id_prefix, current_block_height, reclaim_block_height] if.true # we can safely consume the note since the active account is the target of the note diff --git a/crates/miden-standards/asm/standards/notes/swap.masm b/crates/miden-standards/asm/standards/notes/swap.masm index 8c95668cd1..cedb7a6236 100644 --- a/crates/miden-standards/asm/standards/notes/swap.masm +++ b/crates/miden-standards/asm/standards/notes/swap.masm @@ -1,29 +1,31 @@ use miden::protocol::active_note +use miden::protocol::asset use miden::protocol::output_note use miden::standards::wallets::basic->wallet # CONSTANTS # ================================================================================================= -const SWAP_NOTE_INPUTS_NUMBER=16 +const SWAP_NOTE_NUM_STORAGE_ITEMS=20 -const PAYBACK_NOTE_TYPE_ADDRESS=0 -const PAYBACK_NOTE_TAG_ADDRESS=1 -const ATTACHMENT_KIND_ADDRESS=2 -const ATTACHMENT_SCHEME_ADDRESS=3 -const ATTACHMENT_ADDRESS=4 -const REQUESTED_ASSET_ADDRESS=8 -const PAYBACK_RECIPIENT_ADDRESS=12 +const PAYBACK_NOTE_TYPE_PTR=0 +const PAYBACK_NOTE_TAG_PTR=1 +const ATTACHMENT_KIND_PTR=2 +const ATTACHMENT_SCHEME_PTR=3 +const ATTACHMENT_PTR=4 +const REQUESTED_ASSET_PTR=8 +const PAYBACK_RECIPIENT_PTR=16 +const ASSET_PTR=20 # ERRORS # 
================================================================================================= -const ERR_SWAP_WRONG_NUMBER_OF_INPUTS="SWAP script expects exactly 16 note inputs" +const ERR_SWAP_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS="SWAP script expects exactly 20 note storage items" const ERR_SWAP_WRONG_NUMBER_OF_ASSETS="SWAP script requires exactly 1 note asset" #! Swap script: adds an asset from the note into consumers account and -#! creates a note consumable by note issuer containing requested ASSET. +#! creates a note consumable by note issuer containing requested asset. #! #! Requires that the account exposes: #! - miden::standards::wallets::basic::receive_asset procedure. @@ -32,13 +34,14 @@ const ERR_SWAP_WRONG_NUMBER_OF_ASSETS="SWAP script requires exactly 1 note asset #! Inputs: [ARGS] #! Outputs: [] #! -#! Note inputs are assumed to be as follows: +#! Note storage is assumed to be as follows: #! - payback_note_type #! - payback_note_tag #! - attachment_kind #! - attachment_scheme #! - ATTACHMENT -#! - REQUESTED_ASSET +#! - REQUESTED_ASSET_KEY +#! - REQUESTED_ASSET_VALUE #! - PAYBACK_RECIPIENT #! #! Panics if: @@ -49,6 +52,7 @@ const ERR_SWAP_WRONG_NUMBER_OF_ASSETS="SWAP script requires exactly 1 note asset #! greater than 2^63. #! - the attachment kind or scheme does not fit into a u32. #! - the attachment kind is an unknown variant. 
+@note_script pub proc main # dropping note args dropw @@ -56,71 +60,64 @@ pub proc main # --- create a payback note with the requested asset ---------------- - # store note inputs into memory starting at address 0 - push.0 exec.active_note::get_inputs - # => [num_inputs, inputs_ptr] + # store note storage into memory starting at address 0 + push.0 exec.active_note::get_storage + # => [num_storage_items, storage_ptr] - # check number of inputs - eq.SWAP_NOTE_INPUTS_NUMBER assert.err=ERR_SWAP_WRONG_NUMBER_OF_INPUTS + # check number of storage items + eq.SWAP_NOTE_NUM_STORAGE_ITEMS assert.err=ERR_SWAP_UNEXPECTED_NUMBER_OF_STORAGE_ITEMS drop # => [] - mem_loadw_be.REQUESTED_ASSET_ADDRESS - # => [REQUESTED_ASSET] - - padw mem_loadw_be.PAYBACK_RECIPIENT_ADDRESS - # => [PAYBACK_NOTE_RECIPIENT, REQUESTED_ASSET] + padw mem_loadw_le.PAYBACK_RECIPIENT_PTR + # => [PAYBACK_NOTE_RECIPIENT] # load payback P2ID details - mem_load.PAYBACK_NOTE_TYPE_ADDRESS - mem_load.PAYBACK_NOTE_TAG_ADDRESS - # => [tag, note_type, PAYBACK_NOTE_RECIPIENT, REQUESTED_ASSET] + mem_load.PAYBACK_NOTE_TYPE_PTR + mem_load.PAYBACK_NOTE_TAG_PTR + # => [tag, note_type, PAYBACK_NOTE_RECIPIENT] # create payback P2ID note exec.output_note::create - # => [note_idx, REQUESTED_ASSET] + # => [note_idx] - movdn.4 - # => [REQUESTED_ASSET, note_idx] + padw push.0.0.0 dup.7 + # => [note_idx, pad(7), note_idx] - # padding stack with 11 zeros - repeat.11 - push.0 - movdn.5 - end - # => [REQUESTED_ASSET, note_idx, pad(11)] + push.REQUESTED_ASSET_PTR exec.asset::load + # => [REQUESTED_ASSET_KEY, REQUESTED_ASSET_VALUE, note_idx, pad(7), note_idx] # move asset to the note call.wallet::move_asset_to_note - # => [REQUESTED_ASSET, note_idx, pad(11)] + # => [pad(16), note_idx] dropw - # => [note_idx, pad(11)] + mem_loadw_le.ATTACHMENT_PTR + # => [ATTACHMENT, pad(8), note_idx] - mem_loadw_be.ATTACHMENT_ADDRESS - mem_load.ATTACHMENT_KIND_ADDRESS - mem_load.ATTACHMENT_SCHEME_ADDRESS - movup.6 - # => [note_idx, attachment_scheme, 
attachment_kind, ATTACHMENT] + mem_load.ATTACHMENT_KIND_PTR + mem_load.ATTACHMENT_SCHEME_PTR + movup.14 + # => [note_idx, attachment_scheme, attachment_kind, ATTACHMENT, pad(8)] exec.output_note::set_attachment - # => [pad(12)] + # => [pad(8)] # --- move assets from the SWAP note into the account ------------------------- - # store the number of note assets to memory starting at address 0 - push.0 exec.active_note::get_assets - # => [num_assets, ptr, pad(12)] + # store the number of note assets to memory starting at address ASSET_PTR + push.ASSET_PTR exec.active_note::get_assets + # => [num_assets, asset_ptr, pad(8)] # make sure the number of assets is 1 assert.err=ERR_SWAP_WRONG_NUMBER_OF_ASSETS - # => [ptr, pad(12)] + # => [asset_ptr, pad(8)] - # load the ASSET - mem_loadw_be - # => [ASSET, pad(12)] + # load asset + exec.asset::load + # => [ASSET_KEY, ASSET_VALUE, pad(8)] - # add the ASSET to the account + # add the asset to the account call.wallet::receive_asset # => [pad(16)] diff --git a/crates/miden-standards/asm/standards/wallets/basic.masm b/crates/miden-standards/asm/standards/wallets/basic.masm index 837803c09c..ad06b2b1a9 100644 --- a/crates/miden-standards/asm/standards/wallets/basic.masm +++ b/crates/miden-standards/asm/standards/wallets/basic.masm @@ -1,3 +1,5 @@ +use ::miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET +use ::miden::protocol::asset::ASSET_SIZE use miden::protocol::native_account use miden::protocol::output_note use miden::protocol::active_note @@ -8,11 +10,12 @@ const PUBLIC_NOTE=1 #! Adds the provided asset to the active account. #! -#! Inputs: [ASSET, pad(12)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] #! Outputs: [pad(16)] #! #! Where: -#! - ASSET is the asset to be received, can be fungible or non-fungible +#! - ASSET_KEY is the vault key of the received asset. +#! - ASSET_VALUE is the value of the received asset. #! #! Panics if: #! - the same non-fungible asset already exists in the account. 
@@ -22,7 +25,7 @@ const PUBLIC_NOTE=1 #! Invocation: call pub proc receive_asset exec.native_account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] # drop the final asset dropw @@ -36,12 +39,13 @@ end #! the contents of the `PAD` elements shown below. It is the caller's responsibility to make sure #! these elements do not contain any meaningful data. #! -#! Inputs: [ASSET, note_idx, pad(11)] -#! Outputs: [ASSET, note_idx, pad(11)] +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] +#! Outputs: [pad(16)] #! #! Where: #! - note_idx is the index of the output note. -#! - ASSET is the fungible or non-fungible asset of interest. +#! - ASSET_KEY is the vault key of the asset to move to the note. +#! - ASSET_VALUE is the value of the asset to move to the note. #! #! Panics if: #! - the fungible asset is not found in the vault. @@ -50,71 +54,80 @@ end #! #! Invocation: call pub proc move_asset_to_note + dupw.1 dupw.1 + # => [ASSET_KEY, ASSET_VALUE, ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + # remove the asset from the account exec.native_account::remove_asset - # => [ASSET, note_idx, pad(11)] - - dupw dup.8 movdn.4 - # => [ASSET, note_idx, ASSET, note_idx, pad(11)] + dropw + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] exec.output_note::add_asset - # => [ASSET, note_idx, pad(11)] + # => [pad(16)] end #! Adds all assets from the active note to the native account's vault. #! #! Inputs: [] #! 
Outputs: [] -@locals(1024) +@locals(2048) pub proc add_assets_to_account # write assets to local memory starting at offset 0 - # we have allocated 4 * MAX_ASSETS_PER_NOTE number of locals so all assets should fit + # we have allocated ASSET_SIZE * MAX_ASSETS_PER_NOTE number of locals so all assets should fit # since the asset memory will be overwritten, we don't have to initialize the locals to zero locaddr.0 exec.active_note::get_assets # => [num_of_assets, ptr = 0] # compute the pointer at which we should stop iterating - mul.4 dup.1 add + mul.ASSET_SIZE dup.1 add # => [end_ptr, ptr] # pad the stack and move the pointer to the top - padw movup.5 - # => [ptr, EMPTY_WORD, end_ptr] + padw padw movup.9 + # => [ptr, pad(8), end_ptr] # loop if the amount of assets is non-zero - dup dup.6 neq - # => [should_loop, ptr, EMPTY_WORD, end_ptr] + dup dup.10 neq + # => [should_loop, ptr, pad(8), end_ptr] while.true - # => [ptr, EMPTY_WORD, end_ptr] + # => [ptr, pad(8), end_ptr] # save the pointer so that we can use it later - dup movdn.5 - # => [ptr, EMPTY_WORD, ptr, end_ptr] + dup movdn.9 + # => [ptr, pad(8), ptr, end_ptr] + + # load the asset value + add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le swapw + # => [EMPTY_WORD, ASSET_VALUE, ptr, end_ptr] - # load the asset - mem_loadw_be - # => [ASSET, ptr, end_ptr] + # load the asset key + dup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE, ptr, end_ptr] # pad the stack before call - padw swapw padw padw swapdw - # => [ASSET, pad(12), ptr, end_ptr] + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(8), ptr, end_ptr] # add asset to the account call.receive_asset # => [pad(16), ptr, end_ptr] # clean the stack after call - dropw dropw dropw - # => [EMPTY_WORD, ptr, end_ptr] + dropw dropw + # => [pad(8), ptr, end_ptr] + + # increment the pointer + movup.8 add.ASSET_SIZE dup + # => [ptr+ASSET_SIZE, ptr+ASSET_SIZE, pad(8), end_ptr] - # increment the pointer and continue looping if ptr != end_ptr - movup.4 add.4 dup dup.6 neq - # => 
[should_loop, ptr+4, EMPTY_WORD, end_ptr] + # continue looping if ptr != end_ptr + dup.10 neq + # => [should_loop, ptr+ASSET_SIZE, pad(8), end_ptr] end - # => [ptr', EMPTY_WORD, end_ptr] + # => [ptr', pad(8), end_ptr] # clear the stack - drop dropw drop + drop dropw dropw drop # => [] end diff --git a/crates/miden-standards/build.rs b/crates/miden-standards/build.rs index f819b74ae2..d41e453948 100644 --- a/crates/miden-standards/build.rs +++ b/crates/miden-standards/build.rs @@ -3,27 +3,21 @@ use std::path::Path; use fs_err as fs; use miden_assembly::diagnostics::{IntoDiagnostic, NamedSource, Result, WrapErr}; -use miden_assembly::utils::Serializable; -use miden_assembly::{Assembler, Library, Report}; +use miden_assembly::{Assembler, Library}; use miden_protocol::transaction::TransactionKernel; // CONSTANTS // ================================================================================================ -/// Defines whether the build script should generate files in `/src`. -/// The docs.rs build pipeline has a read-only filesystem, so we have to avoid writing to `src`, -/// otherwise the docs will fail to build there. Note that writing to `OUT_DIR` is fine. -const BUILD_GENERATED_FILES_IN_SRC: bool = option_env!("BUILD_GENERATED_FILES_IN_SRC").is_some(); - const ASSETS_DIR: &str = "assets"; const ASM_DIR: &str = "asm"; const ASM_STANDARDS_DIR: &str = "standards"; -const ASM_NOTE_SCRIPTS_DIR: &str = "note_scripts"; const ASM_ACCOUNT_COMPONENTS_DIR: &str = "account_components"; const STANDARDS_LIB_NAMESPACE: &str = "miden::standards"; +const ACCOUNT_COMPONENTS_LIB_NAMESPACE: &str = "miden::standards::components"; -const STANDARDS_ERRORS_FILE: &str = "src/errors/standards.rs"; +const STANDARDS_ERRORS_RS_FILE: &str = "standards_errors.rs"; const STANDARDS_ERRORS_ARRAY_NAME: &str = "STANDARDS_ERRORS"; // PRE-PROCESSING @@ -31,13 +25,11 @@ const STANDARDS_ERRORS_ARRAY_NAME: &str = "STANDARDS_ERRORS"; /// Read and parse the contents from `./asm`. 
/// - Compiles the contents of asm/standards directory into a Miden library file (.masl) under -/// standards namespace. -/// - Compiles the contents of asm/note_scripts directory into individual .masb files. +/// standards namespace. Note scripts are included in this library. /// - Compiles the contents of asm/account_components directory into individual .masl files. fn main() -> Result<()> { // re-build when the MASM code changes println!("cargo::rerun-if-changed={ASM_DIR}/"); - println!("cargo::rerun-if-env-changed=BUILD_GENERATED_FILES_IN_SRC"); // Copies the MASM code to the build directory let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); @@ -52,20 +44,12 @@ fn main() -> Result<()> { // set target directory to {OUT_DIR}/assets let target_dir = Path::new(&build_dir).join(ASSETS_DIR); - // compile standards library - let standards_lib = - compile_standards_lib(&source_dir, &target_dir, TransactionKernel::assembler())?; + let mut assembler = TransactionKernel::assembler().with_warnings_as_errors(true); + // compile standards library (includes note scripts) + let standards_lib = compile_standards_lib(&source_dir, &target_dir, assembler.clone())?; - let mut assembler = TransactionKernel::assembler(); assembler.link_static_library(standards_lib)?; - // compile note scripts - compile_note_scripts( - &source_dir.join(ASM_NOTE_SCRIPTS_DIR), - &target_dir.join(ASM_NOTE_SCRIPTS_DIR), - assembler.clone(), - )?; - // compile account components compile_account_components( &source_dir.join(ASM_ACCOUNT_COMPONENTS_DIR), @@ -73,7 +57,7 @@ fn main() -> Result<()> { assembler, )?; - generate_error_constants(&source_dir)?; + generate_error_constants(&source_dir, &build_dir)?; Ok(()) } @@ -99,38 +83,6 @@ fn compile_standards_lib( Ok(standards_lib) } -// COMPILE EXECUTABLE MODULES -// ================================================================================================ - -/// Reads all MASM files from the "{source_dir}", complies each file individually into a MASB 
-/// file, and stores the compiled files into the "{target_dir}". -/// -/// The source files are expected to contain executable programs. -fn compile_note_scripts(source_dir: &Path, target_dir: &Path, assembler: Assembler) -> Result<()> { - fs::create_dir_all(target_dir) - .into_diagnostic() - .wrap_err("failed to create note_scripts directory")?; - - for masm_file_path in shared::get_masm_files(source_dir).unwrap() { - // read the MASM file, parse it, and serialize the parsed AST to bytes - let code = assembler.clone().assemble_program(masm_file_path.clone())?; - - let bytes = code.to_bytes(); - - let masm_file_name = masm_file_path - .file_name() - .expect("file name should exist") - .to_str() - .ok_or_else(|| Report::msg("failed to convert file name to &str"))?; - let mut masb_file_path = target_dir.join(masm_file_name); - - // write the binary MASB to the output dir - masb_file_path.set_extension("masb"); - fs::write(masb_file_path, bytes).unwrap(); - } - Ok(()) -} - // COMPILE ACCOUNT COMPONENTS // ================================================================================================ @@ -156,7 +108,19 @@ fn compile_account_components( let component_source_code = fs::read_to_string(&masm_file_path) .expect("reading the component's MASM source code should succeed"); - let named_source = NamedSource::new(component_name.clone(), component_source_code); + // Build full library path from directory structure: + // e.g. 
faucets/basic_fungible_faucet.masm -> + // miden::standards::components::faucets::basic_fungible_faucet + let relative_path = masm_file_path + .strip_prefix(source_dir) + .expect("masm file should be inside source dir"); + let mut library_path = ACCOUNT_COMPONENTS_LIB_NAMESPACE.to_owned(); + for component in relative_path.with_extension("").components() { + let part = component.as_os_str().to_str().expect("valid UTF-8"); + library_path.push_str("::"); + library_path.push_str(part); + } + let named_source = NamedSource::new(library_path, component_source_code); let component_library = assembler .clone() @@ -207,14 +171,9 @@ fn compile_account_components( /// The function ensures that a constant is not defined twice, except if their error message is the /// same. This can happen across multiple files. /// -/// Because the error files will be written to ./src/errors, this should be a no-op if ./src is -/// read-only. To enable writing to ./src, set the `BUILD_GENERATED_FILES_IN_SRC` environment -/// variable. -fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { - if !BUILD_GENERATED_FILES_IN_SRC { - return Ok(()); - } - +/// The generated file is written to `build_dir` (i.e. `OUT_DIR`) and included via `include!` +/// in the source. +fn generate_error_constants(asm_source_dir: &Path, build_dir: &str) -> Result<()> { // Miden standards errors // ------------------------------------------ @@ -222,7 +181,7 @@ fn generate_error_constants(asm_source_dir: &Path) -> Result<()> { .context("failed to extract all masm errors")?; shared::generate_error_file( shared::ErrorModule { - file_name: STANDARDS_ERRORS_FILE, + file_path: Path::new(build_dir).join(STANDARDS_ERRORS_RS_FILE), array_name: STANDARDS_ERRORS_ARRAY_NAME, is_crate_local: false, }, @@ -422,7 +381,7 @@ mod shared { } /// Generates the content of an error file for the given category and the set of errors and - /// writes it to the category's file. 
+ /// writes it to the file at the path specified in the module. pub fn generate_error_file(module: ErrorModule, errors: Vec) -> Result<()> { let mut output = String::new(); @@ -469,7 +428,7 @@ mod shared { .into_diagnostic()?; } - std::fs::write(module.file_name, output).into_diagnostic()?; + fs::write(module.file_path, output).into_diagnostic()?; Ok(()) } @@ -487,9 +446,9 @@ mod shared { pub message: String, } - #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Debug, Clone)] pub struct ErrorModule { - pub file_name: &'static str, + pub file_path: PathBuf, pub array_name: &'static str, pub is_crate_local: bool, } diff --git a/crates/miden-standards/src/account/access/mod.rs b/crates/miden-standards/src/account/access/mod.rs new file mode 100644 index 0000000000..f7c58c875b --- /dev/null +++ b/crates/miden-standards/src/account/access/mod.rs @@ -0,0 +1,20 @@ +use miden_protocol::account::{AccountComponent, AccountId}; + +pub mod ownable2step; + +/// Access control configuration for account components. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AccessControl { + /// Uses two-step ownership transfer with the provided initial owner. 
+    Ownable2Step { owner: AccountId },
+}
+
+impl From<AccessControl> for AccountComponent {
+    fn from(access_control: AccessControl) -> Self {
+        match access_control {
+            AccessControl::Ownable2Step { owner } => Ownable2Step::new(owner).into(),
+        }
+    }
+}
+
+pub use ownable2step::{Ownable2Step, Ownable2StepError};
diff --git a/crates/miden-standards/src/account/access/ownable2step.rs b/crates/miden-standards/src/account/access/ownable2step.rs
new file mode 100644
index 0000000000..c5356394ab
--- /dev/null
+++ b/crates/miden-standards/src/account/access/ownable2step.rs
@@ -0,0 +1,188 @@
+use miden_protocol::account::component::{
+    AccountComponentMetadata,
+    FeltSchema,
+    StorageSchema,
+    StorageSlotSchema,
+};
+use miden_protocol::account::{
+    AccountComponent,
+    AccountId,
+    AccountStorage,
+    AccountType,
+    StorageSlot,
+    StorageSlotName,
+};
+use miden_protocol::errors::AccountIdError;
+use miden_protocol::utils::sync::LazyLock;
+use miden_protocol::{Felt, Word};
+
+use crate::account::components::ownable2step_library;
+
+static OWNER_CONFIG_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::access::ownable2step::owner_config")
+        .expect("storage slot name should be valid")
+});
+
+/// Two-step ownership management for account components.
+///
+/// This struct holds the current owner and any nominated (pending) owner. A nominated owner
+/// must explicitly accept the transfer before it takes effect, preventing accidental transfers
+/// to incorrect addresses.
+///
+/// ## Storage Layout
+///
+/// The ownership data is stored in a single word:
+///
+/// ```text
+/// Word: [owner_suffix, owner_prefix, nominated_owner_suffix, nominated_owner_prefix]
+///        word[0]       word[1]       word[2]                 word[3]
+/// ```
+pub struct Ownable2Step {
+    /// The current owner of the component. `None` when ownership has been renounced.
+    owner: Option<AccountId>,
+    nominated_owner: Option<AccountId>,
+}
+
+impl Ownable2Step {
+    /// The name of the component.
+    pub const NAME: &'static str = "miden::standards::components::access::ownable2step";
+
+    // CONSTRUCTORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Creates a new [`Ownable2Step`] with the given owner and no nominated owner.
+    pub fn new(owner: AccountId) -> Self {
+        Self {
+            owner: Some(owner),
+            nominated_owner: None,
+        }
+    }
+
+    /// Reads ownership data from account storage, validating any non-zero account IDs.
+    ///
+    /// Returns an error if either owner or nominated owner contains an invalid (but non-zero)
+    /// account ID.
+    pub fn try_from_storage(storage: &AccountStorage) -> Result<Self, Ownable2StepError> {
+        let word: Word = storage
+            .get_item(Self::slot_name())
+            .map_err(Ownable2StepError::StorageLookupFailed)?;
+
+        Self::try_from_word(word)
+    }
+
+    /// Reconstructs an [`Ownable2Step`] from a raw storage word.
+    ///
+    /// Format: `[owner_suffix, owner_prefix, nominated_suffix, nominated_prefix]`
+    pub fn try_from_word(word: Word) -> Result<Self, Ownable2StepError> {
+        let owner = account_id_from_felt_pair(word[0], word[1])
+            .map_err(Ownable2StepError::InvalidOwnerId)?;
+
+        let nominated_owner = account_id_from_felt_pair(word[2], word[3])
+            .map_err(Ownable2StepError::InvalidNominatedOwnerId)?;
+
+        Ok(Self { owner, nominated_owner })
+    }
+
+    // PUBLIC ACCESSORS
+    // --------------------------------------------------------------------------------------------
+
+    /// Returns the [`StorageSlotName`] where ownership data is stored.
+    pub fn slot_name() -> &'static StorageSlotName {
+        &OWNER_CONFIG_SLOT_NAME
+    }
+
+    /// Returns the storage slot schema for the ownership configuration slot.
+    pub fn slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::slot_name().clone(),
+            StorageSlotSchema::value(
+                "Ownership data (owner and nominated owner)",
+                [
+                    FeltSchema::felt("owner_suffix"),
+                    FeltSchema::felt("owner_prefix"),
+                    FeltSchema::felt("nominated_suffix"),
+                    FeltSchema::felt("nominated_prefix"),
+                ],
+            ),
+        )
+    }
+
+    /// Returns the current owner, or `None` if ownership has been renounced.
+    pub fn owner(&self) -> Option<AccountId> {
+        self.owner
+    }
+
+    /// Returns the nominated owner, or `None` if no transfer is in progress.
+    pub fn nominated_owner(&self) -> Option<AccountId> {
+        self.nominated_owner
+    }
+
+    /// Converts this ownership data into a [`StorageSlot`].
+    pub fn to_storage_slot(&self) -> StorageSlot {
+        StorageSlot::with_value(Self::slot_name().clone(), self.to_word())
+    }
+
+    /// Converts this ownership data into a raw [`Word`].
+    pub fn to_word(&self) -> Word {
+        let (owner_suffix, owner_prefix) = match self.owner {
+            Some(id) => (id.suffix(), id.prefix().as_felt()),
+            None => (Felt::ZERO, Felt::ZERO),
+        };
+        let (nominated_suffix, nominated_prefix) = match self.nominated_owner {
+            Some(id) => (id.suffix(), id.prefix().as_felt()),
+            None => (Felt::ZERO, Felt::ZERO),
+        };
+        [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix].into()
+    }
+
+    /// Returns the [`AccountComponentMetadata`] for this component.
+    pub fn component_metadata() -> AccountComponentMetadata {
+        let storage_schema =
+            StorageSchema::new([Self::slot_schema()]).expect("storage schema should be valid");
+
+        AccountComponentMetadata::new(Self::NAME, AccountType::all())
+            .with_description("Two-step ownership management component")
+            .with_storage_schema(storage_schema)
+    }
+}
+
+impl From<Ownable2Step> for AccountComponent {
+    fn from(ownership: Ownable2Step) -> Self {
+        let storage_slot = ownership.to_storage_slot();
+        let metadata = Ownable2Step::component_metadata();
+
+        AccountComponent::new(ownable2step_library(), vec![storage_slot], metadata).expect(
+            "Ownable2Step component should satisfy the requirements of a valid account component",
+        )
+    }
+}
+
+// OWNABLE2STEP ERROR
+// ================================================================================================
+
+/// Errors that can occur when reading [`Ownable2Step`] data from storage.
+#[derive(Debug, thiserror::Error)]
+pub enum Ownable2StepError {
+    #[error("failed to read ownership slot from storage")]
+    StorageLookupFailed(#[source] miden_protocol::errors::AccountError),
+    #[error("invalid owner account ID in storage")]
+    InvalidOwnerId(#[source] AccountIdError),
+    #[error("invalid nominated owner account ID in storage")]
+    InvalidNominatedOwnerId(#[source] AccountIdError),
+}
+
+// HELPERS
+// ================================================================================================
+
+/// Constructs an `Option<AccountId>` from a suffix/prefix felt pair.
+/// Returns `Ok(None)` when both felts are zero (renounced / no nomination).
+fn account_id_from_felt_pair(
+    suffix: Felt,
+    prefix: Felt,
+) -> Result<Option<AccountId>, AccountIdError> {
+    if suffix == Felt::ZERO && prefix == Felt::ZERO {
+        Ok(None)
+    } else {
+        AccountId::try_from_elements(suffix, prefix).map(Some)
+    }
+}
diff --git a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak.rs b/crates/miden-standards/src/account/auth/ecdsa_k256_keccak.rs
deleted file mode 100644
index e26661972c..0000000000
--- a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-use miden_protocol::account::auth::PublicKeyCommitment;
-use miden_protocol::account::{AccountComponent, StorageSlot, StorageSlotName};
-use miden_protocol::utils::sync::LazyLock;
-
-use crate::account::components::ecdsa_k256_keccak_library;
-
-static ECDSA_PUBKEY_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
-    StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak::public_key")
-        .expect("storage slot name should be valid")
-});
-
-/// An [`AccountComponent`] implementing the ECDSA K256 Keccak signature scheme for authentication
-/// of transactions.
-///
-/// It reexports the procedures from `miden::standards::auth::ecdsa_k256_keccak`. When linking
-/// against this component, the `miden` library (i.e.
-/// [`ProtocolLib`](miden_protocol::ProtocolLib)) must be available to the assembler which is the
-/// case when using [`CodeBuilder`][builder]. The procedures of this component are:
-/// - `verify_signatures`, which can be used to verify a signature provided via the advice stack to
-///   authenticate a transaction.
-/// - `authenticate_transaction`, which can be used to authenticate a transaction using the ECDSA
-///   signature scheme.
-///
-/// This component supports all account types.
-///
-/// [builder]: crate::code_builder::CodeBuilder
-pub struct AuthEcdsaK256Keccak {
-    pub_key: PublicKeyCommitment,
-}
-
-impl AuthEcdsaK256Keccak {
-    /// Creates a new [`AuthEcdsaK256Keccak`] component with the given `public_key`.
-    pub fn new(pub_key: PublicKeyCommitment) -> Self {
-        Self { pub_key }
-    }
-
-    /// Returns the [`StorageSlotName`] where the public key is stored.
-    pub fn public_key_slot() -> &'static StorageSlotName {
-        &ECDSA_PUBKEY_SLOT_NAME
-    }
-}
-
-impl From<AuthEcdsaK256Keccak> for AccountComponent {
-    fn from(ecdsa: AuthEcdsaK256Keccak) -> Self {
-        AccountComponent::new(
-            ecdsa_k256_keccak_library(),
-            vec![StorageSlot::with_value(
-                AuthEcdsaK256Keccak::public_key_slot().clone(),
-                ecdsa.pub_key.into(),
-            )],
-        )
-        .expect("ecdsa component should satisfy the requirements of a valid account component")
-        .with_supports_all_types()
-    }
-}
diff --git a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak_multisig.rs b/crates/miden-standards/src/account/auth/ecdsa_k256_keccak_multisig.rs
deleted file mode 100644
index f29bf71732..0000000000
--- a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak_multisig.rs
+++ /dev/null
@@ -1,331 +0,0 @@
-use alloc::collections::BTreeSet;
-use alloc::vec::Vec;
-
-use miden_protocol::Word;
-use miden_protocol::account::auth::PublicKeyCommitment;
-use miden_protocol::account::{AccountComponent, StorageMap, StorageSlot, StorageSlotName};
-use miden_protocol::errors::AccountError;
-use miden_protocol::utils::sync::LazyLock;
-
-use crate::account::components::ecdsa_k256_keccak_multisig_library;
-
-static THRESHOLD_CONFIG_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
-    StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_multisig::threshold_config")
-        .expect("storage slot name should be valid")
-});
-
-static APPROVER_PUBKEYS_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
-    StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_multisig::approver_public_keys")
-        .expect("storage slot name should be valid")
-});
-
-static EXECUTED_TRANSACTIONS_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
-    StorageSlotName::new(
-        "miden::standards::auth::ecdsa_k256_keccak_multisig::executed_transactions",
-    )
-    .expect("storage slot name should be valid")
-});
-
-static PROCEDURE_THRESHOLDS_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
-    StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_multisig::procedure_thresholds")
-        .expect("storage slot name should be valid")
-});
-
-// MULTISIG AUTHENTICATION COMPONENT
-// ================================================================================================
-
-/// Configuration for [`AuthEcdsaK256KeccakMultisig`] component.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct AuthEcdsaK256KeccakMultisigConfig {
-    approvers: Vec<PublicKeyCommitment>,
-    default_threshold: u32,
-    proc_thresholds: Vec<(Word, u32)>,
-}
-
-impl AuthEcdsaK256KeccakMultisigConfig {
-    /// Creates a new configuration with the given approvers and a default threshold.
-    ///
-    /// The `default_threshold` must be at least 1 and at most the number of approvers.
-    pub fn new(
-        approvers: Vec<PublicKeyCommitment>,
-        default_threshold: u32,
-    ) -> Result<Self, AccountError> {
-        if default_threshold == 0 {
-            return Err(AccountError::other("threshold must be at least 1"));
-        }
-        if default_threshold > approvers.len() as u32 {
-            return Err(AccountError::other(
-                "threshold cannot be greater than number of approvers",
-            ));
-        }
-
-        // Check for duplicate approvers
-        if approvers.len() != approvers.iter().collect::<BTreeSet<_>>().len() {
-            return Err(AccountError::other("duplicate approver public keys are not allowed"));
-        }
-
-        Ok(Self {
-            approvers,
-            default_threshold,
-            proc_thresholds: vec![],
-        })
-    }
-
-    /// Attaches a per-procedure threshold map. Each procedure threshold must be at least 1 and
-    /// at most the number of approvers.
- pub fn with_proc_thresholds( - mut self, - proc_thresholds: Vec<(Word, u32)>, - ) -> Result { - for (_, threshold) in &proc_thresholds { - if *threshold == 0 { - return Err(AccountError::other("procedure threshold must be at least 1")); - } - if *threshold > self.approvers.len() as u32 { - return Err(AccountError::other( - "procedure threshold cannot be greater than number of approvers", - )); - } - } - self.proc_thresholds = proc_thresholds; - Ok(self) - } - - pub fn approvers(&self) -> &[PublicKeyCommitment] { - &self.approvers - } - - pub fn default_threshold(&self) -> u32 { - self.default_threshold - } - - pub fn proc_thresholds(&self) -> &[(Word, u32)] { - &self.proc_thresholds - } -} - -/// An [`AccountComponent`] implementing a multisig based on ECDSA signatures. -/// -/// It enforces a threshold of approver signatures for every transaction, with optional -/// per-procedure thresholds overrides. Non-uniform thresholds (especially a threshold of one) -/// should be used with caution for private multisig accounts, as a single approver could withhold -/// the new state from other approvers, effectively locking them out. -/// -/// The storage layout is: -/// - Slot 0(value): [threshold, num_approvers, 0, 0] -/// - Slot 1(map): A map with approver public keys (index -> pubkey) -/// - Slot 2(map): A map which stores executed transactions -/// - Slot 3(map): A map which stores procedure thresholds (PROC_ROOT -> threshold) -/// -/// This component supports all account types. -#[derive(Debug)] -pub struct AuthEcdsaK256KeccakMultisig { - config: AuthEcdsaK256KeccakMultisigConfig, -} - -impl AuthEcdsaK256KeccakMultisig { - /// Creates a new [`AuthEcdsaK256KeccakMultisig`] component from the provided configuration. - pub fn new(config: AuthEcdsaK256KeccakMultisigConfig) -> Result { - Ok(Self { config }) - } - - /// Returns the [`StorageSlotName`] where the threshold configuration is stored. 
- pub fn threshold_config_slot() -> &'static StorageSlotName { - &THRESHOLD_CONFIG_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the approver public keys are stored. - pub fn approver_public_keys_slot() -> &'static StorageSlotName { - &APPROVER_PUBKEYS_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the executed transactions are stored. - pub fn executed_transactions_slot() -> &'static StorageSlotName { - &EXECUTED_TRANSACTIONS_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the procedure thresholds are stored. - pub fn procedure_thresholds_slot() -> &'static StorageSlotName { - &PROCEDURE_THRESHOLDS_SLOT_NAME - } -} - -impl From for AccountComponent { - fn from(multisig: AuthEcdsaK256KeccakMultisig) -> Self { - let mut storage_slots = Vec::with_capacity(3); - - // Threshold config slot (value: [threshold, num_approvers, 0, 0]) - let num_approvers = multisig.config.approvers().len() as u32; - storage_slots.push(StorageSlot::with_value( - AuthEcdsaK256KeccakMultisig::threshold_config_slot().clone(), - Word::from([multisig.config.default_threshold(), num_approvers, 0, 0]), - )); - - // Approver public keys slot (map) - let map_entries = multisig - .config - .approvers() - .iter() - .enumerate() - .map(|(i, pub_key)| (Word::from([i as u32, 0, 0, 0]), (*pub_key).into())); - - // Safe to unwrap because we know that the map keys are unique. 
- storage_slots.push(StorageSlot::with_map( - AuthEcdsaK256KeccakMultisig::approver_public_keys_slot().clone(), - StorageMap::with_entries(map_entries).unwrap(), - )); - - // Executed transactions slot (map) - let executed_transactions = StorageMap::default(); - storage_slots.push(StorageSlot::with_map( - AuthEcdsaK256KeccakMultisig::executed_transactions_slot().clone(), - executed_transactions, - )); - - // Procedure thresholds slot (map: PROC_ROOT -> threshold) - let proc_threshold_roots = StorageMap::with_entries( - multisig - .config - .proc_thresholds() - .iter() - .map(|(proc_root, threshold)| (*proc_root, Word::from([*threshold, 0, 0, 0]))), - ) - .unwrap(); - storage_slots.push(StorageSlot::with_map( - AuthEcdsaK256KeccakMultisig::procedure_thresholds_slot().clone(), - proc_threshold_roots, - )); - - AccountComponent::new(ecdsa_k256_keccak_multisig_library(), storage_slots) - .expect("Multisig auth component should satisfy the requirements of a valid account component") - .with_supports_all_types() - } -} - -#[cfg(test)] -mod tests { - use alloc::string::ToString; - - use miden_protocol::Word; - use miden_protocol::account::AccountBuilder; - - use super::*; - use crate::account::wallets::BasicWallet; - - /// Test multisig component setup with various configurations - #[test] - fn test_multisig_component_setup() { - // Create test public keys - let pub_key_1 = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let pub_key_2 = PublicKeyCommitment::from(Word::from([2u32, 0, 0, 0])); - let pub_key_3 = PublicKeyCommitment::from(Word::from([3u32, 0, 0, 0])); - let approvers = vec![pub_key_1, pub_key_2, pub_key_3]; - let threshold = 2u32; - - // Create multisig component - let multisig_component = AuthEcdsaK256KeccakMultisig::new( - AuthEcdsaK256KeccakMultisigConfig::new(approvers.clone(), threshold) - .expect("invalid multisig config"), - ) - .expect("multisig component creation failed"); - - // Build account with multisig component - let account = 
AccountBuilder::new([0; 32]) - .with_auth_component(multisig_component) - .with_component(BasicWallet) - .build() - .expect("account building failed"); - - // Verify config slot: [threshold, num_approvers, 0, 0] - let config_slot = account - .storage() - .get_item(AuthEcdsaK256KeccakMultisig::threshold_config_slot()) - .expect("config storage slot access failed"); - assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); - - // Verify approver pub keys slot - for (i, expected_pub_key) in approvers.iter().enumerate() { - let stored_pub_key = account - .storage() - .get_map_item( - AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), - Word::from([i as u32, 0, 0, 0]), - ) - .expect("approver public key storage map access failed"); - assert_eq!(stored_pub_key, Word::from(*expected_pub_key)); - } - } - - /// Test multisig component with minimum threshold (1 of 1) - #[test] - fn test_multisig_component_minimum_threshold() { - let pub_key = PublicKeyCommitment::from(Word::from([42u32, 0, 0, 0])); - let approvers = vec![pub_key]; - let threshold = 1u32; - - let multisig_component = AuthEcdsaK256KeccakMultisig::new( - AuthEcdsaK256KeccakMultisigConfig::new(approvers.clone(), threshold) - .expect("invalid multisig config"), - ) - .expect("multisig component creation failed"); - - let account = AccountBuilder::new([0; 32]) - .with_auth_component(multisig_component) - .with_component(BasicWallet) - .build() - .expect("account building failed"); - - // Verify storage layout - let config_slot = account - .storage() - .get_item(AuthEcdsaK256KeccakMultisig::threshold_config_slot()) - .expect("config storage slot access failed"); - assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); - - let stored_pub_key = account - .storage() - .get_map_item( - AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), - Word::from([0u32, 0, 0, 0]), - ) - .expect("approver pub keys storage map access failed"); - assert_eq!(stored_pub_key, 
Word::from(pub_key)); - } - - /// Test multisig component error cases - #[test] - fn test_multisig_component_error_cases() { - let pub_key = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let approvers = vec![pub_key]; - - // Test threshold = 0 (should fail) - let result = AuthEcdsaK256KeccakMultisigConfig::new(approvers.clone(), 0); - assert!(result.unwrap_err().to_string().contains("threshold must be at least 1")); - - // Test threshold > number of approvers (should fail) - let result = AuthEcdsaK256KeccakMultisigConfig::new(approvers, 2); - assert!( - result - .unwrap_err() - .to_string() - .contains("threshold cannot be greater than number of approvers") - ); - } - - /// Test multisig component with duplicate approvers (should fail) - #[test] - fn test_multisig_component_duplicate_approvers() { - let pub_key_1 = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let pub_key_2 = PublicKeyCommitment::from(Word::from([2u32, 0, 0, 0])); - - // Test with duplicate approvers (should fail) - let approvers = vec![pub_key_1, pub_key_2, pub_key_1]; - let result = AuthEcdsaK256KeccakMultisigConfig::new(approvers, 2); - assert!( - result - .unwrap_err() - .to_string() - .contains("duplicate approver public keys are not allowed") - ); - } -} diff --git a/crates/miden-standards/src/account/auth/falcon_512_rpo.rs b/crates/miden-standards/src/account/auth/falcon_512_rpo.rs deleted file mode 100644 index d3f37da6e6..0000000000 --- a/crates/miden-standards/src/account/auth/falcon_512_rpo.rs +++ /dev/null @@ -1,59 +0,0 @@ -use miden_protocol::account::auth::PublicKeyCommitment; -use miden_protocol::account::{AccountComponent, StorageSlot, StorageSlotName}; -use miden_protocol::utils::sync::LazyLock; - -use crate::account::components::falcon_512_rpo_library; - -static FALCON_PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo::public_key") - .expect("storage slot name should be valid") -}); - -/// An 
[`AccountComponent`] implementing the Falcon512Rpo signature scheme for authentication of -/// transactions. -/// -/// It reexports the procedures from `miden::standards::auth::falcon512_rpo`. When linking against -/// this component, the `miden` library (i.e. [`ProtocolLib`](miden_protocol::ProtocolLib)) must -/// be available to the assembler which is the case when using [`CodeBuilder`][builder]. The -/// procedures of this component are: -/// - `verify_signatures`, which can be used to verify a signature provided via the advice stack to -/// authenticate a transaction. -/// - `authenticate_transaction`, which can be used to authenticate a transaction using the Falcon -/// signature scheme. -/// -/// This component supports all account types. -/// -/// ## Storage Layout -/// -/// - [`Self::public_key_slot`]: Public key -/// -/// [builder]: crate::code_builder::CodeBuilder -pub struct AuthFalcon512Rpo { - pub_key: PublicKeyCommitment, -} - -impl AuthFalcon512Rpo { - /// Creates a new [`AuthFalcon512Rpo`] component with the given `public_key`. - pub fn new(pub_key: PublicKeyCommitment) -> Self { - Self { pub_key } - } - - /// Returns the [`StorageSlotName`] where the public key is stored. 
- pub fn public_key_slot() -> &'static StorageSlotName { - &FALCON_PUBKEY_SLOT_NAME - } -} - -impl From for AccountComponent { - fn from(falcon: AuthFalcon512Rpo) -> Self { - AccountComponent::new( - falcon_512_rpo_library(), - vec![StorageSlot::with_value( - AuthFalcon512Rpo::public_key_slot().clone(), - falcon.pub_key.into(), - )], - ) - .expect("falcon component should satisfy the requirements of a valid account component") - .with_supports_all_types() - } -} diff --git a/crates/miden-standards/src/account/auth/falcon_512_rpo_acl.rs b/crates/miden-standards/src/account/auth/falcon_512_rpo_acl.rs deleted file mode 100644 index aee82e064d..0000000000 --- a/crates/miden-standards/src/account/auth/falcon_512_rpo_acl.rs +++ /dev/null @@ -1,381 +0,0 @@ -use alloc::vec::Vec; - -use miden_protocol::Word; -use miden_protocol::account::auth::PublicKeyCommitment; -use miden_protocol::account::{ - AccountCode, - AccountComponent, - StorageMap, - StorageSlot, - StorageSlotName, -}; -use miden_protocol::errors::AccountError; -use miden_protocol::utils::sync::LazyLock; - -use crate::account::components::falcon_512_rpo_acl_library; - -static PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_acl::public_key") - .expect("storage slot name should be valid") -}); - -static CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_acl::config") - .expect("storage slot name should be valid") -}); - -static TRIGGER_PROCEDURE_ROOT_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_acl::trigger_procedure_roots") - .expect("storage slot name should be valid") -}); - -/// Configuration for [`AuthFalcon512RpoAcl`] component. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AuthFalcon512RpoAclConfig { - /// List of procedure roots that require authentication when called. 
- pub auth_trigger_procedures: Vec, - /// When `false`, creating output notes (sending notes to other accounts) requires - /// authentication. When `true`, output notes can be created without authentication. - pub allow_unauthorized_output_notes: bool, - /// When `false`, consuming input notes (processing notes sent to this account) requires - /// authentication. When `true`, input notes can be consumed without authentication. - pub allow_unauthorized_input_notes: bool, -} - -impl AuthFalcon512RpoAclConfig { - /// Creates a new configuration with no trigger procedures and both flags set to `false` (most - /// restrictive). - pub fn new() -> Self { - Self { - auth_trigger_procedures: vec![], - allow_unauthorized_output_notes: false, - allow_unauthorized_input_notes: false, - } - } - - /// Sets the list of procedure roots that require authentication when called. - pub fn with_auth_trigger_procedures(mut self, procedures: Vec) -> Self { - self.auth_trigger_procedures = procedures; - self - } - - /// Sets whether unauthorized output notes are allowed. - pub fn with_allow_unauthorized_output_notes(mut self, allow: bool) -> Self { - self.allow_unauthorized_output_notes = allow; - self - } - - /// Sets whether unauthorized input notes are allowed. - pub fn with_allow_unauthorized_input_notes(mut self, allow: bool) -> Self { - self.allow_unauthorized_input_notes = allow; - self - } -} - -impl Default for AuthFalcon512RpoAclConfig { - fn default() -> Self { - Self::new() - } -} - -/// An [`AccountComponent`] implementing a procedure-based Access Control List (ACL) using the -/// Falcon512Rpo signature scheme for authentication of transactions. -/// -/// This component provides fine-grained authentication control based on three conditions: -/// 1. **Procedure-based authentication**: Requires authentication when any of the specified trigger -/// procedures are called during the transaction. -/// 2. 
**Output note authentication**: Controls whether creating output notes requires -/// authentication. Output notes are new notes created by the account and sent to other accounts -/// (e.g., when transferring assets). When `allow_unauthorized_output_notes` is `false`, any -/// transaction that creates output notes must be authenticated, ensuring account owners control -/// when their account sends assets to other accounts. -/// 3. **Input note authentication**: Controls whether consuming input notes requires -/// authentication. Input notes are notes that were sent to this account by other accounts (e.g., -/// incoming asset transfers). When `allow_unauthorized_input_notes` is `false`, any transaction -/// that consumes input notes must be authenticated, ensuring account owners control when their -/// account processes incoming notes. -/// -/// ## Authentication Logic -/// -/// Authentication is required if ANY of the following conditions are true: -/// - Any trigger procedure from the ACL was called -/// - Output notes were created AND `allow_unauthorized_output_notes` is `false` -/// - Input notes were consumed AND `allow_unauthorized_input_notes` is `false` -/// -/// If none of these conditions are met, only the nonce is incremented without requiring a -/// signature. -/// -/// ## Use Cases -/// -/// - **Restrictive mode** (`allow_unauthorized_output_notes=false`, -/// `allow_unauthorized_input_notes=false`): All note operations require authentication, providing -/// maximum security. -/// - **Selective mode**: Allow some note operations without authentication while protecting -/// specific procedures, useful for accounts that need to process certain operations -/// automatically. -/// - **Procedure-only mode** (`allow_unauthorized_output_notes=true`, -/// `allow_unauthorized_input_notes=true`): Only specific procedures require authentication, -/// allowing free note processing. 
-/// -/// ## Storage Layout -/// -/// - [`Self::public_key_slot`]: Public key -/// - [`Self::config_slot`]: `[num_trigger_procs, allow_unauthorized_output_notes, -/// allow_unauthorized_input_notes, 0]` -/// - [`Self::trigger_procedure_roots_slot`]: A map with trigger procedure roots -/// -/// ## Important Note on Procedure Detection -/// The procedure-based authentication relies on the `was_procedure_called` kernel function, -/// which only returns `true` if the procedure in question called into a kernel account API -/// that is restricted to the account context. Procedures that don't interact with account -/// state or kernel APIs may not be detected as "called" even if they were executed during -/// the transaction. This is an important limitation to consider when designing trigger -/// procedures for authentication. -/// -/// This component supports all account types. -pub struct AuthFalcon512RpoAcl { - pub_key: PublicKeyCommitment, - config: AuthFalcon512RpoAclConfig, -} - -impl AuthFalcon512RpoAcl { - /// Creates a new [`AuthFalcon512RpoAcl`] component with the given `public_key` and - /// configuration. - /// - /// # Panics - /// Panics if more than [AccountCode::MAX_NUM_PROCEDURES] procedures are specified. - pub fn new( - pub_key: PublicKeyCommitment, - config: AuthFalcon512RpoAclConfig, - ) -> Result { - let max_procedures = AccountCode::MAX_NUM_PROCEDURES; - if config.auth_trigger_procedures.len() > max_procedures { - return Err(AccountError::other(format!( - "Cannot track more than {max_procedures} procedures (account limit)" - ))); - } - - Ok(Self { pub_key, config }) - } - - /// Returns the [`StorageSlotName`] where the public key is stored. - pub fn public_key_slot() -> &'static StorageSlotName { - &PUBKEY_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the component's configuration is stored. 
- pub fn config_slot() -> &'static StorageSlotName { - &CONFIG_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the trigger procedure roots are stored. - pub fn trigger_procedure_roots_slot() -> &'static StorageSlotName { - &TRIGGER_PROCEDURE_ROOT_SLOT_NAME - } -} - -impl From for AccountComponent { - fn from(falcon: AuthFalcon512RpoAcl) -> Self { - let mut storage_slots = Vec::with_capacity(3); - - // Public key slot - storage_slots.push(StorageSlot::with_value( - AuthFalcon512RpoAcl::public_key_slot().clone(), - falcon.pub_key.into(), - )); - - // Config slot - let num_procs = falcon.config.auth_trigger_procedures.len() as u32; - storage_slots.push(StorageSlot::with_value( - AuthFalcon512RpoAcl::config_slot().clone(), - Word::from([ - num_procs, - u32::from(falcon.config.allow_unauthorized_output_notes), - u32::from(falcon.config.allow_unauthorized_input_notes), - 0, - ]), - )); - - // Trigger procedure roots slot - // We add the map even if there are no trigger procedures, to always maintain the same - // storage layout. - let map_entries = falcon - .config - .auth_trigger_procedures - .iter() - .enumerate() - .map(|(i, proc_root)| (Word::from([i as u32, 0, 0, 0]), *proc_root)); - - // Safe to unwrap because we know that the map keys are unique. 
- storage_slots.push(StorageSlot::with_map( - AuthFalcon512RpoAcl::trigger_procedure_roots_slot().clone(), - StorageMap::with_entries(map_entries).unwrap(), - )); - - AccountComponent::new(falcon_512_rpo_acl_library(), storage_slots) - .expect( - "ACL auth component should satisfy the requirements of a valid account component", - ) - .with_supports_all_types() - } -} - -#[cfg(test)] -mod tests { - use miden_protocol::Word; - use miden_protocol::account::AccountBuilder; - - use super::*; - use crate::account::components::WellKnownComponent; - use crate::account::wallets::BasicWallet; - - /// Test configuration for parametrized ACL tests - struct AclTestConfig { - /// Whether to include auth trigger procedures - with_procedures: bool, - /// Allow unauthorized output notes flag - allow_unauthorized_output_notes: bool, - /// Allow unauthorized input notes flag - allow_unauthorized_input_notes: bool, - /// Expected config slot value [num_procs, allow_output, allow_input, 0] - expected_config_slot: Word, - } - - /// Helper function to get the basic wallet procedures for testing - fn get_basic_wallet_procedures() -> Vec { - // Get the two trigger procedures from BasicWallet: `receive_asset`, `move_asset_to_note`. 
- let procedures: Vec = WellKnownComponent::BasicWallet.procedure_digests().collect(); - - assert_eq!(procedures.len(), 2); - procedures - } - - /// Parametrized test helper for ACL component testing - fn test_acl_component(config: AclTestConfig) { - let public_key = PublicKeyCommitment::from(Word::empty()); - - // Build the configuration - let mut acl_config = AuthFalcon512RpoAclConfig::new() - .with_allow_unauthorized_output_notes(config.allow_unauthorized_output_notes) - .with_allow_unauthorized_input_notes(config.allow_unauthorized_input_notes); - - let auth_trigger_procedures = if config.with_procedures { - let procedures = get_basic_wallet_procedures(); - acl_config = acl_config.with_auth_trigger_procedures(procedures.clone()); - procedures - } else { - vec![] - }; - - // Create component and account - let component = - AuthFalcon512RpoAcl::new(public_key, acl_config).expect("component creation failed"); - - let account = AccountBuilder::new([0; 32]) - .with_auth_component(component) - .with_component(BasicWallet) - .build() - .expect("account building failed"); - - // Check public key storage - let public_key_slot = account - .storage() - .get_item(AuthFalcon512RpoAcl::public_key_slot()) - .expect("public key storage slot access failed"); - assert_eq!(public_key_slot, public_key.into()); - - // Check configuration storage - let config_slot = account - .storage() - .get_item(AuthFalcon512RpoAcl::config_slot()) - .expect("config storage slot access failed"); - assert_eq!(config_slot, config.expected_config_slot); - - // Check procedure roots - if config.with_procedures { - for (i, expected_proc_root) in auth_trigger_procedures.iter().enumerate() { - let proc_root = account - .storage() - .get_map_item( - AuthFalcon512RpoAcl::trigger_procedure_roots_slot(), - Word::from([i as u32, 0, 0, 0]), - ) - .expect("storage map access failed"); - assert_eq!(proc_root, *expected_proc_root); - } - } else { - // When no procedures, the map should return empty for key 
[0,0,0,0] - let proc_root = account - .storage() - .get_map_item(AuthFalcon512RpoAcl::trigger_procedure_roots_slot(), Word::empty()) - .expect("storage map access failed"); - assert_eq!(proc_root, Word::empty()); - } - } - - /// Test ACL component with no procedures and both authorization flags set to false - #[test] - fn test_falcon_512_rpo_acl_no_procedures() { - test_acl_component(AclTestConfig { - with_procedures: false, - allow_unauthorized_output_notes: false, - allow_unauthorized_input_notes: false, - expected_config_slot: Word::empty(), // [0, 0, 0, 0] - }); - } - - /// Test ACL component with two procedures and both authorization flags set to false - #[test] - fn test_falcon_512_rpo_acl_with_two_procedures() { - test_acl_component(AclTestConfig { - with_procedures: true, - allow_unauthorized_output_notes: false, - allow_unauthorized_input_notes: false, - expected_config_slot: Word::from([2u32, 0, 0, 0]), - }); - } - - /// Test ACL component with no procedures and allow_unauthorized_output_notes set to true - #[test] - fn test_falcon_512_rpo_acl_with_allow_unauthorized_output_notes() { - test_acl_component(AclTestConfig { - with_procedures: false, - allow_unauthorized_output_notes: true, - allow_unauthorized_input_notes: false, - expected_config_slot: Word::from([0u32, 1, 0, 0]), - }); - } - - /// Test ACL component with two procedures and allow_unauthorized_output_notes set to true - #[test] - fn test_falcon_512_rpo_acl_with_procedures_and_allow_unauthorized_output_notes() { - test_acl_component(AclTestConfig { - with_procedures: true, - allow_unauthorized_output_notes: true, - allow_unauthorized_input_notes: false, - expected_config_slot: Word::from([2u32, 1, 0, 0]), - }); - } - - /// Test ACL component with no procedures and allow_unauthorized_input_notes set to true - #[test] - fn test_falcon_512_rpo_acl_with_allow_unauthorized_input_notes() { - test_acl_component(AclTestConfig { - with_procedures: false, - allow_unauthorized_output_notes: false, - 
allow_unauthorized_input_notes: true, - expected_config_slot: Word::from([0u32, 0, 1, 0]), - }); - } - - /// Test ACL component with two procedures and both authorization flags set to true - #[test] - fn test_falcon_512_rpo_acl_with_both_allow_flags() { - test_acl_component(AclTestConfig { - with_procedures: true, - allow_unauthorized_output_notes: true, - allow_unauthorized_input_notes: true, - expected_config_slot: Word::from([2u32, 1, 1, 0]), - }); - } -} diff --git a/crates/miden-standards/src/account/auth/falcon_512_rpo_multisig.rs b/crates/miden-standards/src/account/auth/falcon_512_rpo_multisig.rs deleted file mode 100644 index 0b038c20a1..0000000000 --- a/crates/miden-standards/src/account/auth/falcon_512_rpo_multisig.rs +++ /dev/null @@ -1,331 +0,0 @@ -use alloc::collections::BTreeSet; -use alloc::vec::Vec; - -use miden_protocol::Word; -use miden_protocol::account::auth::PublicKeyCommitment; -use miden_protocol::account::{AccountComponent, StorageMap, StorageSlot, StorageSlotName}; -use miden_protocol::errors::AccountError; -use miden_protocol::utils::sync::LazyLock; - -use crate::account::components::falcon_512_rpo_multisig_library; - -static THRESHOLD_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_multisig::threshold_config") - .expect("storage slot name should be valid") -}); - -static APPROVER_PUBKEYS_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_multisig::approver_public_keys") - .expect("storage slot name should be valid") -}); - -static EXECUTED_TRANSACTIONS_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_multisig::executed_transactions") - .expect("storage slot name should be valid") -}); - -static PROCEDURE_THRESHOLDS_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::falcon512_rpo_multisig::procedure_thresholds") - .expect("storage slot 
name should be valid") -}); - -// MULTISIG AUTHENTICATION COMPONENT -// ================================================================================================ - -/// Configuration for [`AuthFalcon512RpoMultisig`] component. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct AuthFalcon512RpoMultisigConfig { - approvers: Vec, - default_threshold: u32, - proc_thresholds: Vec<(Word, u32)>, -} - -impl AuthFalcon512RpoMultisigConfig { - /// Creates a new configuration with the given approvers and a default threshold. - /// - /// The `default_threshold` must be at least 1 and at most the number of approvers. - pub fn new( - approvers: Vec, - default_threshold: u32, - ) -> Result { - if default_threshold == 0 { - return Err(AccountError::other("threshold must be at least 1")); - } - if default_threshold > approvers.len() as u32 { - return Err(AccountError::other( - "threshold cannot be greater than number of approvers", - )); - } - - // Check for duplicate approvers - if approvers.len() != approvers.iter().collect::>().len() { - return Err(AccountError::other("duplicate approver public keys are not allowed")); - } - - Ok(Self { - approvers, - default_threshold, - proc_thresholds: vec![], - }) - } - - /// Attaches a per-procedure threshold map. Each procedure threshold must be at least 1 and - /// at most the number of approvers. 
- pub fn with_proc_thresholds( - mut self, - proc_thresholds: Vec<(Word, u32)>, - ) -> Result { - for (_, threshold) in &proc_thresholds { - if *threshold == 0 { - return Err(AccountError::other("procedure threshold must be at least 1")); - } - if *threshold > self.approvers.len() as u32 { - return Err(AccountError::other( - "procedure threshold cannot be greater than number of approvers", - )); - } - } - self.proc_thresholds = proc_thresholds; - Ok(self) - } - - pub fn approvers(&self) -> &[PublicKeyCommitment] { - &self.approvers - } - - pub fn default_threshold(&self) -> u32 { - self.default_threshold - } - - pub fn proc_thresholds(&self) -> &[(Word, u32)] { - &self.proc_thresholds - } -} - -/// An [`AccountComponent`] implementing a multisig based on Falcon512Rpo signatures. -/// -/// It enforces a threshold of approver signatures for every transaction, with optional -/// per-procedure thresholds overrides. Non-uniform thresholds (especially a threshold of one) -/// should be used with caution for private multisig accounts, as a single approver could withhold -/// the new state from other approvers, effectively locking them out. -/// -/// ## Storage Layout -/// -/// - [`Self::threshold_config_slot`]: `[threshold, num_approvers, 0, 0]` -/// - [`Self::approver_public_keys_slot`]: A map with approver public keys (index -> pubkey) -/// - [`Self::executed_transactions_slot`]: A map which stores executed transactions -/// - [`Self::procedure_thresholds_slot`]: A map which stores procedure thresholds (PROC_ROOT -> -/// threshold) -/// -/// This component supports all account types. -#[derive(Debug)] -pub struct AuthFalcon512RpoMultisig { - config: AuthFalcon512RpoMultisigConfig, -} - -impl AuthFalcon512RpoMultisig { - /// Creates a new [`AuthFalcon512RpoMultisig`] component from the provided configuration. 
- pub fn new(config: AuthFalcon512RpoMultisigConfig) -> Result { - Ok(Self { config }) - } - - /// Returns the [`StorageSlotName`] where the threshold configuration is stored. - pub fn threshold_config_slot() -> &'static StorageSlotName { - &THRESHOLD_CONFIG_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the approver public keys are stored. - pub fn approver_public_keys_slot() -> &'static StorageSlotName { - &APPROVER_PUBKEYS_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the executed transactions are stored. - pub fn executed_transactions_slot() -> &'static StorageSlotName { - &EXECUTED_TRANSACTIONS_SLOT_NAME - } - - /// Returns the [`StorageSlotName`] where the procedure thresholds are stored. - pub fn procedure_thresholds_slot() -> &'static StorageSlotName { - &PROCEDURE_THRESHOLDS_SLOT_NAME - } -} - -impl From for AccountComponent { - fn from(multisig: AuthFalcon512RpoMultisig) -> Self { - let mut storage_slots = Vec::with_capacity(3); - - // Threshold config slot (value: [threshold, num_approvers, 0, 0]) - let num_approvers = multisig.config.approvers().len() as u32; - storage_slots.push(StorageSlot::with_value( - AuthFalcon512RpoMultisig::threshold_config_slot().clone(), - Word::from([multisig.config.default_threshold(), num_approvers, 0, 0]), - )); - - // Approver public keys slot (map) - let map_entries = multisig - .config - .approvers() - .iter() - .enumerate() - .map(|(i, pub_key)| (Word::from([i as u32, 0, 0, 0]), (*pub_key).into())); - - // Safe to unwrap because we know that the map keys are unique. 
- storage_slots.push(StorageSlot::with_map( - AuthFalcon512RpoMultisig::approver_public_keys_slot().clone(), - StorageMap::with_entries(map_entries).unwrap(), - )); - - // Executed transactions slot (map) - let executed_transactions = StorageMap::default(); - storage_slots.push(StorageSlot::with_map( - AuthFalcon512RpoMultisig::executed_transactions_slot().clone(), - executed_transactions, - )); - - // Procedure thresholds slot (map: PROC_ROOT -> threshold) - let proc_threshold_roots = StorageMap::with_entries( - multisig - .config - .proc_thresholds() - .iter() - .map(|(proc_root, threshold)| (*proc_root, Word::from([*threshold, 0, 0, 0]))), - ) - .unwrap(); - storage_slots.push(StorageSlot::with_map( - AuthFalcon512RpoMultisig::procedure_thresholds_slot().clone(), - proc_threshold_roots, - )); - - AccountComponent::new(falcon_512_rpo_multisig_library(), storage_slots) - .expect("Multisig auth component should satisfy the requirements of a valid account component") - .with_supports_all_types() - } -} - -#[cfg(test)] -mod tests { - use alloc::string::ToString; - - use miden_protocol::Word; - use miden_protocol::account::AccountBuilder; - - use super::*; - use crate::account::wallets::BasicWallet; - - /// Test multisig component setup with various configurations - #[test] - fn test_multisig_component_setup() { - // Create test public keys - let pub_key_1 = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let pub_key_2 = PublicKeyCommitment::from(Word::from([2u32, 0, 0, 0])); - let pub_key_3 = PublicKeyCommitment::from(Word::from([3u32, 0, 0, 0])); - let approvers = vec![pub_key_1, pub_key_2, pub_key_3]; - let threshold = 2u32; - - // Create multisig component - let multisig_component = AuthFalcon512RpoMultisig::new( - AuthFalcon512RpoMultisigConfig::new(approvers.clone(), threshold) - .expect("invalid multisig config"), - ) - .expect("multisig component creation failed"); - - // Build account with multisig component - let account = AccountBuilder::new([0; 
32]) - .with_auth_component(multisig_component) - .with_component(BasicWallet) - .build() - .expect("account building failed"); - - // Verify config slot: [threshold, num_approvers, 0, 0] - let config_slot = account - .storage() - .get_item(AuthFalcon512RpoMultisig::threshold_config_slot()) - .expect("config storage slot access failed"); - assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); - - // Verify approver pub keys slot - for (i, expected_pub_key) in approvers.iter().enumerate() { - let stored_pub_key = account - .storage() - .get_map_item( - AuthFalcon512RpoMultisig::approver_public_keys_slot(), - Word::from([i as u32, 0, 0, 0]), - ) - .expect("approver public key storage map access failed"); - assert_eq!(stored_pub_key, Word::from(*expected_pub_key)); - } - } - - /// Test multisig component with minimum threshold (1 of 1) - #[test] - fn test_multisig_component_minimum_threshold() { - let pub_key = PublicKeyCommitment::from(Word::from([42u32, 0, 0, 0])); - let approvers = vec![pub_key]; - let threshold = 1u32; - - let multisig_component = AuthFalcon512RpoMultisig::new( - AuthFalcon512RpoMultisigConfig::new(approvers.clone(), threshold) - .expect("invalid multisig config"), - ) - .expect("multisig component creation failed"); - - let account = AccountBuilder::new([0; 32]) - .with_auth_component(multisig_component) - .with_component(BasicWallet) - .build() - .expect("account building failed"); - - // Verify storage layout - let config_slot = account - .storage() - .get_item(AuthFalcon512RpoMultisig::threshold_config_slot()) - .expect("config storage slot access failed"); - assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0])); - - let stored_pub_key = account - .storage() - .get_map_item( - AuthFalcon512RpoMultisig::approver_public_keys_slot(), - Word::from([0u32, 0, 0, 0]), - ) - .expect("approver pub keys storage map access failed"); - assert_eq!(stored_pub_key, Word::from(pub_key)); - } - - /// Test 
multisig component error cases - #[test] - fn test_multisig_component_error_cases() { - let pub_key = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let approvers = vec![pub_key]; - - // Test threshold = 0 (should fail) - let result = AuthFalcon512RpoMultisigConfig::new(approvers.clone(), 0); - assert!(result.unwrap_err().to_string().contains("threshold must be at least 1")); - - // Test threshold > number of approvers (should fail) - let result = AuthFalcon512RpoMultisigConfig::new(approvers, 2); - assert!( - result - .unwrap_err() - .to_string() - .contains("threshold cannot be greater than number of approvers") - ); - } - - /// Test multisig component with duplicate approvers (should fail) - #[test] - fn test_multisig_component_duplicate_approvers() { - let pub_key_1 = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); - let pub_key_2 = PublicKeyCommitment::from(Word::from([2u32, 0, 0, 0])); - - // Test with duplicate approvers (should fail) - let approvers = vec![pub_key_1, pub_key_2, pub_key_1]; - let result = AuthFalcon512RpoMultisigConfig::new(approvers, 2); - assert!( - result - .unwrap_err() - .to_string() - .contains("duplicate approver public keys are not allowed") - ); - } -} diff --git a/crates/miden-standards/src/account/auth/mod.rs b/crates/miden-standards/src/account/auth/mod.rs index 5d239b6cfb..e999fab153 100644 --- a/crates/miden-standards/src/account/auth/mod.rs +++ b/crates/miden-standards/src/account/auth/mod.rs @@ -1,23 +1,14 @@ mod no_auth; pub use no_auth::NoAuth; -mod ecdsa_k256_keccak; -pub use ecdsa_k256_keccak::AuthEcdsaK256Keccak; +mod singlesig; +pub use singlesig::AuthSingleSig; -mod ecdsa_k256_keccak_acl; -pub use ecdsa_k256_keccak_acl::{AuthEcdsaK256KeccakAcl, AuthEcdsaK256KeccakAclConfig}; +mod singlesig_acl; +pub use singlesig_acl::{AuthSingleSigAcl, AuthSingleSigAclConfig}; -mod ecdsa_k256_keccak_multisig; -pub use ecdsa_k256_keccak_multisig::{ - AuthEcdsaK256KeccakMultisig, - AuthEcdsaK256KeccakMultisigConfig, 
-}; +mod multisig; +pub use multisig::{AuthMultisig, AuthMultisigConfig}; -mod falcon_512_rpo; -pub use falcon_512_rpo::AuthFalcon512Rpo; - -mod falcon_512_rpo_acl; -pub use falcon_512_rpo_acl::{AuthFalcon512RpoAcl, AuthFalcon512RpoAclConfig}; - -mod falcon_512_rpo_multisig; -pub use falcon_512_rpo_multisig::{AuthFalcon512RpoMultisig, AuthFalcon512RpoMultisigConfig}; +mod multisig_psm; +pub use multisig_psm::{AuthMultisigPsm, AuthMultisigPsmConfig, PsmConfig}; diff --git a/crates/miden-standards/src/account/auth/multisig.rs b/crates/miden-standards/src/account/auth/multisig.rs new file mode 100644 index 0000000000..196bb3de0c --- /dev/null +++ b/crates/miden-standards/src/account/auth/multisig.rs @@ -0,0 +1,478 @@ +use alloc::collections::BTreeSet; +use alloc::vec::Vec; + +use miden_protocol::Word; +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; +use miden_protocol::account::{ + AccountComponent, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::errors::AccountError; +use miden_protocol::utils::sync::LazyLock; + +use crate::account::components::multisig_library; + +// CONSTANTS +// ================================================================================================ + +static THRESHOLD_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::multisig::threshold_config") + .expect("storage slot name should be valid") +}); + +static APPROVER_PUBKEYS_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::multisig::approver_public_keys") + .expect("storage slot name should be valid") +}); + +static APPROVER_SCHEME_ID_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::multisig::approver_schemes") + .expect("storage slot name should be 
valid") +}); + +static EXECUTED_TRANSACTIONS_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::multisig::executed_transactions") + .expect("storage slot name should be valid") +}); + +static PROCEDURE_THRESHOLDS_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::multisig::procedure_thresholds") + .expect("storage slot name should be valid") +}); + +// MULTISIG AUTHENTICATION COMPONENT +// ================================================================================================ + +/// Configuration for [`AuthMultisig`] component. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AuthMultisigConfig { + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + default_threshold: u32, + proc_thresholds: Vec<(Word, u32)>, +} + +impl AuthMultisigConfig { + /// Creates a new configuration with the given approvers and a default threshold. + /// + /// The `default_threshold` must be at least 1 and at most the number of approvers. + pub fn new( + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + default_threshold: u32, + ) -> Result { + if default_threshold == 0 { + return Err(AccountError::other("threshold must be at least 1")); + } + if default_threshold > approvers.len() as u32 { + return Err(AccountError::other( + "threshold cannot be greater than number of approvers", + )); + } + + // Check for duplicate approvers + let unique_approvers: BTreeSet<_> = approvers.iter().map(|(pk, _)| pk).collect(); + + if unique_approvers.len() != approvers.len() { + return Err(AccountError::other("duplicate approver public keys are not allowed")); + } + + Ok(Self { + approvers, + default_threshold, + proc_thresholds: vec![], + }) + } + + /// Attaches a per-procedure threshold map. Each procedure threshold must be at least 1 and + /// at most the number of approvers. 
+ pub fn with_proc_thresholds( + mut self, + proc_thresholds: Vec<(Word, u32)>, + ) -> Result { + for (_, threshold) in &proc_thresholds { + if *threshold == 0 { + return Err(AccountError::other("procedure threshold must be at least 1")); + } + if *threshold > self.approvers.len() as u32 { + return Err(AccountError::other( + "procedure threshold cannot be greater than number of approvers", + )); + } + } + self.proc_thresholds = proc_thresholds; + Ok(self) + } + + pub fn approvers(&self) -> &[(PublicKeyCommitment, AuthScheme)] { + &self.approvers + } + + pub fn default_threshold(&self) -> u32 { + self.default_threshold + } + + pub fn proc_thresholds(&self) -> &[(Word, u32)] { + &self.proc_thresholds + } +} + +/// An [`AccountComponent`] implementing multisig authentication. +/// +/// It enforces a threshold of approver signatures for every transaction, with optional +/// per-procedure threshold overrides. Non-uniform thresholds (especially a threshold of one) +/// should be used with caution for private multisig accounts: without a Private State Manager (PSM), +/// a single approver may advance state and withhold updates from other approvers, effectively +/// locking them out. +/// +/// This component supports all account types. +#[derive(Debug)] +pub struct AuthMultisig { + config: AuthMultisigConfig, +} + +impl AuthMultisig { + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::auth::multisig"; + + /// Creates a new [`AuthMultisig`] component from the provided configuration. + pub fn new(config: AuthMultisigConfig) -> Result { + Ok(Self { config }) + } + + /// Returns the [`StorageSlotName`] where the threshold configuration is stored. + pub fn threshold_config_slot() -> &'static StorageSlotName { + &THRESHOLD_CONFIG_SLOT_NAME + } + + /// Returns the [`StorageSlotName`] where the approver public keys are stored. 
+    pub fn approver_public_keys_slot() -> &'static StorageSlotName {
+        &APPROVER_PUBKEYS_SLOT_NAME
+    }
+
+    /// Returns the [`StorageSlotName`] where the approver scheme IDs are stored.
+    pub fn approver_scheme_ids_slot() -> &'static StorageSlotName {
+        &APPROVER_SCHEME_ID_SLOT_NAME
+    }
+
+    /// Returns the [`StorageSlotName`] where the executed transactions are stored.
+    pub fn executed_transactions_slot() -> &'static StorageSlotName {
+        &EXECUTED_TRANSACTIONS_SLOT_NAME
+    }
+
+    /// Returns the [`StorageSlotName`] where the procedure thresholds are stored.
+    pub fn procedure_thresholds_slot() -> &'static StorageSlotName {
+        &PROCEDURE_THRESHOLDS_SLOT_NAME
+    }
+
+    /// Returns the storage slot schema for the threshold configuration slot.
+    pub fn threshold_config_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::threshold_config_slot().clone(),
+            StorageSlotSchema::value(
+                "Threshold configuration",
+                [
+                    FeltSchema::u32("threshold"),
+                    FeltSchema::u32("num_approvers"),
+                    FeltSchema::new_void(),
+                    FeltSchema::new_void(),
+                ],
+            ),
+        )
+    }
+
+    /// Returns the storage slot schema for the approver public keys slot.
+    pub fn approver_public_keys_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::approver_public_keys_slot().clone(),
+            StorageSlotSchema::map(
+                "Approver public keys",
+                SchemaType::u32(),
+                SchemaType::pub_key(),
+            ),
+        )
+    }
+
+    /// Returns the storage slot schema for the approver scheme IDs slot.
+    pub fn approver_auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::approver_scheme_ids_slot().clone(),
+            StorageSlotSchema::map(
+                "Approver scheme IDs",
+                SchemaType::u32(),
+                SchemaType::auth_scheme(),
+            ),
+        )
+    }
+
+    /// Returns the storage slot schema for the executed transactions slot.
+    pub fn executed_transactions_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::executed_transactions_slot().clone(),
+            StorageSlotSchema::map(
+                "Executed transactions",
+                SchemaType::native_word(),
+                SchemaType::native_word(),
+            ),
+        )
+    }
+
+    /// Returns the storage slot schema for the procedure thresholds slot.
+    pub fn procedure_thresholds_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::procedure_thresholds_slot().clone(),
+            StorageSlotSchema::map(
+                "Procedure thresholds",
+                SchemaType::native_word(),
+                SchemaType::u32(),
+            ),
+        )
+    }
+
+    /// Returns the [`AccountComponentMetadata`] for this component.
+    pub fn component_metadata() -> AccountComponentMetadata {
+        let storage_schema = StorageSchema::new([
+            Self::threshold_config_slot_schema(),
+            Self::approver_public_keys_slot_schema(),
+            Self::approver_auth_scheme_slot_schema(),
+            Self::executed_transactions_slot_schema(),
+            Self::procedure_thresholds_slot_schema(),
+        ])
+        .expect("storage schema should be valid");
+
+        AccountComponentMetadata::new(Self::NAME, AccountType::all())
+            .with_description("Multisig authentication component using hybrid signature schemes")
+            .with_storage_schema(storage_schema)
+    }
+}
+
+impl From<AuthMultisig> for AccountComponent {
+    fn from(multisig: AuthMultisig) -> Self {
+        let mut storage_slots = Vec::with_capacity(5);
+
+        // Threshold config slot (value: [threshold, num_approvers, 0, 0])
+        let num_approvers = multisig.config.approvers().len() as u32;
+        storage_slots.push(StorageSlot::with_value(
+            AuthMultisig::threshold_config_slot().clone(),
+            Word::from([multisig.config.default_threshold(), num_approvers, 0, 0]),
+        ));
+
+        // Approver public keys slot (map)
+        let map_entries =
+            multisig.config.approvers().iter().enumerate().map(|(i, (pub_key, _))| {
+                (StorageMapKey::from_index(i as u32), Word::from(*pub_key))
+            });
+
+        // Safe to unwrap because we know that the map keys are unique.
+        storage_slots.push(StorageSlot::with_map(
+            AuthMultisig::approver_public_keys_slot().clone(),
+            StorageMap::with_entries(map_entries).unwrap(),
+        ));
+
+        // Approver scheme IDs slot (map): [index, 0, 0, 0] => [scheme_id, 0, 0, 0]
+        let scheme_id_entries =
+            multisig.config.approvers().iter().enumerate().map(|(i, (_, auth_scheme))| {
+                (StorageMapKey::from_index(i as u32), Word::from([*auth_scheme as u32, 0, 0, 0]))
+            });
+
+        storage_slots.push(StorageSlot::with_map(
+            AuthMultisig::approver_scheme_ids_slot().clone(),
+            StorageMap::with_entries(scheme_id_entries).unwrap(),
+        ));
+
+        // Executed transactions slot (map)
+        let executed_transactions = StorageMap::default();
+        storage_slots.push(StorageSlot::with_map(
+            AuthMultisig::executed_transactions_slot().clone(),
+            executed_transactions,
+        ));
+
+        // Procedure thresholds slot (map: PROC_ROOT -> threshold)
+        let proc_threshold_roots = StorageMap::with_entries(
+            multisig.config.proc_thresholds().iter().map(|(proc_root, threshold)| {
+                (StorageMapKey::from_raw(*proc_root), Word::from([*threshold, 0, 0, 0]))
+            }),
+        )
+        .unwrap();
+        storage_slots.push(StorageSlot::with_map(
+            AuthMultisig::procedure_thresholds_slot().clone(),
+            proc_threshold_roots,
+        ));
+
+        let metadata = AuthMultisig::component_metadata();
+
+        AccountComponent::new(multisig_library(), storage_slots, metadata).expect(
+            "Multisig auth component should satisfy the requirements of a valid account component",
+        )
+    }
+}
+
+// TESTS
+// ================================================================================================
+
+#[cfg(test)]
+mod tests {
+    use alloc::string::ToString;
+
+    use miden_protocol::Word;
+    use miden_protocol::account::auth::AuthSecretKey;
+    use miden_protocol::account::{AccountBuilder, auth};
+
+    use super::*;
+    use crate::account::wallets::BasicWallet;
+
+    /// Test multisig component setup with various configurations
+    #[test]
+    fn test_multisig_component_setup() {
+        // Create test secret keys
+        let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2();
+        let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2();
+        let sec_key_3 = AuthSecretKey::new_falcon512_poseidon2();
+
+        // Create approvers list for multisig config
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+            (sec_key_3.public_key().to_commitment(), sec_key_3.auth_scheme()),
+        ];
+
+        let threshold = 2u32;
+
+        // Create multisig component
+        let multisig_component = AuthMultisig::new(
+            AuthMultisigConfig::new(approvers.clone(), threshold).expect("invalid multisig config"),
+        )
+        .expect("multisig component creation failed");
+
+        // Build account with multisig component
+        let account = AccountBuilder::new([0; 32])
+            .with_auth_component(multisig_component)
+            .with_component(BasicWallet)
+            .build()
+            .expect("account building failed");
+
+        // Verify config slot: [threshold, num_approvers, 0, 0]
+        let config_slot = account
+            .storage()
+            .get_item(AuthMultisig::threshold_config_slot())
+            .expect("config storage slot access failed");
+        assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0]));
+
+        // Verify approver pub keys slot
+        for (i, (expected_pub_key, _)) in approvers.iter().enumerate() {
+            let stored_pub_key = account
+                .storage()
+                .get_map_item(
+                    AuthMultisig::approver_public_keys_slot(),
+                    Word::from([i as u32, 0, 0, 0]),
+                )
+                .expect("approver public key storage map access failed");
+            assert_eq!(stored_pub_key, Word::from(*expected_pub_key));
+        }
+
+        // Verify approver scheme IDs slot
+        for (i, (_, expected_auth_scheme)) in approvers.iter().enumerate() {
+            let stored_scheme_id = account
+                .storage()
+                .get_map_item(
+                    AuthMultisig::approver_scheme_ids_slot(),
+                    Word::from([i as u32, 0, 0, 0]),
+                )
+                .expect("approver scheme ID storage map access failed");
+            assert_eq!(stored_scheme_id, Word::from([*expected_auth_scheme as u32, 0, 0, 0]));
+        }
+    }
+
+    /// Test multisig component with minimum threshold (1 of 1)
+    #[test]
+    fn test_multisig_component_minimum_threshold() {
+        let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment();
+        let approvers = vec![(pub_key, auth::AuthScheme::EcdsaK256Keccak)];
+        let threshold = 1u32;
+
+        let multisig_component = AuthMultisig::new(
+            AuthMultisigConfig::new(approvers.clone(), threshold).expect("invalid multisig config"),
+        )
+        .expect("multisig component creation failed");
+
+        let account = AccountBuilder::new([0; 32])
+            .with_auth_component(multisig_component)
+            .with_component(BasicWallet)
+            .build()
+            .expect("account building failed");
+
+        // Verify storage layout
+        let config_slot = account
+            .storage()
+            .get_item(AuthMultisig::threshold_config_slot())
+            .expect("config storage slot access failed");
+        assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0]));
+
+        let stored_pub_key = account
+            .storage()
+            .get_map_item(AuthMultisig::approver_public_keys_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("approver pub keys storage map access failed");
+        assert_eq!(stored_pub_key, Word::from(pub_key));
+
+        let stored_scheme_id = account
+            .storage()
+            .get_map_item(AuthMultisig::approver_scheme_ids_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("approver scheme IDs storage map access failed");
+        assert_eq!(
+            stored_scheme_id,
+            Word::from([auth::AuthScheme::EcdsaK256Keccak as u32, 0, 0, 0])
+        );
+    }
+
+    /// Test multisig component error cases
+    #[test]
+    fn test_multisig_component_error_cases() {
+        let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment();
+        let approvers = vec![(pub_key, auth::AuthScheme::EcdsaK256Keccak)];
+
+        // Test threshold = 0 (should fail)
+        let result = AuthMultisigConfig::new(approvers.clone(), 0);
+        assert!(result.unwrap_err().to_string().contains("threshold must be at least 1"));
+
+        // Test threshold > number of approvers (should fail)
+        let result = AuthMultisigConfig::new(approvers, 2);
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("threshold cannot be greater than number of approvers")
+        );
+    }
+
+    /// Test multisig component with duplicate approvers (should fail)
+    #[test]
+    fn test_multisig_component_duplicate_approvers() {
+        // Create secret keys for approvers
+        let sec_key_1 = AuthSecretKey::new_ecdsa_k256_keccak();
+        let sec_key_2 = AuthSecretKey::new_ecdsa_k256_keccak();
+
+        // Create approvers list with duplicate public keys
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+        ];
+
+        let result = AuthMultisigConfig::new(approvers, 2);
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("duplicate approver public keys are not allowed")
+        );
+    }
+}
diff --git a/crates/miden-standards/src/account/auth/multisig_psm.rs b/crates/miden-standards/src/account/auth/multisig_psm.rs
new file mode 100644
index 0000000000..1e9ecc34b2
--- /dev/null
+++ b/crates/miden-standards/src/account/auth/multisig_psm.rs
@@ -0,0 +1,588 @@
+use alloc::vec::Vec;
+
+use miden_protocol::Word;
+use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment};
+use miden_protocol::account::component::{
+    AccountComponentMetadata,
+    SchemaType,
+    StorageSchema,
+    StorageSlotSchema,
+};
+use miden_protocol::account::{
+    AccountComponent,
+    AccountType,
+    StorageMap,
+    StorageMapKey,
+    StorageSlot,
+    StorageSlotName,
+};
+use miden_protocol::errors::AccountError;
+use miden_protocol::utils::sync::LazyLock;
+
+use super::multisig::{AuthMultisig, AuthMultisigConfig};
+use crate::account::components::multisig_psm_library;
+
+// CONSTANTS
+// ================================================================================================
+
+static PSM_PUBKEY_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::auth::psm::pub_key")
+        .expect("storage slot name should be valid")
+});
+
+static PSM_SCHEME_ID_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::auth::psm::scheme")
+        .expect("storage slot name should be valid")
+});
+
+// MULTISIG AUTHENTICATION COMPONENT
+// ================================================================================================
+
+/// Configuration for [`AuthMultisigPsm`] component.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct AuthMultisigPsmConfig {
+    multisig: AuthMultisigConfig,
+    psm_config: PsmConfig,
+}
+
+/// Public configuration for the private state manager signer.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct PsmConfig {
+    pub_key: PublicKeyCommitment,
+    auth_scheme: AuthScheme,
+}
+
+impl PsmConfig {
+    /// Creates a new private state manager signer configuration.
+    pub fn new(pub_key: PublicKeyCommitment, auth_scheme: AuthScheme) -> Self {
+        Self { pub_key, auth_scheme }
+    }
+
+    /// Returns the private state manager public key commitment.
+    pub fn pub_key(&self) -> PublicKeyCommitment {
+        self.pub_key
+    }
+
+    /// Returns the private state manager authentication scheme.
+    pub fn auth_scheme(&self) -> AuthScheme {
+        self.auth_scheme
+    }
+
+    fn public_key_slot() -> &'static StorageSlotName {
+        &PSM_PUBKEY_SLOT_NAME
+    }
+
+    fn scheme_id_slot() -> &'static StorageSlotName {
+        &PSM_SCHEME_ID_SLOT_NAME
+    }
+
+    fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::public_key_slot().clone(),
+            StorageSlotSchema::map(
+                "Private state manager public keys",
+                SchemaType::u32(),
+                SchemaType::pub_key(),
+            ),
+        )
+    }
+
+    fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        (
+            Self::scheme_id_slot().clone(),
+            StorageSlotSchema::map(
+                "Private state manager scheme IDs",
+                SchemaType::u32(),
+                SchemaType::auth_scheme(),
+            ),
+        )
+    }
+
+    fn into_component_parts(self) -> (Vec<StorageSlot>, Vec<(StorageSlotName, StorageSlotSchema)>) {
+        let mut storage_slots = Vec::with_capacity(2);
+
+        // Private state manager public key slot (map: [0, 0, 0, 0] -> pubkey)
+        let psm_public_key_entries =
+            [(StorageMapKey::from_raw(Word::from([0u32, 0, 0, 0])), Word::from(self.pub_key))];
+        storage_slots.push(StorageSlot::with_map(
+            Self::public_key_slot().clone(),
+            StorageMap::with_entries(psm_public_key_entries).unwrap(),
+        ));
+
+        // Private state manager scheme IDs slot (map: [0, 0, 0, 0] -> [scheme_id, 0, 0, 0])
+        let psm_scheme_id_entries = [(
+            StorageMapKey::from_raw(Word::from([0u32, 0, 0, 0])),
+            Word::from([self.auth_scheme as u32, 0, 0, 0]),
+        )];
+        storage_slots.push(StorageSlot::with_map(
+            Self::scheme_id_slot().clone(),
+            StorageMap::with_entries(psm_scheme_id_entries).unwrap(),
+        ));
+
+        let slot_metadata = vec![Self::public_key_slot_schema(), Self::auth_scheme_slot_schema()];
+
+        (storage_slots, slot_metadata)
+    }
+}
+
+impl AuthMultisigPsmConfig {
+    /// Creates a new configuration with the given approvers, default threshold and PSM signer.
+    ///
+    /// The `default_threshold` must be at least 1 and at most the number of approvers.
+    /// The private state manager public key must be different from all approver public keys.
+    pub fn new(
+        approvers: Vec<(PublicKeyCommitment, AuthScheme)>,
+        default_threshold: u32,
+        psm_config: PsmConfig,
+    ) -> Result<Self, AccountError> {
+        let multisig = AuthMultisigConfig::new(approvers, default_threshold)?;
+        if multisig
+            .approvers()
+            .iter()
+            .any(|(approver, _)| *approver == psm_config.pub_key())
+        {
+            return Err(AccountError::other(
+                "private state manager public key must be different from approvers",
+            ));
+        }
+
+        Ok(Self { multisig, psm_config })
+    }
+
+    /// Attaches a per-procedure threshold map. Each procedure threshold must be at least 1 and
+    /// at most the number of approvers.
+    pub fn with_proc_thresholds(
+        mut self,
+        proc_thresholds: Vec<(Word, u32)>,
+    ) -> Result<Self, AccountError> {
+        self.multisig = self.multisig.with_proc_thresholds(proc_thresholds)?;
+        Ok(self)
+    }
+
+    /// Returns the configured approvers together with their authentication schemes.
+    pub fn approvers(&self) -> &[(PublicKeyCommitment, AuthScheme)] {
+        self.multisig.approvers()
+    }
+
+    /// Returns the default signature threshold.
+    pub fn default_threshold(&self) -> u32 {
+        self.multisig.default_threshold()
+    }
+
+    /// Returns the per-procedure threshold overrides.
+    pub fn proc_thresholds(&self) -> &[(Word, u32)] {
+        self.multisig.proc_thresholds()
+    }
+
+    /// Returns the private state manager signer configuration.
+    pub fn psm_config(&self) -> PsmConfig {
+        self.psm_config
+    }
+
+    fn into_parts(self) -> (AuthMultisigConfig, PsmConfig) {
+        (self.multisig, self.psm_config)
+    }
+}
+
+/// An [`AccountComponent`] implementing a multisig authentication with a private state manager.
+///
+/// It enforces a threshold of approver signatures for every transaction, with optional
+/// per-procedure threshold overrides. When a Private State Manager (PSM) is configured,
+/// multisig authorization is combined with PSM authorization, so operations require both
+/// multisig approval and a valid PSM signature. This substantially mitigates low-threshold
+/// state-withholding scenarios since the PSM is expected to forward state updates to other
+/// approvers.
+///
+/// This component supports all account types.
+#[derive(Debug)]
+pub struct AuthMultisigPsm {
+    multisig: AuthMultisig,
+    psm_config: PsmConfig,
+}
+
+impl AuthMultisigPsm {
+    /// The name of the component.
+    pub const NAME: &'static str = "miden::standards::components::auth::multisig_psm";
+
+    /// Creates a new [`AuthMultisigPsm`] component from the provided configuration.
+    pub fn new(config: AuthMultisigPsmConfig) -> Result<Self, AccountError> {
+        let (multisig_config, psm_config) = config.into_parts();
+        Ok(Self {
+            multisig: AuthMultisig::new(multisig_config)?,
+            psm_config,
+        })
+    }
+
+    /// Returns the [`StorageSlotName`] where the threshold configuration is stored.
+    pub fn threshold_config_slot() -> &'static StorageSlotName {
+        AuthMultisig::threshold_config_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the approver public keys are stored.
+    pub fn approver_public_keys_slot() -> &'static StorageSlotName {
+        AuthMultisig::approver_public_keys_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the approver scheme IDs are stored.
+    pub fn approver_scheme_ids_slot() -> &'static StorageSlotName {
+        AuthMultisig::approver_scheme_ids_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the executed transactions are stored.
+    pub fn executed_transactions_slot() -> &'static StorageSlotName {
+        AuthMultisig::executed_transactions_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the procedure thresholds are stored.
+    pub fn procedure_thresholds_slot() -> &'static StorageSlotName {
+        AuthMultisig::procedure_thresholds_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the private state manager public key is stored.
+    pub fn psm_public_key_slot() -> &'static StorageSlotName {
+        PsmConfig::public_key_slot()
+    }
+
+    /// Returns the [`StorageSlotName`] where the private state manager scheme IDs are stored.
+    pub fn psm_scheme_id_slot() -> &'static StorageSlotName {
+        PsmConfig::scheme_id_slot()
+    }
+
+    /// Returns the storage slot schema for the threshold configuration slot.
+    pub fn threshold_config_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        AuthMultisig::threshold_config_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the approver public keys slot.
+    pub fn approver_public_keys_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        AuthMultisig::approver_public_keys_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the approver scheme IDs slot.
+    pub fn approver_auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        AuthMultisig::approver_auth_scheme_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the executed transactions slot.
+    pub fn executed_transactions_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        AuthMultisig::executed_transactions_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the procedure thresholds slot.
+    pub fn procedure_thresholds_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        AuthMultisig::procedure_thresholds_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the private state manager public key slot.
+    pub fn psm_public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        PsmConfig::public_key_slot_schema()
+    }
+
+    /// Returns the storage slot schema for the private state manager scheme IDs slot.
+    pub fn psm_auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) {
+        PsmConfig::auth_scheme_slot_schema()
+    }
+
+    /// Returns the [`AccountComponentMetadata`] for this component.
+    pub fn component_metadata() -> AccountComponentMetadata {
+        let storage_schema = StorageSchema::new([
+            Self::threshold_config_slot_schema(),
+            Self::approver_public_keys_slot_schema(),
+            Self::approver_auth_scheme_slot_schema(),
+            Self::executed_transactions_slot_schema(),
+            Self::procedure_thresholds_slot_schema(),
+            Self::psm_public_key_slot_schema(),
+            Self::psm_auth_scheme_slot_schema(),
+        ])
+        .expect("storage schema should be valid");
+
+        AccountComponentMetadata::new(Self::NAME, AccountType::all())
+            .with_description(
+                "Multisig authentication component with private state manager \
+                 using hybrid signature schemes",
+            )
+            .with_storage_schema(storage_schema)
+    }
+}
+
+impl From<AuthMultisigPsm> for AccountComponent {
+    fn from(multisig: AuthMultisigPsm) -> Self {
+        let AuthMultisigPsm { multisig, psm_config } = multisig;
+        let multisig_component = AccountComponent::from(multisig);
+        let (psm_slots, psm_slot_metadata) = psm_config.into_component_parts();
+
+        let mut storage_slots = multisig_component.storage_slots().to_vec();
+        storage_slots.extend(psm_slots);
+
+        let mut slot_schemas: Vec<(StorageSlotName, StorageSlotSchema)> = multisig_component
+            .storage_schema()
+            .iter()
+            .map(|(slot_name, slot_schema)| (slot_name.clone(), slot_schema.clone()))
+            .collect();
+        slot_schemas.extend(psm_slot_metadata);
+
+        let storage_schema =
+            StorageSchema::new(slot_schemas).expect("storage schema should be valid");
+
+        let metadata = AccountComponentMetadata::new(
+            AuthMultisigPsm::NAME,
+            multisig_component.supported_types().clone(),
+        )
+        .with_description(multisig_component.metadata().description())
+        .with_version(multisig_component.metadata().version().clone())
+        .with_storage_schema(storage_schema);
+
+        AccountComponent::new(multisig_psm_library(), storage_slots, metadata).expect(
+            "Multisig auth component should satisfy the requirements of a valid account component",
+        )
+    }
+}
+
+// TESTS
+// ================================================================================================
+
+#[cfg(test)]
+mod tests {
+    use alloc::string::ToString;
+
+    use miden_protocol::Word;
+    use miden_protocol::account::AccountBuilder;
+    use miden_protocol::account::auth::AuthSecretKey;
+
+    use super::*;
+    use crate::account::wallets::BasicWallet;
+
+    /// Test multisig component setup with various configurations
+    #[test]
+    fn test_multisig_component_setup() {
+        // Create test secret keys
+        let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2();
+        let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2();
+        let sec_key_3 = AuthSecretKey::new_falcon512_poseidon2();
+        let psm_key = AuthSecretKey::new_ecdsa_k256_keccak();
+
+        // Create approvers list for multisig config
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+            (sec_key_3.public_key().to_commitment(), sec_key_3.auth_scheme()),
+        ];
+
+        let threshold = 2u32;
+
+        // Create multisig component
+        let multisig_component = AuthMultisigPsm::new(
+            AuthMultisigPsmConfig::new(
+                approvers.clone(),
+                threshold,
+                PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()),
+            )
+            .expect("invalid multisig config"),
+        )
+        .expect("multisig component creation failed");
+
+        // Build account with multisig component
+        let account = AccountBuilder::new([0; 32])
+            .with_auth_component(multisig_component)
+            .with_component(BasicWallet)
+            .build()
+            .expect("account building failed");
+
+        // Verify config slot: [threshold, num_approvers, 0, 0]
+        let config_slot = account
+            .storage()
+            .get_item(AuthMultisigPsm::threshold_config_slot())
+            .expect("config storage slot access failed");
+        assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0]));
+
+        // Verify approver pub keys slot
+        for (i, (expected_pub_key, _)) in approvers.iter().enumerate() {
+            let stored_pub_key = account
+                .storage()
+                .get_map_item(
+                    AuthMultisigPsm::approver_public_keys_slot(),
+                    Word::from([i as u32, 0, 0, 0]),
+                )
+                .expect("approver public key storage map access failed");
+            assert_eq!(stored_pub_key, Word::from(*expected_pub_key));
+        }
+
+        // Verify approver scheme IDs slot
+        for (i, (_, expected_auth_scheme)) in approvers.iter().enumerate() {
+            let stored_scheme_id = account
+                .storage()
+                .get_map_item(
+                    AuthMultisigPsm::approver_scheme_ids_slot(),
+                    Word::from([i as u32, 0, 0, 0]),
+                )
+                .expect("approver scheme ID storage map access failed");
+            assert_eq!(stored_scheme_id, Word::from([*expected_auth_scheme as u32, 0, 0, 0]));
+        }
+
+        // Verify private state manager signer is configured.
+        let psm_public_key = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("private state manager public key storage map access failed");
+        assert_eq!(psm_public_key, Word::from(psm_key.public_key().to_commitment()));
+
+        let psm_scheme_id = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("private state manager scheme ID storage map access failed");
+        assert_eq!(psm_scheme_id, Word::from([psm_key.auth_scheme() as u32, 0, 0, 0]));
+    }
+
+    /// Test multisig component with minimum threshold (1 of 1)
+    #[test]
+    fn test_multisig_component_minimum_threshold() {
+        let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment();
+        let psm_key = AuthSecretKey::new_falcon512_poseidon2();
+        let approvers = vec![(pub_key, AuthScheme::EcdsaK256Keccak)];
+        let threshold = 1u32;
+
+        let multisig_component = AuthMultisigPsm::new(
+            AuthMultisigPsmConfig::new(
+                approvers.clone(),
+                threshold,
+                PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()),
+            )
+            .expect("invalid multisig config"),
+        )
+        .expect("multisig component creation failed");
+
+        let account = AccountBuilder::new([0; 32])
+            .with_auth_component(multisig_component)
+            .with_component(BasicWallet)
+            .build()
+            .expect("account building failed");
+
+        // Verify storage layout
+        let config_slot = account
+            .storage()
+            .get_item(AuthMultisigPsm::threshold_config_slot())
+            .expect("config storage slot access failed");
+        assert_eq!(config_slot, Word::from([threshold, approvers.len() as u32, 0, 0]));
+
+        let stored_pub_key = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::approver_public_keys_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("approver pub keys storage map access failed");
+        assert_eq!(stored_pub_key, Word::from(pub_key));
+
+        let stored_scheme_id = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::approver_scheme_ids_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("approver scheme IDs storage map access failed");
+        assert_eq!(stored_scheme_id, Word::from([AuthScheme::EcdsaK256Keccak as u32, 0, 0, 0]));
+    }
+
+    /// Test multisig component setup with a private state manager.
+    #[test]
+    fn test_multisig_component_with_psm() {
+        let sec_key_1 = AuthSecretKey::new_falcon512_poseidon2();
+        let sec_key_2 = AuthSecretKey::new_falcon512_poseidon2();
+        let psm_key = AuthSecretKey::new_ecdsa_k256_keccak();
+
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+        ];
+
+        let multisig_component = AuthMultisigPsm::new(
+            AuthMultisigPsmConfig::new(
+                approvers,
+                2,
+                PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()),
+            )
+            .expect("invalid multisig config"),
+        )
+        .expect("multisig component creation failed");
+
+        let account = AccountBuilder::new([0; 32])
+            .with_auth_component(multisig_component)
+            .with_component(BasicWallet)
+            .build()
+            .expect("account building failed");
+
+        let psm_public_key = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("private state manager public key storage map access failed");
+        assert_eq!(psm_public_key, Word::from(psm_key.public_key().to_commitment()));
+
+        let psm_scheme_id = account
+            .storage()
+            .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0]))
+            .expect("private state manager scheme ID storage map access failed");
+        assert_eq!(psm_scheme_id, Word::from([psm_key.auth_scheme() as u32, 0, 0, 0]));
+    }
+
+    /// Test multisig component error cases
+    #[test]
+    fn test_multisig_component_error_cases() {
+        let pub_key = AuthSecretKey::new_ecdsa_k256_keccak().public_key().to_commitment();
+        let psm_key = AuthSecretKey::new_falcon512_poseidon2();
+        let approvers = vec![(pub_key, AuthScheme::EcdsaK256Keccak)];
+
+        // Test threshold > number of approvers (should fail)
+        let result = AuthMultisigPsmConfig::new(
+            approvers,
+            2,
+            PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()),
+        );
+
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("threshold cannot be greater than number of approvers")
+        );
+    }
+
+    /// Test multisig component with duplicate approvers (should fail)
+    #[test]
+    fn test_multisig_component_duplicate_approvers() {
+        // Create secret keys for approvers
+        let sec_key_1 = AuthSecretKey::new_ecdsa_k256_keccak();
+        let sec_key_2 = AuthSecretKey::new_ecdsa_k256_keccak();
+        let psm_key = AuthSecretKey::new_falcon512_poseidon2();
+
+        // Create approvers list with duplicate public keys
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+        ];
+
+        let result = AuthMultisigPsmConfig::new(
+            approvers,
+            2,
+            PsmConfig::new(psm_key.public_key().to_commitment(), psm_key.auth_scheme()),
+        );
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("duplicate approver public keys are not allowed")
+        );
+    }
+
+    /// Test multisig component rejects a private state manager key which is already an approver.
+    #[test]
+    fn test_multisig_component_psm_not_approver() {
+        let sec_key_1 = AuthSecretKey::new_ecdsa_k256_keccak();
+        let sec_key_2 = AuthSecretKey::new_ecdsa_k256_keccak();
+
+        let approvers = vec![
+            (sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+            (sec_key_2.public_key().to_commitment(), sec_key_2.auth_scheme()),
+        ];
+
+        let result = AuthMultisigPsmConfig::new(
+            approvers,
+            2,
+            PsmConfig::new(sec_key_1.public_key().to_commitment(), sec_key_1.auth_scheme()),
+        );
+
+        assert!(
+            result
+                .unwrap_err()
+                .to_string()
+                .contains("private state manager public key must be different from approvers")
+        );
+    }
+}
diff --git a/crates/miden-standards/src/account/auth/no_auth.rs b/crates/miden-standards/src/account/auth/no_auth.rs
index 1424ecbecb..7fb4396f0f 100644
--- a/crates/miden-standards/src/account/auth/no_auth.rs
+++ b/crates/miden-standards/src/account/auth/no_auth.rs
@@ -1,4 +1,5 @@
-use miden_protocol::account::AccountComponent;
+use miden_protocol::account::component::AccountComponentMetadata;
+use miden_protocol::account::{AccountComponent, AccountType};
 
 use crate::account::components::no_auth_library;
 
@@ -19,10 +20,19 @@ use crate::account::components::no_auth_library;
 pub struct NoAuth;
 
 impl NoAuth {
+    /// The name of the component.
+    pub const NAME: &'static str = "miden::standards::components::auth::no_auth";
+
     /// Creates a new [`NoAuth`] component.
     pub fn new() -> Self {
         Self
     }
+
+    /// Returns the [`AccountComponentMetadata`] for this component.
+    pub fn component_metadata() -> AccountComponentMetadata {
+        AccountComponentMetadata::new(Self::NAME, AccountType::all())
+            .with_description("No authentication component")
+    }
 }
 
 impl Default for NoAuth {
@@ -33,9 +43,10 @@ impl Default for NoAuth {
 
 impl From<NoAuth> for AccountComponent {
     fn from(_: NoAuth) -> Self {
-        AccountComponent::new(no_auth_library(), vec![])
+        let metadata = NoAuth::component_metadata();
+
+        AccountComponent::new(no_auth_library(), vec![], metadata)
             .expect("NoAuth component should satisfy the requirements of a valid account component")
-            .with_supports_all_types()
     }
 }
 
diff --git a/crates/miden-standards/src/account/auth/singlesig.rs b/crates/miden-standards/src/account/auth/singlesig.rs
new file mode 100644
index 0000000000..ee1e8401ef
--- /dev/null
+++ b/crates/miden-standards/src/account/auth/singlesig.rs
@@ -0,0 +1,115 @@
+use miden_protocol::Word;
+use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment};
+use miden_protocol::account::component::{
+    AccountComponentMetadata,
+    SchemaType,
+    StorageSchema,
+    StorageSlotSchema,
+};
+use miden_protocol::account::{AccountComponent, AccountType, StorageSlot, StorageSlotName};
+use miden_protocol::utils::sync::LazyLock;
+
+use crate::account::components::singlesig_library;
+
+// CONSTANTS
+// ================================================================================================
+
+static PUBKEY_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::auth::singlesig::pub_key")
+        .expect("storage slot name should be valid")
+});
+
+static SCHEME_ID_SLOT_NAME: LazyLock<StorageSlotName> = LazyLock::new(|| {
+    StorageSlotName::new("miden::standards::auth::singlesig::scheme")
+        .expect("storage slot name should be valid")
+});
+
+/// An [`AccountComponent`] implementing the signature scheme for authentication
+/// of transactions.
+/// +/// This component exports `auth_tx`, which loads the public key and signature scheme id from +/// storage and delegates transaction authentication to +/// `miden::standards::auth::signature::authenticate_transaction`. +/// +/// When linking against this component, the `miden::standards` library must be available to the +/// assembler (which also implies availability of `miden::protocol`). This is the case when using +/// [`CodeBuilder`][builder]. +/// +/// This component supports all account types. +/// +/// [builder]: crate::code_builder::CodeBuilder +pub struct AuthSingleSig { + pub_key: PublicKeyCommitment, + auth_scheme: AuthScheme, +} + +impl AuthSingleSig { + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::auth::singlesig"; + + /// Creates a new [`AuthSingleSig`] component with the given `public_key`. + pub fn new(pub_key: PublicKeyCommitment, auth_scheme: AuthScheme) -> Self { + Self { pub_key, auth_scheme } + } + + /// Returns the [`StorageSlotName`] where the public key is stored. + pub fn public_key_slot() -> &'static StorageSlotName { + &PUBKEY_SLOT_NAME + } + + // Returns the [`StorageSlotName`] where the scheme ID is stored. + pub fn scheme_id_slot() -> &'static StorageSlotName { + &SCHEME_ID_SLOT_NAME + } + + /// Returns the storage slot schema for the public key slot. + pub fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::public_key_slot().clone(), + StorageSlotSchema::value("Public key commitment", SchemaType::pub_key()), + ) + } + /// Returns the storage slot schema for the scheme ID slot. + pub fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::scheme_id_slot().clone(), + StorageSlotSchema::value("Scheme ID", SchemaType::auth_scheme()), + ) + } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new(vec![ + Self::public_key_slot_schema(), + Self::auth_scheme_slot_schema(), + ]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description( + "Authentication component using ECDSA K256 Keccak or Falcon512 Poseidon2 signature scheme", + ) + .with_storage_schema(storage_schema) + } +} + +impl From for AccountComponent { + fn from(basic_signature: AuthSingleSig) -> Self { + let metadata = AuthSingleSig::component_metadata(); + + let storage_slots = vec![ + StorageSlot::with_value( + AuthSingleSig::public_key_slot().clone(), + basic_signature.pub_key.into(), + ), + StorageSlot::with_value( + AuthSingleSig::scheme_id_slot().clone(), + Word::from([basic_signature.auth_scheme.as_u8(), 0, 0, 0]), + ), + ]; + + AccountComponent::new(singlesig_library(), storage_slots, metadata).expect( + "singlesig component should satisfy the requirements of a valid account component", + ) + } +} diff --git a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak_acl.rs b/crates/miden-standards/src/account/auth/singlesig_acl.rs similarity index 65% rename from crates/miden-standards/src/account/auth/ecdsa_k256_keccak_acl.rs rename to crates/miden-standards/src/account/auth/singlesig_acl.rs index 276a85f1bf..70ff9a1b1d 100644 --- a/crates/miden-standards/src/account/auth/ecdsa_k256_keccak_acl.rs +++ b/crates/miden-standards/src/account/auth/singlesig_acl.rs @@ -1,37 +1,54 @@ use alloc::vec::Vec; -use miden_protocol::Word; -use miden_protocol::account::auth::PublicKeyCommitment; +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; use miden_protocol::account::{ AccountCode, AccountComponent, + AccountType, StorageMap, + StorageMapKey, StorageSlot, 
StorageSlotName, }; use miden_protocol::errors::AccountError; use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use crate::account::components::singlesig_acl_library; -use crate::account::components::ecdsa_k256_keccak_acl_library; +// CONSTANTS +// ================================================================================================ static PUBKEY_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_acl::public_key") + StorageSlotName::new("miden::standards::auth::singlesig_acl::pub_key") + .expect("storage slot name should be valid") +}); + +static SCHEME_ID_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::auth::singlesig_acl::scheme") .expect("storage slot name should be valid") }); static CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_acl::config") + StorageSlotName::new("miden::standards::auth::singlesig_acl::config") .expect("storage slot name should be valid") }); static TRIGGER_PROCEDURE_ROOT_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::auth::ecdsa_k256_keccak_acl::trigger_procedure_roots") + StorageSlotName::new("miden::standards::auth::singlesig_acl::trigger_procedure_roots") .expect("storage slot name should be valid") }); -/// Configuration for [`AuthEcdsaK256KeccakAcl`] component. +/// Configuration for [`AuthSingleSigAcl`] component. #[derive(Debug, Clone, PartialEq, Eq)] -pub struct AuthEcdsaK256KeccakAclConfig { +pub struct AuthSingleSigAclConfig { /// List of procedure roots that require authentication when called. 
pub auth_trigger_procedures: Vec, /// When `false`, creating output notes (sending notes to other accounts) requires @@ -42,7 +59,7 @@ pub struct AuthEcdsaK256KeccakAclConfig { pub allow_unauthorized_input_notes: bool, } -impl AuthEcdsaK256KeccakAclConfig { +impl AuthSingleSigAclConfig { /// Creates a new configuration with no trigger procedures and both flags set to `false` (most /// restrictive). pub fn new() -> Self { @@ -72,14 +89,14 @@ impl AuthEcdsaK256KeccakAclConfig { } } -impl Default for AuthEcdsaK256KeccakAclConfig { +impl Default for AuthSingleSigAclConfig { fn default() -> Self { Self::new() } } -/// An [`AccountComponent`] implementing a procedure-based Access Control List (ACL) using the -/// EcdsaK256Keccak signature scheme for authentication of transactions. +/// An [`AccountComponent`] implementing a procedure-based Access Control List (ACL) using either +/// the EcdsaK256Keccak or Falcon512 Poseidon2 signature scheme for authentication of transactions. /// /// This component provides fine-grained authentication control based on three conditions: /// 1. **Procedure-based authentication**: Requires authentication when any of the specified trigger @@ -118,10 +135,10 @@ impl Default for AuthEcdsaK256KeccakAclConfig { /// allowing free note processing. 
/// /// ## Storage Layout -/// - Slot 0(value): Public key (same as EcdsaK256Keccak) -/// - Slot 1(value): [num_trigger_procs, allow_unauthorized_output_notes, -/// allow_unauthorized_input_notes, 0] -/// - Slot 2(map): A map with trigger procedure roots +/// - [`Self::public_key_slot`]: Public key +/// - [`Self::config_slot`]: `[num_trigger_procs, allow_unauthorized_output_notes, +/// allow_unauthorized_input_notes, 0]` +/// - [`Self::trigger_procedure_roots_slot`]: A map with trigger procedure roots /// /// ## Important Note on Procedure Detection /// The procedure-based authentication relies on the `was_procedure_called` kernel function, @@ -132,20 +149,24 @@ impl Default for AuthEcdsaK256KeccakAclConfig { /// procedures for authentication. /// /// This component supports all account types. -pub struct AuthEcdsaK256KeccakAcl { +pub struct AuthSingleSigAcl { pub_key: PublicKeyCommitment, - config: AuthEcdsaK256KeccakAclConfig, + auth_scheme: AuthScheme, + config: AuthSingleSigAclConfig, } -impl AuthEcdsaK256KeccakAcl { - /// Creates a new [`AuthEcdsaK256KeccakAcl`] component with the given `public_key` and +impl AuthSingleSigAcl { + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::auth::singlesig_acl"; + /// Creates a new [`AuthSingleSigAcl`] component with the given `public_key` and /// configuration. /// /// # Panics /// Panics if more than [AccountCode::MAX_NUM_PROCEDURES] procedures are specified. pub fn new( pub_key: PublicKeyCommitment, - config: AuthEcdsaK256KeccakAclConfig, + auth_scheme: AuthScheme, + config: AuthSingleSigAclConfig, ) -> Result { let max_procedures = AccountCode::MAX_NUM_PROCEDURES; if config.auth_trigger_procedures.len() > max_procedures { @@ -154,7 +175,7 @@ impl AuthEcdsaK256KeccakAcl { ))); } - Ok(Self { pub_key, config }) + Ok(Self { pub_key, auth_scheme, config }) } /// Returns the [`StorageSlotName`] where the public key is stored. 
@@ -162,6 +183,11 @@ impl AuthEcdsaK256KeccakAcl { &PUBKEY_SLOT_NAME } + /// Returns the [`StorageSlotName`] where the scheme ID is stored. + pub fn scheme_id_slot() -> &'static StorageSlotName { + &SCHEME_ID_SLOT_NAME + } + /// Returns the [`StorageSlotName`] where the component's configuration is stored. pub fn config_slot() -> &'static StorageSlotName { &CONFIG_SLOT_NAME @@ -171,26 +197,93 @@ impl AuthEcdsaK256KeccakAcl { pub fn trigger_procedure_roots_slot() -> &'static StorageSlotName { &TRIGGER_PROCEDURE_ROOT_SLOT_NAME } + + /// Returns the storage slot schema for the public key slot. + pub fn public_key_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::public_key_slot().clone(), + StorageSlotSchema::value("Public key commitment", SchemaType::pub_key()), + ) + } + + /// Returns the storage slot schema for the configuration slot. + pub fn config_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::config_slot().clone(), + StorageSlotSchema::value( + "ACL configuration", + [ + FeltSchema::u32("num_trigger_procs").with_default(Felt::new(0)), + FeltSchema::bool("allow_unauthorized_output_notes").with_default(Felt::new(0)), + FeltSchema::bool("allow_unauthorized_input_notes").with_default(Felt::new(0)), + FeltSchema::new_void(), + ], + ), + ) + } + + // Returns the storage slot schema for the scheme ID slot. + pub fn auth_scheme_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::scheme_id_slot().clone(), + StorageSlotSchema::value("Scheme ID", SchemaType::auth_scheme()), + ) + } + + /// Returns the storage slot schema for the trigger procedure roots slot. + pub fn trigger_procedure_roots_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + ( + Self::trigger_procedure_roots_slot().clone(), + StorageSlotSchema::map( + "Trigger procedure roots", + SchemaType::u32(), + SchemaType::native_word(), + ), + ) + } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new(vec![ + Self::public_key_slot_schema(), + Self::auth_scheme_slot_schema(), + Self::config_slot_schema(), + Self::trigger_procedure_roots_slot_schema(), + ]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description( + "Authentication component with procedure-based ACL using ECDSA K256 Keccak or Falcon512 Poseidon2 signature scheme", + ) + .with_storage_schema(storage_schema) + } } -impl From for AccountComponent { - fn from(ecdsa: AuthEcdsaK256KeccakAcl) -> Self { +impl From for AccountComponent { + fn from(singlesig_acl: AuthSingleSigAcl) -> Self { let mut storage_slots = Vec::with_capacity(3); // Public key slot storage_slots.push(StorageSlot::with_value( - AuthEcdsaK256KeccakAcl::public_key_slot().clone(), - ecdsa.pub_key.into(), + AuthSingleSigAcl::public_key_slot().clone(), + singlesig_acl.pub_key.into(), + )); + + // Scheme ID slot + storage_slots.push(StorageSlot::with_value( + AuthSingleSigAcl::scheme_id_slot().clone(), + Word::from([singlesig_acl.auth_scheme.as_u8(), 0, 0, 0]), )); // Config slot - let num_procs = ecdsa.config.auth_trigger_procedures.len() as u32; + let num_procs = singlesig_acl.config.auth_trigger_procedures.len() as u32; storage_slots.push(StorageSlot::with_value( - AuthEcdsaK256KeccakAcl::config_slot().clone(), + AuthSingleSigAcl::config_slot().clone(), Word::from([ num_procs, - u32::from(ecdsa.config.allow_unauthorized_output_notes), - u32::from(ecdsa.config.allow_unauthorized_input_notes), + u32::from(singlesig_acl.config.allow_unauthorized_output_notes), + u32::from(singlesig_acl.config.allow_unauthorized_input_notes), 0, ]), )); @@ -198,34 +291,37 @@ impl From for AccountComponent { // Trigger procedure roots slot // We add the map even if there are no trigger procedures, to always maintain the same // storage layout. 
- let map_entries = ecdsa + let map_entries = singlesig_acl .config .auth_trigger_procedures .iter() .enumerate() - .map(|(i, proc_root)| (Word::from([i as u32, 0, 0, 0]), *proc_root)); + .map(|(i, proc_root)| (StorageMapKey::from_index(i as u32), *proc_root)); // Safe to unwrap because we know that the map keys are unique. storage_slots.push(StorageSlot::with_map( - AuthEcdsaK256KeccakAcl::trigger_procedure_roots_slot().clone(), + AuthSingleSigAcl::trigger_procedure_roots_slot().clone(), StorageMap::with_entries(map_entries).unwrap(), )); - AccountComponent::new(ecdsa_k256_keccak_acl_library(), storage_slots) - .expect( - "ACL auth component should satisfy the requirements of a valid account component", - ) - .with_supports_all_types() + let metadata = AuthSingleSigAcl::component_metadata(); + + AccountComponent::new(singlesig_acl_library(), storage_slots, metadata).expect( + "singlesig ACL component should satisfy the requirements of a valid account component", + ) } } +// TESTS +// ================================================================================================ + #[cfg(test)] mod tests { use miden_protocol::Word; use miden_protocol::account::AccountBuilder; use super::*; - use crate::account::components::WellKnownComponent; + use crate::account::components::StandardAccountComponent; use crate::account::wallets::BasicWallet; /// Test configuration for parametrized ACL tests @@ -243,7 +339,8 @@ mod tests { /// Helper function to get the basic wallet procedures for testing fn get_basic_wallet_procedures() -> Vec { // Get the two trigger procedures from BasicWallet: `receive_asset`, `move_asset_to_note`. 
- let procedures: Vec = WellKnownComponent::BasicWallet.procedure_digests().collect(); + let procedures: Vec = + StandardAccountComponent::BasicWallet.procedure_digests().collect(); assert_eq!(procedures.len(), 2); procedures @@ -252,9 +349,10 @@ mod tests { /// Parametrized test helper for ACL component testing fn test_acl_component(config: AclTestConfig) { let public_key = PublicKeyCommitment::from(Word::empty()); + let auth_scheme = AuthScheme::Falcon512Poseidon2; // Build the configuration - let mut acl_config = AuthEcdsaK256KeccakAclConfig::new() + let mut acl_config = AuthSingleSigAclConfig::new() .with_allow_unauthorized_output_notes(config.allow_unauthorized_output_notes) .with_allow_unauthorized_input_notes(config.allow_unauthorized_input_notes); @@ -267,8 +365,8 @@ mod tests { }; // Create component and account - let component = - AuthEcdsaK256KeccakAcl::new(public_key, acl_config).expect("component creation failed"); + let component = AuthSingleSigAcl::new(public_key, auth_scheme, acl_config) + .expect("component creation failed"); let account = AccountBuilder::new([0; 32]) .with_auth_component(component) @@ -279,14 +377,14 @@ mod tests { // Check public key storage let public_key_slot = account .storage() - .get_item(AuthEcdsaK256KeccakAcl::public_key_slot()) + .get_item(AuthSingleSigAcl::public_key_slot()) .expect("public key storage slot access failed"); assert_eq!(public_key_slot, public_key.into()); // Check configuration storage let config_slot = account .storage() - .get_item(AuthEcdsaK256KeccakAcl::config_slot()) + .get_item(AuthSingleSigAcl::config_slot()) .expect("config storage slot access failed"); assert_eq!(config_slot, config.expected_config_slot); @@ -296,7 +394,7 @@ mod tests { let proc_root = account .storage() .get_map_item( - AuthEcdsaK256KeccakAcl::trigger_procedure_roots_slot(), + AuthSingleSigAcl::trigger_procedure_roots_slot(), Word::from([i as u32, 0, 0, 0]), ) .expect("storage map access failed"); @@ -306,7 +404,7 @@ mod tests { 
// When no procedures, the map should return empty for key [0,0,0,0] let proc_root = account .storage() - .get_map_item(AuthEcdsaK256KeccakAcl::trigger_procedure_roots_slot(), Word::empty()) + .get_map_item(AuthSingleSigAcl::trigger_procedure_roots_slot(), Word::empty()) .expect("storage map access failed"); assert_eq!(proc_root, Word::empty()); } @@ -314,7 +412,7 @@ mod tests { /// Test ACL component with no procedures and both authorization flags set to false #[test] - fn test_ecdsa_k256_keccak_acl_no_procedures() { + fn test_singlesig_acl_no_procedures() { test_acl_component(AclTestConfig { with_procedures: false, allow_unauthorized_output_notes: false, @@ -325,7 +423,7 @@ mod tests { /// Test ACL component with two procedures and both authorization flags set to false #[test] - fn test_ecdsa_k256_keccak_acl_with_two_procedures() { + fn test_singlesig_acl_with_two_procedures() { test_acl_component(AclTestConfig { with_procedures: true, allow_unauthorized_output_notes: false, diff --git a/crates/miden-standards/src/account/components/mod.rs b/crates/miden-standards/src/account/components/mod.rs index 3b0f7e2f3f..b840ce7ac5 100644 --- a/crates/miden-standards/src/account/components/mod.rs +++ b/crates/miden-standards/src/account/components/mod.rs @@ -1,11 +1,11 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; -use miden_processor::MastNodeExt; +use miden_processor::mast::MastNodeExt; use miden_protocol::Word; use miden_protocol::account::AccountProcedureRoot; use miden_protocol::assembly::{Library, LibraryExport}; -use miden_protocol::utils::Deserializable; +use miden_protocol::utils::serde::Deserializable; use miden_protocol::utils::sync::LazyLock; use crate::account::interface::AccountComponentInterface; @@ -22,62 +22,51 @@ static BASIC_WALLET_LIBRARY: LazyLock = LazyLock::new(|| { Library::read_from_bytes(bytes).expect("Shipped Basic Wallet library is well-formed") }); -// AUTH LIBRARIES +// ACCESS LIBRARIES // 
================================================================================================ -/// Initialize the ECDSA K256 Keccak library only once. -static ECDSA_K256_KECCAK_LIBRARY: LazyLock = LazyLock::new(|| { +// Initialize the Ownable2Step library only once. +static OWNABLE2STEP_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!( env!("OUT_DIR"), - "/assets/account_components/auth/ecdsa_k256_keccak.masl" + "/assets/account_components/access/ownable2step.masl" )); - Library::read_from_bytes(bytes).expect("Shipped Ecdsa K256 Keccak library is well-formed") + Library::read_from_bytes(bytes).expect("Shipped Ownable2Step library is well-formed") }); -// Initialize the ECDSA K256 Keccak ACL library only once. -static ECDSA_K256_KECCAK_ACL_LIBRARY: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!( - env!("OUT_DIR"), - "/assets/account_components/auth/ecdsa_k256_keccak_acl.masl" - )); - Library::read_from_bytes(bytes).expect("Shipped Ecdsa K256 Keccak ACL library is well-formed") -}); +// AUTH LIBRARIES +// ================================================================================================ -/// Initialize the ECDSA K256 Keccak Multisig library only once. -static ECDSA_K256_KECCAK_MULTISIG_LIBRARY: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!( - env!("OUT_DIR"), - "/assets/account_components/auth/ecdsa_k256_keccak_multisig.masl" - )); - Library::read_from_bytes(bytes) - .expect("Shipped Multisig Ecdsa K256 Keccak library is well-formed") +/// Initialize the ECDSA K256 Keccak library only once. +static SINGLESIG_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = + include_bytes!(concat!(env!("OUT_DIR"), "/assets/account_components/auth/singlesig.masl")); + Library::read_from_bytes(bytes).expect("Shipped Singlesig library is well-formed") }); -// Initialize the Falcon 512 RPO library only once. 
-static FALCON_512_RPO_LIBRARY: LazyLock = LazyLock::new(|| { +// Initialize the ECDSA K256 Keccak ACL library only once. +static SINGLESIG_ACL_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!( env!("OUT_DIR"), - "/assets/account_components/auth/falcon_512_rpo.masl" + "/assets/account_components/auth/singlesig_acl.masl" )); - Library::read_from_bytes(bytes).expect("Shipped Falcon 512 RPO library is well-formed") + Library::read_from_bytes(bytes).expect("Shipped Singlesig ACL library is well-formed") }); -// Initialize the Falcon 512 RPO ACL library only once. -static FALCON_512_RPO_ACL_LIBRARY: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!( - env!("OUT_DIR"), - "/assets/account_components/auth/falcon_512_rpo_acl.masl" - )); - Library::read_from_bytes(bytes).expect("Shipped Falcon 512 RPO ACL library is well-formed") +/// Initialize the Multisig library only once. +static MULTISIG_LIBRARY: LazyLock = LazyLock::new(|| { + let bytes = + include_bytes!(concat!(env!("OUT_DIR"), "/assets/account_components/auth/multisig.masl")); + Library::read_from_bytes(bytes).expect("Shipped Multisig library is well-formed") }); -// Initialize the Multisig Falcon 512 RPO library only once. -static FALCON_512_RPO_MULTISIG_LIBRARY: LazyLock = LazyLock::new(|| { +/// Initialize the Multisig PSM library only once. +static MULTISIG_PSM_LIBRARY: LazyLock = LazyLock::new(|| { let bytes = include_bytes!(concat!( env!("OUT_DIR"), - "/assets/account_components/auth/falcon_512_rpo_multisig.masl" + "/assets/account_components/auth/multisig_psm.masl" )); - Library::read_from_bytes(bytes).expect("Shipped Multisig Falcon 512 RPO library is well-formed") + Library::read_from_bytes(bytes).expect("Shipped Multisig PSM library is well-formed") }); // Initialize the NoAuth library only once. @@ -125,6 +114,11 @@ pub fn basic_wallet_library() -> Library { BASIC_WALLET_LIBRARY.clone() } +/// Returns the Ownable2Step Library. 
+pub fn ownable2step_library() -> Library { + OWNABLE2STEP_LIBRARY.clone() +} + /// Returns the Basic Fungible Faucet Library. pub fn basic_fungible_faucet_library() -> Library { BASIC_FUNGIBLE_FAUCET_LIBRARY.clone() @@ -140,29 +134,24 @@ pub fn storage_schema_library() -> Library { STORAGE_SCHEMA_LIBRARY.clone() } -/// Returns the ECDSA K256 Keccak Library. -pub fn ecdsa_k256_keccak_library() -> Library { - ECDSA_K256_KECCAK_LIBRARY.clone() -} - -/// Returns the ECDSA K256 Keccak ACL Library. -pub fn ecdsa_k256_keccak_acl_library() -> Library { - ECDSA_K256_KECCAK_ACL_LIBRARY.clone() +/// Returns the Singlesig Library. +pub fn singlesig_library() -> Library { + SINGLESIG_LIBRARY.clone() } -/// Returns the ECDSA K256 Keccak Multisig Library. -pub fn ecdsa_k256_keccak_multisig_library() -> Library { - ECDSA_K256_KECCAK_MULTISIG_LIBRARY.clone() +/// Returns the Singlesig ACL Library. +pub fn singlesig_acl_library() -> Library { + SINGLESIG_ACL_LIBRARY.clone() } -/// Returns the Falcon 512 RPO Library. -pub fn falcon_512_rpo_library() -> Library { - FALCON_512_RPO_LIBRARY.clone() +/// Returns the Multisig Library. +pub fn multisig_library() -> Library { + MULTISIG_LIBRARY.clone() } -/// Returns the Falcon 512 RPO ACL Library. -pub fn falcon_512_rpo_acl_library() -> Library { - FALCON_512_RPO_ACL_LIBRARY.clone() +/// Returns the Multisig PSM Library. +pub fn multisig_psm_library() -> Library { + MULTISIG_PSM_LIBRARY.clone() } /// Returns the NoAuth Library. @@ -170,41 +159,33 @@ pub fn no_auth_library() -> Library { NO_AUTH_LIBRARY.clone() } -/// Returns the Falcon 512 RPO Multisig Library. -pub fn falcon_512_rpo_multisig_library() -> Library { - FALCON_512_RPO_MULTISIG_LIBRARY.clone() -} - -// WELL KNOWN COMPONENTS +// STANDARD ACCOUNT COMPONENTS // ================================================================================================ -/// The enum holding the types of basic well-known account components provided by the `miden-lib`. 
-pub enum WellKnownComponent { +/// The enum holding the types of standard account components defined in the `miden-standards` +/// crate. +pub enum StandardAccountComponent { BasicWallet, BasicFungibleFaucet, NetworkFungibleFaucet, - AuthEcdsaK256Keccak, - AuthEcdsaK256KeccakAcl, - AuthEcdsaK256KeccakMultisig, - AuthFalcon512Rpo, - AuthFalcon512RpoAcl, - AuthFalcon512RpoMultisig, + AuthSingleSig, + AuthSingleSigAcl, + AuthMultisig, + AuthMultisigPsm, AuthNoAuth, } -impl WellKnownComponent { +impl StandardAccountComponent { /// Returns the iterator over digests of all procedures exported from the component. pub fn procedure_digests(&self) -> impl Iterator { let library = match self { Self::BasicWallet => BASIC_WALLET_LIBRARY.as_ref(), Self::BasicFungibleFaucet => BASIC_FUNGIBLE_FAUCET_LIBRARY.as_ref(), Self::NetworkFungibleFaucet => NETWORK_FUNGIBLE_FAUCET_LIBRARY.as_ref(), - Self::AuthEcdsaK256Keccak => ECDSA_K256_KECCAK_LIBRARY.as_ref(), - Self::AuthEcdsaK256KeccakAcl => ECDSA_K256_KECCAK_ACL_LIBRARY.as_ref(), - Self::AuthEcdsaK256KeccakMultisig => ECDSA_K256_KECCAK_MULTISIG_LIBRARY.as_ref(), - Self::AuthFalcon512Rpo => FALCON_512_RPO_LIBRARY.as_ref(), - Self::AuthFalcon512RpoAcl => FALCON_512_RPO_ACL_LIBRARY.as_ref(), - Self::AuthFalcon512RpoMultisig => FALCON_512_RPO_MULTISIG_LIBRARY.as_ref(), + Self::AuthSingleSig => SINGLESIG_LIBRARY.as_ref(), + Self::AuthSingleSigAcl => SINGLESIG_ACL_LIBRARY.as_ref(), + Self::AuthMultisig => MULTISIG_LIBRARY.as_ref(), + Self::AuthMultisigPsm => MULTISIG_PSM_LIBRARY.as_ref(), Self::AuthNoAuth => NO_AUTH_LIBRARY.as_ref(), }; @@ -248,22 +229,18 @@ impl WellKnownComponent { Self::NetworkFungibleFaucet => { component_interface_vec.push(AccountComponentInterface::NetworkFungibleFaucet) }, - Self::AuthEcdsaK256Keccak => { - component_interface_vec.push(AccountComponentInterface::AuthEcdsaK256Keccak) + Self::AuthSingleSig => { + component_interface_vec.push(AccountComponentInterface::AuthSingleSig) }, - Self::AuthEcdsaK256KeccakAcl 
=> { - component_interface_vec.push(AccountComponentInterface::AuthEcdsaK256KeccakAcl) + Self::AuthSingleSigAcl => { + component_interface_vec.push(AccountComponentInterface::AuthSingleSigAcl) }, - Self::AuthEcdsaK256KeccakMultisig => component_interface_vec - .push(AccountComponentInterface::AuthEcdsaK256KeccakMultisig), - Self::AuthFalcon512Rpo => { - component_interface_vec.push(AccountComponentInterface::AuthFalcon512Rpo) + Self::AuthMultisig => { + component_interface_vec.push(AccountComponentInterface::AuthMultisig) }, - Self::AuthFalcon512RpoAcl => { - component_interface_vec.push(AccountComponentInterface::AuthFalcon512RpoAcl) + Self::AuthMultisigPsm => { + component_interface_vec.push(AccountComponentInterface::AuthMultisigPsm) }, - Self::AuthFalcon512RpoMultisig => component_interface_vec - .push(AccountComponentInterface::AuthFalcon512RpoMultisig), Self::AuthNoAuth => { component_interface_vec.push(AccountComponentInterface::AuthNoAuth) }, @@ -271,22 +248,19 @@ impl WellKnownComponent { } } - /// Gets all well known components which could be constructed from the provided procedures map + /// Gets all standard components which could be constructed from the provided procedures map /// and pushes them to the `component_interface_vec`. 
- pub fn extract_well_known_components( + pub fn extract_standard_components( procedures_set: &mut BTreeSet, component_interface_vec: &mut Vec, ) { Self::BasicWallet.extract_component(procedures_set, component_interface_vec); Self::BasicFungibleFaucet.extract_component(procedures_set, component_interface_vec); Self::NetworkFungibleFaucet.extract_component(procedures_set, component_interface_vec); - Self::AuthEcdsaK256Keccak.extract_component(procedures_set, component_interface_vec); - Self::AuthEcdsaK256KeccakAcl.extract_component(procedures_set, component_interface_vec); - Self::AuthEcdsaK256KeccakMultisig - .extract_component(procedures_set, component_interface_vec); - Self::AuthFalcon512Rpo.extract_component(procedures_set, component_interface_vec); - Self::AuthFalcon512RpoAcl.extract_component(procedures_set, component_interface_vec); - Self::AuthFalcon512RpoMultisig.extract_component(procedures_set, component_interface_vec); + Self::AuthSingleSig.extract_component(procedures_set, component_interface_vec); + Self::AuthSingleSigAcl.extract_component(procedures_set, component_interface_vec); + Self::AuthMultisigPsm.extract_component(procedures_set, component_interface_vec); + Self::AuthMultisig.extract_component(procedures_set, component_interface_vec); Self::AuthNoAuth.extract_component(procedures_set, component_interface_vec); } } diff --git a/crates/miden-standards/src/account/faucets/basic_fungible.rs b/crates/miden-standards/src/account/faucets/basic_fungible.rs index b848b43431..169d45eb69 100644 --- a/crates/miden-standards/src/account/faucets/basic_fungible.rs +++ b/crates/miden-standards/src/account/faucets/basic_fungible.rs @@ -1,3 +1,10 @@ +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; use miden_protocol::account::{ Account, AccountBuilder, @@ -5,21 +12,18 @@ use miden_protocol::account::{ AccountStorage, AccountStorageMode, AccountType, - StorageSlot, 
StorageSlotName, }; -use miden_protocol::asset::{FungibleAsset, TokenSymbol}; -use miden_protocol::{Felt, FieldElement, Word}; - -use super::FungibleFaucetError; -use crate::account::AuthScheme; -use crate::account::auth::{ - AuthEcdsaK256KeccakAcl, - AuthEcdsaK256KeccakAclConfig, - AuthFalcon512RpoAcl, - AuthFalcon512RpoAclConfig, -}; +use miden_protocol::asset::TokenSymbol; +use miden_protocol::{Felt, Word}; + +use super::{FungibleFaucetError, TokenMetadata}; +use crate::account::AuthMethod; +use crate::account::auth::{AuthSingleSigAcl, AuthSingleSigAclConfig}; use crate::account::components::basic_fungible_faucet_library; + +/// The schema type for token symbols. +const TOKEN_SYMBOL_TYPE: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; use crate::procedure_digest; @@ -29,6 +33,7 @@ use crate::procedure_digest; // Initialize the digest of the `distribute` procedure of the Basic Fungible Faucet only once. procedure_digest!( BASIC_FUNGIBLE_FAUCET_DISTRIBUTE, + BasicFungibleFaucet::NAME, BasicFungibleFaucet::DISTRIBUTE_PROC_NAME, basic_fungible_faucet_library ); @@ -36,6 +41,7 @@ procedure_digest!( // Initialize the digest of the `burn` procedure of the Basic Fungible Faucet only once. procedure_digest!( BASIC_FUNGIBLE_FAUCET_BURN, + BasicFungibleFaucet::NAME, BasicFungibleFaucet::BURN_PROC_NAME, basic_fungible_faucet_library ); @@ -58,97 +64,80 @@ procedure_digest!( /// /// ## Storage Layout /// -/// - [`Self::metadata_slot`]: Fungible faucet metadata +/// - [`Self::metadata_slot`]: Stores [`TokenMetadata`]. /// /// [builder]: crate::code_builder::CodeBuilder pub struct BasicFungibleFaucet { - symbol: TokenSymbol, - decimals: u8, - max_supply: Felt, + metadata: TokenMetadata, } impl BasicFungibleFaucet { // CONSTANTS // -------------------------------------------------------------------------------------------- + /// The name of the component. 
+ pub const NAME: &'static str = "miden::standards::components::faucets::basic_fungible_faucet"; + /// The maximum number of decimals supported by the component. - pub const MAX_DECIMALS: u8 = 12; + pub const MAX_DECIMALS: u8 = TokenMetadata::MAX_DECIMALS; - const DISTRIBUTE_PROC_NAME: &str = "basic_fungible_faucet::distribute"; - const BURN_PROC_NAME: &str = "basic_fungible_faucet::burn"; + const DISTRIBUTE_PROC_NAME: &str = "distribute"; + const BURN_PROC_NAME: &str = "burn"; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- - /// Creates a new [`BasicFungibleFaucet`] component from the given pieces of metadata. + /// Creates a new [`BasicFungibleFaucet`] component from the given pieces of metadata and with + /// an initial token supply of zero. + /// + /// # Errors /// - /// # Errors: /// Returns an error if: /// - the decimals parameter exceeds maximum value of [`Self::MAX_DECIMALS`]. /// - the max supply parameter exceeds maximum possible amount for a fungible asset - /// ([`FungibleAsset::MAX_AMOUNT`]) + /// ([`miden_protocol::asset::FungibleAsset::MAX_AMOUNT`]) pub fn new( symbol: TokenSymbol, decimals: u8, max_supply: Felt, ) -> Result { - // First check that the metadata is valid. - if decimals > Self::MAX_DECIMALS { - return Err(FungibleFaucetError::TooManyDecimals { - actual: decimals as u64, - max: Self::MAX_DECIMALS, - }); - } else if max_supply.as_int() > FungibleAsset::MAX_AMOUNT { - return Err(FungibleFaucetError::MaxSupplyTooLarge { - actual: max_supply.as_int(), - max: FungibleAsset::MAX_AMOUNT, - }); - } + let metadata = TokenMetadata::new(symbol, decimals, max_supply)?; + Ok(Self { metadata }) + } - Ok(Self { symbol, decimals, max_supply }) + /// Creates a new [`BasicFungibleFaucet`] component from the given [`TokenMetadata`]. + /// + /// This is a convenience constructor that allows creating a faucet from pre-validated + /// metadata. 
+ pub fn from_metadata(metadata: TokenMetadata) -> Self { + Self { metadata } } /// Attempts to create a new [`BasicFungibleFaucet`] component from the associated account /// interface and storage. /// - /// # Errors: + /// # Errors + /// /// Returns an error if: /// - the provided [`AccountInterface`] does not contain a /// [`AccountComponentInterface::BasicFungibleFaucet`] component. /// - the decimals parameter exceeds maximum value of [`Self::MAX_DECIMALS`]. /// - the max supply value exceeds maximum possible amount for a fungible asset of - /// [`FungibleAsset::MAX_AMOUNT`]. + /// [`miden_protocol::asset::FungibleAsset::MAX_AMOUNT`]. + /// - the token supply exceeds the max supply. /// - the token symbol encoded value exceeds the maximum value of /// [`TokenSymbol::MAX_ENCODED_VALUE`]. fn try_from_interface( interface: AccountInterface, storage: &AccountStorage, ) -> Result { - for component in interface.components().iter() { - if let AccountComponentInterface::BasicFungibleFaucet = component { - let faucet_metadata = storage - .get_item(BasicFungibleFaucet::metadata_slot()) - .map_err(|err| FungibleFaucetError::StorageLookupFailed { - slot_name: BasicFungibleFaucet::metadata_slot().clone(), - source: err, - })?; - let [max_supply, decimals, token_symbol, _] = *faucet_metadata; - - // verify metadata values - let token_symbol = TokenSymbol::try_from(token_symbol) - .map_err(FungibleFaucetError::InvalidTokenSymbol)?; - let decimals = decimals.as_int().try_into().map_err(|_| { - FungibleFaucetError::TooManyDecimals { - actual: decimals.as_int(), - max: Self::MAX_DECIMALS, - } - })?; - - return BasicFungibleFaucet::new(token_symbol, decimals, max_supply); - } + // Check that the procedures of the basic fungible faucet exist in the account. 
+ if !interface.components().contains(&AccountComponentInterface::BasicFungibleFaucet) { + return Err(FungibleFaucetError::MissingBasicFungibleFaucetInterface); } - Err(FungibleFaucetError::NoAvailableInterface) + let metadata = TokenMetadata::try_from(storage)?; + Ok(Self { metadata }) } // PUBLIC ACCESSORS @@ -156,22 +145,54 @@ impl BasicFungibleFaucet { /// Returns the [`StorageSlotName`] where the [`BasicFungibleFaucet`]'s metadata is stored. pub fn metadata_slot() -> &'static StorageSlotName { - &super::METADATA_SLOT_NAME + TokenMetadata::metadata_slot() + } + + /// Returns the storage slot schema for the metadata slot. + pub fn metadata_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + let token_symbol_type = SchemaType::new(TOKEN_SYMBOL_TYPE).expect("valid type"); + ( + Self::metadata_slot().clone(), + StorageSlotSchema::value( + "Token metadata", + [ + FeltSchema::felt("token_supply").with_default(Felt::new(0)), + FeltSchema::felt("max_supply"), + FeltSchema::u8("decimals"), + FeltSchema::new_typed(token_symbol_type, "symbol"), + ], + ), + ) + } + + /// Returns the token metadata. + pub fn metadata(&self) -> &TokenMetadata { + &self.metadata } /// Returns the symbol of the faucet. - pub fn symbol(&self) -> TokenSymbol { - self.symbol + pub fn symbol(&self) -> &TokenSymbol { + self.metadata.symbol() } /// Returns the decimals of the faucet. pub fn decimals(&self) -> u8 { - self.decimals + self.metadata.decimals() } - /// Returns the max supply of the faucet. + /// Returns the max supply (in base units) of the faucet. + /// + /// This is the highest amount of tokens that can be minted from this faucet. pub fn max_supply(&self) -> Felt { - self.max_supply + self.metadata.max_supply() + } + + /// Returns the token supply (in base units) of the faucet. + /// + /// This is the amount of tokens that were minted from the faucet so far. Its value can never + /// exceed [`Self::max_supply`]. 
+ pub fn token_supply(&self) -> Felt { + self.metadata.token_supply() } /// Returns the digest of the `distribute` account procedure. @@ -183,24 +204,39 @@ impl BasicFungibleFaucet { pub fn burn_digest() -> Word { *BASIC_FUNGIBLE_FAUCET_BURN } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([Self::metadata_slot_schema()]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, [AccountType::FungibleFaucet]) + .with_description("Basic fungible faucet component for minting and burning tokens") + .with_storage_schema(storage_schema) + } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Sets the token_supply (in base units) of the basic fungible faucet. + /// + /// # Errors + /// + /// Returns an error if: + /// - the token supply exceeds the max supply. + pub fn with_token_supply(mut self, token_supply: Felt) -> Result { + self.metadata = self.metadata.with_token_supply(token_supply)?; + Ok(self) + } } impl From for AccountComponent { fn from(faucet: BasicFungibleFaucet) -> Self { - // Note: data is stored as [a0, a1, a2, a3] but loaded onto the stack as - // [a3, a2, a1, a0, ...] 
- let metadata = Word::new([ - faucet.max_supply, - Felt::from(faucet.decimals), - faucet.symbol.into(), - Felt::ZERO, - ]); - let storage_slot = - StorageSlot::with_value(BasicFungibleFaucet::metadata_slot().clone(), metadata); - - AccountComponent::new(basic_fungible_faucet_library(), vec![storage_slot]) + let storage_slot = faucet.metadata.into(); + let metadata = BasicFungibleFaucet::component_metadata(); + + AccountComponent::new(basic_fungible_faucet_library(), vec![storage_slot], metadata) .expect("basic fungible faucet component should satisfy the requirements of a valid account component") - .with_supported_type(AccountType::FungibleFaucet) } } @@ -236,60 +272,44 @@ impl TryFrom<&Account> for BasicFungibleFaucet { /// via the specified authentication scheme. The `burn` procedure can only be called from a note /// script and requires the calling note to contain the asset to be burned. /// -/// The storage layout of the faucet account is: -/// - Slot 0: Reserved slot for faucets. -/// - Slot 1: Public Key of the authentication component. -/// - Slot 2: [num_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, -/// 0]. -/// - Slot 3: A map with trigger procedure roots. -/// - Slot 4: Token metadata of the faucet. 
+/// The storage layout of the faucet account is defined by the combination of the following +/// components (see their docs for details): +/// - [`BasicFungibleFaucet`] +/// - [`AuthSingleSigAcl`] pub fn create_basic_fungible_faucet( init_seed: [u8; 32], symbol: TokenSymbol, decimals: u8, max_supply: Felt, account_storage_mode: AccountStorageMode, - auth_scheme: AuthScheme, + auth_method: AuthMethod, ) -> Result { let distribute_proc_root = BasicFungibleFaucet::distribute_digest(); - let auth_component: AccountComponent = match auth_scheme { - AuthScheme::Falcon512Rpo { pub_key } => AuthFalcon512RpoAcl::new( + let auth_component: AccountComponent = match auth_method { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => AuthSingleSigAcl::new( pub_key, - AuthFalcon512RpoAclConfig::new() - .with_auth_trigger_procedures(vec![distribute_proc_root]) - .with_allow_unauthorized_input_notes(true), - ) - .map_err(FungibleFaucetError::AccountError)? - .into(), - AuthScheme::EcdsaK256Keccak { pub_key } => AuthEcdsaK256KeccakAcl::new( - pub_key, - AuthEcdsaK256KeccakAclConfig::new() + auth_scheme, + AuthSingleSigAclConfig::new() .with_auth_trigger_procedures(vec![distribute_proc_root]) .with_allow_unauthorized_input_notes(true), ) .map_err(FungibleFaucetError::AccountError)? 
.into(), - AuthScheme::NoAuth => { - return Err(FungibleFaucetError::UnsupportedAuthScheme( - "basic fungible faucets cannot be created with NoAuth authentication scheme".into(), + AuthMethod::NoAuth => { + return Err(FungibleFaucetError::UnsupportedAuthMethod( + "basic fungible faucets cannot be created with NoAuth authentication method".into(), )); }, - AuthScheme::Falcon512RpoMultisig { threshold: _, pub_keys: _ } => { - return Err(FungibleFaucetError::UnsupportedAuthScheme( - "basic fungible faucets do not support multisig authentication".into(), - )); - }, - AuthScheme::Unknown => { - return Err(FungibleFaucetError::UnsupportedAuthScheme( - "basic fungible faucets cannot be created with Unknown authentication scheme" + AuthMethod::Unknown => { + return Err(FungibleFaucetError::UnsupportedAuthMethod( + "basic fungible faucets cannot be created with Unknown authentication method" .into(), )); }, - AuthScheme::EcdsaK256KeccakMultisig { threshold: _, pub_keys: _ } => { - return Err(FungibleFaucetError::UnsupportedAuthScheme( - "basic fungible faucets do not support EcdsaK256KeccakMultisig authentication" - .into(), + AuthMethod::Multisig { .. 
} => { + return Err(FungibleFaucetError::UnsupportedAuthMethod( + "basic fungible faucets do not support Multisig authentication".into(), )); }, }; @@ -311,28 +331,29 @@ pub fn create_basic_fungible_faucet( #[cfg(test)] mod tests { use assert_matches::assert_matches; - use miden_protocol::account::AccountStorage; - use miden_protocol::account::auth::PublicKeyCommitment; - use miden_protocol::{FieldElement, ONE, Word}; + use miden_protocol::Word; + use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use super::{ AccountBuilder, AccountStorageMode, AccountType, - AuthScheme, + AuthMethod, BasicFungibleFaucet, Felt, FungibleFaucetError, TokenSymbol, create_basic_fungible_faucet, }; - use crate::account::auth::{AuthFalcon512Rpo, AuthFalcon512RpoAcl}; + use crate::account::auth::{AuthSingleSig, AuthSingleSigAcl}; use crate::account::wallets::BasicWallet; #[test] fn faucet_contract_creation() { - let pub_key_word = Word::new([ONE; 4]); - let auth_scheme: AuthScheme = AuthScheme::Falcon512Rpo { pub_key: pub_key_word.into() }; + let pub_key_word = Word::new([Felt::ONE; 4]); + let auth_method: AuthMethod = AuthMethod::SingleSig { + approver: (pub_key_word.into(), AuthScheme::Falcon512Poseidon2), + }; // we need to use an initial seed to create the wallet account let init_seed: [u8; 32] = [ @@ -346,31 +367,20 @@ mod tests { let decimals = 2u8; let storage_mode = AccountStorageMode::Private; + let token_symbol_felt = token_symbol.as_element(); let faucet_account = create_basic_fungible_faucet( init_seed, - token_symbol, + token_symbol.clone(), decimals, max_supply, storage_mode, - auth_scheme, + auth_method, ) .unwrap(); - // The faucet sysdata slot should be initialized to an empty word. - assert_eq!( - faucet_account - .storage() - .get_item(AccountStorage::faucet_sysdata_slot()) - .unwrap(), - Word::empty() - ); - // The falcon auth component's public key should be present. 
assert_eq!( - faucet_account - .storage() - .get_item(AuthFalcon512RpoAcl::public_key_slot()) - .unwrap(), + faucet_account.storage().get_item(AuthSingleSigAcl::public_key_slot()).unwrap(), pub_key_word ); @@ -380,7 +390,7 @@ mod tests { // With 1 trigger procedure (distribute), allow_unauthorized_output_notes=false, and // allow_unauthorized_input_notes=true, this should be [1, 0, 1, 0]. assert_eq!( - faucet_account.storage().get_item(AuthFalcon512RpoAcl::config_slot()).unwrap(), + faucet_account.storage().get_item(AuthSingleSigAcl::config_slot()).unwrap(), [Felt::ONE, Felt::ZERO, Felt::ONE, Felt::ZERO].into() ); @@ -390,7 +400,7 @@ mod tests { faucet_account .storage() .get_map_item( - AuthFalcon512RpoAcl::trigger_procedure_roots_slot(), + AuthSingleSigAcl::trigger_procedure_roots_slot(), [Felt::ZERO, Felt::ZERO, Felt::ZERO, Felt::ZERO].into() ) .unwrap(), @@ -398,9 +408,10 @@ mod tests { ); // Check that faucet metadata was initialized to the given values. + // Storage layout: [token_supply, max_supply, decimals, symbol] assert_eq!( faucet_account.storage().get_item(BasicFungibleFaucet::metadata_slot()).unwrap(), - [Felt::new(123), Felt::new(2), token_symbol.into(), Felt::ZERO].into() + [Felt::ZERO, Felt::new(123), Felt::new(2), token_symbol_felt].into() ); assert!(faucet_account.is_faucet()); @@ -409,9 +420,10 @@ mod tests { // Verify the faucet can be extracted and has correct metadata let faucet_component = BasicFungibleFaucet::try_from(faucet_account.clone()).unwrap(); - assert_eq!(faucet_component.symbol(), token_symbol); + assert_eq!(faucet_component.symbol(), &token_symbol); assert_eq!(faucet_component.decimals(), decimals); assert_eq!(faucet_component.max_supply(), max_supply); + assert_eq!(faucet_component.token_supply(), Felt::ZERO); } #[test] @@ -426,23 +438,27 @@ mod tests { let faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) .with_component( - BasicFungibleFaucet::new(token_symbol, 10, Felt::new(100)) + 
BasicFungibleFaucet::new(token_symbol.clone(), 10, Felt::new(100)) .expect("failed to create a fungible faucet component"), ) - .with_auth_component(AuthFalcon512Rpo::new(mock_public_key)) + .with_auth_component(AuthSingleSig::new( + mock_public_key, + AuthScheme::Falcon512Poseidon2, + )) .build_existing() .expect("failed to create wallet account"); let basic_ff = BasicFungibleFaucet::try_from(faucet_account) .expect("basic fungible faucet creation failed"); - assert_eq!(basic_ff.symbol, token_symbol); - assert_eq!(basic_ff.decimals, 10); - assert_eq!(basic_ff.max_supply, Felt::new(100)); + assert_eq!(basic_ff.symbol(), &token_symbol); + assert_eq!(basic_ff.decimals(), 10); + assert_eq!(basic_ff.max_supply(), Felt::new(100)); + assert_eq!(basic_ff.token_supply(), Felt::ZERO); // invalid account: basic fungible faucet component is missing let invalid_faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) - .with_auth_component(AuthFalcon512Rpo::new(mock_public_key)) + .with_auth_component(AuthSingleSig::new(mock_public_key, AuthScheme::Falcon512Poseidon2)) // we need to add some other component so the builder doesn't fail .with_component(BasicWallet) .build_existing() @@ -451,7 +467,7 @@ mod tests { let err = BasicFungibleFaucet::try_from(invalid_faucet_account) .err() .expect("basic fungible faucet creation should fail"); - assert_matches!(err, FungibleFaucetError::NoAvailableInterface); + assert_matches!(err, FungibleFaucetError::MissingBasicFungibleFaucetInterface); } /// Check that the obtaining of the basic fungible faucet procedure digests does not panic. 
diff --git a/crates/miden-standards/src/account/faucets/mod.rs b/crates/miden-standards/src/account/faucets/mod.rs index 9733cd53ab..db654c10fe 100644 --- a/crates/miden-standards/src/account/faucets/mod.rs +++ b/crates/miden-standards/src/account/faucets/mod.rs @@ -1,55 +1,18 @@ use alloc::string::String; -use miden_protocol::Felt; -use miden_protocol::account::{Account, AccountStorage, AccountType, StorageSlotName}; +use miden_protocol::account::StorageSlotName; use miden_protocol::errors::{AccountError, TokenSymbolError}; -use miden_protocol::utils::sync::LazyLock; use thiserror::Error; +use crate::account::access::Ownable2StepError; + mod basic_fungible; mod network_fungible; +mod token_metadata; pub use basic_fungible::{BasicFungibleFaucet, create_basic_fungible_faucet}; pub use network_fungible::{NetworkFungibleFaucet, create_network_fungible_faucet}; - -static METADATA_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::fungible_faucets::metadata") - .expect("storage slot name should be valid") -}); - -// FUNGIBLE FAUCET -// ================================================================================================ - -/// Extension trait for fungible faucet accounts. Provides methods to access the fungible faucet -/// account's reserved storage slot. -pub trait FungibleFaucetExt { - const ISSUANCE_ELEMENT_INDEX: usize; - - /// Returns the amount of tokens (in base units) issued from this fungible faucet. - /// - /// # Errors - /// Returns an error if the account is not a fungible faucet account. 
- fn get_token_issuance(&self) -> Result; -} - -impl FungibleFaucetExt for Account { - const ISSUANCE_ELEMENT_INDEX: usize = 3; - - fn get_token_issuance(&self) -> Result { - if self.account_type() != AccountType::FungibleFaucet { - return Err(FungibleFaucetError::NotAFungibleFaucetAccount); - } - - let slot = - self.storage().get_item(AccountStorage::faucet_sysdata_slot()).map_err(|err| { - FungibleFaucetError::StorageLookupFailed { - slot_name: AccountStorage::faucet_sysdata_slot().clone(), - source: err, - } - })?; - Ok(slot[Self::ISSUANCE_ELEMENT_INDEX]) - } -} +pub use token_metadata::TokenMetadata; // FUNGIBLE FAUCET ERROR // ================================================================================================ @@ -61,10 +24,16 @@ pub enum FungibleFaucetError { TooManyDecimals { actual: u64, max: u8 }, #[error("faucet metadata max supply is {actual} which exceeds max value of {max}")] MaxSupplyTooLarge { actual: u64, max: u64 }, + #[error("token supply {token_supply} exceeds max_supply {max_supply}")] + TokenSupplyExceedsMaxSupply { token_supply: u64, max_supply: u64 }, #[error( - "account interface provided for faucet creation does not have basic fungible faucet component" + "account interface does not have the procedures of the basic fungible faucet component" )] - NoAvailableInterface, + MissingBasicFungibleFaucetInterface, + #[error( + "account interface does not have the procedures of the network fungible faucet component" + )] + MissingNetworkFungibleFaucetInterface, #[error("failed to retrieve storage slot with name {slot_name}")] StorageLookupFailed { slot_name: StorageSlotName, @@ -72,10 +41,17 @@ pub enum FungibleFaucetError { }, #[error("invalid token symbol")] InvalidTokenSymbol(#[source] TokenSymbolError), - #[error("unsupported authentication scheme: {0}")] - UnsupportedAuthScheme(String), + #[error("storage slot name mismatch: expected {expected}, got {actual}")] + SlotNameMismatch { + expected: StorageSlotName, + actual: 
StorageSlotName, + }, + #[error("unsupported authentication method: {0}")] + UnsupportedAuthMethod(String), #[error("account creation failed")] AccountError(#[source] AccountError), #[error("account is not a fungible faucet account")] NotAFungibleFaucetAccount, + #[error("failed to read ownership data from storage")] + OwnershipError(#[source] Ownable2StepError), } diff --git a/crates/miden-standards/src/account/faucets/network_fungible.rs b/crates/miden-standards/src/account/faucets/network_fungible.rs index fb150d2d9d..74c1e5298a 100644 --- a/crates/miden-standards/src/account/faucets/network_fungible.rs +++ b/crates/miden-standards/src/account/faucets/network_fungible.rs @@ -1,30 +1,39 @@ +use miden_protocol::account::component::{ + AccountComponentMetadata, + FeltSchema, + SchemaType, + StorageSchema, + StorageSlotSchema, +}; use miden_protocol::account::{ Account, AccountBuilder, AccountComponent, - AccountId, AccountStorage, AccountStorageMode, AccountType, - StorageSlot, StorageSlotName, }; use miden_protocol::asset::TokenSymbol; -use miden_protocol::utils::sync::LazyLock; -use miden_protocol::{Felt, FieldElement, Word}; +use miden_protocol::{Felt, Word}; -use super::{BasicFungibleFaucet, FungibleFaucetError}; +use super::{FungibleFaucetError, TokenMetadata}; +use crate::account::access::AccessControl; use crate::account::auth::NoAuth; use crate::account::components::network_fungible_faucet_library; use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; use crate::procedure_digest; +/// The schema type for token symbols. +const TOKEN_SYMBOL_TYPE: &str = "miden::standards::fungible_faucets::metadata::token_symbol"; + // NETWORK FUNGIBLE FAUCET ACCOUNT COMPONENT // ================================================================================================ // Initialize the digest of the `distribute` procedure of the Network Fungible Faucet only once. 
procedure_digest!( NETWORK_FUNGIBLE_FAUCET_DISTRIBUTE, + NetworkFungibleFaucet::NAME, NetworkFungibleFaucet::DISTRIBUTE_PROC_NAME, network_fungible_faucet_library ); @@ -32,15 +41,11 @@ procedure_digest!( // Initialize the digest of the `burn` procedure of the Network Fungible Faucet only once. procedure_digest!( NETWORK_FUNGIBLE_FAUCET_BURN, + NetworkFungibleFaucet::NAME, NetworkFungibleFaucet::BURN_PROC_NAME, network_fungible_faucet_library ); -static OWNER_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { - StorageSlotName::new("miden::standards::access::ownable::owner_config") - .expect("storage slot name should be valid") -}); - /// An [`AccountComponent`] implementing a network fungible faucet. /// /// It reexports the procedures from `miden::standards::faucets::network_fungible`. When linking @@ -54,26 +59,31 @@ static OWNER_CONFIG_SLOT_NAME: LazyLock = LazyLock::new(|| { /// authentication while `burn` does not require authentication and can be called by anyone. /// Thus, this component must be combined with a component providing authentication. /// +/// This component relies on [`crate::account::access::Ownable2Step`] for ownership checks in +/// `distribute`. When building an account with this component, +/// [`crate::account::access::Ownable2Step`] must also be included. +/// /// ## Storage Layout /// /// - [`Self::metadata_slot`]: Fungible faucet metadata. -/// - [`Self::owner_config_slot`]: The owner account of this network faucet. /// /// [builder]: crate::code_builder::CodeBuilder pub struct NetworkFungibleFaucet { - faucet: BasicFungibleFaucet, - owner_account_id: AccountId, + metadata: TokenMetadata, } impl NetworkFungibleFaucet { // CONSTANTS // -------------------------------------------------------------------------------------------- + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::faucets::network_fungible_faucet"; + /// The maximum number of decimals supported by the component. 
- pub const MAX_DECIMALS: u8 = 12; + pub const MAX_DECIMALS: u8 = TokenMetadata::MAX_DECIMALS; - const DISTRIBUTE_PROC_NAME: &str = "network_fungible_faucet::distribute"; - const BURN_PROC_NAME: &str = "network_fungible_faucet::burn"; + const DISTRIBUTE_PROC_NAME: &str = "distribute"; + const BURN_PROC_NAME: &str = "burn"; // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -89,12 +99,17 @@ impl NetworkFungibleFaucet { symbol: TokenSymbol, decimals: u8, max_supply: Felt, - owner_account_id: AccountId, ) -> Result { - // Create the basic fungible faucet (this validates the metadata) - let faucet = BasicFungibleFaucet::new(symbol, decimals, max_supply)?; + let metadata = TokenMetadata::new(symbol, decimals, max_supply)?; + Ok(Self { metadata }) + } - Ok(Self { faucet, owner_account_id }) + /// Creates a new [`NetworkFungibleFaucet`] component from the given [`TokenMetadata`]. + /// + /// This is a convenience constructor that allows creating a faucet from pre-validated + /// metadata. + pub fn from_metadata(metadata: TokenMetadata) -> Self { + Self { metadata } } /// Attempts to create a new [`NetworkFungibleFaucet`] component from the associated account @@ -107,55 +122,25 @@ impl NetworkFungibleFaucet { /// - the decimals parameter exceeds maximum value of [`Self::MAX_DECIMALS`]. /// - the max supply value exceeds maximum possible amount for a fungible asset of /// [`miden_protocol::asset::FungibleAsset::MAX_AMOUNT`]. + /// - the token supply exceeds the max supply. /// - the token symbol encoded value exceeds the maximum value of /// [`TokenSymbol::MAX_ENCODED_VALUE`]. 
fn try_from_interface( interface: AccountInterface, storage: &AccountStorage, ) -> Result { - for component in interface.components().iter() { - if let AccountComponentInterface::NetworkFungibleFaucet = component { - // obtain metadata from storage using offset provided by NetworkFungibleFaucet - // interface - let faucet_metadata = storage - .get_item(NetworkFungibleFaucet::metadata_slot()) - .map_err(|err| FungibleFaucetError::StorageLookupFailed { - slot_name: NetworkFungibleFaucet::metadata_slot().clone(), - source: err, - })?; - let [max_supply, decimals, token_symbol, _] = *faucet_metadata; - - // obtain owner account ID from the next storage slot - let owner_account_id_word: Word = storage - .get_item(NetworkFungibleFaucet::owner_config_slot()) - .map_err(|err| FungibleFaucetError::StorageLookupFailed { - slot_name: NetworkFungibleFaucet::owner_config_slot().clone(), - source: err, - })?; - - // Convert Word back to AccountId - // Storage format: [0, 0, suffix, prefix] - let prefix = owner_account_id_word[3]; - let suffix = owner_account_id_word[2]; - let owner_account_id = AccountId::new_unchecked([prefix, suffix]); - - // verify metadata values and create BasicFungibleFaucet - let token_symbol = TokenSymbol::try_from(token_symbol) - .map_err(FungibleFaucetError::InvalidTokenSymbol)?; - let decimals = decimals.as_int().try_into().map_err(|_| { - FungibleFaucetError::TooManyDecimals { - actual: decimals.as_int(), - max: Self::MAX_DECIMALS, - } - })?; - - let faucet = BasicFungibleFaucet::new(token_symbol, decimals, max_supply)?; - - return Ok(Self { faucet, owner_account_id }); - } + // Check that the procedures of the network fungible faucet exist in the account. 
+ if !interface + .components() + .contains(&AccountComponentInterface::NetworkFungibleFaucet) + { + return Err(FungibleFaucetError::MissingNetworkFungibleFaucetInterface); } - Err(FungibleFaucetError::NoAvailableInterface) + // Read token metadata from storage + let metadata = TokenMetadata::try_from(storage)?; + + Ok(Self { metadata }) } // PUBLIC ACCESSORS @@ -163,33 +148,54 @@ impl NetworkFungibleFaucet { /// Returns the [`StorageSlotName`] where the [`NetworkFungibleFaucet`]'s metadata is stored. pub fn metadata_slot() -> &'static StorageSlotName { - &super::METADATA_SLOT_NAME + TokenMetadata::metadata_slot() + } + + /// Returns the storage slot schema for the metadata slot. + pub fn metadata_slot_schema() -> (StorageSlotName, StorageSlotSchema) { + let token_symbol_type = SchemaType::new(TOKEN_SYMBOL_TYPE).expect("valid type"); + ( + Self::metadata_slot().clone(), + StorageSlotSchema::value( + "Token metadata", + [ + FeltSchema::felt("token_supply").with_default(Felt::new(0)), + FeltSchema::felt("max_supply"), + FeltSchema::u8("decimals"), + FeltSchema::new_typed(token_symbol_type, "symbol"), + ], + ), + ) } - /// Returns the [`StorageSlotName`] where the [`NetworkFungibleFaucet`]'s owner configuration is - /// stored. - pub fn owner_config_slot() -> &'static StorageSlotName { - &OWNER_CONFIG_SLOT_NAME + /// Returns the token metadata. + pub fn metadata(&self) -> &TokenMetadata { + &self.metadata } /// Returns the symbol of the faucet. - pub fn symbol(&self) -> TokenSymbol { - self.faucet.symbol() + pub fn symbol(&self) -> &TokenSymbol { + self.metadata.symbol() } /// Returns the decimals of the faucet. pub fn decimals(&self) -> u8 { - self.faucet.decimals() + self.metadata.decimals() } - /// Returns the max supply of the faucet. + /// Returns the max supply (in base units) of the faucet. + /// + /// This is the highest amount of tokens that can be minted from this faucet. 
pub fn max_supply(&self) -> Felt { - self.faucet.max_supply() + self.metadata.max_supply() } - /// Returns the owner account ID of the faucet. - pub fn owner_account_id(&self) -> AccountId { - self.owner_account_id + /// Returns the token supply (in base units) of the faucet. + /// + /// This is the amount of tokens that were minted from the faucet so far. Its value can never + /// exceed [`Self::max_supply`]. + pub fn token_supply(&self) -> Felt { + self.metadata.token_supply() } /// Returns the digest of the `distribute` account procedure. @@ -201,41 +207,43 @@ impl NetworkFungibleFaucet { pub fn burn_digest() -> Word { *NETWORK_FUNGIBLE_FAUCET_BURN } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Sets the token_supply (in base units) of the network fungible faucet. + /// + /// # Errors + /// + /// Returns an error if: + /// - the token supply exceeds the max supply. + pub fn with_token_supply(mut self, token_supply: Felt) -> Result { + self.metadata = self.metadata.with_token_supply(token_supply)?; + Ok(self) + } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + let storage_schema = StorageSchema::new([Self::metadata_slot_schema()]) + .expect("storage schema should be valid"); + + AccountComponentMetadata::new(Self::NAME, [AccountType::FungibleFaucet]) + .with_description("Network fungible faucet component for minting and burning tokens") + .with_storage_schema(storage_schema) + } } impl From for AccountComponent { fn from(network_faucet: NetworkFungibleFaucet) -> Self { - // Note: data is stored as [a0, a1, a2, a3] but loaded onto the stack as - // [a3, a2, a1, a0, ...] - let metadata = Word::new([ - network_faucet.faucet.max_supply(), - Felt::from(network_faucet.faucet.decimals()), - network_faucet.faucet.symbol().into(), - Felt::ZERO, - ]); - - // Convert AccountId into its Word encoding for storage. 
- let owner_account_id_word: Word = [ - Felt::new(0), - Felt::new(0), - network_faucet.owner_account_id.suffix(), - network_faucet.owner_account_id.prefix().as_felt(), - ] - .into(); - - let metadata_slot = - StorageSlot::with_value(NetworkFungibleFaucet::metadata_slot().clone(), metadata); - let owner_slot = StorageSlot::with_value( - NetworkFungibleFaucet::owner_config_slot().clone(), - owner_account_id_word, - ); + let metadata_slot = network_faucet.metadata.into(); + let metadata = NetworkFungibleFaucet::component_metadata(); AccountComponent::new( network_fungible_faucet_library(), - vec![metadata_slot, owner_slot] + vec![metadata_slot], + metadata, ) - .expect("network fungible faucet component should satisfy the requirements of a valid account component") - .with_supported_type(AccountType::FungibleFaucet) + .expect("network fungible faucet component should satisfy the requirements of a valid account component") } } @@ -260,7 +268,7 @@ impl TryFrom<&Account> for NetworkFungibleFaucet { } /// Creates a new faucet account with network fungible faucet interface and provided metadata -/// (token symbol, decimals, max supply, owner account ID). +/// (token symbol, decimals, max supply) and access control. /// /// The network faucet interface exposes two procedures: /// - `distribute`, which mints an assets and create a note for the provided recipient. @@ -274,20 +282,15 @@ impl TryFrom<&Account> for NetworkFungibleFaucet { /// - [`AccountStorageMode::Network`] for storage /// - [`NoAuth`] for authentication /// -/// The storage layout of the network faucet account is: -/// - Slot 0: Reserved slot for faucets. -/// - Slot 1: Public Key of the authentication component. -/// - Slot 2: [num_trigger_procs, allow_unauthorized_output_notes, allow_unauthorized_input_notes, -/// 0]. -/// - Slot 3: A map with trigger procedure roots. -/// - Slot 4: Token metadata of the faucet. -/// - Slot 5: Owner account ID. 
+/// The storage layout of the faucet account is documented on the [`NetworkFungibleFaucet`] and +/// [`crate::account::access::Ownable2Step`] types, and contains no additional storage slots for +/// its auth ([`NoAuth`]). pub fn create_network_fungible_faucet( init_seed: [u8; 32], symbol: TokenSymbol, decimals: u8, max_supply: Felt, - owner_account_id: AccountId, + access_control: AccessControl, ) -> Result { let auth_component: AccountComponent = NoAuth::new().into(); @@ -295,9 +298,58 @@ pub fn create_network_fungible_faucet( .account_type(AccountType::FungibleFaucet) .storage_mode(AccountStorageMode::Network) .with_auth_component(auth_component) - .with_component(NetworkFungibleFaucet::new(symbol, decimals, max_supply, owner_account_id)?) + .with_component(NetworkFungibleFaucet::new(symbol, decimals, max_supply)?) + .with_component(access_control) .build() .map_err(FungibleFaucetError::AccountError)?; Ok(account) } + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + + use super::*; + use crate::account::access::Ownable2Step; + + #[test] + fn test_create_network_fungible_faucet() { + let init_seed = [7u8; 32]; + let symbol = TokenSymbol::new("NET").expect("token symbol should be valid"); + let decimals = 8u8; + let max_supply = Felt::new(1_000); + + let owner = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + let account = create_network_fungible_faucet( + init_seed, + symbol.clone(), + decimals, + max_supply, + AccessControl::Ownable2Step { owner }, + ) + .expect("network faucet creation should succeed"); + + let expected_owner_word = Ownable2Step::new(owner).to_word(); + assert_eq!( + account.storage().get_item(Ownable2Step::slot_name()).unwrap(), + expected_owner_word + ); + + let 
faucet = NetworkFungibleFaucet::try_from(&account) + .expect("network fungible faucet should be extractable from account"); + assert_eq!(faucet.symbol(), &symbol); + assert_eq!(faucet.decimals(), decimals); + assert_eq!(faucet.max_supply(), max_supply); + assert_eq!(faucet.token_supply(), Felt::ZERO); + } +} diff --git a/crates/miden-standards/src/account/faucets/token_metadata.rs b/crates/miden-standards/src/account/faucets/token_metadata.rs new file mode 100644 index 0000000000..bdca915fa5 --- /dev/null +++ b/crates/miden-standards/src/account/faucets/token_metadata.rs @@ -0,0 +1,348 @@ +use miden_protocol::account::{AccountStorage, StorageSlot, StorageSlotName}; +use miden_protocol::asset::{FungibleAsset, TokenSymbol}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use super::FungibleFaucetError; + +// CONSTANTS +// ================================================================================================ + +static METADATA_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::standards::fungible_faucets::metadata") + .expect("storage slot name should be valid") +}); + +// TOKEN METADATA +// ================================================================================================ + +/// Token metadata for fungible faucet accounts. +/// +/// This struct encapsulates the metadata associated with a fungible token faucet: +/// - `token_supply`: The current amount of tokens issued by the faucet. +/// - `max_supply`: The maximum amount of tokens that can be issued. +/// - `decimals`: The number of decimal places for token amounts. +/// - `symbol`: The token symbol. 
+/// +/// The metadata is stored in a single storage slot as: +/// `[token_supply, max_supply, decimals, symbol]` +#[derive(Debug, Clone)] +pub struct TokenMetadata { + token_supply: Felt, + max_supply: Felt, + decimals: u8, + symbol: TokenSymbol, +} + +impl TokenMetadata { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// The maximum number of decimals supported. + pub const MAX_DECIMALS: u8 = 12; + + // CONSTRUCTORS + // -------------------------------------------------------------------------------------------- + + /// Creates a new [`TokenMetadata`] with the specified metadata and zero token supply. + /// + /// # Errors + /// Returns an error if: + /// - The decimals parameter exceeds [`Self::MAX_DECIMALS`]. + /// - The max supply parameter exceeds [`FungibleAsset::MAX_AMOUNT`]. + pub fn new( + symbol: TokenSymbol, + decimals: u8, + max_supply: Felt, + ) -> Result { + Self::with_supply(symbol, decimals, max_supply, Felt::ZERO) + } + + /// Creates a new [`TokenMetadata`] with the specified metadata and token supply. + /// + /// # Errors + /// Returns an error if: + /// - The decimals parameter exceeds [`Self::MAX_DECIMALS`]. + /// - The max supply parameter exceeds [`FungibleAsset::MAX_AMOUNT`]. + /// - The token supply exceeds the max supply. 
+ pub fn with_supply( + symbol: TokenSymbol, + decimals: u8, + max_supply: Felt, + token_supply: Felt, + ) -> Result { + if decimals > Self::MAX_DECIMALS { + return Err(FungibleFaucetError::TooManyDecimals { + actual: decimals as u64, + max: Self::MAX_DECIMALS, + }); + } + + if max_supply.as_canonical_u64() > FungibleAsset::MAX_AMOUNT { + return Err(FungibleFaucetError::MaxSupplyTooLarge { + actual: max_supply.as_canonical_u64(), + max: FungibleAsset::MAX_AMOUNT, + }); + } + + if token_supply.as_canonical_u64() > max_supply.as_canonical_u64() { + return Err(FungibleFaucetError::TokenSupplyExceedsMaxSupply { + token_supply: token_supply.as_canonical_u64(), + max_supply: max_supply.as_canonical_u64(), + }); + } + + Ok(Self { + token_supply, + max_supply, + decimals, + symbol, + }) + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the [`StorageSlotName`] where the token metadata is stored. + pub fn metadata_slot() -> &'static StorageSlotName { + &METADATA_SLOT_NAME + } + + /// Returns the current token supply (amount issued). + pub fn token_supply(&self) -> Felt { + self.token_supply + } + + /// Returns the maximum token supply. + pub fn max_supply(&self) -> Felt { + self.max_supply + } + + /// Returns the number of decimals. + pub fn decimals(&self) -> u8 { + self.decimals + } + + /// Returns the token symbol. + pub fn symbol(&self) -> &TokenSymbol { + &self.symbol + } + + // MUTATORS + // -------------------------------------------------------------------------------------------- + + /// Sets the token_supply (in base units). + /// + /// # Errors + /// + /// Returns an error if: + /// - the token supply exceeds the max supply. 
+ pub fn with_token_supply(mut self, token_supply: Felt) -> Result { + if token_supply.as_canonical_u64() > self.max_supply.as_canonical_u64() { + return Err(FungibleFaucetError::TokenSupplyExceedsMaxSupply { + token_supply: token_supply.as_canonical_u64(), + max_supply: self.max_supply.as_canonical_u64(), + }); + } + + self.token_supply = token_supply; + + Ok(self) + } +} + +// TRAIT IMPLEMENTATIONS +// ================================================================================================ + +impl TryFrom for TokenMetadata { + type Error = FungibleFaucetError; + + /// Parses token metadata from a Word. + /// + /// The Word is expected to be in the format: `[token_supply, max_supply, decimals, symbol]` + fn try_from(word: Word) -> Result { + let [token_supply, max_supply, decimals, token_symbol] = *word; + + let symbol = + TokenSymbol::try_from(token_symbol).map_err(FungibleFaucetError::InvalidTokenSymbol)?; + + let decimals = decimals.as_canonical_u64().try_into().map_err(|_| { + FungibleFaucetError::TooManyDecimals { + actual: decimals.as_canonical_u64(), + max: Self::MAX_DECIMALS, + } + })?; + + Self::with_supply(symbol, decimals, max_supply, token_supply) + } +} + +impl From for Word { + fn from(metadata: TokenMetadata) -> Self { + // Storage layout: [token_supply, max_supply, decimals, symbol] + Word::new([ + metadata.token_supply, + metadata.max_supply, + Felt::from(metadata.decimals), + metadata.symbol.as_element(), + ]) + } +} + +impl From for StorageSlot { + fn from(metadata: TokenMetadata) -> Self { + StorageSlot::with_value(TokenMetadata::metadata_slot().clone(), metadata.into()) + } +} + +impl TryFrom<&StorageSlot> for TokenMetadata { + type Error = FungibleFaucetError; + + /// Tries to create [`TokenMetadata`] from a storage slot. + /// + /// # Errors + /// Returns an error if: + /// - The slot name does not match the expected metadata slot name. + /// - The slot value cannot be parsed as valid token metadata. 
+ fn try_from(slot: &StorageSlot) -> Result { + if slot.name() != Self::metadata_slot() { + return Err(FungibleFaucetError::SlotNameMismatch { + expected: Self::metadata_slot().clone(), + actual: slot.name().clone(), + }); + } + TokenMetadata::try_from(slot.value()) + } +} + +impl TryFrom<&AccountStorage> for TokenMetadata { + type Error = FungibleFaucetError; + + /// Tries to create [`TokenMetadata`] from account storage. + fn try_from(storage: &AccountStorage) -> Result { + let metadata_word = storage.get_item(TokenMetadata::metadata_slot()).map_err(|err| { + FungibleFaucetError::StorageLookupFailed { + slot_name: TokenMetadata::metadata_slot().clone(), + source: err, + } + })?; + + TokenMetadata::try_from(metadata_word) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::asset::TokenSymbol; + use miden_protocol::{Felt, Word}; + + use super::*; + + #[test] + fn token_metadata_new() { + let symbol = TokenSymbol::new("TEST").unwrap(); + let decimals = 8u8; + let max_supply = Felt::new(1_000_000); + + let metadata = TokenMetadata::new(symbol.clone(), decimals, max_supply).unwrap(); + + assert_eq!(metadata.symbol(), &symbol); + assert_eq!(metadata.decimals(), decimals); + assert_eq!(metadata.max_supply(), max_supply); + assert_eq!(metadata.token_supply(), Felt::ZERO); + } + + #[test] + fn token_metadata_with_supply() { + let symbol = TokenSymbol::new("TEST").unwrap(); + let decimals = 8u8; + let max_supply = Felt::new(1_000_000); + let token_supply = Felt::new(500_000); + + let metadata = + TokenMetadata::with_supply(symbol.clone(), decimals, max_supply, token_supply).unwrap(); + + assert_eq!(metadata.symbol(), &symbol); + assert_eq!(metadata.decimals(), decimals); + assert_eq!(metadata.max_supply(), max_supply); + assert_eq!(metadata.token_supply(), token_supply); + } + + #[test] + fn token_metadata_too_many_decimals() { + let symbol = 
TokenSymbol::new("TEST").unwrap(); + let decimals = 13u8; // exceeds MAX_DECIMALS + let max_supply = Felt::new(1_000_000); + + let result = TokenMetadata::new(symbol, decimals, max_supply); + assert!(matches!(result, Err(FungibleFaucetError::TooManyDecimals { .. }))); + } + + #[test] + fn token_metadata_max_supply_too_large() { + use miden_protocol::asset::FungibleAsset; + + let symbol = TokenSymbol::new("TEST").unwrap(); + let decimals = 8u8; + // FungibleAsset::MAX_AMOUNT is 2^63 - 1, so we use MAX_AMOUNT + 1 to exceed it + let max_supply = Felt::new(FungibleAsset::MAX_AMOUNT + 1); + + let result = TokenMetadata::new(symbol, decimals, max_supply); + assert!(matches!(result, Err(FungibleFaucetError::MaxSupplyTooLarge { .. }))); + } + + #[test] + fn token_metadata_to_word() { + let symbol = TokenSymbol::new("POL").unwrap(); + let symbol_felt = symbol.as_element(); + let decimals = 2u8; + let max_supply = Felt::new(123); + + let metadata = TokenMetadata::new(symbol, decimals, max_supply).unwrap(); + let word: Word = metadata.into(); + + // Storage layout: [token_supply, max_supply, decimals, symbol] + assert_eq!(word[0], Felt::ZERO); // token_supply + assert_eq!(word[1], max_supply); + assert_eq!(word[2], Felt::from(decimals)); + assert_eq!(word[3], symbol_felt); + } + + #[test] + fn token_metadata_from_storage_slot() { + let symbol = TokenSymbol::new("POL").unwrap(); + let decimals = 2u8; + let max_supply = Felt::new(123); + + let original = TokenMetadata::new(symbol.clone(), decimals, max_supply).unwrap(); + let slot: StorageSlot = original.into(); + + let restored = TokenMetadata::try_from(&slot).unwrap(); + + assert_eq!(restored.symbol(), &symbol); + assert_eq!(restored.decimals(), decimals); + assert_eq!(restored.max_supply(), max_supply); + assert_eq!(restored.token_supply(), Felt::ZERO); + } + + #[test] + fn token_metadata_roundtrip_with_supply() { + let symbol = TokenSymbol::new("POL").unwrap(); + let decimals = 2u8; + let max_supply = Felt::new(1000); + let 
token_supply = Felt::new(500); + + let original = + TokenMetadata::with_supply(symbol.clone(), decimals, max_supply, token_supply).unwrap(); + let word: Word = original.into(); + let restored = TokenMetadata::try_from(word).unwrap(); + + assert_eq!(restored.symbol(), &symbol); + assert_eq!(restored.decimals(), decimals); + assert_eq!(restored.max_supply(), max_supply); + assert_eq!(restored.token_supply(), token_supply); + } +} diff --git a/crates/miden-standards/src/account/interface/component.rs b/crates/miden-standards/src/account/interface/component.rs index 110e23d1b3..51615b7151 100644 --- a/crates/miden-standards/src/account/interface/component.rs +++ b/crates/miden-standards/src/account/interface/component.rs @@ -1,20 +1,13 @@ use alloc::string::{String, ToString}; use alloc::vec::Vec; -use miden_protocol::account::auth::PublicKeyCommitment; +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::{AccountId, AccountProcedureRoot, AccountStorage, StorageSlotName}; use miden_protocol::note::PartialNote; -use miden_protocol::{Felt, FieldElement, Word}; - -use crate::AuthScheme; -use crate::account::auth::{ - AuthEcdsaK256Keccak, - AuthEcdsaK256KeccakAcl, - AuthEcdsaK256KeccakMultisig, - AuthFalcon512Rpo, - AuthFalcon512RpoAcl, - AuthFalcon512RpoMultisig, -}; +use miden_protocol::{Felt, Word}; + +use crate::AuthMethod; +use crate::account::auth::{AuthMultisig, AuthMultisigPsm, AuthSingleSig, AuthSingleSigAcl}; use crate::account::interface::AccountInterfaceError; // ACCOUNT COMPONENT INTERFACE @@ -32,23 +25,17 @@ pub enum AccountComponentInterface { /// [`NetworkFungibleFaucet`][crate::account::faucets::NetworkFungibleFaucet] module. NetworkFungibleFaucet, /// Exposes procedures from the - /// [`AuthEcdsaK256Keccak`][crate::account::auth::AuthEcdsaK256Keccak] module. - AuthEcdsaK256Keccak, - /// Exposes procedures from the - /// [`AuthEcdsaK256KeccakAcl`][crate::account::auth::AuthEcdsaK256KeccakAcl] module. 
- AuthEcdsaK256KeccakAcl, + /// [`AuthSingleSig`][crate::account::auth::AuthSingleSig] module. + AuthSingleSig, /// Exposes procedures from the - /// [`AuthEcdsaK256KeccakMultisig`][crate::account::auth::AuthEcdsaK256KeccakMultisig] module. - AuthEcdsaK256KeccakMultisig, + /// [`AuthSingleSigAcl`][crate::account::auth::AuthSingleSigAcl] module. + AuthSingleSigAcl, /// Exposes procedures from the - /// [`AuthFalcon512Rpo`][crate::account::auth::AuthFalcon512Rpo] module. - AuthFalcon512Rpo, + /// [`AuthMultisig`][crate::account::auth::AuthMultisig] module. + AuthMultisig, /// Exposes procedures from the - /// [`AuthFalcon512RpoAcl`][crate::account::auth::AuthFalcon512RpoAcl] module. - AuthFalcon512RpoAcl, - /// Exposes procedures from the - /// [`AuthFalcon512RpoMultisig`][crate::account::auth::AuthFalcon512RpoMultisig] module. - AuthFalcon512RpoMultisig, + /// [`AuthMultisigPsm`][crate::account::auth::AuthMultisigPsm] module. + AuthMultisigPsm, /// Exposes procedures from the [`NoAuth`][crate::account::auth::NoAuth] module. 
/// /// This authentication scheme provides no cryptographic authentication and only increments @@ -74,19 +61,10 @@ impl AccountComponentInterface { AccountComponentInterface::NetworkFungibleFaucet => { "Network Fungible Faucet".to_string() }, - AccountComponentInterface::AuthEcdsaK256Keccak => "ECDSA K256 Keccak".to_string(), - AccountComponentInterface::AuthEcdsaK256KeccakAcl => { - "ECDSA K256 Keccak ACL".to_string() - }, - AccountComponentInterface::AuthEcdsaK256KeccakMultisig => { - "ECDSA K256 Keccak Multisig".to_string() - }, - AccountComponentInterface::AuthFalcon512Rpo => "Falcon512 RPO".to_string(), - AccountComponentInterface::AuthFalcon512RpoAcl => "Falcon512 RPO ACL".to_string(), - AccountComponentInterface::AuthFalcon512RpoMultisig => { - "Falcon512 RPO Multisig".to_string() - }, - + AccountComponentInterface::AuthSingleSig => "SingleSig".to_string(), + AccountComponentInterface::AuthSingleSigAcl => "SingleSig ACL".to_string(), + AccountComponentInterface::AuthMultisig => "Multisig".to_string(), + AccountComponentInterface::AuthMultisigPsm => "Multisig PSM".to_string(), AccountComponentInterface::AuthNoAuth => "No Auth".to_string(), AccountComponentInterface::Custom(proc_root_vec) => { let result = proc_root_vec @@ -105,70 +83,44 @@ impl AccountComponentInterface { pub fn is_auth_component(&self) -> bool { matches!( self, - AccountComponentInterface::AuthEcdsaK256Keccak - | AccountComponentInterface::AuthEcdsaK256KeccakAcl - | AccountComponentInterface::AuthEcdsaK256KeccakMultisig - | AccountComponentInterface::AuthFalcon512Rpo - | AccountComponentInterface::AuthFalcon512RpoAcl - | AccountComponentInterface::AuthFalcon512RpoMultisig + AccountComponentInterface::AuthSingleSig + | AccountComponentInterface::AuthSingleSigAcl + | AccountComponentInterface::AuthMultisig + | AccountComponentInterface::AuthMultisigPsm | AccountComponentInterface::AuthNoAuth ) } /// Returns the authentication schemes associated with this component interface. 
- pub fn get_auth_schemes(&self, storage: &AccountStorage) -> Vec { + pub fn get_auth_methods(&self, storage: &AccountStorage) -> Vec { match self { - AccountComponentInterface::AuthEcdsaK256Keccak => { - vec![AuthScheme::EcdsaK256Keccak { - pub_key: PublicKeyCommitment::from( - storage - .get_item(AuthEcdsaK256Keccak::public_key_slot()) - .expect("invalid storage index of the public key"), - ), - }] - }, - AccountComponentInterface::AuthEcdsaK256KeccakAcl => { - vec![AuthScheme::EcdsaK256Keccak { - pub_key: PublicKeyCommitment::from( - storage - .get_item(AuthEcdsaK256KeccakAcl::public_key_slot()) - .expect("invalid storage index of the public key"), - ), - }] - }, - AccountComponentInterface::AuthEcdsaK256KeccakMultisig => { - vec![extract_multisig_auth_scheme( + AccountComponentInterface::AuthSingleSig => vec![extract_singlesig_auth_method( + storage, + AuthSingleSig::public_key_slot(), + AuthSingleSig::scheme_id_slot(), + )], + AccountComponentInterface::AuthSingleSigAcl => vec![extract_singlesig_auth_method( + storage, + AuthSingleSigAcl::public_key_slot(), + AuthSingleSigAcl::scheme_id_slot(), + )], + AccountComponentInterface::AuthMultisig => { + vec![extract_multisig_auth_method( storage, - AuthEcdsaK256KeccakMultisig::threshold_config_slot(), - AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), + AuthMultisig::threshold_config_slot(), + AuthMultisig::approver_public_keys_slot(), + AuthMultisig::approver_scheme_ids_slot(), )] }, - AccountComponentInterface::AuthFalcon512Rpo => { - vec![AuthScheme::Falcon512Rpo { - pub_key: PublicKeyCommitment::from( - storage - .get_item(AuthFalcon512Rpo::public_key_slot()) - .expect("invalid slot name of the AuthFalcon512Rpo public key"), - ), - }] - }, - AccountComponentInterface::AuthFalcon512RpoAcl => { - vec![AuthScheme::Falcon512Rpo { - pub_key: PublicKeyCommitment::from( - storage - .get_item(AuthFalcon512RpoAcl::public_key_slot()) - .expect("invalid slot name of the AuthFalcon512RpoAcl public key"), - ), - }] 
- }, - AccountComponentInterface::AuthFalcon512RpoMultisig => { - vec![extract_multisig_auth_scheme( + AccountComponentInterface::AuthMultisigPsm => { + vec![extract_multisig_auth_method( storage, - AuthFalcon512RpoMultisig::threshold_config_slot(), - AuthFalcon512RpoMultisig::approver_public_keys_slot(), + AuthMultisigPsm::threshold_config_slot(), + AuthMultisigPsm::approver_public_keys_slot(), + AuthMultisigPsm::approver_scheme_ids_slot(), )] }, - AccountComponentInterface::AuthNoAuth => vec![AuthScheme::NoAuth], + AccountComponentInterface::AuthNoAuth => vec![AuthMethod::NoAuth], _ => vec![], // Non-auth components return empty vector } } @@ -247,9 +199,9 @@ impl AccountComponentInterface { let asset = partial_note.assets().iter().next().expect("note should contain an asset"); - if asset.faucet_id_prefix() != sender_account_id.prefix() { + if asset.faucet_id() != sender_account_id { return Err(AccountInterfaceError::IssuanceFaucetMismatch( - asset.faucet_id_prefix(), + asset.faucet_id(), )); } @@ -275,13 +227,22 @@ impl AccountComponentInterface { for asset in partial_note.assets().iter() { body.push_str(&format!( " - push.{asset} - # => [ASSET, note_idx, pad(16)] + # duplicate note index + padw push.0 push.0 push.0 dup.7 + # => [note_idx, pad(7), note_idx, pad(16)] + + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7), note_idx, pad(16)] + call.::miden::standards::wallets::basic::move_asset_to_note - dropw + # => [pad(16), note_idx, pad(16)] + + dropw dropw dropw dropw # => [note_idx, pad(16)]\n ", - asset = Word::from(*asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } }, @@ -316,43 +277,78 @@ impl AccountComponentInterface { // HELPER FUNCTIONS // ================================================================================================ -/// Extracts authentication scheme from a multisig component. 
-fn extract_multisig_auth_scheme( +/// Extracts authentication method from a single-signature component. +fn extract_singlesig_auth_method( + storage: &AccountStorage, + public_key_slot: &StorageSlotName, + scheme_id_slot: &StorageSlotName, +) -> AuthMethod { + let pub_key = PublicKeyCommitment::from( + storage + .get_item(public_key_slot) + .expect("invalid storage index of the public key"), + ); + + let scheme_id = storage + .get_item(scheme_id_slot) + .expect("invalid storage index of the scheme id")[0] + .as_canonical_u64() as u8; + + let auth_scheme = + AuthScheme::try_from(scheme_id).expect("invalid auth scheme id in the scheme id slot"); + + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } +} + +/// Extracts authentication method from a multisig component. +fn extract_multisig_auth_method( storage: &AccountStorage, config_slot: &StorageSlotName, approver_public_keys_slot: &StorageSlotName, -) -> AuthScheme { + approver_scheme_ids_slot: &StorageSlotName, +) -> AuthMethod { // Read the multisig configuration from the config slot // Format: [threshold, num_approvers, 0, 0] let config = storage .get_item(config_slot) .expect("invalid slot name of the multisig configuration"); - let threshold = config[0].as_int() as u32; - let num_approvers = config[1].as_int() as u8; + let threshold = config[0].as_canonical_u64() as u32; + let num_approvers = config[1].as_canonical_u64() as u8; - let mut pub_keys = Vec::new(); + let mut approvers = Vec::new(); // Read each public key from the map for key_index in 0..num_approvers { - // The multisig component stores keys using pattern [index, 0, 0, 0] - let map_key = [Felt::new(key_index as u64), Felt::ZERO, Felt::ZERO, Felt::ZERO]; + // The multisig component stores keys and scheme IDs using pattern [index, 0, 0, 0] + let map_key = Word::from([key_index as u32, 0, 0, 0]); - match storage.get_map_item(approver_public_keys_slot, map_key.into()) { - Ok(pub_key) => { - pub_keys.push(PublicKeyCommitment::from(pub_key)); - 
}, - Err(_) => { - // If we can't read a public key, panic with a clear error message + let pub_key_word = + storage.get_map_item(approver_public_keys_slot, map_key).unwrap_or_else(|_| { panic!( "Failed to read public key {} from multisig configuration at storage slot {}. \ - Expected key pattern [index, 0, 0, 0]. \ - This indicates corrupted multisig storage or incorrect storage layout.", + Expected key pattern [index, 0, 0, 0].", key_index, approver_public_keys_slot - ); - }, - } + ) + }); + + let pub_key = PublicKeyCommitment::from(pub_key_word); + + let scheme_word = storage + .get_map_item(approver_scheme_ids_slot, map_key) + .unwrap_or_else(|_| { + panic!( + "Failed to read scheme id for approver {} from multisig configuration at storage slot {}. \ + Expected key pattern [index, 0, 0, 0].", + key_index, approver_scheme_ids_slot + ) + }); + + let scheme_id = scheme_word[0].as_canonical_u64() as u8; + let auth_scheme = + AuthScheme::try_from(scheme_id).expect("invalid auth scheme id in the scheme id slot"); + approvers.push((pub_key, auth_scheme)); } - AuthScheme::Falcon512RpoMultisig { threshold, pub_keys } + AuthMethod::Multisig { threshold, approvers } } diff --git a/crates/miden-standards/src/account/interface/extension.rs b/crates/miden-standards/src/account/interface/extension.rs index e563408c33..f23b1414a7 100644 --- a/crates/miden-standards/src/account/interface/extension.rs +++ b/crates/miden-standards/src/account/interface/extension.rs @@ -2,32 +2,30 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::MastNodeExt; +use miden_processor::mast::MastNodeExt; use miden_protocol::Word; use miden_protocol::account::{Account, AccountCode, AccountId, AccountProcedureRoot}; use miden_protocol::assembly::mast::{MastForest, MastNode, MastNodeId}; use miden_protocol::note::{Note, NoteScript}; -use crate::AuthScheme; +use crate::AuthMethod; use crate::account::components::{ - WellKnownComponent, + 
StandardAccountComponent, basic_fungible_faucet_library, basic_wallet_library, - ecdsa_k256_keccak_acl_library, - ecdsa_k256_keccak_library, - ecdsa_k256_keccak_multisig_library, - falcon_512_rpo_acl_library, - falcon_512_rpo_library, - falcon_512_rpo_multisig_library, + multisig_library, + multisig_psm_library, network_fungible_faucet_library, no_auth_library, + singlesig_acl_library, + singlesig_library, }; use crate::account::interface::{ AccountComponentInterface, AccountInterface, NoteAccountCompatibility, }; -use crate::note::WellKnownNote; +use crate::note::StandardNote; // ACCOUNT INTERFACE EXTENSION TRAIT // ================================================================================================ @@ -35,8 +33,8 @@ use crate::note::WellKnownNote; /// An extension for [`AccountInterface`] that allows instantiation from higher-level types. pub trait AccountInterfaceExt { /// Creates a new [`AccountInterface`] instance from the provided account ID, authentication - /// schemes and account code. - fn from_code(account_id: AccountId, auth: Vec, code: &AccountCode) -> Self; + /// methods and account code. + fn from_code(account_id: AccountId, auth: Vec, code: &AccountCode) -> Self; /// Creates a new [`AccountInterface`] instance from the provided [`Account`]. 
fn from_account(account: &Account) -> Self; @@ -50,7 +48,7 @@ pub trait AccountInterfaceExt { } impl AccountInterfaceExt for AccountInterface { - fn from_code(account_id: AccountId, auth: Vec, code: &AccountCode) -> Self { + fn from_code(account_id: AccountId, auth: Vec, code: &AccountCode) -> Self { let components = AccountComponentInterface::from_procedures(code.procedures()); Self::new(account_id, auth, components) @@ -60,11 +58,11 @@ impl AccountInterfaceExt for AccountInterface { let components = AccountComponentInterface::from_procedures(account.code().procedures()); let mut auth = Vec::new(); - // Find the auth component and extract all auth schemes from it + // Find the auth component and extract all auth methods from it // An account should have only one auth component for component in components.iter() { if component.is_auth_component() { - auth = component.get_auth_schemes(account.storage()); + auth = component.get_auth_methods(account.storage()); break; } } @@ -75,8 +73,8 @@ impl AccountInterfaceExt for AccountInterface { /// Returns [NoteAccountCompatibility::Maybe] if the provided note is compatible with the /// current [AccountInterface], and [NoteAccountCompatibility::No] otherwise. 
fn is_compatible_with(&self, note: &Note) -> NoteAccountCompatibility { - if let Some(well_known_note) = WellKnownNote::from_note(note) { - if well_known_note.is_compatible_with(self) { + if let Some(standard_note) = StandardNote::from_script_root(note.script().root()) { + if standard_note.is_compatible_with(self) { NoteAccountCompatibility::Maybe } else { NoteAccountCompatibility::No @@ -103,31 +101,21 @@ impl AccountInterfaceExt for AccountInterface { network_fungible_faucet_library().mast_forest().procedure_digests(), ); }, - AccountComponentInterface::AuthEcdsaK256Keccak => { + AccountComponentInterface::AuthSingleSig => { component_proc_digests - .extend(ecdsa_k256_keccak_library().mast_forest().procedure_digests()); + .extend(singlesig_library().mast_forest().procedure_digests()); }, - AccountComponentInterface::AuthEcdsaK256KeccakAcl => { + AccountComponentInterface::AuthSingleSigAcl => { component_proc_digests - .extend(ecdsa_k256_keccak_acl_library().mast_forest().procedure_digests()); + .extend(singlesig_acl_library().mast_forest().procedure_digests()); }, - AccountComponentInterface::AuthEcdsaK256KeccakMultisig => { - component_proc_digests.extend( - ecdsa_k256_keccak_multisig_library().mast_forest().procedure_digests(), - ); - }, - AccountComponentInterface::AuthFalcon512Rpo => { + AccountComponentInterface::AuthMultisig => { component_proc_digests - .extend(falcon_512_rpo_library().mast_forest().procedure_digests()); + .extend(multisig_library().mast_forest().procedure_digests()); }, - AccountComponentInterface::AuthFalcon512RpoAcl => { + AccountComponentInterface::AuthMultisigPsm => { component_proc_digests - .extend(falcon_512_rpo_acl_library().mast_forest().procedure_digests()); - }, - AccountComponentInterface::AuthFalcon512RpoMultisig => { - component_proc_digests.extend( - falcon_512_rpo_multisig_library().mast_forest().procedure_digests(), - ); + .extend(multisig_psm_library().mast_forest().procedure_digests()); }, 
AccountComponentInterface::AuthNoAuth => { component_proc_digests @@ -158,12 +146,12 @@ impl AccountComponentInterfaceExt for AccountComponentInterface { let mut procedures = BTreeSet::from_iter(procedures.iter().copied()); - // Well known component interfaces + // Standard component interfaces // ---------------------------------------------------------------------------------------- - // Get all available well known components which could be constructed from the + // Get all available standard components which could be constructed from the // `procedures` map and push them to the `component_interface_vec` - WellKnownComponent::extract_well_known_components( + StandardAccountComponent::extract_standard_components( &mut procedures, &mut component_interface_vec, ); diff --git a/crates/miden-standards/src/account/interface/mod.rs b/crates/miden-standards/src/account/interface/mod.rs index cdc967759a..41181e037e 100644 --- a/crates/miden-standards/src/account/interface/mod.rs +++ b/crates/miden-standards/src/account/interface/mod.rs @@ -1,12 +1,12 @@ use alloc::string::String; use alloc::vec::Vec; -use miden_protocol::account::{AccountId, AccountIdPrefix, AccountType}; -use miden_protocol::note::PartialNote; +use miden_protocol::account::{AccountId, AccountType}; +use miden_protocol::note::{NoteAttachmentContent, PartialNote}; use miden_protocol::transaction::TransactionScript; use thiserror::Error; -use crate::AuthScheme; +use crate::AuthMethod; use crate::code_builder::CodeBuilder; use crate::errors::CodeBuilderError; @@ -28,7 +28,7 @@ pub use extension::{AccountComponentInterfaceExt, AccountInterfaceExt}; /// result in a successful execution against this account. pub struct AccountInterface { account_id: AccountId, - auth: Vec, + auth: Vec, components: Vec, } @@ -42,7 +42,7 @@ impl AccountInterface { /// schemes and account component interfaces. 
pub fn new( account_id: AccountId, - auth: Vec, + auth: Vec, components: Vec, ) -> Self { Self { account_id, auth, components } @@ -94,8 +94,8 @@ impl AccountInterface { self.account_id.is_network() } - /// Returns a reference to the vector of used authentication schemes. - pub fn auth(&self) -> &Vec { + /// Returns a reference to the vector of used authentication methods. + pub fn auth(&self) -> &Vec { &self.auth } @@ -161,7 +161,17 @@ impl AccountInterface { note_creation_source, ); - let tx_script = CodeBuilder::new() + // Add attachment array entries to the code builder's advice map. + // For NoteAttachmentContent::Array, the commitment (to_word) is used as key + // and the array elements as value. + let mut code_builder = CodeBuilder::new(); + for note in output_notes { + if let NoteAttachmentContent::Array(array) = note.metadata().attachment().content() { + code_builder.add_advice_map_entry(array.commitment(), array.as_slice().to_vec()); + } + } + + let tx_script = code_builder .compile_tx_script(script) .map_err(AccountInterfaceError::InvalidTransactionScript)?; @@ -239,8 +249,8 @@ pub enum NoteAccountCompatibility { /// Account interface related errors. 
#[derive(Debug, Error)] pub enum AccountInterfaceError { - #[error("note asset is not issued by this faucet: {0}")] - IssuanceFaucetMismatch(AccountIdPrefix), + #[error("note asset is not issued by faucet {0}")] + IssuanceFaucetMismatch(AccountId), #[error("note created by the basic fungible faucet doesn't contain exactly one asset")] FaucetNoteWithoutAsset, #[error("invalid transaction script")] diff --git a/crates/miden-standards/src/account/interface/test.rs b/crates/miden-standards/src/account/interface/test.rs index e6639b32f0..d6cbcc7b51 100644 --- a/crates/miden-standards/src/account/interface/test.rs +++ b/crates/miden-standards/src/account/interface/test.rs @@ -1,6 +1,7 @@ use assert_matches::assert_matches; -use miden_protocol::account::auth::PublicKeyCommitment; -use miden_protocol::account::{AccountBuilder, AccountComponent, AccountType}; +use miden_protocol::account::auth::{self, PublicKeyCommitment}; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{AccountBuilder, AccountComponent, AccountId, AccountType}; use miden_protocol::asset::{FungibleAsset, NonFungibleAsset, TokenSymbol}; use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; use miden_protocol::errors::NoteError; @@ -8,9 +9,9 @@ use miden_protocol::note::{ Note, NoteAssets, NoteAttachment, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -20,14 +21,8 @@ use miden_protocol::testing::account_id::{ }; use miden_protocol::{Felt, Word}; -use crate::AuthScheme; -use crate::account::auth::{ - AuthEcdsaK256Keccak, - AuthFalcon512Rpo, - AuthFalcon512RpoMultisig, - AuthFalcon512RpoMultisigConfig, - NoAuth, -}; +use crate::AuthMethod; +use crate::account::auth::{AuthMultisig, AuthMultisigConfig, AuthSingleSig, NoAuth}; use crate::account::faucets::BasicFungibleFaucet; use crate::account::interface::{ AccountComponentInterface, @@ -37,7 +32,7 @@ use crate::account::interface::{ }; use 
crate::account::wallets::BasicWallet; use crate::code_builder::CodeBuilder; -use crate::note::{create_p2id_note, create_p2ide_note, create_swap_note}; +use crate::note::{P2idNote, P2ideNote, P2ideNoteStorage, SwapNote}; use crate::testing::account_interface::get_public_keys_from_account; // DEFAULT NOTES @@ -47,7 +42,7 @@ use crate::testing::account_interface::get_public_keys_from_account; fn test_basic_wallet_default_notes() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .with_assets(vec![FungibleAsset::mock(20)]) .build_existing() @@ -58,7 +53,7 @@ fn test_basic_wallet_default_notes() { let mock_seed = Word::from([Felt::new(4), Felt::new(5), Felt::new(6), Felt::new(7)]).as_bytes(); let faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component( BasicFungibleFaucet::new( TokenSymbol::new("POL").expect("invalid token symbol"), @@ -71,7 +66,7 @@ fn test_basic_wallet_default_notes() { .expect("failed to create wallet account"); let faucet_account_interface = AccountInterface::from_account(&faucet_account); - let p2id_note = create_p2id_note( + let p2id_note = P2idNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(), vec![FungibleAsset::mock(10)], @@ -81,12 +76,14 @@ fn test_basic_wallet_default_notes() { ) .unwrap(); - let p2ide_note = create_p2ide_note( - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(), + let sender: AccountId = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(); + + let target: AccountId = 
ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); + + let p2ide_note = P2ideNote::create( + sender, + P2ideNoteStorage::new(target, None, None), vec![FungibleAsset::mock(10)], - None, - None, NoteType::Public, Default::default(), &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), @@ -96,7 +93,7 @@ fn test_basic_wallet_default_notes() { let offered_asset = NonFungibleAsset::mock(&[5, 6, 7, 8]); let requested_asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); - let (swap_note, _) = create_swap_note( + let (swap_note, _) = SwapNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), offered_asset, requested_asset, @@ -153,18 +150,18 @@ fn test_custom_account_default_note() { let account_code = CodeBuilder::default() .compile_component_code("test::account_custom", account_custom_code_source) .unwrap(); - let account_component = - AccountComponent::new(account_code, vec![]).unwrap().with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account_custom", AccountType::all()); + let account_component = AccountComponent::new(account_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let target_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(account_component.clone()) .build_existing() .unwrap(); let target_account_interface = AccountInterface::from_account(&target_account); - let p2id_note = create_p2id_note( + let p2id_note = P2idNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(), vec![FungibleAsset::mock(10)], @@ -174,12 +171,14 @@ fn test_custom_account_default_note() { ) .unwrap(); - let p2ide_note = create_p2ide_note( - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), - 
ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(), + let sender: AccountId = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(); + + let target: AccountId = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); + + let p2ide_note = P2ideNote::create( + sender, + P2ideNoteStorage::new(target, None, None), vec![FungibleAsset::mock(10)], - None, - None, NoteType::Public, Default::default(), &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), @@ -189,7 +188,7 @@ fn test_custom_account_default_note() { let offered_asset = NonFungibleAsset::mock(&[5, 6, 7, 8]); let requested_asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); - let (swap_note, _) = create_swap_note( + let (swap_note, _) = SwapNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), offered_asset, requested_asset, @@ -222,7 +221,7 @@ fn test_required_asset_same_as_offered() { let offered_asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); let requested_asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); - let result = create_swap_note( + let result = SwapNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), offered_asset, requested_asset, @@ -243,7 +242,7 @@ fn test_required_asset_same_as_offered() { fn test_basic_wallet_custom_notes() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .with_assets(vec![FungibleAsset::mock(20)]) .build_existing() @@ -253,7 +252,7 @@ fn test_basic_wallet_custom_notes() { let sender_account_id = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(wallet_account.id()); - let metadata = NoteMetadata::new(sender_account_id, NoteType::Public, tag); + 
let metadata = NoteMetadata::new(sender_account_id, NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); let compatible_source_code = " @@ -279,7 +278,7 @@ fn test_basic_wallet_custom_notes() { end "; let note_script = CodeBuilder::default().compile_note_script(compatible_source_code).unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let compatible_custom_note = Note::new(vault.clone(), metadata.clone(), recipient); assert_eq!( NoteAccountCompatibility::Maybe, @@ -307,7 +306,7 @@ fn test_basic_wallet_custom_notes() { end "; let note_script = CodeBuilder::default().compile_note_script(incompatible_source_code).unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let incompatible_custom_note = Note::new(vault, metadata, recipient); assert_eq!( NoteAccountCompatibility::No, @@ -320,7 +319,7 @@ fn test_basic_fungible_faucet_custom_notes() { let mock_seed = Word::from([Felt::new(4), Felt::new(5), Felt::new(6), Felt::new(7)]).as_bytes(); let faucet_account = AccountBuilder::new(mock_seed) .account_type(AccountType::FungibleFaucet) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component( BasicFungibleFaucet::new( TokenSymbol::new("POL").expect("invalid token symbol"), @@ -336,7 +335,7 @@ fn test_basic_fungible_faucet_custom_notes() { let sender_account_id = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into().unwrap(); let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(faucet_account.id()); - let metadata = NoteMetadata::new(sender_account_id, NoteType::Public, tag); + let metadata = NoteMetadata::new(sender_account_id, 
NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); let compatible_source_code = " @@ -360,7 +359,7 @@ fn test_basic_fungible_faucet_custom_notes() { end "; let note_script = CodeBuilder::default().compile_note_script(compatible_source_code).unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let compatible_custom_note = Note::new(vault.clone(), metadata.clone(), recipient); assert_eq!( NoteAccountCompatibility::Maybe, @@ -390,7 +389,7 @@ fn test_basic_fungible_faucet_custom_notes() { end "; let note_script = CodeBuilder::default().compile_note_script(incompatible_source_code).unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let incompatible_custom_note = Note::new(vault, metadata, recipient); assert_eq!( NoteAccountCompatibility::No, @@ -418,12 +417,12 @@ fn test_custom_account_custom_notes() { let account_code = CodeBuilder::default() .compile_component_code("test::account::component_1", account_custom_code_source) .unwrap(); - let account_component = - AccountComponent::new(account_code, vec![]).unwrap().with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account::component_1", AccountType::all()); + let account_component = AccountComponent::new(account_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let target_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(account_component.clone()) .build_existing() .unwrap(); @@ -431,7 +430,7 @@ fn test_custom_account_custom_notes() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let sender_account = 
AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .with_assets(vec![FungibleAsset::mock(20)]) .build_existing() @@ -439,7 +438,7 @@ fn test_custom_account_custom_notes() { let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); - let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public, tag); + let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); let compatible_source_code = " @@ -466,7 +465,7 @@ fn test_custom_account_custom_notes() { .unwrap() .compile_note_script(compatible_source_code) .unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let compatible_custom_note = Note::new(vault.clone(), metadata.clone(), recipient); assert_eq!( NoteAccountCompatibility::Maybe, @@ -493,7 +492,7 @@ fn test_custom_account_custom_notes() { .unwrap() .compile_note_script(incompatible_source_code) .unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let incompatible_custom_note = Note::new(vault, metadata, recipient); assert_eq!( NoteAccountCompatibility::No, @@ -521,12 +520,12 @@ fn test_custom_account_multiple_components_custom_notes() { let custom_code = CodeBuilder::default() .compile_component_code("test::account::component_1", account_custom_code_source) .unwrap(); - let custom_component = - AccountComponent::new(custom_code, vec![]).unwrap().with_supports_all_types(); + let metadata = AccountComponentMetadata::new("test::account::component_1", AccountType::all()); + let custom_component = 
AccountComponent::new(custom_code, vec![], metadata).unwrap(); let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let target_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(custom_component.clone()) .with_component(BasicWallet) .build_existing() @@ -535,7 +534,7 @@ fn test_custom_account_multiple_components_custom_notes() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let sender_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .with_assets(vec![FungibleAsset::mock(20)]) .build_existing() @@ -543,7 +542,7 @@ fn test_custom_account_multiple_components_custom_notes() { let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); - let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public, tag); + let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![FungibleAsset::mock(100)]).unwrap(); let compatible_source_code = " @@ -576,7 +575,7 @@ fn test_custom_account_multiple_components_custom_notes() { .unwrap() .compile_note_script(compatible_source_code) .unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let compatible_custom_note = Note::new(vault.clone(), metadata.clone(), recipient); assert_eq!( NoteAccountCompatibility::Maybe, @@ -614,7 +613,7 @@ fn test_custom_account_multiple_components_custom_notes() { .unwrap() .compile_note_script(incompatible_source_code) .unwrap(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, 
NoteStorage::default()); let incompatible_custom_note = Note::new(vault.clone(), metadata, recipient); assert_eq!( NoteAccountCompatibility::No, @@ -626,17 +625,17 @@ fn test_custom_account_multiple_components_custom_notes() { // ================================================================================================ /// Helper function to create a mock auth component for testing -fn get_mock_auth_component() -> AuthFalcon512Rpo { +fn get_mock_falcon_auth_component() -> AuthSingleSig { let mock_word = Word::from([0, 1, 2, 3u32]); let mock_public_key = PublicKeyCommitment::from(mock_word); - AuthFalcon512Rpo::new(mock_public_key) + AuthSingleSig::new(mock_public_key, auth::AuthScheme::Falcon512Poseidon2) } /// Helper function to create a mock Ecdsa auth component for testing -fn get_mock_ecdsa_auth_component() -> AuthEcdsaK256Keccak { +fn get_mock_ecdsa_auth_component() -> AuthSingleSig { let mock_word = Word::from([0, 1, 2, 3u32]); let mock_public_key = PublicKeyCommitment::from(mock_word); - AuthEcdsaK256Keccak::new(mock_public_key) + AuthSingleSig::new(mock_public_key, auth::AuthScheme::EcdsaK256Keccak) } // GET AUTH SCHEME TESTS @@ -657,48 +656,50 @@ fn test_get_auth_scheme_ecdsa_k256_keccak() { let ecdsa_k256_keccak_component = wallet_account_interface .components() .iter() - .find(|component| matches!(component, AccountComponentInterface::AuthEcdsaK256Keccak)) + .find(|component| matches!(component, AccountComponentInterface::AuthSingleSig)) .expect("should have EcdsaK256Keccak component"); - // Test get_auth_schemes method - let auth_schemes = ecdsa_k256_keccak_component.get_auth_schemes(wallet_account.storage()); - assert_eq!(auth_schemes.len(), 1); - let auth_scheme = &auth_schemes[0]; - match auth_scheme { - AuthScheme::EcdsaK256Keccak { pub_key } => { + // Test get_auth_methods method + let auth_methods = ecdsa_k256_keccak_component.get_auth_methods(wallet_account.storage()); + assert_eq!(auth_methods.len(), 1); + let auth_method = 
&auth_methods[0]; + match auth_method { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { assert_eq!(*pub_key, PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32]))); + assert_eq!(*auth_scheme, auth::AuthScheme::EcdsaK256Keccak); }, _ => panic!("Expected EcdsaK256Keccak auth scheme"), } } #[test] -fn test_get_auth_scheme_falcon512_rpo() { +fn test_get_auth_scheme_falcon512_poseidon2() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .build_existing() .expect("failed to create wallet account"); let wallet_account_interface = AccountInterface::from_account(&wallet_account); - // Find the Falcon512Rpo component interface + // Find the single sig component interface let rpo_falcon_component = wallet_account_interface .components() .iter() - .find(|component| matches!(component, AccountComponentInterface::AuthFalcon512Rpo)) - .expect("should have Falcon512Rpo component"); - - // Test get_auth_schemes method - let auth_schemes = rpo_falcon_component.get_auth_schemes(wallet_account.storage()); - assert_eq!(auth_schemes.len(), 1); - let auth_scheme = &auth_schemes[0]; - match auth_scheme { - AuthScheme::Falcon512Rpo { pub_key } => { + .find(|component| matches!(component, AccountComponentInterface::AuthSingleSig)) + .expect("should have single sig component"); + + // Test get_auth_methods method + let auth_methods = rpo_falcon_component.get_auth_methods(wallet_account.storage()); + assert_eq!(auth_methods.len(), 1); + let auth_method = &auth_methods[0]; + match auth_method { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { assert_eq!(*pub_key, PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32]))); + assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, - _ => panic!("Expected Falcon512Rpo auth scheme"), + _ => panic!("Expected 
Falcon512Poseidon2 auth scheme"), } } @@ -720,13 +721,13 @@ fn test_get_auth_scheme_no_auth() { .find(|component| matches!(component, AccountComponentInterface::AuthNoAuth)) .expect("should have NoAuth component"); - // Test get_auth_schemes method - let auth_schemes = no_auth_component.get_auth_schemes(no_auth_account.storage()); - assert_eq!(auth_schemes.len(), 1); - let auth_scheme = &auth_schemes[0]; - match auth_scheme { - AuthScheme::NoAuth => {}, - _ => panic!("Expected NoAuth auth scheme"), + // Test get_auth_methods method + let auth_methods = no_auth_component.get_auth_methods(no_auth_account.storage()); + assert_eq!(auth_methods.len(), 1); + let auth_method = &auth_methods[0]; + match auth_method { + AuthMethod::NoAuth => {}, + _ => panic!("Expected NoAuth auth method"), } } @@ -736,13 +737,13 @@ fn test_get_auth_scheme_non_auth_component() { let basic_wallet_component = AccountComponentInterface::BasicWallet; let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .build_existing() .expect("failed to create wallet account"); - let auth_schemes = basic_wallet_component.get_auth_schemes(wallet_account.storage()); - assert!(auth_schemes.is_empty()); + let auth_methods = basic_wallet_component.get_auth_methods(wallet_account.storage()); + assert!(auth_methods.is_empty()); } /// Test that the From<&Account> implementation correctly uses get_auth_scheme @@ -750,7 +751,7 @@ fn test_get_auth_scheme_non_auth_component() { fn test_account_interface_from_account_uses_get_auth_scheme() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .build_existing() .expect("failed to create wallet 
account"); @@ -761,11 +762,12 @@ fn test_account_interface_from_account_uses_get_auth_scheme() { assert_eq!(wallet_account_interface.auth().len(), 1); match &wallet_account_interface.auth()[0] { - AuthScheme::Falcon512Rpo { pub_key } => { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { let expected_pub_key = PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32])); assert_eq!(*pub_key, expected_pub_key); + assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, - _ => panic!("Expected Falcon512Rpo auth scheme"), + _ => panic!("Expected SingleSig auth method"), } // Test with NoAuth @@ -781,17 +783,17 @@ fn test_account_interface_from_account_uses_get_auth_scheme() { assert_eq!(no_auth_account_interface.auth().len(), 1); match &no_auth_account_interface.auth()[0] { - AuthScheme::NoAuth => {}, - _ => panic!("Expected NoAuth auth scheme"), + AuthMethod::NoAuth => {}, + _ => panic!("Expected NoAuth auth method"), } } -/// Test AccountInterface.get_auth_scheme() method with Falcon512Rpo and NoAuth +/// Test AccountInterface.get_auth_scheme() method with Falcon512Poseidon2 and NoAuth #[test] fn test_account_interface_get_auth_scheme() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .build_existing() .expect("failed to create wallet account"); @@ -801,10 +803,11 @@ fn test_account_interface_get_auth_scheme() { // Test that auth() method provides the authentication schemes assert_eq!(wallet_account_interface.auth().len(), 1); match &wallet_account_interface.auth()[0] { - AuthScheme::Falcon512Rpo { pub_key } => { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { assert_eq!(*pub_key, PublicKeyCommitment::from(Word::from([0, 1, 2, 3u32]))); + assert_eq!(*auth_scheme, auth::AuthScheme::Falcon512Poseidon2); }, - _ => panic!("Expected 
Falcon512Rpo auth scheme"), + _ => panic!("Expected SingleSig auth method"), } // Test AccountInterface.get_auth_scheme() method with NoAuth @@ -819,8 +822,8 @@ fn test_account_interface_get_auth_scheme() { // Test that auth() method provides the authentication schemes assert_eq!(no_auth_account_interface.auth().len(), 1); match &no_auth_account_interface.auth()[0] { - AuthScheme::NoAuth => {}, - _ => panic!("Expected NoAuth auth scheme"), + AuthMethod::NoAuth => {}, + _ => panic!("Expected NoAuth auth method"), } // Note: We don't test the case where an account has no auth components because @@ -831,7 +834,7 @@ fn test_account_interface_get_auth_scheme() { fn test_public_key_extraction_regular_account() { let mock_seed = Word::from([0, 1, 2, 3u32]).as_bytes(); let wallet_account = AccountBuilder::new(mock_seed) - .with_auth_component(get_mock_auth_component()) + .with_auth_component(get_mock_falcon_auth_component()) .with_component(BasicWallet) .build_existing() .expect("failed to create wallet account"); @@ -849,14 +852,19 @@ fn test_public_key_extraction_multisig_account() { let pub_key_1 = PublicKeyCommitment::from(Word::from([1u32, 0, 0, 0])); let pub_key_2 = PublicKeyCommitment::from(Word::from([2u32, 0, 0, 0])); let pub_key_3 = PublicKeyCommitment::from(Word::from([3u32, 0, 0, 0])); - let approvers = vec![pub_key_1, pub_key_2, pub_key_3]; + + let approvers = vec![ + (pub_key_1, auth::AuthScheme::Falcon512Poseidon2), + (pub_key_2, auth::AuthScheme::Falcon512Poseidon2), + (pub_key_3, auth::AuthScheme::EcdsaK256Keccak), + ]; + let threshold = 2u32; // Create multisig component - let multisig_component = AuthFalcon512RpoMultisig::new( - AuthFalcon512RpoMultisigConfig::new(approvers.clone(), threshold).unwrap(), - ) - .expect("multisig component creation failed"); + let multisig_component = + AuthMultisig::new(AuthMultisigConfig::new(approvers.clone(), threshold).unwrap()) + .expect("multisig component creation failed"); let mock_seed = Word::from([0, 1, 2, 
3u32]).as_bytes(); let multisig_account = AccountBuilder::new(mock_seed) diff --git a/crates/miden-standards/src/account/metadata/mod.rs b/crates/miden-standards/src/account/metadata/mod.rs index 0d5e24a3b5..69def3da9f 100644 --- a/crates/miden-standards/src/account/metadata/mod.rs +++ b/crates/miden-standards/src/account/metadata/mod.rs @@ -1,9 +1,16 @@ use alloc::collections::BTreeMap; use miden_protocol::Word; -use miden_protocol::account::component::StorageSchema; -use miden_protocol::account::{AccountComponent, StorageSlot, StorageSlotName}; -use miden_protocol::errors::AccountComponentTemplateError; +use miden_protocol::account::component::{AccountComponentMetadata, StorageSchema}; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountType, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::errors::{AccountError, ComponentMetadataError}; use miden_protocol::utils::sync::LazyLock; use crate::account::components::storage_schema_library; @@ -30,23 +37,23 @@ pub struct AccountSchemaCommitment { } impl AccountSchemaCommitment { - /// Creates a new [`AccountSchemaCommitment`] component from a list of storage schemas. + /// Creates a new [`AccountSchemaCommitment`] component from storage schemas. /// /// The input schemas are merged into a single schema before the final commitment is computed. /// /// # Errors /// /// Returns an error if the schemas contain conflicting definitions for the same slot name. - pub fn new(schemas: &[StorageSchema]) -> Result { + pub fn new<'a>( + schemas: impl IntoIterator, + ) -> Result { Ok(Self { schema_commitment: compute_schema_commitment(schemas)?, }) } /// Creates a new [`AccountSchemaCommitment`] component from a [`StorageSchema`]. 
- pub fn from_schema( - storage_schema: &StorageSchema, - ) -> Result { + pub fn from_schema(storage_schema: &StorageSchema) -> Result { Self::new(core::slice::from_ref(storage_schema)) } @@ -54,36 +61,78 @@ impl AccountSchemaCommitment { pub fn schema_commitment_slot() -> &'static StorageSlotName { &SCHEMA_COMMITMENT_SLOT_NAME } + + /// Returns the [`AccountComponentMetadata`] for this component. + pub fn component_metadata() -> AccountComponentMetadata { + AccountComponentMetadata::new("miden::metadata::schema_commitment", AccountType::all()) + .with_description("Component exposing the account storage schema commitment") + } } impl From for AccountComponent { fn from(schema_commitment: AccountSchemaCommitment) -> Self { + let metadata = AccountSchemaCommitment::component_metadata(); + AccountComponent::new( storage_schema_library(), vec![StorageSlot::with_value( AccountSchemaCommitment::schema_commitment_slot().clone(), schema_commitment.schema_commitment, )], + metadata, ) .expect( "AccountSchemaCommitment component should satisfy the requirements of a valid account component", ) - .with_supports_all_types() } } +// ACCOUNT BUILDER EXTENSION +// ================================================================================================ + +/// An extension trait for [`AccountBuilder`] that provides a convenience method for building an +/// account with an [`AccountSchemaCommitment`] component. +pub trait AccountBuilderSchemaCommitmentExt { + /// Builds an [`Account`] out of the configured builder after computing the storage schema + /// commitment from all components currently in the builder and adding an + /// [`AccountSchemaCommitment`] component. + /// + /// # Errors + /// + /// Returns an error if: + /// - The components' storage schemas contain conflicting definitions for the same slot name. + /// - [`AccountBuilder::build`] fails. 
+ fn build_with_schema_commitment(self) -> Result; +} + +impl AccountBuilderSchemaCommitmentExt for AccountBuilder { + fn build_with_schema_commitment(self) -> Result { + let schema_commitment = + AccountSchemaCommitment::new(self.storage_schemas()).map_err(|err| { + AccountError::other_with_source("failed to compute account schema commitment", err) + })?; + + self.with_component(schema_commitment).build() + } +} + +// HELPERS +// ================================================================================================ + /// Computes the schema commitment. /// /// The account schema commitment is computed from the merged schema commitment. /// If the passed list of schemas is empty, [`Word::empty()`] is returned. -fn compute_schema_commitment( - schemas: &[StorageSchema], -) -> Result { - if schemas.is_empty() { +fn compute_schema_commitment<'a>( + schemas: impl IntoIterator, +) -> Result { + let mut schemas = schemas.into_iter().peekable(); + if schemas.peek().is_none() { return Ok(Word::empty()); } let mut merged_slots = BTreeMap::new(); + for schema in schemas { for (slot_name, slot_schema) in schema.iter() { match merged_slots.get(slot_name) { @@ -93,7 +142,7 @@ fn compute_schema_commitment( // Slot exists, check if the schema is the same before erroring Some(existing) => { if existing != slot_schema { - return Err(AccountComponentTemplateError::InvalidSchema(format!( + return Err(ComponentMetadataError::InvalidSchema(format!( "conflicting definitions for storage slot `{slot_name}`", ))); } @@ -114,10 +163,11 @@ fn compute_schema_commitment( mod tests { use miden_protocol::Word; use miden_protocol::account::AccountBuilder; + use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::component::AccountComponentMetadata; - use super::AccountSchemaCommitment; - use crate::account::auth::NoAuth; + use super::{AccountBuilderSchemaCommitmentExt, AccountSchemaCommitment}; + use crate::account::auth::{AuthSingleSig, 
NoAuth}; #[test] fn storage_schema_commitment_is_order_independent() { @@ -180,4 +230,29 @@ mod tests { assert_eq!(component.schema_commitment, Word::empty()); } + + #[test] + fn build_with_schema_commitment_adds_schema_commitment_component() { + let auth_component = AuthSingleSig::new( + PublicKeyCommitment::from(Word::empty()), + AuthScheme::EcdsaK256Keccak, + ); + + let account = AccountBuilder::new([1u8; 32]) + .with_auth_component(auth_component) + .build_with_schema_commitment() + .unwrap(); + + // The auth component has 2 slots (public key and scheme ID) and the schema commitment adds + // 1 more. + assert_eq!(account.storage().num_slots(), 3); + + // The auth component's public key slot should be accessible. + assert!(account.storage().get_item(AuthSingleSig::public_key_slot()).is_ok()); + + // The schema commitment slot should be non-empty since we have a component with a schema. + let slot_name = AccountSchemaCommitment::schema_commitment_slot(); + let commitment = account.storage().get_item(slot_name).unwrap(); + assert_ne!(commitment, Word::empty()); + } } diff --git a/crates/miden-standards/src/account/mod.rs b/crates/miden-standards/src/account/mod.rs index af3d4ff69b..ad7b67f1f9 100644 --- a/crates/miden-standards/src/account/mod.rs +++ b/crates/miden-standards/src/account/mod.rs @@ -1,5 +1,6 @@ -use super::auth_scheme::AuthScheme; +use super::auth_method::AuthMethod; +pub mod access; pub mod auth; pub mod components; pub mod faucets; @@ -7,34 +8,42 @@ pub mod interface; pub mod metadata; pub mod wallets; +pub use metadata::AccountBuilderSchemaCommitmentExt; + /// Macro to simplify the creation of static procedure digest constants. /// /// This macro generates a `LazyLock` static variable that lazily initializes /// the digest of a procedure from a library. /// +/// The full procedure path is constructed by concatenating `$component_name` and `$proc_name` +/// with `::` as separator (i.e. `"{component_name}::{proc_name}"`). 
+/// /// Note: This macro references exported types from `miden_protocol`, so your crate must /// include `miden_protocol` as a dependency. /// /// # Arguments /// * `$name` - The name of the static variable to create -/// * `$proc_name` - The string name of the procedure +/// * `$component_name` - The name of the component (e.g. `BasicWallet::NAME`) +/// * `$proc_name` - The short name of the procedure (e.g. `"receive_asset"`) /// * `$library_fn` - The function that returns the library containing the procedure /// /// # Example /// ```ignore /// procedure_digest!( /// BASIC_WALLET_RECEIVE_ASSET, +/// BasicWallet::NAME, /// BasicWallet::RECEIVE_ASSET_PROC_NAME, /// basic_wallet_library /// ); /// ``` #[macro_export] macro_rules! procedure_digest { - ($name:ident, $proc_name:expr, $library_fn:expr) => { + ($name:ident, $component_name:expr, $proc_name:expr, $library_fn:expr) => { static $name: miden_protocol::utils::sync::LazyLock = miden_protocol::utils::sync::LazyLock::new(|| { - $library_fn().get_procedure_root_by_path($proc_name).unwrap_or_else(|| { - panic!("{} should contain '{}' procedure", stringify!($library_fn), $proc_name) + let full_path = alloc::format!("{}::{}", $component_name, $proc_name); + $library_fn().get_procedure_root_by_path(full_path.as_str()).unwrap_or_else(|| { + panic!("{} should contain '{}' procedure", stringify!($library_fn), full_path) }) }); }; diff --git a/crates/miden-standards/src/account/wallets/mod.rs b/crates/miden-standards/src/account/wallets/mod.rs index 23e9204322..c220a0b3e6 100644 --- a/crates/miden-standards/src/account/wallets/mod.rs +++ b/crates/miden-standards/src/account/wallets/mod.rs @@ -1,6 +1,7 @@ use alloc::string::String; use miden_protocol::Word; +use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ Account, AccountBuilder, @@ -11,15 +12,8 @@ use miden_protocol::account::{ use miden_protocol::errors::AccountError; use thiserror::Error; -use super::AuthScheme; -use 
crate::account::auth::{ - AuthEcdsaK256Keccak, - AuthEcdsaK256KeccakMultisig, - AuthEcdsaK256KeccakMultisigConfig, - AuthFalcon512Rpo, - AuthFalcon512RpoMultisig, - AuthFalcon512RpoMultisigConfig, -}; +use super::AuthMethod; +use crate::account::auth::{AuthMultisig, AuthMultisigConfig, AuthSingleSig}; use crate::account::components::basic_wallet_library; use crate::procedure_digest; @@ -29,6 +23,7 @@ use crate::procedure_digest; // Initialize the digest of the `receive_asset` procedure of the Basic Wallet only once. procedure_digest!( BASIC_WALLET_RECEIVE_ASSET, + BasicWallet::NAME, BasicWallet::RECEIVE_ASSET_PROC_NAME, basic_wallet_library ); @@ -36,6 +31,7 @@ procedure_digest!( // Initialize the digest of the `move_asset_to_note` procedure of the Basic Wallet only once. procedure_digest!( BASIC_WALLET_MOVE_ASSET_TO_NOTE, + BasicWallet::NAME, BasicWallet::MOVE_ASSET_TO_NOTE_PROC_NAME, basic_wallet_library ); @@ -61,8 +57,12 @@ pub struct BasicWallet; impl BasicWallet { // CONSTANTS // -------------------------------------------------------------------------------------------- - const RECEIVE_ASSET_PROC_NAME: &str = "basic_wallet::receive_asset"; - const MOVE_ASSET_TO_NOTE_PROC_NAME: &str = "basic_wallet::move_asset_to_note"; + + /// The name of the component. + pub const NAME: &'static str = "miden::standards::components::wallets::basic_wallet"; + + const RECEIVE_ASSET_PROC_NAME: &str = "receive_asset"; + const MOVE_ASSET_TO_NOTE_PROC_NAME: &str = "move_asset_to_note"; // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- @@ -76,13 +76,21 @@ impl BasicWallet { pub fn move_asset_to_note_digest() -> Word { *BASIC_WALLET_MOVE_ASSET_TO_NOTE } + + /// Returns the [`AccountComponentMetadata`] for this component. 
+ pub fn component_metadata() -> AccountComponentMetadata { + AccountComponentMetadata::new(Self::NAME, AccountType::all()) + .with_description("Basic wallet component for receiving and sending assets") + } } impl From for AccountComponent { fn from(_: BasicWallet) -> Self { - AccountComponent::new(basic_wallet_library(), vec![]) - .expect("basic wallet component should satisfy the requirements of a valid account component") - .with_supports_all_types() + let metadata = BasicWallet::component_metadata(); + + AccountComponent::new(basic_wallet_library(), vec![], metadata).expect( + "basic wallet component should satisfy the requirements of a valid account component", + ) } } @@ -92,8 +100,8 @@ impl From for AccountComponent { /// Basic wallet related errors. #[derive(Debug, Error)] pub enum BasicWalletError { - #[error("unsupported authentication scheme: {0}")] - UnsupportedAuthScheme(String), + #[error("unsupported authentication method: {0}")] + UnsupportedAuthMethod(String), #[error("account creation failed")] AccountError(#[source] AccountError), } @@ -110,7 +118,7 @@ pub enum BasicWalletError { /// authentication scheme. pub fn create_basic_wallet( init_seed: [u8; 32], - auth_scheme: AuthScheme, + auth_method: AuthMethod, account_type: AccountType, account_storage_mode: AccountStorageMode, ) -> Result { @@ -120,37 +128,26 @@ pub fn create_basic_wallet( ))); } - let auth_component: AccountComponent = match auth_scheme { - AuthScheme::EcdsaK256Keccak { pub_key } => AuthEcdsaK256Keccak::new(pub_key).into(), - AuthScheme::EcdsaK256KeccakMultisig { threshold, pub_keys } => { - let config = AuthEcdsaK256KeccakMultisigConfig::new(pub_keys, threshold) - .and_then(|cfg| { - cfg.with_proc_thresholds(vec![(BasicWallet::receive_asset_digest(), 1)]) - }) - .map_err(BasicWalletError::AccountError)?; - AuthEcdsaK256KeccakMultisig::new(config) - .map_err(BasicWalletError::AccountError)? 
- .into() + let auth_component: AccountComponent = match auth_method { + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) } => { + AuthSingleSig::new(pub_key, auth_scheme).into() }, - AuthScheme::Falcon512Rpo { pub_key } => AuthFalcon512Rpo::new(pub_key).into(), - AuthScheme::Falcon512RpoMultisig { threshold, pub_keys } => { - let config = AuthFalcon512RpoMultisigConfig::new(pub_keys, threshold) + AuthMethod::Multisig { threshold, approvers } => { + let config = AuthMultisigConfig::new(approvers, threshold) .and_then(|cfg| { cfg.with_proc_thresholds(vec![(BasicWallet::receive_asset_digest(), 1)]) }) .map_err(BasicWalletError::AccountError)?; - AuthFalcon512RpoMultisig::new(config) - .map_err(BasicWalletError::AccountError)? - .into() + AuthMultisig::new(config).map_err(BasicWalletError::AccountError)?.into() }, - AuthScheme::NoAuth => { - return Err(BasicWalletError::UnsupportedAuthScheme( - "basic wallets cannot be created with NoAuth authentication scheme".into(), + AuthMethod::NoAuth => { + return Err(BasicWalletError::UnsupportedAuthMethod( + "basic wallets cannot be created with NoAuth authentication method".into(), )); }, - AuthScheme::Unknown => { - return Err(BasicWalletError::UnsupportedAuthScheme( - "basic wallets cannot be created with Unknown authentication scheme".into(), + AuthMethod::Unknown => { + return Err(BasicWalletError::UnsupportedAuthMethod( + "basic wallets cannot be created with Unknown authentication method".into(), )); }, }; @@ -171,19 +168,20 @@ pub fn create_basic_wallet( #[cfg(test)] mod tests { - use miden_processor::utils::{Deserializable, Serializable}; - use miden_protocol::account::auth::PublicKeyCommitment; + use miden_protocol::account::auth::{self, PublicKeyCommitment}; + use miden_protocol::utils::serde::{Deserializable, Serializable}; use miden_protocol::{ONE, Word}; - use super::{Account, AccountStorageMode, AccountType, AuthScheme, create_basic_wallet}; + use super::{Account, AccountStorageMode, AccountType, 
AuthMethod, create_basic_wallet}; use crate::account::wallets::BasicWallet; #[test] fn test_create_basic_wallet() { let pub_key = PublicKeyCommitment::from(Word::from([ONE; 4])); + let auth_scheme = auth::AuthScheme::Falcon512Poseidon2; let wallet = create_basic_wallet( [1; 32], - AuthScheme::Falcon512Rpo { pub_key }, + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }, AccountType::RegularAccountImmutableCode, AccountStorageMode::Public, ); @@ -196,9 +194,10 @@ mod tests { #[test] fn test_serialize_basic_wallet() { let pub_key = PublicKeyCommitment::from(Word::from([ONE; 4])); + let auth_scheme = auth::AuthScheme::EcdsaK256Keccak; let wallet = create_basic_wallet( [1; 32], - AuthScheme::Falcon512Rpo { pub_key }, + AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }, AccountType::RegularAccountImmutableCode, AccountStorageMode::Public, ) diff --git a/crates/miden-standards/src/auth_method.rs b/crates/miden-standards/src/auth_method.rs new file mode 100644 index 0000000000..fc2d1a02de --- /dev/null +++ b/crates/miden-standards/src/auth_method.rs @@ -0,0 +1,43 @@ +use alloc::vec::Vec; + +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; + +/// Defines standard authentication methods supported by account auth components. +pub enum AuthMethod { + /// A minimal authentication method that provides no cryptographic authentication. + /// + /// It only increments the nonce if the account state has actually changed during transaction + /// execution, avoiding unnecessary nonce increments for transactions that don't modify the + /// account state. + NoAuth, + /// A single-key authentication method which relies on either ECDSA or Falcon512Poseidon2 + /// signatures. + SingleSig { + approver: (PublicKeyCommitment, AuthScheme), + }, + /// A multi-signature authentication method using either ECDSA or Falcon512Poseidon2 signatures. + /// + /// Requires a threshold number of signatures from the provided public keys. 
+ Multisig { + threshold: u32, + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + }, + /// A non-standard authentication method. + Unknown, +} + +impl AuthMethod { + /// Returns all public key commitments associated with this authentication method. + /// + /// For unknown methods, an empty vector is returned. + pub fn get_public_key_commitments(&self) -> Vec { + match self { + AuthMethod::NoAuth => Vec::new(), + AuthMethod::SingleSig { approver: (pub_key, _) } => vec![*pub_key], + AuthMethod::Multisig { approvers, .. } => { + approvers.iter().map(|(pub_key, _)| *pub_key).collect() + }, + AuthMethod::Unknown => Vec::new(), + } + } +} diff --git a/crates/miden-standards/src/auth_scheme.rs b/crates/miden-standards/src/auth_scheme.rs deleted file mode 100644 index d8a5e7cf56..0000000000 --- a/crates/miden-standards/src/auth_scheme.rs +++ /dev/null @@ -1,54 +0,0 @@ -use alloc::vec::Vec; - -use miden_protocol::account::auth::PublicKeyCommitment; - -/// Defines authentication schemes available to standard and faucet accounts. -pub enum AuthScheme { - /// A minimal authentication scheme that provides no cryptographic authentication. - /// - /// It only increments the nonce if the account state has actually changed during transaction - /// execution, avoiding unnecessary nonce increments for transactions that don't modify the - /// account state. - NoAuth, - /// A single-key authentication scheme which relies on ECDSA signatures. - EcdsaK256Keccak { pub_key: PublicKeyCommitment }, - /// A multi-signature authentication scheme using ECDSA signatures. - /// - /// Requires a threshold number of signatures from the provided public keys. - EcdsaK256KeccakMultisig { - threshold: u32, - pub_keys: Vec, - }, - /// A single-key authentication scheme which relies Falcon512 RPO signatures. - /// - /// Falcon512 RPO is a variant of the [Falcon](https://falcon-sign.info/) signature scheme. 
- /// This variant differs from the standard in that instead of using SHAKE256 hash function in - /// the hash-to-point algorithm we use RPO256. This makes the signature more efficient to - /// verify in Miden VM. - Falcon512Rpo { pub_key: PublicKeyCommitment }, - /// A multi-signature authentication scheme using Falcon512 RPO signatures. - /// - /// Requires a threshold number of signatures from the provided public keys. - Falcon512RpoMultisig { - threshold: u32, - pub_keys: Vec, - }, - /// A non-standard authentication scheme. - Unknown, -} - -impl AuthScheme { - /// Returns all public key commitments associated with this authentication scheme. - /// - /// For unknown schemes, an empty vector is returned. - pub fn get_public_key_commitments(&self) -> Vec { - match self { - AuthScheme::NoAuth => Vec::new(), - AuthScheme::EcdsaK256Keccak { pub_key } => vec![*pub_key], - AuthScheme::EcdsaK256KeccakMultisig { pub_keys, .. } => pub_keys.clone(), - AuthScheme::Falcon512Rpo { pub_key } => vec![*pub_key], - AuthScheme::Falcon512RpoMultisig { pub_keys, .. 
} => pub_keys.clone(), - AuthScheme::Unknown => Vec::new(), - } - } -} diff --git a/crates/miden-standards/src/code_builder/mod.rs b/crates/miden-standards/src/code_builder/mod.rs index 7ea72c9dc6..9a1da52c00 100644 --- a/crates/miden-standards/src/code_builder/mod.rs +++ b/crates/miden-standards/src/code_builder/mod.rs @@ -1,4 +1,5 @@ use alloc::sync::Arc; +use alloc::vec::Vec; use miden_protocol::account::AccountComponentCode; use miden_protocol::assembly::{ @@ -12,6 +13,8 @@ use miden_protocol::assembly::{ }; use miden_protocol::note::NoteScript; use miden_protocol::transaction::{TransactionKernel, TransactionScript}; +use miden_protocol::vm::AdviceMap; +use miden_protocol::{Felt, Word}; use crate::errors::CodeBuilderError; use crate::standards_lib::StandardsLib; @@ -81,6 +84,7 @@ use crate::standards_lib::StandardsLib; pub struct CodeBuilder { assembler: Assembler, source_manager: Arc, + advice_map: AdviceMap, } impl CodeBuilder { @@ -100,7 +104,23 @@ impl CodeBuilder { let assembler = TransactionKernel::assembler_with_source_manager(source_manager.clone()) .with_dynamic_library(StandardsLib::default()) .expect("linking std lib should work"); - Self { assembler, source_manager } + Self { + assembler, + source_manager, + advice_map: AdviceMap::default(), + } + } + + // CONFIGURATION + // -------------------------------------------------------------------------------------------- + + /// Configures the assembler to treat warning diagnostics as errors. + /// + /// When enabled, any warning emitted during compilation will be promoted to an error, + /// causing the compilation to fail. 
+ pub fn with_warnings_as_errors(mut self, yes: bool) -> Self { + self.assembler = self.assembler.with_warnings_as_errors(yes); + self } // LIBRARY MANAGEMENT @@ -228,6 +248,76 @@ impl CodeBuilder { Ok(self) } + // ADVICE MAP MANAGEMENT + // -------------------------------------------------------------------------------------------- + + /// Adds an entry to the advice map that will be included in compiled scripts. + /// + /// The advice map allows passing non-deterministic inputs to the VM that can be + /// accessed using `adv.push_mapval` instruction. + /// + /// # Arguments + /// * `key` - The key for the advice map entry (a Word) + /// * `value` - The values to associate with this key + pub fn add_advice_map_entry(&mut self, key: Word, value: impl Into>) { + self.advice_map.insert(key, value.into()); + } + + /// Builder-style method to add an advice map entry. + /// + /// # Arguments + /// * `key` - The key for the advice map entry (a Word) + /// * `value` - The values to associate with this key + pub fn with_advice_map_entry(mut self, key: Word, value: impl Into>) -> Self { + self.add_advice_map_entry(key, value); + self + } + + /// Extends the advice map with entries from another advice map. + /// + /// # Arguments + /// * `advice_map` - The advice map to merge into this builder's advice map + pub fn extend_advice_map(&mut self, advice_map: AdviceMap) { + self.advice_map.extend(advice_map); + } + + /// Builder-style method to extend the advice map. + /// + /// # Arguments + /// * `advice_map` - The advice map to merge into this builder's advice map + pub fn with_extended_advice_map(mut self, advice_map: AdviceMap) -> Self { + self.extend_advice_map(advice_map); + self + } + + // PRIVATE HELPERS + // -------------------------------------------------------------------------------------------- + + /// Applies the advice map to a program if it's non-empty. + /// + /// This avoids cloning the MAST forest when there are no advice map entries. 
+ fn apply_advice_map( + advice_map: AdviceMap, + program: miden_protocol::vm::Program, + ) -> miden_protocol::vm::Program { + if advice_map.is_empty() { + program + } else { + program.with_advice_map(advice_map) + } + } + + /// Applies the advice map to a library if it's non-empty. + /// + /// This avoids cloning the MAST forest when there are no advice map entries. + fn apply_advice_map_to_library(advice_map: AdviceMap, library: Library) -> Library { + if advice_map.is_empty() { + library + } else { + library.with_advice_map(advice_map) + } + } + // COMPILATION // -------------------------------------------------------------------------------------------- @@ -246,7 +336,7 @@ impl CodeBuilder { component_path: impl AsRef, component_code: impl Parse, ) -> Result { - let CodeBuilder { assembler, source_manager } = self; + let CodeBuilder { assembler, source_manager, advice_map } = self; let mut parse_options = ParseOptions::for_library(); parse_options.path = Some(Path::new(component_path.as_ref()).into()); @@ -262,7 +352,9 @@ impl CodeBuilder { CodeBuilderError::build_error_with_report("failed to parse component code", err) })?; - Ok(AccountComponentCode::from(library)) + Ok(AccountComponentCode::from(Self::apply_advice_map_to_library( + advice_map, library, + ))) } /// Compiles the provided MASM code into a [`TransactionScript`]. @@ -279,12 +371,13 @@ impl CodeBuilder { self, tx_script: impl Parse, ) -> Result { - let assembler = self.assembler; + let CodeBuilder { assembler, advice_map, .. } = self; let program = assembler.assemble_program(tx_script).map_err(|err| { CodeBuilderError::build_error_with_report("failed to parse transaction script", err) })?; - Ok(TransactionScript::new(program)) + + Ok(TransactionScript::new(Self::apply_advice_map(advice_map, program))) } /// Compiles the provided MASM code into a [`NoteScript`]. 
@@ -297,13 +390,14 @@ impl CodeBuilder { /// # Errors /// Returns an error if: /// - The note script compiling fails - pub fn compile_note_script(self, program: impl Parse) -> Result { - let assembler = self.assembler; + pub fn compile_note_script(self, source: impl Parse) -> Result { + let CodeBuilder { assembler, advice_map, .. } = self; - let program = assembler.assemble_program(program).map_err(|err| { + let program = assembler.assemble_program(source).map_err(|err| { CodeBuilderError::build_error_with_report("failed to parse note script", err) })?; - Ok(NoteScript::new(program)) + + Ok(NoteScript::new(Self::apply_advice_map(advice_map, program))) } // ACCESSORS @@ -342,7 +436,7 @@ impl CodeBuilder { /// /// [account_lib]: crate::testing::mock_account_code::MockAccountCodeExt::mock_account_library /// [faucet_lib]: crate::testing::mock_account_code::MockAccountCodeExt::mock_faucet_library - /// [util_lib]: miden_protocol::testing::mock_util_lib::mock_util_library + /// [util_lib]: crate::testing::mock_util_lib::mock_util_library #[cfg(any(feature = "testing", test))] pub fn with_mock_libraries() -> Self { Self::with_mock_libraries_with_source_manager(Arc::new(DefaultSourceManager::default())) @@ -362,7 +456,7 @@ impl CodeBuilder { pub fn with_mock_libraries_with_source_manager( source_manager: Arc, ) -> Self { - use miden_protocol::testing::mock_util_lib::mock_util_library; + use crate::testing::mock_util_lib::mock_util_library; // Start with the builder linking against the transaction kernel, protocol library and // standards library. 
@@ -612,4 +706,88 @@ mod tests { Ok(()) } + + #[test] + fn test_code_builder_warnings_as_errors() { + let assembler: Assembler = CodeBuilder::default().with_warnings_as_errors(true).into(); + assert!(assembler.warnings_as_errors()); + } + + #[test] + fn test_code_builder_with_advice_map_entry() -> anyhow::Result<()> { + let key = Word::from([1u32, 2, 3, 4]); + let value = vec![Felt::new(42), Felt::new(43)]; + + let script = CodeBuilder::default() + .with_advice_map_entry(key, value.clone()) + .compile_tx_script("begin nop end") + .context("failed to compile tx script with advice map")?; + + let mast = script.mast(); + let stored_value = mast.advice_map().get(&key).expect("advice map entry should be present"); + assert_eq!(stored_value.as_ref(), value.as_slice()); + + Ok(()) + } + + #[test] + fn test_code_builder_extend_advice_map() -> anyhow::Result<()> { + let key1 = Word::from([1u32, 0, 0, 0]); + let key2 = Word::from([2u32, 0, 0, 0]); + + let mut advice_map = AdviceMap::default(); + advice_map.insert(key1, vec![Felt::new(1)]); + advice_map.insert(key2, vec![Felt::new(2)]); + + let script = CodeBuilder::default() + .with_extended_advice_map(advice_map) + .compile_tx_script("begin nop end") + .context("failed to compile tx script")?; + + let mast = script.mast(); + assert!(mast.advice_map().get(&key1).is_some(), "key1 should be present"); + assert!(mast.advice_map().get(&key2).is_some(), "key2 should be present"); + + Ok(()) + } + + #[test] + fn test_code_builder_advice_map_in_note_script() -> anyhow::Result<()> { + let key = Word::from([5u32, 6, 7, 8]); + let value = vec![Felt::new(100)]; + + let script = CodeBuilder::default() + .with_advice_map_entry(key, value.clone()) + .compile_note_script("begin nop end") + .context("failed to compile note script with advice map")?; + + let mast = script.mast(); + let stored_value = mast + .advice_map() + .get(&key) + .expect("advice map entry should be present in note script"); + assert_eq!(stored_value.as_ref(), 
value.as_slice()); + + Ok(()) + } + + #[test] + fn test_code_builder_advice_map_in_component_code() -> anyhow::Result<()> { + let key = Word::from([11u32, 22, 33, 44]); + let value = vec![Felt::new(500)]; + + let component_code = CodeBuilder::default() + .with_advice_map_entry(key, value.clone()) + .compile_component_code("test::component", "pub proc test nop end") + .context("failed to compile component code with advice map")?; + + let mast = component_code.mast_forest(); + let stored_value = mast + .advice_map() + .get(&key) + .expect("advice map entry should be present in component code"); + assert_eq!(stored_value.as_ref(), value.as_slice()); + + Ok(()) + } } diff --git a/crates/miden-standards/src/errors/mod.rs b/crates/miden-standards/src/errors/mod.rs index 2bf69e28e0..f1c21dd45b 100644 --- a/crates/miden-standards/src/errors/mod.rs +++ b/crates/miden-standards/src/errors/mod.rs @@ -1,7 +1,8 @@ /// The errors from the MASM code of the Miden standards. #[cfg(any(feature = "testing", test))] -#[rustfmt::skip] -pub mod standards; +pub mod standards { + include!(concat!(env!("OUT_DIR"), "/standards_errors.rs")); +} mod code_builder_errors; pub use code_builder_errors::CodeBuilderError; diff --git a/crates/miden-standards/src/errors/standards.rs b/crates/miden-standards/src/errors/standards.rs deleted file mode 100644 index e1907b7a5a..0000000000 --- a/crates/miden-standards/src/errors/standards.rs +++ /dev/null @@ -1,57 +0,0 @@ -use miden_protocol::errors::MasmError; - -// This file is generated by build.rs, do not modify manually. -// It is generated by extracting errors from the MASM files in the `./asm` directory. -// -// To add a new error, define a constant in MASM of the pattern `const ERR__...`. -// Try to fit the error into a pre-existing category if possible (e.g. Account, Note, ...). 
- -// STANDARDS ERRORS -// ================================================================================================ - -/// Error Message: "expected attachment kind to be Word for network account target" -pub const ERR_ATTACHMENT_KIND_MISMATCH: MasmError = MasmError::from_static_str("expected attachment kind to be Word for network account target"); -/// Error Message: "expected network account target attachment scheme" -pub const ERR_ATTACHMENT_SCHEME_MISMATCH: MasmError = MasmError::from_static_str("expected network account target attachment scheme"); - -/// Error Message: "burn requires exactly 1 note asset" -pub const ERR_BASIC_FUNGIBLE_BURN_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("burn requires exactly 1 note asset"); - -/// Error Message: "distribute would cause the maximum supply to be exceeded" -pub const ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED: MasmError = MasmError::from_static_str("distribute would cause the maximum supply to be exceeded"); - -/// Error Message: "number of approvers must be equal to or greater than threshold" -pub const ERR_MALFORMED_MULTISIG_CONFIG: MasmError = MasmError::from_static_str("number of approvers must be equal to or greater than threshold"); - -/// Error Message: "MINT script expects exactly 12 inputs for private or 16+ inputs for public output notes" -pub const ERR_MINT_WRONG_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("MINT script expects exactly 12 inputs for private or 16+ inputs for public output notes"); - -/// Error Message: "failed to reclaim P2IDE note because the reclaiming account is not the sender" -pub const ERR_P2IDE_RECLAIM_ACCT_IS_NOT_SENDER: MasmError = MasmError::from_static_str("failed to reclaim P2IDE note because the reclaiming account is not the sender"); -/// Error Message: "P2IDE reclaim is disabled" -pub const ERR_P2IDE_RECLAIM_DISABLED: MasmError = MasmError::from_static_str("P2IDE reclaim is disabled"); -/// Error Message: 
"failed to reclaim P2IDE note because the reclaim block height is not reached yet" -pub const ERR_P2IDE_RECLAIM_HEIGHT_NOT_REACHED: MasmError = MasmError::from_static_str("failed to reclaim P2IDE note because the reclaim block height is not reached yet"); -/// Error Message: "failed to consume P2IDE note because the note is still timelocked" -pub const ERR_P2IDE_TIMELOCK_HEIGHT_NOT_REACHED: MasmError = MasmError::from_static_str("failed to consume P2IDE note because the note is still timelocked"); -/// Error Message: "P2IDE note expects exactly 4 note inputs" -pub const ERR_P2IDE_WRONG_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("P2IDE note expects exactly 4 note inputs"); - -/// Error Message: "P2ID's target account address and transaction address do not match" -pub const ERR_P2ID_TARGET_ACCT_MISMATCH: MasmError = MasmError::from_static_str("P2ID's target account address and transaction address do not match"); -/// Error Message: "P2ID note expects exactly 2 note inputs" -pub const ERR_P2ID_WRONG_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("P2ID note expects exactly 2 note inputs"); - -/// Error Message: "note sender is not the owner" -pub const ERR_SENDER_NOT_OWNER: MasmError = MasmError::from_static_str("note sender is not the owner"); - -/// Error Message: "SWAP script requires exactly 1 note asset" -pub const ERR_SWAP_WRONG_NUMBER_OF_ASSETS: MasmError = MasmError::from_static_str("SWAP script requires exactly 1 note asset"); -/// Error Message: "SWAP script expects exactly 16 note inputs" -pub const ERR_SWAP_WRONG_NUMBER_OF_INPUTS: MasmError = MasmError::from_static_str("SWAP script expects exactly 16 note inputs"); - -/// Error Message: "failed to approve multisig transaction as it was already executed" -pub const ERR_TX_ALREADY_EXECUTED: MasmError = MasmError::from_static_str("failed to approve multisig transaction as it was already executed"); - -/// Error Message: "number of approvers or threshold must not be zero" -pub const 
ERR_ZERO_IN_MULTISIG_CONFIG: MasmError = MasmError::from_static_str("number of approvers or threshold must not be zero"); diff --git a/crates/miden-standards/src/lib.rs b/crates/miden-standards/src/lib.rs index fdcec02916..f3365c64bf 100644 --- a/crates/miden-standards/src/lib.rs +++ b/crates/miden-standards/src/lib.rs @@ -6,8 +6,8 @@ extern crate alloc; #[cfg(feature = "std")] extern crate std; -mod auth_scheme; -pub use auth_scheme::AuthScheme; +mod auth_method; +pub use auth_method::AuthMethod; pub mod account; pub mod code_builder; diff --git a/crates/miden-standards/src/note/burn.rs b/crates/miden-standards/src/note/burn.rs new file mode 100644 index 0000000000..d9b22572a1 --- /dev/null +++ b/crates/miden-standards/src/note/burn.rs @@ -0,0 +1,109 @@ +use miden_protocol::Word; +use miden_protocol::account::AccountId; +use miden_protocol::assembly::Path; +use miden_protocol::asset::Asset; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use miden_protocol::utils::sync::LazyLock; + +use crate::StandardsLib; + +// NOTE SCRIPT +// ================================================================================================ + +/// Path to the BURN note script procedure in the standards library. 
+const BURN_SCRIPT_PATH: &str = "::miden::standards::notes::burn::main"; + +// Initialize the BURN note script only once +static BURN_SCRIPT: LazyLock = LazyLock::new(|| { + let standards_lib = StandardsLib::default(); + let path = Path::new(BURN_SCRIPT_PATH); + NoteScript::from_library_reference(standards_lib.as_ref(), path) + .expect("Standards library contains BURN note script procedure") +}); + +// BURN NOTE +// ================================================================================================ + +/// TODO: add docs +pub struct BurnNote; + +impl BurnNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the BURN note. + pub const NUM_STORAGE_ITEMS: usize = 0; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the script of the BURN note. + pub fn script() -> NoteScript { + BURN_SCRIPT.clone() + } + + /// Returns the BURN note script root. + pub fn script_root() -> Word { + BURN_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Generates a BURN note - a note that instructs a faucet to burn a fungible asset. + /// + /// This script enables the creation of a PUBLIC note that, when consumed by a faucet (either + /// basic or network), will burn the fungible assets contained in the note. Both basic and + /// network fungible faucets export the same `burn` procedure with identical MAST roots, + /// allowing a single BURN note script to work with either faucet type. + /// + /// BURN notes are always PUBLIC for network execution. + /// + /// The passed-in `rng` is used to generate a serial number for the note. The note's tag + /// is automatically set to the faucet's account ID for proper routing. 
+ /// + /// # Parameters + /// - `sender`: The account ID of the note creator + /// - `faucet_id`: The account ID of the faucet that will burn the assets + /// - `fungible_asset`: The fungible asset to be burned + /// - `attachment`: The [`NoteAttachment`] of the BURN note + /// - `rng`: Random number generator for creating the serial number + /// + /// # Errors + /// Returns an error if note creation fails. + pub fn create( + sender: AccountId, + faucet_id: AccountId, + fungible_asset: Asset, + attachment: NoteAttachment, + rng: &mut R, + ) -> Result { + let note_script = Self::script(); + let serial_num = rng.draw_word(); + + // BURN notes are always public + let note_type = NoteType::Public; + + let inputs = NoteStorage::new(vec![])?; + let tag = NoteTag::with_account_target(faucet_id); + + let metadata = + NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment); + let assets = NoteAssets::new(vec![fungible_asset])?; // BURN notes contain the asset to burn + let recipient = NoteRecipient::new(serial_num, note_script, inputs); + + Ok(Note::new(assets, metadata, recipient)) + } +} diff --git a/crates/miden-protocol/src/note/execution_hint.rs b/crates/miden-standards/src/note/execution_hint.rs similarity index 97% rename from crates/miden-protocol/src/note/execution_hint.rs rename to crates/miden-standards/src/note/execution_hint.rs index 10d11503cb..1b0f35d068 100644 --- a/crates/miden-protocol/src/note/execution_hint.rs +++ b/crates/miden-standards/src/note/execution_hint.rs @@ -1,9 +1,9 @@ // NOTE EXECUTION HINT // ================================================================================================ -use crate::Felt; -use crate::block::BlockNumber; -use crate::errors::NoteError; +use miden_protocol::Felt; +use miden_protocol::block::BlockNumber; +use miden_protocol::errors::NoteError; /// Specifies the conditions under which a note is ready to be consumed. /// These conditions are meant to be encoded in the note script as well. 
@@ -110,7 +110,10 @@ impl NoteExecutionHint { Ok(hint) }, - _ => Err(NoteError::NoteExecutionHintTagOutOfRange(tag)), + _ => Err(NoteError::other(format!( + "note execution hint tag {tag} must be in range 0..={}", + Self::ON_BLOCK_SLOT_TAG + ))), } } diff --git a/crates/miden-standards/src/note/mint.rs b/crates/miden-standards/src/note/mint.rs new file mode 100644 index 0000000000..f3363eaa76 --- /dev/null +++ b/crates/miden-standards/src/note/mint.rs @@ -0,0 +1,236 @@ +use alloc::vec::Vec; + +use miden_protocol::account::AccountId; +use miden_protocol::assembly::Path; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, MAX_NOTE_STORAGE_ITEMS, Word}; + +use crate::StandardsLib; + +// NOTE SCRIPT +// ================================================================================================ + +/// Path to the MINT note script procedure in the standards library. +const MINT_SCRIPT_PATH: &str = "::miden::standards::notes::mint::main"; + +// Initialize the MINT note script only once +static MINT_SCRIPT: LazyLock = LazyLock::new(|| { + let standards_lib = StandardsLib::default(); + let path = Path::new(MINT_SCRIPT_PATH); + NoteScript::from_library_reference(standards_lib.as_ref(), path) + .expect("Standards library contains MINT note script procedure") +}); + +// MINT NOTE +// ================================================================================================ + +/// TODO: add docs +pub struct MintNote; + +impl MintNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the MINT note (private mode). 
+ pub const NUM_STORAGE_ITEMS_PRIVATE: usize = 8; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the script of the MINT note. + pub fn script() -> NoteScript { + MINT_SCRIPT.clone() + } + + /// Returns the MINT note script root. + pub fn script_root() -> Word { + MINT_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Generates a MINT note - a note that instructs a network faucet to mint fungible assets. + /// + /// This script enables the creation of a PUBLIC note that, when consumed by a network faucet, + /// will mint the specified amount of fungible assets and create either a PRIVATE or PUBLIC + /// output note depending on the input configuration. The MINT note uses note-based + /// authentication, checking if the note sender equals the faucet owner to authorize + /// minting. + /// + /// MINT notes are always PUBLIC (for network execution). Output notes can be either PRIVATE + /// or PUBLIC depending on the MintNoteStorage variant used. + /// + /// The passed-in `rng` is used to generate a serial number for the note. The note's tag + /// is automatically set to the faucet's account ID for proper routing. + /// + /// # Parameters + /// - `faucet_id`: The account ID of the network faucet that will mint the assets + /// - `sender`: The account ID of the note creator (must be the faucet owner) + /// - `mint_storage`: The storage configuration specifying private or public output mode + /// - `attachment`: The [`NoteAttachment`] of the MINT note + /// - `rng`: Random number generator for creating the serial number + /// + /// # Errors + /// Returns an error if note creation fails. 
+ pub fn create( + faucet_id: AccountId, + sender: AccountId, + mint_storage: MintNoteStorage, + attachment: NoteAttachment, + rng: &mut R, + ) -> Result { + let note_script = Self::script(); + let serial_num = rng.draw_word(); + + // MINT notes are always public for network execution + let note_type = NoteType::Public; + + // Convert MintNoteStorage to NoteStorage + let storage = NoteStorage::from(mint_storage); + + let tag = NoteTag::with_account_target(faucet_id); + + let metadata = + NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment); + let assets = NoteAssets::new(vec![])?; // MINT notes have no assets + let recipient = NoteRecipient::new(serial_num, note_script, storage); + + Ok(Note::new(assets, metadata, recipient)) + } +} + +// MINT NOTE STORAGE +// ================================================================================================ + +/// Represents the different storage formats for MINT notes. +/// - Private: Creates a private output note using a precomputed recipient digest (12 MINT note +/// storage items) +/// - Public: Creates a public output note by providing script root, serial number, and +/// variable-length storage (16+ MINT note storage items: 16 fixed + variable number of output +/// note storage items) +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MintNoteStorage { + Private { + recipient_digest: Word, + amount: Felt, + tag: Felt, + attachment: NoteAttachment, + }, + Public { + recipient: NoteRecipient, + amount: Felt, + tag: Felt, + attachment: NoteAttachment, + }, +} + +impl MintNoteStorage { + pub fn new_private(recipient_digest: Word, amount: Felt, tag: Felt) -> Self { + Self::Private { + recipient_digest, + amount, + tag, + attachment: NoteAttachment::default(), + } + } + + pub fn new_public( + recipient: NoteRecipient, + amount: Felt, + tag: Felt, + ) -> Result { + // Calculate total number of storage items that will be created: + // 16 fixed items (tag, amount, attachment_kind, 
attachment_scheme, ATTACHMENT, + // SCRIPT_ROOT, SERIAL_NUM) + variable recipient number of storage items + const FIXED_PUBLIC_STORAGE_ITEMS: usize = 16; + let total_storage_items = + FIXED_PUBLIC_STORAGE_ITEMS + recipient.storage().num_items() as usize; + + if total_storage_items > MAX_NOTE_STORAGE_ITEMS { + return Err(NoteError::TooManyStorageItems(total_storage_items)); + } + + Ok(Self::Public { + recipient, + amount, + tag, + attachment: NoteAttachment::default(), + }) + } + + /// Overwrites the [`NoteAttachment`] of the note storage. + pub fn with_attachment(self, attachment: NoteAttachment) -> Self { + match self { + MintNoteStorage::Private { + recipient_digest, + amount, + tag, + attachment: _, + } => MintNoteStorage::Private { + recipient_digest, + amount, + tag, + attachment, + }, + MintNoteStorage::Public { recipient, amount, tag, attachment: _ } => { + MintNoteStorage::Public { recipient, amount, tag, attachment } + }, + } + } +} + +impl From for NoteStorage { + fn from(mint_storage: MintNoteStorage) -> Self { + match mint_storage { + MintNoteStorage::Private { + recipient_digest, + amount, + tag, + attachment, + } => { + let attachment_scheme = Felt::from(attachment.attachment_scheme().as_u32()); + let attachment_kind = Felt::from(attachment.attachment_kind().as_u8()); + let attachment = attachment.content().to_word(); + + let mut storage_values = Vec::with_capacity(12); + storage_values.extend_from_slice(&[ + tag, + amount, + attachment_kind, + attachment_scheme, + ]); + storage_values.extend_from_slice(attachment.as_elements()); + storage_values.extend_from_slice(recipient_digest.as_elements()); + NoteStorage::new(storage_values) + .expect("number of storage items should not exceed max storage items") + }, + MintNoteStorage::Public { recipient, amount, tag, attachment } => { + let attachment_scheme = Felt::from(attachment.attachment_scheme().as_u32()); + let attachment_kind = Felt::from(attachment.attachment_kind().as_u8()); + let attachment = 
attachment.content().to_word(); + + let mut storage_values = vec![tag, amount, attachment_kind, attachment_scheme]; + storage_values.extend_from_slice(attachment.as_elements()); + storage_values.extend_from_slice(recipient.script().root().as_elements()); + storage_values.extend_from_slice(recipient.serial_num().as_elements()); + storage_values.extend_from_slice(recipient.storage().items()); + NoteStorage::new(storage_values) + .expect("number of storage items should not exceed max storage items") + }, + } + } +} diff --git a/crates/miden-standards/src/note/mint_inputs.rs b/crates/miden-standards/src/note/mint_inputs.rs deleted file mode 100644 index 3fd62e3e67..0000000000 --- a/crates/miden-standards/src/note/mint_inputs.rs +++ /dev/null @@ -1,118 +0,0 @@ -use alloc::vec::Vec; - -use miden_protocol::errors::NoteError; -use miden_protocol::note::{NoteAttachment, NoteInputs, NoteRecipient}; -use miden_protocol::{Felt, MAX_INPUTS_PER_NOTE, Word}; - -/// Represents the different input formats for MINT notes. 
-/// - Private: Creates a private output note using a precomputed recipient digest (12 MINT note -/// inputs) -/// - Public: Creates a public output note by providing script root, serial number, and -/// variable-length inputs (16+ MINT note inputs: 16 fixed + variable number of output note -/// inputs) -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum MintNoteInputs { - Private { - recipient_digest: Word, - amount: Felt, - tag: Felt, - attachment: NoteAttachment, - }, - Public { - recipient: NoteRecipient, - amount: Felt, - tag: Felt, - attachment: NoteAttachment, - }, -} - -impl MintNoteInputs { - pub fn new_private(recipient_digest: Word, amount: Felt, tag: Felt) -> Self { - Self::Private { - recipient_digest, - amount, - tag, - attachment: NoteAttachment::default(), - } - } - - pub fn new_public( - recipient: NoteRecipient, - amount: Felt, - tag: Felt, - ) -> Result { - // Calculate total number of inputs that will be created: - // 16 fixed inputs (tag, amount, attachment_kind, attachment_scheme, ATTACHMENT, - // SCRIPT_ROOT, SERIAL_NUM) + variable recipient inputs length - const FIXED_PUBLIC_INPUTS: usize = 16; - let total_inputs = FIXED_PUBLIC_INPUTS + recipient.inputs().num_values() as usize; - - if total_inputs > MAX_INPUTS_PER_NOTE { - return Err(NoteError::TooManyInputs(total_inputs)); - } - - Ok(Self::Public { - recipient, - amount, - tag, - attachment: NoteAttachment::default(), - }) - } - - /// Overwrites the [`NoteAttachment`] of the note inputs. 
- pub fn with_attachment(self, attachment: NoteAttachment) -> Self { - match self { - MintNoteInputs::Private { - recipient_digest, - amount, - tag, - attachment: _, - } => MintNoteInputs::Private { - recipient_digest, - amount, - tag, - attachment, - }, - MintNoteInputs::Public { recipient, amount, tag, attachment: _ } => { - MintNoteInputs::Public { recipient, amount, tag, attachment } - }, - } - } -} - -impl From for NoteInputs { - fn from(mint_inputs: MintNoteInputs) -> Self { - match mint_inputs { - MintNoteInputs::Private { - recipient_digest, - amount, - tag, - attachment, - } => { - let attachment_scheme = Felt::from(attachment.attachment_scheme().as_u32()); - let attachment_kind = Felt::from(attachment.attachment_kind().as_u8()); - let attachment = attachment.content().to_word(); - - let mut input_values = Vec::with_capacity(12); - input_values.extend_from_slice(&[tag, amount, attachment_kind, attachment_scheme]); - input_values.extend_from_slice(attachment.as_elements()); - input_values.extend_from_slice(recipient_digest.as_elements()); - NoteInputs::new(input_values) - .expect("number of inputs should not exceed max inputs") - }, - MintNoteInputs::Public { recipient, amount, tag, attachment } => { - let attachment_scheme = Felt::from(attachment.attachment_scheme().as_u32()); - let attachment_kind = Felt::from(attachment.attachment_kind().as_u8()); - let attachment = attachment.content().to_word(); - - let mut input_values = vec![tag, amount, attachment_kind, attachment_scheme]; - input_values.extend_from_slice(attachment.as_elements()); - input_values.extend_from_slice(recipient.script().root().as_elements()); - input_values.extend_from_slice(recipient.serial_num().as_elements()); - input_values.extend_from_slice(recipient.inputs().values()); - NoteInputs::new(input_values) - .expect("number of inputs should not exceed max inputs") - }, - } - } -} diff --git a/crates/miden-standards/src/note/mod.rs b/crates/miden-standards/src/note/mod.rs index 
30f0d8d172..82b94c6a68 100644 --- a/crates/miden-standards/src/note/mod.rs +++ b/crates/miden-standards/src/note/mod.rs @@ -1,255 +1,321 @@ -use alloc::vec::Vec; +use alloc::boxed::Box; +use alloc::string::ToString; +use core::error::Error; +use miden_protocol::Word; use miden_protocol::account::AccountId; -use miden_protocol::asset::Asset; use miden_protocol::block::BlockNumber; -use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::errors::NoteError; -use miden_protocol::note::{ - Note, - NoteAssets, - NoteAttachment, - NoteDetails, - NoteInputs, - NoteMetadata, - NoteRecipient, - NoteTag, - NoteType, -}; -use miden_protocol::{Felt, Word}; -use utils::build_swap_tag; - -pub mod mint_inputs; -pub mod utils; +use miden_protocol::note::{Note, NoteScript}; + +use crate::account::faucets::{BasicFungibleFaucet, NetworkFungibleFaucet}; +use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; +use crate::account::wallets::BasicWallet; + +mod burn; +pub use burn::BurnNote; + +mod execution_hint; +pub use execution_hint::NoteExecutionHint; + +mod mint; +pub use mint::{MintNote, MintNoteStorage}; + +mod p2id; +pub use p2id::{P2idNote, P2idNoteStorage}; + +mod p2ide; +pub use p2ide::{P2ideNote, P2ideNoteStorage}; + +mod swap; +pub use swap::SwapNote; mod network_account_target; pub use network_account_target::{NetworkAccountTarget, NetworkAccountTargetError}; -mod well_known_note_attachment; -pub use well_known_note_attachment::WellKnownNoteAttachment; - -mod well_known_note; -pub use mint_inputs::MintNoteInputs; -pub use well_known_note::{NoteConsumptionStatus, WellKnownNote}; +mod network_note; +pub use network_note::{AccountTargetNetworkNote, NetworkNoteExt}; -// STANDARDIZED SCRIPTS +mod standard_note_attachment; +use miden_protocol::errors::NoteError; +pub use standard_note_attachment::StandardNoteAttachment; +// STANDARD NOTE // 
================================================================================================ -/// Generates a P2ID note - Pay-to-ID note. -/// -/// This script enables the transfer of assets from the `sender` account to the `target` account -/// by specifying the target's account ID. -/// -/// The passed-in `rng` is used to generate a serial number for the note. The returned note's tag -/// is set to the target's account ID. -/// -/// # Errors -/// Returns an error if deserialization or compilation of the `P2ID` script fails. -pub fn create_p2id_note( - sender: AccountId, - target: AccountId, - assets: Vec, - note_type: NoteType, - attachment: NoteAttachment, - rng: &mut R, -) -> Result { - let serial_num = rng.draw_word(); - let recipient = utils::build_p2id_recipient(target, serial_num)?; - - let tag = NoteTag::with_account_target(target); - - let metadata = NoteMetadata::new(sender, note_type, tag).with_attachment(attachment); - let vault = NoteAssets::new(assets)?; - - Ok(Note::new(vault, metadata, recipient)) +/// The enum holding the types of standard notes provided by `miden-standards`. +pub enum StandardNote { + P2ID, + P2IDE, + SWAP, + MINT, + BURN, } -/// Generates a P2IDE note - Pay-to-ID note with optional reclaim after a certain block height and -/// optional timelock. -/// -/// This script enables the transfer of assets from the `sender` account to the `target` -/// account by specifying the target's account ID. It adds the optional possibility for the -/// sender to reclaiming the assets if the note has not been consumed by the target within the -/// specified timeframe and the optional possibility to add a timelock to the asset transfer. -/// -/// The passed-in `rng` is used to generate a serial number for the note. The returned note's tag -/// is set to the target's account ID. -/// -/// # Errors -/// Returns an error if deserialization or compilation of the `P2ID` script fails. 
-pub fn create_p2ide_note( - sender: AccountId, - target: AccountId, - assets: Vec, - reclaim_height: Option, - timelock_height: Option, - note_type: NoteType, - attachment: NoteAttachment, - rng: &mut R, -) -> Result { - let serial_num = rng.draw_word(); - let recipient = - utils::build_p2ide_recipient(target, reclaim_height, timelock_height, serial_num)?; - let tag = NoteTag::with_account_target(target); - - let metadata = NoteMetadata::new(sender, note_type, tag).with_attachment(attachment); - let vault = NoteAssets::new(assets)?; - - Ok(Note::new(vault, metadata, recipient)) -} +impl StandardNote { + // CONSTRUCTOR + // -------------------------------------------------------------------------------------------- -/// Generates a SWAP note - swap of assets between two accounts - and returns the note as well as -/// [`NoteDetails`] for the payback note. -/// -/// This script enables a swap of 2 assets between the `sender` account and any other account that -/// is willing to consume the note. The consumer will receive the `offered_asset` and will create a -/// new P2ID note with `sender` as target, containing the `requested_asset`. -/// -/// # Errors -/// Returns an error if deserialization or compilation of the `SWAP` script fails. -pub fn create_swap_note( - sender: AccountId, - offered_asset: Asset, - requested_asset: Asset, - swap_note_type: NoteType, - swap_note_attachment: NoteAttachment, - payback_note_type: NoteType, - payback_note_attachment: NoteAttachment, - rng: &mut R, -) -> Result<(Note, NoteDetails), NoteError> { - if requested_asset == offered_asset { - return Err(NoteError::other("requested asset same as offered asset")); + /// Returns a [`StandardNote`] instance based on the provided [`NoteScript`]. Returns `None` + /// if the provided script does not match any standard note script. 
+ pub fn from_script(script: &NoteScript) -> Option { + Self::from_script_root(script.root()) + } + + /// Returns a [`StandardNote`] instance based on the provided script root. Returns `None` if + /// the provided root does not match any standard note script. + pub fn from_script_root(root: Word) -> Option { + if root == P2idNote::script_root() { + return Some(Self::P2ID); + } + if root == P2ideNote::script_root() { + return Some(Self::P2IDE); + } + if root == SwapNote::script_root() { + return Some(Self::SWAP); + } + if root == MintNote::script_root() { + return Some(Self::MINT); + } + if root == BurnNote::script_root() { + return Some(Self::BURN); + } + + None + } + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the name of this [`StandardNote`] variant as a string. + pub fn name(&self) -> &'static str { + match self { + Self::P2ID => "P2ID", + Self::P2IDE => "P2IDE", + Self::SWAP => "SWAP", + Self::MINT => "MINT", + Self::BURN => "BURN", + } + } + + /// Returns the expected number of storage items of the active note. + pub fn expected_num_storage_items(&self) -> usize { + match self { + Self::P2ID => P2idNote::NUM_STORAGE_ITEMS, + Self::P2IDE => P2ideNote::NUM_STORAGE_ITEMS, + Self::SWAP => SwapNote::NUM_STORAGE_ITEMS, + Self::MINT => MintNote::NUM_STORAGE_ITEMS_PRIVATE, + Self::BURN => BurnNote::NUM_STORAGE_ITEMS, + } + } + + /// Returns the note script of the current [StandardNote] instance. + pub fn script(&self) -> NoteScript { + match self { + Self::P2ID => P2idNote::script(), + Self::P2IDE => P2ideNote::script(), + Self::SWAP => SwapNote::script(), + Self::MINT => MintNote::script(), + Self::BURN => BurnNote::script(), + } + } + + /// Returns the script root of the current [StandardNote] instance. 
+ pub fn script_root(&self) -> Word { + match self { + Self::P2ID => P2idNote::script_root(), + Self::P2IDE => P2ideNote::script_root(), + Self::SWAP => SwapNote::script_root(), + Self::MINT => MintNote::script_root(), + Self::BURN => BurnNote::script_root(), + } + } + + /// Returns a boolean value indicating whether this [StandardNote] is compatible with the + /// provided [AccountInterface]. + pub fn is_compatible_with(&self, account_interface: &AccountInterface) -> bool { + if account_interface.components().contains(&AccountComponentInterface::BasicWallet) { + return true; + } + + let interface_proc_digests = account_interface.get_procedure_digests(); + match self { + Self::P2ID | &Self::P2IDE => { + // To consume P2ID and P2IDE notes, the `receive_asset` procedure must be present in + // the provided account interface. + interface_proc_digests.contains(&BasicWallet::receive_asset_digest()) + }, + Self::SWAP => { + // To consume SWAP note, the `receive_asset` and `move_asset_to_note` procedures + // must be present in the provided account interface. + interface_proc_digests.contains(&BasicWallet::receive_asset_digest()) + && interface_proc_digests.contains(&BasicWallet::move_asset_to_note_digest()) + }, + Self::MINT => { + // MINT notes work only with network fungible faucets. The network faucet uses + // note-based authentication (checking if the note sender equals the faucet owner) + // to authorize minting, while basic faucets have different mint procedures that + // are not compatible with MINT notes. + interface_proc_digests.contains(&NetworkFungibleFaucet::distribute_digest()) + }, + Self::BURN => { + // BURN notes work with both basic and network fungible faucets because both + // faucet types export the same `burn` procedure with identical MAST roots. + // This allows a single BURN note script to work with either faucet type. 
+ interface_proc_digests.contains(&BasicFungibleFaucet::burn_digest()) + || interface_proc_digests.contains(&NetworkFungibleFaucet::burn_digest()) + }, + } + } + + /// Performs the inputs check of the provided standard note against the target account and the + /// block number. + /// + /// This function returns: + /// - `Some` if we can definitively determine whether the note can be consumed not by the target + /// account. + /// - `None` if the consumption status of the note cannot be determined conclusively and further + /// checks are necessary. + pub fn is_consumable( + &self, + note: &Note, + target_account_id: AccountId, + block_ref: BlockNumber, + ) -> Option { + match self.is_consumable_inner(note, target_account_id, block_ref) { + Ok(status) => status, + Err(err) => { + let err: Box = Box::from(err); + Some(NoteConsumptionStatus::NeverConsumable(err)) + }, + } } - let note_script = WellKnownNote::SWAP.script(); - - let payback_serial_num = rng.draw_word(); - let payback_recipient = utils::build_p2id_recipient(sender, payback_serial_num)?; - - let requested_asset_word: Word = requested_asset.into(); - let payback_tag = NoteTag::with_account_target(sender); - - let attachment_scheme = Felt::from(payback_note_attachment.attachment_scheme().as_u32()); - let attachment_kind = Felt::from(payback_note_attachment.attachment_kind().as_u8()); - let attachment = payback_note_attachment.content().to_word(); - - let mut inputs = Vec::with_capacity(16); - inputs.extend_from_slice(&[ - payback_note_type.into(), - payback_tag.into(), - attachment_scheme, - attachment_kind, - ]); - inputs.extend_from_slice(attachment.as_elements()); - inputs.extend_from_slice(requested_asset_word.as_elements()); - inputs.extend_from_slice(payback_recipient.digest().as_elements()); - let inputs = NoteInputs::new(inputs)?; - - // build the tag for the SWAP use case - let tag = build_swap_tag(swap_note_type, &offered_asset, &requested_asset); - let serial_num = rng.draw_word(); - - // build 
the outgoing note - let metadata = - NoteMetadata::new(sender, swap_note_type, tag).with_attachment(swap_note_attachment); - let assets = NoteAssets::new(vec![offered_asset])?; - let recipient = NoteRecipient::new(serial_num, note_script, inputs); - let note = Note::new(assets, metadata, recipient); - - // build the payback note details - let payback_assets = NoteAssets::new(vec![requested_asset])?; - let payback_note = NoteDetails::new(payback_assets, payback_recipient); - - Ok((note, payback_note)) + /// Performs the inputs check of the provided note against the target account and the block + /// number. + /// + /// It performs: + /// - for `P2ID` note: + /// - check that note storage has correct number of values. + /// - assertion that the account ID provided by the note storage is equal to the target + /// account ID. + /// - for `P2IDE` note: + /// - check that note storage has correct number of values. + /// - check that the target account is either the receiver account or the sender account. + /// - check that depending on whether the target account is sender or receiver, it could be + /// either consumed, or consumed after timelock height, or consumed after reclaim height. 
+ fn is_consumable_inner( + &self, + note: &Note, + target_account_id: AccountId, + block_ref: BlockNumber, + ) -> Result, NoteError> { + match self { + StandardNote::P2ID => { + let input_account_id = P2idNoteStorage::try_from(note.storage().items()) + .map_err(|e| NoteError::other_with_source("invalid P2ID note storage", e))?; + + if input_account_id.target() == target_account_id { + Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) + } else { + Ok(Some(NoteConsumptionStatus::NeverConsumable("account ID provided to the P2ID note storage doesn't match the target account ID".into()))) + } + }, + StandardNote::P2IDE => { + let P2ideNoteStorage { + target: receiver_account_id, + reclaim_height, + timelock_height, + } = P2ideNoteStorage::try_from(note.storage().items()) + .map_err(|e| NoteError::other_with_source("invalid P2IDE note storage", e))?; + + let current_block_height = block_ref.as_u32(); + let reclaim_height = reclaim_height.unwrap_or_default().as_u32(); + let timelock_height = timelock_height.unwrap_or_default().as_u32(); + + // block height after which sender account can consume the note + let consumable_after = reclaim_height.max(timelock_height); + + // handle the case when the target account of the transaction is sender + if target_account_id == note.metadata().sender() { + // For the sender, the current block height needs to have reached both reclaim + // and timelock height to be consumable. 
+ if current_block_height >= consumable_after { + Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) + } else { + Ok(Some(NoteConsumptionStatus::ConsumableAfter(BlockNumber::from( + consumable_after, + )))) + } + // handle the case when the target account of the transaction is receiver + } else if target_account_id == receiver_account_id { + // For the receiver, the current block height needs to have reached only the + // timelock height to be consumable: we can ignore the reclaim height in this + // case + if current_block_height >= timelock_height { + Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) + } else { + Ok(Some(NoteConsumptionStatus::ConsumableAfter(BlockNumber::from( + timelock_height, + )))) + } + // if the target account is neither the sender nor the receiver (from the note's + // storage), then this account cannot consume the note + } else { + Ok(Some(NoteConsumptionStatus::NeverConsumable( + "target account of the transaction does not match neither the receiver account specified by the P2IDE storage, nor the sender account".into() + ))) + } + }, + + // the consumption status of any other note cannot be determined by the static analysis, + // further checks are necessary. + _ => Ok(None), + } + } } -/// Generates a MINT note - a note that instructs a network faucet to mint fungible assets. -/// -/// This script enables the creation of a PUBLIC note that, when consumed by a network faucet, -/// will mint the specified amount of fungible assets and create either a PRIVATE or PUBLIC -/// output note depending on the input configuration. The MINT note uses note-based authentication, -/// checking if the note sender equals the faucet owner to authorize minting. -/// -/// MINT notes are always PUBLIC (for network execution). Output notes can be either PRIVATE -/// or PUBLIC depending on the MintNoteInputs variant used. -/// -/// The passed-in `rng` is used to generate a serial number for the note. 
The note's tag -/// is automatically set to the faucet's account ID for proper routing. -/// -/// # Parameters -/// - `faucet_id`: The account ID of the network faucet that will mint the assets -/// - `sender`: The account ID of the note creator (must be the faucet owner) -/// - `mint_inputs`: The input configuration specifying private or public output mode -/// - `attachment`: The [`NoteAttachment`] of the MINT note -/// - `rng`: Random number generator for creating the serial number +// HELPER FUNCTIONS +// ================================================================================================ + +// HELPER STRUCTURES +// ================================================================================================ + +/// Describes if a note could be consumed under a specific conditions: target account state +/// and block height. /// -/// # Errors -/// Returns an error if note creation fails. -pub fn create_mint_note( - faucet_id: AccountId, - sender: AccountId, - mint_inputs: MintNoteInputs, - attachment: NoteAttachment, - rng: &mut R, -) -> Result { - let note_script = WellKnownNote::MINT.script(); - let serial_num = rng.draw_word(); - - // MINT notes are always public for network execution - let note_type = NoteType::Public; - - // Convert MintNoteInputs to NoteInputs - let inputs = NoteInputs::from(mint_inputs); - - let tag = NoteTag::with_account_target(faucet_id); - - let metadata = NoteMetadata::new(sender, note_type, tag).with_attachment(attachment); - let assets = NoteAssets::new(vec![])?; // MINT notes have no assets - let recipient = NoteRecipient::new(serial_num, note_script, inputs); - - Ok(Note::new(assets, metadata, recipient)) +/// The status does not account for any authorization that may be required to consume the +/// note, nor does it indicate whether the account has sufficient fees to consume it. +#[derive(Debug)] +pub enum NoteConsumptionStatus { + /// The note can be consumed by the account at the specified block height. 
+ Consumable, + /// The note can be consumed by the account after the required block height is achieved. + ConsumableAfter(BlockNumber), + /// The note can be consumed by the account if proper authorization is provided. + ConsumableWithAuthorization, + /// The note cannot be consumed by the account at the specified conditions (i.e., block + /// height and account state). + UnconsumableConditions, + /// The note cannot be consumed by the specified account under any conditions. + NeverConsumable(Box), } -/// Generates a BURN note - a note that instructs a faucet to burn a fungible asset. -/// -/// This script enables the creation of a PUBLIC note that, when consumed by a faucet (either basic -/// or network), will burn the fungible assets contained in the note. Both basic and network -/// fungible faucets export the same `burn` procedure with identical MAST roots, allowing -/// a single BURN note script to work with either faucet type. -/// -/// BURN notes are always PUBLIC for network execution. -/// -/// The passed-in `rng` is used to generate a serial number for the note. The note's tag -/// is automatically set to the faucet's account ID for proper routing. -/// -/// # Parameters -/// - `sender`: The account ID of the note creator -/// - `faucet_id`: The account ID of the faucet that will burn the assets -/// - `fungible_asset`: The fungible asset to be burned -/// - `attachment`: The [`NoteAttachment`] of the BURN note -/// - `rng`: Random number generator for creating the serial number -/// -/// # Errors -/// Returns an error if note creation fails. 
-pub fn create_burn_note( - sender: AccountId, - faucet_id: AccountId, - fungible_asset: Asset, - attachment: NoteAttachment, - rng: &mut R, -) -> Result { - let note_script = WellKnownNote::BURN.script(); - let serial_num = rng.draw_word(); - - // BURN notes are always public - let note_type = NoteType::Public; - - let inputs = NoteInputs::new(vec![])?; - let tag = NoteTag::with_account_target(faucet_id); - - let metadata = NoteMetadata::new(sender, note_type, tag).with_attachment(attachment); - let assets = NoteAssets::new(vec![fungible_asset])?; // BURN notes contain the asset to burn - let recipient = NoteRecipient::new(serial_num, note_script, inputs); - - Ok(Note::new(assets, metadata, recipient)) +impl Clone for NoteConsumptionStatus { + fn clone(&self) -> Self { + match self { + NoteConsumptionStatus::Consumable => NoteConsumptionStatus::Consumable, + NoteConsumptionStatus::ConsumableAfter(block_height) => { + NoteConsumptionStatus::ConsumableAfter(*block_height) + }, + NoteConsumptionStatus::ConsumableWithAuthorization => { + NoteConsumptionStatus::ConsumableWithAuthorization + }, + NoteConsumptionStatus::UnconsumableConditions => { + NoteConsumptionStatus::UnconsumableConditions + }, + NoteConsumptionStatus::NeverConsumable(error) => { + let err = error.to_string(); + NoteConsumptionStatus::NeverConsumable(err.into()) + }, + } + } } diff --git a/crates/miden-standards/src/note/network_account_target.rs b/crates/miden-standards/src/note/network_account_target.rs index 2ea446bafb..4471c145f8 100644 --- a/crates/miden-standards/src/note/network_account_target.rs +++ b/crates/miden-standards/src/note/network_account_target.rs @@ -6,10 +6,10 @@ use miden_protocol::note::{ NoteAttachmentContent, NoteAttachmentKind, NoteAttachmentScheme, - NoteExecutionHint, + NoteType, }; -use crate::note::WellKnownNoteAttachment; +use crate::note::{NoteExecutionHint, StandardNoteAttachment}; // NETWORK ACCOUNT TARGET // 
================================================================================================ @@ -36,7 +36,7 @@ impl NetworkAccountTarget { /// The standardized scheme of [`NetworkAccountTarget`] attachments. pub const ATTACHMENT_SCHEME: NoteAttachmentScheme = - WellKnownNoteAttachment::NetworkAccountTarget.attachment_scheme(); + StandardNoteAttachment::NetworkAccountTarget.attachment_scheme(); // CONSTRUCTORS // -------------------------------------------------------------------------------------------- @@ -101,10 +101,10 @@ impl TryFrom<&NoteAttachment> for NetworkAccountTarget { let id_prefix = word[1]; let exec_hint = word[2]; - let target_id = AccountId::try_from([id_prefix, id_suffix]) + let target_id = AccountId::try_from_elements(id_suffix, id_prefix) .map_err(NetworkAccountTargetError::DecodeTargetId)?; - let exec_hint = NoteExecutionHint::try_from(exec_hint.as_int()) + let exec_hint = NoteExecutionHint::try_from(exec_hint.as_canonical_u64()) .map_err(NetworkAccountTargetError::DecodeExecutionHint)?; NetworkAccountTarget::new(target_id, exec_hint) @@ -137,6 +137,8 @@ pub enum NetworkAccountTargetError { DecodeTargetId(#[source] AccountIdError), #[error("failed to decode execution hint")] DecodeExecutionHint(#[source] NoteError), + #[error("network note must be public, but was {0:?}")] + NoteNotPublic(NoteType), } // TESTS diff --git a/crates/miden-standards/src/note/network_note.rs b/crates/miden-standards/src/note/network_note.rs new file mode 100644 index 0000000000..c0a1c51559 --- /dev/null +++ b/crates/miden-standards/src/note/network_note.rs @@ -0,0 +1,108 @@ +use miden_protocol::account::AccountId; +use miden_protocol::note::{Note, NoteAttachment, NoteMetadata, NoteType}; + +use crate::note::{NetworkAccountTarget, NetworkAccountTargetError, NoteExecutionHint}; + +/// A wrapper around a [`Note`] that is guaranteed to target a network account via a +/// [`NetworkAccountTarget`] attachment. 
+/// +/// This represents a note that is specifically targeted at a single network account. In the future, +/// other types of network notes may exist (e.g., SWAP notes that can be consumed by network +/// accounts but are not targeted at a specific one). +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct AccountTargetNetworkNote { + note: Note, +} + +impl AccountTargetNetworkNote { + /// Attempts to construct an [`AccountTargetNetworkNote`] from `note`. + /// + /// Returns an error if: + /// - the note is not [`NoteType::Public`]. + /// - the note's attachment cannot be decoded as a [`NetworkAccountTarget`]. + pub fn new(note: Note) -> Result { + // Network notes must be public. + if note.metadata().note_type() != NoteType::Public { + return Err(NetworkAccountTargetError::NoteNotPublic(note.metadata().note_type())); + } + + // Validate that the attachment is a valid NetworkAccountTarget. + NetworkAccountTarget::try_from(note.metadata().attachment())?; + Ok(Self { note }) + } + + /// Consumes `self` and returns the underlying [`Note`]. + pub fn into_note(self) -> Note { + self.note + } + + /// Returns a reference to the underlying [`Note`]. + pub fn as_note(&self) -> &Note { + &self.note + } + + /// Returns the [`NoteMetadata`] of the underlying note. + pub fn metadata(&self) -> &NoteMetadata { + self.note.metadata() + } + + /// Returns the target network [`AccountId`]. + pub fn target_account_id(&self) -> AccountId { + self.target().target_id() + } + + /// Returns the decoded [`NetworkAccountTarget`] attachment. + pub fn target(&self) -> NetworkAccountTarget { + NetworkAccountTarget::try_from(self.note.metadata().attachment()) + .expect("AccountTargetNetworkNote guarantees valid NetworkAccountTarget attachment") + } + + /// Returns the [`NoteExecutionHint`] from the decoded [`NetworkAccountTarget`] attachment. 
+ pub fn execution_hint(&self) -> NoteExecutionHint { + self.target().execution_hint() + } + + /// Returns the raw [`NoteAttachment`] from the note metadata. + pub fn attachment(&self) -> &NoteAttachment { + self.metadata().attachment() + } + + /// Returns the [`NoteType`] of the underlying note. + pub fn note_type(&self) -> NoteType { + self.metadata().note_type() + } +} + +/// Convenience helpers for [`Note`]s that may target a network account. +pub trait NetworkNoteExt { + /// Returns `true` if this note is public and its attachment decodes as a + /// [`NetworkAccountTarget`]. + fn is_network_note(&self) -> bool; + + /// Consumes `self` and returns an [`AccountTargetNetworkNote`], or an error if the attachment + /// is not a valid target. + fn into_account_target_network_note( + self, + ) -> Result; +} + +impl NetworkNoteExt for Note { + fn is_network_note(&self) -> bool { + self.metadata().note_type() == NoteType::Public + && NetworkAccountTarget::try_from(self.metadata().attachment()).is_ok() + } + + fn into_account_target_network_note( + self, + ) -> Result { + AccountTargetNetworkNote::new(self) + } +} + +impl TryFrom for AccountTargetNetworkNote { + type Error = NetworkAccountTargetError; + + fn try_from(note: Note) -> Result { + Self::new(note) + } +} diff --git a/crates/miden-standards/src/note/p2id.rs b/crates/miden-standards/src/note/p2id.rs new file mode 100644 index 0000000000..82ea64f41a --- /dev/null +++ b/crates/miden-standards/src/note/p2id.rs @@ -0,0 +1,213 @@ +use alloc::vec::Vec; + +use miden_protocol::account::AccountId; +use miden_protocol::assembly::Path; +use miden_protocol::asset::Asset; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use crate::StandardsLib; +// NOTE 
SCRIPT +// ================================================================================================ + +/// Path to the P2ID note script procedure in the standards library. +const P2ID_SCRIPT_PATH: &str = "::miden::standards::notes::p2id::main"; + +// Initialize the P2ID note script only once +static P2ID_SCRIPT: LazyLock = LazyLock::new(|| { + let standards_lib = StandardsLib::default(); + let path = Path::new(P2ID_SCRIPT_PATH); + NoteScript::from_library_reference(standards_lib.as_ref(), path) + .expect("Standards library contains P2ID note script procedure") +}); + +// P2ID NOTE +// ================================================================================================ + +/// TODO: add docs +pub struct P2idNote; + +impl P2idNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the P2ID note. + pub const NUM_STORAGE_ITEMS: usize = P2idNoteStorage::NUM_ITEMS; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the script of the P2ID (Pay-to-ID) note. + pub fn script() -> NoteScript { + P2ID_SCRIPT.clone() + } + + /// Returns the P2ID (Pay-to-ID) note script root. + pub fn script_root() -> Word { + P2ID_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Generates a P2ID note - Pay-to-ID note. + /// + /// This script enables the transfer of assets from the `sender` account to the `target` account + /// by specifying the target's account ID. + /// + /// The passed-in `rng` is used to generate a serial number for the note. The returned note's + /// tag is set to the target's account ID. + /// + /// # Errors + /// Returns an error if deserialization or compilation of the `P2ID` script fails. 
+ pub fn create( + sender: AccountId, + target: AccountId, + assets: Vec, + note_type: NoteType, + attachment: NoteAttachment, + rng: &mut R, + ) -> Result { + let serial_num = rng.draw_word(); + let recipient = P2idNoteStorage::new(target).into_recipient(serial_num); + + let tag = NoteTag::with_account_target(target); + + let metadata = + NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment); + let vault = NoteAssets::new(assets)?; + + Ok(Note::new(vault, metadata, recipient)) + } +} + +/// Canonical storage representation for a P2ID note. +/// +/// Contains the identifier of the target account that is authorized +/// to consume the note. Only the account matching this ID can execute +/// the note and claim its assets. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct P2idNoteStorage { + target: AccountId, +} + +impl P2idNoteStorage { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the P2ID note. + pub const NUM_ITEMS: usize = 2; + + /// Creates new P2ID note storage targeting the given account. + pub fn new(target: AccountId) -> Self { + Self { target } + } + + /// Consumes the storage and returns a P2ID [`NoteRecipient`] with the provided serial number. + /// + /// Notes created with this recipient will be P2ID notes consumable by the specified target + /// account stored in this [`P2idNoteStorage`]. + pub fn into_recipient(self, serial_num: Word) -> NoteRecipient { + NoteRecipient::new(serial_num, P2idNote::script(), NoteStorage::from(self)) + } + + /// Returns the target account ID. 
+ pub fn target(&self) -> AccountId { + self.target + } +} + +impl From for NoteStorage { + fn from(storage: P2idNoteStorage) -> Self { + // Storage layout: + // [ account_id_suffix, account_id_prefix ] + NoteStorage::new(vec![storage.target.suffix(), storage.target.prefix().as_felt()]) + .expect("number of storage items should not exceed max storage items") + } +} + +impl TryFrom<&[Felt]> for P2idNoteStorage { + type Error = NoteError; + + fn try_from(note_storage: &[Felt]) -> Result { + if note_storage.len() != P2idNote::NUM_STORAGE_ITEMS { + return Err(NoteError::InvalidNoteStorageLength { + expected: P2idNote::NUM_STORAGE_ITEMS, + actual: note_storage.len(), + }); + } + + let target = AccountId::try_from_elements(note_storage[0], note_storage[1]) + .map_err(|err| NoteError::other_with_source("failed to create account id", err))?; + + Ok(Self { target }) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::Felt; + use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + use miden_protocol::errors::NoteError; + + use super::*; + + #[test] + fn try_from_valid_storage_succeeds() { + let target = AccountId::dummy( + [1u8; 15], + AccountIdVersion::Version0, + AccountType::FungibleFaucet, + AccountStorageMode::Private, + ); + + let storage = vec![target.suffix(), target.prefix().as_felt()]; + + let parsed = + P2idNoteStorage::try_from(storage.as_slice()).expect("storage should be valid"); + + assert_eq!(parsed.target(), target); + } + + #[test] + fn try_from_invalid_length_returns_error() { + let storage = vec![Felt::ZERO]; + + let err = P2idNoteStorage::try_from(storage.as_slice()) + .expect_err("should fail due to invalid length"); + + assert!(matches!( + err, + NoteError::InvalidNoteStorageLength { + expected: P2idNote::NUM_STORAGE_ITEMS, + actual: 1 + } + )); + } + + #[test] + fn 
try_from_invalid_storage_contents_returns_error() { + let storage = vec![Felt::new(999u64), Felt::new(888u64)]; + + let err = P2idNoteStorage::try_from(storage.as_slice()) + .expect_err("should fail due to invalid account id encoding"); + + assert!(matches!(err, NoteError::Other { source: Some(_), .. })); + } +} diff --git a/crates/miden-standards/src/note/p2ide.rs b/crates/miden-standards/src/note/p2ide.rs new file mode 100644 index 0000000000..aa1bdafe15 --- /dev/null +++ b/crates/miden-standards/src/note/p2ide.rs @@ -0,0 +1,317 @@ +use alloc::vec::Vec; + +use miden_protocol::account::AccountId; +use miden_protocol::assembly::Path; +use miden_protocol::asset::Asset; +use miden_protocol::block::BlockNumber; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use crate::StandardsLib; +// NOTE SCRIPT +// ================================================================================================ + +/// Path to the P2IDE note script procedure in the standards library. +const P2IDE_SCRIPT_PATH: &str = "::miden::standards::notes::p2ide::main"; + +// Initialize the P2IDE note script only once +static P2IDE_SCRIPT: LazyLock = LazyLock::new(|| { + let standards_lib = StandardsLib::default(); + let path = Path::new(P2IDE_SCRIPT_PATH); + NoteScript::from_library_reference(standards_lib.as_ref(), path) + .expect("Standards library contains P2IDE note script procedure") +}); + +// P2IDE NOTE +// ================================================================================================ + +/// Pay-to-ID Extended (P2IDE) note abstraction. +/// +/// A P2IDE note enables transferring assets to a target account specified in the note storage. 
+/// The note may optionally include: +/// +/// - A reclaim height allowing the sender to recover assets if the note remains unconsumed +/// - A timelock height preventing consumption before a given block +/// +/// These constraints are encoded in `P2ideNoteStorage` and enforced by the associated note script. +pub struct P2ideNote; + +impl P2ideNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the P2IDE note. + pub const NUM_STORAGE_ITEMS: usize = P2ideNoteStorage::NUM_ITEMS; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the script of the P2IDE (Pay-to-ID extended) note. + pub fn script() -> NoteScript { + P2IDE_SCRIPT.clone() + } + + /// Returns the P2IDE (Pay-to-ID extended) note script root. + pub fn script_root() -> Word { + P2IDE_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Generates a P2IDE note using the provided storage configuration. + /// + /// The note recipient and execution constraints are derived from + /// `P2ideNoteStorage`. A random serial number is generated using `rng`, + /// and the note tag is set to the storage target account. + /// + /// # Errors + /// Returns an error if construction of the note recipient or asset vault fails. 
+ pub fn create( + sender: AccountId, + storage: P2ideNoteStorage, + assets: Vec, + note_type: NoteType, + attachment: NoteAttachment, + rng: &mut R, + ) -> Result { + let serial_num = rng.draw_word(); + let recipient = storage.into_recipient(serial_num)?; + let tag = NoteTag::with_account_target(storage.target()); + + let metadata = + NoteMetadata::new(sender, note_type).with_tag(tag).with_attachment(attachment); + let vault = NoteAssets::new(assets)?; + + Ok(Note::new(vault, metadata, recipient)) + } +} + +// P2IDE NOTE STORAGE +// ================================================================================================ + +/// Canonical storage representation for a P2IDE note. +/// +/// Stores the target account ID together with optional +/// reclaim and timelock constraints controlling when +/// the note can be spent or reclaimed. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct P2ideNoteStorage { + pub target: AccountId, + pub reclaim_height: Option, + pub timelock_height: Option, +} + +impl P2ideNoteStorage { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the P2IDE note. + pub const NUM_ITEMS: usize = 4; + + /// Creates new P2IDE note storage. + pub fn new( + target: AccountId, + reclaim_height: Option, + timelock_height: Option, + ) -> Self { + Self { target, reclaim_height, timelock_height } + } + + /// Consumes the storage and returns a P2IDE [`NoteRecipient`] with the provided serial number. + pub fn into_recipient(self, serial_num: Word) -> Result { + let note_script = P2ideNote::script(); + Ok(NoteRecipient::new(serial_num, note_script, self.into())) + } + + /// Returns the target account ID. + pub fn target(&self) -> AccountId { + self.target + } + + /// Returns the reclaim block height (if any). + pub fn reclaim_height(&self) -> Option { + self.reclaim_height + } + + /// Returns the timelock block height (if any). 
+ pub fn timelock_height(&self) -> Option { + self.timelock_height + } +} + +impl From for NoteStorage { + fn from(storage: P2ideNoteStorage) -> Self { + let reclaim = storage.reclaim_height.map(Felt::from).unwrap_or(Felt::ZERO); + let timelock = storage.timelock_height.map(Felt::from).unwrap_or(Felt::ZERO); + + NoteStorage::new(vec![ + storage.target.suffix(), + storage.target.prefix().as_felt(), + reclaim, + timelock, + ]) + .expect("number of storage items should not exceed max storage items") + } +} + +impl TryFrom<&[Felt]> for P2ideNoteStorage { + type Error = NoteError; + + fn try_from(note_storage: &[Felt]) -> Result { + if note_storage.len() != P2ideNote::NUM_STORAGE_ITEMS { + return Err(NoteError::InvalidNoteStorageLength { + expected: P2ideNote::NUM_STORAGE_ITEMS, + actual: note_storage.len(), + }); + } + + let target = AccountId::try_from_elements(note_storage[0], note_storage[1]) + .map_err(|err| NoteError::other_with_source("failed to create account id", err))?; + + let reclaim_height = if note_storage[2] == Felt::ZERO { + None + } else { + let height: u32 = note_storage[2] + .as_canonical_u64() + .try_into() + .map_err(|e| NoteError::other_with_source("invalid note storage", e))?; + + Some(BlockNumber::from(height)) + }; + + let timelock_height = if note_storage[3] == Felt::ZERO { + None + } else { + let height: u32 = note_storage[3] + .as_canonical_u64() + .try_into() + .map_err(|e| NoteError::other_with_source("invalid note storage", e))?; + + Some(BlockNumber::from(height)) + }; + + Ok(Self { target, reclaim_height, timelock_height }) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::Felt; + use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + use miden_protocol::block::BlockNumber; + use miden_protocol::errors::NoteError; + + use super::*; + + fn dummy_account() -> AccountId { + 
AccountId::dummy( + [3u8; 15], + AccountIdVersion::Version0, + AccountType::FungibleFaucet, + AccountStorageMode::Private, + ) + } + + #[test] + fn try_from_valid_storage_with_all_fields_succeeds() { + let target = dummy_account(); + + let storage = vec![ + target.suffix(), + target.prefix().as_felt(), + Felt::from(42u32), + Felt::from(100u32), + ]; + + let decoded = P2ideNoteStorage::try_from(storage.as_slice()) + .expect("valid P2IDE storage should decode"); + + assert_eq!(decoded.target(), target); + assert_eq!(decoded.reclaim_height(), Some(BlockNumber::from(42u32))); + assert_eq!(decoded.timelock_height(), Some(BlockNumber::from(100u32))); + } + + #[test] + fn try_from_zero_heights_map_to_none() { + let target = dummy_account(); + + let storage = vec![target.suffix(), target.prefix().as_felt(), Felt::ZERO, Felt::ZERO]; + + let decoded = P2ideNoteStorage::try_from(storage.as_slice()).unwrap(); + + assert_eq!(decoded.reclaim_height(), None); + assert_eq!(decoded.timelock_height(), None); + } + + #[test] + fn try_from_invalid_length_fails() { + let storage = vec![Felt::ZERO; 3]; + + let err = + P2ideNoteStorage::try_from(storage.as_slice()).expect_err("wrong length must fail"); + + assert!(matches!( + err, + NoteError::InvalidNoteStorageLength { + expected: P2ideNote::NUM_STORAGE_ITEMS, + actual: 3 + } + )); + } + + #[test] + fn try_from_invalid_account_id_fails() { + let storage = vec![Felt::new(999u64), Felt::new(888u64), Felt::ZERO, Felt::ZERO]; + + let err = P2ideNoteStorage::try_from(storage.as_slice()) + .expect_err("invalid account id encoding must fail"); + + assert!(matches!(err, NoteError::Other { source: Some(_), .. 
})); + } + + #[test] + fn try_from_reclaim_height_overflow_fails() { + let target = dummy_account(); + + // > u32::MAX + let overflow = Felt::new(u64::from(u32::MAX) + 1); + + let storage = vec![target.suffix(), target.prefix().as_felt(), overflow, Felt::ZERO]; + + let err = P2ideNoteStorage::try_from(storage.as_slice()) + .expect_err("overflow reclaim height must fail"); + + assert!(matches!(err, NoteError::Other { source: Some(_), .. })); + } + + #[test] + fn try_from_timelock_height_overflow_fails() { + let target = dummy_account(); + + let overflow = Felt::new(u64::from(u32::MAX) + 10); + + let storage = vec![target.suffix(), target.prefix().as_felt(), Felt::ZERO, overflow]; + + let err = P2ideNoteStorage::try_from(storage.as_slice()) + .expect_err("overflow timelock height must fail"); + + assert!(matches!(err, NoteError::Other { source: Some(_), .. })); + } +} diff --git a/crates/miden-standards/src/note/well_known_note_attachment.rs b/crates/miden-standards/src/note/standard_note_attachment.rs similarity index 52% rename from crates/miden-standards/src/note/well_known_note_attachment.rs rename to crates/miden-standards/src/note/standard_note_attachment.rs index 90ca70a5b3..17ec1332df 100644 --- a/crates/miden-standards/src/note/well_known_note_attachment.rs +++ b/crates/miden-standards/src/note/standard_note_attachment.rs @@ -1,18 +1,18 @@ use miden_protocol::note::NoteAttachmentScheme; -/// The [`NoteAttachmentScheme`]s of well-known note attachmens. +/// The [`NoteAttachmentScheme`]s of standard note attachments. #[derive(Debug, Clone, Copy, PartialEq, Eq)] #[non_exhaustive] -pub enum WellKnownNoteAttachment { +pub enum StandardNoteAttachment { /// See [`NetworkAccountTarget`](crate::note::NetworkAccountTarget) for details. NetworkAccountTarget, } -impl WellKnownNoteAttachment { - /// Returns the [`NoteAttachmentScheme`] of the well-known attachment. +impl StandardNoteAttachment { + /// Returns the [`NoteAttachmentScheme`] of the standard attachment. 
pub const fn attachment_scheme(&self) -> NoteAttachmentScheme { match self { - WellKnownNoteAttachment::NetworkAccountTarget => NoteAttachmentScheme::new(1u32), + StandardNoteAttachment::NetworkAccountTarget => NoteAttachmentScheme::new(1u32), } } } diff --git a/crates/miden-standards/src/note/swap.rs b/crates/miden-standards/src/note/swap.rs new file mode 100644 index 0000000000..b06bb7fa64 --- /dev/null +++ b/crates/miden-standards/src/note/swap.rs @@ -0,0 +1,249 @@ +use alloc::vec::Vec; + +use miden_protocol::account::AccountId; +use miden_protocol::assembly::Path; +use miden_protocol::asset::Asset; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{ + Note, + NoteAssets, + NoteAttachment, + NoteDetails, + NoteMetadata, + NoteRecipient, + NoteScript, + NoteStorage, + NoteTag, + NoteType, +}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; + +use crate::StandardsLib; +use crate::note::P2idNoteStorage; + +// NOTE SCRIPT +// ================================================================================================ + +/// Path to the SWAP note script procedure in the standards library. +const SWAP_SCRIPT_PATH: &str = "::miden::standards::notes::swap::main"; + +// Initialize the SWAP note script only once +static SWAP_SCRIPT: LazyLock = LazyLock::new(|| { + let standards_lib = StandardsLib::default(); + let path = Path::new(SWAP_SCRIPT_PATH); + NoteScript::from_library_reference(standards_lib.as_ref(), path) + .expect("Standards library contains SWAP note script procedure") +}); + +// SWAP NOTE +// ================================================================================================ + +/// TODO: add docs +pub struct SwapNote; + +impl SwapNote { + // CONSTANTS + // -------------------------------------------------------------------------------------------- + + /// Expected number of storage items of the SWAP note. 
+ pub const NUM_STORAGE_ITEMS: usize = 20; + + // PUBLIC ACCESSORS + // -------------------------------------------------------------------------------------------- + + /// Returns the script of the SWAP note. + pub fn script() -> NoteScript { + SWAP_SCRIPT.clone() + } + + /// Returns the SWAP note script root. + pub fn script_root() -> Word { + SWAP_SCRIPT.root() + } + + // BUILDERS + // -------------------------------------------------------------------------------------------- + + /// Generates a SWAP note - swap of assets between two accounts - and returns the note as well + /// as [`NoteDetails`] for the payback note. + /// + /// This script enables a swap of 2 assets between the `sender` account and any other account + /// that is willing to consume the note. The consumer will receive the `offered_asset` and + /// will create a new P2ID note with `sender` as target, containing the `requested_asset`. + /// + /// # Errors + /// Returns an error if deserialization or compilation of the `SWAP` script fails. 
+ pub fn create( + sender: AccountId, + offered_asset: Asset, + requested_asset: Asset, + swap_note_type: NoteType, + swap_note_attachment: NoteAttachment, + payback_note_type: NoteType, + payback_note_attachment: NoteAttachment, + rng: &mut R, + ) -> Result<(Note, NoteDetails), NoteError> { + if requested_asset == offered_asset { + return Err(NoteError::other("requested asset same as offered asset")); + } + + let note_script = Self::script(); + + let payback_serial_num = rng.draw_word(); + let payback_recipient = P2idNoteStorage::new(sender).into_recipient(payback_serial_num); + + let payback_tag = NoteTag::with_account_target(sender); + + let attachment_scheme = Felt::from(payback_note_attachment.attachment_scheme().as_u32()); + let attachment_kind = Felt::from(payback_note_attachment.attachment_kind().as_u8()); + let attachment = payback_note_attachment.content().to_word(); + + let mut storage = Vec::with_capacity(SwapNote::NUM_STORAGE_ITEMS); + storage.extend_from_slice(&[ + payback_note_type.into(), + payback_tag.into(), + attachment_scheme, + attachment_kind, + ]); + storage.extend_from_slice(attachment.as_elements()); + storage.extend_from_slice(&requested_asset.as_elements()); + storage.extend_from_slice(payback_recipient.digest().as_elements()); + let inputs = NoteStorage::new(storage)?; + + // build the tag for the SWAP use case + let tag = Self::build_tag(swap_note_type, &offered_asset, &requested_asset); + let serial_num = rng.draw_word(); + + // build the outgoing note + let metadata = NoteMetadata::new(sender, swap_note_type) + .with_tag(tag) + .with_attachment(swap_note_attachment); + let assets = NoteAssets::new(vec![offered_asset])?; + let recipient = NoteRecipient::new(serial_num, note_script, inputs); + let note = Note::new(assets, metadata, recipient); + + // build the payback note details + let payback_assets = NoteAssets::new(vec![requested_asset])?; + let payback_note = NoteDetails::new(payback_assets, payback_recipient); + + Ok((note, 
payback_note)) + } + + /// Returns a note tag for a swap note with the specified parameters. + /// + /// The tag is laid out as follows: + /// + /// ```text + /// [ + /// note_type (2 bits) | script_root (14 bits) + /// | offered_asset_faucet_id (8 bits) | requested_asset_faucet_id (8 bits) + /// ] + /// ``` + /// + /// The script root serves as the use case identifier of the SWAP tag. + pub fn build_tag( + note_type: NoteType, + offered_asset: &Asset, + requested_asset: &Asset, + ) -> NoteTag { + let swap_root_bytes = Self::script().root().as_bytes(); + // Construct the swap use case ID from the 14 most significant bits of the script root. This + // leaves the two most significant bits zero. + let mut swap_use_case_id = (swap_root_bytes[0] as u16) << 6; + swap_use_case_id |= (swap_root_bytes[1] >> 2) as u16; + + // Get bits 0..8 from the faucet IDs of both assets which will form the tag payload. + let offered_asset_id: u64 = offered_asset.faucet_id().prefix().into(); + let offered_asset_tag = (offered_asset_id >> 56) as u8; + + let requested_asset_id: u64 = requested_asset.faucet_id().prefix().into(); + let requested_asset_tag = (requested_asset_id >> 56) as u8; + + let asset_pair = ((offered_asset_tag as u16) << 8) | (requested_asset_tag as u16); + + let tag = ((note_type as u8 as u32) << 30) + | ((swap_use_case_id as u32) << 16) + | asset_pair as u32; + + NoteTag::new(tag) + } +} + +// TESTS +// ================================================================================================ + +#[cfg(test)] +mod tests { + use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; + use miden_protocol::asset::{FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; + use miden_protocol::{self}; + + use super::*; + + #[test] + fn swap_tag() { + // Construct an ID that starts with 0xcdb1. 
+ let mut fungible_faucet_id_bytes = [0; 15]; + fungible_faucet_id_bytes[0] = 0xcd; + fungible_faucet_id_bytes[1] = 0xb1; + + // Construct an ID that starts with 0xabec. + let mut non_fungible_faucet_id_bytes = [0; 15]; + non_fungible_faucet_id_bytes[0] = 0xab; + non_fungible_faucet_id_bytes[1] = 0xec; + + let offered_asset = Asset::Fungible( + FungibleAsset::new( + AccountId::dummy( + fungible_faucet_id_bytes, + AccountIdVersion::Version0, + AccountType::FungibleFaucet, + AccountStorageMode::Public, + ), + 2500, + ) + .unwrap(), + ); + + let requested_asset = Asset::NonFungible( + NonFungibleAsset::new( + &NonFungibleAssetDetails::new( + AccountId::dummy( + non_fungible_faucet_id_bytes, + AccountIdVersion::Version0, + AccountType::NonFungibleFaucet, + AccountStorageMode::Public, + ), + vec![0xaa, 0xbb, 0xcc, 0xdd], + ) + .unwrap(), + ) + .unwrap(), + ); + + // The fungible ID starts with 0xcdb1. + // The non fungible ID starts with 0xabec. + // The expected tag payload is thus 0xcdab. + let expected_asset_pair = 0xcdab; + + let note_type = NoteType::Public; + let actual_tag = SwapNote::build_tag(note_type, &offered_asset, &requested_asset); + + assert_eq!(actual_tag.as_u32() as u16, expected_asset_pair, "asset pair should match"); + assert_eq!((actual_tag.as_u32() >> 30) as u8, note_type as u8, "note type should match"); + // Check the 8 bits of the first script root byte. + assert_eq!( + (actual_tag.as_u32() >> 22) as u8, + SwapNote::script_root().as_bytes()[0], + "swap script root byte 0 should match" + ); + // Extract the 6 bits of the second script root byte and shift for comparison. 
+ assert_eq!( + ((actual_tag.as_u32() & 0b00000000_00111111_00000000_00000000) >> 16) as u8, + SwapNote::script_root().as_bytes()[1] >> 2, + "swap script root byte 1 should match with the lower two bits set to zero" + ); + } +} diff --git a/crates/miden-standards/src/note/utils.rs b/crates/miden-standards/src/note/utils.rs deleted file mode 100644 index b111e68bce..0000000000 --- a/crates/miden-standards/src/note/utils.rs +++ /dev/null @@ -1,160 +0,0 @@ -use miden_protocol::account::AccountId; -use miden_protocol::asset::Asset; -use miden_protocol::block::BlockNumber; -use miden_protocol::errors::NoteError; -use miden_protocol::note::{NoteInputs, NoteRecipient, NoteTag, NoteType}; -use miden_protocol::{Felt, Word}; - -use super::well_known_note::WellKnownNote; - -/// Creates a [NoteRecipient] for the P2ID note. -/// -/// Notes created with this recipient will be P2ID notes consumable by the specified target -/// account. -pub fn build_p2id_recipient( - target: AccountId, - serial_num: Word, -) -> Result { - let note_script = WellKnownNote::P2ID.script(); - let note_inputs = NoteInputs::new(vec![target.suffix(), target.prefix().as_felt()])?; - - Ok(NoteRecipient::new(serial_num, note_script, note_inputs)) -} - -/// Creates a [NoteRecipient] for the P2IDE note. -/// -/// Notes created with this recipient will be P2IDE notes consumable by the specified target -/// account. 
-pub fn build_p2ide_recipient( - target: AccountId, - reclaim_block_height: Option, - timelock_block_height: Option, - serial_num: Word, -) -> Result { - let note_script = WellKnownNote::P2IDE.script(); - - let reclaim_height_u32 = reclaim_block_height.map_or(0, |bn| bn.as_u32()); - let timelock_height_u32 = timelock_block_height.map_or(0, |bn| bn.as_u32()); - - let note_inputs = NoteInputs::new(vec![ - target.suffix(), - target.prefix().into(), - Felt::new(reclaim_height_u32 as u64), - Felt::new(timelock_height_u32 as u64), - ])?; - - Ok(NoteRecipient::new(serial_num, note_script, note_inputs)) -} - -/// Returns a note tag for a swap note with the specified parameters. -/// -/// The tag is laid out as follows: -/// -/// ```text -/// [ -/// note_type (2 bits) | script_root (14 bits) -/// | offered_asset_faucet_id (8 bits) | requested_asset_faucet_id (8 bits) -/// ] -/// ``` -/// -/// The script root serves as the use case identifier of the SWAP tag. -pub fn build_swap_tag( - note_type: NoteType, - offered_asset: &Asset, - requested_asset: &Asset, -) -> NoteTag { - let swap_root_bytes = WellKnownNote::SWAP.script().root().as_bytes(); - // Construct the swap use case ID from the 14 most significant bits of the script root. This - // leaves the two most significant bits zero. - let mut swap_use_case_id = (swap_root_bytes[0] as u16) << 6; - swap_use_case_id |= (swap_root_bytes[1] >> 2) as u16; - - // Get bits 0..8 from the faucet IDs of both assets which will form the tag payload. 
- let offered_asset_id: u64 = offered_asset.faucet_id_prefix().into(); - let offered_asset_tag = (offered_asset_id >> 56) as u8; - - let requested_asset_id: u64 = requested_asset.faucet_id_prefix().into(); - let requested_asset_tag = (requested_asset_id >> 56) as u8; - - let asset_pair = ((offered_asset_tag as u16) << 8) | (requested_asset_tag as u16); - - let tag = - ((note_type as u8 as u32) << 30) | ((swap_use_case_id as u32) << 16) | asset_pair as u32; - - NoteTag::new(tag) -} - -#[cfg(test)] -mod tests { - use miden_protocol::account::{AccountIdVersion, AccountStorageMode, AccountType}; - use miden_protocol::asset::{FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; - use miden_protocol::{self}; - - use super::*; - - #[test] - fn swap_tag() { - // Construct an ID that starts with 0xcdb1. - let mut fungible_faucet_id_bytes = [0; 15]; - fungible_faucet_id_bytes[0] = 0xcd; - fungible_faucet_id_bytes[1] = 0xb1; - - // Construct an ID that starts with 0xabec. - let mut non_fungible_faucet_id_bytes = [0; 15]; - non_fungible_faucet_id_bytes[0] = 0xab; - non_fungible_faucet_id_bytes[1] = 0xec; - - let offered_asset = Asset::Fungible( - FungibleAsset::new( - AccountId::dummy( - fungible_faucet_id_bytes, - AccountIdVersion::Version0, - AccountType::FungibleFaucet, - AccountStorageMode::Public, - ), - 2500, - ) - .unwrap(), - ); - - let requested_asset = Asset::NonFungible( - NonFungibleAsset::new( - &NonFungibleAssetDetails::new( - AccountId::dummy( - non_fungible_faucet_id_bytes, - AccountIdVersion::Version0, - AccountType::NonFungibleFaucet, - AccountStorageMode::Public, - ) - .prefix(), - vec![0xaa, 0xbb, 0xcc, 0xdd], - ) - .unwrap(), - ) - .unwrap(), - ); - - // The fungible ID starts with 0xcdb1. - // The non fungible ID starts with 0xabec. - // The expected tag payload is thus 0xcdab. 
- let expected_asset_pair = 0xcdab; - - let note_type = NoteType::Public; - let actual_tag = build_swap_tag(note_type, &offered_asset, &requested_asset); - - assert_eq!(actual_tag.as_u32() as u16, expected_asset_pair, "asset pair should match"); - assert_eq!((actual_tag.as_u32() >> 30) as u8, note_type as u8, "note type should match"); - // Check the 8 bits of the first script root byte. - assert_eq!( - (actual_tag.as_u32() >> 22) as u8, - WellKnownNote::SWAP.script().root().as_bytes()[0], - "swap script root byte 0 should match" - ); - // Extract the 6 bits of the second script root byte and shift for comparison. - assert_eq!( - ((actual_tag.as_u32() & 0b00000000_00111111_00000000_00000000) >> 16) as u8, - WellKnownNote::SWAP.script().root().as_bytes()[1] >> 2, - "swap script root byte 1 should match with the lower two bits set to zero" - ); - } -} diff --git a/crates/miden-standards/src/note/well_known_note.rs b/crates/miden-standards/src/note/well_known_note.rs deleted file mode 100644 index 98d44c59e2..0000000000 --- a/crates/miden-standards/src/note/well_known_note.rs +++ /dev/null @@ -1,480 +0,0 @@ -use alloc::boxed::Box; -use alloc::string::{String, ToString}; -use core::error::Error; - -use miden_protocol::account::AccountId; -use miden_protocol::block::BlockNumber; -use miden_protocol::note::{Note, NoteScript}; -use miden_protocol::utils::Deserializable; -use miden_protocol::utils::sync::LazyLock; -use miden_protocol::vm::Program; -use miden_protocol::{Felt, Word}; - -use crate::account::faucets::{BasicFungibleFaucet, NetworkFungibleFaucet}; -use crate::account::interface::{AccountComponentInterface, AccountInterface, AccountInterfaceExt}; -use crate::account::wallets::BasicWallet; - -// WELL KNOWN NOTE SCRIPTS -// ================================================================================================ - -// Initialize the P2ID note script only once -static P2ID_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = 
include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/P2ID.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped P2ID script is well-formed"); - NoteScript::new(program) -}); - -// Initialize the P2IDE note script only once -static P2IDE_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/P2IDE.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped P2IDE script is well-formed"); - NoteScript::new(program) -}); - -// Initialize the SWAP note script only once -static SWAP_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/SWAP.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped SWAP script is well-formed"); - NoteScript::new(program) -}); - -// Initialize the MINT note script only once -static MINT_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/MINT.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped MINT script is well-formed"); - NoteScript::new(program) -}); - -// Initialize the BURN note script only once -static BURN_SCRIPT: LazyLock = LazyLock::new(|| { - let bytes = include_bytes!(concat!(env!("OUT_DIR"), "/assets/note_scripts/BURN.masb")); - let program = Program::read_from_bytes(bytes).expect("Shipped BURN script is well-formed"); - NoteScript::new(program) -}); - -/// Returns the P2ID (Pay-to-ID) note script. -fn p2id() -> NoteScript { - P2ID_SCRIPT.clone() -} - -/// Returns the P2ID (Pay-to-ID) note script root. -fn p2id_root() -> Word { - P2ID_SCRIPT.root() -} - -/// Returns the P2IDE (Pay-to-ID with optional reclaim & timelock) note script. -fn p2ide() -> NoteScript { - P2IDE_SCRIPT.clone() -} - -/// Returns the P2IDE (Pay-to-ID with optional reclaim & timelock) note script root. -fn p2ide_root() -> Word { - P2IDE_SCRIPT.root() -} - -/// Returns the SWAP (Swap note) note script. 
-fn swap() -> NoteScript { - SWAP_SCRIPT.clone() -} - -/// Returns the SWAP (Swap note) note script root. -fn swap_root() -> Word { - SWAP_SCRIPT.root() -} - -/// Returns the MINT (Mint note) note script. -fn mint() -> NoteScript { - MINT_SCRIPT.clone() -} - -/// Returns the MINT (Mint note) note script root. -fn mint_root() -> Word { - MINT_SCRIPT.root() -} - -/// Returns the BURN (Burn note) note script. -fn burn() -> NoteScript { - BURN_SCRIPT.clone() -} - -/// Returns the BURN (Burn note) note script root. -fn burn_root() -> Word { - BURN_SCRIPT.root() -} - -// WELL KNOWN NOTE -// ================================================================================================ - -/// The enum holding the types of basic well-known notes provided by the `miden-lib`. -pub enum WellKnownNote { - P2ID, - P2IDE, - SWAP, - MINT, - BURN, -} - -impl WellKnownNote { - // CONSTANTS - // -------------------------------------------------------------------------------------------- - - /// Expected number of inputs of the P2ID note. - const P2ID_NUM_INPUTS: usize = 2; - - /// Expected number of inputs of the P2IDE note. - const P2IDE_NUM_INPUTS: usize = 4; - - /// Expected number of inputs of the SWAP note. - const SWAP_NUM_INPUTS: usize = 16; - - /// Expected number of inputs of the MINT note (private mode). - const MINT_NUM_INPUTS_PRIVATE: usize = 8; - - /// Expected number of inputs of the BURN note. - const BURN_NUM_INPUTS: usize = 0; - - // CONSTRUCTOR - // -------------------------------------------------------------------------------------------- - - /// Returns a [WellKnownNote] instance based on the note script of the provided [Note]. Returns - /// `None` if the provided note is not a basic well-known note. 
- pub fn from_note(note: &Note) -> Option { - let note_script_root = note.script().root(); - - if note_script_root == p2id_root() { - return Some(Self::P2ID); - } - if note_script_root == p2ide_root() { - return Some(Self::P2IDE); - } - if note_script_root == swap_root() { - return Some(Self::SWAP); - } - if note_script_root == mint_root() { - return Some(Self::MINT); - } - if note_script_root == burn_root() { - return Some(Self::BURN); - } - - None - } - - // PUBLIC ACCESSORS - // -------------------------------------------------------------------------------------------- - - /// Returns the expected inputs number of the active note. - pub fn num_expected_inputs(&self) -> usize { - match self { - Self::P2ID => Self::P2ID_NUM_INPUTS, - Self::P2IDE => Self::P2IDE_NUM_INPUTS, - Self::SWAP => Self::SWAP_NUM_INPUTS, - Self::MINT => Self::MINT_NUM_INPUTS_PRIVATE, - Self::BURN => Self::BURN_NUM_INPUTS, - } - } - - /// Returns the note script of the current [WellKnownNote] instance. - pub fn script(&self) -> NoteScript { - match self { - Self::P2ID => p2id(), - Self::P2IDE => p2ide(), - Self::SWAP => swap(), - Self::MINT => mint(), - Self::BURN => burn(), - } - } - - /// Returns the script root of the current [WellKnownNote] instance. - pub fn script_root(&self) -> Word { - match self { - Self::P2ID => p2id_root(), - Self::P2IDE => p2ide_root(), - Self::SWAP => swap_root(), - Self::MINT => mint_root(), - Self::BURN => burn_root(), - } - } - - /// Returns a boolean value indicating whether this [WellKnownNote] is compatible with the - /// provided [AccountInterface]. 
- pub fn is_compatible_with(&self, account_interface: &AccountInterface) -> bool { - if account_interface.components().contains(&AccountComponentInterface::BasicWallet) { - return true; - } - - let interface_proc_digests = account_interface.get_procedure_digests(); - match self { - Self::P2ID | &Self::P2IDE => { - // To consume P2ID and P2IDE notes, the `receive_asset` procedure must be present in - // the provided account interface. - interface_proc_digests.contains(&BasicWallet::receive_asset_digest()) - }, - Self::SWAP => { - // To consume SWAP note, the `receive_asset` and `move_asset_to_note` procedures - // must be present in the provided account interface. - interface_proc_digests.contains(&BasicWallet::receive_asset_digest()) - && interface_proc_digests.contains(&BasicWallet::move_asset_to_note_digest()) - }, - Self::MINT => { - // MINT notes work only with network fungible faucets. The network faucet uses - // note-based authentication (checking if the note sender equals the faucet owner) - // to authorize minting, while basic faucets have different mint procedures that - // are not compatible with MINT notes. - interface_proc_digests.contains(&NetworkFungibleFaucet::distribute_digest()) - }, - Self::BURN => { - // BURN notes work with both basic and network fungible faucets because both - // faucet types export the same `burn` procedure with identical MAST roots. - // This allows a single BURN note script to work with either faucet type. - interface_proc_digests.contains(&BasicFungibleFaucet::burn_digest()) - || interface_proc_digests.contains(&NetworkFungibleFaucet::burn_digest()) - }, - } - } - - /// Performs the inputs check of the provided well-known note against the target account and the - /// block number. - /// - /// This function returns: - /// - `Some` if we can definitively determine whether the note can be consumed not by the target - /// account. 
- /// - `None` if the consumption status of the note cannot be determined conclusively and further - /// checks are necessary. - pub fn is_consumable( - &self, - note: &Note, - target_account_id: AccountId, - block_ref: BlockNumber, - ) -> Option { - match self.is_consumable_inner(note, target_account_id, block_ref) { - Ok(status) => status, - Err(err) => { - let err: Box = Box::from(err); - Some(NoteConsumptionStatus::NeverConsumable(err)) - }, - } - } - - /// Performs the inputs check of the provided note against the target account and the block - /// number. - /// - /// It performs: - /// - for `P2ID` note: - /// - check that note inputs have correct number of values. - /// - assertion that the account ID provided by the note inputs is equal to the target - /// account ID. - /// - for `P2IDE` note: - /// - check that note inputs have correct number of values. - /// - check that the target account is either the receiver account or the sender account. - /// - check that depending on whether the target account is sender or receiver, it could be - /// either consumed, or consumed after timelock height, or consumed after reclaim height. 
- fn is_consumable_inner( - &self, - note: &Note, - target_account_id: AccountId, - block_ref: BlockNumber, - ) -> Result, StaticAnalysisError> { - match self { - WellKnownNote::P2ID => { - let input_account_id = parse_p2id_inputs(note.inputs().values())?; - - if input_account_id == target_account_id { - Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) - } else { - Ok(Some(NoteConsumptionStatus::NeverConsumable("account ID provided to the P2ID note inputs doesn't match the target account ID".into()))) - } - }, - WellKnownNote::P2IDE => { - let (receiver_account_id, reclaim_height, timelock_height) = - parse_p2ide_inputs(note.inputs().values())?; - - let current_block_height = block_ref.as_u32(); - - // block height after which sender account can consume the note - let consumable_after = reclaim_height.max(timelock_height); - - // handle the case when the target account of the transaction is sender - if target_account_id == note.metadata().sender() { - // For the sender, the current block height needs to have reached both reclaim - // and timelock height to be consumable. 
- if current_block_height >= consumable_after { - Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) - } else { - Ok(Some(NoteConsumptionStatus::ConsumableAfter(BlockNumber::from( - consumable_after, - )))) - } - // handle the case when the target account of the transaction is receiver - } else if target_account_id == receiver_account_id { - // For the receiver, the current block height needs to have reached only the - // timelock height to be consumable: we can ignore the reclaim height in this - // case - if current_block_height >= timelock_height { - Ok(Some(NoteConsumptionStatus::ConsumableWithAuthorization)) - } else { - Ok(Some(NoteConsumptionStatus::ConsumableAfter(BlockNumber::from( - timelock_height, - )))) - } - // if the target account is neither the sender nor the receiver (from the note's - // inputs), then this account cannot consume the note - } else { - Ok(Some(NoteConsumptionStatus::NeverConsumable( - "target account of the transaction does not match neither the receiver account specified by the P2IDE inputs, nor the sender account".into() - ))) - } - }, - - // the consumption status of any other note cannot be determined by the static analysis, - // further checks are necessary. - _ => Ok(None), - } - } -} - -// HELPER FUNCTIONS -// ================================================================================================ - -/// Returns the receiver account ID parsed from the provided P2ID note inputs. -/// -/// # Errors -/// -/// Returns an error if: -/// - the length of the provided note inputs array is not equal to the expected inputs number of the -/// P2ID note. -/// - first two elements of the note inputs array does not form the valid account ID. 
-fn parse_p2id_inputs(note_inputs: &[Felt]) -> Result { - if note_inputs.len() != WellKnownNote::P2ID.num_expected_inputs() { - return Err(StaticAnalysisError::new(format!( - "P2ID note should have {} inputs, but {} was provided", - WellKnownNote::P2ID.num_expected_inputs(), - note_inputs.len() - ))); - } - - try_read_account_id_from_inputs(note_inputs) -} - -/// Returns the receiver account ID, reclaim height and timelock height parsed from the provided -/// P2IDE note inputs. -/// -/// # Errors -/// -/// Returns an error if: -/// - the length of the provided note inputs array is not equal to the expected inputs number of the -/// P2IDE note. -/// - first two elements of the note inputs array does not form the valid account ID. -/// - third note inputs array element (reclaim height) is not a valid u32 value. -/// - fourth note inputs array element (timelock height) is not a valid u32 value. -fn parse_p2ide_inputs(note_inputs: &[Felt]) -> Result<(AccountId, u32, u32), StaticAnalysisError> { - if note_inputs.len() != WellKnownNote::P2IDE.num_expected_inputs() { - return Err(StaticAnalysisError::new(format!( - "P2IDE note should have {} inputs, but {} was provided", - WellKnownNote::P2IDE.num_expected_inputs(), - note_inputs.len() - ))); - } - - let receiver_account_id = try_read_account_id_from_inputs(note_inputs)?; - - let reclaim_height = u32::try_from(note_inputs[2]) - .map_err(|_err| StaticAnalysisError::new("reclaim block height should be a u32"))?; - - let timelock_height = u32::try_from(note_inputs[3]) - .map_err(|_err| StaticAnalysisError::new("timelock block height should be a u32"))?; - - Ok((receiver_account_id, reclaim_height, timelock_height)) -} - -/// Reads the account ID from the first two note input values. -/// -/// Returns None if the note input values used to construct the account ID are invalid. 
-fn try_read_account_id_from_inputs(note_inputs: &[Felt]) -> Result { - if note_inputs.len() < 2 { - return Err(StaticAnalysisError::new(format!( - "P2ID and P2IDE notes should have at least 2 note inputs, but {} was provided", - note_inputs.len() - ))); - } - - AccountId::try_from([note_inputs[1], note_inputs[0]]).map_err(|source| { - StaticAnalysisError::with_source( - "failed to create an account ID from the first two note inputs", - source, - ) - }) -} - -// HELPER STRUCTURES -// ================================================================================================ - -/// Describes if a note could be consumed under a specific conditions: target account state -/// and block height. -/// -/// The status does not account for any authorization that may be required to consume the -/// note, nor does it indicate whether the account has sufficient fees to consume it. -#[derive(Debug)] -pub enum NoteConsumptionStatus { - /// The note can be consumed by the account at the specified block height. - Consumable, - /// The note can be consumed by the account after the required block height is achieved. - ConsumableAfter(BlockNumber), - /// The note can be consumed by the account if proper authorization is provided. - ConsumableWithAuthorization, - /// The note cannot be consumed by the account at the specified conditions (i.e., block - /// height and account state). - UnconsumableConditions, - /// The note cannot be consumed by the specified account under any conditions. 
- NeverConsumable(Box), -} - -impl Clone for NoteConsumptionStatus { - fn clone(&self) -> Self { - match self { - NoteConsumptionStatus::Consumable => NoteConsumptionStatus::Consumable, - NoteConsumptionStatus::ConsumableAfter(block_height) => { - NoteConsumptionStatus::ConsumableAfter(*block_height) - }, - NoteConsumptionStatus::ConsumableWithAuthorization => { - NoteConsumptionStatus::ConsumableWithAuthorization - }, - NoteConsumptionStatus::UnconsumableConditions => { - NoteConsumptionStatus::UnconsumableConditions - }, - NoteConsumptionStatus::NeverConsumable(error) => { - let err = error.to_string(); - NoteConsumptionStatus::NeverConsumable(err.into()) - }, - } - } -} - -#[derive(thiserror::Error, Debug)] -#[error("{message}")] -struct StaticAnalysisError { - /// Stack size of `Box` is smaller than String. - message: Box, - /// thiserror will return this when calling Error::source on StaticAnalysisError. - source: Option>, -} - -impl StaticAnalysisError { - /// Creates a new static analysis error from an error message. - pub fn new(message: impl Into) -> Self { - let message: String = message.into(); - Self { message: message.into(), source: None } - } - - /// Creates a new static analysis error from an error message and a source error. 
- pub fn with_source( - message: impl Into, - source: impl Error + Send + Sync + 'static, - ) -> Self { - let message: String = message.into(); - Self { - message: message.into(), - source: Some(Box::new(source)), - } - } -} diff --git a/crates/miden-standards/src/testing/account_component/conditional_auth.rs b/crates/miden-standards/src/testing/account_component/conditional_auth.rs index 3ca719e220..64a5f4ce46 100644 --- a/crates/miden-standards/src/testing/account_component/conditional_auth.rs +++ b/crates/miden-standards/src/testing/account_component/conditional_auth.rs @@ -1,6 +1,7 @@ use alloc::string::String; -use miden_protocol::account::{AccountComponent, AccountComponentCode}; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{AccountComponent, AccountComponentCode, AccountType}; use miden_protocol::utils::sync::LazyLock; use crate::code_builder::CodeBuilder; @@ -14,6 +15,7 @@ static CONDITIONAL_AUTH_CODE: LazyLock = LazyLock::new(|| { const WRONG_ARGS="{ERR_WRONG_ARGS_MSG}" + @auth_script pub proc auth_conditional # => [AUTH_ARGS] @@ -50,8 +52,11 @@ pub struct ConditionalAuthComponent; impl From for AccountComponent { fn from(_: ConditionalAuthComponent) -> Self { - AccountComponent::new(CONDITIONAL_AUTH_LIBRARY.clone(), vec![]) + let metadata = + AccountComponentMetadata::new("miden::testing::conditional_auth", AccountType::all()) + .with_description("Testing auth component with conditional behavior"); + + AccountComponent::new(CONDITIONAL_AUTH_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") - .with_supports_all_types() } } diff --git a/crates/miden-standards/src/testing/account_component/incr_nonce.rs b/crates/miden-standards/src/testing/account_component/incr_nonce.rs index 34087c04eb..96dc055ba2 100644 --- a/crates/miden-standards/src/testing/account_component/incr_nonce.rs +++ b/crates/miden-standards/src/testing/account_component/incr_nonce.rs @@ -1,4 +1,5 @@ -use 
miden_protocol::account::AccountComponent; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{AccountComponent, AccountType}; use miden_protocol::assembly::Library; use miden_protocol::utils::sync::LazyLock; @@ -7,6 +8,7 @@ use crate::code_builder::CodeBuilder; const INCR_NONCE_AUTH_CODE: &str = " use miden::protocol::native_account + @auth_script pub proc auth_incr_nonce exec.native_account::incr_nonce drop end @@ -27,8 +29,11 @@ pub struct IncrNonceAuthComponent; impl From for AccountComponent { fn from(_: IncrNonceAuthComponent) -> Self { - AccountComponent::new(INCR_NONCE_AUTH_LIBRARY.clone(), vec![]) + let metadata = + AccountComponentMetadata::new("miden::testing::incr_nonce_auth", AccountType::all()) + .with_description("Testing auth component that always increments nonce"); + + AccountComponent::new(INCR_NONCE_AUTH_LIBRARY.clone(), vec![], metadata) .expect("component should be valid") - .with_supports_all_types() } } diff --git a/crates/miden-standards/src/testing/account_component/mock_account_component.rs b/crates/miden-standards/src/testing/account_component/mock_account_component.rs index 3b1f95dc1e..e3e089e2cb 100644 --- a/crates/miden-standards/src/testing/account_component/mock_account_component.rs +++ b/crates/miden-standards/src/testing/account_component/mock_account_component.rs @@ -1,6 +1,13 @@ use alloc::vec::Vec; -use miden_protocol::account::{AccountCode, AccountComponent, AccountStorage, StorageSlot}; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + AccountCode, + AccountComponent, + AccountStorage, + AccountType, + StorageSlot, +}; use crate::testing::mock_account_code::MockAccountCodeExt; @@ -54,8 +61,17 @@ impl MockAccountComponent { impl From for AccountComponent { fn from(mock_component: MockAccountComponent) -> Self { - AccountComponent::new(AccountCode::mock_account_library(), mock_component.storage_slots) - .expect("mock account 
component should satisfy the requirements of a valid account component") - .with_supports_all_types() + let metadata = + AccountComponentMetadata::new("miden::testing::mock_account", AccountType::all()) + .with_description("Mock account component for testing"); + + AccountComponent::new( + AccountCode::mock_account_library(), + mock_component.storage_slots, + metadata, + ) + .expect( + "mock account component should satisfy the requirements of a valid account component", + ) } } diff --git a/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs b/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs index 0520d541cc..23cffa2ec3 100644 --- a/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs +++ b/crates/miden-standards/src/testing/account_component/mock_faucet_component.rs @@ -1,3 +1,4 @@ +use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{AccountCode, AccountComponent, AccountType}; use crate::testing::mock_account_code::MockAccountCodeExt; @@ -18,9 +19,14 @@ pub struct MockFaucetComponent; impl From for AccountComponent { fn from(_: MockFaucetComponent) -> Self { - AccountComponent::new(AccountCode::mock_faucet_library(), vec![]) - .expect("mock faucet component should satisfy the requirements of a valid account component") - .with_supported_type(AccountType::FungibleFaucet) - .with_supported_type(AccountType::NonFungibleFaucet) + let metadata = AccountComponentMetadata::new( + "miden::testing::mock_faucet", + [AccountType::FungibleFaucet, AccountType::NonFungibleFaucet], + ) + .with_description("Mock faucet component for testing"); + + AccountComponent::new(AccountCode::mock_faucet_library(), vec![], metadata).expect( + "mock faucet component should satisfy the requirements of a valid account component", + ) } } diff --git a/crates/miden-standards/src/testing/mock_account.rs b/crates/miden-standards/src/testing/mock_account.rs index 
e92e5f595d..0a9be4e5ef 100644 --- a/crates/miden-standards/src/testing/mock_account.rs +++ b/crates/miden-standards/src/testing/mock_account.rs @@ -5,13 +5,9 @@ use miden_protocol::account::{ AccountId, AccountStorage, AccountType, - StorageMap, - StorageSlot, }; -use miden_protocol::asset::{AssetVault, NonFungibleAsset}; -use miden_protocol::testing::constants::{self}; +use miden_protocol::asset::AssetVault; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; -use miden_protocol::{Felt, Word, ZERO}; use crate::testing::account_component::{MockAccountComponent, MockFaucetComponent}; @@ -36,7 +32,7 @@ pub trait MockAccountExt { } /// Creates a mock account with fungible faucet storage and the given account ID. - fn mock_fungible_faucet(account_id: u128, initial_balance: Felt) -> Account { + fn mock_fungible_faucet(account_id: u128) -> Account { let account_id = AccountId::try_from(account_id).unwrap(); assert_eq!(account_id.account_type(), AccountType::FungibleFaucet); @@ -46,12 +42,7 @@ pub trait MockAccountExt { .with_component(MockFaucetComponent) .build_existing() .expect("account should be valid"); - let (_id, vault, mut storage, code, nonce, _seed) = account.into_parts(); - - let faucet_sysdata_slot = Word::from([ZERO, ZERO, ZERO, initial_balance]); - storage - .set_item(AccountStorage::faucet_sysdata_slot(), faucet_sysdata_slot) - .unwrap(); + let (_id, vault, storage, code, nonce, _seed) = account.into_parts(); Account::new_existing(account_id, vault, storage, code, nonce) } @@ -67,16 +58,7 @@ pub trait MockAccountExt { .with_component(MockFaucetComponent) .build_existing() .expect("account should be valid"); - let (_id, vault, _storage, code, nonce, _seed) = account.into_parts(); - - let asset = NonFungibleAsset::mock(&constants::NON_FUNGIBLE_ASSET_DATA_2); - let non_fungible_storage_map = - StorageMap::with_entries([(asset.vault_key().into(), asset.into())]).unwrap(); - let storage = AccountStorage::new(vec![StorageSlot::with_map( - 
AccountStorage::faucet_sysdata_slot().clone(), - non_fungible_storage_map, - )]) - .unwrap(); + let (_id, vault, storage, code, nonce, _seed) = account.into_parts(); Account::new_existing(account_id, vault, storage, code, nonce) } diff --git a/crates/miden-standards/src/testing/mock_account_code.rs b/crates/miden-standards/src/testing/mock_account_code.rs index cabcb23028..8146673c82 100644 --- a/crates/miden-standards/src/testing/mock_account_code.rs +++ b/crates/miden-standards/src/testing/mock_account_code.rs @@ -7,18 +7,18 @@ use crate::code_builder::CodeBuilder; const MOCK_FAUCET_CODE: &str = " use miden::protocol::faucet - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [NEW_ASSET_VALUE, pad(12)] pub proc mint exec.faucet::mint - # => [ASSET, pad(12)] + # => [NEW_ASSET_VALUE, pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [ASSET_VALUE, pad(12)] pub proc burn exec.faucet::burn - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end "; @@ -105,18 +105,18 @@ const MOCK_ACCOUNT_CODE: &str = " # => [STORAGE_COMMITMENT, pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET', pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [ASSET_VALUE', pad(12)] pub proc add_asset exec.native_account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] end - #! Inputs: [ASSET, pad(12)] - #! Outputs: [ASSET, pad(12)] + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [ASSET_VALUE, pad(12)] pub proc remove_asset exec.native_account::remove_asset - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] end #! 
Inputs: [pad(16)] diff --git a/crates/miden-standards/src/testing/mock_util_lib.rs b/crates/miden-standards/src/testing/mock_util_lib.rs new file mode 100644 index 0000000000..211feed5d9 --- /dev/null +++ b/crates/miden-standards/src/testing/mock_util_lib.rs @@ -0,0 +1,75 @@ +use miden_protocol::assembly::Library; +use miden_protocol::assembly::diagnostics::NamedSource; +use miden_protocol::transaction::TransactionKernel; +use miden_protocol::utils::sync::LazyLock; + +use crate::StandardsLib; + +const MOCK_UTIL_LIBRARY_CODE: &str = " + use miden::protocol::output_note + use miden::standards::wallets::basic->wallet + + #! Inputs: [] + #! Outputs: [note_idx] + pub proc create_default_note + push.1.2.3.4 # = RECIPIENT + push.2 # = NoteType::Private + push.0 # = NoteTag + # => [tag, note_type, RECIPIENT] + + exec.output_note::create + # => [note_idx] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [] + pub proc create_default_note_with_asset + exec.create_default_note + # => [note_idx, ASSET_KEY, ASSET_VALUE] + + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx] + + exec.output_note::add_asset + # => [] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [] + pub proc create_default_note_with_moved_asset + exec.create_default_note + # => [note_idx, ASSET_KEY, ASSET_VALUE] + + movdn.8 + # => [ASSET_KEY, ASSET_VALUE, note_idx] + + exec.move_asset_to_note + # => [] + end + + #! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx] + #! 
Outputs: [] + pub proc move_asset_to_note + repeat.7 push.0 movdn.9 end + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + call.wallet::move_asset_to_note + + dropw dropw dropw dropw + end +"; + +static MOCK_UTIL_LIBRARY: LazyLock = LazyLock::new(|| { + TransactionKernel::assembler() + .with_dynamic_library(StandardsLib::default()) + .expect("dynamically linking standards library should work") + .assemble_library([NamedSource::new("mock::util", MOCK_UTIL_LIBRARY_CODE)]) + .expect("mock util library should be valid") +}); + +/// Returns the mock test [`Library`] under the `mock::util` namespace. +/// +/// This provides convenient wrappers for testing purposes. +pub fn mock_util_library() -> Library { + MOCK_UTIL_LIBRARY.clone() +} diff --git a/crates/miden-standards/src/testing/mod.rs b/crates/miden-standards/src/testing/mod.rs index f08811b562..01cf73f63c 100644 --- a/crates/miden-standards/src/testing/mod.rs +++ b/crates/miden-standards/src/testing/mod.rs @@ -4,4 +4,5 @@ pub mod account_interface; pub mod mock_account; pub mod mock_account_code; +pub mod mock_util_lib; pub mod note; diff --git a/crates/miden-standards/src/testing/note.rs b/crates/miden-standards/src/testing/note.rs index 4e4460f432..2f7087381e 100644 --- a/crates/miden-standards/src/testing/note.rs +++ b/crates/miden-standards/src/testing/note.rs @@ -11,9 +11,9 @@ use miden_protocol::note::{ Note, NoteAssets, NoteAttachment, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -29,7 +29,7 @@ use crate::code_builder::CodeBuilder; #[derive(Debug, Clone)] pub struct NoteBuilder { sender: AccountId, - inputs: Vec, + storage: Vec, assets: Vec, note_type: NoteType, serial_num: Word, @@ -51,7 +51,7 @@ impl NoteBuilder { Self { sender, - inputs: vec![], + storage: vec![], assets: vec![], note_type: NoteType::Public, serial_num, @@ -64,15 +64,15 @@ impl NoteBuilder { } } - /// Set the note's input to `inputs`. + /// Set the note's storage to `storage`. 
/// /// Note: This overwrite the inputs, the previous input values are discarded. - pub fn note_inputs( + pub fn note_storage( mut self, - inputs: impl IntoIterator, + storage: impl IntoIterator, ) -> Result { - let validate = NoteInputs::new(inputs.into_iter().collect())?; - self.inputs = validate.into(); + let validate = NoteStorage::new(storage.into_iter().collect())?; + self.storage = validate.into(); Ok(self) } @@ -132,8 +132,8 @@ impl NoteBuilder { SourceLanguage::Masm, Uri::new(format!( "note_{:x}{:x}", - self.serial_num[0].as_int(), - self.serial_num[1].as_int() + self.serial_num[0].as_canonical_u64(), + self.serial_num[1].as_canonical_u64() )), self.code, ); @@ -149,10 +149,11 @@ impl NoteBuilder { .compile_note_script(virtual_source_file) .expect("note script should compile"); let vault = NoteAssets::new(self.assets)?; - let metadata = NoteMetadata::new(self.sender, self.note_type, self.tag) + let metadata = NoteMetadata::new(self.sender, self.note_type) + .with_tag(self.tag) .with_attachment(self.attachment); - let inputs = NoteInputs::new(self.inputs)?; - let recipient = NoteRecipient::new(self.serial_num, note_script, inputs); + let storage = NoteStorage::new(self.storage)?; + let recipient = NoteRecipient::new(self.serial_num, note_script, storage); Ok(Note::new(vault, metadata, recipient)) } diff --git a/crates/miden-testing/Cargo.toml b/crates/miden-testing/Cargo.toml index af565ab386..28b86046ab 100644 --- a/crates/miden-testing/Cargo.toml +++ b/crates/miden-testing/Cargo.toml @@ -13,7 +13,12 @@ rust-version.workspace = true version.workspace = true [features] -std = ["miden-protocol/std", "miden-standards/std", "miden-tx/std"] +default = ["tx_context_debug"] +std = ["miden-protocol/std", "miden-standards/std", "miden-tx/std"] +# Enables debug mode in the transaction context, which in turn enables debug mode in the transaction +# executor. It is enabled by default and should generally be enabled. 
Can be opted out of to improve +# CI test performance. +tx_context_debug = [] [dependencies] # Workspace dependencies @@ -27,6 +32,7 @@ miden-tx-batch-prover = { features = ["testing"], workspace = true } # Miden dependencies miden-assembly = { workspace = true } miden-core-lib = { workspace = true } +miden-crypto = { workspace = true } miden-processor = { workspace = true } # External dependencies @@ -34,15 +40,16 @@ anyhow = { workspace = true } itertools = { default-features = false, features = ["use_alloc"], version = "0.14" } rand = { features = ["os_rng", "small_rng"], workspace = true } rand_chacha = { workspace = true } -winterfell = { version = "0.13" } +thiserror = { workspace = true } [dev-dependencies] -anyhow = { features = ["backtrace", "std"], workspace = true } -assert_matches = { workspace = true } -hex = { version = "0.4" } -miden-crypto = { workspace = true } -miden-protocol = { features = ["std"], workspace = true } -primitive-types = { workspace = true } -rstest = { workspace = true } -tokio = { features = ["macros", "rt"], workspace = true } -winter-rand-utils = { version = "0.13" } +anyhow = { features = ["backtrace", "std"], workspace = true } +assert_matches = { workspace = true } +hex = { version = "0.4" } +miden-crypto = { workspace = true } +miden-protocol = { features = ["std"], workspace = true } +primitive-types = { workspace = true } +rstest = { workspace = true } +serde = { features = ["derive"], workspace = true } +serde_json = { features = ["arbitrary_precision"], version = "1.0" } +tokio = { features = ["macros", "rt"], workspace = true } diff --git a/crates/miden-testing/src/executor.rs b/crates/miden-testing/src/executor.rs index b5b9b1e085..6e0486d502 100644 --- a/crates/miden-testing/src/executor.rs +++ b/crates/miden-testing/src/executor.rs @@ -1,10 +1,12 @@ #[cfg(test)] use miden_processor::DefaultHost; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, AsyncHost, 
ExecutionError, Program, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ExecutionOutput, FastProcessor, Host, Program, StackInputs}; #[cfg(test)] use miden_protocol::assembly::Assembler; +use crate::ExecError; + // CODE EXECUTOR // ================================================================================================ @@ -15,7 +17,7 @@ pub(crate) struct CodeExecutor { advice_inputs: AdviceInputs, } -impl CodeExecutor { +impl CodeExecutor { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- pub(crate) fn new(host: H) -> Self { @@ -37,11 +39,8 @@ impl CodeExecutor { } /// Compiles and runs the desired code in the host and returns the [`Process`] state. - /// - /// To improve the error message quality, convert the returned [`ExecutionError`] into a - /// [`Report`](miden_protocol::assembly::diagnostics::Report). #[cfg(test)] - pub async fn run(self, code: &str) -> Result { + pub async fn run(self, code: &str) -> Result { use alloc::borrow::ToOwned; use alloc::sync::Arc; @@ -64,22 +63,15 @@ impl CodeExecutor { /// /// To improve the error message quality, convert the returned [`ExecutionError`] into a /// [`Report`](miden_protocol::assembly::diagnostics::Report). - pub async fn execute_program( - mut self, - program: Program, - ) -> Result { - // This reverses the stack inputs (even though it doesn't look like it does) because the - // fast processor expects the reverse order. - // - // Once we use the FastProcessor for execution and proving, we can change the way these - // inputs are constructed in TransactionKernel::prepare_inputs. 
- let stack_inputs = - StackInputs::new(self.stack_inputs.unwrap_or_default().iter().copied().collect()) - .unwrap(); - - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), self.advice_inputs); - - let execution_output = processor.execute(&program, &mut self.host).await?; + pub async fn execute_program(mut self, program: Program) -> Result { + let stack_inputs = self.stack_inputs.unwrap_or_default(); + + let processor = FastProcessor::new(stack_inputs) + .with_advice(self.advice_inputs) + .with_debugging(true); + + let execution_output = + processor.execute(&program, &mut self.host).await.map_err(ExecError::new)?; Ok(execution_output) } @@ -88,12 +80,16 @@ impl CodeExecutor { #[cfg(test)] impl CodeExecutor { pub fn with_default_host() -> Self { + use miden_core_lib::CoreLibrary; use miden_protocol::ProtocolLib; use miden_protocol::transaction::TransactionKernel; use miden_standards::StandardsLib; let mut host = DefaultHost::default(); + let core_lib = CoreLibrary::default(); + host.load_library(core_lib.mast_forest()).unwrap(); + let standards_lib = StandardsLib::default(); host.load_library(standards_lib.mast_forest()).unwrap(); diff --git a/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs b/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs index 2d44e187b9..54cef785ee 100644 --- a/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs +++ b/crates/miden-testing/src/kernel_tests/batch/proposed_batch.rs @@ -11,7 +11,13 @@ use miden_protocol::crypto::merkle::MerkleError; use miden_protocol::errors::{BatchAccountUpdateError, ProposedBatchError}; use miden_protocol::note::{Note, NoteType}; use miden_protocol::testing::account_id::AccountIdBuilder; -use miden_protocol::transaction::{InputNote, InputNoteCommitment, OutputNote, PartialBlockchain}; +use miden_protocol::transaction::{ + InputNote, + InputNoteCommitment, + OutputNote, + PartialBlockchain, + RawOutputNote, +}; use 
miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::note::NoteBuilder; use rand::rngs::SmallRng; @@ -31,7 +37,7 @@ pub fn mock_note(num: u8) -> Note { } pub fn mock_output_note(num: u8) -> OutputNote { - OutputNote::Full(mock_note(num)) + RawOutputNote::Full(mock_note(num)).to_output_note().unwrap() } struct TestSetup { @@ -89,12 +95,12 @@ fn note_created_and_consumed_in_same_batch() -> anyhow::Result<()> { let note = mock_note(40); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) - .output_notes(vec![OutputNote::Full(note.clone())]) + .output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note.clone()]) .build()?; @@ -121,12 +127,12 @@ fn duplicate_unauthenticated_input_notes() -> anyhow::Result<()> { let note = mock_note(50); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note.clone()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note.clone()]) .build()?; @@ -160,12 +166,12 @@ fn duplicate_authenticated_input_notes() -> anyhow::Result<()> { let block2 = chain.prove_next_block()?; let tx1 = - 
MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1.clone()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1.clone()]) .build()?; @@ -199,12 +205,12 @@ fn duplicate_mixed_input_notes() -> anyhow::Result<()> { let block2 = chain.prove_next_block()?; let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note1.clone()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1.clone()]) .build()?; @@ -238,12 +244,12 @@ fn duplicate_output_notes() -> anyhow::Result<()> { let note0 = mock_output_note(50); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .output_notes(vec![note0.clone()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .output_notes(vec![note0.clone()]) .build()?; @@ -281,8 +287,8 @@ async fn 
unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> let tx = chain .build_tx_context(account1.clone(), &[spawn_note.id()], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(note1.clone()), - OutputNote::Full(note2.clone()), + RawOutputNote::Full(note1.clone()), + RawOutputNote::Full(note2.clone()), ]) .build()? .execute() @@ -303,20 +309,20 @@ async fn unauthenticated_note_converted_to_authenticated() -> anyhow::Result<()> block1 .body() .output_notes() - .any(|(_, note)| note.commitment() == note1.commitment()), + .any(|(_, note)| note.to_commitment() == note1.commitment()), "block 1 should contain note1" ); assert!( block1 .body() .output_notes() - .any(|(_, note)| note.commitment() == note2.commitment()), + .any(|(_, note)| note.to_commitment() == note2.commitment()), "block 1 should contain note2" ); // Consume the authenticated note as an unauthenticated one in the transaction. let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block2.header().commitment()) .unauthenticated_notes(vec![note2.clone()]) .build()?; @@ -425,12 +431,12 @@ fn authenticated_note_created_in_same_batch() -> anyhow::Result<()> { let note0 = mock_note(50); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) - .output_notes(vec![OutputNote::Full(note0.clone())]) + .output_notes(vec![RawOutputNote::Full(note0.clone()).to_output_note().unwrap()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) 
.authenticated_notes(vec![note1.clone()]) .build()?; @@ -461,7 +467,7 @@ fn multiple_transactions_against_same_account() -> anyhow::Result<()> { let tx1 = MockProvenTxBuilder::with_account( account1.id(), initial_state_commitment, - account1.commitment(), + account1.to_commitment(), ) .ref_block_commitment(block1.commitment()) .output_notes(vec![mock_output_note(0)]) @@ -471,7 +477,7 @@ fn multiple_transactions_against_same_account() -> anyhow::Result<()> { let final_state_commitment = mock_note(10).commitment(); let tx2 = MockProvenTxBuilder::with_account( account1.id(), - account1.commitment(), + account1.to_commitment(), final_state_commitment, ) .ref_block_commitment(block1.commitment()) @@ -519,31 +525,42 @@ fn multiple_transactions_against_same_account() -> anyhow::Result<()> { /// Tests that the input and outputs notes commitment is correctly computed. /// - Notes created and consumed in the same batch are erased from these commitments. -/// - The input note commitment is sorted by the order in which the notes appeared in the batch. +/// - The input note commitment is sorted by [`Nullifier`]. /// - The output note commitment is sorted by [`NoteId`]. #[test] fn input_and_output_notes_commitment() -> anyhow::Result<()> { let TestSetup { chain, account1, account2, .. } = setup_chain(); let block1 = chain.block_header(1); - let note0 = mock_output_note(50); - let note1 = mock_note(60); - let note2 = mock_output_note(70); - let note3 = mock_output_note(80); - let note4 = mock_note(90); - let note5 = mock_note(100); + // Randomize the note IDs and nullifiers on each test run to make sure the sorting property + // is tested with various inputs. + let mut rng = rand::rng(); + // Generate a single random number and derive other unique numbers from it to avoid collisions. 
+ let note_num = rng.random(); + + let note0 = mock_output_note(note_num); + let note1 = mock_note(note_num.wrapping_add(1)); + let note2 = mock_output_note(note_num.wrapping_add(2)); + let note3 = mock_output_note(note_num.wrapping_add(3)); + let note4 = mock_note(note_num.wrapping_add(4)); + let note5 = mock_note(note_num.wrapping_add(5)); + let note6 = mock_note(note_num.wrapping_add(6)); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note1.clone(), note5.clone()]) .output_notes(vec![note0.clone()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) - .unauthenticated_notes(vec![note4.clone()]) - .output_notes(vec![OutputNote::Full(note1.clone()), note2.clone(), note3.clone()]) + .unauthenticated_notes(vec![note4.clone(), note6.clone()]) + .output_notes(vec![ + RawOutputNote::Full(note1.clone()).to_output_note().unwrap(), + note2.clone(), + note3.clone(), + ]) .build()?; let batch = ProposedBatch::new( @@ -562,15 +579,17 @@ fn input_and_output_notes_commitment() -> anyhow::Result<()> { assert_eq!(batch.output_notes().len(), 3); assert_eq!(batch.output_notes(), expected_output_notes); + let mut expected_input_notes = [ + InputNoteCommitment::from(&InputNote::unauthenticated(note4)), + InputNoteCommitment::from(&InputNote::unauthenticated(note5)), + InputNoteCommitment::from(&InputNote::unauthenticated(note6)), + ]; + // We expect a vector sorted by Nullifier (since InputOutputNoteTracker is set up that way). + expected_input_notes.sort_unstable_by_key(InputNoteCommitment::nullifier); + // Input notes are sorted by the order in which they appeared in the batch. 
- assert_eq!(batch.input_notes().num_notes(), 2); - assert_eq!( - batch.input_notes().clone().into_vec(), - &[ - InputNoteCommitment::from(&InputNote::unauthenticated(note5)), - InputNoteCommitment::from(&InputNote::unauthenticated(note4)), - ] - ); + assert_eq!(batch.input_notes().num_notes(), 3); + assert_eq!(batch.input_notes().clone().into_vec(), &expected_input_notes); Ok(()) } @@ -582,14 +601,14 @@ fn batch_expiration() -> anyhow::Result<()> { let block1 = chain.block_header(1); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .expiration_block_num(BlockNumber::from(35)) .build()?; // This transaction has the smallest valid expiration block num that allows it to still be // included in the batch. let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .expiration_block_num(block1.block_num() + 1) .build()?; @@ -613,7 +632,7 @@ fn duplicate_transaction() -> anyhow::Result<()> { let block1 = chain.block_header(1); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .expiration_block_num(BlockNumber::from(35)) .build()?; @@ -643,16 +662,16 @@ fn circular_note_dependency() -> anyhow::Result<()> { let note_y = mock_note(30); let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note_x.clone()]) - 
.output_notes(vec![OutputNote::Full(note_y.clone())]) + .output_notes(vec![RawOutputNote::Full(note_y.clone()).to_output_note().unwrap()]) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .unauthenticated_notes(vec![note_y.clone()]) - .output_notes(vec![OutputNote::Full(note_x.clone())]) + .output_notes(vec![RawOutputNote::Full(note_x.clone()).to_output_note().unwrap()]) .build()?; let batch = ProposedBatch::new( @@ -676,12 +695,12 @@ fn expired_transaction() -> anyhow::Result<()> { // This transaction expired at the batch's reference block. let tx1 = - MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.commitment()) + MockProvenTxBuilder::with_account(account1.id(), Word::empty(), account1.to_commitment()) .ref_block_commitment(block1.commitment()) .expiration_block_num(block1.block_num()) .build()?; let tx2 = - MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.commitment()) + MockProvenTxBuilder::with_account(account2.id(), Word::empty(), account2.to_commitment()) .ref_block_commitment(block1.commitment()) .expiration_block_num(block1.block_num() + 3) .build()?; @@ -722,12 +741,12 @@ fn noop_tx_before_state_updating_tx_against_same_account() -> anyhow::Result<()> // consume a random note to make the transaction non-empty let noop_tx1 = MockProvenTxBuilder::with_account( account1.id(), - account1.commitment(), - account1.commitment(), + account1.to_commitment(), + account1.to_commitment(), ) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1]) - .output_notes(vec![OutputNote::Full(note.clone())]) + .output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) .build()?; // sanity check @@ -738,7 +757,7 @@ fn noop_tx_before_state_updating_tx_against_same_account() -> anyhow::Result<()> 
let tx2 = MockProvenTxBuilder::with_account( account1.id(), - account1.commitment(), + account1.to_commitment(), random_final_state_commitment, ) .ref_block_commitment(block1.commitment()) @@ -753,7 +772,7 @@ fn noop_tx_before_state_updating_tx_against_same_account() -> anyhow::Result<()> )?; let update = batch.account_updates().get(&account1.id()).unwrap(); - assert_eq!(update.initial_state_commitment(), account1.commitment()); + assert_eq!(update.initial_state_commitment(), account1.to_commitment()); assert_eq!(update.final_state_commitment(), random_final_state_commitment); Ok(()) @@ -773,7 +792,7 @@ fn noop_tx_after_state_updating_tx_against_same_account() -> anyhow::Result<()> let tx1 = MockProvenTxBuilder::with_account( account1.id(), - account1.commitment(), + account1.to_commitment(), random_final_state_commitment, ) .ref_block_commitment(block1.commitment()) @@ -788,7 +807,7 @@ fn noop_tx_after_state_updating_tx_against_same_account() -> anyhow::Result<()> ) .ref_block_commitment(block1.commitment()) .authenticated_notes(vec![note1]) - .output_notes(vec![OutputNote::Full(note.clone())]) + .output_notes(vec![RawOutputNote::Full(note.clone()).to_output_note().unwrap()]) .build()?; // sanity check @@ -805,7 +824,7 @@ fn noop_tx_after_state_updating_tx_against_same_account() -> anyhow::Result<()> )?; let update = batch.account_updates().get(&account1.id()).unwrap(); - assert_eq!(update.initial_state_commitment(), account1.commitment()); + assert_eq!(update.initial_state_commitment(), account1.to_commitment()); assert_eq!(update.final_state_commitment(), random_final_state_commitment); Ok(()) diff --git a/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs b/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs index 95be1c04c5..abf6b8809e 100644 --- a/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs +++ b/crates/miden-testing/src/kernel_tests/batch/proven_tx_builder.rs @@ -3,15 +3,17 @@ use alloc::vec::Vec; use 
anyhow::Context; use miden_protocol::Word; use miden_protocol::account::AccountId; +use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::BlockNumber; use miden_protocol::crypto::merkle::SparseMerklePath; use miden_protocol::note::{Note, NoteInclusionProof, Nullifier}; use miden_protocol::transaction::{ InputNote, + InputNoteCommitment, OutputNote, ProvenTransaction, - ProvenTransactionBuilder, + TxAccountUpdate, }; use miden_protocol::vm::ExecutionProof; @@ -102,21 +104,36 @@ impl MockProvenTxBuilder { /// Builds the [`ProvenTransaction`] and returns potential errors. pub fn build(self) -> anyhow::Result { - ProvenTransactionBuilder::new( + let mut input_note_commitments: Vec = self + .input_notes + .unwrap_or_default() + .into_iter() + .map(InputNoteCommitment::from) + .collect(); + + // Add nullifiers as input note commitments + input_note_commitments + .extend(self.nullifiers.unwrap_or_default().into_iter().map(InputNoteCommitment::from)); + + let account_update = TxAccountUpdate::new( self.account_id, self.initial_account_commitment, self.final_account_commitment, Word::empty(), + AccountUpdateDetails::Private, + ) + .context("failed to build account update")?; + + ProvenTransaction::new( + account_update, + input_note_commitments, + self.output_notes.unwrap_or_default(), BlockNumber::from(0), self.ref_block_commitment.unwrap_or_default(), self.fee, self.expiration_block_num, ExecutionProof::new_dummy(), ) - .add_input_notes(self.input_notes.unwrap_or_default()) - .add_input_notes(self.nullifiers.unwrap_or_default()) - .add_output_notes(self.output_notes.unwrap_or_default()) - .build() .context("failed to build proven transaction") } } diff --git a/crates/miden-testing/src/kernel_tests/block/header_errors.rs b/crates/miden-testing/src/kernel_tests/block/header_errors.rs index e3d3864f3f..37ae0d579f 100644 --- a/crates/miden-testing/src/kernel_tests/block/header_errors.rs +++ 
b/crates/miden-testing/src/kernel_tests/block/header_errors.rs @@ -17,7 +17,12 @@ use miden_protocol::batch::ProvenBatch; use miden_protocol::block::{BlockInputs, BlockNumber, ProposedBlock}; use miden_protocol::errors::{AccountTreeError, NullifierTreeError, ProposedBlockError}; use miden_protocol::note::NoteType; -use miden_protocol::transaction::ProvenTransactionBuilder; +use miden_protocol::transaction::{ + InputNoteCommitment, + OutputNote, + ProvenTransaction, + TxAccountUpdate, +}; use miden_protocol::vm::ExecutionProof; use miden_standards::testing::account_component::{IncrNonceAuthComponent, MockAccountComponent}; use miden_standards::testing::mock_account::MockAccountExt; @@ -325,7 +330,7 @@ async fn block_building_fails_on_creating_account_with_existing_account_id_prefi // The witness should be for the **existing** account, because that's the one that exists in // the tree and is therefore in the same SMT leaf that we would insert the new ID into. assert_eq!(witness.id(), existing_id); - assert_eq!(witness.state_commitment(), existing_account.commitment()); + assert_eq!(witness.state_commitment(), existing_account.to_commitment()); let block = mock_chain.propose_block(batches).context("failed to propose block")?; @@ -383,19 +388,25 @@ async fn block_building_fails_on_creating_account_with_duplicate_account_id_pref let [tx0, tx1] = [(id0, [0, 0, 0, 1u32]), (id1, [0, 0, 0, 2u32])].map(|(id, final_state_comm)| { - ProvenTransactionBuilder::new( + let account_update = TxAccountUpdate::new( id, Word::empty(), Word::from(final_state_comm), Word::empty(), + AccountUpdateDetails::Private, + ) + .context("failed to build account update") + .unwrap(); + ProvenTransaction::new( + account_update, + Vec::::new(), + Vec::::new(), genesis_block.block_num(), genesis_block.commitment(), FungibleAsset::mock(500).unwrap_fungible(), BlockNumber::from(u32::MAX), ExecutionProof::new_dummy(), ) - .account_update_details(AccountUpdateDetails::Private) - .build() .context("failed 
to build proven transaction") .unwrap() }); diff --git a/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs b/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs index f7fea9e494..5e537ed9ba 100644 --- a/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs +++ b/crates/miden-testing/src/kernel_tests/block/proposed_block_errors.rs @@ -3,14 +3,14 @@ use std::collections::BTreeMap; use std::vec::Vec; use assert_matches::assert_matches; -use miden_processor::crypto::MerklePath; +use miden_processor::crypto::merkle::MerklePath; use miden_protocol::MAX_BATCHES_PER_BLOCK; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::{BlockInputs, BlockNumber, ProposedBlock}; use miden_protocol::crypto::merkle::SparseMerklePath; use miden_protocol::errors::ProposedBlockError; use miden_protocol::note::{NoteAttachment, NoteInclusionProof, NoteType}; -use miden_standards::note::create_p2id_note; +use miden_standards::note::P2idNote; use miden_tx::LocalTransactionProver; use crate::kernel_tests::block::utils::MockChainBlockExt; @@ -348,7 +348,7 @@ async fn proposed_block_fails_on_invalid_proof_or_missing_note_inclusion_referen let mut builder = MockChain::builder(); let account0 = builder.add_existing_mock_account(Auth::IncrNonce)?; let account1 = builder.add_existing_mock_account(Auth::IncrNonce)?; - let p2id_note = create_p2id_note( + let p2id_note = P2idNote::create( account0.id(), account1.id(), vec![], @@ -652,8 +652,8 @@ async fn proposed_block_fails_on_inconsistent_account_state_transition() -> anyh state_commitment, remaining_state_commitments } if account_id == account.id() && - state_commitment == executed_tx0.final_account().commitment() && - remaining_state_commitments == [executed_tx2.initial_account().commitment()] + state_commitment == executed_tx0.final_account().to_commitment() && + remaining_state_commitments == [executed_tx2.initial_account().to_commitment()] ); Ok(()) diff --git 
a/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs b/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs index cce09fcaaa..46cd8eb3a6 100644 --- a/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs +++ b/crates/miden-testing/src/kernel_tests/block/proposed_block_success.rs @@ -4,14 +4,14 @@ use std::vec::Vec; use anyhow::Context; use assert_matches::assert_matches; +use miden_protocol::Felt; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{Account, AccountId, AccountStorageMode}; use miden_protocol::asset::FungibleAsset; use miden_protocol::block::{BlockInputs, ProposedBlock}; use miden_protocol::note::{Note, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_SENDER; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote, TransactionHeader}; -use miden_protocol::{Felt, FieldElement}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote, TransactionHeader}; use miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::note::NoteBuilder; use miden_tx::LocalTransactionProver; @@ -171,7 +171,7 @@ async fn proposed_block_aggregates_account_state_transition() -> anyhow::Result< assert_matches!(account_update.details(), AccountUpdateDetails::Delta(delta) => { assert_eq!(delta.vault().fungible().num_assets(), 1); - assert_eq!(delta.vault().fungible().amount(&asset.unwrap_fungible().faucet_id()).unwrap(), 300); + assert_eq!(delta.vault().fungible().amount(&asset.unwrap_fungible().vault_key()).unwrap(), 300); }); Ok(()) @@ -279,8 +279,8 @@ async fn noop_tx_and_state_updating_tx_against_same_account_in_same_block() -> a NoteBuilder::new(ACCOUNT_ID_SENDER.try_into().unwrap(), &mut rand::rng()).build()?; let noop_note1 = NoteBuilder::new(ACCOUNT_ID_SENDER.try_into().unwrap(), &mut rand::rng()).build()?; - builder.add_output_note(OutputNote::Full(noop_note0.clone())); - 
builder.add_output_note(OutputNote::Full(noop_note1.clone())); + builder.add_output_note(RawOutputNote::Full(noop_note0.clone())); + builder.add_output_note(RawOutputNote::Full(noop_note1.clone())); let mut chain = builder.build()?; let noop_tx = generate_conditional_tx(&mut chain, account0.id(), noop_note0, false).await; @@ -289,12 +289,15 @@ async fn noop_tx_and_state_updating_tx_against_same_account_in_same_block() -> a generate_conditional_tx(&mut chain, account0.clone(), noop_note1, true).await; // sanity check: NOOP transaction's init and final commitment should be the same. - assert_eq!(noop_tx.initial_account().commitment(), noop_tx.final_account().commitment()); + assert_eq!( + noop_tx.initial_account().to_commitment(), + noop_tx.final_account().to_commitment() + ); // sanity check: State-updating transaction's init and final commitment should *not* be the // same. assert_ne!( - state_updating_tx.initial_account().commitment(), - state_updating_tx.final_account().commitment() + state_updating_tx.initial_account().to_commitment(), + state_updating_tx.final_account().to_commitment() ); let tx0 = LocalTransactionProver::default().prove_dummy(noop_tx)?; @@ -309,7 +312,7 @@ async fn noop_tx_and_state_updating_tx_against_same_account_in_same_block() -> a let block = ProposedBlock::new(block_inputs, batches.clone())?; let (_, update) = block.updated_accounts().iter().next().unwrap(); - assert_eq!(update.initial_state_commitment(), account0.commitment()); + assert_eq!(update.initial_state_commitment(), account0.to_commitment()); assert_eq!(update.final_state_commitment(), tx1.account_update().final_state_commitment()); Ok(()) @@ -330,11 +333,11 @@ async fn generate_conditional_tx( modify_storage: bool, ) -> ExecutedTransaction { let auth_args = [ + Felt::new(97), + Felt::new(98), + Felt::new(99), // increment nonce if modify_storage is true if modify_storage { Felt::ONE } else { Felt::ZERO }, - Felt::new(99), - Felt::new(98), - Felt::new(97), ]; let tx_context = 
chain diff --git a/crates/miden-testing/src/kernel_tests/block/proven_block_success.rs b/crates/miden-testing/src/kernel_tests/block/proven_block_success.rs index 573b5e81d8..19aaea3df2 100644 --- a/crates/miden-testing/src/kernel_tests/block/proven_block_success.rs +++ b/crates/miden-testing/src/kernel_tests/block/proven_block_success.rs @@ -10,7 +10,7 @@ use miden_protocol::block::{BlockInputs, BlockNoteIndex, BlockNoteTree, Proposed use miden_protocol::crypto::merkle::smt::Smt; use miden_protocol::note::{NoteAttachment, NoteType}; use miden_protocol::transaction::InputNoteCommitment; -use miden_standards::note::create_p2id_note; +use miden_standards::note::P2idNote; use crate::kernel_tests::block::utils::MockChainBlockExt; use crate::utils::create_p2any_note; @@ -32,7 +32,7 @@ async fn proven_block_success() -> anyhow::Result<()> { let account2 = builder.add_existing_mock_account_with_assets(Auth::IncrNonce, [asset])?; let account3 = builder.add_existing_mock_account_with_assets(Auth::IncrNonce, [asset])?; - let output_note0 = create_p2id_note( + let output_note0 = P2idNote::create( account0.id(), account0.id(), vec![asset], @@ -40,7 +40,7 @@ async fn proven_block_success() -> anyhow::Result<()> { NoteAttachment::default(), builder.rng_mut(), )?; - let output_note1 = create_p2id_note( + let output_note1 = P2idNote::create( account1.id(), account1.id(), vec![asset], @@ -48,7 +48,7 @@ async fn proven_block_success() -> anyhow::Result<()> { NoteAttachment::default(), builder.rng_mut(), )?; - let output_note2 = create_p2id_note( + let output_note2 = P2idNote::create( account2.id(), account2.id(), vec![asset], @@ -56,7 +56,7 @@ async fn proven_block_success() -> anyhow::Result<()> { NoteAttachment::default(), builder.rng_mut(), )?; - let output_note3 = create_p2id_note( + let output_note3 = P2idNote::create( account3.id(), account3.id(), vec![asset], diff --git a/crates/miden-testing/src/kernel_tests/tx/mod.rs b/crates/miden-testing/src/kernel_tests/tx/mod.rs index 
c3a9f84f77..e8fa628883 100644 --- a/crates/miden-testing/src/kernel_tests/tx/mod.rs +++ b/crates/miden-testing/src/kernel_tests/tx/mod.rs @@ -1,6 +1,6 @@ use anyhow::Context; -use miden_processor::ContextId; -use miden_processor::fast::ExecutionOutput; +use miden_processor::{ContextId, ExecutionOutput}; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::note::{Note, NoteType}; @@ -19,9 +19,11 @@ mod test_account; mod test_account_delta; mod test_account_interface; mod test_active_note; +mod test_array; mod test_asset; mod test_asset_vault; mod test_auth; +mod test_callbacks; mod test_epilogue; mod test_faucet; mod test_fee; @@ -52,12 +54,8 @@ pub trait ExecutionOutputExt { /// Reads an element from the stack. fn get_stack_element(&self, idx: usize) -> Felt; - /// Reads a [`Word`] from the stack in big-endian (reversed) order. - fn get_stack_word_be(&self, index: usize) -> Word; - - /// Reads a [`Word`] from the stack in little-endian (memory) order. - #[allow(dead_code)] - fn get_stack_word_le(&self, index: usize) -> Word; + /// Reads a [`Word`] from the stack in little-endian order. + fn get_stack_word(&self, index: usize) -> Word; /// Reads the [`Word`] of the input note's memory identified by the index at the provided /// `offset`. 
@@ -70,10 +68,9 @@ impl ExecutionOutputExt for ExecutionOutput { fn get_kernel_mem_word(&self, addr: u32) -> Word { let tx_kernel_context = ContextId::root(); let clk = 0u32; - let err_ctx = (); self.memory - .read_word(tx_kernel_context, Felt::from(addr), clk.into(), &err_ctx) + .read_word(tx_kernel_context, Felt::from(addr), clk.into()) .expect("expected address to be word-aligned") } @@ -81,20 +78,15 @@ impl ExecutionOutputExt for ExecutionOutput { *self.stack.get(index).expect("index must be in bounds") } - fn get_stack_word_be(&self, index: usize) -> Word { - self.stack.get_stack_word_be(index).expect("index must be in bounds") - } - - fn get_stack_word_le(&self, index: usize) -> Word { - self.stack.get_stack_word_le(index).expect("index must be in bounds") + fn get_stack_word(&self, index: usize) -> Word { + self.stack.get_word(index).expect("index must be in bounds") } fn get_kernel_mem_element(&self, addr: u32) -> Felt { let tx_kernel_context = ContextId::root(); - let err_ctx = (); self.memory - .read_element(tx_kernel_context, Felt::from(addr), &err_ctx) + .read_element(tx_kernel_context, Felt::from(addr)) .expect("address converted from u32 should be in bounds") } } @@ -146,7 +138,9 @@ fn setup_test() -> anyhow::Result { ); let account = builder.add_existing_wallet_with_assets( - crate::Auth::BasicAuth, + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, [fungible_asset_0_double_amount, fungible_asset_1], )?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account.rs b/crates/miden-testing/src/kernel_tests/tx/test_account.rs index d0146be16b..58a8d2b725 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account.rs @@ -4,7 +4,11 @@ use std::collections::BTreeMap; use anyhow::Context; use assert_matches::assert_matches; +use miden_crypto::rand::test_utils::rand_value; use miden_processor::{ExecutionError, Word}; +use 
miden_protocol::LexicographicWord; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{ Account, @@ -16,6 +20,7 @@ use miden_protocol::account::{ AccountStorageMode, AccountType, StorageMap, + StorageMapKey, StorageSlot, StorageSlotContent, StorageSlotDelta, @@ -23,9 +28,10 @@ use miden_protocol::account::{ StorageSlotName, StorageSlotType, }; -use miden_protocol::assembly::diagnostics::{IntoDiagnostic, NamedSource, Report, WrapErr, miette}; +use miden_protocol::assembly::diagnostics::NamedSource; +use miden_protocol::assembly::diagnostics::reporting::PrintDiagnostic; use miden_protocol::assembly::{DefaultSourceManager, Library}; -use miden_protocol::asset::{Asset, FungibleAsset}; +use miden_protocol::asset::{Asset, AssetCallbacks, FungibleAsset}; use miden_protocol::errors::tx_kernel::{ ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO, ERR_ACCOUNT_ID_SUFFIX_MOST_SIGNIFICANT_BIT_MUST_BE_ZERO, @@ -34,8 +40,8 @@ use miden_protocol::errors::tx_kernel::{ ERR_ACCOUNT_NONCE_AT_MAX, ERR_ACCOUNT_NONCE_CAN_ONLY_BE_INCREMENTED_ONCE, ERR_ACCOUNT_UNKNOWN_STORAGE_SLOT_NAME, - ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED, }; +use miden_protocol::field::PrimeField64; use miden_protocol::note::NoteType; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PRIVATE_NON_FUNGIBLE_FAUCET, @@ -46,16 +52,15 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_SENDER, }; use miden_protocol::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0, MOCK_VALUE_SLOT1}; -use miden_protocol::transaction::{OutputNote, TransactionKernel}; +use miden_protocol::transaction::{RawOutputNote, TransactionKernel}; use miden_protocol::utils::sync::LazyLock; -use miden_protocol::{LexicographicWord, StarkField}; +use miden_standards::account::faucets::BasicFungibleFaucet; use miden_standards::code_builder::CodeBuilder; use 
miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::mock_account::MockAccountExt; use miden_tx::LocalTransactionProver; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use winter_rand_utils::rand_value; use super::{Felt, StackInputs, ZERO}; use crate::executor::CodeExecutor; @@ -63,6 +68,7 @@ use crate::kernel_tests::tx::ExecutionOutputExt; use crate::utils::create_public_p2any_note; use crate::{ Auth, + ExecError, MockChain, TransactionContextBuilder, TxContextInput, @@ -73,16 +79,16 @@ use crate::{ // ================================================================================================ #[tokio::test] -pub async fn compute_commitment() -> miette::Result<()> { +pub async fn compute_commitment() -> anyhow::Result<()> { let account = Account::mock(ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); // Precompute a commitment to a changed account so we can assert it during tx script execution. let mut account_clone = account.clone(); - let key = Word::from([1, 2, 3, 4u32]); + let key = StorageMapKey::from_array([1, 2, 3, 4]); let value = Word::from([2, 3, 4, 5u32]); let mock_map_slot = &*MOCK_MAP_SLOT; account_clone.storage_mut().set_map_item(mock_map_slot, key, value).unwrap(); - let expected_commitment = account_clone.commitment(); + let expected_commitment = account_clone.to_commitment(); let tx_script = format!( r#" @@ -113,7 +119,7 @@ pub async fn compute_commitment() -> miette::Result<()> { push.{value} push.{key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE, pad(7)] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE, pad(7)] call.mock_account::set_map_item dropw dropw dropw dropw # => [STORAGE_COMMITMENT0] @@ -144,19 +150,13 @@ pub async fn compute_commitment() -> miette::Result<()> { ); let tx_context_builder = TransactionContextBuilder::new(account); - let tx_script = CodeBuilder::with_mock_libraries() - .compile_tx_script(tx_script) - 
.into_diagnostic()?; - let tx_context = tx_context_builder - .tx_script(tx_script) - .build() - .map_err(|err| miette::miette!("{err}"))?; + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(tx_script)?; + let tx_context = tx_context_builder.tx_script(tx_script).build()?; tx_context .execute() .await - .into_diagnostic() - .wrap_err("failed to execute transaction")?; + .map_err(|err| anyhow::anyhow!("failed to execute transaction: {err}"))?; Ok(()) } @@ -165,7 +165,7 @@ pub async fn compute_commitment() -> miette::Result<()> { // ================================================================================================ #[tokio::test] -async fn test_account_type() -> miette::Result<()> { +async fn test_account_type() -> anyhow::Result<()> { let procedures = vec![ ("is_fungible_faucet", AccountType::FungibleFaucet), ("is_non_fungible_faucet", AccountType::NonFungibleFaucet), @@ -197,14 +197,12 @@ async fn test_account_type() -> miette::Result<()> { ); let exec_output = CodeExecutor::with_default_host() - .stack_inputs( - StackInputs::new(vec![account_id.prefix().as_felt()]).into_diagnostic()?, - ) + .stack_inputs(StackInputs::new(&[account_id.prefix().as_felt()])?) 
.run(&code) .await?; let type_matches = account_id.account_type() == expected_type; - let expected_result = Felt::from(type_matches); + let expected_result = if type_matches { Felt::ONE } else { Felt::ZERO }; has_type |= type_matches; assert_eq!( @@ -225,7 +223,7 @@ async fn test_account_type() -> miette::Result<()> { } #[tokio::test] -async fn test_account_validate_id() -> miette::Result<()> { +async fn test_account_validate_id() -> anyhow::Result<()> { let test_cases = [ (ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, None), (ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, None), @@ -256,8 +254,8 @@ async fn test_account_validate_id() -> miette::Result<()> { for (account_id, expected_error) in test_cases.iter() { // Manually split the account ID into prefix and suffix since we can't use AccountId methods // on invalid ids. - let prefix = Felt::try_from((account_id / (1u128 << 64)) as u64).unwrap(); - let suffix = Felt::try_from((account_id % (1u128 << 64)) as u64).unwrap(); + let prefix = Felt::try_from((account_id / (1u128 << 64)) as u64)?; + let suffix = Felt::try_from((account_id % (1u128 << 64)) as u64)?; let code = " use $kernel::account_id @@ -268,30 +266,44 @@ async fn test_account_validate_id() -> miette::Result<()> { "; let result = CodeExecutor::with_default_host() - .stack_inputs(StackInputs::new(vec![suffix, prefix]).unwrap()) + .stack_inputs(StackInputs::new(&[suffix, prefix]).unwrap()) .run(code) .await; - match (result, expected_error) { + match (result.map_err(ExecError::into_execution_error), expected_error) { (Ok(_), None) => (), (Ok(_), Some(err)) => { - miette::bail!("expected error {err} but validation was successful") + anyhow::bail!("expected error {err} but validation was successful") }, - (Err(ExecutionError::FailedAssertion { err_code, err_msg, .. }), Some(err)) => { + ( + Err(ExecutionError::OperationError { + err: + miden_processor::operation::OperationError::FailedAssertion { + err_code, + err_msg, + }, + .. 
+ }), + Some(err), + ) => { if err_code != err.code() { - miette::bail!( + anyhow::bail!( "actual error \"{}\" (code: {err_code}) did not match expected error {err}", err_msg.as_ref().map(AsRef::as_ref).unwrap_or("") ); } }, - // Construct Reports to get the diagnostics-based error messages. (Err(err), None) => { - return Err(Report::from(err) - .context("validation is supposed to succeed but error occurred")); + return Err(anyhow::anyhow!( + "validation is supposed to succeed but error occurred: {}", + PrintDiagnostic::new(&err) + )); }, (Err(err), Some(_)) => { - return Err(Report::from(err).context("unexpected different error than expected")); + return Err(anyhow::anyhow!( + "unexpected different error than expected: {}", + PrintDiagnostic::new(&err) + )); }, } } @@ -300,7 +312,7 @@ async fn test_account_validate_id() -> miette::Result<()> { } #[tokio::test] -async fn test_is_faucet_procedure() -> miette::Result<()> { +async fn test_is_faucet_procedure() -> anyhow::Result<()> { let test_cases = [ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, @@ -327,10 +339,7 @@ async fn test_is_faucet_procedure() -> miette::Result<()> { prefix = account_id.prefix().as_felt(), ); - let exec_output = CodeExecutor::with_default_host() - .run(&code) - .await - .wrap_err("failed to execute is_faucet procedure")?; + let exec_output = CodeExecutor::with_default_host().run(&code).await?; let is_faucet = account_id.is_faucet(); assert_eq!( @@ -348,7 +357,7 @@ async fn test_is_faucet_procedure() -> miette::Result<()> { // TODO: update this test once the ability to change the account code will be implemented #[tokio::test] -pub async fn test_compute_code_commitment() -> miette::Result<()> { +pub async fn test_compute_code_commitment() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build().unwrap(); let account = tx_context.account(); @@ -377,7 +386,7 @@ pub async fn 
test_compute_code_commitment() -> miette::Result<()> { // ================================================================================================ #[tokio::test] -async fn test_get_item() -> miette::Result<()> { +async fn test_get_item() -> anyhow::Result<()> { for storage_item in [AccountStorage::mock_value_slot0(), AccountStorage::mock_value_slot1()] { let tx_context = TransactionContextBuilder::with_existing_mock_account().build().unwrap(); @@ -393,7 +402,7 @@ async fn test_get_item() -> miette::Result<()> { # push the account storage item index push.SLOT_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix] + # => [slot_id_suffix, slot_id_prefix] # assert the item value is correct exec.account::get_item @@ -412,7 +421,7 @@ async fn test_get_item() -> miette::Result<()> { } #[tokio::test] -async fn test_get_map_item() -> miette::Result<()> { +async fn test_get_map_item() -> anyhow::Result<()> { let slot = AccountStorage::mock_map_slot(); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -459,7 +468,7 @@ async fn test_get_map_item() -> miette::Result<()> { } #[tokio::test] -async fn test_get_storage_slot_type() -> miette::Result<()> { +async fn test_get_native_storage_slot_type() -> anyhow::Result<()> { for slot_name in [ AccountStorage::mock_value_slot0().name(), AccountStorage::mock_value_slot1().name(), @@ -487,7 +496,7 @@ async fn test_get_storage_slot_type() -> miette::Result<()> { push.{slot_idx} # get the type of the respective storage slot - exec.account::get_storage_slot_type + exec.account::get_native_storage_slot_type # truncate the stack swap drop @@ -500,28 +509,16 @@ async fn test_get_storage_slot_type() -> miette::Result<()> { assert_eq!( slot.slot_type(), StorageSlotType::try_from( - u8::try_from(exec_output.get_stack_element(0).as_int()).unwrap() + u8::try_from(exec_output.get_stack_element(0).as_canonical_u64()).unwrap() ) .unwrap() ); assert_eq!(exec_output.get_stack_element(1), 
ZERO, "the rest of the stack is empty"); assert_eq!(exec_output.get_stack_element(2), ZERO, "the rest of the stack is empty"); assert_eq!(exec_output.get_stack_element(3), ZERO, "the rest of the stack is empty"); - assert_eq!( - exec_output.get_stack_word_be(4), - Word::empty(), - "the rest of the stack is empty" - ); - assert_eq!( - exec_output.get_stack_word_be(8), - Word::empty(), - "the rest of the stack is empty" - ); - assert_eq!( - exec_output.get_stack_word_be(12), - Word::empty(), - "the rest of the stack is empty" - ); + assert_eq!(exec_output.get_stack_word(4), Word::empty(), "the rest of the stack is empty"); + assert_eq!(exec_output.get_stack_word(8), Word::empty(), "the rest of the stack is empty"); + assert_eq!(exec_output.get_stack_word(12), Word::empty(), "the rest of the stack is empty"); } Ok(()) @@ -537,8 +534,10 @@ async fn test_account_get_item_fails_on_unknown_slot() -> anyhow::Result<()> { let account_empty_storage = builder.add_existing_mock_account(Auth::IncrNonce)?; assert_eq!(account_empty_storage.storage().num_slots(), 0); - let account_non_empty_storage = builder.add_existing_mock_account(Auth::BasicAuth)?; - assert_eq!(account_non_empty_storage.storage().num_slots(), 1); + let account_non_empty_storage = builder.add_existing_mock_account(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + assert_eq!(account_non_empty_storage.storage().num_slots(), 2); let chain = builder.build()?; @@ -573,38 +572,8 @@ async fn test_account_get_item_fails_on_unknown_slot() -> anyhow::Result<()> { Ok(()) } -/// Tests that accessing the protocol-reserved faucet metadata slot fails with the expected error -/// message. 
#[tokio::test] -async fn test_account_set_item_fails_on_reserved_faucet_metadata_slot() -> anyhow::Result<()> { - let code = r#" - use miden::protocol::native_account - - const FAUCET_SYSDATA_SLOT=word("miden::protocol::faucet::sysdata") - - begin - push.FAUCET_SYSDATA_SLOT[0..2] - exec.native_account::set_item - end - "#; - let tx_script = CodeBuilder::default().compile_tx_script(code)?; - - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - Felt::from(0u32), - ) - .tx_script(tx_script) - .build() - .unwrap(); - - let result = tx_context.execute().await; - assert_transaction_executor_error!(result, ERR_FAUCET_STORAGE_DATA_SLOT_IS_RESERVED); - - Ok(()) -} - -#[tokio::test] -async fn test_is_slot_id_lt() -> miette::Result<()> { +async fn test_is_slot_id_lt() -> anyhow::Result<()> { // Note that the slot IDs derived from the names are essentially randomly sorted, so these cover // "less than" and "greater than" outcomes. let mut test_cases = (0..100) @@ -641,8 +610,8 @@ async fn test_is_slot_id_lt() -> miette::Result<()> { use $kernel::account begin - push.{curr_suffix}.{curr_prefix}.{prev_suffix}.{prev_prefix} - # => [prev_slot_id_prefix, prev_slot_id_suffix, curr_slot_id_prefix, curr_slot_id_suffix] + push.{curr_prefix}.{curr_suffix}.{prev_prefix}.{prev_suffix} + # => [prev_slot_id_suffix, prev_slot_id_prefix, curr_slot_id_suffix, curr_slot_id_prefix] exec.account::is_slot_id_lt # => [is_slot_id_lt] @@ -686,7 +655,7 @@ async fn test_set_item() -> anyhow::Result<()> { # set the storage item push.{new_value} push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix, NEW_VALUE] + # => [slot_id_suffix, slot_id_prefix, NEW_VALUE] exec.account::set_item @@ -696,7 +665,7 @@ async fn test_set_item() -> anyhow::Result<()> { # assert new value has been correctly set push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix] + # => [slot_id_suffix, slot_id_prefix] exec.account::get_item push.{new_value} 
@@ -711,9 +680,11 @@ async fn test_set_item() -> anyhow::Result<()> { } #[tokio::test] -async fn test_set_map_item() -> miette::Result<()> { - let (new_key, new_value) = - (Word::from([109, 110, 111, 112u32]), Word::from([9, 10, 11, 12u32])); +async fn test_set_map_item() -> anyhow::Result<()> { + let (new_key, new_value) = ( + StorageMapKey::from_array([109, 110, 111, 112u32]), + Word::from([9, 10, 11, 12u32]), + ); let slot = AccountStorage::mock_map_slot(); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) @@ -744,11 +715,11 @@ async fn test_set_map_item() -> miette::Result<()> { # double check that the storage slot is indeed the new map push.SLOT_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, OLD_VALUE] + # => [slot_id_suffix, slot_id_prefix, OLD_VALUE] # pad the stack repeat.14 push.0 movdn.2 end - # => [slot_id_prefix, slot_id_suffix, pad(14), OLD_VALUE] + # => [slot_id_suffix, slot_id_prefix, pad(14), OLD_VALUE] call.mock_account::get_item # => [MAP_ROOT, pad(12), OLD_VALUE] @@ -772,7 +743,7 @@ async fn test_set_map_item() -> miette::Result<()> { assert_eq!( new_storage_map.root(), - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), "get_item should return the updated root", ); @@ -782,7 +753,7 @@ async fn test_set_map_item() -> miette::Result<()> { }; assert_eq!( old_value_for_key, - exec_output.get_stack_word_be(4), + exec_output.get_stack_word(4), "set_map_item must return the old value for the key (empty word for new key)", ); @@ -849,16 +820,16 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { let init_storage_commitment = account_storage.to_commitment(); let mock_value_slot0 = &*MOCK_VALUE_SLOT0; + let value_slot0 = Word::from([9, 10, 11, 12u32]); + let mock_map_slot = &*MOCK_MAP_SLOT; + let map_key = StorageMapKey::from_array([101, 102, 103, 104u32]); + let map_value = Word::from([5, 6, 7, 8u32]); - account_storage.set_item(mock_value_slot0, [9, 10, 11, 12].map(Felt::new).into())?; + 
account_storage.set_item(mock_value_slot0, value_slot0)?; let storage_commitment_value = account_storage.to_commitment(); - account_storage.set_map_item( - mock_map_slot, - [101, 102, 103, 104].map(Felt::new).into(), - [5, 6, 7, 8].map(Felt::new).into(), - )?; + account_storage.set_map_item(mock_map_slot, map_key, map_value)?; let storage_commitment_map = account_storage.to_commitment(); let code = format!( @@ -878,7 +849,7 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { assert_eqw.err="storage commitment at the beginning of the transaction is not equal to the expected one" # update the value storage slot - push.9.10.11.12 + push.{value_slot0} push.MOCK_VALUE_SLOT0[0..2] call.mock_account::set_item dropw drop # => [] @@ -895,9 +866,10 @@ async fn test_compute_storage_commitment() -> anyhow::Result<()> { assert_eqw.err="storage commitment should remain the same" # update the map storage slot - push.5.6.7.8.101.102.103.104 + push.{map_value} + push.{map_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.mock_account::set_map_item dropw dropw # => [] @@ -929,7 +901,7 @@ async fn prove_account_creation_with_non_empty_storage() -> anyhow::Result<()> { let slot1 = StorageSlot::with_value(slot_name1.clone(), Word::from([10, 20, 30, 40u32])); let mut map_entries = Vec::new(); for _ in 0..10 { - map_entries.push((rand_value::(), rand_value::())); + map_entries.push((StorageMapKey::from_raw(rand_value::()), rand_value::())); } let map_slot = StorageSlot::with_map(slot_name2.clone(), StorageMap::with_entries(map_entries.clone())?); @@ -979,7 +951,7 @@ async fn prove_account_creation_with_non_empty_storage() -> anyhow::Result<()> { assert!(tx.account_delta().vault().is_empty()); assert_eq!(tx.final_account().nonce(), Felt::new(1)); - let proven_tx = LocalTransactionProver::default().prove(tx.clone())?; + let proven_tx = 
LocalTransactionProver::default().prove(tx.clone()).await?; // The delta should be present on the proven tx. let AccountUpdateDetails::Delta(delta) = proven_tx.account_update().details() else { @@ -1039,8 +1011,10 @@ async fn test_get_vault_root() -> anyhow::Result<()> { exec.prologue::prepare_transaction # add an asset to the account - push.{fungible_asset} - call.mock_account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} + call.mock_account::add_asset + dropw dropw # => [] # get the current vault root @@ -1049,7 +1023,8 @@ async fn test_get_vault_root() -> anyhow::Result<()> { assert_eqw.err="vault root mismatch" end "#, - fungible_asset = Word::from(&fungible_asset), + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), expected_vault_root = &account.vault().root(), ); tx_context.execute_code(&code).await?; @@ -1078,8 +1053,12 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { let fungible_asset_for_account = Asset::Fungible( FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, ); - let account = builder - .add_existing_wallet_with_assets(crate::Auth::BasicAuth, [fungible_asset_for_account])?; + let account = builder.add_existing_wallet_with_assets( + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [fungible_asset_for_account], + )?; let fungible_asset_for_note_existing = Asset::Fungible( FungibleAsset::new(faucet_existing_asset, 7).context("fungible_asset_0 is invalid")?, @@ -1119,17 +1098,17 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { begin # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, 
faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1173,17 +1152,17 @@ async fn test_get_init_balance_addition() -> anyhow::Result<()> { begin # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1226,8 +1205,12 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { let fungible_asset_for_account = Asset::Fungible( FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, ); - let account = builder - .add_existing_wallet_with_assets(crate::Auth::BasicAuth, [fungible_asset_for_account])?; + let account = builder.add_existing_wallet_with_assets( + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [fungible_asset_for_account], + )?; let fungible_asset_for_note_existing = Asset::Fungible( FungibleAsset::new(faucet_existing_asset, 7).context("fungible_asset_0 is invalid")?, @@ -1250,42 +1233,28 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { use miden::standards::wallets::basic->wallet use mock::util - # Inputs: [ASSET, note_idx] - # Outputs: [ASSET, note_idx] - proc move_asset_to_note - # pad the stack before call - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw 
swapdw - # => [ASSET, note_idx, pad(11)] - - call.wallet::move_asset_to_note - # => [ASSET, note_idx, pad(11)] - - # remove excess PADs from the stack - swapdw dropw dropw swapw movdn.7 drop drop drop - # => [ASSET, note_idx] - end - begin # create random note and move the asset into it exec.util::create_default_note # => [note_idx] - push.{REMOVED_ASSET} - exec.move_asset_to_note dropw drop + push.{REMOVED_ASSET_VALUE} + push.{REMOVED_ASSET_KEY} + exec.util::move_asset_to_note # => [] # push faucet ID prefix and suffix - push.{suffix}.{prefix} - # => [faucet_id_prefix, faucet_id_suffix] + push.{prefix}.{suffix} + # => [faucet_id_suffix, faucet_id_prefix] # get the current asset balance dup.1 dup.1 exec.active_account::get_balance - # => [final_balance, faucet_id_prefix, faucet_id_suffix] + # => [final_balance, faucet_id_suffix, faucet_id_prefix] # assert final balance is correct push.{final_balance} assert_eq.err="final balance is incorrect" - # => [faucet_id_prefix, faucet_id_suffix] + # => [faucet_id_suffix, faucet_id_prefix] # get the initial asset balance exec.active_account::get_initial_balance @@ -1296,7 +1265,8 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { assert_eq.err="initial balance is incorrect" end "#, - REMOVED_ASSET = Word::from(fungible_asset_for_note_existing), + REMOVED_ASSET_KEY = fungible_asset_for_note_existing.to_key_word(), + REMOVED_ASSET_VALUE = fungible_asset_for_note_existing.to_value_word(), suffix = faucet_existing_asset.suffix(), prefix = faucet_existing_asset.prefix().as_felt(), final_balance = @@ -1308,7 +1278,7 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[])? 
.tx_script(tx_script) - .extend_expected_output_notes(vec![OutputNote::Full(expected_output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) .build()?; tx_context.execute().await?; @@ -1316,11 +1286,98 @@ async fn test_get_init_balance_subtraction() -> anyhow::Result<()> { Ok(()) } +/// This test checks the correctness of the `miden::protocol::active_account::get_initial_asset` +/// procedure creating a note which removes an asset from the account vault. +/// +/// As part of the test pipeline it also checks the correctness of the +/// `miden::protocol::active_account::get_asset` procedure. +#[tokio::test] +async fn test_get_init_asset() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let faucet_existing_asset = + AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).context("id should be valid")?; + + let fungible_asset_for_account = Asset::Fungible( + FungibleAsset::new(faucet_existing_asset, 10).context("fungible_asset_0 is invalid")?, + ); + let account = builder.add_existing_wallet_with_assets( + crate::Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [fungible_asset_for_account], + )?; + + let fungible_asset_for_note_existing = Asset::Fungible( + FungibleAsset::new(faucet_existing_asset, 7).context("fungible_asset_0 is invalid")?, + ); + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let final_asset = fungible_asset_for_account + .unwrap_fungible() + .sub(fungible_asset_for_note_existing.unwrap_fungible())?; + + let expected_output_note = + create_public_p2any_note(ACCOUNT_ID_SENDER.try_into()?, [fungible_asset_for_note_existing]); + + let remove_existing_source = format!( + r#" + use miden::protocol::active_account + use miden::standards::wallets::basic->wallet + use mock::util + + begin + # create default note and move the asset into it + exec.util::create_default_note + # => [note_idx] + + push.{REMOVED_ASSET_VALUE} + push.{ASSET_KEY} + 
exec.util::move_asset_to_note + # => [] + + # get the current asset + push.{ASSET_KEY} exec.active_account::get_asset + # => [ASSET_VALUE] + + push.{FINAL_ASSET} + assert_eqw.err="final asset is incorrect" + # => [] + + # get the initial asset + push.{ASSET_KEY} exec.active_account::get_initial_asset + # => [INITIAL_ASSET] + + push.{INITIAL_ASSET_VALUE} + assert_eqw.err="initial asset is incorrect" + end + "#, + ASSET_KEY = fungible_asset_for_note_existing.to_key_word(), + REMOVED_ASSET_VALUE = fungible_asset_for_note_existing.to_value_word(), + INITIAL_ASSET_VALUE = fungible_asset_for_account.to_value_word(), + FINAL_ASSET = final_asset.to_value_word(), + ); + + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(remove_existing_source)?; + + mock_chain + .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[])? + .tx_script(tx_script) + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) + .build()? + .execute() + .await?; + + Ok(()) +} + // PROCEDURE AUTHENTICATION TESTS // ================================================================================================ #[tokio::test] -async fn test_authenticate_and_track_procedure() -> miette::Result<()> { +async fn test_authenticate_and_track_procedure() -> anyhow::Result<()> { let mock_component = MockAccountComponent::with_empty_slots(); let account_code = AccountCode::from_components( @@ -1379,7 +1436,7 @@ async fn test_authenticate_and_track_procedure() -> miette::Result<()> { // ================================================================================================ #[tokio::test] -async fn test_was_procedure_called() -> miette::Result<()> { +async fn test_was_procedure_called() -> anyhow::Result<()> { // Create a standard account using the mock component let mock_component = MockAccountComponent::with_slots(AccountStorage::mock_storage_slots()); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) @@ -1429,9 +1486,7 @@ async 
fn test_was_procedure_called() -> miette::Result<()> { ); // Compile the transaction script using the testing assembler with mock account - let tx_script = CodeBuilder::with_mock_libraries() - .compile_tx_script(tx_script_code) - .into_diagnostic()?; + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(tx_script_code)?; // Create transaction context and execute let tx_context = TransactionContextBuilder::new(account).tx_script(tx_script).build().unwrap(); @@ -1439,8 +1494,7 @@ async fn test_was_procedure_called() -> miette::Result<()> { tx_context .execute() .await - .into_diagnostic() - .wrap_err("Failed to execute transaction")?; + .map_err(|err| anyhow::anyhow!("Failed to execute transaction: {err}"))?; Ok(()) } @@ -1451,7 +1505,8 @@ async fn test_was_procedure_called() -> miette::Result<()> { /// The call chain and dependency graph in this test is: /// `tx script -> account code -> external library` #[tokio::test] -async fn transaction_executor_account_code_using_custom_library() -> miette::Result<()> { +async fn transaction_executor_account_code_using_custom_library() -> anyhow::Result<()> { + let slot_value = Word::from([2, 3, 4, 5u32]); let external_library_code = format!( r#" use miden::protocol::native_account @@ -1459,7 +1514,7 @@ async fn transaction_executor_account_code_using_custom_library() -> miette::Res const MOCK_VALUE_SLOT0 = word("{mock_value_slot0}") pub proc external_setter - push.2.3.4.5 + push.{slot_value} push.MOCK_VALUE_SLOT0[0..2] exec.native_account::set_item dropw dropw @@ -1476,15 +1531,20 @@ async fn transaction_executor_account_code_using_custom_library() -> miette::Res let external_library_source = NamedSource::new("external_library::external_module", external_library_code); - let external_library = - TransactionKernel::assembler().assemble_library([external_library_source])?; + let external_library = TransactionKernel::assembler() + .assemble_library([external_library_source]) + .map_err(|err| { + 
anyhow::anyhow!("failed to assemble library: {}", PrintDiagnostic::new(&err)) + })?; let mut assembler: miden_protocol::assembly::Assembler = CodeBuilder::with_mock_libraries_with_source_manager(Arc::new( DefaultSourceManager::default(), )) .into(); - assembler.link_static_library(&external_library)?; + assembler.link_static_library(&external_library).map_err(|err| { + anyhow::anyhow!("failed to link static library: {}", PrintDiagnostic::new(&err)) + })?; let account_component_source = NamedSource::new("account_component::account_module", ACCOUNT_COMPONENT_CODE); @@ -1498,30 +1558,28 @@ async fn transaction_executor_account_code_using_custom_library() -> miette::Res call.account_module::custom_setter end"; - let account_component = - AccountComponent::new(account_component_lib.clone(), AccountStorage::mock_storage_slots()) - .into_diagnostic()? - .with_supports_all_types(); + let account_component = AccountComponent::new( + account_component_lib.clone(), + AccountStorage::mock_storage_slots(), + AccountComponentMetadata::mock("account_module"), + )?; // Build an existing account with nonce 1. let native_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) .with_component(account_component) - .build_existing() - .into_diagnostic()?; + .build_existing()?; let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(&account_component_lib) - .into_diagnostic()? - .compile_tx_script(tx_script_src) - .into_diagnostic()?; + .with_dynamically_linked_library(&account_component_lib)? + .compile_tx_script(tx_script_src)?; let tx_context = TransactionContextBuilder::new(native_account.clone()) .tx_script(tx_script) .build() .unwrap(); - let executed_tx = tx_context.execute().await.into_diagnostic()?; + let executed_tx = tx_context.execute().await?; // Account's initial nonce of 1 should have been incremented by 1. 
assert_eq!(executed_tx.account_delta().nonce_delta(), Felt::new(1)); @@ -1530,7 +1588,7 @@ async fn transaction_executor_account_code_using_custom_library() -> miette::Res assert_eq!(executed_tx.account_delta().storage().values().count(), 1); assert_eq!( executed_tx.account_delta().storage().get(&MOCK_VALUE_SLOT0).unwrap(), - &StorageSlotDelta::Value(Word::from([2, 3, 4, 5u32])), + &StorageSlotDelta::Value(slot_value), ); Ok(()) } @@ -1541,6 +1599,7 @@ async fn incrementing_nonce_twice_fails() -> anyhow::Result<()> { let source_code = " use miden::protocol::native_account + @auth_script pub proc auth_incr_nonce_twice exec.native_account::incr_nonce drop exec.native_account::incr_nonce drop @@ -1549,8 +1608,11 @@ async fn incrementing_nonce_twice_fails() -> anyhow::Result<()> { let faulty_auth_code = CodeBuilder::default().compile_component_code("test::faulty_auth", source_code)?; - let faulty_auth_component = - AccountComponent::new(faulty_auth_code, vec![])?.with_supports_all_types(); + let faulty_auth_component = AccountComponent::new( + faulty_auth_code, + vec![], + AccountComponentMetadata::mock("test::faulty_auth"), + )?; let account = AccountBuilder::new([5; 32]) .with_auth_component(faulty_auth_component) .with_component(MockAccountComponent::with_empty_slots()) @@ -1565,7 +1627,7 @@ async fn incrementing_nonce_twice_fails() -> anyhow::Result<()> { } #[tokio::test] -async fn test_has_procedure() -> miette::Result<()> { +async fn test_has_procedure() -> anyhow::Result<()> { // Create a standard account using the mock component let mock_component = MockAccountComponent::with_slots(AccountStorage::mock_storage_slots()); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) @@ -1603,7 +1665,7 @@ async fn test_has_procedure() -> miette::Result<()> { // Compile the transaction script using the testing assembler with mock account let tx_script = CodeBuilder::with_mock_libraries() .compile_tx_script(tx_script_code) - .into_diagnostic()?; + 
.map_err(|err| anyhow::anyhow!("{err}"))?; // Create transaction context and execute let tx_context = TransactionContextBuilder::new(account).tx_script(tx_script).build().unwrap(); @@ -1611,8 +1673,66 @@ async fn test_has_procedure() -> miette::Result<()> { tx_context .execute() .await - .into_diagnostic() - .wrap_err("Failed to execute transaction")?; + .map_err(|err| anyhow::anyhow!("Failed to execute transaction: {err}"))?; + + Ok(()) +} + +/// Tests that the `has_callbacks` faucet procedure correctly reports whether a faucet defines +/// callbacks. +/// +/// - `with_callbacks`: callback slot has a non-empty value -> returns 1 +/// - `with_empty_callback`: callback slot exists but value is the empty word -> returns 0 +/// - `without_callbacks`: no callback slot at all -> returns 0 +#[rstest::rstest] +#[case::with_callbacks( + vec![StorageSlot::with_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + Word::from([1, 2, 3, 4u32]), + )], + true, +)] +#[case::with_empty_callback( + vec![StorageSlot::with_empty_value( + AssetCallbacks::on_before_asset_added_to_account_slot().clone(), + )], + false, +)] +#[case::without_callbacks(vec![], false)] +#[tokio::test] +async fn test_faucet_has_callbacks( + #[case] callback_slots: Vec, + #[case] expected_has_callbacks: bool, +) -> anyhow::Result<()> { + let basic_faucet = BasicFungibleFaucet::new("CBK".try_into()?, 8, Felt::new(1_000_000))?; + + let account = AccountBuilder::new([1u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(AccountType::FungibleFaucet) + .with_component(basic_faucet) + .with_component(MockAccountComponent::with_slots(callback_slots)) + .with_auth_component(Auth::IncrNonce) + .build_existing()?; + + let tx_script_code = format!( + r#" + use miden::protocol::faucet + + begin + exec.faucet::has_callbacks + push.{has_callbacks} + assert_eq.err="has_callbacks returned unexpected value" + end + "#, + has_callbacks = u8::from(expected_has_callbacks) + ); + let 
tx_script = CodeBuilder::default().compile_tx_script(&tx_script_code)?; + + TransactionContextBuilder::new(account) + .tx_script(tx_script) + .build()? + .execute() + .await?; Ok(()) } @@ -1621,7 +1741,7 @@ async fn test_has_procedure() -> miette::Result<()> { // ================================================================================================ #[tokio::test] -async fn test_get_initial_item() -> miette::Result<()> { +async fn test_get_initial_item() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build().unwrap(); // Test that get_initial_item returns the initial value before any changes @@ -1671,7 +1791,7 @@ async fn test_get_initial_item() -> miette::Result<()> { } #[tokio::test] -async fn test_get_initial_map_item() -> miette::Result<()> { +async fn test_get_initial_map_item() -> anyhow::Result<()> { let map_slot = AccountStorage::mock_map_slot(); let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1759,7 +1879,7 @@ async fn incrementing_nonce_overflow_fails() -> anyhow::Result<()> { .context("failed to build account")?; // Increment the nonce to the maximum felt value. The nonce is already 1, so we increment by // modulus - 2. 
- account.increment_nonce(Felt::new(Felt::MODULUS - 2))?; + account.increment_nonce(Felt::new(Felt::ORDER_U64 - 2))?; let result = TransactionContextBuilder::new(account).build()?.execute().await; @@ -1815,7 +1935,7 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> end pub proc set_slot_content - push.5.6.7.8 + push.[5,6,7,8] push.TEST_SLOT_NAME[0..2] exec.native_account::set_item swapw dropw @@ -1836,9 +1956,12 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> impl From for AccountComponent { fn from(component: CustomComponent1) -> AccountComponent { - AccountComponent::new(COMPONENT_1_LIBRARY.clone(), vec![component.slot]) - .expect("should be valid") - .with_supports_all_types() + AccountComponent::new( + COMPONENT_1_LIBRARY.clone(), + vec![component.slot], + AccountComponentMetadata::mock("component1::interface"), + ) + .expect("should be valid") } } @@ -1846,13 +1969,17 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> impl From for AccountComponent { fn from(_component: CustomComponent2) -> AccountComponent { - AccountComponent::new(COMPONENT_2_LIBRARY.clone(), vec![]) - .expect("should be valid") - .with_supports_all_types() + AccountComponent::new( + COMPONENT_2_LIBRARY.clone(), + vec![], + AccountComponentMetadata::mock("component2::interface"), + ) + .expect("should be valid") } } - let slot = StorageSlot::with_value(TEST_SLOT_NAME.clone(), Word::from([1, 2, 3, 4u32])); + let slot_content1 = Word::from([1, 2, 3, 4u32]); + let slot = StorageSlot::with_value(TEST_SLOT_NAME.clone(), slot_content1); let account = AccountBuilder::new([42; 32]) .with_auth_component(Auth::IncrNonce) @@ -1861,22 +1988,24 @@ async fn merging_components_with_same_mast_root_succeeds() -> anyhow::Result<()> .build() .context("failed to build account")?; - let tx_script = r#" + let tx_script = format!( + r#" use component1::interface->comp1_interface use 
component2::interface->comp2_interface begin call.comp1_interface::get_slot_content - push.1.2.3.4 + push.{slot_content1} assert_eqw.err="failed to get slot content1" call.comp2_interface::set_slot_content call.comp2_interface::get_slot_content - push.5.6.7.8 + push.[5,6,7,8] assert_eqw.err="failed to get slot content2" end - "#; + "# + ); let tx_script = CodeBuilder::default() .with_dynamically_linked_library(COMPONENT_1_LIBRARY.clone())? diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs b/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs index e7e49b7efb..99c4dac8b5 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account_delta.rs @@ -3,6 +3,7 @@ use std::collections::BTreeMap; use std::string::String; use anyhow::Context; +use miden_crypto::rand::test_utils::rand_value; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{ Account, @@ -13,11 +14,18 @@ use miden_protocol::account::{ AccountStorageMode, AccountType, StorageMap, + StorageMapKey, StorageSlot, StorageSlotDelta, StorageSlotName, }; -use miden_protocol::asset::{Asset, AssetVault, FungibleAsset, NonFungibleAsset}; +use miden_protocol::asset::{ + Asset, + AssetVault, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; use miden_protocol::note::{Note, NoteTag, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, @@ -27,7 +35,6 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_SENDER, AccountIdBuilder, }; -use miden_protocol::testing::asset::NonFungibleAssetBuilder; use miden_protocol::testing::constants::{ CONSUMED_ASSET_1_AMOUNT, CONSUMED_ASSET_3_AMOUNT, @@ -37,13 +44,12 @@ use miden_protocol::testing::constants::{ }; use miden_protocol::testing::storage::{MOCK_MAP_SLOT, MOCK_VALUE_SLOT0}; use miden_protocol::transaction::TransactionScript; -use miden_protocol::{EMPTY_WORD, Felt, FieldElement, 
LexicographicWord, Word, ZERO}; +use miden_protocol::{EMPTY_WORD, Felt, LexicographicWord, Word, ZERO}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_tx::LocalTransactionProver; use rand::{Rng, SeedableRng}; use rand_chacha::ChaCha20Rng; -use winter_rand_utils::rand_value; use crate::utils::create_public_p2any_note; use crate::{Auth, MockChain, TransactionContextBuilder}; @@ -147,37 +153,37 @@ async fn storage_delta_for_value_slots() -> anyhow::Result<()> { begin push.{slot_0_tmp_value} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_0_final_value} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_1_final_value} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_2_final_value} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_3_tmp_value} push.SLOT_3_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] push.{slot_3_final_value} push.SLOT_3_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] exec.set_item # => [] end @@ -223,12 +229,12 @@ async fn storage_delta_for_value_slots() -> anyhow::Result<()> { async fn storage_delta_for_map_slots() -> anyhow::Result<()> { // Test with random keys to make sure the ordering in the MASM and Rust implementations // matches. 
- let key0 = rand_value::(); - let key1 = rand_value::(); - let key2 = rand_value::(); - let key3 = rand_value::(); - let key4 = rand_value::(); - let key5 = rand_value::(); + let key0 = StorageMapKey::from_raw(rand_value::()); + let key1 = StorageMapKey::from_raw(rand_value::()); + let key2 = StorageMapKey::from_raw(rand_value::()); + let key3 = StorageMapKey::from_raw(rand_value::()); + let key4 = StorageMapKey::from_raw(rand_value::()); + let key5 = StorageMapKey::from_raw(rand_value::()); let key0_init_value = EMPTY_WORD; let key1_init_value = EMPTY_WORD; @@ -285,55 +291,55 @@ async fn storage_delta_for_map_slots() -> anyhow::Result<()> { begin push.{key0_final_value} push.{key0} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key1_tmp_value} push.{key1} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key1_final_value} push.{key1} push.SLOT_0_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key2_final_value} push.{key2} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key3_final_value} push.{key3} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key4_tmp_value} push.{key4} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key4_final_value} push.{key4} push.SLOT_1_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key5_tmp_value} 
push.{key5} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] push.{key5_final_value} push.{key5} push.SLOT_2_NAME[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] exec.set_map_item # => [] end @@ -427,20 +433,31 @@ async fn fungible_asset_delta() -> anyhow::Result<()> { let tx_script = parse_tx_script(format!( " begin - push.{asset0} exec.create_note_with_asset + push.{ASSET0_VALUE} push.{ASSET0_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset1} exec.create_note_with_asset + + push.{ASSET1_VALUE} push.{ASSET1_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset2} exec.create_note_with_asset + + push.{ASSET2_VALUE} push.{ASSET2_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset3} exec.create_note_with_asset + + push.{ASSET3_VALUE} push.{ASSET3_KEY} + exec.util::create_default_note_with_moved_asset # => [] end ", - asset0 = Word::from(removed_asset0), - asset1 = Word::from(removed_asset1), - asset2 = Word::from(removed_asset2), - asset3 = Word::from(removed_asset3), + ASSET0_KEY = removed_asset0.to_key_word(), + ASSET0_VALUE = removed_asset0.to_value_word(), + ASSET1_KEY = removed_asset1.to_key_word(), + ASSET1_VALUE = removed_asset1.to_value_word(), + ASSET2_KEY = removed_asset2.to_key_word(), + ASSET2_VALUE = removed_asset2.to_value_word(), + ASSET3_KEY = removed_asset3.to_key_word(), + ASSET3_VALUE = removed_asset3.to_value_word(), ))?; let executed_tx = mock_chain @@ -508,10 +525,22 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { .account_type(AccountType::NonFungibleFaucet) .build_with_seed(rng.random()); - let asset0 = NonFungibleAssetBuilder::new(faucet0.prefix(), &mut rng)?.build()?; - let asset1 = NonFungibleAssetBuilder::new(faucet1.prefix(), &mut rng)?.build()?; - let asset2 = 
NonFungibleAssetBuilder::new(faucet2.prefix(), &mut rng)?.build()?; - let asset3 = NonFungibleAssetBuilder::new(faucet3.prefix(), &mut rng)?.build()?; + let asset0 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet0, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset1 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet1, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset2 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet2, + rng.random::<[u8; 32]>().to_vec(), + )?)?; + let asset3 = NonFungibleAsset::new(&NonFungibleAssetDetails::new( + faucet3, + rng.random::<[u8; 32]>().to_vec(), + )?)?; let TestSetup { mock_chain, account_id, notes } = setup_test([], [asset1, asset3].map(Asset::from), [asset0, asset2].map(Asset::from))?; @@ -519,22 +548,32 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { let tx_script = parse_tx_script(format!( " begin - push.{asset1} exec.create_note_with_asset + push.{ASSET1_VALUE} push.{ASSET1_KEY} + exec.util::create_default_note_with_moved_asset # => [] - push.{asset2} exec.create_note_with_asset + + push.{ASSET2_VALUE} push.{ASSET2_KEY} + exec.util::create_default_note_with_moved_asset # => [] # remove and re-add asset 3 - push.{asset3} + push.{ASSET3_VALUE} + push.{ASSET3_KEY} exec.remove_asset - # => [ASSET] + # => [ASSET_VALUE] + + push.{ASSET3_KEY} + # => [ASSET_KEY, ASSET_VALUE] exec.add_asset dropw # => [] end ", - asset1 = Word::from(asset1), - asset2 = Word::from(asset2), - asset3 = Word::from(asset3), + ASSET1_KEY = asset1.to_key_word(), + ASSET1_VALUE = asset1.to_value_word(), + ASSET2_KEY = asset2.to_key_word(), + ASSET2_VALUE = asset2.to_value_word(), + ASSET3_KEY = asset3.to_key_word(), + ASSET3_VALUE = asset3.to_value_word(), ))?; let executed_tx = mock_chain @@ -549,20 +588,20 @@ async fn non_fungible_asset_delta() -> anyhow::Result<()> { .account_delta() .vault() .added_assets() - .map(|asset| (asset.faucet_id_prefix(), asset.unwrap_non_fungible())) + 
.map(|asset| (asset.faucet_id(), asset.unwrap_non_fungible())) .collect::>(); let mut removed_assets = executed_tx .account_delta() .vault() .removed_assets() - .map(|asset| (asset.faucet_id_prefix(), asset.unwrap_non_fungible())) + .map(|asset| (asset.faucet_id(), asset.unwrap_non_fungible())) .collect::>(); assert_eq!(added_assets.len(), 1); assert_eq!(removed_assets.len(), 1); - assert_eq!(added_assets.remove(&asset0.faucet_id_prefix()).unwrap(), asset0); - assert_eq!(removed_assets.remove(&asset1.faucet_id_prefix()).unwrap(), asset1); + assert_eq!(added_assets.remove(&asset0.faucet_id()).unwrap(), asset0); + assert_eq!(removed_assets.remove(&asset1.faucet_id()).unwrap(), asset1); Ok(()) } @@ -583,7 +622,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { let updated_slot_value = Word::from([7, 9, 11, 13u32]); // updated storage map - let updated_map_key = Word::from([14, 15, 16, 17u32]); + let updated_map_key = StorageMapKey::from_array([14, 15, 16, 17u32]); let updated_map_value = Word::from([18, 19, 20, 21u32]); // removed assets @@ -622,16 +661,19 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # => [note_idx, pad(15)] # move an asset to the created note to partially deplete fungible asset balance - swapw dropw push.{REMOVED_ASSET} + swapw dropw + push.{REMOVED_ASSET_VALUE} + push.{REMOVED_ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - # => [ASSET, note_idx, pad(11)] + # => [pad(16)] # clear the stack dropw dropw dropw dropw ", NOTETYPE = note_types[i] as u8, tag = tags[i], - REMOVED_ASSET = Word::from(removed_assets[i]) + REMOVED_ASSET_KEY = removed_assets[i].to_key_word(), + REMOVED_ASSET_VALUE = removed_assets[i].to_value_word(), )); } @@ -654,7 +696,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # get the index of account storage slot push.MOCK_VALUE_SLOT0[0..2] - # => [slot_id_prefix, slot_id_suffix, 13, 11, 9, 7] + # => [slot_id_suffix, slot_id_prefix, 13, 11, 9, 7] # update the 
storage value call.account::set_item dropw # => [] @@ -671,7 +713,7 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { # get the index of account storage slot push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, 14, 15, 16, 17, 18, 19, 20, 21] + # => [slot_id_suffix, slot_id_prefix, 14, 15, 16, 17, 18, 19, 20, 21] # update the storage value call.account::set_map_item dropw dropw dropw @@ -793,14 +835,14 @@ async fn asset_and_storage_delta() -> anyhow::Result<()> { #[tokio::test] async fn proven_tx_storage_maps_matches_executed_tx_for_new_account() -> anyhow::Result<()> { // Use two identical maps to test that they are properly handled - // (see also https://github.com/0xMiden/miden-base/issues/2037). - let map0 = StorageMap::with_entries([(rand_value(), rand_value())])?; + // (see also https://github.com/0xMiden/protocol/issues/2037). + let map0 = StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?; let map1 = map0.clone(); let mut map2 = StorageMap::with_entries([ - (rand_value(), rand_value()), - (rand_value(), rand_value()), - (rand_value(), rand_value()), - (rand_value(), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), + (StorageMapKey::from_raw(rand_value()), rand_value()), ])?; let map0_slot_name = StorageSlotName::mock(1); @@ -835,7 +877,7 @@ async fn proven_tx_storage_maps_matches_executed_tx_for_new_account() -> anyhow: push.{value0} push.{existing_key} push.MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item exec.::miden::core::sys::truncate_stack @@ -1083,13 +1125,14 @@ fn parse_tx_script(code: impl AsRef) -> anyhow::Result { const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " use mock::account + use mock::util use miden::protocol::output_note - #! 
Inputs: [slot_id_prefix, slot_id_suffix, VALUE] + #! Inputs: [slot_id_suffix, slot_id_prefix, VALUE] #! Outputs: [] proc set_item repeat.10 push.0 movdn.6 end - # => [slot_id_prefix, slot_id_suffix, VALUE, pad(10)] + # => [slot_id_suffix, slot_id_prefix, VALUE, pad(10)] call.account::set_item # => [OLD_VALUE, pad(12)] @@ -1097,7 +1140,7 @@ const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " dropw dropw dropw dropw end - #! Inputs: [slot_id_prefix, slot_id_suffix, KEY, VALUE] + #! Inputs: [slot_id_suffix, slot_id_prefix, KEY, VALUE] #! Outputs: [] proc set_map_item repeat.6 push.0 movdn.10 end @@ -1110,59 +1153,29 @@ const TEST_ACCOUNT_CONVENIENCE_WRAPPERS: &str = " # => [] end - #! Inputs: [ASSET] - #! Outputs: [] - proc create_note_with_asset - push.0.1.2.3 # recipient - push.2 # note_type private - push.0xC0000000 # tag - # => [tag, note_type, RECIPIENT, ASSET] - - exec.output_note::create - # => [note_idx, ASSET] - - movdn.4 - # => [ASSET, note_idx] - - exec.move_asset_to_note - # => [] - end - - #! Inputs: [ASSET, note_idx] - #! Outputs: [] - proc move_asset_to_note - repeat.11 push.0 movdn.5 end - # => [ASSET, note_idx, pad(11)] - - call.account::move_asset_to_note - - # return values are unused - dropw dropw dropw dropw - end - - #! Inputs: [ASSET] - #! Outputs: [ASSET'] + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! Outputs: [ASSET_VALUE'] proc add_asset - repeat.12 push.0 movdn.4 end - # => [ASSET, pad(12)] + repeat.8 push.0 movdn.8 end + # => [ASSET_KEY, ASSET_VALUE, pad(8)] call.account::add_asset - # => [ASSET', pad(12)] + # => [ASSET_VALUE', pad(12)] repeat.12 movup.4 drop end - # => [ASSET'] + # => [ASSET_VALUE'] end - #! Inputs: [ASSET] - #! Outputs: [ASSET] + #! Inputs: [ASSET_KEY, ASSET_VALUE] + #! 
Outputs: [ASSET_VALUE] proc remove_asset - repeat.12 push.0 movdn.4 end - # => [ASSET, pad(12)] + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(8)] call.account::remove_asset - # => [ASSET, pad(12)] + # => [ASSET_VALUE, pad(12)] repeat.12 movup.4 drop end - # => [ASSET] + # => [ASSET_VALUE] end "; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs b/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs index dc4beb3519..d17386d728 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_account_interface.rs @@ -3,16 +3,18 @@ use alloc::vec::Vec; use assert_matches::assert_matches; use miden_processor::ExecutionError; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::field::PrimeField64; use miden_protocol::note::{ Note, NoteAssets, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -22,13 +24,14 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; -use miden_protocol::transaction::{InputNote, OutputNote, TransactionKernel}; -use miden_protocol::{Felt, StarkField, Word}; +use miden_protocol::transaction::{InputNote, RawOutputNote, TransactionKernel}; +use miden_protocol::{Felt, Word}; use miden_standards::note::{ NoteConsumptionStatus, - WellKnownNote, - create_p2id_note, - create_p2ide_note, + P2idNote, + P2ideNote, + P2ideNoteStorage, + StandardNote, }; use miden_standards::testing::mock_account::MockAccountExt; use miden_standards::testing::note::NoteBuilder; @@ -47,8 +50,8 @@ use crate::utils::create_public_p2any_note; use crate::{Auth, MockChain, TransactionContextBuilder, 
TxContextInput}; #[tokio::test] -async fn check_note_consumability_well_known_notes_success() -> anyhow::Result<()> { - let p2id_note = create_p2id_note( +async fn check_note_consumability_standard_notes_success() -> anyhow::Result<()> { + let p2id_note = P2idNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), vec![FungibleAsset::mock(10)], @@ -57,12 +60,14 @@ async fn check_note_consumability_well_known_notes_success() -> anyhow::Result<( &mut RpoRandomCoin::new(Word::from([2u32; 4])), )?; - let p2ide_note = create_p2ide_note( + let p2ide_note = P2ideNote::create( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), + P2ideNoteStorage::new( + ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), + None, + None, + ), vec![FungibleAsset::mock(10)], - None, - None, NoteType::Public, Default::default(), &mut RpoRandomCoin::new(Word::from([2u32; 4])), @@ -105,9 +110,14 @@ async fn check_note_consumability_custom_notes_success( #[case] notes: Vec, ) -> anyhow::Result<()> { let tx_context = { + use miden_protocol::account::auth::AuthScheme; + let account = Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); - let (_, authenticator) = Auth::BasicAuth.build_component(); + let (_, authenticator) = Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + } + .build_component(); TransactionContextBuilder::new(account) .extend_input_notes(notes.clone()) .authenticator(authenticator) @@ -221,7 +231,7 @@ async fn check_note_consumability_partial_success() -> anyhow::Result<()> { FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. 
}) } => { assert_eq!( note.id(), @@ -235,7 +245,7 @@ async fn check_note_consumability_partial_success() -> anyhow::Result<()> { FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. }) } => { assert_eq!( note.id(), @@ -258,7 +268,9 @@ async fn check_note_consumability_epilogue_failure() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Use basic auth which will cause epilogue failure when paired up with unreachable auth. - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let successful_note = builder.add_p2id_note( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), @@ -342,7 +354,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any let fail_epilogue_note = NoteBuilder::new(account.id(), &mut rand::rng()) .add_assets([Asset::from(note_asset)]) .build()?; - builder.add_output_note(OutputNote::Full(fail_epilogue_note.clone())); + builder.add_output_note(RawOutputNote::Full(fail_epilogue_note.clone())); let mock_chain = builder.build()?; let notes = vec![ @@ -383,7 +395,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::DivideByZero { .. }) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::DivideByZero, .. }) } => { assert_eq!( note.id(), @@ -397,7 +409,7 @@ async fn check_note_consumability_epilogue_failure_with_new_combination() -> any FailedNote { note, error: TransactionExecutorError::TransactionProgramExecutionFailed( - ExecutionError::FailedAssertion { .. 
}) + ExecutionError::OperationError { err: miden_processor::operation::OperationError::FailedAssertion { .. }, .. }) } => { assert_eq!( note.id(), @@ -420,7 +432,9 @@ async fn test_check_note_consumability_without_signatures() -> anyhow::Result<() let mut builder = MockChain::builder(); // Use basic auth which will cause epilogue failure when paired up with unreachable auth. - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let successful_note = builder.add_p2id_note( ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(), @@ -470,31 +484,36 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho // create notes for testing // -------------------------------------------------------------------------------------------- - let p2ide_wrong_inputs_number = create_p2ide_note_with_inputs([1, 2, 3], sender_account_id); + let p2ide_wrong_inputs_number = create_p2ide_note_with_storage([1, 2, 3], sender_account_id); - let p2ide_invalid_target_id = create_p2ide_note_with_inputs([1, 2, 3, 4], sender_account_id); + let p2ide_invalid_target_id = create_p2ide_note_with_storage([1, 2, 3, 4], sender_account_id); - let p2ide_wrong_target = create_p2ide_note_with_inputs( - [wrong_target_id.suffix().as_int(), wrong_target_id.prefix().as_u64(), 3, 4], + let p2ide_wrong_target = create_p2ide_note_with_storage( + [ + wrong_target_id.suffix().as_canonical_u64(), + wrong_target_id.prefix().as_u64(), + 3, + 4, + ], sender_account_id, ); - let p2ide_invalid_reclaim = create_p2ide_note_with_inputs( + let p2ide_invalid_reclaim = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), - Felt::MODULUS - 1, + Felt::ORDER_U64 - 1, 4, ], sender_account_id, ); - let p2ide_invalid_timelock = create_p2ide_note_with_inputs( + let 
p2ide_invalid_timelock = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), 3, - Felt::MODULUS - 1, + Felt::ORDER_U64 - 1, ], sender_account_id, ); @@ -508,7 +527,7 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho .build_tx_context( TxContextInput::Account(account), &[], - &vec![ + &[ p2ide_wrong_inputs_number.clone(), p2ide_invalid_target_id.clone(), p2ide_invalid_reclaim.clone(), @@ -533,13 +552,12 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho tx_args.clone(), ) .await?; - assert_matches!(consumability_info, NoteConsumptionStatus::NeverConsumable(reason) => { - assert_eq!(reason.to_string(), format!( - "P2IDE note should have {} inputs, but {} was provided", - WellKnownNote::P2IDE.num_expected_inputs(), - p2ide_wrong_inputs_number.recipient().inputs().num_values() - )); - }); + assert_matches!( + consumability_info, + NoteConsumptionStatus::NeverConsumable(reason) => { + assert!(reason.to_string().contains("invalid P2IDE note storage")); + } + ); // check the note with invalid target account ID // -------------------------------------------------------------------------------------------- @@ -552,7 +570,7 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho ) .await?; assert_matches!(consumability_info, NoteConsumptionStatus::NeverConsumable(reason) => { - assert_eq!(reason.to_string(), "failed to create an account ID from the first two note inputs"); + assert!(reason.to_string().contains("invalid P2IDE note storage")); }); // check the note with a wrong target account ID (target is neither the sender nor the receiver) @@ -566,7 +584,7 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho ) .await?; assert_matches!(consumability_info, NoteConsumptionStatus::NeverConsumable(reason) => { - assert_eq!(reason.to_string(), "target 
account of the transaction does not match neither the receiver account specified by the P2IDE inputs, nor the sender account"); + assert_eq!(reason.to_string(), "target account of the transaction does not match neither the receiver account specified by the P2IDE storage, nor the sender account"); }); // check the note with an invalid reclaim height @@ -580,7 +598,7 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho ) .await?; assert_matches!(consumability_info, NoteConsumptionStatus::NeverConsumable(reason) => { - assert_eq!(reason.to_string(), "reclaim block height should be a u32"); + assert!(reason.to_string().contains("invalid P2IDE note storage")); }); // check the note with an invalid timelock height @@ -594,7 +612,7 @@ async fn test_check_note_consumability_static_analysis_invalid_inputs() -> anyho ) .await?; assert_matches!(consumability_info, NoteConsumptionStatus::NeverConsumable(reason) => { - assert_eq!(reason.to_string(), "timelock block height should be a u32"); + assert!(reason.to_string().contains("invalid P2IDE note storage")); }); Ok(()) @@ -648,16 +666,16 @@ async fn test_check_note_consumability_static_analysis_receiver( let target_account_id = account.id(); let sender_account_id = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(); - let p2ide = create_p2ide_note_with_inputs( + let p2ide = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), reclaim_height, timelock_height, ], sender_account_id, ); - builder.add_output_note(OutputNote::Full(p2ide.clone())); + builder.add_output_note(RawOutputNote::Full(p2ide.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_until_block(3)?; @@ -738,16 +756,16 @@ async fn test_check_note_consumability_static_analysis_sender( let target_account_id: AccountId = ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into().unwrap(); - let p2ide = 
create_p2ide_note_with_inputs( + let p2ide = create_p2ide_note_with_storage( [ - target_account_id.suffix().as_int(), + target_account_id.suffix().as_canonical_u64(), target_account_id.prefix().as_u64(), reclaim_height, timelock_height, ], sender_account_id, ); - builder.add_output_note(OutputNote::Full(p2ide.clone())); + builder.add_output_note(RawOutputNote::Full(p2ide.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_until_block(3)?; @@ -782,18 +800,21 @@ async fn test_check_note_consumability_static_analysis_sender( // HELPER FUNCTIONS // ================================================================================================ -/// Creates a mock P2IDE note with the specified note inputs. -fn create_p2ide_note_with_inputs(inputs: impl IntoIterator, sender: AccountId) -> Note { +/// Creates a mock P2IDE note with the specified note storage. +fn create_p2ide_note_with_storage( + storage: impl IntoIterator, + sender: AccountId, +) -> Note { let serial_num = RpoRandomCoin::new(Default::default()).draw_word(); - let note_script = WellKnownNote::P2IDE.script(); + let note_script = StandardNote::P2IDE.script(); let recipient = NoteRecipient::new( serial_num, note_script, - NoteInputs::new(inputs.into_iter().map(Felt::new).collect()).unwrap(), + NoteStorage::new(storage.into_iter().map(Felt::new).collect()).unwrap(), ); let tag = NoteTag::with_account_target(sender); - let metadata = NoteMetadata::new(sender, NoteType::Public, tag); + let metadata = NoteMetadata::new(sender, NoteType::Public).with_tag(tag); Note::new(NoteAssets::default(), metadata, recipient) } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs index 3b2301338f..7896471a84 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_active_note.rs @@ -2,15 +2,16 @@ use alloc::string::String; use anyhow::Context; use 
miden_protocol::account::Account; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::FungibleAsset; use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; use miden_protocol::errors::tx_kernel::ERR_NOTE_ATTEMPT_TO_ACCESS_NOTE_METADATA_WHILE_NO_NOTE_BEING_PROCESSED; use miden_protocol::note::{ Note, NoteAssets, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -19,6 +20,7 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; +use miden_protocol::transaction::memory::{ASSET_SIZE, ASSET_VALUE_OFFSET}; use miden_protocol::{EMPTY_WORD, Felt, ONE, WORD_SIZE, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; @@ -37,7 +39,9 @@ use crate::{ async fn test_active_note_get_sender_fails_from_tx_script() -> anyhow::Result<()> { // Creates a mockchain with an account and a note let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -158,8 +162,8 @@ async fn test_active_note_get_sender() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let sender = tx_context.input_notes().get_note(0).note().metadata().sender(); - assert_eq!(exec_output.stack[0], sender.prefix().as_felt()); - assert_eq!(exec_output.stack[1], sender.suffix()); + assert_eq!(exec_output.get_stack_element(0), sender.suffix()); + assert_eq!(exec_output.get_stack_element(1), sender.prefix().as_felt()); Ok(()) } @@ -169,7 +173,9 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { // Creates a mockchain with an account and a note that it can consume let tx_context = { let mut builder = MockChain::builder(); - let account = 
builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -204,10 +210,16 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { for asset in note.assets().iter() { code += &format!( r#" - # assert the asset is correct - dup padw movup.4 mem_loadw_be push.{asset} assert_eqw.err="asset mismatch" push.4 add + dup padw movup.4 mem_loadw_le push.{ASSET_KEY} + assert_eqw.err="asset key mismatch" + + dup padw movup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le push.{ASSET_VALUE} + assert_eqw.err="asset value mismatch" + + add.{ASSET_SIZE} "#, - asset = Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); } code @@ -223,7 +235,7 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { use miden::protocol::active_note proc process_note_0 - # drop the note inputs + # drop the note storage dropw dropw dropw dropw # set the destination pointer for note 0 assets @@ -246,7 +258,7 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { end proc process_note_1 - # drop the note inputs + # drop the note storage dropw dropw dropw dropw # set the destination pointer for note 1 assets @@ -302,11 +314,13 @@ async fn test_active_note_get_assets() -> anyhow::Result<()> { } #[tokio::test] -async fn test_active_note_get_inputs() -> anyhow::Result<()> { +async fn test_active_note_get_storage() -> anyhow::Result<()> { // Creates a mockchain with an account and a note that it can consume let tx_context = { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -321,17 
+335,17 @@ async fn test_active_note_get_inputs() -> anyhow::Result<()> { .build()? }; - fn construct_inputs_assertions(note: &Note) -> String { + fn construct_storage_assertions(note: &Note) -> String { let mut code = String::new(); - for inputs_chunk in note.inputs().values().chunks(WORD_SIZE) { - let mut inputs_word = EMPTY_WORD; - inputs_word.as_mut_slice()[..inputs_chunk.len()].copy_from_slice(inputs_chunk); + for storage_chunk in note.storage().items().chunks(WORD_SIZE) { + let mut storage_word = EMPTY_WORD; + storage_word.as_mut_slice()[..storage_chunk.len()].copy_from_slice(storage_chunk); code += &format!( r#" - # assert the inputs are correct + # assert the storage items are correct # => [dest_ptr] - dup padw movup.4 mem_loadw_be push.{inputs_word} assert_eqw.err="inputs are incorrect" + dup padw movup.4 mem_loadw_le push.{storage_word} assert_eqw.err="storage items are incorrect" # => [dest_ptr] push.4 add @@ -362,17 +376,17 @@ async fn test_active_note_get_inputs() -> anyhow::Result<()> { dropw dropw dropw dropw # => [] - push.{NOTE_0_PTR} exec.active_note::get_inputs - # => [num_inputs, dest_ptr] + push.{NOTE_0_PTR} exec.active_note::get_storage + # => [num_storage_items, dest_ptr] - eq.{num_inputs} assert.err="unexpected num inputs" + eq.{num_storage_items} assert.err="unexpected num_storage_items" # => [dest_ptr] dup eq.{NOTE_0_PTR} assert.err="unexpected dest ptr" # => [dest_ptr] - # apply note 1 inputs assertions - {inputs_assertions} + # apply note 1 storage assertions + {storage_assertions} # => [dest_ptr] # clear the stack @@ -380,8 +394,8 @@ async fn test_active_note_get_inputs() -> anyhow::Result<()> { # => [] end "#, - num_inputs = note0.inputs().num_values(), - inputs_assertions = construct_inputs_assertions(note0), + num_storage_items = note0.storage().num_items(), + storage_assertions = construct_storage_assertions(note0), NOTE_0_PTR = 100000000, ); @@ -389,12 +403,12 @@ async fn test_active_note_get_inputs() -> anyhow::Result<()> { Ok(()) 
} -/// This test checks the scenario when an input note has exactly 8 inputs, and the transaction -/// script attempts to load the inputs to memory using the +/// This test checks the scenario when an input note has exactly 8 storage items, and the +/// transaction script attempts to load the storage to memory using the /// `miden::protocol::active_note::get_inputs` procedure. /// -/// Previously this setup was leading to the incorrect number of note inputs computed during the -/// `get_inputs` procedure, see the [issue #1363](https://github.com/0xMiden/miden-base/issues/1363) +/// Previously this setup was leading to the incorrect number of note storage items computed during +/// the `get_inputs` procedure, see the [issue #1363](https://github.com/0xMiden/protocol/issues/1363) /// for more details. #[tokio::test] async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { @@ -408,18 +422,18 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { // prepare note data let serial_num = RpoRandomCoin::new(Word::from([4u32; 4])).draw_word(); let tag = NoteTag::with_account_target(target_id); - let metadata = NoteMetadata::new(sender_id, NoteType::Public, tag); + let metadata = NoteMetadata::new(sender_id, NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![]).context("failed to create input note assets")?; let note_script = CodeBuilder::default() .compile_note_script("begin nop end") .context("failed to parse note script")?; - // create a recipient with note inputs, which number divides by 8. For simplicity create 8 input - // values + // create a recipient with note storage, which number divides by 8. 
For simplicity create 8 + // storage values let recipient = NoteRecipient::new( serial_num, note_script, - NoteInputs::new(vec![ + NoteStorage::new(vec![ ONE, Felt::new(2), Felt::new(3), @@ -429,7 +443,7 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { Felt::new(7), Felt::new(8), ]) - .context("failed to create note inputs")?, + .context("failed to create note storage")?, ); let input_note = Note::new(vault.clone(), metadata, recipient); @@ -445,12 +459,12 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - # execute the `get_inputs` procedure to trigger note inputs length assertion - push.0 exec.active_note::get_inputs - # => [num_inputs, 0] + # execute the `get_storage` procedure to trigger note number of storage items assertion + push.0 exec.active_note::get_storage + # => [num_storage_items, 0] - # assert that the inputs length is 8 - push.8 assert_eq.err=\"number of inputs values should be equal to 8\" + # assert that the number of storage items is 8 + push.8 assert_eq.err=\"number of storage values should be equal to 8\" # clean the stack drop @@ -466,7 +480,9 @@ async fn test_active_note_get_exactly_8_inputs() -> anyhow::Result<()> { async fn test_active_note_get_serial_number() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -497,7 +513,7 @@ async fn test_active_note_get_serial_number() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let serial_number = tx_context.input_notes().get_note(0).note().serial_num(); - assert_eq!(exec_output.get_stack_word_be(0), serial_number); + assert_eq!(exec_output.get_stack_word(0), 
serial_number); Ok(()) } @@ -505,7 +521,9 @@ async fn test_active_note_get_serial_number() -> anyhow::Result<()> { async fn test_active_note_get_script_root() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ -536,6 +554,6 @@ async fn test_active_note_get_script_root() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(code).await?; let script_root = tx_context.input_notes().get_note(0).note().script().root(); - assert_eq!(exec_output.get_stack_word_be(0), script_root); + assert_eq!(exec_output.get_stack_word(0), script_root); Ok(()) } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_array.rs b/crates/miden-testing/src/kernel_tests/tx/test_array.rs new file mode 100644 index 0000000000..87d5877a0d --- /dev/null +++ b/crates/miden-testing/src/kernel_tests/tx/test_array.rs @@ -0,0 +1,254 @@ +//! Tests for the Array utility `get` and `set` procedures. + +use miden_protocol::Word; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + AccountBuilder, + AccountComponent, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_standards::code_builder::CodeBuilder; +use rand::{Rng, SeedableRng}; +use rand_chacha::ChaCha20Rng; + +use crate::{Auth, TransactionContextBuilder}; + +/// The slot name used for testing the array component. +const TEST_ARRAY_SLOT: &str = "test::array::data"; +const TEST_DOUBLE_WORD_ARRAY_SLOT: &str = "test::double_word_array::data"; + +/// Verify that, given an account component with a storage map to hold the array data, +/// we can use the array utility to: +/// 1. Retrieve the initial value via `get` +/// 2. Update the value via `set` +/// 3. 
Retrieve the updated value via `get` +#[tokio::test] +async fn test_array_get_and_set() -> anyhow::Result<()> { + let slot_name = StorageSlotName::new(TEST_ARRAY_SLOT).expect("slot name should be valid"); + + let wrapper_component_code = format!( + r#" + use miden::core::word + use miden::standards::data_structures::array + + const ARRAY_SLOT_NAME = word("{slot_name}") + + #! Wrapper for array::get that uses exec internally. + #! Inputs: [index, pad(15)] + #! Outputs: [VALUE, pad(12)] + pub proc test_get + push.ARRAY_SLOT_NAME[0..2] + exec.array::get + # => [VALUE, pad(15)] + swapw dropw + end + + #! Wrapper for array::set that uses exec internally. + #! Inputs: [index, VALUE, pad(11)] + #! Outputs: [OLD_VALUE, pad(12)] + pub proc test_set + push.ARRAY_SLOT_NAME[0..2] + exec.array::set + # => [OLD_VALUE, pad(12)] + end + "#, + ); + + // Build the wrapper component by linking against the array library + let wrapper_library = CodeBuilder::default() + .compile_component_code("wrapper::component", wrapper_component_code)?; + + // Create the wrapper account component with a storage map to hold the array data + let initial_value = Word::from([42u32, 42, 42, 42]); + let wrapper_component = AccountComponent::new( + wrapper_library.clone(), + vec![StorageSlot::with_map( + slot_name.clone(), + StorageMap::with_entries([(StorageMapKey::empty(), initial_value)])?, + )], + AccountComponentMetadata::mock("wrapper::component"), + )?; + + // Build an account with the wrapper component that uses the array utility + let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) + .with_auth_component(Auth::IncrNonce) + .with_component(wrapper_component) + .build_existing()?; + + // Verify the storage slot exists + assert!( + account.storage().get(&slot_name).is_some(), + "Array data slot should exist in account storage" + ); + + // Transaction script that: + // 1. Gets the initial value at index 0 (should be [42, 42, 42, 42]) + // 2. Sets index 0 to [43, 43, 43, 43] + // 3. 
Gets the updated value at index 0 (should be [43, 43, 43, 43]) + let tx_script_code = r#" + use wrapper::component->wrapper + + begin + # Step 1: Get value at index 0 (should return [42, 42, 42, 42]) + push.0 + # => [index, pad(16)] + call.wrapper::test_get + # => [VALUE, pad(13)] + + # Verify value is [42, 42, 42, 42] + push.42.42.42.42 + assert_eqw.err="get(0) should return [42, 42, 42, 42] initially" + # => [pad(16)] (auto-padding) + + # Step 2: Set value at index 0 to [43, 43, 43, 43] + push.43.43.43.43 + push.0 + # => [index, VALUE, pad(16)] + call.wrapper::test_set + # => [OLD_VALUE, pad(17)] + dropw + + # Step 3: Get value at index 0 (should return [43, 43, 43, 43]) + push.0 + # => [index, pad(17)] + call.wrapper::test_get + # => [VALUE, pad(14)] + + # Verify value is [43, 43, 43, 43] + push.43.43.43.43 + assert_eqw.err="get(0) should return [43, 43, 43, 43] after set" + # => [pad(16)] (auto-padding) + end + "#; + + // Compile the transaction script with the wrapper library linked + let tx_script = CodeBuilder::default() + .with_dynamically_linked_library(&wrapper_library)? + .compile_tx_script(tx_script_code)?; + + // Create transaction context and execute + let tx_context = TransactionContextBuilder::new(account).tx_script(tx_script).build()?; + + tx_context.execute().await?; + + Ok(()) +} + +/// Verify that the double-word array utility can store and retrieve two words per index. +#[tokio::test] +async fn test_double_word_array_get_and_set() -> anyhow::Result<()> { + let slot_name = + StorageSlotName::new(TEST_DOUBLE_WORD_ARRAY_SLOT).expect("slot name should be valid"); + let index = 7; + + let wrapper_component_code = format!( + r#" + use miden::core::word + use miden::standards::data_structures::double_word_array + + const ARRAY_SLOT_NAME = word("{slot_name}") + + #! Wrapper for double_word_array::get that uses exec internally. + #! Inputs: [index, pad(15)] + #! 
Outputs: [VALUE_0, VALUE_1, pad(8)] + pub proc test_get + push.ARRAY_SLOT_NAME[0..2] + exec.double_word_array::get + # => [VALUE_0, VALUE_1, pad(15)] + swapdw dropw dropw + # => [VALUE_0, VALUE_1, pad(8)] auto-padding + end + + #! Wrapper for double_word_array::set that uses exec internally. + #! Inputs: [index, VALUE_0, VALUE_1, pad(7)] + #! Outputs: [OLD_VALUE_0, OLD_VALUE_1, pad(8)] + pub proc test_set + push.ARRAY_SLOT_NAME[0..2] + exec.double_word_array::set + # => [OLD_VALUE_0, OLD_VALUE_1, pad(8)] auto-padding + end + "#, + ); + + let wrapper_library = CodeBuilder::default() + .compile_component_code("wrapper::component", wrapper_component_code)?; + + let initial_value_0 = Word::from([1u32, 2, 3, 4]); + let initial_value_1 = Word::from([5u32, 6, 7, 8]); + let wrapper_component = AccountComponent::new( + wrapper_library.clone(), + vec![StorageSlot::with_map( + slot_name.clone(), + StorageMap::with_entries([ + (StorageMapKey::from_array([0, 0, 0, index]), initial_value_0), + (StorageMapKey::from_array([0, 0, 1, index]), initial_value_1), + ])?, + )], + AccountComponentMetadata::mock("wrapper::component"), + )?; + + let account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) + .with_auth_component(Auth::IncrNonce) + .with_component(wrapper_component) + .build_existing()?; + + assert!( + account.storage().get(&slot_name).is_some(), + "Double-word array data slot should exist in account storage" + ); + + let updated_value_0 = Word::from([9u32, 9, 9, 9]); + let updated_value_1 = Word::from([10u32, 10, 10, 10]); + let tx_script_code = format!( + r#" + use wrapper::component->wrapper + + begin + # Step 1: Get value at index {index} (should return the initial double-word) + push.{index} + call.wrapper::test_get + + push.{initial_value_0} + assert_eqw.err="get(index) should return initial word 0" + + push.{initial_value_1} + assert_eqw.err="get(index) should return initial word 1" + + # Step 2: Set the double-word at index {index} to the updated values + 
push.{updated_value_1} + push.{updated_value_0} + push.{index} + call.wrapper::test_set + push.{initial_value_0} + assert_eqw.err="set(index) should return the original double-word, left word" + push.{initial_value_1} + assert_eqw.err="set(index) should return the original double-word, right word" + + # Step 3: Get value at index {index} (should return the updated double-word) + push.{index} + call.wrapper::test_get + + push.{updated_value_0} + assert_eqw.err="get(index) should return the updated double-word, left word" + + push.{updated_value_1} + assert_eqw.err="get(index) should return the updated double-word, right word" + + repeat.8 drop end + end + "#, + ); + + let tx_script = CodeBuilder::default() + .with_dynamically_linked_library(&wrapper_library)? + .compile_tx_script(tx_script_code)?; + + let tx_context = TransactionContextBuilder::new(account).tx_script(tx_script).build()?; + + tx_context.execute().await?; + + Ok(()) +} diff --git a/crates/miden-testing/src/kernel_tests/tx/test_asset.rs b/crates/miden-testing/src/kernel_tests/tx/test_asset.rs index adbacaf792..15e49bf6cd 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_asset.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_asset.rs @@ -1,23 +1,40 @@ use miden_protocol::account::AccountId; -use miden_protocol::asset::NonFungibleAsset; -use miden_protocol::testing::account_id::ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET; -use miden_protocol::testing::constants::{ - FUNGIBLE_ASSET_AMOUNT, - FUNGIBLE_FAUCET_INITIAL_BALANCE, - NON_FUNGIBLE_ASSET_DATA, +use miden_protocol::asset::{ + AssetCallbackFlag, + AssetId, + AssetVaultKey, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, }; -use miden_protocol::{Felt, Hasher, Word}; +use miden_protocol::errors::MasmError; +use miden_protocol::errors::tx_kernel::{ + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT, + ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE, + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO, + 
ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO, + ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1, + ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0, + ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE, +}; +use miden_protocol::testing::account_id::{ + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, +}; +use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASSET_DATA}; +use miden_protocol::{Felt, Word}; -use crate::TransactionContextBuilder; +use crate::executor::CodeExecutor; use crate::kernel_tests::tx::ExecutionOutputExt; +use crate::{TransactionContextBuilder, assert_execution_error}; #[tokio::test] async fn test_create_fungible_asset_succeeds() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - Felt::new(FUNGIBLE_FAUCET_INITIAL_BALANCE), - ) - .build()?; + let tx_context = + TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET) + .build()?; + let expected_asset = FungibleAsset::new(tx_context.account().id(), FUNGIBLE_ASSET_AMOUNT)?; let code = format!( " @@ -30,25 +47,19 @@ async fn test_create_fungible_asset_succeeds() -> anyhow::Result<()> { # create fungible asset push.{FUNGIBLE_ASSET_AMOUNT} exec.faucet::create_fungible_asset + # => [ASSET_KEY, ASSET_VALUE] # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end " ); let exec_output = &tx_context.execute_code(&code).await?; - let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); - assert_eq!( - exec_output.get_stack_word_be(0), - Word::from([ - Felt::new(FUNGIBLE_ASSET_AMOUNT), - Felt::new(0), - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ]) - ); + assert_eq!(exec_output.get_stack_word(0), expected_asset.to_key_word()); + assert_eq!(exec_output.get_stack_word(4), expected_asset.to_value_word()); + Ok(()) } 
@@ -58,7 +69,11 @@ async fn test_create_non_fungible_asset_succeeds() -> anyhow::Result<()> { TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) .build()?; - let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA); + let non_fungible_asset_details = NonFungibleAssetDetails::new( + NonFungibleAsset::mock_issuer(), + NON_FUNGIBLE_ASSET_DATA.to_vec(), + )?; + let non_fungible_asset = NonFungibleAsset::new(&non_fungible_asset_details)?; let code = format!( " @@ -69,46 +84,182 @@ async fn test_create_non_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # push non-fungible asset data hash onto the stack - push.{non_fungible_asset_data_hash} + push.{NON_FUNGIBLE_ASSET_DATA_HASH} exec.faucet::create_non_fungible_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - non_fungible_asset_data_hash = Hasher::hash(&NON_FUNGIBLE_ASSET_DATA), + NON_FUNGIBLE_ASSET_DATA_HASH = non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; - assert_eq!(exec_output.get_stack_word_be(0), Word::from(non_fungible_asset)); + + assert_eq!(exec_output.get_stack_word(0), non_fungible_asset.to_key_word()); + assert_eq!(exec_output.get_stack_word(4), non_fungible_asset.to_value_word()); Ok(()) } +#[rstest::rstest] +#[case::account_is_not_non_fungible_faucet( + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE.try_into()?, + AssetId::default(), + ERR_NON_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_NON_FUNGIBLE +)] +#[case::asset_id_suffix_mismatch( + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(0u32), Felt::from(3u32)), + ERR_NON_FUNGIBLE_ASSET_ID_SUFFIX_MUST_MATCH_HASH0 +)] +#[case::asset_id_prefix_mismatch( + ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(2u32), Felt::from(0u32)), + ERR_NON_FUNGIBLE_ASSET_ID_PREFIX_MUST_MATCH_HASH1 +)] #[tokio::test] -async fn 
test_validate_non_fungible_asset() -> anyhow::Result<()> { - let tx_context = - TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) - .build()?; +async fn test_validate_non_fungible_asset( + #[case] account_id: AccountId, + #[case] asset_id: AssetId, + #[case] expected_err: MasmError, +) -> anyhow::Result<()> { + let code = format!( + " + use $kernel::non_fungible_asset - let non_fungible_asset = Word::from(NonFungibleAsset::mock(&[1, 2, 3])); + begin + # a random asset value + push.[2, 3, 4, 5] + # => [hash0 = 2, hash1 = 3, 4, 5] + + push.{account_id_prefix} + push.{account_id_suffix} + push.{asset_id_prefix} + push.{asset_id_suffix} + # => [ASSET_KEY, ASSET_VALUE] + + exec.non_fungible_asset::validate + + # truncate the stack + swapdw dropw dropw + end + ", + asset_id_suffix = asset_id.suffix(), + asset_id_prefix = asset_id.prefix(), + account_id_suffix = account_id.suffix(), + account_id_prefix = account_id.prefix().as_felt(), + ); + + let exec_result = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_result, expected_err); + + Ok(()) +} +#[rstest::rstest] +#[case::account_is_not_fungible_faucet( + ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE.try_into()?, + AssetId::default(), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ACCOUNT_ID_MUST_BE_FUNGIBLE +)] +#[case::asset_id_suffix_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(1u32), Felt::from(0u32)), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO +)] +#[case::asset_id_prefix_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::new(Felt::from(0u32), Felt::from(1u32)), + Word::empty(), + ERR_FUNGIBLE_ASSET_KEY_ASSET_ID_MUST_BE_ZERO +)] +#[case::non_amount_value_is_non_zero( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::default(), + Word::from([0, 1, 0, 0u32]), + ERR_FUNGIBLE_ASSET_VALUE_MOST_SIGNIFICANT_ELEMENTS_MUST_BE_ZERO +)] 
+#[case::amount_exceeds_max( + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into()?, + AssetId::default(), + Word::try_from([FungibleAsset::MAX_AMOUNT + 1, 0, 0, 0])?, + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT +)] +#[tokio::test] +async fn test_validate_fungible_asset( + #[case] account_id: AccountId, + #[case] asset_id: AssetId, + #[case] asset_value: Word, + #[case] expected_err: MasmError, +) -> anyhow::Result<()> { let code = format!( " - use $kernel::asset + use $kernel::fungible_asset begin - push.{non_fungible_asset} - exec.asset::validate_non_fungible_asset + push.{ASSET_VALUE} + push.{account_id_prefix} + push.{account_id_suffix} + push.{asset_id_prefix} + push.{asset_id_suffix} + # => [ASSET_KEY, ASSET_VALUE] + + exec.fungible_asset::validate # truncate the stack - swapw dropw + swapdw dropw dropw end + ", + asset_id_suffix = asset_id.suffix(), + asset_id_prefix = asset_id.prefix(), + account_id_suffix = account_id.suffix(), + account_id_prefix = account_id.prefix().as_felt(), + ASSET_VALUE = asset_value, + ); + + let exec_result = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_result, expected_err); + + Ok(()) +} + +#[rstest::rstest] +#[case::without_callbacks(AssetCallbackFlag::Disabled)] +#[case::with_callbacks(AssetCallbackFlag::Enabled)] +#[tokio::test] +async fn test_key_to_asset_metadata(#[case] callbacks: AssetCallbackFlag) -> anyhow::Result<()> { + let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; + let vault_key = AssetVaultKey::new(AssetId::default(), faucet_id, callbacks)?; + + let code = format!( " + use $kernel::asset + + begin + push.{ASSET_KEY} + exec.asset::key_to_callbacks_enabled + # => [callbacks_enabled, ASSET_KEY] + + # truncate stack + swapw dropw swap drop + # => [callbacks_enabled] + end + ", + ASSET_KEY = vault_key.to_word(), ); - let exec_output = &tx_context.execute_code(&code).await?; + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + 
assert_eq!( + exec_output.get_stack_element(0).as_canonical_u64(), + callbacks.as_u8() as u64, + "MASM key_to_asset_category returned wrong value for {callbacks:?}" + ); - assert_eq!(exec_output.get_stack_word_be(0), non_fungible_asset); Ok(()) } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs b/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs index 88c7d85737..f551c63db1 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_asset_vault.rs @@ -1,23 +1,31 @@ use assert_matches::assert_matches; +use miden_protocol::ONE; use miden_protocol::account::AccountId; -use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset, NonFungibleAssetDetails}; -use miden_protocol::errors::AssetVaultError; +use miden_protocol::asset::{ + Asset, + AssetVaultKey, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; +use miden_protocol::errors::protocol::ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET; use miden_protocol::errors::tx_kernel::{ ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW, ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED, - ERR_VAULT_GET_BALANCE_CAN_ONLY_BE_CALLED_ON_FUNGIBLE_ASSET, ERR_VAULT_NON_FUNGIBLE_ASSET_ALREADY_EXISTS, ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND, }; +use miden_protocol::errors::{AssetError, AssetVaultError}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, + ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET_1, }; use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASSET_DATA}; use miden_protocol::transaction::memory; -use miden_protocol::{Felt, ONE, Word, ZERO}; +use crate::executor::CodeExecutor; use crate::kernel_tests::tx::ExecutionOutputExt; use crate::{TransactionContextBuilder, assert_execution_error}; @@ -35,7 +43,8 @@ async fn get_balance_returns_correct_amount() -> 
anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{suffix} push.{prefix} + push.{prefix} + push.{suffix} exec.active_account::get_balance # => [balance] @@ -50,18 +59,19 @@ async fn get_balance_returns_correct_amount() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_element(0).as_int(), + exec_output.get_stack_element(0).as_canonical_u64(), tx_context.account().vault().get_balance(faucet_id).unwrap() ); Ok(()) } -/// Tests that asset_vault::peek_balance returns the correct amount. +/// Tests that asset_vault::peek_asset returns the correct asset. #[tokio::test] -async fn peek_balance_returns_correct_amount() -> anyhow::Result<()> { +async fn peek_asset_returns_correct_asset() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); + let asset_key = AssetVaultKey::new_fungible(faucet_id).unwrap(); let code = format!( r#" @@ -73,30 +83,29 @@ async fn peek_balance_returns_correct_amount() -> anyhow::Result<()> { exec.prologue::prepare_transaction exec.memory::get_account_vault_root_ptr - push.{suffix} push.{prefix} - # => [prefix, suffix, account_vault_root_ptr, balance] + push.{ASSET_KEY} + # => [ASSET_KEY, account_vault_root_ptr] - # emit an event to fetch the merkle path for the asset since peek_balance does not do + # emit an event to fetch the merkle path for the asset since peek_asset does not do # that - emit.event("miden::account::vault_before_get_balance") - # => [prefix, suffix, account_vault_root_ptr, balance] + emit.event("miden::protocol::account::vault_before_get_asset") + # => [ASSET_KEY, account_vault_root_ptr] - exec.asset_vault::peek_balance - # => [peeked_balance] + exec.asset_vault::peek_asset + # => [PEEKED_ASSET_VALUE] # truncate the stack - swap drop + swapw dropw end "#, - prefix = faucet_id.prefix().as_felt(), - suffix = 
faucet_id.suffix(), + ASSET_KEY = asset_key.to_word() ); let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_element(0).as_int(), - tx_context.account().vault().get_balance(faucet_id).unwrap() + exec_output.get_stack_word(0), + tx_context.account().vault().get(asset_key).unwrap().to_value_word() ); Ok(()) @@ -118,7 +127,7 @@ async fn test_get_balance_non_fungible_fails() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{suffix} push.{prefix} + push.{prefix} push.{suffix} exec.active_account::get_balance end ", @@ -149,14 +158,14 @@ async fn test_has_non_fungible_asset() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{non_fungible_asset_key} + push.{NON_FUNGIBLE_ASSET_KEY} exec.active_account::has_non_fungible_asset # truncate the stack swap drop end ", - non_fungible_asset_key = Word::from(non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await?; @@ -172,13 +181,7 @@ async fn test_add_fungible_asset_success() -> anyhow::Result<()> { let mut account_vault = tx_context.account().vault().clone(); let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FungibleAsset::MAX_AMOUNT - FUNGIBLE_ASSET_AMOUNT; - let add_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let add_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -187,21 +190,26 @@ async fn test_add_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::add_asset # truncate the stack - swapw dropw + swapdw dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_fungible_asset) + FUNGIBLE_ASSET_KEY = add_fungible_asset.to_key_word(), + 
FUNGIBLE_ASSET_VALUE = add_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.add_asset(add_fungible_asset).unwrap()) + exec_output.get_stack_word(0), + account_vault + .add_asset(Asset::Fungible(add_fungible_asset)) + .unwrap() + .to_value_word() ); assert_eq!( @@ -219,13 +227,7 @@ async fn test_add_non_fungible_asset_fail_overflow() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FungibleAsset::MAX_AMOUNT - FUNGIBLE_ASSET_AMOUNT + 1; - let add_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let add_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -234,17 +236,20 @@ async fn test_add_non_fungible_asset_fail_overflow() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::add_asset + dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_fungible_asset) + FUNGIBLE_ASSET_KEY = add_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = add_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; assert_execution_error!(exec_result, ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED); - assert!(account_vault.add_asset(add_fungible_asset).is_err()); + assert!(account_vault.add_asset(Asset::Fungible(add_fungible_asset)).is_err()); Ok(()) } @@ -255,7 +260,7 @@ async fn test_add_non_fungible_asset_success() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into()?; let mut account_vault = tx_context.account().vault().clone(); let add_non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new( - &NonFungibleAssetDetails::new(faucet_id.prefix(), vec![1, 2, 3, 4, 5, 6, 7, 8]).unwrap(), + 
&NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4, 5, 6, 7, 8]).unwrap(), )?); let code = format!( @@ -265,21 +270,23 @@ async fn test_add_non_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.account::add_asset # truncate the stack - swapw dropw + swapdw dropw dropw end ", - FUNGIBLE_ASSET = Word::from(add_non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = add_non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = add_non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.add_asset(add_non_fungible_asset)?) + exec_output.get_stack_word(0), + account_vault.add_asset(add_non_fungible_asset)?.to_value_word() ); assert_eq!( @@ -296,7 +303,7 @@ async fn test_add_non_fungible_asset_fail_duplicate() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into().unwrap(); let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new(&non_fungible_asset_details).unwrap()); @@ -307,11 +314,14 @@ async fn test_add_non_fungible_asset_fail_duplicate() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{NON_FUNGIBLE_ASSET} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.account::add_asset + dropw dropw end ", - NON_FUNGIBLE_ASSET = Word::from(non_fungible_asset) + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -329,13 +339,7 @@ 
async fn test_remove_fungible_asset_success_no_balance_remaining() -> anyhow::Re let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -344,21 +348,26 @@ async fn test_remove_fungible_asset_success_no_balance_remaining() -> anyhow::Re begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.remove_asset(remove_fungible_asset).unwrap()) + exec_output.get_stack_word(0), + account_vault + .remove_asset(Asset::Fungible(remove_fungible_asset)) + .unwrap() + .to_value_word() ); assert_eq!( @@ -374,13 +383,7 @@ async fn test_remove_fungible_asset_fail_remove_too_much() -> anyhow::Result<()> let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT + 1; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -389,11 +392,13 @@ async fn test_remove_fungible_asset_fail_remove_too_much() -> anyhow::Result<()> begin 
exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -413,13 +418,7 @@ async fn test_remove_fungible_asset_success_balance_remaining() -> anyhow::Resul let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); let amount = FUNGIBLE_ASSET_AMOUNT - 1; - let remove_fungible_asset = Asset::try_from(Word::new([ - Felt::new(amount), - ZERO, - faucet_id.suffix(), - faucet_id.prefix().as_felt(), - ])) - .unwrap(); + let remove_fungible_asset = FungibleAsset::new(faucet_id, amount)?; let code = format!( " @@ -428,21 +427,26 @@ async fn test_remove_fungible_asset_success_balance_remaining() -> anyhow::Resul begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(remove_fungible_asset) + FUNGIBLE_ASSET_KEY = remove_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = remove_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - Word::from(account_vault.remove_asset(remove_fungible_asset).unwrap()) + exec_output.get_stack_word(0), + account_vault + .remove_asset(Asset::Fungible(remove_fungible_asset)) + .unwrap() + .to_value_word() ); assert_eq!( @@ -460,7 +464,7 @@ async fn test_remove_inexisting_non_fungible_asset_fails() -> anyhow::Result<()> let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), 
NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let nonfungible = NonFungibleAsset::new(&non_fungible_asset_details).unwrap(); let non_existent_non_fungible_asset = Asset::NonFungible(nonfungible); @@ -477,11 +481,13 @@ async fn test_remove_inexisting_non_fungible_asset_fails() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset end ", - FUNGIBLE_ASSET = Word::from(non_existent_non_fungible_asset) + FUNGIBLE_ASSET_KEY = non_existent_non_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = non_existent_non_fungible_asset.to_value_word(), ); let exec_result = tx_context.execute_code(&code).await; @@ -502,7 +508,7 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { let faucet_id: AccountId = ACCOUNT_ID_PUBLIC_NON_FUNGIBLE_FAUCET.try_into().unwrap(); let mut account_vault = tx_context.account().vault().clone(); let non_fungible_asset_details = - NonFungibleAssetDetails::new(faucet_id.prefix(), NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); + NonFungibleAssetDetails::new(faucet_id, NON_FUNGIBLE_ASSET_DATA.to_vec()).unwrap(); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new(&non_fungible_asset_details).unwrap()); @@ -513,21 +519,23 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.account::remove_asset # truncate the stack - swapw dropw + exec.::miden::core::sys::truncate_stack end ", - FUNGIBLE_ASSET = Word::from(non_fungible_asset) + FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), - 
Word::from(account_vault.remove_asset(non_fungible_asset).unwrap()) + exec_output.get_stack_word(0), + account_vault.remove_asset(non_fungible_asset).unwrap().to_value_word() ); assert_eq!( @@ -537,3 +545,177 @@ async fn test_remove_non_fungible_asset_success() -> anyhow::Result<()> { Ok(()) } + +/// Tests that adding two fungible assets results in the expected value. +#[tokio::test] +async fn test_merge_fungible_asset_success() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FungibleAsset::MAX_AMOUNT - FUNGIBLE_ASSET_AMOUNT); + let merged_asset = asset0.unwrap_fungible().add(asset1.unwrap_fungible())?; + + // Check merging is commutative by checking asset0 + asset1 = asset1 + asset0. + for (asset_a, asset_b) in [(asset0, asset1), (asset1, asset0)] { + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSETA} + push.{ASSETB} + exec.fungible_asset::merge + # => [MERGED_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSETA = asset_a.to_value_word(), + ASSETB = asset_b.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + assert_eq!(exec_output.get_stack_word(0), merged_asset.to_value_word()); + } + + Ok(()) +} + +/// Tests that adding two fungible assets fails when the added amounts exceed +/// [`FungibleAsset::MAX_AMOUNT`]. +#[tokio::test] +async fn test_merge_fungible_asset_fails_when_max_amount_exceeded() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FungibleAsset::MAX_AMOUNT + 1 - FUNGIBLE_ASSET_AMOUNT); + + // Check merging fails for both asset0 + asset1 and asset1 + asset0. + for (asset_a, asset_b) in [(asset0, asset1), (asset1, asset0)] { + // Sanity check that the Rust implementation errors. 
+ assert_matches!( + asset_a.unwrap_fungible().add(asset_b.unwrap_fungible()).unwrap_err(), + AssetError::FungibleAssetAmountTooBig(_) + ); + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSETA} + push.{ASSETB} + exec.fungible_asset::merge + # => [MERGED_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSETA = asset_a.to_value_word(), + ASSETB = asset_b.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_output, ERR_VAULT_FUNGIBLE_MAX_AMOUNT_EXCEEDED); + } + + Ok(()) +} + +/// Tests that splitting a fungible asset returns the correct remaining amount. +#[rstest::rstest] +#[case::different_amounts(FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT), FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT - 1))] +#[case::same_amounts( + FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT), + FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT) +)] +#[tokio::test] +async fn test_split_fungible_asset_success( + #[case] asset0: Asset, + #[case] asset1: Asset, +) -> anyhow::Result<()> { + let split_asset = asset0.unwrap_fungible().sub(asset1.unwrap_fungible())?; + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSET0} + push.{ASSET1} + exec.fungible_asset::split + # => [NEW_ASSET_VALUE_0] + + # truncate the stack + swapw dropw + end + ", + ASSET0 = asset0.to_value_word(), + ASSET1 = asset1.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + + assert_eq!(exec_output.get_stack_word(0), split_asset.to_value_word()); + + Ok(()) +} + +/// Tests that splitting a fungible asset fails when the amount to withdraw exceeds the balance. +#[tokio::test] +async fn test_split_fungible_asset_fails_when_amount_exceeds_balance() -> anyhow::Result<()> { + let asset0 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT); + let asset1 = FungibleAsset::mock(FUNGIBLE_ASSET_AMOUNT + 1); + + // Sanity check that the Rust implementation errors. 
+ assert_matches!( + asset0.unwrap_fungible().sub(asset1.unwrap_fungible()).unwrap_err(), + AssetError::FungibleAssetAmountNotSufficient { .. } + ); + + let code = format!( + " + use $kernel::fungible_asset + + begin + push.{ASSET0} + push.{ASSET1} + exec.fungible_asset::split + # => [SPLIT_ASSET] + + # truncate the stack + swapw dropw + end + ", + ASSET0 = asset0.to_value_word(), + ASSET1 = asset1.to_value_word(), + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!( + exec_output, + ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW + ); + + Ok(()) +} + +/// Tests that merging two different fungible assets fails. +#[tokio::test] +async fn test_merge_different_fungible_assets_fails() -> anyhow::Result<()> { + // Create two fungible assets from different faucets + let faucet_id1: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET.try_into().unwrap(); + let faucet_id2: AccountId = ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into().unwrap(); + + let asset0 = FungibleAsset::new(faucet_id1, FUNGIBLE_ASSET_AMOUNT)?; + let asset1 = FungibleAsset::new(faucet_id2, FUNGIBLE_ASSET_AMOUNT)?; + + // Sanity check that the Rust implementation errors when adding assets from different faucets. + assert_matches!( + asset0.add(asset1).unwrap_err(), + AssetError::FungibleAssetInconsistentVaultKeys { .. 
} + ); + + Ok(()) +} diff --git a/crates/miden-testing/src/kernel_tests/tx/test_auth.rs b/crates/miden-testing/src/kernel_tests/tx/test_auth.rs index c400161482..e24157d1a5 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_auth.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_auth.rs @@ -24,10 +24,10 @@ async fn test_auth_procedure_args() -> anyhow::Result<()> { Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ConditionalAuthComponent); let auth_args = [ - ONE, // incr_nonce = true - Felt::new(99), - Felt::new(98), Felt::new(97), + Felt::new(98), + Felt::new(99), + ONE, // incr_nonce = true ]; let tx_context = TransactionContextBuilder::new(account).auth_args(auth_args.into()).build()?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs b/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs new file mode 100644 index 0000000000..a2b827a536 --- /dev/null +++ b/crates/miden-testing/src/kernel_tests/tx/test_callbacks.rs @@ -0,0 +1,708 @@ +extern crate alloc; + +use alloc::collections::BTreeSet; +use alloc::vec::Vec; + +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountComponentCode, + AccountId, + AccountStorageMode, + AccountType, + StorageMap, + StorageMapKey, + StorageSlot, + StorageSlotName, +}; +use miden_protocol::asset::{ + Asset, + AssetCallbackFlag, + AssetCallbacks, + FungibleAsset, + NonFungibleAsset, + NonFungibleAssetDetails, +}; +use miden_protocol::block::account_tree::AccountIdKey; +use miden_protocol::errors::MasmError; +use miden_protocol::note::{NoteTag, NoteType}; +use miden_protocol::utils::sync::LazyLock; +use miden_protocol::{Felt, Word}; +use miden_standards::account::faucets::BasicFungibleFaucet; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::procedure_digest; +use 
miden_standards::testing::account_component::MockFaucetComponent; + +use crate::{AccountState, Auth, MockChain, MockChainBuilder, assert_transaction_executor_error}; + +// CONSTANTS +// ================================================================================================ + +/// MASM code for the BlockList callback component. +/// +/// This procedure checks whether the native account (the one receiving the asset) is in a +/// block list stored in a storage map. If the account is blocked, the callback panics. +const BLOCK_LIST_MASM: &str = r#" +use miden::protocol::active_account +use miden::protocol::native_account +use miden::core::word + +const BLOCK_LIST_MAP_SLOT = word("miden::testing::callbacks::block_list") +const ERR_ACCOUNT_BLOCKED = "the account is blocked and cannot receive this asset" + +#! Asserts that the native account is not in the block list. +#! +#! Inputs: [] +#! Outputs: [] +#! +#! Panics if the native account is in the block list. +#! +#! Invocation: exec +proc assert_native_account_not_blocked + # Get the native account ID + exec.native_account::get_id + # => [native_acct_suffix, native_acct_prefix] + + # Build account ID map key: [0, 0, suffix, prefix] + push.0.0 + # => [ACCOUNT_ID_KEY] + + # Look up in block list storage map + push.BLOCK_LIST_MAP_SLOT[0..2] + exec.active_account::get_map_item + # => [IS_BLOCKED] + + # If IS_BLOCKED is non-zero, account is blocked. + exec.word::eqz + assert.err=ERR_ACCOUNT_BLOCKED + # => [] +end + +#! Callback invoked when an asset with callbacks enabled is added to an account's vault. +#! +#! Checks whether the receiving account is in the block list. If so, panics. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] +#! Outputs: [ASSET_VALUE, pad(12)] +#! +#! Invocation: call +pub proc on_before_asset_added_to_account + exec.assert_native_account_not_blocked + # => [ASSET_KEY, ASSET_VALUE, pad(8)] + + # drop unused asset key + dropw + # => [ASSET_VALUE, pad(12)] +end + +#! 
Callback invoked when an asset with callbacks enabled is added to an output note. +#! +#! Checks whether the native account (the note creator) is in the block list. If so, panics. +#! +#! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] +#! Outputs: [ASSET_VALUE, pad(12)] +#! +#! Invocation: call +pub proc on_before_asset_added_to_note + exec.assert_native_account_not_blocked + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # drop unused asset key + dropw + # => [ASSET_VALUE, note_idx, pad(7)] +end +"#; + +/// The expected error when a blocked account tries to receive an asset with callbacks. +const ERR_ACCOUNT_BLOCKED: MasmError = + MasmError::from_static_str("the account is blocked and cannot receive this asset"); + +// Initialize the Basic Fungible Faucet library only once. +static BLOCK_LIST_COMPONENT_CODE: LazyLock = LazyLock::new(|| { + CodeBuilder::default() + .compile_component_code(BlockList::NAME, BLOCK_LIST_MASM) + .expect("block list library should be valid") +}); + +static BLOCK_LIST_SLOT_NAME: LazyLock = LazyLock::new(|| { + StorageSlotName::new("miden::testing::callbacks::block_list") + .expect("storage slot name should be valid") +}); + +procedure_digest!( + BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_ACCOUNT, + BlockList::NAME, + BlockList::ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_NAME, + || { BLOCK_LIST_COMPONENT_CODE.as_library() } +); + +procedure_digest!( + BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_NOTE, + BlockList::NAME, + BlockList::ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_NAME, + || { BLOCK_LIST_COMPONENT_CODE.as_library() } +); + +// BLOCK LIST +// ================================================================================================ + +/// A test component that implements a block list for the `on_before_asset_added_to_account` +/// callback. +/// +/// When a faucet distributes assets with callbacks enabled, this component checks whether the +/// receiving account is in the block list. If the account is blocked, the transaction fails. 
+struct BlockList { + blocked_accounts: BTreeSet, +} + +impl BlockList { + const NAME: &str = "miden::testing::callbacks::block_list"; + + const ON_BEFORE_ASSET_ADDED_TO_ACCOUNT_PROC_NAME: &str = "on_before_asset_added_to_account"; + + const ON_BEFORE_ASSET_ADDED_TO_NOTE_PROC_NAME: &str = "on_before_asset_added_to_note"; + + /// Creates a new [`BlockList`] with the given set of blocked accounts. + fn new(blocked_accounts: BTreeSet) -> Self { + Self { blocked_accounts } + } + + /// Returns the digest of the `on_before_asset_added_to_account` procedure. + pub fn on_before_asset_added_to_account_digest() -> Word { + *BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_ACCOUNT + } + + /// Returns the digest of the `on_before_asset_added_to_note` procedure. + pub fn on_before_asset_added_to_note_digest() -> Word { + *BLOCK_LIST_ON_BEFORE_ASSET_ADDED_TO_NOTE + } +} + +impl From for AccountComponent { + fn from(block_list: BlockList) -> Self { + // Build the storage map of blocked accounts + let map_entries: Vec<(StorageMapKey, Word)> = block_list + .blocked_accounts + .iter() + .map(|account_id| { + let map_key = StorageMapKey::new(AccountIdKey::new(*account_id).as_word()); + // Non-zero value means the account is blocked + let map_value = Word::new([Felt::ONE, Felt::ZERO, Felt::ZERO, Felt::ZERO]); + (map_key, map_value) + }) + .collect(); + + let storage_map = StorageMap::with_entries(map_entries) + .expect("btree set should guarantee no duplicates"); + + // Build storage slots: block list map + asset callbacks value slot + let mut storage_slots = + vec![StorageSlot::with_map(BLOCK_LIST_SLOT_NAME.clone(), storage_map)]; + storage_slots.extend( + AssetCallbacks::new() + .on_before_asset_added_to_account( + BlockList::on_before_asset_added_to_account_digest(), + ) + .on_before_asset_added_to_note(BlockList::on_before_asset_added_to_note_digest()) + .into_storage_slots(), + ); + let metadata = AccountComponentMetadata::new( + BlockList::NAME, + [AccountType::FungibleFaucet, 
AccountType::NonFungibleFaucet], + ) + .with_description("block list callback component for testing"); + + AccountComponent::new(BLOCK_LIST_COMPONENT_CODE.clone(), storage_slots, metadata) + .expect("block list should satisfy the requirements of a valid account component") + } +} + +// TESTS +// ================================================================================================ + +/// Tests that consuming a callbacks-enabled asset succeeds even when the issuing faucet does not +/// have the callback storage slot or when the callback storage slot contains the empty word. +#[rstest::rstest] +#[case::fungible_empty_storage(AccountType::FungibleFaucet, true)] +#[case::fungible_no_storage(AccountType::FungibleFaucet, false)] +#[case::non_fungible_empty_storage(AccountType::NonFungibleFaucet, true)] +#[case::non_fungible_no_storage(AccountType::NonFungibleFaucet, false)] +#[tokio::test] +async fn test_faucet_without_callback_slot_skips_callback( + #[case] account_type: AccountType, + #[case] has_empty_callback_proc_root: bool, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + + // Create a faucet WITHOUT any AssetCallbacks component. + let mut account_builder = AccountBuilder::new([45u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(account_type) + .with_component(MockFaucetComponent); + + // If callback proc roots should be empty, add the empty storage slots. 
+ if has_empty_callback_proc_root { + let name = "miden::testing::callbacks"; + let slots = AssetCallbacks::new().into_storage_slots(); + let component = AccountComponent::new( + CodeBuilder::new().compile_component_code(name, "pub proc dummy nop end")?, + slots, + AccountComponentMetadata::mock(name), + )?; + account_builder = account_builder.with_component(component); + } + + let faucet = builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + )?; + + // Create a P2ID note with a callbacks-enabled asset from this faucet. + // The faucet does not have the callback slot, but the asset has callbacks enabled. + let asset = match account_type { + AccountType::FungibleFaucet => Asset::from(FungibleAsset::new(faucet.id(), 100)?), + AccountType::NonFungibleFaucet => Asset::from(NonFungibleAsset::new( + &NonFungibleAssetDetails::new(faucet.id(), vec![1])?, + )?), + _ => unreachable!("test only uses faucet account types"), + } + .with_callbacks(AssetCallbackFlag::Enabled); + + let note = + builder.add_p2id_note(faucet.id(), target_account.id(), &[asset], NoteType::Public)?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Consuming the note should succeed: the callback is gracefully skipped because the + // faucet does not define the callback storage slot. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +// ON_ASSET_ADDED_TO_ACCOUNT TESTS +// ================================================================================================ + +/// Tests that the `on_before_asset_added_to_account` callback receives the correct inputs. 
+#[tokio::test] +async fn test_on_before_asset_added_to_account_callback_receives_correct_inputs() +-> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // Create wallet first so we know its ID before building the faucet. + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let wallet_id_suffix = target_account.id().suffix().as_canonical_u64(); + let wallet_id_prefix = target_account.id().prefix().as_u64(); + + let amount: u64 = 100; + + // MASM callback that asserts the inputs match expected values. + let account_callback_masm = format!( + r#" + #! Inputs: [ASSET_KEY, ASSET_VALUE, pad(8)] + #! Outputs: [ASSET_VALUE, pad(12)] + pub proc on_before_asset_added_to_account + # Assert native account ID can be retrieved via native_account::get_id + exec.::miden::protocol::native_account::get_id + # => [native_account_suffix, native_account_prefix, ASSET_KEY, ASSET_VALUE, pad(8)] + push.{wallet_id_suffix} assert_eq.err="callback received unexpected native account ID suffix" + push.{wallet_id_prefix} assert_eq.err="callback received unexpected native account ID prefix" + # => [ASSET_KEY, ASSET_VALUE, pad(8)] + + # duplicate the asset value for returning + dupw.1 swapw + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + + # build the expected asset + push.{amount} + exec.::miden::protocol::active_account::get_id + push.1 + # => [enable_callbacks, active_account_id_suffix, active_account_id_prefix, amount, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + exec.::miden::protocol::asset::create_fungible_asset + # => [EXPECTED_ASSET_KEY, EXPECTED_ASSET_VALUE, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, pad(8)] + + movupw.2 + assert_eqw.err="callback received unexpected asset key" + # => [EXPECTED_ASSET_VALUE, ASSET_VALUE, ASSET_VALUE, pad(8)] + + assert_eqw.err="callback received unexpected asset value" + # => [ASSET_VALUE, pad(12)] + end + "# + ); + + let faucet = add_faucet_with_callbacks(&mut builder, Some(&account_callback_masm), None)?; + + // 
Create a P2ID note with a callbacks-enabled fungible asset. + let fungible_asset = + FungibleAsset::new(faucet.id(), amount)?.with_callbacks(AssetCallbackFlag::Enabled); + let note = builder.add_p2id_note( + faucet.id(), + target_account.id(), + &[Asset::Fungible(fungible_asset)], + NoteType::Public, + )?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Execute the transaction - should succeed because all callback assertions pass. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +/// Tests that a blocked account cannot receive an asset with callbacks enabled. +#[rstest::rstest] +#[case::fungible( + AccountType::FungibleFaucet, + |faucet_id| { + Ok(FungibleAsset::new(faucet_id, 100)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[case::non_fungible( + AccountType::NonFungibleFaucet, + |faucet_id| { + let details = NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4])?; + Ok(NonFungibleAsset::new(&details)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[tokio::test] +async fn test_blocked_account_cannot_receive_asset( + #[case] account_type: AccountType, + #[case] create_asset: impl FnOnce(AccountId) -> anyhow::Result, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let faucet = add_faucet_with_block_list(&mut builder, account_type, [target_account.id()])?; + + let note = builder.add_p2id_note( + faucet.id(), + target_account.id(), + &[create_asset(faucet.id())?], + NoteType::Public, + )?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let result = mock_chain + .build_tx_context(target_account.id(), 
&[note.id()], &[])? + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_ACCOUNT_BLOCKED); + + Ok(()) +} + +// ON_ASSET_ADDED_TO_NOTE TESTS +// ================================================================================================ + +/// Tests that a blocked account cannot add a callbacks-enabled asset to an output note. +#[rstest::rstest] +#[case::fungible( + AccountType::FungibleFaucet, + |faucet_id| { + Ok(FungibleAsset::new(faucet_id, 100)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[case::non_fungible( + AccountType::NonFungibleFaucet, + |faucet_id| { + let details = NonFungibleAssetDetails::new(faucet_id, vec![1, 2, 3, 4])?; + Ok(NonFungibleAsset::new(&details)?.with_callbacks(AssetCallbackFlag::Enabled).into()) + } +)] +#[tokio::test] +async fn test_blocked_account_cannot_add_asset_to_note( + #[case] account_type: AccountType, + #[case] create_asset: impl FnOnce(AccountId) -> anyhow::Result, +) -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let faucet = add_faucet_with_block_list(&mut builder, account_type, [target_account.id()])?; + let asset = create_asset(faucet.id())?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + // Build a tx script that creates a private output note and adds the callbacks-enabled asset. + // We use a private note to avoid the public note details requirement in the advice provider. 
+ let recipient = Word::from([0u32, 1, 2, 3]); + let script_code = format!( + r#" + use miden::protocol::output_note + + begin + push.{recipient} + push.{note_type} + push.{tag} + exec.output_note::create + + push.{asset_value} + push.{asset_key} + exec.output_note::add_asset + end + "#, + recipient = recipient, + note_type = NoteType::Private as u8, + tag = NoteTag::default(), + asset_value = asset.to_value_word(), + asset_key = asset.to_key_word(), + ); + + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(&script_code)?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let result = mock_chain + .build_tx_context(target_account.id(), &[], &[])? + .tx_script(tx_script) + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_ACCOUNT_BLOCKED); + + Ok(()) +} + +/// Tests that the `on_before_asset_added_to_note` callback receives the correct inputs. +/// +/// Creates two output notes so that the asset is added to note at index 1, verifying that +/// `note_idx` is correctly passed to the callback (using 1 instead of the default element of 0). +#[tokio::test] +async fn test_on_before_asset_added_to_note_callback_receives_correct_inputs() -> anyhow::Result<()> +{ + let mut builder = MockChain::builder(); + + // Create wallet first so we know its ID before building the faucet. + let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; + let wallet_id_suffix = target_account.id().suffix().as_canonical_u64(); + let wallet_id_prefix = target_account.id().prefix().as_u64(); + + let amount: u64 = 100; + + // MASM callback that asserts the inputs match expected values. + let note_callback_masm = format!( + r#" + const ERR_WRONG_NOTE_IDX = "callback received unexpected note_idx" + + #! Inputs: [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + #! 
Outputs: [ASSET_VALUE, pad(12)] + pub proc on_before_asset_added_to_note + # Assert native account ID can be retrieved via native_account::get_id + exec.::miden::protocol::native_account::get_id + # => [native_account_suffix, native_account_prefix, ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + push.{wallet_id_suffix} assert_eq.err="callback received unexpected native account ID suffix" + push.{wallet_id_prefix} assert_eq.err="callback received unexpected native account ID prefix" + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # Assert note_idx == 1 (we create two notes, adding the asset to the second one) + dup.8 push.1 assert_eq.err=ERR_WRONG_NOTE_IDX + # => [ASSET_KEY, ASSET_VALUE, note_idx, pad(7)] + + # duplicate the asset value for returning + dupw.1 swapw + # => [ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + # build the expected asset + push.{amount} + exec.::miden::protocol::active_account::get_id + push.1 + # => [enable_callbacks, active_account_id_suffix, active_account_id_prefix, amount, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + exec.::miden::protocol::asset::create_fungible_asset + # => [EXPECTED_ASSET_KEY, EXPECTED_ASSET_VALUE, ASSET_KEY, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + movupw.2 + assert_eqw.err="callback received unexpected asset key" + # => [EXPECTED_ASSET_VALUE, ASSET_VALUE, ASSET_VALUE, note_idx, pad(7)] + + assert_eqw.err="callback received unexpected asset value" + # => [ASSET_VALUE, note_idx, pad(7)] + end + "# + ); + + let faucet = add_faucet_with_callbacks(&mut builder, None, Some(¬e_callback_masm))?; + + // Create a P2ID note with a callbacks-enabled fungible asset. + // Consuming this note adds the asset to the wallet's vault. 
+ let fungible_asset = + FungibleAsset::new(faucet.id(), amount)?.with_callbacks(AssetCallbackFlag::Enabled); + let asset = Asset::Fungible(fungible_asset); + let note = + builder.add_p2id_note(faucet.id(), target_account.id(), &[asset], NoteType::Public)?; + + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + // Build a tx script that creates two output notes and moves the asset from vault to the + // second note (note_idx=1), so we can verify that the callback receives the correct + // note_idx. + let script_code = format!( + r#" + use mock::util + + begin + # Create note 0 (just to consume index 0) + exec.util::create_default_note drop + # => [] + + # Create note 1 + push.{asset_value} + push.{asset_key} + # => [ASSET_KEY, ASSET_VALUE] + exec.util::create_default_note_with_moved_asset + # => [] + + dropw dropw + end + "#, + asset_value = asset.to_value_word(), + asset_key = asset.to_key_word(), + ); + + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(&script_code)?; + + let faucet_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + // Execute the transaction: consume the P2ID note (asset enters vault), then move the asset + // to output note 1. Should succeed because all callback assertions pass. + mock_chain + .build_tx_context(target_account.id(), &[note.id()], &[])? + .tx_script(tx_script) + .foreign_accounts(vec![faucet_inputs]) + .build()? + .execute() + .await?; + + Ok(()) +} + +// HELPERS +// ================================================================================================ + +/// Builds a fungible faucet with the block list callback component and adds it to the builder. +/// +/// The block list component registers both the account and note callbacks. When a +/// callbacks-enabled asset is added to an account or note, the callback checks whether the +/// native account is in the block list and panics if so. 
+fn add_faucet_with_block_list( + builder: &mut MockChainBuilder, + account_type: AccountType, + blocked_accounts: impl IntoIterator, +) -> anyhow::Result { + let block_list = BlockList::new(blocked_accounts.into_iter().collect()); + + if !account_type.is_faucet() { + anyhow::bail!("account type must be of type faucet") + } + + let account_builder = AccountBuilder::new([42u8; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(account_type) + .with_component(MockFaucetComponent) + .with_component(block_list); + + builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + ) +} + +/// Builds a fungible faucet with custom callback MASM code and adds it to the builder. +/// +/// `account_callback_masm` and `note_callback_masm` are optional MASM source for the +/// `on_before_asset_added_to_account` and `on_before_asset_added_to_note` procedures. Each +/// string should contain a complete `pub proc ... end` block including any constants needed. 
+fn add_faucet_with_callbacks( + builder: &mut MockChainBuilder, + account_callback_masm: Option<&str>, + note_callback_masm: Option<&str>, +) -> anyhow::Result { + let component_name = "miden::testing::callbacks::input_validator"; + + let masm_source = + format!("{}\n{}", account_callback_masm.unwrap_or(""), note_callback_masm.unwrap_or(""),); + + let callback_code = + CodeBuilder::default().compile_component_code(component_name, &masm_source)?; + + let mut callbacks = AssetCallbacks::new(); + + if account_callback_masm.is_some() { + let path = format!("{component_name}::on_before_asset_added_to_account"); + let proc_root = callback_code + .as_library() + .get_procedure_root_by_path(path.as_str()) + .expect("account callback procedure should exist"); + callbacks = callbacks.on_before_asset_added_to_account(proc_root); + } + + if note_callback_masm.is_some() { + let path = format!("{component_name}::on_before_asset_added_to_note"); + let proc_root = callback_code + .as_library() + .get_procedure_root_by_path(path.as_str()) + .expect("note callback procedure should exist"); + callbacks = callbacks.on_before_asset_added_to_note(proc_root); + } + + let basic_faucet = BasicFungibleFaucet::new("SYM".try_into()?, 8, Felt::new(1_000_000))?; + + let callback_storage_slots = callbacks.into_storage_slots(); + let callback_metadata = + AccountComponentMetadata::new(component_name, [AccountType::FungibleFaucet]) + .with_description("callback component for testing"); + let callback_component = + AccountComponent::new(callback_code, callback_storage_slots, callback_metadata)?; + + let account_builder = AccountBuilder::new([42; 32]) + .storage_mode(AccountStorageMode::Public) + .account_type(AccountType::FungibleFaucet) + .with_component(basic_faucet) + .with_component(callback_component); + + builder.add_account_from_builder( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + account_builder, + AccountState::Exists, + ) +} diff --git 
a/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs b/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs index 958727913e..d1c07a50ab 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_epilogue.rs @@ -1,10 +1,8 @@ use alloc::string::ToString; -use alloc::vec::Vec; use std::borrow::ToOwned; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; use miden_processor::{Felt, ONE}; -use miden_protocol::Word; use miden_protocol::account::{Account, AccountDelta, AccountStorageDelta, AccountVaultDelta}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::errors::tx_kernel::{ @@ -26,12 +24,12 @@ use miden_protocol::transaction::memory::{ OUTPUT_NOTE_ASSET_COMMITMENT_OFFSET, OUTPUT_NOTE_SECTION_OFFSET, }; -use miden_protocol::transaction::{OutputNote, OutputNotes, TransactionOutputs}; +use miden_protocol::transaction::{RawOutputNote, RawOutputNotes, TransactionOutputs}; +use miden_protocol::{Hasher, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; use miden_standards::testing::note::NoteBuilder; -use super::ZERO; use crate::kernel_tests::tx::ExecutionOutputExt; use crate::utils::{create_p2any_note, create_public_p2any_note}; use crate::{ @@ -55,7 +53,7 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::new(account.clone()) .extend_input_notes(vec![input_note_1]) - .extend_expected_output_notes(vec![OutputNote::Full(output_note_1.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_1.clone())]) .build()?; let code = format!( @@ -74,7 +72,8 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} exec.output_note::add_asset # => [] @@ -87,7 +86,8 @@ async fn 
test_transaction_epilogue() -> anyhow::Result<()> { recipient = output_note_1.recipient().digest(), note_type = Felt::from(output_note_1.metadata().note_type()), tag = Felt::from(output_note_1.metadata().tag()), - asset = Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await?; @@ -96,12 +96,12 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { let mut final_account = account.clone(); final_account.increment_nonce(ONE)?; - let output_notes = OutputNotes::new( + let output_notes = RawOutputNotes::new( tx_context .expected_output_notes() .iter() .cloned() - .map(OutputNote::Full) + .map(RawOutputNote::Full) .collect(), )?; @@ -114,30 +114,41 @@ async fn test_transaction_epilogue() -> anyhow::Result<()> { .to_commitment(); let account_update_commitment = - miden_protocol::Hasher::merge(&[final_account.commitment(), account_delta_commitment]); - - let mut expected_stack = Vec::with_capacity(16); - expected_stack.extend(output_notes.commitment().as_elements().iter().rev()); - expected_stack.extend(account_update_commitment.as_elements().iter().rev()); - expected_stack.extend( - Word::from( - FungibleAsset::new( - tx_context.tx_inputs().block_header().fee_parameters().native_asset_id(), - 0, - ) - .unwrap(), - ) - .iter() - .rev(), - ); - expected_stack.push(Felt::from(u32::MAX)); // Value for tx expiration block number - expected_stack.extend((13..16).map(|_| ZERO)); + Hasher::merge(&[final_account.to_commitment(), account_delta_commitment]); + let fee_asset = FungibleAsset::new( + tx_context.tx_inputs().block_header().fee_parameters().native_asset_id(), + 0, + )?; assert_eq!( - exec_output.stack.as_slice(), - expected_stack.as_slice(), - "Stack state after finalize_transaction does not contain the expected values" + exec_output.get_stack_word(TransactionOutputs::OUTPUT_NOTES_COMMITMENT_WORD_IDX), + output_notes.commitment() + ); + assert_eq!( + 
exec_output.get_stack_word(TransactionOutputs::ACCOUNT_UPDATE_COMMITMENT_WORD_IDX), + account_update_commitment, + ); + assert_eq!( + exec_output.get_stack_element(TransactionOutputs::NATIVE_ASSET_ID_SUFFIX_ELEMENT_IDX), + fee_asset.faucet_id().suffix(), + ); + assert_eq!( + exec_output.get_stack_element(TransactionOutputs::NATIVE_ASSET_ID_PREFIX_ELEMENT_IDX), + fee_asset.faucet_id().prefix().as_felt() + ); + assert_eq!( + exec_output + .get_stack_element(TransactionOutputs::FEE_AMOUNT_ELEMENT_IDX) + .as_canonical_u64(), + fee_asset.amount() + ); + assert_eq!( + exec_output + .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) + .as_canonical_u64(), + u64::from(u32::MAX) ); + assert_eq!(exec_output.get_stack_word(12), Word::empty()); assert_eq!( exec_output.stack.len(), @@ -161,8 +172,8 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::new(account.clone()) .extend_expected_output_notes(vec![ - OutputNote::Full(output_note0.clone()), - OutputNote::Full(output_note1.clone()), + RawOutputNote::Full(output_note0.clone()), + RawOutputNote::Full(output_note1.clone()), ]) .build()?; @@ -187,14 +198,16 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note # => [] ", recipient = note.recipient().digest(), note_type = Felt::from(note.metadata().note_type()), tag = Felt::from(note.metadata().tag()), - asset = Word::from(asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } @@ -231,13 +244,21 @@ async fn test_compute_output_note_id() -> anyhow::Result<()> { Ok(()) } -/// Tests that a transaction fails due to the asset preservation rules when the input note has an -/// asset with amount 100 and the output note has the same asset with amount 200. 
+/// Tests that a transaction fails when assets aren't preserved, i.e. +/// - when the input note has asset amount 100 and the output note has asset amount 200. +/// - when the input note has asset amount 200 and the output note has asset amount 100. +#[rstest::rstest] +#[case::outputs_exceed_inputs(100, 200)] +#[case::inputs_exceed_outputs(200, 100)] #[tokio::test] -async fn epilogue_fails_when_num_output_assets_exceed_num_input_assets() -> anyhow::Result<()> { - // Create an input asset with amount 100 and an output asset with amount 200. - let input_asset = FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, 100)?; - let output_asset = input_asset.add(input_asset)?; +async fn epilogue_fails_when_assets_arent_preserved( + #[case] input_amount: u64, + #[case] output_amount: u64, +) -> anyhow::Result<()> { + let input_asset = + FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, input_amount)?; + let output_asset = + FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, output_amount)?; let mut builder = MockChain::builder(); let account = builder.add_existing_mock_account(Auth::IncrNonce)?; @@ -247,60 +268,7 @@ async fn epilogue_fails_when_num_output_assets_exceed_num_input_assets() -> anyh let input_note = NoteBuilder::new(account.id(), *builder.rng_mut()) .add_assets([Asset::from(input_asset)]) .build()?; - builder.add_output_note(OutputNote::Full(input_note.clone())); - let mock_chain = builder.build()?; - - let code = format!( - " - use mock::account - use mock::util - - begin - # create a note with the output asset - push.{OUTPUT_ASSET} - exec.util::create_default_note_with_asset - # => [] - end - ", - OUTPUT_ASSET = Word::from(output_asset), - ); - - let builder = CodeBuilder::with_mock_libraries(); - let source_manager = builder.source_manager(); - let tx_script = builder.compile_tx_script(code)?; - - let tx_context = mock_chain - .build_tx_context(TxContextInput::AccountId(account.id()), &[], &[input_note])? 
- .tx_script(tx_script) - .with_source_manager(source_manager) - .build()?; - - let exec_output = tx_context.execute().await; - assert_transaction_executor_error!( - exec_output, - ERR_EPILOGUE_TOTAL_NUMBER_OF_ASSETS_MUST_STAY_THE_SAME - ); - - Ok(()) -} - -/// Tests that a transaction fails due to the asset preservation rules when the input note has an -/// asset with amount 200 and the output note has the same asset with amount 100. -#[tokio::test] -async fn epilogue_fails_when_num_input_assets_exceed_num_output_assets() -> anyhow::Result<()> { - // Create an input asset with amount 200 and an output asset with amount 100. - let output_asset = FungibleAsset::new(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1.try_into()?, 100)?; - let input_asset = output_asset.add(output_asset)?; - - let mut builder = MockChain::builder(); - let account = builder.add_existing_mock_account(Auth::IncrNonce)?; - // Add an input note that (automatically) adds its assets to the transaction's input vault, but - // _does not_ add the asset to the account. This is just to keep the test conceptually simple - - // there is no account involved. 
- let input_note = NoteBuilder::new(account.id(), *builder.rng_mut()) - .add_assets([Asset::from(output_asset)]) - .build()?; - builder.add_output_note(OutputNote::Full(input_note.clone())); + builder.add_output_note(RawOutputNote::Full(input_note.clone())); let mock_chain = builder.build()?; let code = format!( @@ -310,12 +278,14 @@ async fn epilogue_fails_when_num_input_assets_exceed_num_output_assets() -> anyh begin # create a note with the output asset - push.{OUTPUT_ASSET} + push.{OUTPUT_ASSET_VALUE} + push.{OUTPUT_ASSET_KEY} exec.util::create_default_note_with_asset # => [] end ", - OUTPUT_ASSET = Word::from(input_asset), + OUTPUT_ASSET_KEY = output_asset.to_key_word(), + OUTPUT_ASSET_VALUE = output_asset.to_value_word(), ); let builder = CodeBuilder::with_mock_libraries(); @@ -379,7 +349,7 @@ async fn test_block_expiration_height_monotonically_decreases() -> anyhow::Resul assert_eq!( exec_output .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .as_int(), + .as_canonical_u64(), expected_expiry ); } @@ -439,7 +409,7 @@ async fn test_no_expiration_delta_set() -> anyhow::Result<()> { assert_eq!( exec_output .get_stack_element(TransactionOutputs::EXPIRATION_BLOCK_ELEMENT_IDX) - .as_int() as u32, + .as_canonical_u64() as u32, u32::MAX ); @@ -498,7 +468,7 @@ async fn epilogue_fails_on_account_state_change_without_nonce_increment() -> any push.91.92.93.94 push.MOCK_VALUE_SLOT0[0..2] repeat.5 movup.5 drop end - # => [slot_id_prefix, slot_id_suffix, VALUE] + # => [slot_id_suffix, slot_id_prefix, VALUE] call.account::set_item # => [PREV_VALUE] dropw diff --git a/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs b/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs index 907a3b58a6..40425fdf7f 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_faucet.rs @@ -1,23 +1,20 @@ use alloc::sync::Arc; -use miden_protocol::account::{ - Account, - AccountBuilder, - 
AccountComponent, - AccountId, - AccountStorage, - AccountType, - StorageMap, -}; +use miden_protocol::account::{Account, AccountBuilder, AccountComponent, AccountId, AccountType}; use miden_protocol::assembly::DefaultSourceManager; -use miden_protocol::asset::{FungibleAsset, NonFungibleAsset}; +use miden_protocol::asset::{ + AssetCallbackFlag, + AssetId, + AssetVaultKey, + FungibleAsset, + NonFungibleAsset, +}; use miden_protocol::errors::tx_kernel::{ - ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT, - ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED, - ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND, + ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT, ERR_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN, ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN, ERR_VAULT_FUNGIBLE_ASSET_AMOUNT_LESS_THAN_AMOUNT_TO_WITHDRAW, + ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND, }; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, @@ -28,12 +25,10 @@ use miden_protocol::testing::account_id::{ use miden_protocol::testing::constants::{ CONSUMED_ASSET_1_AMOUNT, FUNGIBLE_ASSET_AMOUNT, - FUNGIBLE_FAUCET_INITIAL_BALANCE, NON_FUNGIBLE_ASSET_DATA, NON_FUNGIBLE_ASSET_DATA_2, }; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; -use miden_protocol::{Felt, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::mock_account::MockAccountExt; @@ -46,7 +41,7 @@ use crate::{TransactionContextBuilder, assert_execution_error, assert_transactio #[tokio::test] async fn test_mint_fungible_asset_succeeds() -> anyhow::Result<()> { let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); - let expected_final_amount = FUNGIBLE_FAUCET_INITIAL_BALANCE + FUNGIBLE_ASSET_AMOUNT; + let asset = FungibleAsset::new(faucet_id, FUNGIBLE_ASSET_AMOUNT)?; let code = format!( r#" @@ -60,35 +55,38 @@ async fn test_mint_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # mint asset - 
push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.mock_faucet::mint # assert the correct asset is returned - push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} assert_eqw.err="minted asset does not match expected asset" # assert the input vault has been updated exec.memory::get_input_vault_root_ptr - push.{suffix} push.{prefix} - exec.asset_vault::get_balance + push.{FUNGIBLE_ASSET_KEY} + exec.asset_vault::get_asset + # => [ASSET_VALUE] + + # extract balance from asset + movdn.3 drop drop drop + # => [balance] + push.{FUNGIBLE_ASSET_AMOUNT} assert_eq.err="input vault should contain minted asset" - exec.faucet::get_total_issuance - push.{expected_final_amount} - assert_eq.err="expected total issuance to be {expected_final_amount}" + # truncate the stack + dropw end "#, - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), ); - TransactionContextBuilder::with_fungible_faucet( - faucet_id.into(), - Felt::new(FUNGIBLE_FAUCET_INITIAL_BALANCE), - ) - .build()? - .execute_code(&code) - .await?; + TransactionContextBuilder::with_fungible_faucet(faucet_id.into()) + .build()? 
+ .execute_code(&code) + .await?; Ok(()) } @@ -97,17 +95,20 @@ async fn test_mint_fungible_asset_succeeds() -> anyhow::Result<()> { #[tokio::test] async fn mint_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -123,12 +124,11 @@ async fn mint_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> #[tokio::test] async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, - 10u32.into(), - ) - .build()?; + let tx_context = + TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1) + .build()?; + let asset = FungibleAsset::mock(5); let code = format!( " use $kernel::prologue @@ -136,11 +136,13 @@ async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(5)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -149,34 +151,41 @@ async fn test_mint_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> Ok(()) } +/// Tests that minting a fungible asset with [`FungibleAsset::MAX_AMOUNT`] + 1 fails. 
#[tokio::test] -async fn test_mint_fungible_asset_fails_saturate_max_amount() -> anyhow::Result<()> { +async fn test_mint_fungible_asset_fails_when_amount_exceeds_max_representable_amount() +-> anyhow::Result<()> { let code = format!( " use mock::faucet begin - push.{asset} + push.0 + push.0 + push.0 + push.{max_amount_plus_1} + # => [ASSET_VALUE] + + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE] + call.faucet::mint + dropw dropw end ", - asset = Word::from(FungibleAsset::mock(FungibleAsset::MAX_AMOUNT)) + ASSET_KEY = FungibleAsset::mock(0).to_key_word(), + max_amount_plus_1 = FungibleAsset::MAX_AMOUNT + 1, ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; - let result = TransactionContextBuilder::with_fungible_faucet( - FungibleAsset::mock_issuer().into(), - Felt::new(1), - ) - .tx_script(tx_script) - .build()? - .execute() - .await; - - assert_transaction_executor_error!( - result, - ERR_FAUCET_NEW_TOTAL_SUPPLY_WOULD_EXCEED_MAX_ASSET_AMOUNT - ); + let result = + TransactionContextBuilder::with_fungible_faucet(FungibleAsset::mock_issuer().into()) + .tx_script(tx_script) + .build()? 
+ .execute() + .await; + + assert_transaction_executor_error!(result, ERR_FUNGIBLE_ASSET_AMOUNT_EXCEEDS_MAX_AMOUNT); Ok(()) } @@ -188,9 +197,7 @@ async fn test_mint_non_fungible_asset_succeeds() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) .build()?; - let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA); - let asset_vault_key = non_fungible_asset.vault_key(); let code = format!( r#" @@ -202,37 +209,29 @@ async fn test_mint_non_fungible_asset_succeeds() -> anyhow::Result<()> { use $kernel::prologue use mock::faucet->mock_faucet - const FAUCET_SYSDATA_SLOT_NAME = word("{faucet_sysdata_slot_name}") - begin # mint asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.mock_faucet::mint # assert the correct asset is returned - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} assert_eqw.err="minted asset does not match expected asset" # assert the input vault has been updated. 
exec.memory::get_input_vault_root_ptr - push.{non_fungible_asset} - exec.asset_vault::has_non_fungible_asset - assert.err="vault should contain asset" - - # assert the non-fungible asset has been added to the faucet smt - push.FAUCET_SYSDATA_SLOT_NAME[0..2] - exec.account::get_item - push.{asset_vault_key} - exec.smt::get - push.{non_fungible_asset} - assert_eqw.err="minted asset should have been added to faucet SMT" + push.{NON_FUNGIBLE_ASSET_KEY} + exec.asset_vault::get_asset + push.{NON_FUNGIBLE_ASSET_VALUE} + assert_eqw.err="vault should contain asset" + dropw end "#, - faucet_sysdata_slot_name = AccountStorage::faucet_sysdata_slot(), - non_fungible_asset = Word::from(non_fungible_asset), - asset_vault_key = StorageMap::hash_key(asset_vault_key.into()), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset.to_value_word(), ); tx_context.execute_code(&code).await?; @@ -255,11 +254,13 @@ async fn test_mint_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: begin exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{asset_value} + push.{asset_key} call.faucet::mint end ", - non_fungible_asset = Word::from(non_fungible_asset) + asset_key = non_fungible_asset.to_key_word(), + asset_value = non_fungible_asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -272,17 +273,20 @@ async fn test_mint_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: #[tokio::test] async fn mint_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::mint end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; 
@@ -296,31 +300,38 @@ async fn mint_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result Ok(()) } +/// Tests minting a fungible asset with callbacks enabled. #[tokio::test] -async fn test_mint_non_fungible_asset_fails_asset_already_exists() -> anyhow::Result<()> { - let tx_context = - TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) - .build()?; +async fn test_mint_fungible_asset_with_callbacks_enabled() -> anyhow::Result<()> { + let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET).unwrap(); + let asset = FungibleAsset::new(faucet_id, FUNGIBLE_ASSET_AMOUNT)?; - let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); + // Build a vault key with callbacks enabled. + let vault_key = AssetVaultKey::new(AssetId::default(), faucet_id, AssetCallbackFlag::Enabled)?; let code = format!( - " + r#" + use mock::faucet->mock_faucet use $kernel::prologue - use mock::faucet begin exec.prologue::prepare_transaction - push.{non_fungible_asset} - call.faucet::mint + + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} + call.mock_faucet::mint + + dropw dropw end - ", - non_fungible_asset = Word::from(non_fungible_asset) + "#, + FUNGIBLE_ASSET_KEY = vault_key.to_word(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), ); - let exec_output = tx_context.execute_code(&code).await; - - assert_execution_error!(exec_output, ERR_FAUCET_NON_FUNGIBLE_ASSET_ALREADY_ISSUED); + TransactionContextBuilder::with_fungible_faucet(faucet_id.into()) + .build()? 
+ .execute_code(&code) + .await?; Ok(()) } @@ -330,20 +341,11 @@ async fn test_mint_non_fungible_asset_fails_asset_already_exists() -> anyhow::Re #[tokio::test] async fn test_burn_fungible_asset_succeeds() -> anyhow::Result<()> { - let tx_context = { - let account = Account::mock_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, - Felt::new(FUNGIBLE_FAUCET_INITIAL_BALANCE), - ); - let note = create_public_p2any_note( - ACCOUNT_ID_SENDER.try_into().unwrap(), - [FungibleAsset::new(account.id(), 100u64).unwrap().into()], - ); - TransactionContextBuilder::new(account).extend_input_notes(vec![note]).build()? - }; - - let faucet_id = tx_context.account().id(); - let expected_final_amount = FUNGIBLE_FAUCET_INITIAL_BALANCE - FUNGIBLE_ASSET_AMOUNT; + let account = Account::mock_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1); + let asset = FungibleAsset::new(account.id(), 100u64).unwrap().into(); + let note = create_public_p2any_note(ACCOUNT_ID_SENDER.try_into().unwrap(), [asset]); + let tx_context = + TransactionContextBuilder::new(account).extend_input_notes(vec![note]).build()?; let code = format!( r#" @@ -357,29 +359,33 @@ async fn test_burn_fungible_asset_succeeds() -> anyhow::Result<()> { exec.prologue::prepare_transaction # burn asset - push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.mock_faucet::burn # assert the correct asset is returned - push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} assert_eqw.err="burnt asset does not match expected asset" # assert the input vault has been updated exec.memory::get_input_vault_root_ptr - push.{suffix} push.{prefix} - exec.asset_vault::get_balance + push.{FUNGIBLE_ASSET_KEY} + exec.asset_vault::get_asset + # => [ASSET_VALUE] + + # extract balance from asset + movdn.3 drop drop drop + # => [balance] push.{final_input_vault_asset_amount} assert_eq.err="vault balance does not match expected 
balance" - exec.faucet::get_total_issuance - push.{expected_final_amount} - assert_eq.err="expected total issuance to be {expected_final_amount}" + exec.::miden::core::sys::truncate_stack end "#, - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), final_input_vault_asset_amount = CONSUMED_ASSET_1_AMOUNT - FUNGIBLE_ASSET_AMOUNT, ); @@ -392,17 +398,20 @@ async fn test_burn_fungible_asset_succeeds() -> anyhow::Result<()> { #[tokio::test] async fn burn_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - asset = Word::from(FungibleAsset::mock(50)) + FUNGIBLE_ASSET_VALUE = asset.to_value_word(), + FUNGIBLE_ASSET_KEY = asset.to_key_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -418,13 +427,12 @@ async fn burn_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> #[tokio::test] async fn test_burn_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - Felt::try_from(FUNGIBLE_FAUCET_INITIAL_BALANCE).unwrap(), - ) - .build()?; + let tx_context = + TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET) + .build()?; let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1).unwrap(); + let fungible_asset = FungibleAsset::new(faucet_id, FUNGIBLE_ASSET_AMOUNT)?; let code = format!( " @@ -433,12 +441,13 @@ async fn test_burn_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> begin exec.prologue::prepare_transaction - push.{FUNGIBLE_ASSET_AMOUNT} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} 
+ push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -449,13 +458,12 @@ async fn test_burn_fungible_asset_inconsistent_faucet_id() -> anyhow::Result<()> #[tokio::test] async fn test_burn_fungible_asset_insufficient_input_amount() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, - Felt::new(FUNGIBLE_FAUCET_INITIAL_BALANCE), - ) - .build()?; + let tx_context = + TransactionContextBuilder::with_fungible_faucet(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1) + .build()?; let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1).unwrap(); + let fungible_asset = FungibleAsset::new(faucet_id, CONSUMED_ASSET_1_AMOUNT + 1)?; let code = format!( " @@ -464,13 +472,13 @@ async fn test_burn_fungible_asset_insufficient_input_amount() -> anyhow::Result< begin exec.prologue::prepare_transaction - push.{saturating_amount} push.0 push.{suffix} push.{prefix} + push.{FUNGIBLE_ASSET_VALUE} + push.{FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - prefix = faucet_id.prefix().as_felt(), - suffix = faucet_id.suffix(), - saturating_amount = CONSUMED_ASSET_1_AMOUNT + 1 + FUNGIBLE_ASSET_VALUE = fungible_asset.to_value_word(), + FUNGIBLE_ASSET_KEY = fungible_asset.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -490,9 +498,7 @@ async fn test_burn_non_fungible_asset_succeeds() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) .build()?; - let non_fungible_asset_burnt = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); - let burnt_asset_vault_key = non_fungible_asset_burnt.vault_key(); let code = format!( r#" @@ -502,58 +508,46 @@ async fn 
test_burn_non_fungible_asset_succeeds() -> anyhow::Result<()> { use $kernel::prologue use mock::faucet->mock_faucet - const FAUCET_SYSDATA_SLOT_NAME = word("{faucet_sysdata_slot_name}") - begin exec.prologue::prepare_transaction # add non-fungible asset to the vault - exec.memory::get_input_vault_root_ptr push.{non_fungible_asset} + exec.memory::get_input_vault_root_ptr + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} exec.asset_vault::add_non_fungible_asset dropw # check that the non-fungible asset is presented in the input vault exec.memory::get_input_vault_root_ptr - push.{non_fungible_asset} - exec.asset_vault::has_non_fungible_asset - assert.err="input vault should contain the asset" - - # check that the non-fungible asset is in the account map - push.{burnt_asset_vault_key} - push.FAUCET_SYSDATA_SLOT_NAME[0..2] - exec.account::get_map_item - push.{non_fungible_asset} - assert_eqw.err="non-fungible asset should be in the account map" - dropw + push.{NON_FUNGIBLE_ASSET_KEY} + exec.asset_vault::get_asset + push.{NON_FUNGIBLE_ASSET_VALUE} + assert_eqw.err="input vault should contain the asset" # burn the non-fungible asset - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.mock_faucet::burn # assert the correct asset is returned - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} assert_eqw.err="burnt asset does not match expected asset" # assert the input vault has been updated and does not have the burnt asset exec.memory::get_input_vault_root_ptr - push.{non_fungible_asset} - exec.asset_vault::has_non_fungible_asset - not assert.err="input vault should not contain burned asset" - - # assert that the non-fungible asset is no longer in the account map - push.{burnt_asset_vault_key} - push.FAUCET_SYSDATA_SLOT_NAME[0..2] - exec.account::get_map_item - padw - assert_eqw.err="burnt asset should have been removed from map" + push.{NON_FUNGIBLE_ASSET_KEY} + exec.asset_vault::get_asset + # the 
returned word should be empty, indicating the asset is absent + padw assert_eqw.err="input vault should not contain burned asset" + dropw end "#, - faucet_sysdata_slot_name = AccountStorage::faucet_sysdata_slot(), - non_fungible_asset = Word::from(non_fungible_asset_burnt), - burnt_asset_vault_key = burnt_asset_vault_key, + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), ); - tx_context.execute_code(&code).await.unwrap(); + tx_context.execute_code(&code).await?; Ok(()) } @@ -573,16 +567,18 @@ async fn test_burn_non_fungible_asset_fails_does_not_exist() -> anyhow::Result<( begin # burn asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - non_fungible_asset = Word::from(non_fungible_asset_burnt) + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; - assert_execution_error!(exec_output, ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND); + assert_execution_error!(exec_output, ERR_VAULT_NON_FUNGIBLE_ASSET_TO_REMOVE_NOT_FOUND); Ok(()) } @@ -590,17 +586,20 @@ async fn test_burn_non_fungible_asset_fails_does_not_exist() -> anyhow::Result<( #[tokio::test] async fn burn_non_fungible_asset_fails_on_non_faucet_account() -> anyhow::Result<()> { let account = setup_non_faucet_account()?; + let asset = FungibleAsset::mock(50); let code = format!( " use mock::faucet begin - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.faucet::burn end ", - asset = Word::from(FungibleAsset::mock(50)) + ASSET_VALUE = asset.to_value_word(), + ASSET_KEY = asset.to_key_word(), ); let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(code)?; @@ -632,95 +631,18 @@ async fn test_burn_non_fungible_asset_fails_inconsistent_faucet_id() -> anyhow:: 
begin # burn asset exec.prologue::prepare_transaction - push.{non_fungible_asset} + push.{NON_FUNGIBLE_ASSET_VALUE} + push.{NON_FUNGIBLE_ASSET_KEY} call.faucet::burn end ", - non_fungible_asset = Word::from(non_fungible_asset_burnt) + NON_FUNGIBLE_ASSET_VALUE = non_fungible_asset_burnt.to_value_word(), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset_burnt.to_key_word(), ); let exec_output = tx_context.execute_code(&code).await; - assert_execution_error!(exec_output, ERR_FAUCET_NON_FUNGIBLE_ASSET_TO_BURN_NOT_FOUND); - Ok(()) -} - -// IS NON FUNGIBLE ASSET ISSUED TESTS -// ================================================================================================ - -#[tokio::test] -async fn test_is_non_fungible_asset_issued_succeeds() -> anyhow::Result<()> { - // NON_FUNGIBLE_ASSET_DATA_2 is "issued" during the mock faucet creation, so it is already in - // the map of issued assets. - let tx_context = - TransactionContextBuilder::with_non_fungible_faucet(NonFungibleAsset::mock_issuer().into()) - .build()?; - - let non_fungible_asset_1 = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA); - let non_fungible_asset_2 = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); - - let code = format!( - r#" - use $kernel::prologue - use miden::protocol::faucet - - begin - exec.prologue::prepare_transaction - - # check that NON_FUNGIBLE_ASSET_DATA_2 is already issued - push.{non_fungible_asset_2} - exec.faucet::is_non_fungible_asset_issued - - # assert that NON_FUNGIBLE_ASSET_DATA_2 is issued - eq.1 assert.err="non fungible asset data 2 should have been issued" - - # check that NON_FUNGIBLE_ASSET_DATA was not issued yet - push.{non_fungible_asset_1} - exec.faucet::is_non_fungible_asset_issued - - # assert that NON_FUNGIBLE_ASSET_DATA is not issued - eq.0 assert.err="non fungible asset data should have been issued" - end - "#, - non_fungible_asset_1 = Word::from(non_fungible_asset_1), - non_fungible_asset_2 = Word::from(non_fungible_asset_2), - ); - - 
tx_context.execute_code(&code).await.unwrap(); - Ok(()) -} - -// GET TOTAL ISSUANCE TESTS -// ================================================================================================ - -#[tokio::test] -async fn test_get_total_issuance_succeeds() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_fungible_faucet( - ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, - Felt::new(FUNGIBLE_FAUCET_INITIAL_BALANCE), - ) - .build()?; - - let code = format!( - r#" - use $kernel::prologue - use miden::protocol::faucet - - begin - exec.prologue::prepare_transaction - - # get the fungible faucet balance - exec.faucet::get_total_issuance - # => [total_issuance] - - # assert the correct balance is returned - push.{FUNGIBLE_FAUCET_INITIAL_BALANCE} assert_eq.err="total issuance did not match expected value" - # => [] - end - "#, - ); - - tx_context.execute_code(&code).await.unwrap(); + assert_execution_error!(exec_output, ERR_NON_FUNGIBLE_ASSET_FAUCET_IS_NOT_ORIGIN); Ok(()) } @@ -731,6 +653,8 @@ async fn test_get_total_issuance_succeeds() -> anyhow::Result<()> { /// /// This is used to test that calling these procedures fails as expected. fn setup_non_faucet_account() -> anyhow::Result { + use miden_protocol::account::component::AccountComponentMetadata; + // Build a custom non-faucet account that (invalidly) exposes faucet procedures. let faucet_code = CodeBuilder::with_mock_libraries_with_source_manager(Arc::new( DefaultSourceManager::default(), @@ -740,8 +664,11 @@ fn setup_non_faucet_account() -> anyhow::Result { "pub use ::miden::protocol::faucet::mint pub use ::miden::protocol::faucet::burn", )?; - let faucet_component = AccountComponent::new(faucet_code, vec![])? 
- .with_supported_type(AccountType::RegularAccountUpdatableCode); + let metadata = AccountComponentMetadata::new( + "test::non_faucet_component", + [AccountType::RegularAccountUpdatableCode], + ); + let faucet_component = AccountComponent::new(faucet_code, vec![], metadata)?; Ok(AccountBuilder::new([4; 32]) .account_type(AccountType::RegularAccountUpdatableCode) .with_auth_component(NoopAuthComponent) diff --git a/crates/miden-testing/src/kernel_tests/tx/test_fee.rs b/crates/miden-testing/src/kernel_tests/tx/test_fee.rs index 0a865d1098..2256a43ab9 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_fee.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_fee.rs @@ -1,13 +1,13 @@ use anyhow::Context; use assert_matches::assert_matches; -use miden_protocol::account::{AccountId, StorageMap, StorageSlot, StorageSlotName}; +use miden_crypto::rand::test_utils::rand_value; +use miden_protocol::account::{AccountId, StorageMap, StorageMapKey, StorageSlot, StorageSlotName}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; use miden_protocol::note::NoteType; use miden_protocol::testing::account_id::ACCOUNT_ID_NATIVE_ASSET_FAUCET; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote}; use miden_protocol::{self, Felt, Word}; use miden_tx::TransactionExecutorError; -use winter_rand_utils::rand_value; use crate::utils::create_public_p2any_note; use crate::{Auth, MockChain}; @@ -99,7 +99,7 @@ async fn num_tx_cycles_after_compute_fee_are_less_than_estimated( // These constants should always be updated together with the equivalent constants in // epilogue.masm. 
const SMT_SET_ADDITIONAL_CYCLES: usize = 250; - const NUM_POST_COMPUTE_FEE_CYCLES: usize = 500; + const NUM_POST_COMPUTE_FEE_CYCLES: usize = 608; assert!( tx.measurements().after_tx_cycles_obtained @@ -135,7 +135,7 @@ async fn mutate_account_with_storage() -> anyhow::Result { StorageSlot::with_value(StorageSlotName::mock(0), rand_value()), StorageSlot::with_map( StorageSlotName::mock(1), - StorageMap::with_entries([(rand_value(), rand_value())])?, + StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?, ), ], [Asset::from(native_asset), NonFungibleAsset::mock(&[1, 2, 3, 4])], @@ -166,7 +166,7 @@ async fn create_output_notes() -> anyhow::Result { [ StorageSlot::with_map( StorageSlotName::mock(0), - StorageMap::with_entries([(rand_value(), rand_value())])?, + StorageMap::with_entries([(StorageMapKey::from_raw(rand_value()), rand_value())])?, ), StorageSlot::with_value(StorageSlotName::mock(1), rand_value()), ], @@ -178,7 +178,7 @@ async fn create_output_notes() -> anyhow::Result { // This creates a note that adds the given assets to the account vault. let asset_note = create_public_p2any_note(account.id(), [Asset::from(note_asset0.add(note_asset1)?)]); - builder.add_output_note(OutputNote::Full(asset_note.clone())); + builder.add_output_note(RawOutputNote::Full(asset_note.clone())); let output_note0 = create_public_p2any_note(account.id(), [note_asset0.into()]); let output_note1 = create_public_p2any_note(account.id(), [note_asset1.into()]); @@ -188,8 +188,8 @@ async fn create_output_notes() -> anyhow::Result { .build()? .build_tx_context(account, &[asset_note.id(), spawn_note.id()], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(output_note0), - OutputNote::Full(output_note1), + RawOutputNote::Full(output_note0), + RawOutputNote::Full(output_note1), ]) .build()? 
.execute() diff --git a/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs b/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs index addda7b449..9ae7d70fbd 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_fpi.rs @@ -2,12 +2,14 @@ use alloc::sync::Arc; use alloc::vec; use alloc::vec::Vec; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceInputs, Felt}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{EMPTY_WORD, ExecutionOutput, Felt}; +use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ Account, AccountBuilder, AccountComponent, + AccountHeader, AccountId, AccountProcedureRoot, AccountStorage, @@ -37,8 +39,11 @@ use miden_protocol::transaction::memory::{ ACCT_STORAGE_COMMITMENT_OFFSET, ACCT_VAULT_ROOT_OFFSET, NATIVE_ACCOUNT_DATA_PTR, + UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR, + UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR, + UPCOMING_FOREIGN_PROCEDURE_PTR, }; -use miden_protocol::{FieldElement, Word, ZERO}; +use miden_protocol::{Word, ZERO}; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_tx::LocalTransactionProver; @@ -85,8 +90,8 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { CodeBuilder::with_source_manager(source_manager.clone()) .compile_component_code("test::foreign_account", foreign_account_code_source)?, vec![mock_value_slot0.clone(), mock_map_slot.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("test::foreign_account"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -120,6 +125,8 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { // Check the correctness of the memory layout after `get_item_foreign` account procedure // invocation + let get_item_foreign_root = foreign_account.code().procedures()[1].mast_root(); + let code = format!( r#" use miden::core::sys @@ -140,12 +147,12 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.MOCK_VALUE_SLOT0[0..2] # get the hash of the `get_item_foreign` procedure of the foreign account - push.{get_item_foreign_hash} + push.{get_item_foreign_root} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure # => [STORAGE_VALUE_1] @@ -157,13 +164,12 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { mock_value_slot0 = mock_value_slot0.name(), foreign_prefix = foreign_account.id().prefix().as_felt(), foreign_suffix = foreign_account.id().suffix(), - get_item_foreign_hash = foreign_account.code().procedures()[1].mast_root(), ); let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), mock_value_slot0.content().value(), "Value at the top of the stack should be equal to [1, 2, 3, 4]", ); @@ -174,6 +180,8 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { // -------------------------------------------------------------------------------------------- // Check the correctness of the memory 
layout after `get_map_item` account procedure invocation + let get_map_item_foreign_root = foreign_account.code().procedures()[2].mast_root(); + let code = format!( r#" use miden::core::sys @@ -197,12 +205,12 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.MOCK_MAP_SLOT[0..2] # get the hash of the `get_map_item_foreign` account procedure - push.{get_map_item_foreign_hash} + push.{get_map_item_foreign_root} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, MAP_KEY, pad(4)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, MAP_KEY, pad(4)] exec.tx::execute_foreign_procedure # => [MAP_VALUE] @@ -215,13 +223,12 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { foreign_prefix = foreign_account.id().prefix().as_felt(), foreign_suffix = foreign_account.id().suffix(), map_key = STORAGE_LEAVES_2[0].0, - get_map_item_foreign_hash = foreign_account.code().procedures()[2].mast_root(), ); let exec_output = tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), STORAGE_LEAVES_2[0].1, "Value at the top of the stack should be equal [1, 2, 3, 4]", ); @@ -258,9 +265,9 @@ async fn test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_item_foreign_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -277,9 +284,9 @@ async fn 
test_fpi_memory_single_account() -> anyhow::Result<()> { push.{get_item_foreign_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure @@ -346,15 +353,15 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { CodeBuilder::default() .compile_component_code("test::foreign_account_1", foreign_account_code_source_1)?, vec![mock_value_slot0.clone()], - )? - .with_supports_all_types(); + AccountComponentMetadata::mock("test::foreign_account_1"), + )?; let foreign_account_component_2 = AccountComponent::new( CodeBuilder::default() .compile_component_code("test::foreign_account_2", foreign_account_code_source_2)?, vec![mock_value_slot1.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("test::foreign_account_2"), + )?; let foreign_account_1 = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -423,9 +430,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_1_hash} # push the foreign account ID - push.{foreign_1_suffix} push.{foreign_1_prefix} - # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_1_prefix} push.{foreign_1_suffix} + # => [foreign_account_1_id_suffix, foreign_account_1_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -442,9 +449,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_2_hash} # push the foreign account ID - push.{foreign_2_suffix} push.{foreign_2_prefix} - # => [foreign_account_2_id_prefix, foreign_account_2_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_2_prefix} push.{foreign_2_suffix} + # => [foreign_account_2_id_suffix, foreign_account_2_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure dropw # => [] @@ -461,9 +468,9 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { push.{get_item_foreign_1_hash} # push the foreign account ID - push.{foreign_1_suffix} push.{foreign_1_prefix} - # => [foreign_account_1_id_prefix, foreign_account_1_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + push.{foreign_1_prefix} push.{foreign_1_suffix} + # => [foreign_account_1_id_suffix, foreign_account_1_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure @@ -492,25 +499,21 @@ async fn test_fpi_memory_two_accounts() -> anyhow::Result<()> { // Next account slot: [32768; 40959] <- should not be 
initialized // check that the first word of the first foreign account slot is correct + let header = AccountHeader::from(&foreign_account_1); assert_eq!( - exec_output.get_kernel_mem_word(NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32), - Word::new([ - foreign_account_1.id().suffix(), - foreign_account_1.id().prefix().as_felt(), - ZERO, - foreign_account_1.nonce() - ]) + exec_output + .get_kernel_mem_word(NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32) + .as_slice(), + &header.to_elements()[0..4] ); // check that the first word of the second foreign account slot is correct + let header = AccountHeader::from(&foreign_account_2); assert_eq!( - exec_output.get_kernel_mem_word(NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32 * 2), - Word::new([ - foreign_account_2.id().suffix(), - foreign_account_2.id().prefix().as_felt(), - ZERO, - foreign_account_2.nonce() - ]) + exec_output + .get_kernel_mem_word(NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32 * 2) + .as_slice(), + &header.to_elements()[0..4] ); // check that the first word of the third foreign account slot was not initialized @@ -534,9 +537,14 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { let mock_value_slot0 = AccountStorage::mock_value_slot0(); let mock_map_slot = AccountStorage::mock_map_slot(); - let foreign_account_code_source = " + let foreign_account_code_source = r#" use miden::protocol::active_account + use miden::core::sys + #! Gets an item from the active account storage. + #! + #! Inputs: [slot_id_suffix, slot_id_prefix] + #! Outputs: [VALUE] pub proc get_item_foreign # make this foreign procedure unique to make sure that we invoke the procedure of the # foreign account, not the native one @@ -547,21 +555,45 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { movup.6 movup.6 drop drop end + #! Gets a map item from the active account storage. + #! + #! Inputs: [slot_id_suffix, slot_id_prefix, KEY] + #! 
Outputs: [VALUE] pub proc get_map_item_foreign # make this foreign procedure unique to make sure that we invoke the procedure of the # foreign account, not the native one push.2 drop exec.active_account::get_map_item end - "; + + #! Validates the correctness of the top 16 elements on the stack and returns another 16 + #! elements to check that outputs are correctly passed back. + #! + #! Inputs: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] + #! Outputs: [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32] + pub proc assert_inputs_correctness + push.[4, 3, 2, 1] assert_eqw.err="foreign procedure: 0th input word is incorrect" + push.[8, 7, 6, 5] assert_eqw.err="foreign procedure: 1st input word is incorrect" + push.[12, 11, 10, 9] assert_eqw.err="foreign procedure: 2nd input word is incorrect" + push.[16, 15, 14, 13] assert_eqw.err="foreign procedure: 3rd input word is incorrect" + + push.[32, 31, 30, 29] push.[28, 27, 26, 25] + push.[24, 23, 22, 21] push.[20, 19, 18, 17] + # => [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, pad(16)] + + # truncate the stack + exec.sys::truncate_stack + # => [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32] + end + "#; let source_manager = Arc::new(DefaultSourceManager::default()); let foreign_account_component = AccountComponent::new( CodeBuilder::with_kernel_library(source_manager.clone()) .compile_component_code("foreign_account", foreign_account_code_source)?, vec![mock_value_slot0.clone(), mock_map_slot.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -581,42 +613,37 @@ async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { let code = format!( r#" - use miden::core::sys - use miden::protocol::tx const MOCK_VALUE_SLOT0 = word("{mock_value_slot0}") const MOCK_MAP_SLOT = word("{mock_map_slot}") begin - # get the storage item - # pad the stack for the `execute_foreign_procedure` execution - # pad the stack for the `execute_foreign_procedure` execution - padw padw - # => [pad(8)] + # => [pad(16)] + + ### get the storage item ########################################## # push the slot name of desired storage item push.MOCK_VALUE_SLOT0[0..2] + # => [slot_id_suffix, slot_id_prefix, pad(16)] # get the hash of the `get_item_foreign` account procedure procref.::foreign_account::get_item_foreign + # => [FOREIGN_PROC_ROOT, slot_id_suffix, slot_id_prefix, pad(16)] # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT - # slot_id_prefix, slot_id_suffix, pad(8)]] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT + # slot_id_suffix, slot_id_prefix, pad(16)]] exec.tx::execute_foreign_procedure - # => [STORAGE_VALUE] + # => [STORAGE_VALUE, pad(14)] # assert the correctness of the obtained value - push.1.2.3.4 assert_eqw.err="foreign proc returned unexpected value" - # => [] + push.{mock_value0} assert_eqw.err="foreign proc returned unexpected value (1)" + # => [pad(16)] - # get an item from the storage map - # pad the stack for the `execute_foreign_procedure` execution - padw - # => [pad(4)] + ### get the storage map item ###################################### # push the key of desired storage item push.{map_key} @@ -628,22 +655,55 @@ 
async fn test_fpi_execute_foreign_procedure() -> anyhow::Result<()> { procref.::foreign_account::get_map_item_foreign # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, MAP_ITEM_KEY, pad(4)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, MAP_ITEM_KEY, pad(16)] exec.tx::execute_foreign_procedure - # => [MAP_VALUE] + # => [MAP_VALUE, pad(18)] # assert the correctness of the obtained value - push.1.2.3.4 assert_eqw.err="foreign proc returned unexpected value" - # => [] + push.{mock_value0} assert_eqw.err="foreign proc returned unexpected value (2)" + # => [pad(18)] + + ### assert foreign procedure inputs correctness ################### + + # push the elements from 1 to 16 onto the stack as the inputs of the + # `assert_inputs_correctness` account procedure to check that all of them will be + # provided to the procedure correctly + push.[16, 15, 14, 13] + push.[12, 11, 10, 9] + push.[8, 7, 6, 5] + push.[4, 3, 2, 1] + # => [[1, 2, ..., 16], pad(18)] + + # get the hash of the `assert_inputs_correctness` account procedure + procref.::foreign_account::assert_inputs_correctness + # => [FOREIGN_PROC_ROOT, [1, 2, ..., 16], pad(16)] + + # push the foreign account ID + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # [1, 2, ..., 16], pad(18)] + + exec.tx::execute_foreign_procedure + # => [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, pad(18)] + + # assert the correctness of the foreign procedure outputs + push.[20, 19, 18, 17] assert_eqw.err="transaction script: 0th output word is incorrect" + push.[24, 23, 22, 21] assert_eqw.err="transaction script: 0th output word is incorrect" + push.[28, 27, 26, 25] 
assert_eqw.err="transaction script: 0th output word is incorrect" + push.[32, 31, 30, 29] assert_eqw.err="transaction script: 0th output word is incorrect" + + # => [pad(18)] # truncate the stack - exec.sys::truncate_stack + drop drop + # => [pad(16)] end "#, mock_value_slot0 = mock_value_slot0.name(), + mock_value0 = mock_value_slot0.value(), mock_map_slot = mock_map_slot.name(), foreign_prefix = foreign_account.id().prefix().as_felt(), foreign_suffix = foreign_account.id().suffix(), @@ -681,7 +741,7 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu // Create two different assets. let fungible_asset = Asset::Fungible(FungibleAsset::new(fungible_faucet_id, 1)?); let non_fungible_asset = Asset::NonFungible(NonFungibleAsset::new( - &NonFungibleAssetDetails::new(non_fungible_faucet_id.prefix(), vec![1, 2, 3])?, + &NonFungibleAssetDetails::new(non_fungible_faucet_id, vec![1, 2, 3])?, )?); let foreign_account_code_source = format!( @@ -690,12 +750,12 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu pub proc get_asset_balance # get balance of first asset - push.{fungible_faucet_id_suffix} push.{fungible_faucet_id_prefix} + push.{fungible_faucet_id_prefix} push.{fungible_faucet_id_suffix} exec.active_account::get_balance # => [balance] # check presence of non fungible asset - push.{non_fungible_asset_word} + push.{NON_FUNGIBLE_ASSET_KEY} exec.active_account::has_non_fungible_asset # => [has_asset, balance] @@ -710,7 +770,7 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu ", fungible_faucet_id_prefix = fungible_faucet_id.prefix().as_felt(), fungible_faucet_id_suffix = fungible_faucet_id.suffix(), - non_fungible_asset_word = Word::from(non_fungible_asset), + NON_FUNGIBLE_ASSET_KEY = non_fungible_asset.to_key_word(), ); let source_manager = Arc::new(DefaultSourceManager::default()); @@ -718,8 +778,8 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> 
anyhow::Resu CodeBuilder::with_source_manager(source_manager.clone()) .compile_component_code("foreign_account_code", foreign_account_code_source)?, vec![], - )? - .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account_code"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -754,8 +814,8 @@ async fn foreign_account_can_get_balance_and_presence_of_asset() -> anyhow::Resu procref.::foreign_account_code::get_asset_balance # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [has_asset_balance] @@ -803,7 +863,7 @@ async fn foreign_account_get_initial_balance() -> anyhow::Result<()> { pub proc get_initial_balance # push the faucet ID on the stack - push.{fungible_faucet_id_suffix} push.{fungible_faucet_id_prefix} + push.{fungible_faucet_id_prefix} push.{fungible_faucet_id_suffix} # get the initial balance of the asset associated with the provided faucet ID exec.active_account::get_balance @@ -823,8 +883,8 @@ async fn foreign_account_get_initial_balance() -> anyhow::Result<()> { CodeBuilder::with_source_manager(source_manager.clone()) .compile_component_code("foreign_account_code", foreign_account_code_source)?, vec![], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account_code"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -860,8 +920,8 @@ async fn foreign_account_get_initial_balance() -> anyhow::Result<()> { procref.::foreign_account_code::get_initial_balance # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [init_foreign_balance] @@ -932,12 +992,12 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { push.MOCK_VALUE_SLOT1[0..2] # get the hash of the `get_item_foreign` account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the foreign account ID from the advice stack adv_push.2 - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, - # slot_id_prefix, slot_id_suffix, pad(8)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, + # slot_id_suffix, slot_id_prefix, pad(8)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -948,7 +1008,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # get the first element of the value0 storage slot (it should be 1) and add it to the # obtained foreign value. push.MOCK_VALUE_SLOT0[0..2] exec.active_account::get_item - drop drop drop + swap.3 drop drop drop add # assert that the resulting value equals 6 @@ -968,8 +1028,8 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { second_foreign_account_code_source, )?, vec![mock_value_slot0.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("test::second_foreign_account"), + )?; let second_foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -992,11 +1052,11 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # => [pad(15)] # get the hash of the `second_account_foreign_proc` account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the ID of the second foreign account from the advice stack adv_push.2 - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1004,7 +1064,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { # get the second element of the value0 storage slot (it should be 2) and add it to the # obtained foreign value. push.MOCK_VALUE_SLOT0[0..2] exec.active_account::get_item - drop drop swap drop + drop swap.2 drop drop add # assert that the resulting value equals 8 @@ -1020,7 +1080,7 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { exec.active_account::get_item # return the first element of the resulting word - drop drop drop + swap.3 drop drop drop end "#, mock_value_slot0 = mock_value_slot0.name(), @@ -1030,8 +1090,8 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { CodeBuilder::with_kernel_library(source_manager.clone()) .compile_component_code("first_foreign_account", first_foreign_account_code_source)?, vec![mock_value_slot0.clone(), mock_value_slot1.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("first_foreign_account"), + )?; let first_foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1068,16 +1128,16 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { .stack .extend(*second_foreign_account.code().procedures()[1].mast_root()); advice_inputs.stack.extend([ - second_foreign_account.id().suffix(), second_foreign_account.id().prefix().as_felt(), + second_foreign_account.id().suffix(), ]); advice_inputs .stack .extend(*first_foreign_account.code().procedures()[2].mast_root()); advice_inputs.stack.extend([ - first_foreign_account.id().suffix(), first_foreign_account.id().prefix().as_felt(), + first_foreign_account.id().suffix(), ]); let code = format!( @@ -1094,8 +1154,8 @@ async fn test_nested_fpi_cyclic_invocation() -> anyhow::Result<()> { procref.::first_foreign_account::first_account_foreign_proc # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1161,8 +1221,8 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { CodeBuilder::with_kernel_library(source_manager.clone()) .compile_component_code("foreign_account", second_foreign_account_code_source)?, vec![], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account"), + )?; let second_foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1185,8 +1245,8 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { procref.::foreign_account::second_account_foreign_proc # push the ID of the second foreign account - push.{second_foreign_suffix} push.{second_foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{second_foreign_prefix} push.{second_foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] # call the second foreign account exec.tx::execute_foreign_procedure @@ -1204,8 +1264,11 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { let first_foreign_account_code = CodeBuilder::with_kernel_library(source_manager.clone()) .with_dynamically_linked_library(second_foreign_account_component.component_code())? 
.compile_component_code("first_foreign_account", first_foreign_account_code_source)?; - let first_foreign_account_component = - AccountComponent::new(first_foreign_account_code, vec![])?.with_supports_all_types(); + let first_foreign_account_component = AccountComponent::new( + first_foreign_account_code, + vec![], + AccountComponentMetadata::mock("first_foreign_account"), + )?; let first_foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1252,8 +1315,8 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { procref.::first_foreign_account::first_account_foreign_proc # push the first foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [result_from_second] @@ -1284,7 +1347,7 @@ async fn test_prove_fpi_two_foreign_accounts_chain() -> anyhow::Result<()> { .await?; // Prove the executed transaction which uses FPI across two foreign accounts. 
- LocalTransactionProver::default().prove(executed_transaction)?; + LocalTransactionProver::default().prove(executed_transaction).await?; Ok(()) } @@ -1327,10 +1390,12 @@ async fn test_nested_fpi_stack_overflow() -> anyhow::Result<()> { let last_foreign_account_code = CodeBuilder::default() .compile_component_code("test::last_foreign_account", last_foreign_account_code_source) .unwrap(); - let last_foreign_account_component = - AccountComponent::new(last_foreign_account_code, vec![mock_value_slot0.clone()]) - .unwrap() - .with_supports_all_types(); + let last_foreign_account_component = AccountComponent::new( + last_foreign_account_code, + vec![mock_value_slot0.clone()], + AccountComponentMetadata::mock("test::last_foreign_account"), + ) + .unwrap(); let last_foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1357,8 +1422,8 @@ async fn test_nested_fpi_stack_overflow() -> anyhow::Result<()> { push.{next_account_proc_hash} # push the foreign account ID - push.{next_foreign_suffix} push.{next_foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{next_foreign_prefix} push.{next_foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1377,9 +1442,12 @@ async fn test_nested_fpi_stack_overflow() -> anyhow::Result<()> { foreign_account_code_source, ) .unwrap(); - let foreign_account_component = AccountComponent::new(foreign_account_code, vec![]) - .unwrap() - .with_supports_all_types(); + let foreign_account_component = AccountComponent::new( + foreign_account_code, + vec![], + AccountComponentMetadata::mock("test::foreign_account_chain"), + ) + .unwrap(); let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1431,8 +1499,8 @@ async fn 
test_nested_fpi_stack_overflow() -> anyhow::Result<()> { push.{foreign_account_proc_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1475,11 +1543,11 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { # => [pad(15)] # get the hash of the native account procedure from the advice stack - adv_push.4 + padw adv_loadw # push the ID of the native account from the advice stack adv_push.2 - # => [native_account_id_prefix, native_account_id_suffix, NATIVE_PROC_ROOT, pad(15)] + # => [native_account_id_suffix, native_account_id_prefix, NATIVE_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1492,8 +1560,8 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { CodeBuilder::default() .compile_component_code("foreign_account", foreign_account_code_source)?, vec![], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1527,8 +1595,8 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { push.{first_account_foreign_proc_hash} # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, storage_item_index, pad(14)] exec.tx::execute_foreign_procedure # => [storage_value] @@ -1555,7 +1623,7 @@ async fn test_nested_fpi_native_account_invocation() -> anyhow::Result<()> { advice_inputs.stack.extend(*native_account.code().procedures()[3].mast_root()); advice_inputs .stack - .extend([native_account.id().suffix(), native_account.id().prefix().as_felt()]); + .extend([native_account.id().prefix().as_felt(), native_account.id().suffix()]); let result = mock_chain .build_tx_context(native_account.id(), &[], &[]) @@ -1591,8 +1659,8 @@ async fn test_fpi_stale_account() -> anyhow::Result<()> { CodeBuilder::default() .compile_component_code("foreign_account_invalid", foreign_account_code_source)?, vec![mock_value_slot0.clone()], - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account_invalid"), + )?; let mut foreign_account = AccountBuilder::new([5; 32]) .with_auth_component(Auth::IncrNonce) @@ -1656,8 +1724,8 @@ async fn test_fpi_stale_account() -> anyhow::Result<()> { # => [FOREIGN_PROC_ROOT, pad(16)] # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(16)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(16)] exec.tx::execute_foreign_procedure end @@ -1683,15 +1751,15 @@ async fn test_fpi_get_account_id() -> anyhow::Result<()> { pub proc get_current_and_native_ids # get the ID of the current (foreign) account exec.active_account::get_id - # => [acct_id_prefix, acct_id_suffix, pad(16)] + # => [acct_id_suffix, acct_id_prefix, pad(16)] # get the ID of the native account exec.native_account::get_id - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix, pad(16)] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix, pad(16)] # truncate the stack swapw dropw - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix, pad(12)] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix, pad(12)] end "; @@ -1699,8 +1767,8 @@ async fn test_fpi_get_account_id() -> anyhow::Result<()> { CodeBuilder::default() .compile_component_code("foreign_account", foreign_account_code_source)?, Vec::new(), - )? 
- .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1735,22 +1803,22 @@ async fn test_fpi_get_account_id() -> anyhow::Result<()> { procref.::foreign_account::get_current_and_native_ids # push the foreign account ID - push.{foreign_suffix} push.{foreign_prefix} - # => [foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, pad(15)] + push.{foreign_prefix} push.{foreign_suffix} + # => [foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, pad(15)] exec.tx::execute_foreign_procedure - # => [native_acct_id_prefix, native_acct_id_suffix, acct_id_prefix, acct_id_suffix] + # => [native_acct_id_suffix, native_acct_id_prefix, acct_id_suffix, acct_id_prefix] # push the expected native account ID and check that it is equal to the one returned # from the FPI - push.{expected_native_suffix} push.{expected_native_prefix} + push.{expected_native_prefix} push.{expected_native_suffix} exec.account_id::is_equal assert.err="native account ID returned from the FPI is not equal to the expected one" - # => [acct_id_prefix, acct_id_suffix] + # => [acct_id_suffix, acct_id_prefix] # push the expected foreign account ID and check that it is equal to the one returned # from the FPI - push.{foreign_suffix} push.{foreign_prefix} + push.{foreign_prefix} push.{foreign_suffix} exec.account_id::is_equal assert.err="foreign account ID returned from the FPI is not equal to the expected one" # => [] @@ -1785,81 +1853,6 @@ async fn test_fpi_get_account_id() -> anyhow::Result<()> { Ok(()) } -// HELPER FUNCTIONS -// ================================================================================================ - -fn foreign_account_data_memory_assertions( - foreign_account: &Account, - exec_output: &ExecutionOutput, -) { - let foreign_account_data_ptr = NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32; - - 
assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_ID_AND_NONCE_OFFSET), - Word::new([ - foreign_account.id().suffix(), - foreign_account.id().prefix().as_felt(), - ZERO, - foreign_account.nonce() - ]), - ); - - assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_VAULT_ROOT_OFFSET), - foreign_account.vault().root(), - ); - - assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_STORAGE_COMMITMENT_OFFSET), - foreign_account.storage().to_commitment(), - ); - - assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_CODE_COMMITMENT_OFFSET), - foreign_account.code().commitment(), - ); - - assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_NUM_STORAGE_SLOTS_OFFSET), - Word::from([u16::try_from(foreign_account.storage().slots().len()).unwrap(), 0, 0, 0]), - ); - - for (i, elements) in foreign_account - .storage() - .to_elements() - .chunks(StorageSlot::NUM_ELEMENTS / 2) - .enumerate() - { - assert_eq!( - exec_output.get_kernel_mem_word( - foreign_account_data_ptr - + ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET - + (i as u32) * 4 - ), - Word::try_from(elements).unwrap(), - ) - } - - assert_eq!( - exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_NUM_PROCEDURES_OFFSET), - Word::from([u16::try_from(foreign_account.code().num_procedures()).unwrap(), 0, 0, 0]), - ); - - for (i, elements) in foreign_account - .code() - .as_elements() - .chunks(AccountProcedureRoot::NUM_ELEMENTS) - .enumerate() - { - assert_eq!( - exec_output.get_kernel_mem_word( - foreign_account_data_ptr + ACCT_PROCEDURES_SECTION_OFFSET + (i as u32) * 4 - ), - Word::try_from(elements).unwrap(), - ); - } -} - /// Test that get_initial_item and get_initial_map_item work correctly with foreign accounts. 
#[tokio::test] async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() -> anyhow::Result<()> @@ -1901,8 +1894,8 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - CodeBuilder::default() .compile_component_code("foreign_account", foreign_account_code_source)?, vec![mock_value_slot0.clone(), mock_map_slot.clone()], - )? - .with_supports_all_types(); + AccountComponentMetadata::mock("foreign_account"), + )?; let foreign_account = AccountBuilder::new(ChaCha20Rng::from_os_rng().random()) .with_auth_component(Auth::IncrNonce) @@ -1929,7 +1922,7 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - padw padw padw push.0.0.0 # => [pad(15)] procref.::foreign_account::test_get_initial_item - push.{foreign_account_id_suffix} push.{foreign_account_id_prefix} + push.{foreign_account_id_prefix} push.{foreign_account_id_suffix} exec.tx::execute_foreign_procedure push.{expected_value_slot_0} assert_eqw.err="foreign account get_initial_item should work" @@ -1939,7 +1932,7 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - push.{map_key} push.MOCK_MAP_SLOT[0..2] procref.::foreign_account::test_get_initial_map_item - push.{foreign_account_id_suffix} push.{foreign_account_id_prefix} + push.{foreign_account_id_prefix} push.{foreign_account_id_suffix} exec.tx::execute_foreign_procedure push.{map_value} assert_eqw.err="foreign account get_initial_map_item should work" @@ -1969,3 +1962,91 @@ async fn test_get_initial_item_and_get_initial_map_item_with_foreign_account() - Ok(()) } + +// HELPER FUNCTIONS +// ================================================================================================ + +fn foreign_account_data_memory_assertions( + foreign_account: &Account, + exec_output: &ExecutionOutput, +) { + let foreign_account_data_ptr = NATIVE_ACCOUNT_DATA_PTR + ACCOUNT_DATA_LENGTH as u32; + + // assert that the account ID and procedure root stored in the + // 
UPCOMING_FOREIGN_ACCOUNT_{SUFFIX, PREFIX}_PTR and UPCOMING_FOREIGN_PROCEDURE_PTR memory + // pointers respectively hold the ID and root of the account and procedure which were used + // during the FPI + + // foreign account ID prefix should be zero after FPI has ended + assert_eq!(exec_output.get_kernel_mem_element(UPCOMING_FOREIGN_ACCOUNT_PREFIX_PTR), ZERO); + + // foreign account ID suffix should be zero after FPI has ended + assert_eq!(exec_output.get_kernel_mem_element(UPCOMING_FOREIGN_ACCOUNT_SUFFIX_PTR), ZERO); + + // foreign procedure root should be zero word after FPI has ended + assert_eq!(exec_output.get_kernel_mem_word(UPCOMING_FOREIGN_PROCEDURE_PTR), EMPTY_WORD); + + // Check that account id and nonce match. + let header = AccountHeader::from(foreign_account); + assert_eq!( + exec_output + .get_kernel_mem_word(foreign_account_data_ptr + ACCT_ID_AND_NONCE_OFFSET) + .as_slice(), + &header.to_elements()[0..4] + ); + + assert_eq!( + exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_VAULT_ROOT_OFFSET), + foreign_account.vault().root(), + ); + + assert_eq!( + exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_STORAGE_COMMITMENT_OFFSET), + foreign_account.storage().to_commitment(), + ); + + assert_eq!( + exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_CODE_COMMITMENT_OFFSET), + foreign_account.code().commitment(), + ); + + assert_eq!( + exec_output.get_kernel_mem_word(foreign_account_data_ptr + ACCT_NUM_STORAGE_SLOTS_OFFSET), + Word::from([u16::try_from(foreign_account.storage().slots().len()).unwrap(), 0, 0, 0]), + ); + + for (i, elements) in foreign_account + .storage() + .to_elements() + .chunks(StorageSlot::NUM_ELEMENTS / 2) + .enumerate() + { + assert_eq!( + exec_output.get_kernel_mem_word( + foreign_account_data_ptr + + ACCT_ACTIVE_STORAGE_SLOTS_SECTION_OFFSET + + (i as u32) * 4 + ), + Word::try_from(elements).unwrap(), + ) + } + + assert_eq!( + exec_output.get_kernel_mem_word(foreign_account_data_ptr + 
ACCT_NUM_PROCEDURES_OFFSET), + Word::from([u16::try_from(foreign_account.code().num_procedures()).unwrap(), 0, 0, 0]), + ); + + for (i, elements) in foreign_account + .code() + .as_elements() + .chunks(AccountProcedureRoot::NUM_ELEMENTS) + .enumerate() + { + assert_eq!( + exec_output.get_kernel_mem_word( + foreign_account_data_ptr + ACCT_PROCEDURES_SECTION_OFFSET + (i as u32) * 4 + ), + Word::try_from(elements).unwrap(), + ); + } +} diff --git a/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs index 84a9d9322a..51d746748c 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_input_note.rs @@ -2,6 +2,7 @@ use alloc::string::String; use miden_protocol::Word; use miden_protocol::note::Note; +use miden_protocol::transaction::memory::{ASSET_SIZE, ASSET_VALUE_OFFSET}; use miden_standards::code_builder::CodeBuilder; use super::{TestSetup, setup_test}; @@ -168,16 +169,16 @@ async fn test_get_sender() -> anyhow::Result<()> { # get the sender from the input note push.0 exec.input_note::get_sender - # => [sender_id_prefix, sender_id_suffix] - - # assert the correctness of the prefix - push.{sender_prefix} - assert_eq.err="sender id prefix of the note 0 is incorrect" - # => [sender_id_suffix] + # => [sender_id_suffix, sender_id_prefix] # assert the correctness of the suffix push.{sender_suffix} assert_eq.err="sender id suffix of the note 0 is incorrect" + # => [sender_id_prefix] + + # assert the correctness of the prefix + push.{sender_prefix} + assert_eq.err="sender id prefix of the note 0 is incorrect" # => [] end "#, @@ -234,20 +235,32 @@ async fn test_get_assets() -> anyhow::Result<()> { for (asset_index, asset) in note.assets().iter().enumerate() { check_assets_code.push_str(&format!( r#" - # load the asset stored in memory - padw dup.4 mem_loadw_be - # => [STORED_ASSET, dest_ptr, note_index] + # load the asset key stored in 
memory + padw dup.4 mem_loadw_le + # => [STORED_ASSET_KEY, dest_ptr, note_index] + + # assert the asset key matches + push.{NOTE_ASSET_KEY} + assert_eqw.err="expected asset key at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_KEY}" + # => [dest_ptr, note_index] + + # load the asset value stored in memory + padw dup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le + # => [STORED_ASSET_VALUE, dest_ptr, note_index] - # assert the asset - push.{NOTE_ASSET} - assert_eqw.err="asset {asset_index} of the note {note_index} is incorrect" + # assert the asset value matches + push.{NOTE_ASSET_VALUE} + assert_eqw.err="expected asset value at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_VALUE}" # => [dest_ptr, note_index] # move the pointer - add.4 - # => [dest_ptr+4, note_index] + add.{ASSET_SIZE} + # => [dest_ptr+ASSET_SIZE, note_index] "#, - NOTE_ASSET = Word::from(*asset), + NOTE_ASSET_KEY = asset.to_key_word(), + NOTE_ASSET_VALUE = asset.to_value_word(), asset_index = asset_index, note_index = note_index, )); @@ -272,8 +285,8 @@ async fn test_get_assets() -> anyhow::Result<()> { end ", check_note_0 = check_assets_code(0, 0, &p2id_note_0_assets), - check_note_1 = check_assets_code(1, 4, &p2id_note_1_asset), - check_note_2 = check_assets_code(2, 8, &p2id_note_2_assets), + check_note_1 = check_assets_code(1, 8, &p2id_note_1_asset), + check_note_2 = check_assets_code(2, 16, &p2id_note_2_assets), ); let tx_script = CodeBuilder::default().compile_tx_script(code)?; @@ -292,10 +305,10 @@ async fn test_get_assets() -> anyhow::Result<()> { Ok(()) } -/// Check that the number of the inputs and their commitment of a note with one asset -/// obtained from the `input_note::get_inputs_info` procedure is correct. +/// Check that the number of the storage items and their commitment of a note with one asset +/// obtained from the `input_note::get_storage_info` procedure is correct. 
#[tokio::test] -async fn test_get_inputs_info() -> anyhow::Result<()> { +async fn test_get_storage_info() -> anyhow::Result<()> { let TestSetup { mock_chain, account, @@ -309,25 +322,25 @@ async fn test_get_inputs_info() -> anyhow::Result<()> { use miden::protocol::input_note begin - # get the inputs commitment and length from the input note with index 0 (the only one + # get the storage commitment and length from the input note with index 0 (the only one # we have) push.0 - exec.input_note::get_inputs_info - # => [NOTE_INPUTS_COMMITMENT, inputs_num] + exec.input_note::get_storage_info + # => [NOTE_STORAGE_COMMITMENT, num_storage_items] - # assert the correctness of the inputs commitment - push.{INPUTS_COMMITMENT} - assert_eqw.err="note 0 has incorrect inputs commitment" - # => [inputs_num] + # assert the correctness of the storage commitment + push.{STORAGE_COMMITMENT} + assert_eqw.err="note 0 has incorrect storage commitment" + # => [num_storage_items] - # assert the inputs have correct length - push.{inputs_num} - assert_eq.err="note 0 has incorrect inputs length" + # assert the storage has correct length + push.{num_storage_items} + assert_eq.err="note 0 has incorrect number of storage items" # => [] end "#, - INPUTS_COMMITMENT = p2id_note_1_asset.inputs().commitment(), - inputs_num = p2id_note_1_asset.inputs().num_values(), + STORAGE_COMMITMENT = p2id_note_1_asset.storage().commitment(), + num_storage_items = p2id_note_1_asset.storage().num_items(), ); let tx_script = CodeBuilder::default().compile_tx_script(code)?; diff --git a/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs b/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs index cf0216fa6d..b30c909220 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_lazy_loading.rs @@ -3,7 +3,7 @@ //! Once lazy loading is enabled generally, it can be removed and/or integrated into other tests. 
use miden_protocol::LexicographicWord; -use miden_protocol::account::{AccountId, AccountStorage, StorageSlotDelta}; +use miden_protocol::account::{AccountId, AccountStorage, StorageMapKey, StorageSlotDelta}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_NATIVE_ASSET_FAUCET, @@ -43,15 +43,19 @@ async fn adding_fungible_assets_with_lazy_loading_succeeds() -> anyhow::Result<( use mock::account begin - push.{FUNGIBLE_ASSET1} - call.account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE1} + push.{FUNGIBLE_ASSET_KEY1} + call.account::add_asset dropw dropw - push.{FUNGIBLE_ASSET2} - call.account::add_asset dropw + push.{FUNGIBLE_ASSET_VALUE2} + push.{FUNGIBLE_ASSET_KEY2} + call.account::add_asset dropw dropw end ", - FUNGIBLE_ASSET1 = Word::from(fungible_asset1), - FUNGIBLE_ASSET2 = Word::from(fungible_asset2) + FUNGIBLE_ASSET_KEY1 = fungible_asset1.to_key_word(), + FUNGIBLE_ASSET_VALUE1 = fungible_asset1.to_value_word(), + FUNGIBLE_ASSET_KEY2 = fungible_asset2.to_key_word(), + FUNGIBLE_ASSET_VALUE2 = fungible_asset2.to_value_word() ); let builder = CodeBuilder::with_mock_libraries(); @@ -91,25 +95,37 @@ async fn removing_fungible_assets_with_lazy_loading_succeeds() -> anyhow::Result use mock::util begin - push.{FUNGIBLE_ASSET1} + push.{FUNGIBLE_ASSET1_VALUE} + push.{FUNGIBLE_ASSET1_KEY} call.account::remove_asset + # drop the excess words from the call + dropw dropw # => [] # move asset to note to adhere to asset preservation rules + push.{FUNGIBLE_ASSET1_VALUE} + push.{FUNGIBLE_ASSET1_KEY} exec.util::create_default_note_with_asset # => [] - push.{FUNGIBLE_ASSET2} + push.{FUNGIBLE_ASSET2_VALUE} + push.{FUNGIBLE_ASSET2_KEY} call.account::remove_asset - # => [ASSET] + # drop the excess words from the call + dropw dropw + # => [] # move asset to note to adhere to asset preservation rules + push.{FUNGIBLE_ASSET2_VALUE} + push.{FUNGIBLE_ASSET2_KEY} exec.util::create_default_note_with_asset # => [] end ", - 
FUNGIBLE_ASSET1 = Word::from(fungible_asset1), - FUNGIBLE_ASSET2 = Word::from(fungible_asset2) + FUNGIBLE_ASSET1_KEY = fungible_asset1.to_key_word(), + FUNGIBLE_ASSET1_VALUE = fungible_asset1.to_value_word(), + FUNGIBLE_ASSET2_KEY = fungible_asset2.to_key_word(), + FUNGIBLE_ASSET2_VALUE = fungible_asset2.to_value_word(), ); let builder = CodeBuilder::with_mock_libraries(); @@ -171,9 +187,9 @@ async fn setting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { let mock_map = AccountStorage::mock_map(); let existing_key = *mock_map.entries().next().unwrap().0; - let non_existent_key = Word::from([5, 5, 5, 5u32]); + let non_existent_key = StorageMapKey::from_array([5, 5, 5, 5u32]); assert!( - mock_map.open(&non_existent_key).get(&non_existent_key).unwrap() == Word::empty(), + mock_map.open(&non_existent_key).get(non_existent_key).unwrap() == Word::empty(), "test setup requires that the non existent key does not exist" ); @@ -194,14 +210,14 @@ async fn setting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { push.{value0} push.{existing_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item # Insert a non-existent key. 
push.{value1} push.{non_existent_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY, VALUE] + # => [slot_id_suffix, slot_id_prefix, KEY, VALUE] call.account::set_map_item exec.::miden::core::sys::truncate_stack @@ -243,9 +259,9 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { let mock_map = AccountStorage::mock_map(); let (existing_key, existing_value) = mock_map.entries().next().unwrap(); - let non_existent_key = Word::from([5, 5, 5, 5u32]); + let non_existent_key = StorageMapKey::from_array([5, 5, 5, 5u32]); assert!( - mock_map.open(&non_existent_key).get(&non_existent_key).unwrap() == Word::empty(), + mock_map.open(&non_existent_key).get(non_existent_key).unwrap() == Word::empty(), "test setup requires that the non existent key does not exist" ); @@ -262,7 +278,7 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { # Fetch value from existing key. push.{existing_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] + # => [slot_id_suffix, slot_id_prefix, KEY] call.account::get_map_item push.{existing_value} @@ -271,7 +287,7 @@ async fn getting_map_item_with_lazy_loading_succeeds() -> anyhow::Result<()> { # Fetch a non-existent key. 
push.{non_existent_key} push.MOCK_MAP_SLOT[0..2] - # => [slot_id_prefix, slot_id_suffix, KEY] + # => [slot_id_suffix, slot_id_prefix, KEY] call.account::get_map_item padw assert_eqw.err="non-existent value should be the empty word" diff --git a/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs b/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs index fca31cf22a..58984af5df 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_link_map.rs @@ -3,11 +3,11 @@ use std::collections::BTreeMap; use std::string::String; use anyhow::Context; +use miden_crypto::rand::test_utils::rand_value; use miden_processor::{ONE, ZERO}; -use miden_protocol::{EMPTY_WORD, LexicographicWord, Word}; +use miden_protocol::{EMPTY_WORD, Felt, LexicographicWord, Word}; use miden_tx::{LinkMap, MemoryViewer}; use rand::seq::IteratorRandom; -use winter_rand_utils::rand_value; use crate::TransactionContextBuilder; @@ -176,7 +176,7 @@ async fn insertion() -> anyhow::Result<()> { let exec_output = tx_context.execute_code(&code).await.context("failed to execute code")?; let mem_viewer = MemoryViewer::ExecutionOutputs(&exec_output); - let map = LinkMap::new(map_ptr.into(), &mem_viewer); + let map = LinkMap::new(Felt::from(map_ptr), &mem_viewer); let mut map_iter = map.iter(); let entry0 = map_iter.next().expect("map should have four entries"); @@ -546,7 +546,7 @@ async fn execute_link_map_test(operations: Vec) -> anyhow::Result let mem_viewer = MemoryViewer::ExecutionOutputs(&exec_output); for (map_ptr, control_map) in control_maps { - let map = LinkMap::new(map_ptr.into(), &mem_viewer); + let map = LinkMap::new(Felt::from(map_ptr), &mem_viewer); let actual_map_len = map.iter().count(); assert_eq!( actual_map_len, diff --git a/crates/miden-testing/src/kernel_tests/tx/test_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_note.rs index 49a5be0c60..dd2a5b5e9d 100644 --- 
a/crates/miden-testing/src/kernel_tests/tx/test_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_note.rs @@ -2,32 +2,30 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use anyhow::Context; -use miden_processor::fast::ExecutionOutput; -use miden_protocol::account::auth::PublicKeyCommitment; +use miden_processor::ExecutionOutput; +use miden_protocol::account::auth::{AuthScheme, PublicKeyCommitment}; use miden_protocol::account::{AccountBuilder, AccountId}; use miden_protocol::assembly::DefaultSourceManager; -use miden_protocol::assembly::diagnostics::miette::{self, miette}; use miden_protocol::asset::FungibleAsset; -use miden_protocol::crypto::dsa::falcon512_rpo::SecretKey; +use miden_protocol::crypto::dsa::falcon512_poseidon2::SecretKey; use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; use miden_protocol::errors::MasmError; use miden_protocol::note::{ Note, NoteAssets, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; use miden_protocol::testing::account_id::{ - ACCOUNT_ID_NETWORK_FUNGIBLE_FAUCET, ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; use miden_protocol::transaction::memory::ACTIVE_INPUT_NOTE_PTR; -use miden_protocol::transaction::{OutputNote, TransactionArgs}; -use miden_protocol::{Felt, Hasher, Word, ZERO}; +use miden_protocol::transaction::{RawOutputNote, TransactionArgs}; +use miden_protocol::{Felt, Word}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::note::NoteBuilder; @@ -48,7 +46,9 @@ use crate::{ async fn test_note_setup() -> anyhow::Result<()> { let tx_context = { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth)?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let p2id_note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), account.id(), @@ 
-71,7 +71,7 @@ async fn test_note_setup() -> anyhow::Result<()> { exec.prologue::prepare_transaction exec.note::prepare_note # => [note_script_root_ptr, NOTE_ARGS, pad(11), pad(16)] - padw movup.4 mem_loadw_be + padw movup.4 mem_loadw_le # => [SCRIPT_ROOT, NOTE_ARGS, pad(11), pad(16)] # truncate the stack @@ -87,27 +87,25 @@ async fn test_note_setup() -> anyhow::Result<()> { } #[tokio::test] -async fn test_note_script_and_note_args() -> miette::Result<()> { +async fn test_note_script_and_note_args() -> anyhow::Result<()> { let mut tx_context = { let mut builder = MockChain::builder(); - let account = builder.add_existing_wallet(Auth::BasicAuth).map_err(|err| miette!(err))?; - let p2id_note_1 = builder - .add_p2id_note( - ACCOUNT_ID_SENDER.try_into().unwrap(), - account.id(), - &[FungibleAsset::mock(150)], - NoteType::Public, - ) - .map_err(|err| miette!(err))?; - let p2id_note_2 = builder - .add_p2id_note( - ACCOUNT_ID_SENDER.try_into().unwrap(), - account.id(), - &[FungibleAsset::mock(300)], - NoteType::Public, - ) - .map_err(|err| miette!(err))?; - let mut mock_chain = builder.build().map_err(|err| miette!(err))?; + let account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let p2id_note_1 = builder.add_p2id_note( + ACCOUNT_ID_SENDER.try_into().unwrap(), + account.id(), + &[FungibleAsset::mock(150)], + NoteType::Public, + )?; + let p2id_note_2 = builder.add_p2id_note( + ACCOUNT_ID_SENDER.try_into().unwrap(), + account.id(), + &[FungibleAsset::mock(300)], + NoteType::Public, + )?; + let mut mock_chain = builder.build()?; mock_chain.prove_next_block().unwrap(); mock_chain @@ -159,22 +157,21 @@ async fn test_note_script_and_note_args() -> miette::Result<()> { tx_context.set_tx_args(tx_args); let exec_output = tx_context.execute_code(code).await.unwrap(); - assert_eq!(exec_output.get_stack_word_be(0), note_args[0]); - assert_eq!(exec_output.get_stack_word_be(4), note_args[1]); + 
assert_eq!(exec_output.get_stack_word(0), note_args[0]); + assert_eq!(exec_output.get_stack_word(4), note_args[1]); Ok(()) } fn note_setup_stack_assertions(exec_output: &ExecutionOutput, inputs: &TransactionContext) { - let mut expected_stack = [ZERO; 16]; - - // replace the top four elements with the tx script root - let mut note_script_root = *inputs.input_notes().get_note(0).note().script().root(); - note_script_root.reverse(); - expected_stack[..4].copy_from_slice(¬e_script_root); - - // assert that the stack contains the note inputs at the end of execution - assert_eq!(exec_output.stack.as_slice(), expected_stack.as_slice()) + // assert that the stack contains the note storage at the end of execution + assert_eq!( + exec_output.get_stack_word(0), + inputs.input_notes().get_note(0).note().script().root() + ); + assert_eq!(exec_output.get_stack_word(4), Word::empty()); + assert_eq!(exec_output.get_stack_word(8), Word::empty()); + assert_eq!(exec_output.get_stack_word(12), Word::empty()); } fn note_setup_memory_assertions(exec_output: &ExecutionOutput) { @@ -205,27 +202,27 @@ async fn test_build_recipient() -> anyhow::Result<()> { begin # put the values that will be hashed into the memory - push.{word_1} push.{base_addr} mem_storew_be dropw - push.{word_2} push.{addr_1} mem_storew_be dropw + push.{word_1} push.{base_addr} mem_storew_le dropw + push.{word_2} push.{addr_1} mem_storew_le dropw # Test with 4 values (needs padding to 8) push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.4.4000 # num_inputs, inputs_ptr + push.4.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_4] # Test with 5 values (needs padding to 8) push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.5.4000 # num_inputs, inputs_ptr + push.5.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_5, RECIPIENT_4] # Test with 8 values (no padding needed - exactly one rate block) 
push.{script_root} # SCRIPT_ROOT push.{serial_num} # SERIAL_NUM - push.8.4000 # num_inputs, inputs_ptr + push.8.{base_addr} # num_storage_items, storage_ptr exec.note::build_recipient # => [RECIPIENT_8, RECIPIENT_5, RECIPIENT_4] @@ -243,56 +240,45 @@ async fn test_build_recipient() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(&code).await?; - // Create expected NoteInputs for each test case + // Create expected NoteStorage for each test case let inputs_4 = word_1.to_vec(); - let note_inputs_4 = NoteInputs::new(inputs_4.clone())?; + let note_storage_4 = NoteStorage::new(inputs_4.clone())?; let mut inputs_5 = word_1.to_vec(); inputs_5.push(word_2[0]); - let note_inputs_5 = NoteInputs::new(inputs_5.clone())?; + let note_storage_5 = NoteStorage::new(inputs_5.clone())?; let mut inputs_8 = word_1.to_vec(); inputs_8.extend_from_slice(&word_2.to_vec()); - let note_inputs_8 = NoteInputs::new(inputs_8.clone())?; + let note_storage_8 = NoteStorage::new(inputs_8.clone())?; // Create expected recipients and get their digests - let recipient_4 = NoteRecipient::new(serial_num, note_script.clone(), note_inputs_4.clone()); - let recipient_5 = NoteRecipient::new(serial_num, note_script.clone(), note_inputs_5.clone()); - let recipient_8 = NoteRecipient::new(serial_num, note_script.clone(), note_inputs_8.clone()); - - for note_inputs in [ - (note_inputs_4, inputs_4.clone()), - (note_inputs_5, inputs_5.clone()), - (note_inputs_8, inputs_8.clone()), + let recipient_4 = NoteRecipient::new(serial_num, note_script.clone(), note_storage_4.clone()); + let recipient_5 = NoteRecipient::new(serial_num, note_script.clone(), note_storage_5.clone()); + let recipient_8 = NoteRecipient::new(serial_num, note_script.clone(), note_storage_8.clone()); + + for (note_storage, storage_elements) in [ + (note_storage_4, inputs_4.clone()), + (note_storage_5, inputs_5.clone()), + (note_storage_8, inputs_8.clone()), ] { - let inputs_advice_map_key = note_inputs.0.commitment(); + let 
inputs_advice_map_key = note_storage.commitment(); assert_eq!( exec_output.advice.get_mapped_values(&inputs_advice_map_key).unwrap(), - note_inputs.1, - "advice entry with note inputs should contain the unpadded values" - ); - - let num_inputs_advice_map_key = - Hasher::hash_elements(note_inputs.0.commitment().as_elements()); - assert_eq!( - exec_output.advice.get_mapped_values(&num_inputs_advice_map_key).unwrap(), - &[Felt::from(note_inputs.0.num_values())], - "advice entry with num note inputs should contain the original number of values" + storage_elements, + "advice entry with note storage should contain the unpadded values" ); } - let mut expected_stack = alloc::vec::Vec::new(); - expected_stack.extend_from_slice(recipient_4.digest().as_elements()); - expected_stack.extend_from_slice(recipient_5.digest().as_elements()); - expected_stack.extend_from_slice(recipient_8.digest().as_elements()); - expected_stack.reverse(); + assert_eq!(exec_output.get_stack_word(0), recipient_8.digest()); + assert_eq!(exec_output.get_stack_word(4), recipient_5.digest()); + assert_eq!(exec_output.get_stack_word(8), recipient_4.digest()); - assert_eq!(exec_output.stack[0..12], expected_stack); Ok(()) } #[tokio::test] -async fn test_compute_inputs_commitment() -> anyhow::Result<()> { +async fn test_compute_storage_commitment() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; // Define test values as Words @@ -310,31 +296,31 @@ async fn test_compute_inputs_commitment() -> anyhow::Result<()> { begin # put the values that will be hashed into the memory - push.{word_1} push.{base_addr} mem_storew_be dropw - push.{word_2} push.{addr_1} mem_storew_be dropw - push.{word_3} push.{addr_2} mem_storew_be dropw - push.{word_4} push.{addr_3} mem_storew_be dropw - - # push the number of values and pointer to the inputs on the stack - push.5.4000 - # execute the `compute_inputs_commitment` procedure for 5 values - 
exec.note::compute_inputs_commitment + push.{word_1} push.{base_addr} mem_storew_le dropw + push.{word_2} push.{addr_1} mem_storew_le dropw + push.{word_3} push.{addr_2} mem_storew_le dropw + push.{word_4} push.{addr_3} mem_storew_le dropw + + # push the number of values and pointer to the storage on the stack + push.5.{base_addr} + # execute the `compute_storage_commitment` procedure for 5 values + exec.note::compute_storage_commitment # => [HASH_5] - push.8.4000 - # execute the `compute_inputs_commitment` procedure for 8 values - exec.note::compute_inputs_commitment + push.8.{base_addr} + # execute the `compute_storage_commitment` procedure for 8 values + exec.note::compute_storage_commitment # => [HASH_8, HASH_5] - push.15.4000 - # execute the `compute_inputs_commitment` procedure for 15 values - exec.note::compute_inputs_commitment + push.15.{base_addr} + # execute the `compute_storage_commitment` procedure for 15 values + exec.note::compute_storage_commitment # => [HASH_15, HASH_8, HASH_5] - push.0.4000 - # check that calling `compute_inputs_commitment` procedure with 0 elements will result in an + push.0.{base_addr} + # check that calling `compute_storage_commitment` procedure with 0 elements will result in an # empty word - exec.note::compute_inputs_commitment + exec.note::compute_storage_commitment # => [0, 0, 0, 0, HASH_15, HASH_8, HASH_5] # truncate the stack @@ -355,41 +341,38 @@ async fn test_compute_inputs_commitment() -> anyhow::Result<()> { let mut inputs_5 = word_1.to_vec(); inputs_5.push(word_2[0]); - let note_inputs_5_hash = NoteInputs::new(inputs_5)?.commitment(); + let note_storage_5_hash = NoteStorage::new(inputs_5)?.commitment(); let mut inputs_8 = word_1.to_vec(); inputs_8.extend_from_slice(&word_2.to_vec()); - let note_inputs_8_hash = NoteInputs::new(inputs_8)?.commitment(); + let note_storage_8_hash = NoteStorage::new(inputs_8)?.commitment(); let mut inputs_15 = word_1.to_vec(); inputs_15.extend_from_slice(&word_2.to_vec()); 
inputs_15.extend_from_slice(&word_3.to_vec()); inputs_15.extend_from_slice(&word_4[0..3]); - let note_inputs_15_hash = NoteInputs::new(inputs_15)?.commitment(); - - let mut expected_stack = alloc::vec::Vec::new(); + let note_storage_15_hash = NoteStorage::new(inputs_15)?.commitment(); - expected_stack.extend_from_slice(note_inputs_5_hash.as_elements()); - expected_stack.extend_from_slice(note_inputs_8_hash.as_elements()); - expected_stack.extend_from_slice(note_inputs_15_hash.as_elements()); - expected_stack.extend_from_slice(Word::empty().as_elements()); - expected_stack.reverse(); + assert_eq!(exec_output.get_stack_word(0), Word::empty()); + assert_eq!(exec_output.get_stack_word(4), note_storage_15_hash); + assert_eq!(exec_output.get_stack_word(8), note_storage_8_hash); + assert_eq!(exec_output.get_stack_word(12), note_storage_5_hash); - assert_eq!(exec_output.stack[0..16], expected_stack); Ok(()) } #[tokio::test] -async fn test_build_metadata_header() -> miette::Result<()> { +async fn test_build_metadata_header() -> anyhow::Result<()> { let tx_context = TransactionContextBuilder::with_existing_mock_account().build().unwrap(); let sender = tx_context.account().id(); let receiver = AccountId::try_from(ACCOUNT_ID_REGULAR_PRIVATE_ACCOUNT_UPDATABLE_CODE) - .map_err(|e| miette::miette!("Failed to convert account ID: {}", e))?; + .map_err(|e| anyhow::anyhow!("Failed to convert account ID: {}", e))?; - let test_metadata1 = - NoteMetadata::new(sender, NoteType::Private, NoteTag::with_account_target(receiver)); - let test_metadata2 = NoteMetadata::new(sender, NoteType::Public, NoteTag::new(u32::MAX)); + let test_metadata1 = NoteMetadata::new(sender, NoteType::Private) + .with_tag(NoteTag::with_account_target(receiver)); + let test_metadata2 = + NoteMetadata::new(sender, NoteType::Public).with_tag(NoteTag::new(u32::MAX)); for (iteration, test_metadata) in [test_metadata1, test_metadata2].into_iter().enumerate() { let code = format!( @@ -412,7 +395,7 @@ async fn 
test_build_metadata_header() -> miette::Result<()> { let exec_output = tx_context.execute_code(&code).await?; - let metadata_word = exec_output.get_stack_word_be(0); + let metadata_word = exec_output.get_stack_word(0); assert_eq!( test_metadata.to_header_word(), @@ -435,13 +418,13 @@ pub async fn test_timelock() -> anyhow::Result<()> { use miden::protocol::tx begin - # store the note inputs to memory starting at address 0 - push.0 exec.active_note::get_inputs - # => [num_inputs, inputs_ptr] + # store the note storage to memory starting at address 0 + push.0 exec.active_note::get_storage + # => [num_storage_items, storage_ptr] - # make sure the number of inputs is 1 - eq.1 assert.err="number of note inputs is not 1" - # => [inputs_ptr] + # make sure the number of storage items is 1 + eq.1 assert.err="note number of storage items is not 1" + # => [storage_ptr] # read the timestamp at which the note can be consumed mem_load @@ -463,13 +446,13 @@ pub async fn test_timelock() -> anyhow::Result<()> { let lock_timestamp = 2_000_000_000; let source_manager = Arc::new(DefaultSourceManager::default()); let timelock_note = NoteBuilder::new(account.id(), &mut ChaCha20Rng::from_os_rng()) - .note_inputs([Felt::from(lock_timestamp)])? + .note_storage([Felt::from(lock_timestamp)])? .source_manager(source_manager.clone()) .code(code.clone()) .dynamically_linked_libraries(CodeBuilder::mock_libraries()) .build()?; - builder.add_output_note(OutputNote::Full(timelock_note.clone())); + builder.add_output_note(RawOutputNote::Full(timelock_note.clone())); let mut mock_chain = builder.build()?; mock_chain @@ -499,21 +482,24 @@ pub async fn test_timelock() -> anyhow::Result<()> { Ok(()) } -/// This test checks the scenario when some public key, which is provided to the RPO component of +/// This test checks the scenario when some public key, which is provided to the auth component of /// the target account, is also provided as an input to the input note. 
/// /// Previously this setup was leading to the values collision in the advice map, see the -/// [issue #1267](https://github.com/0xMiden/miden-base/issues/1267) for more details. +/// [issue #1267](https://github.com/0xMiden/protocol/issues/1267) for more details. #[tokio::test] async fn test_public_key_as_note_input() -> anyhow::Result<()> { let mut rng = ChaCha20Rng::from_seed(Default::default()); let sec_key = SecretKey::with_rng(&mut rng); - // this value will be used both as public key in the RPO component of the target account and as + // this value will be used both as public key in the auth component of the target account and as // well as the input of the input note let public_key = PublicKeyCommitment::from(sec_key.public_key()); let public_key_value = Word::from(public_key); - let (rpo_component, authenticator) = Auth::BasicAuth.build_component(); + let (rpo_component, authenticator) = Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + } + .build_component(); let mock_seed_1 = Word::from([1, 2, 3, 4u32]).as_bytes(); let target_account = AccountBuilder::new(mock_seed_1) @@ -530,11 +516,11 @@ async fn test_public_key_as_note_input() -> anyhow::Result<()> { let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); let tag = NoteTag::with_account_target(target_account.id()); - let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public, tag); + let metadata = NoteMetadata::new(sender_account.id(), NoteType::Public).with_tag(tag); let vault = NoteAssets::new(vec![])?; let note_script = CodeBuilder::default().compile_note_script("begin nop end")?; let recipient = - NoteRecipient::new(serial_num, note_script, NoteInputs::new(public_key_value.to_vec())?); + NoteRecipient::new(serial_num, note_script, NoteStorage::new(public_key_value.to_vec())?); let note_with_pub_key = Note::new(vault.clone(), metadata, recipient); let tx_context = TransactionContextBuilder::new(target_account) @@ -545,43 +531,3 @@ async fn 
test_public_key_as_note_input() -> anyhow::Result<()> { tx_context.execute().await?; Ok(()) } - -#[tokio::test] -async fn test_build_note_tag_for_network_account() -> anyhow::Result<()> { - let tx_context = TransactionContextBuilder::with_existing_mock_account().build()?; - - let account_id = AccountId::try_from(ACCOUNT_ID_NETWORK_FUNGIBLE_FAUCET)?; - let expected_tag = NoteTag::with_account_target(account_id).as_u32(); - - let prefix: u64 = account_id.prefix().into(); - let suffix: u64 = account_id.suffix().into(); - - let code = format!( - " - use miden::core::sys - use miden::protocol::note - - begin - push.{suffix}.{prefix} - - exec.note::build_note_tag_for_network_account - # => [network_account_tag] - - exec.sys::truncate_stack - end - ", - suffix = suffix, - prefix = prefix, - ); - - let exec_output = tx_context.execute_code(&code).await?; - let actual_tag = exec_output.stack[0].as_int(); - - assert_eq!( - actual_tag, expected_tag as u64, - "Expected tag {:#010x}, got {:#010x}", - expected_tag, actual_tag - ); - - Ok(()) -} diff --git a/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs b/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs index 262301743c..82dbe2ef5e 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_output_note.rs @@ -1,7 +1,6 @@ use alloc::string::String; -use alloc::vec::Vec; -use anyhow::Context; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; use miden_protocol::crypto::rand::RpoRandomCoin; @@ -11,13 +10,11 @@ use miden_protocol::errors::tx_kernel::{ }; use miden_protocol::note::{ Note, - NoteAssets, NoteAttachment, NoteAttachmentScheme, - NoteExecutionHint, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -34,18 +31,27 @@ use miden_protocol::testing::account_id::{ }; use 
miden_protocol::testing::constants::NON_FUNGIBLE_ASSET_DATA_2; use miden_protocol::transaction::memory::{ + ASSET_SIZE, + ASSET_VALUE_OFFSET, NOTE_MEM_SIZE, NUM_OUTPUT_NOTES_PTR, OUTPUT_NOTE_ASSETS_OFFSET, OUTPUT_NOTE_ATTACHMENT_OFFSET, OUTPUT_NOTE_METADATA_HEADER_OFFSET, + OUTPUT_NOTE_NUM_ASSETS_OFFSET, OUTPUT_NOTE_RECIPIENT_OFFSET, OUTPUT_NOTE_SECTION_OFFSET, }; -use miden_protocol::transaction::{OutputNote, OutputNotes}; +use miden_protocol::transaction::{RawOutputNote, RawOutputNotes}; use miden_protocol::{Felt, Word, ZERO}; use miden_standards::code_builder::CodeBuilder; -use miden_standards::note::{NetworkAccountTarget, create_p2id_note}; +use miden_standards::note::{ + AccountTargetNetworkNote, + NetworkAccountTarget, + NetworkNoteExt, + NoteExecutionHint, + P2idNote, +}; use miden_standards::testing::mock_account::MockAccountExt; use miden_standards::testing::note::NoteBuilder; @@ -89,8 +95,8 @@ async fn test_create_note() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_kernel_mem_word(NUM_OUTPUT_NOTES_PTR), - Word::from([1, 0, 0, 0u32]), + exec_output.get_kernel_mem_element(NUM_OUTPUT_NOTES_PTR), + Felt::from(1u32), "number of output notes must increment by 1", ); @@ -100,7 +106,7 @@ async fn test_create_note() -> anyhow::Result<()> { "recipient must be stored at the correct memory location", ); - let metadata = NoteMetadata::new(account_id, NoteType::Public, tag); + let metadata = NoteMetadata::new(account_id, NoteType::Public).with_tag(tag); let expected_metadata_header = metadata.to_header_word(); let expected_note_attachment = metadata.to_attachment_word(); @@ -201,80 +207,45 @@ async fn test_create_note_too_many_notes() -> anyhow::Result<()> { #[tokio::test] async fn test_get_output_notes_commitment() -> anyhow::Result<()> { - let tx_context = { - let account = - Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); + let mut rng = 
RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); + let account = Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, Auth::IncrNonce); - let output_note_1 = - create_public_p2any_note(ACCOUNT_ID_SENDER.try_into()?, [FungibleAsset::mock(100)]); + let asset_1 = FungibleAsset::mock(100); + let asset_2 = FungibleAsset::mock(200); - let input_note_1 = create_public_p2any_note( - ACCOUNT_ID_PRIVATE_SENDER.try_into()?, - [FungibleAsset::mock(100)], - ); - - let input_note_2 = create_public_p2any_note( - ACCOUNT_ID_PRIVATE_SENDER.try_into()?, - [FungibleAsset::mock(200)], - ); - - TransactionContextBuilder::new(account) - .extend_input_notes(vec![input_note_1, input_note_2]) - .extend_expected_output_notes(vec![OutputNote::Full(output_note_1)]) - .build()? - }; - - // extract input note data - let input_note_1 = tx_context.tx_inputs().input_notes().get_note(0).note(); - let input_asset_1 = **input_note_1 - .assets() - .iter() - .take(1) - .collect::>() - .first() - .context("getting first expected input asset")?; - let input_note_2 = tx_context.tx_inputs().input_notes().get_note(1).note(); - let input_asset_2 = **input_note_2 - .assets() - .iter() - .take(1) - .collect::>() - .first() - .context("getting second expected input asset")?; - - // Choose random accounts as the target for the note tag. 
- let network_account = AccountId::try_from(ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET)?; - let local_account = AccountId::try_from(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET)?; + let input_note_1 = create_public_p2any_note(ACCOUNT_ID_PRIVATE_SENDER.try_into()?, [asset_1]); + let input_note_2 = create_public_p2any_note(ACCOUNT_ID_PRIVATE_SENDER.try_into()?, [asset_2]); // create output note 1 - let output_serial_no_1 = Word::from([8u32; 4]); - let output_tag_1 = NoteTag::with_account_target(network_account); - let assets = NoteAssets::new(vec![input_asset_1])?; - let metadata = - NoteMetadata::new(tx_context.tx_inputs().account().id(), NoteType::Public, output_tag_1); - let inputs = NoteInputs::new(vec![])?; - let recipient = NoteRecipient::new(output_serial_no_1, input_note_1.script().clone(), inputs); - let output_note_1 = Note::new(assets, metadata, recipient); + let output_note_1 = NoteBuilder::new(account.id(), &mut rng) + .tag(NoteTag::with_account_target(account.id()).as_u32()) + .note_type(NoteType::Public) + .add_assets([asset_1]) + .build()?; // create output note 2 - let output_serial_no_2 = Word::from([11u32; 4]); - let output_tag_2 = NoteTag::with_account_target(local_account); - let assets = NoteAssets::new(vec![input_asset_2])?; - let attachment = NoteAttachment::new_array( - NoteAttachmentScheme::new(5), - [42, 43, 44, 45, 46u32].map(Felt::from).to_vec(), - )?; - let metadata = - NoteMetadata::new(tx_context.tx_inputs().account().id(), NoteType::Public, output_tag_2) - .with_attachment(attachment); - let inputs = NoteInputs::new(vec![])?; - let recipient = NoteRecipient::new(output_serial_no_2, input_note_2.script().clone(), inputs); - let output_note_2 = Note::new(assets, metadata, recipient); + let output_note_2 = NoteBuilder::new(account.id(), &mut rng) + .tag(NoteTag::with_custom_account_target(account.id(), 2)?.as_u32()) + .note_type(NoteType::Public) + .add_assets([asset_2]) + .attachment(NoteAttachment::new_array( + NoteAttachmentScheme::new(5), + [42, 
43, 44, 45, 46u32].map(Felt::from).to_vec(), + )?) + .build()?; + + let tx_context = TransactionContextBuilder::new(account) + .extend_input_notes(vec![input_note_1.clone(), input_note_2.clone()]) + .extend_expected_output_notes(vec![ + RawOutputNote::Full(output_note_1.clone()), + RawOutputNote::Full(output_note_2.clone()), + ]) + .build()?; // compute expected output notes commitment - let expected_output_notes_commitment = OutputNotes::new(vec![ - OutputNote::Full(output_note_1.clone()), - OutputNote::Full(output_note_2.clone()), + let expected_output_notes_commitment = RawOutputNotes::new(vec![ + RawOutputNote::Full(output_note_1.clone()), + RawOutputNote::Full(output_note_2.clone()), ])? .commitment(); @@ -298,7 +269,8 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - push.{asset_1} + push.{ASSET_1_VALUE} + push.{ASSET_1_KEY} exec.output_note::add_asset # => [] @@ -309,7 +281,9 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - dup push.{asset_2} + dup + push.{ASSET_2_VALUE} + push.{ASSET_2_KEY} exec.output_note::add_asset # => [note_idx] @@ -332,14 +306,12 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { PUBLIC_NOTE = NoteType::Public as u8, recipient_1 = output_note_1.recipient().digest(), tag_1 = output_note_1.metadata().tag(), - asset_1 = Word::from( - **output_note_1.assets().iter().take(1).collect::>().first().unwrap() - ), + ASSET_1_KEY = asset_1.to_key_word(), + ASSET_1_VALUE = asset_1.to_value_word(), recipient_2 = output_note_2.recipient().digest(), tag_2 = output_note_2.metadata().tag(), - asset_2 = Word::from( - **output_note_2.assets().iter().take(1).collect::>().first().unwrap() - ), + ASSET_2_KEY = asset_2.to_key_word(), + ASSET_2_VALUE = asset_2.to_value_word(), ATTACHMENT2 = output_note_2.metadata().to_attachment_word(), attachment_scheme2 = 
output_note_2.metadata().attachment().attachment_scheme().as_u32(), ); @@ -347,8 +319,8 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_kernel_mem_word(NUM_OUTPUT_NOTES_PTR), - Word::from([2u32, 0, 0, 0]), + exec_output.get_kernel_mem_element(NUM_OUTPUT_NOTES_PTR), + Felt::from(2u32), "The test creates two notes", ); assert_eq!( @@ -378,7 +350,7 @@ async fn test_get_output_notes_commitment() -> anyhow::Result<()> { "Validate the output note 2 attachment", ); - assert_eq!(exec_output.get_stack_word_be(0), expected_output_notes_commitment); + assert_eq!(exec_output.get_stack_word(0), expected_output_notes_commitment); Ok(()) } @@ -389,7 +361,7 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { let faucet_id = AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?; let recipient = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::with_account_target(faucet_id); - let asset = Word::from(FungibleAsset::new(faucet_id, 10)?); + let asset = FungibleAsset::new(faucet_id, 10)?; let code = format!( " @@ -411,8 +383,9 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { dup assertz.err=\"index of the created note should be zero\" # => [note_idx] - push.{asset} - # => [ASSET, note_idx] + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx] call.output_note::add_asset # => [] @@ -424,15 +397,21 @@ async fn test_create_note_and_add_asset() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - asset = asset, + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET), - asset, - "asset must be stored at the correct memory location", + asset.to_key_word(), + "asset key must be stored at the correct 
memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 4), + asset.to_value_word(), + "asset value must be stored at the correct memory location", ); Ok(()) @@ -448,13 +427,12 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { let recipient = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::with_account_target(faucet_2); - let asset = Word::from(FungibleAsset::new(faucet, 10)?); - let asset_2 = Word::from(FungibleAsset::new(faucet_2, 20)?); - let asset_3 = Word::from(FungibleAsset::new(faucet_2, 30)?); - let asset_2_and_3 = Word::from(FungibleAsset::new(faucet_2, 50)?); + let asset = FungibleAsset::new(faucet, 10)?; + let asset_2 = FungibleAsset::new(faucet_2, 20)?; + let asset_3 = FungibleAsset::new(faucet_2, 30)?; + let asset_2_plus_3 = FungibleAsset::new(faucet_2, 50)?; let non_fungible_asset = NonFungibleAsset::mock(&NON_FUNGIBLE_ASSET_DATA_2); - let non_fungible_asset_encoded = Word::from(non_fungible_asset); let code = format!( " @@ -474,20 +452,27 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { dup assertz.err=\"index of the created note should be zero\" # => [note_idx] - dup push.{asset} - call.output_note::add_asset + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + exec.output_note::add_asset # => [note_idx] - dup push.{asset_2} - call.output_note::add_asset + dup + push.{ASSET2_VALUE} + push.{ASSET2_KEY} + exec.output_note::add_asset # => [note_idx] - dup push.{asset_3} - call.output_note::add_asset + dup + push.{ASSET3_VALUE} + push.{ASSET3_KEY} + exec.output_note::add_asset # => [note_idx] - push.{nft} - call.output_note::add_asset + push.{ASSET4_VALUE} + push.{ASSET4_KEY} + exec.output_note::add_asset # => [] # truncate the stack @@ -497,30 +482,73 @@ async fn test_create_note_and_add_multiple_assets() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - asset = asset, - asset_2 = 
asset_2, - asset_3 = asset_3, - nft = non_fungible_asset_encoded, + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), + ASSET2_KEY = asset_2.to_key_word(), + ASSET2_VALUE = asset_2.to_value_word(), + ASSET3_KEY = asset_3.to_key_word(), + ASSET3_VALUE = asset_3.to_value_word(), + ASSET4_KEY = non_fungible_asset.to_key_word(), + ASSET4_VALUE = non_fungible_asset.to_value_word(), ); let exec_output = &tx_context.execute_code(&code).await?; + assert_eq!( + exec_output + .get_kernel_mem_element(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_NUM_ASSETS_OFFSET) + .as_canonical_u64(), + 3, + "unexpected number of assets in output note", + ); + assert_eq!( exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET), - asset, - "asset must be stored at the correct memory location", + asset.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_VALUE_OFFSET + ), + asset.to_value_word(), + "asset value must be stored at the correct memory location", ); assert_eq!( - exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 4), - asset_2_and_3, - "asset_2 and asset_3 must be stored at the same correct memory location", + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_SIZE + ), + asset_2_plus_3.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + + OUTPUT_NOTE_ASSETS_OFFSET + + ASSET_SIZE + + ASSET_VALUE_OFFSET + ), + asset_2_plus_3.to_value_word(), + "asset value must be stored at the correct memory location", ); assert_eq!( - exec_output.get_kernel_mem_word(OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + 8), - non_fungible_asset_encoded, - "non_fungible_asset must be stored at the correct memory 
location", + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + OUTPUT_NOTE_ASSETS_OFFSET + ASSET_SIZE * 2 + ), + non_fungible_asset.to_key_word(), + "asset key must be stored at the correct memory location", + ); + assert_eq!( + exec_output.get_kernel_mem_word( + OUTPUT_NOTE_SECTION_OFFSET + + OUTPUT_NOTE_ASSETS_OFFSET + + ASSET_SIZE * 2 + + ASSET_VALUE_OFFSET + ), + non_fungible_asset.to_value_word(), + "asset value must be stored at the correct memory location", ); Ok(()) @@ -533,7 +561,6 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { let recipient = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::new(999 << 16 | 777); let non_fungible_asset = NonFungibleAsset::mock(&[1, 2, 3]); - let encoded = Word::from(non_fungible_asset); let code = format!( " @@ -550,13 +577,16 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { exec.output_note::create # => [note_idx] - dup push.{nft} - # => [NFT, note_idx, note_idx] + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] exec.output_note::add_asset # => [note_idx] - push.{nft} + push.{ASSET_VALUE} + push.{ASSET_KEY} exec.output_note::add_asset # => [] end @@ -564,7 +594,8 @@ async fn test_create_note_and_add_same_nft_twice() -> anyhow::Result<()> { recipient = recipient, PUBLIC_NOTE = NoteType::Public as u8, tag = tag, - nft = encoded, + ASSET_KEY = non_fungible_asset.to_key_word(), + ASSET_VALUE = non_fungible_asset.to_value_word(), ); let exec_output = tx_context.execute_code(&code).await; @@ -616,10 +647,10 @@ async fn test_build_recipient_hash() -> anyhow::Result<()> { let output_serial_no = Word::from([0, 1, 2, 3u32]); let tag = NoteTag::new(42 << 16 | 42); let single_input = 2; - let inputs = NoteInputs::new(vec![Felt::new(single_input)]).unwrap(); - let input_commitment = inputs.commitment(); + let storage = NoteStorage::new(vec![Felt::new(single_input)]).unwrap(); + let storage_commitment = 
storage.commitment(); - let recipient = NoteRecipient::new(output_serial_no, input_note_1.script().clone(), inputs); + let recipient = NoteRecipient::new(output_serial_no, input_note_1.script().clone(), storage); let code = format!( " use $kernel::prologue @@ -630,13 +661,13 @@ async fn test_build_recipient_hash() -> anyhow::Result<()> { begin exec.prologue::prepare_transaction - # input - push.{input_commitment} + # storage + push.{storage_commitment} # SCRIPT_ROOT push.{script_root} # SERIAL_NUM push.{output_serial_no} - # => [SERIAL_NUM, SCRIPT_ROOT, INPUT_COMMITMENT] + # => [SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT] exec.note::build_recipient_hash # => [RECIPIENT, pad(12)] @@ -661,8 +692,8 @@ async fn test_build_recipient_hash() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(&code).await?; assert_eq!( - exec_output.get_kernel_mem_word(NUM_OUTPUT_NOTES_PTR), - Word::from([1, 0, 0, 0u32]), + exec_output.get_kernel_mem_element(NUM_OUTPUT_NOTES_PTR), + Felt::from(1u32), "number of output notes must increment by 1", ); @@ -706,12 +737,16 @@ async fn test_get_asset_info() -> anyhow::Result<()> { .expect("asset is invalid"), ); - let account = builder - .add_existing_wallet_with_assets(Auth::BasicAuth, [fungible_asset_0, fungible_asset_1])?; + let account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [fungible_asset_0, fungible_asset_1], + )?; let mock_chain = builder.build()?; - let output_note_0 = create_p2id_note( + let output_note_0 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into()?, vec![fungible_asset_0], @@ -720,7 +755,7 @@ async fn test_get_asset_info() -> anyhow::Result<()> { &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), )?; - let output_note_1 = create_p2id_note( + let output_note_1 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into()?, vec![fungible_asset_0, 
fungible_asset_1], @@ -743,9 +778,10 @@ async fn test_get_asset_info() -> anyhow::Result<()> { # => [note_idx] # move the asset 0 to the note - push.{asset_0} + dup + push.{ASSET_0_VALUE} + push.{ASSET_0_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # get the assets hash and assets number of the note having only asset_0 @@ -772,9 +808,10 @@ async fn test_get_asset_info() -> anyhow::Result<()> { # => [note_idx] # add asset_1 to the note - push.{asset_1} + dup + push.{ASSET_1_VALUE} + push.{ASSET_1_KEY} call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] # get the assets hash and assets number of the note having asset_0 and asset_1 @@ -799,12 +836,14 @@ async fn test_get_asset_info() -> anyhow::Result<()> { RECIPIENT = output_note_1.recipient().digest(), note_type = NoteType::Public as u8, tag = output_note_1.metadata().tag(), - asset_0 = Word::from(fungible_asset_0), + ASSET_0_VALUE = fungible_asset_0.to_value_word(), + ASSET_0_KEY = fungible_asset_0.to_key_word(), // first data request COMPUTED_ASSETS_COMMITMENT_0 = output_note_0.assets().commitment(), assets_number_0 = output_note_0.assets().num_assets(), // second data request - asset_1 = Word::from(fungible_asset_1), + ASSET_1_VALUE = fungible_asset_1.to_value_word(), + ASSET_1_KEY = fungible_asset_1.to_key_word(), COMPUTED_ASSETS_COMMITMENT_1 = output_note_1.assets().commitment(), assets_number_1 = output_note_1.assets().num_assets(), ); @@ -813,7 +852,7 @@ async fn test_get_asset_info() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note_1)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_1)]) .tx_script(tx_script) .build()?; @@ -828,12 +867,16 @@ async fn test_get_asset_info() -> anyhow::Result<()> { async fn test_get_recipient_and_metadata() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, [FungibleAsset::mock(2000)])?; + let account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [FungibleAsset::mock(2000)], + )?; let mock_chain = builder.build()?; - let output_note = create_p2id_note( + let output_note = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into()?, vec![FungibleAsset::mock(5)], @@ -889,7 +932,7 @@ async fn test_get_recipient_and_metadata() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .tx_script(tx_script) .build()?; @@ -917,13 +960,13 @@ async fn test_get_assets() -> anyhow::Result<()> { push.{note_idx} push.{dest_ptr} # => [dest_ptr, note_index] - # write the assets to the memory + # write the assets to memory exec.output_note::get_assets # => [num_assets, dest_ptr, note_index] # assert the number of note assets push.{assets_number} - assert_eq.err="note {note_index} has incorrect assets number" + assert_eq.err="expected note {note_index} to have {assets_number} assets" # => [dest_ptr, note_index] "#, note_idx = note_index, @@ -936,19 +979,31 @@ async fn test_get_assets() -> anyhow::Result<()> { check_assets_code.push_str(&format!( r#" # load the asset stored in memory - padw dup.4 mem_loadw_be - # => [STORED_ASSET, dest_ptr, note_index] + padw dup.4 mem_loadw_le + # => [STORED_ASSET_KEY, dest_ptr, note_index] + + # assert the asset key matches + push.{NOTE_ASSET_KEY} + assert_eqw.err="expected asset key at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_KEY}" + # => [dest_ptr, note_index] + + # load the asset stored in memory + padw dup.4 add.{ASSET_VALUE_OFFSET} mem_loadw_le + # => [STORED_ASSET_VALUE, dest_ptr, note_index] - # assert the asset - push.{NOTE_ASSET} - assert_eqw.err="asset {asset_index} of the note {note_index} is incorrect" + # assert the asset value matches + push.{NOTE_ASSET_VALUE} + assert_eqw.err="expected asset value at asset index {asset_index} of the note\ + {note_index} to be {NOTE_ASSET_VALUE}" # => [dest_ptr, note_index] # move the pointer - add.4 - # => [dest_ptr+4, note_index] + add.{ASSET_SIZE} + # => [dest_ptr+ASSET_SIZE, note_index] "#, - NOTE_ASSET = Word::from(*asset), + NOTE_ASSET_KEY = asset.to_key_word(), + NOTE_ASSET_VALUE = asset.to_value_word(), asset_index = asset_index, note_index = note_index, )); @@ -982,9 +1037,9 @@ async fn 
test_get_assets() -> anyhow::Result<()> { create_note_0 = create_output_note(&p2id_note_0_assets), check_note_0 = check_assets_code(0, 0, &p2id_note_0_assets), create_note_1 = create_output_note(&p2id_note_1_asset), - check_note_1 = check_assets_code(1, 4, &p2id_note_1_asset), + check_note_1 = check_assets_code(1, 8, &p2id_note_1_asset), create_note_2 = create_output_note(&p2id_note_2_assets), - check_note_2 = check_assets_code(2, 8, &p2id_note_2_assets), + check_note_2 = check_assets_code(2, 16, &p2id_note_2_assets), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -992,9 +1047,9 @@ async fn test_get_assets() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(p2id_note_0_assets), - OutputNote::Full(p2id_note_1_asset), - OutputNote::Full(p2id_note_2_assets), + RawOutputNote::Full(p2id_note_0_assets), + RawOutputNote::Full(p2id_note_1_asset), + RawOutputNote::Full(p2id_note_2_assets), ]) .tx_script(tx_script) .build()?; @@ -1010,7 +1065,7 @@ async fn test_set_none_attachment() -> anyhow::Result<()> { let rng = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])); let attachment = NoteAttachment::default(); let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1066,7 +1121,7 @@ async fn test_set_word_attachment() -> anyhow::Result<()> { let attachment = NoteAttachment::new_word(NoteAttachmentScheme::new(u32::MAX), Word::from([3, 4, 5, 6u32])); let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1120,7 +1175,7 @@ async fn test_set_array_attachment() -> anyhow::Result<()> { let elements = [3, 4, 5, 
6, 7, 8, 9u32].map(Felt::from).to_vec(); let attachment = NoteAttachment::new_array(NoteAttachmentScheme::new(42), elements.clone())?; let output_note = - OutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); + RawOutputNote::Full(NoteBuilder::new(account.id(), rng).attachment(attachment).build()?); let tx_script = format!( " @@ -1191,7 +1246,7 @@ async fn test_set_network_target_account_attachment() -> anyhow::Result<()> { let actual_note = tx.output_notes().get_note(0); assert_eq!(actual_note.header(), output_note.header()); - assert_eq!(actual_note.assets().unwrap(), output_note.assets()); + assert_eq!(actual_note.assets(), output_note.assets()); // Make sure we can deserialize the attachment back into its original type. let actual_attachment = NetworkAccountTarget::try_from(actual_note.metadata().attachment())?; @@ -1200,6 +1255,75 @@ async fn test_set_network_target_account_attachment() -> anyhow::Result<()> { Ok(()) } +#[tokio::test] +async fn test_network_note() -> anyhow::Result<()> { + let sender = Account::mock(ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, Auth::IncrNonce); + let mut rng = RpoRandomCoin::new(Word::from([9, 8, 7, 6u32])); + + // --- Valid network note --- + let target_id = AccountId::try_from(ACCOUNT_ID_NETWORK_NON_FUNGIBLE_FAUCET)?; + let attachment = NetworkAccountTarget::new(target_id, NoteExecutionHint::Always)?; + + let note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Public) + .attachment(attachment) + .build()?; + + // is_network_note() returns true for a note with a valid NetworkAccountTarget attachment. + assert!(note.is_network_note()); + + // into_account_target_network_note() succeeds and accessors return correct values. 
+ let expected_note_type = note.metadata().note_type(); + let network_note = note.into_account_target_network_note()?; + assert_eq!(network_note.target_account_id(), target_id); + assert_eq!(network_note.execution_hint(), NoteExecutionHint::Always); + assert_eq!(network_note.note_type(), expected_note_type); + + // TryFrom succeeds for a valid network note. + let valid_note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Public) + .attachment(attachment) + .build()?; + let try_from_note = AccountTargetNetworkNote::try_from(valid_note)?; + assert_eq!(try_from_note.target_account_id(), target_id); + + // --- Invalid: note with default (empty) attachment --- + let non_network_note = + NoteBuilder::new(sender.id(), &mut rng).note_type(NoteType::Public).build()?; + + // is_network_note() returns false for a note without a NetworkAccountTarget attachment. + assert!(!non_network_note.is_network_note()); + + // AccountTargetNetworkNote::new() fails for an invalid attachment. + assert!(AccountTargetNetworkNote::new(non_network_note.clone()).is_err()); + + // into_account_target_network_note() fails for a non-network note. + assert!(non_network_note.clone().into_account_target_network_note().is_err()); + + // TryFrom fails for a non-network note. + assert!(AccountTargetNetworkNote::try_from(non_network_note).is_err()); + + // --- Invalid: private note with valid NetworkAccountTarget attachment --- + let private_network_note = NoteBuilder::new(sender.id(), &mut rng) + .note_type(NoteType::Private) + .attachment(attachment) + .build()?; + + // is_network_note() returns false for a private note even with a valid attachment. + assert!(!private_network_note.is_network_note()); + + // AccountTargetNetworkNote::new() fails for a private note. + assert!(AccountTargetNetworkNote::new(private_network_note.clone()).is_err()); + + // into_account_target_network_note() fails for a private note. 
+ assert!(private_network_note.clone().into_account_target_network_note().is_err()); + + // TryFrom fails for a private note. + assert!(AccountTargetNetworkNote::try_from(private_network_note).is_err()); + + Ok(()) +} + // HELPER FUNCTIONS // ================================================================================================ @@ -1225,12 +1349,15 @@ fn create_output_note(note: &Note) -> String { create_note_code.push_str(&format!( " # move the asset to the note - push.{asset} + dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] call.::miden::standards::wallets::basic::move_asset_to_note - dropw # => [note_idx] ", - asset = Word::from(*asset) + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word() )); } diff --git a/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs b/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs index 07ad25bf3d..3cb661b3a2 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_prologue.rs @@ -2,39 +2,37 @@ use alloc::collections::BTreeMap; use alloc::vec::Vec; use anyhow::Context; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceInputs, Word}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ExecutionOutput, Word}; use miden_protocol::account::{ Account, AccountBuilder, - AccountId, - AccountIdVersion, + AccountHeader, AccountProcedureRoot, - AccountStorage, AccountStorageMode, AccountType, - StorageMap, StorageSlot, StorageSlotName, }; use miden_protocol::asset::{FungibleAsset, NonFungibleAsset}; -use miden_protocol::errors::tx_kernel::{ - ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH, - ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY, - ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPTY_SMT, -}; +use miden_protocol::block::account_tree::AccountIdKey; +use 
miden_protocol::errors::tx_kernel::ERR_ACCOUNT_SEED_AND_COMMITMENT_DIGEST_MISMATCH; +use miden_protocol::note::NoteId; use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, ACCOUNT_ID_SENDER, }; -use miden_protocol::testing::noop_auth_component::NoopAuthComponent; use miden_protocol::transaction::memory::{ ACCT_DB_ROOT_PTR, + ASSET_SIZE, + ASSET_VALUE_OFFSET, BLOCK_COMMITMENT_PTR, BLOCK_METADATA_PTR, BLOCK_NUMBER_IDX, CHAIN_COMMITMENT_PTR, FEE_PARAMETERS_PTR, + GLOBAL_ACCOUNT_ID_PREFIX_PTR, + GLOBAL_ACCOUNT_ID_SUFFIX_PTR, INIT_ACCT_COMMITMENT_PTR, INIT_NATIVE_ACCT_STORAGE_COMMITMENT_PTR, INIT_NATIVE_ACCT_VAULT_ROOT_PTR, @@ -44,7 +42,6 @@ use miden_protocol::transaction::memory::{ INPUT_NOTE_ASSETS_OFFSET, INPUT_NOTE_ATTACHMENT_OFFSET, INPUT_NOTE_ID_OFFSET, - INPUT_NOTE_INPUTS_COMMITMENT_OFFSET, INPUT_NOTE_METADATA_HEADER_OFFSET, INPUT_NOTE_NULLIFIER_SECTION_PTR, INPUT_NOTE_NUM_ASSETS_OFFSET, @@ -52,11 +49,11 @@ use miden_protocol::transaction::memory::{ INPUT_NOTE_SCRIPT_ROOT_OFFSET, INPUT_NOTE_SECTION_PTR, INPUT_NOTE_SERIAL_NUM_OFFSET, + INPUT_NOTE_STORAGE_COMMITMENT_OFFSET, INPUT_NOTES_COMMITMENT_PTR, KERNEL_PROCEDURES_PTR, NATIVE_ACCT_CODE_COMMITMENT_PTR, NATIVE_ACCT_ID_AND_NONCE_PTR, - NATIVE_ACCT_ID_PTR, NATIVE_ACCT_PROCEDURES_SECTION_PTR, NATIVE_ACCT_STORAGE_COMMITMENT_PTR, NATIVE_ACCT_STORAGE_SLOTS_SECTION_PTR, @@ -80,7 +77,7 @@ use miden_protocol::transaction::memory::{ VERIFICATION_BASE_FEE_IDX, }; use miden_protocol::transaction::{ExecutedTransaction, TransactionArgs, TransactionKernel}; -use miden_protocol::{EMPTY_WORD, ONE, WORD_SIZE}; +use miden_protocol::{EMPTY_WORD, WORD_SIZE}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; @@ -98,7 +95,6 @@ use crate::{ TransactionContext, TransactionContextBuilder, assert_execution_error, - assert_transaction_executor_error, }; #[tokio::test] @@ -112,7 
+108,7 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { ); let input_note_2 = create_public_p2any_note( ACCOUNT_ID_SENDER.try_into().unwrap(), - [FungibleAsset::mock(100)], + [FungibleAsset::mock(100), NonFungibleAsset::mock(&[1, 2, 3])], ); let input_note_3 = create_public_p2any_note( ACCOUNT_ID_SENDER.try_into().unwrap(), @@ -139,16 +135,15 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { let tx_script = CodeBuilder::default().compile_tx_script(mock_tx_script_code).unwrap(); - let note_args = [Word::from([91u32; 4]), Word::from([92u32; 4])]; - + // Input note 2 does not have any note args. let note_args_map = BTreeMap::from([ - (tx_context.input_notes().get_note(0).note().id(), note_args[0]), - (tx_context.input_notes().get_note(1).note().id(), note_args[1]), + (tx_context.input_notes().get_note(0).note().id(), Word::from([91u32; 4])), + (tx_context.input_notes().get_note(1).note().id(), Word::from([92u32; 4])), ]); let tx_args = TransactionArgs::new(tx_context.tx_args().advice_inputs().clone().map) .with_tx_script(tx_script) - .with_note_args(note_args_map); + .with_note_args(note_args_map.clone()); tx_context.set_tx_args(tx_args); let exec_output = &tx_context.execute_code(code).await?; @@ -158,7 +153,7 @@ async fn test_transaction_prologue() -> anyhow::Result<()> { partial_blockchain_memory_assertions(exec_output, &tx_context); kernel_data_memory_assertions(exec_output); account_data_memory_assertions(exec_output, &tx_context); - input_notes_memory_assertions(exec_output, &tx_context, ¬e_args); + input_notes_memory_assertions(exec_output, &tx_context, ¬e_args_map); Ok(()) } @@ -171,19 +166,19 @@ fn global_input_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transa ); assert_eq!( - exec_output.get_kernel_mem_word(NATIVE_ACCT_ID_PTR)[0], + exec_output.get_kernel_mem_element(GLOBAL_ACCOUNT_ID_SUFFIX_PTR), inputs.account().id().suffix(), - "The account ID prefix should be stored at the ACCT_ID_PTR[0]" + "The account ID 
prefix should be stored at the GLOBAL_ACCOUNT_ID_SUFFIX_PTR" ); assert_eq!( - exec_output.get_kernel_mem_word(NATIVE_ACCT_ID_PTR)[1], + exec_output.get_kernel_mem_element(GLOBAL_ACCOUNT_ID_PREFIX_PTR), inputs.account().id().prefix().as_felt(), - "The account ID suffix should be stored at the ACCT_ID_PTR[1]" + "The account ID suffix should be stored at the GLOBAL_ACCOUNT_ID_PREFIX_PTR" ); assert_eq!( exec_output.get_kernel_mem_word(INIT_ACCT_COMMITMENT_PTR), - inputs.account().commitment(), + inputs.account().to_commitment(), "The account commitment should be stored at the INIT_ACCT_COMMITMENT_PTR" ); @@ -269,19 +264,19 @@ fn block_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transact assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[BLOCK_NUMBER_IDX], - inputs.tx_inputs().block_header().block_num().into(), + Felt::from(inputs.tx_inputs().block_header().block_num()), "The block number should be stored at BLOCK_METADATA_PTR[BLOCK_NUMBER_IDX]" ); assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[PROTOCOL_VERSION_IDX], - inputs.tx_inputs().block_header().version().into(), + Felt::from(inputs.tx_inputs().block_header().version()), "The protocol version should be stored at BLOCK_METADATA_PTR[PROTOCOL_VERSION_IDX]" ); assert_eq!( exec_output.get_kernel_mem_word(BLOCK_METADATA_PTR)[TIMESTAMP_IDX], - inputs.tx_inputs().block_header().timestamp().into(), + Felt::from(inputs.tx_inputs().block_header().timestamp()), "The timestamp should be stored at BLOCK_METADATA_PTR[TIMESTAMP_IDX]" ); @@ -305,12 +300,7 @@ fn block_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transact assert_eq!( exec_output.get_kernel_mem_word(FEE_PARAMETERS_PTR)[VERIFICATION_BASE_FEE_IDX], - inputs - .tx_inputs() - .block_header() - .fee_parameters() - .verification_base_fee() - .into(), + Felt::from(inputs.tx_inputs().block_header().fee_parameters().verification_base_fee()), "The verification base fee should be stored at 
FEE_PARAMETERS_PTR[VERIFICATION_BASE_FEE_IDX]" ); @@ -353,7 +343,7 @@ fn kernel_data_memory_assertions(exec_output: &ExecutionOutput) { // check that the number of kernel procedures stored in the memory is equal to the number of // procedures in the `TransactionKernel::PROCEDURES` array assert_eq!( - exec_output.get_kernel_mem_word(NUM_KERNEL_PROCEDURES_PTR)[0].as_int(), + exec_output.get_kernel_mem_word(NUM_KERNEL_PROCEDURES_PTR)[0].as_canonical_u64(), TransactionKernel::PROCEDURES.len() as u64, "Number of the kernel procedures should be stored at the NUM_KERNEL_PROCEDURES_PTR" ); @@ -370,15 +360,11 @@ fn kernel_data_memory_assertions(exec_output: &ExecutionOutput) { } fn account_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &TransactionContext) { + let header = AccountHeader::from(inputs.account()); assert_eq!( - exec_output.get_kernel_mem_word(NATIVE_ACCT_ID_AND_NONCE_PTR), - Word::new([ - inputs.account().id().suffix(), - inputs.account().id().prefix().as_felt(), - ZERO, - inputs.account().nonce() - ]), - "The account ID should be stored at NATIVE_ACCT_ID_AND_NONCE_PTR[0]" + exec_output.get_kernel_mem_word(NATIVE_ACCT_ID_AND_NONCE_PTR).as_elements(), + &header.to_elements()[0..4], + "The account ID and nonce should be stored at NATIVE_ACCT_ID_AND_NONCE_PTR" ); assert_eq!( @@ -446,7 +432,7 @@ fn account_data_memory_assertions(exec_output: &ExecutionOutput, inputs: &Transa fn input_notes_memory_assertions( exec_output: &ExecutionOutput, inputs: &TransactionContext, - note_args: &[Word], + note_args: &BTreeMap, ) { assert_eq!( exec_output.get_kernel_mem_word(INPUT_NOTE_SECTION_PTR), @@ -484,9 +470,9 @@ fn input_notes_memory_assertions( ); assert_eq!( - exec_output.get_note_mem_word(note_idx, INPUT_NOTE_INPUTS_COMMITMENT_OFFSET), - note.inputs().commitment(), - "note input commitment should be stored at the correct offset" + exec_output.get_note_mem_word(note_idx, INPUT_NOTE_STORAGE_COMMITMENT_OFFSET), + note.storage().commitment(), + "note storage 
commitment should be stored at the correct offset" ); assert_eq!( @@ -515,7 +501,7 @@ fn input_notes_memory_assertions( assert_eq!( exec_output.get_note_mem_word(note_idx, INPUT_NOTE_ARGS_OFFSET), - note_args[note_idx as usize], + note_args.get(&input_note.id()).copied().unwrap_or_default(), "note args should be stored at the correct offset" ); @@ -526,14 +512,22 @@ fn input_notes_memory_assertions( ); for (asset, asset_idx) in note.assets().iter().cloned().zip(0_u32..) { - let word: Word = asset.into(); + let asset_key = asset.to_key_word(); + let asset_value = asset.to_value_word(); + + let asset_key_addr = INPUT_NOTE_ASSETS_OFFSET + asset_idx * ASSET_SIZE; + let asset_value_addr = asset_key_addr + ASSET_VALUE_OFFSET; + assert_eq!( - exec_output.get_note_mem_word( - note_idx, - INPUT_NOTE_ASSETS_OFFSET + asset_idx * WORD_SIZE as u32 - ), - word, - "assets should be stored at (INPUT_NOTES_DATA_OFFSET + note_index * 2048 + 32 + asset_idx * 4)" + exec_output.get_note_mem_word(note_idx, asset_key_addr), + asset_key, + "asset key should be stored at the correct offset" + ); + + assert_eq!( + exec_output.get_note_mem_word(note_idx, asset_value_addr), + asset_value, + "asset value should be stored at the correct offset" ); } } @@ -622,106 +616,6 @@ pub async fn create_accounts_with_all_storage_modes() -> anyhow::Result<()> { create_multiple_accounts_test(AccountStorageMode::Network).await } -/// Takes an account with a placeholder ID and returns the same account but with its ID replaced -/// with a newly generated one. 
-fn compute_valid_account_id(account: Account) -> Account { - let init_seed: [u8; 32] = [5; 32]; - let seed = AccountId::compute_account_seed( - init_seed, - account.account_type(), - AccountStorageMode::Public, - AccountIdVersion::Version0, - account.code().commitment(), - account.storage().to_commitment(), - ) - .unwrap(); - - let account_id = AccountId::new( - seed, - AccountIdVersion::Version0, - account.code().commitment(), - account.storage().to_commitment(), - ) - .unwrap(); - - // Overwrite old ID with generated ID. - let (_, vault, storage, code, _nonce, _seed) = account.into_parts(); - // Set nonce to zero so this is considered a new account. - Account::new(account_id, vault, storage, code, ZERO, Some(seed)).unwrap() -} - -/// Tests that creating a fungible faucet account with a non-empty initial balance in its reserved -/// slot fails. -#[tokio::test] -pub async fn create_account_fungible_faucet_invalid_initial_balance() -> anyhow::Result<()> { - let account = AccountBuilder::new([1; 32]) - .account_type(AccountType::FungibleFaucet) - .with_auth_component(NoopAuthComponent) - .with_component(MockAccountComponent::with_empty_slots()) - .build_existing() - .expect("account should be valid"); - let (id, vault, mut storage, code, _nonce, _seed) = account.into_parts(); - - // Set the initial balance to a non-zero value manually, since the builder would not allow us to - // do that. - let faucet_data_slot = Word::from([0, 0, 0, 100u32]); - storage - .set_item(AccountStorage::faucet_sysdata_slot(), faucet_data_slot) - .unwrap(); - - // The compute account ID function will set the nonce to zero so this is considered a new - // account. 
- let account = Account::new(id, vault, storage, code, ONE, None)?; - let account = compute_valid_account_id(account); - - let result = create_account_test(account).await; - - assert_transaction_executor_error!( - result, - ERR_PROLOGUE_NEW_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_EMPTY - ); - - Ok(()) -} - -/// Tests that creating a non fungible faucet account with a non-empty storage map in its reserved -/// slot fails. -#[tokio::test] -pub async fn create_account_non_fungible_faucet_invalid_initial_reserved_slot() -> anyhow::Result<()> -{ - // Create a storage map with a mock asset to make it non-empty. - let asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); - let non_fungible_storage_map = - StorageMap::with_entries([(asset.vault_key().into(), asset.into())]).unwrap(); - let storage = AccountStorage::new(vec![StorageSlot::with_map( - AccountStorage::faucet_sysdata_slot().clone(), - non_fungible_storage_map, - )]) - .unwrap(); - - let account = AccountBuilder::new([1; 32]) - .account_type(AccountType::NonFungibleFaucet) - .with_auth_component(NoopAuthComponent) - .with_component(MockAccountComponent::with_empty_slots()) - .build() - .expect("account should be valid"); - let (id, vault, _storage, code, _nonce, _seed) = account.into_parts(); - - // The compute account ID function will set the nonce to zero so this is considered a new - // account. - let account = Account::new(id, vault, storage, code, ONE, None)?; - let account = compute_valid_account_id(account); - - let result = create_account_test(account).await; - - assert_transaction_executor_error!( - result, - ERR_PROLOGUE_NEW_NON_FUNGIBLE_FAUCET_RESERVED_SLOT_MUST_BE_VALID_EMPTY_SMT - ); - - Ok(()) -} - /// Tests that supplying an invalid seed causes account creation to fail. 
#[tokio::test] pub async fn create_account_invalid_seed() -> anyhow::Result<()> { @@ -739,9 +633,8 @@ pub async fn create_account_invalid_seed() -> anyhow::Result<()> { .expect("failed to get transaction inputs from mock chain"); // override the seed with an invalid seed to ensure the kernel fails - let account_seed_key = [account.id().suffix(), account.id().prefix().as_felt(), ZERO, ZERO]; - let adv_inputs = - AdviceInputs::default().with_map([(Word::from(account_seed_key), vec![ZERO; WORD_SIZE])]); + let account_seed_key = AccountIdKey::from(account.id()).as_word(); + let adv_inputs = AdviceInputs::default().with_map([(account_seed_key, vec![ZERO; WORD_SIZE])]); let tx_context = TransactionContextBuilder::new(account) .tx_inputs(tx_inputs) @@ -783,7 +676,7 @@ async fn test_get_blk_version() -> anyhow::Result<()> { assert_eq!( exec_output.get_stack_element(0), - tx_context.tx_inputs().block_header().version().into() + Felt::from(tx_context.tx_inputs().block_header().version()) ); Ok(()) @@ -809,7 +702,7 @@ async fn test_get_blk_timestamp() -> anyhow::Result<()> { assert_eq!( exec_output.get_stack_element(0), - tx_context.tx_inputs().block_header().timestamp().into() + Felt::from(tx_context.tx_inputs().block_header().timestamp()) ); Ok(()) diff --git a/crates/miden-testing/src/kernel_tests/tx/test_tx.rs b/crates/miden-testing/src/kernel_tests/tx/test_tx.rs index 6ff0536b97..e5b0b2a8c0 100644 --- a/crates/miden-testing/src/kernel_tests/tx/test_tx.rs +++ b/crates/miden-testing/src/kernel_tests/tx/test_tx.rs @@ -2,7 +2,9 @@ use alloc::sync::Arc; use anyhow::Context; use assert_matches::assert_matches; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::component::AccountComponentMetadata; use miden_protocol::account::{ Account, AccountBuilder, @@ -26,9 +28,9 @@ use miden_protocol::note::{ NoteAttachmentScheme, NoteHeader, NoteId, - NoteInputs, 
NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; @@ -44,18 +46,18 @@ use miden_protocol::testing::constants::{FUNGIBLE_ASSET_AMOUNT, NON_FUNGIBLE_ASS use miden_protocol::testing::note::DEFAULT_NOTE_CODE; use miden_protocol::transaction::{ InputNotes, - OutputNote, - OutputNotes, + RawOutputNote, + RawOutputNotes, TransactionArgs, TransactionKernel, TransactionSummary, }; use miden_protocol::{Felt, Hasher, ONE, Word}; -use miden_standards::AuthScheme; +use miden_standards::AuthMethod; use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; -use miden_standards::note::create_p2id_note; +use miden_standards::note::P2idNote; use miden_standards::testing::account_component::IncrNonceAuthComponent; use miden_standards::testing::mock_account::MockAccountExt; use miden_tx::auth::UnreachableAuth; @@ -72,8 +74,18 @@ async fn consuming_note_created_in_future_block_fails() -> anyhow::Result<()> { // Create a chain with an account let mut builder = MockChain::builder(); let asset = FungibleAsset::mock(400); - let account1 = builder.add_existing_wallet_with_assets(Auth::BasicAuth, [asset])?; - let account2 = builder.add_existing_wallet_with_assets(Auth::BasicAuth, [asset])?; + let account1 = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [asset], + )?; + let account2 = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [asset], + )?; let output_note = create_public_p2any_note(account1.id(), [asset]); let spawn_note = builder.add_spawn_note([&output_note])?; let mut mock_chain = builder.build()?; @@ -84,7 +96,7 @@ async fn consuming_note_created_in_future_block_fails() -> anyhow::Result<()> { // against reference block 1 which we'll use for the later transaction. 
let tx = mock_chain .build_tx_context(account1.id(), &[spawn_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .build()? .execute() .await?; @@ -154,19 +166,19 @@ async fn test_block_procedures() -> anyhow::Result<()> { let exec_output = &tx_context.execute_code(code).await?; assert_eq!( - exec_output.get_stack_word_be(0), + exec_output.get_stack_word(0), tx_context.tx_inputs().block_header().commitment(), "top word on the stack should be equal to the block header commitment" ); assert_eq!( - exec_output.get_stack_element(4).as_int(), + exec_output.get_stack_element(4).as_canonical_u64(), tx_context.tx_inputs().block_header().timestamp() as u64, "fifth element on the stack should be equal to the timestamp of the last block creation" ); assert_eq!( - exec_output.get_stack_element(5).as_int(), + exec_output.get_stack_element(5).as_canonical_u64(), tx_context.tx_inputs().block_header().block_num().as_u64(), "sixth element on the stack should be equal to the block number" ); @@ -219,12 +231,15 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { // In this test we create 3 notes. Note 1 is private, Note 2 is public and Note 3 is public // without assets. 
+ let recipient_1 = Word::from([0, 1, 2, 3u32]); + // Create the expected output note for Note 2 which is public let serial_num_2 = Word::from([1, 2, 3, 4u32]); let note_script_2 = CodeBuilder::default().compile_note_script(DEFAULT_NOTE_CODE)?; - let inputs_2 = NoteInputs::new(vec![ONE])?; - let metadata_2 = - NoteMetadata::new(account_id, note_type2, tag2).with_attachment(attachment2.clone()); + let inputs_2 = NoteStorage::new(vec![ONE])?; + let metadata_2 = NoteMetadata::new(account_id, note_type2) + .with_tag(tag2) + .with_attachment(attachment2.clone()); let vault_2 = NoteAssets::new(vec![removed_asset_3, removed_asset_4])?; let recipient_2 = NoteRecipient::new(serial_num_2, note_script_2, inputs_2); let expected_output_note_2 = Note::new(vault_2, metadata_2, recipient_2); @@ -232,9 +247,10 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { // Create the expected output note for Note 3 which is public let serial_num_3 = Word::from([Felt::new(5), Felt::new(6), Felt::new(7), Felt::new(8)]); let note_script_3 = CodeBuilder::default().compile_note_script(DEFAULT_NOTE_CODE)?; - let inputs_3 = NoteInputs::new(vec![ONE, Felt::new(2)])?; - let metadata_3 = - NoteMetadata::new(account_id, note_type3, tag3).with_attachment(attachment3.clone()); + let inputs_3 = NoteStorage::new(vec![ONE, Felt::new(2)])?; + let metadata_3 = NoteMetadata::new(account_id, note_type3) + .with_tag(tag3) + .with_attachment(attachment3.clone()); let vault_3 = NoteAssets::new(vec![])?; let recipient_3 = NoteRecipient::new(serial_num_3, note_script_3, inputs_3); let expected_output_note_3 = Note::new(vault_3, metadata_3, recipient_3); @@ -243,24 +259,7 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { "\ use miden::standards::wallets::basic->wallet use miden::protocol::output_note - - #! Wrapper around move_asset_to_note for use with exec. - #! - #! Inputs: [ASSET, note_idx] - #! 
Outputs: [note_idx] - proc move_asset_to_note - # pad the stack before call - push.0.0.0 movdn.7 movdn.7 movdn.7 padw padw swapdw - # => [ASSET, note_idx, pad(11)] - - call.wallet::move_asset_to_note - dropw - # => [note_idx, pad(11)] - - # remove excess PADs from the stack - repeat.11 swap drop end - # => [note_idx] - end + use mock::util ## TRANSACTION SCRIPT ## ======================================================================================== @@ -268,21 +267,23 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { ## Send some assets from the account vault ## ------------------------------------------------------------------------------------ # partially deplete fungible asset balance - push.0.1.2.3 # recipient + push.{recipient_1} # recipient push.{NOTETYPE1} # note_type push.{tag1} # tag exec.output_note::create # => [note_idx = 0] - push.{REMOVED_ASSET_1} # asset_1 - # => [ASSET, note_idx] + dup + push.{REMOVED_ASSET_VALUE_1} + push.{REMOVED_ASSET_KEY_1} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] - exec.move_asset_to_note + exec.util::move_asset_to_note # => [note_idx] - push.{REMOVED_ASSET_2} # asset_2 - exec.move_asset_to_note - drop + push.{REMOVED_ASSET_VALUE_2} + push.{REMOVED_ASSET_KEY_2} + exec.util::move_asset_to_note # => [] # send non-fungible asset @@ -292,12 +293,16 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { exec.output_note::create # => [note_idx = 1] - push.{REMOVED_ASSET_3} # asset_3 - exec.move_asset_to_note + dup + push.{REMOVED_ASSET_VALUE_3} + push.{REMOVED_ASSET_KEY_3} + exec.util::move_asset_to_note # => [note_idx] - push.{REMOVED_ASSET_4} # asset_4 - exec.move_asset_to_note + dup + push.{REMOVED_ASSET_VALUE_4} + push.{REMOVED_ASSET_KEY_4} + exec.util::move_asset_to_note # => [note_idx] push.{ATTACHMENT2} @@ -320,10 +325,14 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { # => [] end ", - REMOVED_ASSET_1 = Word::from(removed_asset_1), - REMOVED_ASSET_2 = 
Word::from(removed_asset_2), - REMOVED_ASSET_3 = Word::from(removed_asset_3), - REMOVED_ASSET_4 = Word::from(removed_asset_4), + REMOVED_ASSET_KEY_1 = removed_asset_1.to_key_word(), + REMOVED_ASSET_VALUE_1 = removed_asset_1.to_value_word(), + REMOVED_ASSET_KEY_2 = removed_asset_2.to_key_word(), + REMOVED_ASSET_VALUE_2 = removed_asset_2.to_value_word(), + REMOVED_ASSET_KEY_3 = removed_asset_3.to_key_word(), + REMOVED_ASSET_VALUE_3 = removed_asset_3.to_value_word(), + REMOVED_ASSET_KEY_4 = removed_asset_4.to_key_word(), + REMOVED_ASSET_VALUE_4 = removed_asset_4.to_value_word(), RECIPIENT2 = expected_output_note_2.recipient().digest(), RECIPIENT3 = expected_output_note_3.recipient().digest(), NOTETYPE1 = note_type1 as u8, @@ -335,7 +344,7 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { ATTACHMENT3 = attachment3.content().to_word(), ); - let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; + let tx_script = CodeBuilder::with_mock_libraries().compile_tx_script(tx_script_src)?; // expected delta // -------------------------------------------------------------------------------------------- @@ -349,8 +358,8 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { .tx_script(tx_script) .extend_advice_map(vec![(attachment3.content().to_word(), array.as_slice().to_vec())]) .extend_expected_output_notes(vec![ - OutputNote::Full(expected_output_note_2.clone()), - OutputNote::Full(expected_output_note_3.clone()), + RawOutputNote::Full(expected_output_note_2.clone()), + RawOutputNote::Full(expected_output_note_3.clone()), ]) .build()?; @@ -366,9 +375,8 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { // assert that the expected output note 1 is present let resulting_output_note_1 = executed_transaction.output_notes().get_note(0); - let expected_recipient_1 = Word::from([0, 1, 2, 3u32]); let expected_note_assets_1 = NoteAssets::new(vec![combined_asset])?; - let expected_note_id_1 = 
NoteId::new(expected_recipient_1, expected_note_assets_1.commitment()); + let expected_note_id_1 = NoteId::new(recipient_1, expected_note_assets_1.commitment()); assert_eq!(resulting_output_note_1.id(), expected_note_id_1); // assert that the expected output note 2 is present @@ -385,21 +393,21 @@ async fn executed_transaction_output_notes() -> anyhow::Result<()> { let resulting_output_note_3 = executed_transaction.output_notes().get_note(2); assert_eq!(expected_output_note_3.id(), resulting_output_note_3.id()); - assert_eq!(expected_output_note_3.assets(), resulting_output_note_3.assets().unwrap()); + assert_eq!(expected_output_note_3.assets(), resulting_output_note_3.assets()); - // make sure that the number of note inputs remains the same + // make sure that the number of note storage items remains the same let resulting_note_2_recipient = resulting_output_note_2.recipient().expect("output note 2 is not full"); assert_eq!( - resulting_note_2_recipient.inputs().num_values(), - expected_output_note_2.inputs().num_values() + resulting_note_2_recipient.storage().num_items(), + expected_output_note_2.storage().num_items() ); let resulting_note_3_recipient = resulting_output_note_3.recipient().expect("output note 3 is not full"); assert_eq!( - resulting_note_3_recipient.inputs().num_values(), - expected_output_note_3.inputs().num_values() + resulting_note_3_recipient.storage().num_items(), + expected_output_note_3.storage().num_items() ); Ok(()) @@ -412,22 +420,26 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { let source_code = r#" use miden::standards::auth use miden::protocol::tx - const AUTH_UNAUTHORIZED_EVENT=event("miden::auth::unauthorized") + const AUTH_UNAUTHORIZED_EVENT=event("miden::protocol::auth::unauthorized") #! Inputs: [AUTH_ARGS, pad(12)] #! 
Outputs: [pad(16)] + @auth_script pub proc auth_abort_tx dropw # => [pad(16)] - push.0.0 exec.tx::get_block_number exec.::miden::protocol::native_account::incr_nonce - # => [[final_nonce, block_num, 0, 0], pad(16)] + exec.tx::get_block_number + push.0.0 + # => [[0, 0, block_num, final_nonce], pad(16)] # => [SALT, pad(16)] exec.auth::create_tx_summary - # => [SALT, OUTPUT_NOTES_COMMITMENT, INPUT_NOTES_COMMITMENT, ACCOUNT_DELTA_COMMITMENT] + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] - exec.auth::adv_insert_hqword + # insert tx summary into advice provider for extraction by the host + adv.insert_hqword + # => [ACCOUNT_DELTA_COMMITMENT, INPUT_NOTES_COMMITMENT, OUTPUT_NOTES_COMMITMENT, SALT] exec.auth::hash_tx_summary # => [MESSAGE, pad(16)] @@ -439,9 +451,12 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { let auth_code = CodeBuilder::default() .compile_component_code("test::auth_component", source_code) .context("failed to parse auth component")?; - let auth_component = AccountComponent::new(auth_code, vec![]) - .context("failed to parse auth component")? - .with_supports_all_types(); + let auth_component = AccountComponent::new( + auth_code, + vec![], + AccountComponentMetadata::mock("test::auth_component"), + ) + .context("failed to parse auth component")?; let account = AccountBuilder::new([42; 32]) .storage_mode(AccountStorageMode::Private) @@ -452,7 +467,7 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { // Consume and create a note so the input and outputs notes commitment is not the empty word. 
let mut rng = RpoRandomCoin::new(Word::empty()); - let output_note = create_p2id_note( + let output_note = P2idNote::create( account.id(), account.id(), vec![], @@ -463,21 +478,21 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { let input_note = create_spawn_note(vec![&output_note])?; let mut builder = MockChain::builder(); - builder.add_output_note(OutputNote::Full(input_note.clone())); + builder.add_output_note(RawOutputNote::Full(input_note.clone())); let mock_chain = builder.build()?; let tx_context = mock_chain.build_tx_context(account, &[input_note.id()], &[])?.build()?; let ref_block_num = tx_context.tx_inputs().block_header().block_num().as_u32(); - let final_nonce = tx_context.account().nonce().as_int() as u32 + 1; + let final_nonce = tx_context.account().nonce().as_canonical_u64() as u32 + 1; let input_notes = tx_context.input_notes().clone(); - let output_notes = OutputNotes::new(vec![OutputNote::Partial(output_note.into())])?; + let output_notes = RawOutputNotes::new(vec![RawOutputNote::Partial(output_note.into())])?; let error = tx_context.execute().await.unwrap_err(); assert_matches!(error, TransactionExecutorError::Unauthorized(tx_summary) => { assert!(tx_summary.account_delta().vault().is_empty()); assert!(tx_summary.account_delta().storage().is_empty()); - assert_eq!(tx_summary.account_delta().nonce_delta().as_int(), 1); + assert_eq!(tx_summary.account_delta().nonce_delta().as_canonical_u64(), 1); assert_eq!(tx_summary.input_notes(), &input_notes); assert_eq!(tx_summary.output_notes(), &output_notes); assert_eq!(tx_summary.salt(), Word::from( @@ -493,9 +508,11 @@ async fn user_code_can_abort_transaction_with_summary() -> anyhow::Result<()> { #[tokio::test] async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let account = builder.add_existing_mock_account(Auth::BasicAuth)?; + let account = builder.add_existing_mock_account(Auth::BasicAuth { + 
auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let mut rng = RpoRandomCoin::new(Word::empty()); - let p2id_note = create_p2id_note( + let p2id_note = P2idNote::create( account.id(), account.id(), vec![], @@ -520,25 +537,19 @@ async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> 0, 0, tx.block_header().block_num().as_u32(), - tx.final_account().nonce().as_int() as u32, + tx.final_account().nonce().as_canonical_u64() as u32, ]), ); let summary_commitment = summary.to_commitment(); let account_interface = AccountInterface::from_account(&account); let pub_key = match account_interface.auth().first().unwrap() { - AuthScheme::Falcon512Rpo { pub_key } => pub_key, - AuthScheme::NoAuth => panic!("Expected Falcon512Rpo auth scheme, got NoAuth"), - AuthScheme::Falcon512RpoMultisig { .. } => { - panic!("Expected Falcon512Rpo auth scheme, got Falcon512RpoMultisig") - }, - AuthScheme::Unknown => panic!("Expected Falcon512Rpo auth scheme, got Unknown"), - AuthScheme::EcdsaK256Keccak { .. } => { - panic!("Expected Falcon512Rpo auth scheme, got EcdsaK256Keccak") - }, - AuthScheme::EcdsaK256KeccakMultisig { .. } => { - panic!("Expected Falcon512Rpo auth scheme, got EcdsaK256KeccakMultisig") + AuthMethod::SingleSig { approver: (pub_key, _) } => pub_key, + AuthMethod::NoAuth => panic!("Expected SingleSig auth scheme, got NoAuth"), + AuthMethod::Multisig { .. 
} => { + panic!("Expected SingleSig auth scheme, got Multisig") }, + AuthMethod::Unknown => panic!("Expected SingleSig auth scheme, got Unknown"), }; // This is in an internal detail of the tx executor host, but this is the easiest way to check @@ -557,9 +568,10 @@ async fn tx_summary_commitment_is_signed_by_falcon_auth() -> anyhow::Result<()> #[tokio::test] async fn tx_summary_commitment_is_signed_by_ecdsa_auth() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let account = builder.add_existing_mock_account(Auth::EcdsaK256KeccakAuth)?; + let account = builder + .add_existing_mock_account(Auth::BasicAuth { auth_scheme: AuthScheme::EcdsaK256Keccak })?; let mut rng = RpoRandomCoin::new(Word::empty()); - let p2id_note = create_p2id_note( + let p2id_note = P2idNote::create( account.id(), account.id(), vec![], @@ -584,25 +596,19 @@ async fn tx_summary_commitment_is_signed_by_ecdsa_auth() -> anyhow::Result<()> { 0, 0, tx.block_header().block_num().as_u32(), - tx.final_account().nonce().as_int() as u32, + tx.final_account().nonce().as_canonical_u64() as u32, ]), ); let summary_commitment = summary.to_commitment(); let account_interface = AccountInterface::from_account(&account); let pub_key = match account_interface.auth().first().unwrap() { - AuthScheme::EcdsaK256Keccak { pub_key } => pub_key, - AuthScheme::EcdsaK256KeccakMultisig { .. } => { - panic!("Expected EcdsaK256Keccak auth scheme, got EcdsaK256KeccakMultisig") - }, - AuthScheme::NoAuth => panic!("Expected EcdsaK256Keccak auth scheme, got NoAuth"), - AuthScheme::Falcon512RpoMultisig { .. } => { - panic!("Expected EcdsaK256Keccak auth scheme, got Falcon512RpoMultisig") - }, - AuthScheme::Unknown => panic!("Expected EcdsaK256Keccak auth scheme, got Unknown"), - AuthScheme::Falcon512Rpo { .. 
} => { - panic!("Expected EcdsaK256Keccak auth scheme, got Falcon512Rpo") + AuthMethod::SingleSig { approver: (pub_key, _) } => pub_key, + AuthMethod::NoAuth => panic!("Expected SingleSig auth scheme, got NoAuth"), + AuthMethod::Multisig { .. } => { + panic!("Expected SingleSig auth scheme, got Multisig") }, + AuthMethod::Unknown => panic!("Expected SingleSig auth scheme, got Unknown"), }; // This is in an internal detail of the tx executor host, but this is the easiest way to check @@ -706,25 +712,28 @@ async fn test_tx_script_inputs() -> anyhow::Result<()> { #[tokio::test] async fn test_tx_script_args() -> anyhow::Result<()> { let tx_script_args = Word::from([1, 2, 3, 4u32]); + let advice_entry = Word::from([5, 6, 7, 8u32]); - let tx_script_src = r#" + let tx_script_src = format!( + r#" begin # => [TX_SCRIPT_ARGS] # `TX_SCRIPT_ARGS` value is a user provided word, which could be used during the # transaction execution. In this example it is a `[1, 2, 3, 4]` word. # assert the correctness of the argument - dupw push.1.2.3.4 assert_eqw.err="provided transaction arguments don't match the expected ones" + dupw push.{tx_script_args} assert_eqw.err="provided transaction arguments don't match the expected ones" # => [TX_SCRIPT_ARGS] # since we provided an advice map entry with the transaction script arguments as a key, # we can obtain the value of this entry - adv.push_mapval adv_push.4 + adv.push_mapval padw adv_loadw # => [[map_entry_values], TX_SCRIPT_ARGS] # assert the correctness of the map entry values - push.5.6.7.8 assert_eqw.err="obtained advice map value doesn't match the expected one" - end"#; + push.{advice_entry} assert_eqw.err="obtained advice map value doesn't match the expected one" + end"# + ); let tx_script = CodeBuilder::default() .compile_tx_script(tx_script_src) @@ -734,10 +743,7 @@ async fn test_tx_script_args() -> anyhow::Result<()> { // argument let tx_context = TransactionContextBuilder::with_existing_mock_account() .tx_script(tx_script) - 
.extend_advice_map([( - tx_script_args, - vec![Felt::new(5), Felt::new(6), Felt::new(7), Felt::new(8)], - )]) + .extend_advice_map([(tx_script_args, advice_entry.as_elements().to_vec())]) .tx_script_args(tx_script_args) .build()?; @@ -767,8 +773,8 @@ async fn inputs_created_correctly() -> anyhow::Result<()> { let component = AccountComponent::new( component_code.clone(), vec![StorageSlot::with_value(StorageSlotName::mock(0), Word::default())], - )? - .with_supports_all_types(); + AccountComponentMetadata::mock("test::adv_map_component"), + )?; let account_code = AccountCode::from_components( &[IncrNonceAuthComponent.into(), component.clone()], @@ -822,7 +828,9 @@ async fn inputs_created_correctly() -> anyhow::Result<()> { async fn tx_can_be_reexecuted() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Use basic auth so the tx requires a signature for successful execution. - let account = builder.add_existing_mock_account(Auth::BasicAuth)?; + let account = builder.add_existing_mock_account(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let note = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into()?, account.id(), diff --git a/crates/miden-testing/src/lib.rs b/crates/miden-testing/src/lib.rs index aab4f7c96c..6763012635 100644 --- a/crates/miden-testing/src/lib.rs +++ b/crates/miden-testing/src/lib.rs @@ -17,7 +17,7 @@ pub use mock_chain::{ }; mod tx_context; -pub use tx_context::{TransactionContext, TransactionContextBuilder}; +pub use tx_context::{ExecError, TransactionContext, TransactionContextBuilder}; pub mod executor; diff --git a/crates/miden-testing/src/mock_chain/auth.rs b/crates/miden-testing/src/mock_chain/auth.rs index c65dbfd5e6..5b7f06b06a 100644 --- a/crates/miden-testing/src/mock_chain/auth.rs +++ b/crates/miden-testing/src/mock_chain/auth.rs @@ -4,19 +4,17 @@ use alloc::vec::Vec; use miden_protocol::Word; use miden_protocol::account::AccountComponent; -use miden_protocol::account::auth::{AuthSecretKey, 
PublicKeyCommitment}; +use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKeyCommitment}; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; use miden_standards::account::auth::{ - AuthEcdsaK256Keccak, - AuthEcdsaK256KeccakAcl, - AuthEcdsaK256KeccakAclConfig, - AuthEcdsaK256KeccakMultisig, - AuthEcdsaK256KeccakMultisigConfig, - AuthFalcon512Rpo, - AuthFalcon512RpoAcl, - AuthFalcon512RpoAclConfig, - AuthFalcon512RpoMultisig, - AuthFalcon512RpoMultisigConfig, + AuthMultisig, + AuthMultisigConfig, + AuthMultisigPsm, + AuthMultisigPsmConfig, + AuthSingleSig, + AuthSingleSigAcl, + AuthSingleSigAclConfig, + PsmConfig, }; use miden_standards::testing::account_component::{ ConditionalAuthComponent, @@ -30,43 +28,32 @@ use rand_chacha::ChaCha20Rng; #[derive(Debug, Clone)] pub enum Auth { /// Creates a secret key for the account and creates a [BasicAuthenticator] used to - /// authenticate the account with [AuthFalcon512Rpo]. - BasicAuth, + /// authenticate the account with [AuthSingleSig]. + BasicAuth { auth_scheme: AuthScheme }, - /// Creates a secret key for the account and creates a [BasicAuthenticator] used to - /// authenticate the account with [AuthEcdsaK256Keccak]. - EcdsaK256KeccakAuth, - - /// Creates a secret key for the account, and creates a [BasicAuthenticator] used to - /// authenticate the account with [AuthEcdsaK256KeccakAcl]. Authentication will only be - /// triggered if any of the procedures specified in the list are called during execution. - EcdsaK256KeccakAcl { - auth_trigger_procedures: Vec, - allow_unauthorized_output_notes: bool, - allow_unauthorized_input_notes: bool, - }, - - // Ecsda Multisig - EcdsaK256KeccakMultisig { + /// Multisig + Multisig { threshold: u32, - approvers: Vec, + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, proc_threshold_map: Vec<(Word, u32)>, }, - /// Multisig - Multisig { + /// Multisig with a private state manager. 
+ MultisigPsm { threshold: u32, - approvers: Vec, + approvers: Vec<(PublicKeyCommitment, AuthScheme)>, + psm_config: PsmConfig, proc_threshold_map: Vec<(Word, u32)>, }, /// Creates a secret key for the account, and creates a [BasicAuthenticator] used to - /// authenticate the account with [AuthFalcon512RpoAcl]. Authentication will only be + /// authenticate the account with [AuthSingleSigAcl]. Authentication will only be /// triggered if any of the procedures specified in the list are called during execution. Acl { auth_trigger_procedures: Vec, allow_unauthorized_output_notes: bool, allow_unauthorized_input_notes: bool, + auth_scheme: AuthScheme, }, /// Creates a mock authentication mechanism for the account that only increments the nonce. @@ -89,48 +76,37 @@ impl Auth { /// `Some` when [`Auth::BasicAuth`] is passed." pub fn build_component(&self) -> (AccountComponent, Option) { match self { - Auth::BasicAuth => { + Auth::BasicAuth { auth_scheme } => { let mut rng = ChaCha20Rng::from_seed(Default::default()); - let sec_key = AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng); + let sec_key = AuthSecretKey::with_scheme_and_rng(*auth_scheme, &mut rng) + .expect("failed to create secret key"); let pub_key = sec_key.public_key().to_commitment(); - let component = AuthFalcon512Rpo::new(pub_key).into(); + let component = AuthSingleSig::new(pub_key, *auth_scheme).into(); let authenticator = BasicAuthenticator::new(&[sec_key]); (component, Some(authenticator)) }, - Auth::EcdsaK256KeccakAuth => { - let mut rng = ChaCha20Rng::from_seed(Default::default()); - let sec_key = AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng); - let pub_key = sec_key.public_key().to_commitment(); - - let component = AuthEcdsaK256Keccak::new(pub_key).into(); - let authenticator = BasicAuthenticator::new(&[sec_key]); - - (component, Some(authenticator)) - }, - Auth::EcdsaK256KeccakMultisig { threshold, approvers, proc_threshold_map } => { - let pub_keys: Vec<_> = - 
approvers.iter().map(|word| PublicKeyCommitment::from(*word)).collect(); - - let config = AuthEcdsaK256KeccakMultisigConfig::new(pub_keys, *threshold) + Auth::Multisig { threshold, approvers, proc_threshold_map } => { + let config = AuthMultisigConfig::new(approvers.clone(), *threshold) .and_then(|cfg| cfg.with_proc_thresholds(proc_threshold_map.clone())) .expect("invalid multisig config"); - let component = AuthEcdsaK256KeccakMultisig::new(config) - .expect("multisig component creation failed") - .into(); + let component = + AuthMultisig::new(config).expect("multisig component creation failed").into(); (component, None) }, - Auth::Multisig { threshold, approvers, proc_threshold_map } => { - let pub_keys: Vec<_> = - approvers.iter().map(|word| PublicKeyCommitment::from(*word)).collect(); - - let config = AuthFalcon512RpoMultisigConfig::new(pub_keys, *threshold) + Auth::MultisigPsm { + threshold, + approvers, + psm_config, + proc_threshold_map, + } => { + let config = AuthMultisigPsmConfig::new(approvers.clone(), *threshold, *psm_config) .and_then(|cfg| cfg.with_proc_thresholds(proc_threshold_map.clone())) - .expect("invalid multisig config"); - let component = AuthFalcon512RpoMultisig::new(config) - .expect("multisig component creation failed") + .expect("invalid multisig psm config"); + let component = AuthMultisigPsm::new(config) + .expect("multisig psm component creation failed") .into(); (component, None) @@ -139,36 +115,17 @@ impl Auth { auth_trigger_procedures, allow_unauthorized_output_notes, allow_unauthorized_input_notes, + auth_scheme, } => { let mut rng = ChaCha20Rng::from_seed(Default::default()); - let sec_key = AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng); - let pub_key = sec_key.public_key().to_commitment(); - - let component = AuthFalcon512RpoAcl::new( - pub_key, - AuthFalcon512RpoAclConfig::new() - .with_auth_trigger_procedures(auth_trigger_procedures.clone()) - .with_allow_unauthorized_output_notes(*allow_unauthorized_output_notes) - 
.with_allow_unauthorized_input_notes(*allow_unauthorized_input_notes), - ) - .expect("component creation failed") - .into(); - let authenticator = BasicAuthenticator::new(&[sec_key]); - - (component, Some(authenticator)) - }, - Auth::EcdsaK256KeccakAcl { - auth_trigger_procedures, - allow_unauthorized_output_notes, - allow_unauthorized_input_notes, - } => { - let mut rng = ChaCha20Rng::from_seed(Default::default()); - let sec_key = AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng); + let sec_key = AuthSecretKey::with_scheme_and_rng(*auth_scheme, &mut rng) + .expect("failed to create secret key"); let pub_key = sec_key.public_key().to_commitment(); - let component = AuthEcdsaK256KeccakAcl::new( + let component = AuthSingleSigAcl::new( pub_key, - AuthEcdsaK256KeccakAclConfig::new() + *auth_scheme, + AuthSingleSigAclConfig::new() .with_auth_trigger_procedures(auth_trigger_procedures.clone()) .with_allow_unauthorized_output_notes(*allow_unauthorized_output_notes) .with_allow_unauthorized_input_notes(*allow_unauthorized_input_notes), diff --git a/crates/miden-testing/src/mock_chain/chain.rs b/crates/miden-testing/src/mock_chain/chain.rs index fe75c97fc4..08d9d7a7cc 100644 --- a/crates/miden-testing/src/mock_chain/chain.rs +++ b/crates/miden-testing/src/mock_chain/chain.rs @@ -3,7 +3,7 @@ use alloc::vec::Vec; use anyhow::Context; use miden_block_prover::LocalBlockProver; -use miden_processor::DeserializationError; +use miden_processor::serde::DeserializationError; use miden_protocol::MIN_PROOF_SECURITY_LEVEL; use miden_protocol::account::auth::{AuthSecretKey, PublicKey}; use miden_protocol::account::delta::AccountUpdateDetails; @@ -32,9 +32,8 @@ use miden_protocol::transaction::{ }; use miden_tx::LocalTransactionProver; use miden_tx::auth::BasicAuthenticator; -use miden_tx::utils::{ByteReader, Deserializable, Serializable}; +use miden_tx::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; use miden_tx_batch_prover::LocalBatchProver; -use 
winterfell::ByteWriter; use super::note::MockChainNote; use crate::{MockChainBuilder, TransactionContextBuilder}; @@ -64,6 +63,7 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// ``` /// # use anyhow::Result; /// # use miden_protocol::{ +/// # account::auth::AuthScheme, /// # asset::{Asset, FungibleAsset}, /// # note::NoteType, /// # }; @@ -76,8 +76,11 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// /// let mut builder = MockChain::builder(); /// -/// // Add a recipient wallet. -/// let receiver = builder.add_existing_wallet(Auth::BasicAuth)?; +/// // Add a recipient wallet with basic authentication. +/// // Use either ECDSA K256 Keccak (scheme_id: 1) or Falcon512Poseidon2 (scheme_id: 2) auth scheme. +/// let receiver = builder.add_existing_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; /// /// // Add a wallet with assets. /// let sender = builder.add_existing_wallet(Auth::IncrNonce)?; @@ -127,18 +130,33 @@ use crate::{MockChainBuilder, TransactionContextBuilder}; /// /// ``` /// # use anyhow::Result; -/// # use miden_protocol::{Felt, asset::{Asset, FungibleAsset}, note::NoteType}; +/// # use miden_protocol::{ +/// # Felt, +/// # account::auth::AuthScheme, +/// # asset::{Asset, FungibleAsset}, +/// # note::NoteType +/// # }; /// # use miden_testing::{Auth, MockChain, TransactionContextBuilder}; /// # /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<()> { /// let mut builder = MockChain::builder(); /// -/// let faucet = builder.create_new_faucet(Auth::BasicAuth, "USDT", 100_000)?; +/// let faucet = builder.create_new_faucet( +/// Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// }, +/// "USDT", +/// 100_000, +/// )?; /// let asset = Asset::from(FungibleAsset::new(faucet.id(), 10)?); /// -/// let sender = builder.create_new_wallet(Auth::BasicAuth)?; -/// let target = builder.create_new_wallet(Auth::BasicAuth)?; +/// let sender = 
builder.create_new_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; +/// let target = builder.create_new_wallet(Auth::BasicAuth { +/// auth_scheme: AuthScheme::Falcon512Poseidon2, +/// })?; /// /// let note = builder.add_p2id_note(faucet.id(), target.id(), &[asset], NoteType::Public)?; /// @@ -169,6 +187,9 @@ pub struct MockChain { /// block. pending_transactions: Vec, + /// Batches that have been submitted to the chain but have not yet been included in a block. + pending_batches: Vec, + /// NoteID |-> MockChainNote mapping to simplify note retrieval. committed_notes: BTreeMap, @@ -218,6 +239,7 @@ impl MockChain { account_tree: AccountTree, account_authenticators: BTreeMap, secret_key: SecretKey, + genesis_notes: Vec, ) -> anyhow::Result { let mut chain = MockChain { chain: Blockchain::default(), @@ -225,6 +247,7 @@ impl MockChain { nullifier_tree: NullifierTree::default(), account_tree, pending_transactions: Vec::new(), + pending_batches: Vec::new(), committed_notes: BTreeMap::new(), committed_accounts: BTreeMap::new(), account_authenticators, @@ -237,6 +260,20 @@ impl MockChain { .apply_block(genesis_block) .context("failed to build account from builder")?; + // Update committed_notes with full note details for genesis notes. + // This is needed because apply_block only stores headers for private notes, + // but tests need full note details to create input notes. 
+ for note in genesis_notes { + if let Some(MockChainNote::Private(_, _, inclusion_proof)) = + chain.committed_notes.get(¬e.id()) + { + chain.committed_notes.insert( + note.id(), + MockChainNote::Public(note.clone(), inclusion_proof.clone()), + ); + } + } + debug_assert_eq!(chain.blocks.len(), 1); debug_assert_eq!(chain.committed_accounts.len(), chain.account_tree.num_accounts()); @@ -721,7 +758,7 @@ impl MockChain { let account = self.committed_account(account_id)?.clone(); let account_witness = self.account_tree().open(account_id); - assert_eq!(account_witness.state_commitment(), account.commitment()); + assert_eq!(account_witness.state_commitment(), account.to_commitment()); Ok((account, account_witness)) } @@ -837,6 +874,14 @@ impl MockChain { self.pending_transactions.push(transaction); } + /// Adds the given [`ProvenBatch`] to the list of pending batches. + /// + /// A block has to be created to apply the batch effects to the chain state, e.g. using + /// [`MockChain::prove_next_block`]. + pub fn add_pending_batch(&mut self, batch: ProvenBatch) { + self.pending_batches.push(batch); + } + // PRIVATE HELPERS // ---------------------------------------------------------------------------------------- @@ -899,9 +944,11 @@ impl MockChain { ) .context("failed to create inclusion proof for output note")?; - if let OutputNote::Full(note) = created_note { - self.committed_notes - .insert(note.id(), MockChainNote::Public(note.clone(), note_inclusion_proof)); + if let OutputNote::Public(public_note) = created_note { + self.committed_notes.insert( + public_note.id(), + MockChainNote::Public(public_note.as_note().clone(), note_inclusion_proof), + ); } else { self.committed_notes.insert( created_note.id(), @@ -961,7 +1008,8 @@ impl MockChain { // Create batches from pending transactions. 
// ---------------------------------------------------------------------------------------- - let batches = self.pending_transactions_to_batches()?; + let mut batches = self.pending_transactions_to_batches()?; + batches.extend(core::mem::take(&mut self.pending_batches)); // Create block. // ---------------------------------------------------------------------------------------- @@ -1038,6 +1086,7 @@ impl Deserializable for MockChain { nullifier_tree, account_tree, pending_transactions, + pending_batches: Vec::new(), committed_notes, committed_accounts, account_authenticators, @@ -1094,7 +1143,12 @@ impl Serializable for AccountAuthenticator { fn write_into(&self, target: &mut W) { self.authenticator .as_ref() - .map(|auth| auth.keys().values().collect::>()) + .map(|auth| { + auth.keys() + .values() + .map(|(secret_key, public_key)| (secret_key, public_key.as_ref().clone())) + .collect::>() + }) .write_into(target); } } @@ -1148,6 +1202,7 @@ impl From for TxContextInput { #[cfg(test)] mod tests { + use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{AccountBuilder, AccountStorageMode}; use miden_protocol::asset::{Asset, FungibleAsset}; use miden_protocol::note::NoteType; @@ -1179,14 +1234,15 @@ mod tests { .with_component(BasicWallet); let mut builder = MockChain::builder(); + let auth_scheme = AuthScheme::EcdsaK256Keccak; let account = builder.add_account_from_builder( - Auth::BasicAuth, + Auth::BasicAuth { auth_scheme }, account_builder, AccountState::New, )?; let account_id = account.id(); - assert_eq!(account.nonce().as_int(), 0); + assert_eq!(account.nonce().as_canonical_u64(), 0); let note_1 = builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into().unwrap(), @@ -1207,9 +1263,9 @@ mod tests { mock_chain.add_pending_executed_transaction(&tx)?; mock_chain.prove_next_block()?; - assert!(tx.final_account().nonce().as_int() > 0); + assert!(tx.final_account().nonce().as_canonical_u64() > 0); assert_eq!( - tx.final_account().commitment(), + 
tx.final_account().to_commitment(), mock_chain.account_tree.open(account_id).state_commitment() ); @@ -1224,7 +1280,9 @@ mod tests { for i in 0..10 { let account = builder .add_account_from_builder( - Auth::BasicAuth, + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, AccountBuilder::new([i; 32]).with_component(BasicWallet), AccountState::New, ) @@ -1305,4 +1363,26 @@ mod tests { Ok(()) } + + #[tokio::test] + async fn add_pending_batch() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + let account = builder.add_existing_mock_account(Auth::IncrNonce)?; + let mut chain = builder.build()?; + + // Execute a noop transaction and create a batch from it. + let tx = chain.build_tx_context(account.id(), &[], &[])?.build()?.execute().await?; + let proven_tx = LocalTransactionProver::default().prove_dummy(tx)?; + let proposed_batch = chain.propose_transaction_batch(vec![proven_tx])?; + let proven_batch = chain.prove_transaction_batch(proposed_batch)?; + + // Submit the batch directly and prove the block. 
+ let num_blocks_before = chain.proven_blocks().len(); + chain.add_pending_batch(proven_batch); + chain.prove_next_block()?; + + assert_eq!(chain.proven_blocks().len(), num_blocks_before + 1); + + Ok(()) + } } diff --git a/crates/miden-testing/src/mock_chain/chain_builder.rs b/crates/miden-testing/src/mock_chain/chain_builder.rs index 19f014e18c..84dc5ba520 100644 --- a/crates/miden-testing/src/mock_chain/chain_builder.rs +++ b/crates/miden-testing/src/mock_chain/chain_builder.rs @@ -13,14 +13,13 @@ const DEFAULT_FAUCET_DECIMALS: u8 = 10; // ================================================================================================ use itertools::Itertools; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; use miden_protocol::account::delta::AccountUpdateDetails; use miden_protocol::account::{ Account, AccountBuilder, AccountDelta, AccountId, - AccountStorage, AccountStorageMode, AccountType, StorageSlot, @@ -40,17 +39,17 @@ use miden_protocol::block::{ OutputNoteBatch, ProvenBlock, }; -use miden_protocol::crypto::dsa::ecdsa_k256_keccak::SecretKey; use miden_protocol::crypto::merkle::smt::Smt; use miden_protocol::errors::NoteError; use miden_protocol::note::{Note, NoteAttachment, NoteDetails, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_NATIVE_ASSET_FAUCET; -use miden_protocol::testing::random_signer::RandomBlockSigner; -use miden_protocol::transaction::{OrderedTransactionHeaders, OutputNote, TransactionKernel}; -use miden_protocol::{Felt, MAX_OUTPUT_NOTES_PER_BATCH, Word, ZERO}; +use miden_protocol::testing::random_secret_key::random_secret_key; +use miden_protocol::transaction::{OrderedTransactionHeaders, RawOutputNote, TransactionKernel}; +use miden_protocol::{Felt, MAX_OUTPUT_NOTES_PER_BATCH, Word}; +use miden_standards::account::access::Ownable2Step; use miden_standards::account::faucets::{BasicFungibleFaucet, NetworkFungibleFaucet}; use miden_standards::account::wallets::BasicWallet; -use 
miden_standards::note::{create_p2id_note, create_p2ide_note, create_swap_note}; +use miden_standards::note::{P2idNote, P2ideNote, P2ideNoteStorage, SwapNote}; use miden_standards::testing::account_component::MockAccountComponent; use rand::Rng; @@ -105,7 +104,7 @@ use crate::{AccountState, Auth, MockChain}; pub struct MockChainBuilder { accounts: BTreeMap, account_authenticators: BTreeMap, - notes: Vec, + notes: Vec, rng: RpoRandomCoin, // Fee parameters. native_asset_id: AccountId, @@ -182,7 +181,7 @@ impl MockChainBuilder { .into_values() .map(|account| { let account_id = account.id(); - let account_commitment = account.commitment(); + let account_commitment = account.to_commitment(); let account_delta = AccountDelta::try_from(account) .expect("chain builder should only store existing accounts without seeds"); let update_details = AccountUpdateDetails::Delta(account_delta); @@ -198,7 +197,22 @@ impl MockChainBuilder { ) .context("failed to create genesis account tree")?; - let note_chunks = self.notes.into_iter().chunks(MAX_OUTPUT_NOTES_PER_BATCH); + // Extract full notes before shrinking for later use in MockChain + let full_notes: Vec = self + .notes + .iter() + .filter_map(|note| match note { + RawOutputNote::Full(n) => Some(n.clone()), + _ => None, + }) + .collect(); + + let proven_notes: Vec<_> = self + .notes + .into_iter() + .map(|note| note.to_output_note().expect("genesis note should be valid")) + .collect(); + let note_chunks = proven_notes.into_iter().chunks(MAX_OUTPUT_NOTES_PER_BATCH); let output_note_batches: Vec = note_chunks .into_iter() .map(|batch_notes| batch_notes.into_iter().enumerate().collect::>()) @@ -222,7 +236,7 @@ impl MockChainBuilder { let timestamp = MockChain::TIMESTAMP_START_SECS; let fee_parameters = FeeParameters::new(self.native_asset_id, self.verification_base_fee) .context("failed to construct fee parameters")?; - let validator_secret_key = SecretKey::random(); + let validator_secret_key = random_secret_key(); let 
validator_public_key = validator_secret_key.public_key(); let header = BlockHeader::new( @@ -256,6 +270,7 @@ impl MockChainBuilder { account_tree, self.account_authenticators, validator_secret_key, + full_notes, ) } @@ -309,9 +324,7 @@ impl MockChainBuilder { ) -> anyhow::Result { let token_symbol = TokenSymbol::new(token_symbol) .with_context(|| format!("invalid token symbol: {token_symbol}"))?; - let max_supply_felt = max_supply.try_into().map_err(|_| { - anyhow::anyhow!("max supply value cannot be converted to Felt: {max_supply}") - })?; + let max_supply_felt = Felt::try_from(max_supply)?; let basic_faucet = BasicFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, max_supply_felt) .context("failed to create BasicFungibleFaucet")?; @@ -333,35 +346,24 @@ impl MockChainBuilder { auth_method: Auth, token_symbol: &str, max_supply: u64, - total_issuance: Option, + token_supply: Option, ) -> anyhow::Result { - let token_symbol = TokenSymbol::new(token_symbol).context("invalid argument")?; + let max_supply = Felt::try_from(max_supply)?; + let token_supply = Felt::try_from(token_supply.unwrap_or(0))?; + let token_symbol = + TokenSymbol::new(token_symbol).context("failed to create token symbol")?; + let basic_faucet = - BasicFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, Felt::new(max_supply)) - .context("invalid argument")?; + BasicFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, max_supply) + .and_then(|fungible_faucet| fungible_faucet.with_token_supply(token_supply)) + .context("failed to create basic fungible faucet")?; let account_builder = AccountBuilder::new(self.rng.random()) .storage_mode(AccountStorageMode::Public) .with_component(basic_faucet) .account_type(AccountType::FungibleFaucet); - let mut account = - self.add_account_from_builder(auth_method, account_builder, AccountState::Exists)?; - - // The faucet's sysdata slot is initialized to an empty word by default. 
- // If total_issuance is set, overwrite it and reinsert the account. - if let Some(issuance) = total_issuance { - account - .storage_mut() - .set_item( - AccountStorage::faucet_sysdata_slot(), - Word::from([ZERO, ZERO, ZERO, Felt::new(issuance)]), - ) - .context("failed to set faucet storage")?; - self.accounts.insert(account.id(), account.clone()); - } - - Ok(account) + self.add_account_from_builder(auth_method, account_builder, AccountState::Exists) } /// Adds an existing [`NetworkFungibleFaucet`] account to the initial chain state. @@ -372,40 +374,26 @@ impl MockChainBuilder { token_symbol: &str, max_supply: u64, owner_account_id: AccountId, - total_issuance: Option, + token_supply: Option, ) -> anyhow::Result { - let token_symbol = TokenSymbol::new(token_symbol).context("invalid argument")?; - let network_faucet = NetworkFungibleFaucet::new( - token_symbol, - DEFAULT_FAUCET_DECIMALS, - Felt::new(max_supply), - owner_account_id, - ) - .context("invalid argument")?; + let max_supply = Felt::try_from(max_supply)?; + let token_supply = Felt::try_from(token_supply.unwrap_or(0))?; + let token_symbol = + TokenSymbol::new(token_symbol).context("failed to create token symbol")?; + + let network_faucet = + NetworkFungibleFaucet::new(token_symbol, DEFAULT_FAUCET_DECIMALS, max_supply) + .and_then(|fungible_faucet| fungible_faucet.with_token_supply(token_supply)) + .context("failed to create network fungible faucet")?; let account_builder = AccountBuilder::new(self.rng.random()) .storage_mode(AccountStorageMode::Network) .with_component(network_faucet) + .with_component(Ownable2Step::new(owner_account_id)) .account_type(AccountType::FungibleFaucet); - // Network faucets always use Noop auth (no authentication) - let mut account = - self.add_account_from_builder(Auth::IncrNonce, account_builder, AccountState::Exists)?; - - // The faucet's sysdata slot is initialized to an empty word by default. - // If total_issuance is set, overwrite it and reinsert the account. 
- if let Some(issuance) = total_issuance { - account - .storage_mut() - .set_item( - AccountStorage::faucet_sysdata_slot(), - Word::from([ZERO, ZERO, ZERO, Felt::new(issuance)]), - ) - .context("failed to set faucet storage")?; - self.accounts.insert(account.id(), account.clone()); - } - - Ok(account) + // Network faucets always use IncrNonce auth (no authentication) + self.add_account_from_builder(Auth::IncrNonce, account_builder, AccountState::Exists) } /// Creates a new public account with an [`MockAccountComponent`] and registers the @@ -517,7 +505,7 @@ impl MockChainBuilder { // ---------------------------------------------------------------------------------------- /// Adds the provided note to the initial chain state. - pub fn add_output_note(&mut self, note: impl Into) { + pub fn add_output_note(&mut self, note: impl Into) { self.notes.push(note.into()); } @@ -532,7 +520,7 @@ impl MockChainBuilder { assets: impl IntoIterator, ) -> anyhow::Result { let note = create_p2any_note(sender_account_id, note_type, assets, &mut self.rng); - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } @@ -549,7 +537,7 @@ impl MockChainBuilder { asset: &[Asset], note_type: NoteType, ) -> Result { - let note = create_p2id_note( + let note = P2idNote::create( sender_account_id, target_account_id, asset.to_vec(), @@ -557,12 +545,12 @@ impl MockChainBuilder { NoteAttachment::default(), &mut self.rng, )?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } - /// Adds a P2IDE [`OutputNote`] (pay‑to‑ID‑extended) to the list of genesis notes. + /// Adds a P2IDE note (pay‑to‑ID‑extended) to the list of genesis notes. 
/// /// A P2IDE note can include an optional `timelock_height` and/or an optional /// `reclaim_height` after which the `sender_account_id` may reclaim the @@ -576,23 +564,23 @@ impl MockChainBuilder { reclaim_height: Option, timelock_height: Option, ) -> Result { - let note = create_p2ide_note( + let storage = P2ideNoteStorage::new(target_account_id, reclaim_height, timelock_height); + + let note = P2ideNote::create( sender_account_id, - target_account_id, + storage, asset.to_vec(), - reclaim_height, - timelock_height, note_type, Default::default(), &mut self.rng, )?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } - /// Adds a public SWAP [`OutputNote`] to the list of genesis notes. + /// Adds a public SWAP note to the list of genesis notes. pub fn add_swap_note( &mut self, sender: AccountId, @@ -600,7 +588,7 @@ impl MockChainBuilder { requested_asset: Asset, payback_note_type: NoteType, ) -> anyhow::Result<(Note, NoteDetails)> { - let (swap_note, payback_note) = create_swap_note( + let (swap_note, payback_note) = SwapNote::create( sender, offered_asset, requested_asset, @@ -611,7 +599,7 @@ impl MockChainBuilder { &mut self.rng, )?; - self.add_output_note(OutputNote::Full(swap_note.clone())); + self.add_output_note(RawOutputNote::Full(swap_note.clone())); Ok((swap_note, payback_note)) } @@ -634,7 +622,7 @@ impl MockChainBuilder { I: ExactSizeIterator, { let note = create_spawn_note(output_notes)?; - self.add_output_note(OutputNote::Full(note.clone())); + self.add_output_note(RawOutputNote::Full(note.clone())); Ok(note) } diff --git a/crates/miden-testing/src/mock_chain/note.rs b/crates/miden-testing/src/mock_chain/note.rs index 759ef257ea..233c751f15 100644 --- a/crates/miden-testing/src/mock_chain/note.rs +++ b/crates/miden-testing/src/mock_chain/note.rs @@ -1,8 +1,7 @@ -use miden_processor::DeserializationError; +use miden_processor::serde::DeserializationError; use 
miden_protocol::note::{Note, NoteId, NoteInclusionProof, NoteMetadata}; use miden_protocol::transaction::InputNote; -use miden_tx::utils::{ByteReader, Deserializable, Serializable}; -use winterfell::ByteWriter; +use miden_tx::utils::serde::{ByteReader, ByteWriter, Deserializable, Serializable}; // MOCK CHAIN NOTE // ================================================================================================ diff --git a/crates/miden-testing/src/mock_host.rs b/crates/miden-testing/src/mock_host.rs index 0bcb1d766f..0c9d1060d5 100644 --- a/crates/miden-testing/src/mock_host.rs +++ b/crates/miden-testing/src/mock_host.rs @@ -2,15 +2,10 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - AsyncHost, - BaseHost, - EventError, - FutureMaybeSend, - MastForest, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, ProcessorState}; use miden_protocol::transaction::TransactionEventId; use miden_protocol::vm::EventId; use miden_protocol::{CoreLibrary, Word}; @@ -64,7 +59,7 @@ impl<'store> MockHost<'store> { &TransactionEventId::LinkMapSet, &TransactionEventId::LinkMapGet, // TODO: It should be possible to remove this after implementing - // https://github.com/0xMiden/miden-base/issues/1852. + // https://github.com/0xMiden/protocol/issues/1852. 
&TransactionEventId::EpilogueBeforeTxFeeRemovedFromAccount, ] .map(TransactionEventId::event_id), @@ -78,8 +73,7 @@ impl<'store> MockHost<'store> { self.handled_events.extend( [ &TransactionEventId::AccountBeforeForeignLoad, - &TransactionEventId::AccountVaultBeforeGetBalance, - &TransactionEventId::AccountVaultBeforeHasNonFungibleAsset, + &TransactionEventId::AccountVaultBeforeGetAsset, &TransactionEventId::AccountVaultBeforeAddAsset, &TransactionEventId::AccountVaultBeforeRemoveAsset, &TransactionEventId::AccountStorageBeforeSetMapItem, @@ -90,7 +84,7 @@ impl<'store> MockHost<'store> { } } -impl<'store> BaseHost for MockHost<'store> { +impl<'store> Host for MockHost<'store> { fn get_label_and_source_file( &self, location: &miden_protocol::assembly::debuginfo::Location, @@ -100,16 +94,14 @@ impl<'store> BaseHost for MockHost<'store> { ) { self.exec_host.get_label_and_source_file(location) } -} -impl<'store> AsyncHost for MockHost<'store> { fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { self.exec_host.get_mast_forest(node_digest) } fn on_event( &mut self, - process: &ProcessState, + process: &ProcessorState, ) -> impl FutureMaybeSend, EventError>> { let event_id = EventId::from_felt(process.get_stack_item(0)); diff --git a/crates/miden-testing/src/standards/mod.rs b/crates/miden-testing/src/standards/mod.rs index 288c3ee84c..76cf06a85d 100644 --- a/crates/miden-testing/src/standards/mod.rs +++ b/crates/miden-testing/src/standards/mod.rs @@ -1 +1,2 @@ mod network_account_target; +mod note_tag; diff --git a/crates/miden-testing/src/standards/network_account_target.rs b/crates/miden-testing/src/standards/network_account_target.rs index 9908377592..0b7d3d47c2 100644 --- a/crates/miden-testing/src/standards/network_account_target.rs +++ b/crates/miden-testing/src/standards/network_account_target.rs @@ -2,9 +2,9 @@ use miden_protocol::Felt; use miden_protocol::account::AccountStorageMode; -use miden_protocol::note::{NoteAttachment, 
NoteExecutionHint, NoteMetadata, NoteTag, NoteType}; +use miden_protocol::note::{NoteAttachment, NoteMetadata, NoteTag, NoteType}; use miden_protocol::testing::account_id::AccountIdBuilder; -use miden_standards::note::NetworkAccountTarget; +use miden_standards::note::{NetworkAccountTarget, NoteExecutionHint}; use crate::executor::CodeExecutor; @@ -16,9 +16,9 @@ async fn network_account_target_get_id() -> anyhow::Result<()> { let exec_hint = NoteExecutionHint::Always; let attachment = NoteAttachment::from(NetworkAccountTarget::new(target_id, exec_hint)?); - let metadata = - NoteMetadata::new(target_id, NoteType::Public, NoteTag::with_account_target(target_id)) - .with_attachment(attachment.clone()); + let metadata = NoteMetadata::new(target_id, NoteType::Public) + .with_tag(NoteTag::with_account_target(target_id)) + .with_attachment(attachment.clone()); let metadata_header = metadata.to_header_word(); let source = format!( @@ -26,12 +26,21 @@ async fn network_account_target_get_id() -> anyhow::Result<()> { use miden::standards::attachments::network_account_target use miden::protocol::note + const ERR_NOT_NETWORK_ACCOUNT_TARGET = "attachment is not a valid network account target" + begin push.{attachment_word} push.{metadata_header} exec.note::extract_attachment_info_from_metadata # => [attachment_kind, attachment_scheme, NOTE_ATTACHMENT] + swap + # => [attachment_scheme, attachment_kind, NOTE_ATTACHMENT] + exec.network_account_target::is_network_account_target + # => [is_valid, NOTE_ATTACHMENT] + assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET + # => [NOTE_ATTACHMENT] exec.network_account_target::get_id + # => [account_id_suffix, account_id_prefix] # cleanup stack movup.2 drop movup.2 drop end @@ -42,8 +51,8 @@ async fn network_account_target_get_id() -> anyhow::Result<()> { let exec_output = CodeExecutor::with_default_host().run(&source).await?; - assert_eq!(exec_output.stack[0], target_id.prefix().as_felt()); - assert_eq!(exec_output.stack[1], target_id.suffix()); + 
assert_eq!(exec_output.stack[0], target_id.suffix()); + assert_eq!(exec_output.stack[1], target_id.prefix().as_felt()); Ok(()) } @@ -65,9 +74,9 @@ async fn network_account_target_new_attachment() -> anyhow::Result<()> { begin push.{exec_hint} - push.{target_id_suffix} push.{target_id_prefix} - # => [target_id_prefix, target_id_suffix, exec_hint] + push.{target_id_suffix} + # => [target_id_suffix, target_id_prefix, exec_hint] exec.network_account_target::new # => [attachment_scheme, attachment_kind, ATTACHMENT, pad(16)] @@ -88,7 +97,8 @@ async fn network_account_target_new_attachment() -> anyhow::Result<()> { Felt::from(NetworkAccountTarget::ATTACHMENT_SCHEME.as_u32()) ); - assert_eq!(exec_output.stack.get_stack_word_be(2).unwrap(), attachment_word); + let word = exec_output.stack.get_word(2).unwrap(); + assert_eq!(word, attachment_word); Ok(()) } @@ -104,15 +114,22 @@ async fn network_account_target_attachment_round_trip() -> anyhow::Result<()> { r#" use miden::standards::attachments::network_account_target + const ERR_NOT_NETWORK_ACCOUNT_TARGET = "attachment is not a valid network account target" + begin push.{exec_hint} - push.{target_id_suffix} push.{target_id_prefix} - # => [target_id_prefix, target_id_suffix, exec_hint] + push.{target_id_suffix} + # => [target_id_suffix, target_id_prefix, exec_hint] exec.network_account_target::new # => [attachment_scheme, attachment_kind, ATTACHMENT] + exec.network_account_target::is_network_account_target + # => [is_valid, ATTACHMENT] + assert.err=ERR_NOT_NETWORK_ACCOUNT_TARGET + # => [ATTACHMENT] exec.network_account_target::get_id - # => [target_id_prefix, target_id_suffix] + # => [target_id_suffix, target_id_prefix] + # cleanup stack movup.2 drop movup.2 drop end "#, @@ -123,8 +140,8 @@ async fn network_account_target_attachment_round_trip() -> anyhow::Result<()> { let exec_output = CodeExecutor::with_default_host().run(&source).await?; - assert_eq!(exec_output.stack[0], target_id.prefix().as_felt()); - 
assert_eq!(exec_output.stack[1], target_id.suffix()); + assert_eq!(exec_output.stack[0], target_id.suffix()); + assert_eq!(exec_output.stack[1], target_id.prefix().as_felt()); Ok(()) } diff --git a/crates/miden-testing/src/standards/note_tag.rs b/crates/miden-testing/src/standards/note_tag.rs new file mode 100644 index 0000000000..27af5b8c23 --- /dev/null +++ b/crates/miden-testing/src/standards/note_tag.rs @@ -0,0 +1,76 @@ +use miden_protocol::note::NoteTag; +use miden_protocol::testing::account_id::AccountIdBuilder; +use miden_standards::errors::standards::ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED; + +use crate::assert_execution_error; +use crate::executor::CodeExecutor; + +#[rstest::rstest] +#[case::tag_len_0(0)] +#[case::tag_len_20(20)] +#[case::tag_len_32(32)] +#[tokio::test] +async fn test_note_tag_account_target(#[case] tag_len: u8) -> anyhow::Result<()> { + let account_id = AccountIdBuilder::new().build_with_seed([20; 32]); + let id_prefix = account_id.prefix().as_felt(); + + let expected_tag = NoteTag::with_custom_account_target(account_id, tag_len)?; + + let code = format!( + " + use miden::core::sys + use miden::standards::note_tag + + begin + push.{id_prefix} + push.{tag_len} + + exec.note_tag::create_custom_account_target + # => [note_tag] + + exec.sys::truncate_stack + end + " + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await?; + let actual_tag = exec_output.stack[0].as_canonical_u64(); + + assert_eq!( + actual_tag, + expected_tag.as_u32() as u64, + "Expected tag {:#010x}, got {:#010x}", + expected_tag.as_u32(), + actual_tag + ); + + Ok(()) +} + +#[tokio::test] +async fn test_note_tag_account_target_fails_for_large_tag_len() -> anyhow::Result<()> { + let tag_len = NoteTag::MAX_ACCOUNT_TARGET_TAG_LENGTH + 1; + let code = format!( + " + use miden::core::sys + use miden::standards::note_tag + + begin + # account ID prefix doesn't matter for this test + push.0 + push.{tag_len} + + 
exec.note_tag::create_custom_account_target + # => [note_tag] + + exec.sys::truncate_stack + end + " + ); + + let exec_output = CodeExecutor::with_default_host().run(&code).await; + + assert_execution_error!(exec_output, ERR_NOTE_TAG_MAX_ACCOUNT_TARGET_LENGTH_EXCEEDED); + + Ok(()) +} diff --git a/crates/miden-testing/src/tx_context/builder.rs b/crates/miden-testing/src/tx_context/builder.rs index 9bc8f47758..b61a5a5e98 100644 --- a/crates/miden-testing/src/tx_context/builder.rs +++ b/crates/miden-testing/src/tx_context/builder.rs @@ -6,7 +6,8 @@ use alloc::sync::Arc; use alloc::vec::Vec; use anyhow::Context; -use miden_processor::{AdviceInputs, Felt, Word}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{Felt, Word}; use miden_protocol::EMPTY_WORD; use miden_protocol::account::auth::{PublicKeyCommitment, Signature}; use miden_protocol::account::{Account, AccountHeader, AccountId}; @@ -17,7 +18,7 @@ use miden_protocol::note::{Note, NoteId, NoteScript}; use miden_protocol::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE; use miden_protocol::testing::noop_auth_component::NoopAuthComponent; use miden_protocol::transaction::{ - OutputNote, + RawOutputNote, TransactionArgs, TransactionInputs, TransactionScript, @@ -43,7 +44,7 @@ use crate::{MockChain, MockChainNote}; /// ``` /// # use anyhow::Result; /// # use miden_testing::TransactionContextBuilder; -/// # use miden_protocol::{account::AccountBuilder,Felt, FieldElement}; +/// # use miden_protocol::{account::AccountBuilder, Felt}; /// # use miden_protocol::transaction::TransactionKernel; /// # /// # #[tokio::main(flavor = "current_thread")] @@ -62,7 +63,7 @@ use crate::{MockChain, MockChainNote}; /// "; /// /// let exec_output = tx_context.execute_code(code).await?; -/// assert_eq!(exec_output.stack.get(0).unwrap(), &Felt::new(5)); +/// assert_eq!(exec_output.stack.get(0).unwrap(), &Felt::from(5u32)); /// # Ok(()) /// # } /// ``` @@ -103,7 +104,7 @@ impl TransactionContextBuilder 
{ signatures: Vec::new(), note_scripts: BTreeMap::new(), is_lazy_loading_enabled: true, - is_debug_mode_enabled: true, + is_debug_mode_enabled: cfg!(feature = "tx_context_debug"), } } @@ -132,8 +133,8 @@ impl TransactionContextBuilder { } /// Initializes a [TransactionContextBuilder] with a mocked fungible faucet. - pub fn with_fungible_faucet(acct_id: u128, initial_balance: Felt) -> Self { - let account = Account::mock_fungible_faucet(acct_id, initial_balance); + pub fn with_fungible_faucet(acct_id: u128) -> Self { + let account = Account::mock_fungible_faucet(acct_id); Self::new(account) } @@ -235,11 +236,10 @@ impl TransactionContextBuilder { } /// Extend the expected output notes. - pub fn extend_expected_output_notes(mut self, output_notes: Vec) -> Self { + pub fn extend_expected_output_notes(mut self, output_notes: Vec) -> Self { let output_notes = output_notes.into_iter().filter_map(|n| match n { - OutputNote::Full(note) => Some(note), - OutputNote::Partial(_) => None, - OutputNote::Header(_) => None, + RawOutputNote::Full(note) => Some(note), + RawOutputNote::Partial(_) => None, }); self.expected_output_notes.extend(output_notes); @@ -285,7 +285,7 @@ impl TransactionContextBuilder { let mut builder = MockChain::builder(); for i in self.input_notes { - builder.add_output_note(OutputNote::Full(i)); + builder.add_output_note(RawOutputNote::Full(i)); } let mut mock_chain = builder.build()?; diff --git a/crates/miden-testing/src/tx_context/context.rs b/crates/miden-testing/src/tx_context/context.rs index 45711460c5..c2ee158a6b 100644 --- a/crates/miden-testing/src/tx_context/context.rs +++ b/crates/miden-testing/src/tx_context/context.rs @@ -3,12 +3,13 @@ use alloc::collections::{BTreeMap, BTreeSet}; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{ExecutionError, FutureMaybeSend, MastForest, MastForestStore, Word}; +use miden_processor::mast::MastForest; +use miden_processor::{ExecutionOutput, 
FutureMaybeSend, MastForestStore, Word}; use miden_protocol::account::{ Account, AccountId, PartialAccount, + StorageMapKey, StorageMapWitness, StorageSlotContent, }; @@ -43,6 +44,7 @@ use miden_tx::{ use crate::executor::CodeExecutor; use crate::mock_host::MockHost; +use crate::tx_context::ExecError; // TRANSACTION CONTEXT // ================================================================================================ @@ -73,10 +75,6 @@ impl TransactionContext { /// is run on a modified [`TransactionExecutorHost`] which is loaded with the procedures exposed /// by the transaction kernel, and also individual kernel functions (not normally exposed). /// - /// To improve the error message quality, convert the returned [`ExecutionError`] into a - /// [`Report`](miden_protocol::assembly::diagnostics::Report) or use `?` with - /// [`miden_protocol::assembly::diagnostics::Result`]. - /// /// # Errors /// /// Returns an error if the assembly or execution of the provided code fails. @@ -84,7 +82,7 @@ impl TransactionContext { /// # Panics /// /// - If the provided `code` is not a valid program. - pub async fn execute_code(&self, code: &str) -> Result { + pub async fn execute_code(&self, code: &str) -> Result { // Fetch all witnesses for note assets and the fee asset. let mut asset_vault_keys = self .tx_inputs @@ -92,7 +90,7 @@ impl TransactionContext { .iter() .flat_map(|note| note.note().assets().iter().map(Asset::vault_key)) .collect::>(); - let fee_asset_vault_key = AssetVaultKey::from_account_id( + let fee_asset_vault_key = AssetVaultKey::new_fungible( self.tx_inputs().block_header().fee_parameters().native_asset_id(), ) .expect("fee asset should be a fungible asset"); @@ -109,7 +107,7 @@ impl TransactionContext { // Add the vault key for the fee asset to the list of asset vault keys which may need to be // accessed at the end of the transaction. 
let fee_asset_vault_key = - AssetVaultKey::from_account_id(block_header.fee_parameters().native_asset_id()) + AssetVaultKey::new_fungible(block_header.fee_parameters().native_asset_id()) .expect("fee asset should be a fungible asset"); asset_vault_keys.insert(fee_asset_vault_key); @@ -330,7 +328,7 @@ impl DataStore for TransactionContext { &self, account_id: AccountId, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> impl FutureMaybeSend> { async move { if account_id == self.account().id() { diff --git a/crates/miden-testing/src/tx_context/errors.rs b/crates/miden-testing/src/tx_context/errors.rs new file mode 100644 index 0000000000..c3f1a653d0 --- /dev/null +++ b/crates/miden-testing/src/tx_context/errors.rs @@ -0,0 +1,31 @@ +use alloc::string::ToString; + +use miden_processor::ExecutionError; +use miden_protocol::assembly::diagnostics::reporting::PrintDiagnostic; +use thiserror::Error; + +// EXECUTION ERROR +// ================================================================================================ + +/// A newtype wrapper around [`ExecutionError`] that provides better error messages +/// by using [`PrintDiagnostic`] for display formatting. +#[derive(Debug, Error)] +#[error("{}", PrintDiagnostic::new(.0).to_string())] +pub struct ExecError(pub ExecutionError); + +impl ExecError { + /// Creates a new `ExecError` from an `ExecutionError`. + pub fn new(error: ExecutionError) -> Self { + Self(error) + } + + /// Returns a reference to the inner `ExecutionError`. + pub fn as_execution_error(&self) -> &ExecutionError { + &self.0 + } + + /// Consumes `ExecError` and returns the inner `ExecutionError`. 
+ pub fn into_execution_error(self) -> ExecutionError { + self.0 + } +} diff --git a/crates/miden-testing/src/tx_context/mod.rs b/crates/miden-testing/src/tx_context/mod.rs index 787c13d36e..30cb008889 100644 --- a/crates/miden-testing/src/tx_context/mod.rs +++ b/crates/miden-testing/src/tx_context/mod.rs @@ -1,5 +1,7 @@ mod builder; mod context; +mod errors; pub use builder::TransactionContextBuilder; pub use context::TransactionContext; +pub use errors::ExecError; diff --git a/crates/miden-testing/src/utils.rs b/crates/miden-testing/src/utils.rs index 30aeb8fe76..c2493597ff 100644 --- a/crates/miden-testing/src/utils.rs +++ b/crates/miden-testing/src/utils.rs @@ -1,13 +1,15 @@ use alloc::string::String; use alloc::vec::Vec; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::Word; use miden_protocol::account::AccountId; use miden_protocol::asset::Asset; use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::note::{Note, NoteType}; -use miden_protocol::testing::storage::prepare_assets; +use miden_protocol::errors::NoteError; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteTag, NoteType}; use miden_standards::code_builder::CodeBuilder; +use miden_standards::note::P2idNoteStorage; use miden_standards::testing::note::NoteBuilder; use rand::SeedableRng; use rand::rngs::SmallRng; @@ -19,7 +21,7 @@ use rand::rngs::SmallRng; macro_rules! 
assert_execution_error { ($execution_result:expr, $expected_err:expr) => { match $execution_result { - Err(miden_processor::ExecutionError::FailedAssertion { label: _, source_file: _, clk: _, err_code, err_msg, err: _ }) => { + Err($crate::ExecError(miden_processor::ExecutionError::OperationError { label: _, source_file: _, err: miden_processor::operation::OperationError::FailedAssertion { err_code, err_msg } })) => { if let Some(ref msg) = err_msg { assert_eq!(msg.as_ref(), $expected_err.message(), "error messages did not match"); } @@ -41,13 +43,13 @@ macro_rules! assert_transaction_executor_error { ($execution_result:expr, $expected_err:expr) => { match $execution_result { Err(miden_tx::TransactionExecutorError::TransactionProgramExecutionFailed( - miden_processor::ExecutionError::FailedAssertion { + miden_processor::ExecutionError::OperationError { label: _, source_file: _, - clk: _, - err_code, - err_msg, - err: _, + err: miden_processor::operation::OperationError::FailedAssertion { + err_code, + err_msg, + }, }, )) => { if let Some(ref msg) = err_msg { @@ -97,32 +99,31 @@ pub fn create_p2any_note( let serial_number = rng.draw_word(); let assets: Vec<_> = assets.into_iter().collect(); let mut code_body = String::new(); - for i in 0..assets.len() { - if i == 0 { - // first asset (dest_ptr is already on stack) - code_body.push_str( - " - # add first asset - - padw dup.4 mem_loadw_be - padw swapw padw padw swapdw - call.wallet::receive_asset - dropw movup.12 - # => [dest_ptr, pad(12)] - ", - ); - } else { - code_body.push_str( - " - # add next asset + for asset_idx in 0..assets.len() { + code_body.push_str(&format!( + " + # => [dest_ptr] + + # current_asset_ptr = dest_ptr + ASSET_SIZE * asset_idx + dup push.ASSET_SIZE mul.{asset_idx} + # => [current_asset_ptr, dest_ptr] + + padw dup.4 add.ASSET_VALUE_MEMORY_OFFSET mem_loadw_le + # => [ASSET_VALUE, current_asset_ptr, dest_ptr] + + padw movup.8 mem_loadw_le + # => [ASSET_KEY, ASSET_VALUE, current_asset_ptr, 
dest_ptr] + + padw padw swapdw + # => [ASSET_KEY, ASSET_VALUE, pad(12), dest_ptr] - add.4 dup movdn.13 - padw movup.4 mem_loadw_be call.wallet::receive_asset - dropw movup.12 - # => [dest_ptr, pad(12)]", - ); - } + # => [pad(16), dest_ptr] + + dropw dropw dropw dropw + # => [dest_ptr] + ", + )); } code_body.push_str("dropw dropw dropw dropw"); @@ -130,6 +131,8 @@ pub fn create_p2any_note( r#" use mock::account use miden::protocol::active_note + use ::miden::protocol::asset::ASSET_VALUE_MEMORY_OFFSET + use ::miden::protocol::asset::ASSET_SIZE use miden::standards::wallets::basic->wallet begin @@ -209,10 +212,10 @@ fn note_script_that_creates_notes<'note>( // Make sure that the transaction's native account matches the note sender. out.push_str(&format!( r#"exec.::miden::protocol::native_account::get_id - # => [native_account_id_prefix, native_account_id_suffix] - push.{sender_prefix} assert_eq.err="sender ID prefix does not match native account ID's prefix" - # => [native_account_id_suffix] + # => [native_account_id_suffix, native_account_id_prefix] push.{sender_suffix} assert_eq.err="sender ID suffix does not match native account ID's suffix" + # => [native_account_id_prefix] + push.{sender_prefix} assert_eq.err="sender ID prefix does not match native account ID's prefix" # => [] "#, sender_prefix = sender_id.prefix().as_felt(), @@ -250,11 +253,17 @@ fn note_script_that_creates_notes<'note>( attachment_kind = note.metadata().attachment().content().attachment_kind().as_u8(), )); - let assets_str = prepare_assets(note.assets()); - for asset in assets_str { + for asset in note.assets().iter() { out.push_str(&format!( - " push.{asset} - call.::miden::standards::wallets::basic::move_asset_to_note\n", + " dup + push.{ASSET_VALUE} + push.{ASSET_KEY} + # => [ASSET_KEY, ASSET_VALUE, note_idx, note_idx] + call.::miden::standards::wallets::basic::move_asset_to_note + # => [note_idx] + ", + ASSET_KEY = asset.to_key_word(), + ASSET_VALUE = asset.to_value_word(), )); } } @@ 
-263,3 +272,21 @@ fn note_script_that_creates_notes<'note>( Ok(out) } + +/// Generates a P2ID note - Pay-to-ID note with an exact serial number +pub fn create_p2id_note_exact( + sender: AccountId, + target: AccountId, + assets: Vec, + note_type: NoteType, + serial_num: Word, +) -> Result { + let recipient = P2idNoteStorage::new(target).into_recipient(serial_num); + + let tag = NoteTag::with_account_target(target); + + let metadata = NoteMetadata::new(sender, note_type).with_tag(tag); + let vault = NoteAssets::new(assets)?; + + Ok(Note::new(vault, metadata, recipient)) +} diff --git a/crates/miden-testing/tests/agglayer/asset_conversion.rs b/crates/miden-testing/tests/agglayer/asset_conversion.rs index 6cec09d255..3eda586c39 100644 --- a/crates/miden-testing/tests/agglayer/asset_conversion.rs +++ b/crates/miden-testing/tests/agglayer/asset_conversion.rs @@ -1,66 +1,36 @@ extern crate alloc; -use alloc::sync::Arc; - -use miden_agglayer::{agglayer_library, utils}; -use miden_assembly::{Assembler, DefaultSourceManager}; -use miden_core_lib::CoreLibrary; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, DefaultHost, ExecutionError, Program, StackInputs}; +use miden_agglayer::errors::{ + ERR_REMAINDER_TOO_LARGE, + ERR_SCALE_AMOUNT_EXCEEDED_LIMIT, + ERR_UNDERFLOW, + ERR_X_TOO_LARGE, +}; +use miden_agglayer::eth_types::amount::EthAmount; +use miden_processor::utils::packed_u32_elements_to_bytes; use miden_protocol::Felt; -use miden_protocol::transaction::TransactionKernel; +use miden_protocol::asset::FungibleAsset; +use miden_protocol::errors::MasmError; use primitive_types::U256; +use rand::rngs::StdRng; +use rand::{Rng, SeedableRng}; -/// Convert a Vec to a U256 -fn felts_to_u256(felts: Vec) -> U256 { - assert_eq!(felts.len(), 8, "expected exactly 8 felts"); - let array: [Felt; 8] = - [felts[0], felts[1], felts[2], felts[3], felts[4], felts[5], felts[6], felts[7]]; - let bytes = utils::felts_to_u256_bytes(array); - 
U256::from_little_endian(&bytes) -} - -/// Convert the top 8 u32 values from the execution stack to a U256 -fn stack_to_u256(exec_output: &ExecutionOutput) -> U256 { - let felts: Vec = exec_output.stack[0..8].to_vec(); - felts_to_u256(felts) -} - -/// Execute a program with default host -async fn execute_program_with_default_host( - program: Program, -) -> Result { - let mut host = DefaultHost::default(); - - let test_lib = TransactionKernel::library(); - host.load_library(test_lib.mast_forest()).unwrap(); +use super::test_utils::{assert_execution_fails_with, execute_masm_script}; - let std_lib = CoreLibrary::default(); - host.load_library(std_lib.mast_forest()).unwrap(); +// ================================================================================================ +// SCALE UP TESTS (Felt -> U256) +// ================================================================================================ - let asset_conversion_lib = agglayer_library(); - host.load_library(asset_conversion_lib.mast_forest()).unwrap(); - - let stack_inputs = StackInputs::new(vec![]).unwrap(); - let advice_inputs = AdviceInputs::default(); - - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs); - processor.execute(&program, &mut host).await -} - -/// Helper function to test convert_felt_to_u256_scaled with given parameters -async fn test_convert_to_u256_helper( +/// Helper function to test scale_native_amount_to_u256 with given parameters +async fn test_scale_up_helper( miden_amount: Felt, scale_exponent: Felt, - expected_result_array: [u32; 8], - expected_result_u256: U256, + expected_result: EthAmount, ) -> anyhow::Result<()> { - let asset_conversion_lib = agglayer_library(); - let script_code = format!( " use miden::core::sys - use miden::agglayer::asset_conversion + use miden::agglayer::common::asset_conversion begin push.{}.{} @@ -71,53 +41,35 @@ async fn test_convert_to_u256_helper( scale_exponent, miden_amount, ); - let program = 
Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - .unwrap(); - - let exec_output = execute_program_with_default_host(program).await?; - - // Extract the first 8 u32 values from the stack (the U256 representation) - let actual_result: [u32; 8] = [ - exec_output.stack[0].as_int() as u32, - exec_output.stack[1].as_int() as u32, - exec_output.stack[2].as_int() as u32, - exec_output.stack[3].as_int() as u32, - exec_output.stack[4].as_int() as u32, - exec_output.stack[5].as_int() as u32, - exec_output.stack[6].as_int() as u32, - exec_output.stack[7].as_int() as u32, - ]; + let exec_output = execute_masm_script(&script_code).await?; + let actual_felts: Vec = exec_output.stack[0..8].to_vec(); - let actual_result_u256 = stack_to_u256(&exec_output); + // to_elements() returns big-endian limb order with each limb byte-swapped (LE-interpreted + // from BE source bytes). The scale-up output is native u32 limbs in LE limb order, so we + // reverse the limbs and swap bytes within each u32 to match. 
+ let expected_felts: Vec = expected_result + .to_elements() + .into_iter() + .rev() + .map(|f| Felt::new((f.as_canonical_u64() as u32).swap_bytes() as u64)) + .collect(); - assert_eq!(actual_result, expected_result_array); - assert_eq!(actual_result_u256, expected_result_u256); + assert_eq!(actual_felts, expected_felts); Ok(()) } #[tokio::test] -async fn test_convert_to_u256_basic_examples() -> anyhow::Result<()> { +async fn test_scale_up_basic_examples() -> anyhow::Result<()> { // Test case 1: amount=1, no scaling (scale_exponent=0) - test_convert_to_u256_helper( - Felt::new(1), - Felt::new(0), - [1, 0, 0, 0, 0, 0, 0, 0], - U256::from(1u64), - ) - .await?; + test_scale_up_helper(Felt::new(1), Felt::new(0), EthAmount::from_uint_str("1").unwrap()) + .await?; // Test case 2: amount=1, scale to 1e18 (scale_exponent=18) - test_convert_to_u256_helper( + test_scale_up_helper( Felt::new(1), Felt::new(18), - [2808348672, 232830643, 0, 0, 0, 0, 0, 0], - U256::from_dec_str("1000000000000000000").unwrap(), + EthAmount::from_uint_str("1000000000000000000").unwrap(), ) .await?; @@ -125,87 +77,304 @@ async fn test_convert_to_u256_basic_examples() -> anyhow::Result<()> { } #[tokio::test] -async fn test_convert_to_u256_scaled_eth() -> anyhow::Result<()> { - // 100 units base 1e6 - let miden_amount = Felt::new(100_000_000); +async fn test_scale_up_realistic_amounts() -> anyhow::Result<()> { + // 100 units base 1e6, scale to 1e18 + test_scale_up_helper( + Felt::new(100_000_000), + Felt::new(12), + EthAmount::from_uint_str("100000000000000000000").unwrap(), + ) + .await?; - // scale to 1e18 - let target_scale = Felt::new(12); + // Large amount: 1e18 units scaled by 8 + test_scale_up_helper( + Felt::new(1000000000000000000), + Felt::new(8), + EthAmount::from_uint_str("100000000000000000000000000").unwrap(), + ) + .await?; - let asset_conversion_lib = agglayer_library(); + Ok(()) +} - let script_code = format!( - " +#[tokio::test] +async fn test_scale_up_exceeds_max_scale() { + // 
scale_exp = 19 should fail + let script_code = " use miden::core::sys - use miden::agglayer::asset_conversion + use miden::agglayer::common::asset_conversion begin - push.{}.{} + push.19.1 exec.asset_conversion::scale_native_amount_to_u256 exec.sys::truncate_stack end - ", - target_scale, miden_amount, - ); + "; + + assert_execution_fails_with(script_code, "maximum scaling factor is 18").await; +} + +// ================================================================================================ +// SCALE DOWN TESTS (U256 -> Felt) +// ================================================================================================ + +/// Build MASM script for verify_u256_to_native_amount_conversion +fn build_scale_down_script(x: EthAmount, scale_exp: u32, y: u64) -> String { + let x_felts = x.to_elements(); + format!( + r#" + use miden::core::sys + use miden::agglayer::common::asset_conversion + + begin + push.{}.{}.{}.{}.{}.{}.{}.{}.{}.{} + exec.asset_conversion::verify_u256_to_native_amount_conversion + exec.sys::truncate_stack + end + "#, + y, + scale_exp, + x_felts[7].as_canonical_u64(), + x_felts[6].as_canonical_u64(), + x_felts[5].as_canonical_u64(), + x_felts[4].as_canonical_u64(), + x_felts[3].as_canonical_u64(), + x_felts[2].as_canonical_u64(), + x_felts[1].as_canonical_u64(), + x_felts[0].as_canonical_u64(), + ) +} + +/// Assert that scaling down succeeds with the correct result +async fn assert_scale_down_ok(x: EthAmount, scale: u32) -> anyhow::Result { + let y = x.scale_to_token_amount(scale).unwrap().as_canonical_u64(); + let script = build_scale_down_script(x, scale, y); + let out = execute_masm_script(&script).await?; + assert_eq!(out.stack[0].as_canonical_u64(), y); + Ok(y) +} + +/// Assert that scaling down fails with the given y and expected error +async fn assert_scale_down_fails(x: EthAmount, scale: u32, y: u64, expected_error: MasmError) { + let script = build_scale_down_script(x, scale, y); + assert_execution_fails_with(&script, 
expected_error.message()).await; +} + +/// Test that y-1 and y+1 both fail appropriately +async fn assert_y_plus_minus_one_behavior(x: EthAmount, scale: u32) -> anyhow::Result<()> { + let y = assert_scale_down_ok(x, scale).await?; + if y > 0 { + assert_scale_down_fails(x, scale, y - 1, ERR_REMAINDER_TOO_LARGE).await; + } + assert_scale_down_fails(x, scale, y + 1, ERR_UNDERFLOW).await; + Ok(()) +} + +#[tokio::test] +async fn test_scale_down_basic_examples() -> anyhow::Result<()> { + let cases = [ + (EthAmount::from_uint_str("1000000000000000000").unwrap(), 10u32), + (EthAmount::from_uint_str("1000").unwrap(), 0u32), + (EthAmount::from_uint_str("10000000000000000000").unwrap(), 18u32), + ]; + + for (x, s) in cases { + assert_scale_down_ok(x, s).await?; + } + Ok(()) +} + +// ================================================================================================ +// FUZZING TESTS +// ================================================================================================ + +// Fuzz test that validates verify_u256_to_native_amount_conversion (U256 → Felt) +// with random realistic amounts for all scale exponents (0..=18). 
+#[tokio::test] +async fn test_scale_down_realistic_scenarios_fuzzing() -> anyhow::Result<()> { + const CASES_PER_SCALE: usize = 2; + const MAX_SCALE: u32 = 18; + + let mut rng = StdRng::seed_from_u64(42); + + let min_x = U256::from(10_000_000_000_000u64); // 1e13 + let desired_max_x = U256::from_dec_str("1000000000000000000000000").unwrap(); // 1e24 + let max_y = U256::from(FungibleAsset::MAX_AMOUNT); // 2^63 - 2^31 - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - .unwrap(); + for scale in 0..=MAX_SCALE { + let scale_factor = U256::from(10u64).pow(U256::from(scale)); - let exec_output = execute_program_with_default_host(program).await?; + // Ensure x always scales down into a y that fits the fungible-token bound. + let max_x = desired_max_x.min(max_y * scale_factor); - let expected_result = U256::from_dec_str("100000000000000000000").unwrap(); - let actual_result = stack_to_u256(&exec_output); + assert!(max_x > min_x, "max_x must exceed min_x for scale={scale}"); - assert_eq!(actual_result, expected_result); + // Sample x uniformly from [min_x, max_x). 
+ let span: u128 = (max_x - min_x).try_into().expect("span fits in u128"); + + for _ in 0..CASES_PER_SCALE { + let offset: u128 = rng.random_range(0..span); + let x = EthAmount::from_u256(min_x + U256::from(offset)); + assert_scale_down_ok(x, scale).await?; + } + } Ok(()) } +// ================================================================================================ +// NEGATIVE TESTS +// ================================================================================================ + +#[tokio::test] +async fn test_scale_down_wrong_y_clean_case() -> anyhow::Result<()> { + let x = EthAmount::from_uint_str("10000000000000000000").unwrap(); + assert_y_plus_minus_one_behavior(x, 18).await +} + #[tokio::test] -async fn test_convert_to_u256_scaled_large_amount() -> anyhow::Result<()> { - // 100,000,000 units (base 1e10) - let miden_amount = Felt::new(1000000000000000000); +async fn test_scale_down_wrong_y_with_remainder() -> anyhow::Result<()> { + let x = EthAmount::from_uint_str("1500000000000000000").unwrap(); + assert_y_plus_minus_one_behavior(x, 18).await +} - // scale to base 1e18 - let scale_exponent = Felt::new(8); +// ================================================================================================ +// NEGATIVE TESTS - BOUNDS +// ================================================================================================ - let asset_conversion_lib = agglayer_library(); +#[tokio::test] +async fn test_scale_down_exceeds_max_scale() { + let x = EthAmount::from_uint_str("1000").unwrap(); + let s = 19u32; + let y = 1u64; + assert_scale_down_fails(x, s, y, ERR_SCALE_AMOUNT_EXCEEDED_LIMIT).await; +} + +#[tokio::test] +async fn test_scale_down_x_too_large() { + // Construct x with upper limbs non-zero (>= 2^128) + let x = EthAmount::from_u256(U256::from(1u64) << 128); + let s = 0u32; + let y = 0u64; + assert_scale_down_fails(x, s, y, ERR_X_TOO_LARGE).await; +} +// 
================================================================================================ +// REMAINDER EDGE TEST +// ================================================================================================ + +#[tokio::test] +async fn test_scale_down_remainder_edge() -> anyhow::Result<()> { + // Force z = scale - 1: pick y=5, s=10, so scale=10^10 + // Set x = y*scale + (scale-1) = 5*10^10 + (10^10 - 1) = 59999999999 + let scale_exp = 10u32; + let scale = 10u64.pow(scale_exp); + let x_val = 5u64 * scale + (scale - 1); + let x = EthAmount::from_u256(U256::from(x_val)); + + assert_scale_down_ok(x, scale_exp).await?; + Ok(()) +} + +#[tokio::test] +async fn test_scale_down_remainder_exactly_scale_fails() { + // If remainder z = scale, it should fail + // Pick s=10, x = 6*scale (where scale = 10^10) + // The correct y should be 6, so providing y=5 should fail + let scale_exp = 10u32; + let scale = 10u64.pow(scale_exp); + let x = EthAmount::from_u256(U256::from(6u64 * scale)); + + // Calculate the correct y using scale_to_token_amount + let correct_y = x.scale_to_token_amount(scale_exp).unwrap().as_canonical_u64(); + assert_eq!(correct_y, 6); + + // Providing wrong_y = correct_y - 1 should fail with ERR_REMAINDER_TOO_LARGE + let wrong_y = correct_y - 1; + assert_scale_down_fails(x, scale_exp, wrong_y, ERR_REMAINDER_TOO_LARGE).await; +} + +// ================================================================================================ +// INLINE SCALE DOWN TEST +// ================================================================================================ + +#[tokio::test] +async fn test_verify_scale_down_inline() -> anyhow::Result<()> { + // Test: Take 100 * 1e18 and scale to base 1e8 + // This means we divide by 1e10 (scale_exp = 10) + // x = 100 * 1e18 = 100000000000000000000 + // y = x / 1e10 = 10000000000 (100 * 1e8) + let x = EthAmount::from_uint_str("100000000000000000000").unwrap(); + let scale_exp = 10u32; + let y = 
x.scale_to_token_amount(scale_exp).unwrap().as_canonical_u64(); + + let x_felts = x.to_elements(); + + // Build the MASM script inline let script_code = format!( - " + r#" use miden::core::sys - use miden::agglayer::asset_conversion - + use miden::agglayer::common::asset_conversion + begin - push.{}.{} - - exec.asset_conversion::scale_native_amount_to_u256 + # Push y (expected quotient) + push.{} + + # Push scale_exp + push.{} + + # Push x as 8 u32 limbs (little-endian, x0 at top) + push.{}.{}.{}.{}.{}.{}.{}.{} + + # Call the scale down procedure + exec.asset_conversion::verify_u256_to_native_amount_conversion + + # Truncate stack to just return y exec.sys::truncate_stack end - ", - scale_exponent, miden_amount, + "#, + y, + scale_exp, + x_felts[7].as_canonical_u64(), + x_felts[6].as_canonical_u64(), + x_felts[5].as_canonical_u64(), + x_felts[4].as_canonical_u64(), + x_felts[3].as_canonical_u64(), + x_felts[2].as_canonical_u64(), + x_felts[1].as_canonical_u64(), + x_felts[0].as_canonical_u64(), ); - let program = Assembler::new(Arc::new(DefaultSourceManager::default())) - .with_dynamic_library(CoreLibrary::default()) - .unwrap() - .with_dynamic_library(asset_conversion_lib.clone()) - .unwrap() - .assemble_program(&script_code) - .unwrap(); - - let exec_output = execute_program_with_default_host(program).await?; + // Execute the script + let exec_output = execute_masm_script(&script_code).await?; - let expected_result = U256::from_dec_str("100000000000000000000000000").unwrap(); - let actual_result = stack_to_u256(&exec_output); + // Verify the result + let result = exec_output.stack[0].as_canonical_u64(); + assert_eq!(result, y); - assert_eq!(actual_result, expected_result); + Ok(()) +} +/// Exercises u128_sub_no_underflow when x > 2^64, so x has distinct high limbs (x2 != x3). +/// +/// The u128 subtraction splits each 128-bit operand into two 64-bit halves. 
This test +/// ensures the high-half subtraction and borrow propagation work correctly when x_high +/// is non-zero. +#[tokio::test] +async fn test_scale_down_high_limb_subtraction() -> anyhow::Result<()> { + let x_val = U256::from_dec_str("18999999999999999999").unwrap(); + + // Verify the u32 limb structure that makes this test meaningful: + // x = x0 + x1*2^32 + x2*2^64 + x3*2^96 + // x2 and x3 must differ - otherwise the high subtraction is trivially correct + // regardless of limb ordering. + let x2 = ((x_val >> 64) & U256::from(u32::MAX)).as_u32(); + let x3 = ((x_val >> 96) & U256::from(u32::MAX)).as_u32(); + assert_eq!(x2, 1, "x2 must be non-zero for the high subtraction to be non-trivial"); + assert_eq!(x3, 0, "x3 must differ from x2"); + + let x = EthAmount::from_u256(x_val); + assert_scale_down_ok(x, 18).await?; Ok(()) } @@ -221,7 +390,7 @@ fn test_felts_to_u256_bytes_sequential_values() { Felt::new(7), Felt::new(8), ]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); // Verify the byte layout: limbs are processed in little-endian order, each as little-endian u32 @@ -236,13 +405,13 @@ fn test_felts_to_u256_bytes_sequential_values() { fn test_felts_to_u256_bytes_edge_cases() { // Test case 1: All zeros (minimum) let limbs = [Felt::new(0); 8]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); assert!(result.iter().all(|&b| b == 0)); // Test case 2: All max u32 values (maximum) let limbs = [Felt::new(u32::MAX as u64); 8]; - let result = utils::felts_to_u256_bytes(limbs); + let result = packed_u32_elements_to_bytes(&limbs); assert_eq!(result.len(), 32); assert!(result.iter().all(|&b| b == 255)); } diff --git a/crates/miden-testing/tests/agglayer/bridge_in.rs b/crates/miden-testing/tests/agglayer/bridge_in.rs index 81392b584d..4904bf29d7 100644 --- 
a/crates/miden-testing/tests/agglayer/bridge_in.rs +++ b/crates/miden-testing/tests/agglayer/bridge_in.rs @@ -1,10 +1,16 @@ extern crate alloc; -use core::slice; +use alloc::slice; +use alloc::string::String; +use anyhow::Context; +use miden_agglayer::claim_note::Keccak256Output; use miden_agglayer::{ - ClaimNoteParams, - claim_note_test_inputs, + ClaimNoteStorage, + ExitRoot, + SmtNode, + UpdateGerNote, + agglayer_library, create_claim_note, create_existing_agglayer_faucet, create_existing_bridge_account, @@ -12,128 +18,230 @@ use miden_agglayer::{ use miden_protocol::Felt; use miden_protocol::account::Account; use miden_protocol::asset::{Asset, FungibleAsset}; +use miden_protocol::crypto::SequentialCommit; use miden_protocol::crypto::rand::FeltRng; -use miden_protocol::note::{ - Note, - NoteAssets, - NoteInputs, - NoteMetadata, - NoteRecipient, - NoteTag, - NoteType, -}; -use miden_protocol::transaction::OutputNote; +use miden_protocol::note::NoteType; +use miden_protocol::testing::account_id::ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE; +use miden_protocol::transaction::RawOutputNote; use miden_standards::account::wallets::BasicWallet; -use miden_standards::note::WellKnownNote; -use miden_testing::{AccountState, Auth, MockChain}; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::testing::account_component::IncrNonceAuthComponent; +use miden_testing::utils::create_p2id_note_exact; +use miden_testing::{AccountState, Auth, MockChain, TransactionContextBuilder}; +use miden_tx::utils::hex_to_bytes; use rand::Rng; +use super::test_utils::{ + ClaimDataSource, + MerkleProofVerificationFile, + SOLIDITY_MERKLE_PROOF_VECTORS, +}; + +// HELPER FUNCTIONS +// ================================================================================================ + +fn merkle_proof_verification_code( + index: usize, + merkle_paths: &MerkleProofVerificationFile, +) -> String { + let mut store_path_source = String::new(); + for height in 0..32 { + let path_node 
= merkle_paths.merkle_paths[index * 32 + height].as_str(); + let smt_node = SmtNode::from(hex_to_bytes(path_node).unwrap()); + let [node_lo, node_hi] = smt_node.to_words(); + store_path_source.push_str(&format!( + " + \tpush.{node_lo} mem_storew_le.{} dropw + \tpush.{node_hi} mem_storew_le.{} dropw + ", + height * 8, + height * 8 + 4 + )); + } + + let root = ExitRoot::from(hex_to_bytes(&merkle_paths.roots[index]).unwrap()); + let [root_lo, root_hi] = root.to_words(); + + let leaf = Keccak256Output::from(hex_to_bytes(&merkle_paths.leaves[index]).unwrap()); + let [leaf_lo, leaf_hi] = leaf.to_words(); + + format!( + r#" + use miden::agglayer::bridge::bridge_in + + begin + {store_path_source} + + push.{root_lo} mem_storew_le.256 dropw + push.{root_hi} mem_storew_le.260 dropw + + push.256 + push.{index} + push.0 + push.{leaf_hi} + push.{leaf_lo} + + exec.bridge_in::verify_merkle_proof + assert.err="verification failed" + end + "# + ) +} + /// Tests the bridge-in flow: CLAIM note -> Aggfaucet (FPI to Bridge) -> P2ID note created. +/// +/// Parameterized over two claim data sources: +/// - [`ClaimDataSource::Real`]: uses real [`ProofData`] and [`LeafData`] from +/// `claim_asset_vectors_real_tx.json`, captured from an actual on-chain `claimAsset` transaction. +/// - [`ClaimDataSource::Simulated`]: uses locally generated [`ProofData`] and [`LeafData`] from +/// `claim_asset_vectors_local_tx.json`, produced by simulating a `bridgeAsset()` call. +/// +/// In both cases the claim note is processed against the agglayer faucet, which validates the +/// Merkle proof and creates a P2ID note for the destination address. +/// +/// Note: Modifying anything in the real test vectors would invalidate the Merkle proof, +/// as the proof was computed for the original leaf data including the original destination. 
+#[rstest::rstest] +#[case::real(ClaimDataSource::Real)] +#[case::simulated(ClaimDataSource::Simulated)] #[tokio::test] -async fn test_bridge_in_claim_to_p2id() -> anyhow::Result<()> { +async fn test_bridge_in_claim_to_p2id(#[case] data_source: ClaimDataSource) -> anyhow::Result<()> { + use miden_protocol::account::auth::AuthScheme; + let mut builder = MockChain::builder(); - // CREATE BRIDGE ACCOUNT (with bridge_out component for MMR validation) + // CREATE BRIDGE ADMIN ACCOUNT (not used in this test, but distinct from GER manager) + // -------------------------------------------------------------------------------------------- + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (sends the UPDATE_GER note) + // -------------------------------------------------------------------------------------------- + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT // -------------------------------------------------------------------------------------------- let bridge_seed = builder.rng_mut().draw_word(); - let bridge_account = create_existing_bridge_account(bridge_seed); + let bridge_account = + create_existing_bridge_account(bridge_seed, bridge_admin.id(), ger_manager.id()); builder.add_account(bridge_account.clone())?; + // GET CLAIM DATA FROM JSON (source depends on the test case) + // -------------------------------------------------------------------------------------------- + let (proof_data, leaf_data, ger) = data_source.get_data(); + // CREATE AGGLAYER FAUCET ACCOUNT (with agglayer_faucet component) + // Use the origin token address and network from the claim data. 
// -------------------------------------------------------------------------------------------- let token_symbol = "AGG"; let decimals = 8u8; - let max_supply = Felt::new(1000000); + let max_supply = Felt::new(FungibleAsset::MAX_AMOUNT); let agglayer_faucet_seed = builder.rng_mut().draw_word(); + let origin_token_address = leaf_data.origin_token_address; + let origin_network = leaf_data.origin_network; + let scale = 10u8; + let agglayer_faucet = create_existing_agglayer_faucet( agglayer_faucet_seed, token_symbol, decimals, max_supply, + Felt::ZERO, bridge_account.id(), + &origin_token_address, + origin_network, + scale, ); builder.add_account(agglayer_faucet.clone())?; - // CREATE USER ACCOUNT TO RECEIVE P2ID NOTE + // Get the destination account ID from the leaf data. + // This requires the destination_address to be in the embedded Miden AccountId format + // (first 4 bytes must be zero). + let destination_account_id = leaf_data + .destination_address + .to_account_id() + .expect("destination address is not an embedded Miden AccountId"); + + // For the simulated case, create the destination account so we can consume the P2ID note + let destination_account = if matches!(data_source, ClaimDataSource::Simulated) { + use miden_standards::testing::mock_account::MockAccountExt; + + let dest = + Account::mock(ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE, IncrNonceAuthComponent); + // Ensure the mock account ID matches the destination embedded in the JSON test vector, + // since the claim note targets this account ID. 
+ assert_eq!( + dest.id(), + destination_account_id, + "mock destination account ID must match the destination_account_id from the claim data" + ); + builder.add_account(dest.clone())?; + Some(dest) + } else { + None + }; + + // CREATE SENDER ACCOUNT (for creating the claim note) // -------------------------------------------------------------------------------------------- - let user_account_builder = + let sender_account_builder = Account::builder(builder.rng_mut().random()).with_component(BasicWallet); - let user_account = builder.add_account_from_builder( + let sender_account = builder.add_account_from_builder( Auth::IncrNonce, - user_account_builder, + sender_account_builder, AccountState::Exists, )?; - // CREATE CLAIM NOTE WITH P2ID OUTPUT NOTE DETAILS + // CREATE CLAIM NOTE // -------------------------------------------------------------------------------------------- - // Define amount values for the test - let amount_felt = Felt::new(100); + // The P2ID serial number is derived from the PROOF_DATA_KEY (RPO hash of proof data) + let serial_num = proof_data.to_commitment(); - // Create CLAIM note using the helper function with new agglayer claimAsset inputs - let ( - smt_proof_local_exit_root, - smt_proof_rollup_exit_root, - global_index, - mainnet_exit_root, - rollup_exit_root, - origin_network, - origin_token_address, - destination_network, - destination_address, - amount_u256, - metadata, - ) = claim_note_test_inputs(amount_felt, user_account.id()); - - // Generate a serial number for the P2ID note - let serial_num = builder.rng_mut().draw_word(); - - let claim_params = ClaimNoteParams { - smt_proof_local_exit_root, - smt_proof_rollup_exit_root, - global_index, - mainnet_exit_root: &mainnet_exit_root, - rollup_exit_root: &rollup_exit_root, - origin_network, - origin_token_address: &origin_token_address, - destination_network, - destination_address: &destination_address, - amount: amount_u256, - metadata, - claim_note_creator_account_id: user_account.id(), 
- agglayer_faucet_account_id: agglayer_faucet.id(), - output_note_tag: NoteTag::with_account_target(user_account.id()), - p2id_serial_number: serial_num, - destination_account_id: user_account.id(), - rng: builder.rng_mut(), - }; + // Calculate the scaled-down Miden amount using the faucet's scale factor + let miden_claim_amount = leaf_data + .amount + .scale_to_token_amount(scale as u32) + .expect("amount should scale successfully"); - // Create P2ID note for the user account (similar to network faucet test) - let p2id_script = WellKnownNote::P2ID.script(); - let p2id_inputs = vec![user_account.id().suffix(), user_account.id().prefix().as_felt()]; - let note_inputs = NoteInputs::new(p2id_inputs)?; - let p2id_recipient = NoteRecipient::new(serial_num, p2id_script.clone(), note_inputs); + let claim_inputs = ClaimNoteStorage { + proof_data, + leaf_data, + miden_claim_amount, + }; - let claim_note = create_claim_note(claim_params)?; + let claim_note = create_claim_note( + claim_inputs, + agglayer_faucet.id(), + sender_account.id(), + builder.rng_mut(), + )?; // Add the claim note to the builder before building the mock chain - builder.add_output_note(OutputNote::Full(claim_note.clone())); + builder.add_output_note(RawOutputNote::Full(claim_note.clone())); + + // CREATE UPDATE_GER NOTE WITH GLOBAL EXIT ROOT + // -------------------------------------------------------------------------------------------- + let update_ger_note = + UpdateGerNote::create(ger, ger_manager.id(), bridge_account.id(), builder.rng_mut())?; + builder.add_output_note(RawOutputNote::Full(update_ger_note.clone())); // BUILD MOCK CHAIN WITH ALL ACCOUNTS // -------------------------------------------------------------------------------------------- let mut mock_chain = builder.clone().build()?; - mock_chain.prove_next_block()?; - // CREATE EXPECTED P2ID NOTE FOR VERIFICATION + // EXECUTE UPDATE_GER NOTE TO STORE GER IN BRIDGE ACCOUNT // 
-------------------------------------------------------------------------------------------- - let mint_asset: Asset = FungibleAsset::new(agglayer_faucet.id(), amount_felt.into())?.into(); - let output_note_tag = NoteTag::with_account_target(user_account.id()); - let expected_p2id_note = Note::new( - NoteAssets::new(vec![mint_asset])?, - NoteMetadata::new(agglayer_faucet.id(), NoteType::Public, output_note_tag), - p2id_recipient, - ); + let update_ger_tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[update_ger_note.id()], &[])? + .build()?; + let update_ger_executed = update_ger_tx_context.execute().await?; + + mock_chain.add_pending_executed_transaction(&update_ger_executed)?; + mock_chain.prove_next_block()?; // EXECUTE CLAIM NOTE AGAINST AGGLAYER FAUCET (with FPI to Bridge) // -------------------------------------------------------------------------------------------- @@ -141,7 +249,6 @@ async fn test_bridge_in_claim_to_p2id() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(agglayer_faucet.id(), &[], &[claim_note])? 
- .add_note_script(p2id_script) .foreign_accounts(vec![foreign_account_inputs]) .build()?; @@ -154,43 +261,100 @@ async fn test_bridge_in_claim_to_p2id() -> anyhow::Result<()> { assert_eq!(executed_transaction.output_notes().num_notes(), 1); let output_note = executed_transaction.output_notes().get_note(0); - // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(agglayer_faucet.id(), amount_felt.into())?; - // Verify note metadata properties assert_eq!(output_note.metadata().sender(), agglayer_faucet.id()); assert_eq!(output_note.metadata().note_type(), NoteType::Public); - assert_eq!(output_note.id(), expected_p2id_note.id()); - // Extract the full note from the OutputNote enum for detailed verification - let full_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for public note"), - }; + // Extract and verify P2ID asset contents + let mut assets_iter = output_note.assets().iter_fungible(); + let p2id_asset = assets_iter.next().unwrap(); - // Verify note structure and asset content - let expected_asset_obj = Asset::from(expected_asset); - assert_eq!(full_note, &expected_p2id_note); - assert!(full_note.assets().iter().any(|asset| asset == &expected_asset_obj)); + // Verify minted amount matches expected scaled value + assert_eq!( + Felt::new(p2id_asset.amount()), + miden_claim_amount, + "asset amount does not match" + ); - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_transaction)?; - mock_chain.prove_next_block()?; + // Verify faucet ID matches agglayer_faucet (P2ID token issuer) + assert_eq!( + p2id_asset.faucet_id(), + agglayer_faucet.id(), + "P2ID asset faucet ID doesn't match agglayer_faucet: got {:?}, expected {:?}", + p2id_asset.faucet_id(), + agglayer_faucet.id() + ); + + // Verify full note ID construction + let expected_asset: Asset = + FungibleAsset::new(agglayer_faucet.id(), 
miden_claim_amount.as_canonical_u64()) + .unwrap() + .into(); + let expected_output_p2id_note = create_p2id_note_exact( + agglayer_faucet.id(), + destination_account_id, + vec![expected_asset], + NoteType::Public, + serial_num, + ) + .unwrap(); + + assert_eq!(RawOutputNote::Full(expected_output_p2id_note.clone()), *output_note); - // CONSUME THE OUTPUT NOTE WITH TARGET ACCOUNT + // CONSUME THE P2ID NOTE WITH THE DESTINATION ACCOUNT (simulated case only) // -------------------------------------------------------------------------------------------- - // Consume the output note with target account - let mut user_account_mut = user_account.clone(); - let consume_tx_context = mock_chain - .build_tx_context(user_account_mut.clone(), &[], slice::from_ref(&expected_p2id_note))? - .build()?; - let consume_executed_transaction = consume_tx_context.execute().await?; + // For the simulated case, we control the destination account and can verify the full + // end-to-end flow including P2ID consumption and balance updates. + if let Some(destination_account) = destination_account { + // Add the faucet transaction to the chain and prove the next block so the P2ID note is + // committed and can be consumed. + mock_chain.add_pending_executed_transaction(&executed_transaction)?; + mock_chain.prove_next_block()?; + + // Execute the consume transaction for the destination account + let consume_tx_context = mock_chain + .build_tx_context( + destination_account.id(), + &[], + slice::from_ref(&expected_output_p2id_note), + )? 
+ .build()?; + let consume_executed_transaction = consume_tx_context.execute().await?; + + // Verify the destination account received the minted asset + let mut destination_account = destination_account.clone(); + destination_account.apply_delta(consume_executed_transaction.account_delta())?; + + let balance = destination_account.vault().get_balance(agglayer_faucet.id())?; + assert_eq!( + balance, + miden_claim_amount.as_canonical_u64(), + "destination account balance does not match" + ); + } + Ok(()) +} + +#[tokio::test] +async fn solidity_verify_merkle_proof_compatibility() -> anyhow::Result<()> { + let merkle_paths = &*SOLIDITY_MERKLE_PROOF_VECTORS; + + assert_eq!(merkle_paths.leaves.len(), merkle_paths.roots.len()); + assert_eq!(merkle_paths.leaves.len() * 32, merkle_paths.merkle_paths.len()); - user_account_mut.apply_delta(consume_executed_transaction.account_delta())?; + for leaf_index in 0..32 { + let source = merkle_proof_verification_code(leaf_index, merkle_paths); - // Verify the account's vault now contains the expected fungible asset - let balance = user_account_mut.vault().get_balance(agglayer_faucet.id())?; - assert_eq!(balance, expected_asset.amount()); + let tx_script = CodeBuilder::new() + .with_statically_linked_library(&agglayer_library())? + .compile_tx_script(source)?; + TransactionContextBuilder::with_existing_mock_account() + .tx_script(tx_script.clone()) + .build()? 
+ .execute() + .await + .context(format!("failed to execute transaction with leaf index {leaf_index}"))?; + } Ok(()) } diff --git a/crates/miden-testing/tests/agglayer/bridge_out.rs b/crates/miden-testing/tests/agglayer/bridge_out.rs index ab832bb50a..ffb8b3ab19 100644 --- a/crates/miden-testing/tests/agglayer/bridge_out.rs +++ b/crates/miden-testing/tests/agglayer/bridge_out.rs @@ -1,192 +1,385 @@ extern crate alloc; -use miden_agglayer::{EthAddressFormat, b2agg_script, bridge_out_component}; +use miden_agglayer::errors::{ERR_B2AGG_TARGET_ACCOUNT_MISMATCH, ERR_FAUCET_NOT_REGISTERED}; +use miden_agglayer::{ + AggLayerBridge, + B2AggNote, + ConfigAggBridgeNote, + EthAddressFormat, + ExitRoot, + create_existing_agglayer_faucet, + create_existing_bridge_account, +}; +use miden_crypto::rand::FeltRng; +use miden_protocol::Felt; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{ Account, AccountId, AccountIdVersion, AccountStorageMode, AccountType, - StorageSlot, - StorageSlotName, }; use miden_protocol::asset::{Asset, FungibleAsset}; -use miden_protocol::note::{ - Note, - NoteAssets, - NoteInputs, - NoteMetadata, - NoteRecipient, - NoteScript, - NoteTag, - NoteType, -}; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, Word}; -use miden_standards::account::faucets::FungibleFaucetExt; -use miden_standards::note::WellKnownNote; -use miden_testing::{AccountState, Auth, MockChain}; -use rand::Rng; - -/// Tests the B2AGG (Bridge to AggLayer) note script with bridge_out account component. -/// -/// This test flow: -/// 1. Creates a network faucet to provide assets -/// 2. Creates a bridge account with the bridge_out component (using network storage) -/// 3. Creates a B2AGG note with assets from the network faucet -/// 4. Executes the B2AGG note consumption via network transaction -/// 5. 
Consumes the BURN note -#[tokio::test] -async fn test_bridge_out_consumes_b2agg_note() -> anyhow::Result<()> { - let mut builder = MockChain::builder(); +use miden_protocol::note::{NoteAssets, NoteScript, NoteType}; +use miden_protocol::transaction::RawOutputNote; +use miden_standards::account::faucets::TokenMetadata; +use miden_standards::note::StandardNote; +use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; +use miden_tx::utils::hex_to_bytes; - // Create a network faucet owner account - let faucet_owner_account_id = AccountId::dummy( - [1; 15], - AccountIdVersion::Version0, - AccountType::RegularAccountImmutableCode, - AccountStorageMode::Private, - ); +use super::test_utils::SOLIDITY_MMR_FRONTIER_VECTORS; - // Create a network faucet to provide assets for the B2AGG note - let faucet = - builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; +/// Reads the Local Exit Root (double-word) from the bridge account's storage. +/// +/// The Local Exit Root is stored in two dedicated value slots: +/// - [`AggLayerBridge::ler_lo_slot_name`] — low word of the root +/// - [`AggLayerBridge::ler_hi_slot_name`] — high word of the root +/// +/// Returns the 256-bit root as 8 `Felt`s: first the 4 elements of `root_lo` (in +/// reverse of their storage order), followed by the 4 elements of `root_hi` (also in +/// reverse of their storage order). For an empty/uninitialized tree, all elements are +/// zeros. 
+fn read_local_exit_root(account: &Account) -> Vec { + let root_lo_slot = AggLayerBridge::ler_lo_slot_name(); + let root_hi_slot = AggLayerBridge::ler_hi_slot_name(); + + let root_lo = account + .storage() + .get_item(root_lo_slot) + .expect("should be able to read LET root lo"); + let root_hi = account + .storage() + .get_item(root_hi_slot) + .expect("should be able to read LET root hi"); + + let mut root = Vec::with_capacity(8); + root.extend(root_lo.to_vec()); + root.extend(root_hi.to_vec()); + root +} - // Create a bridge account with the bridge_out component using network (public) storage - // Add a storage map for the bridge component to store MMR frontier data - let storage_slot_name = StorageSlotName::new("miden::agglayer::let").unwrap(); - let storage_slots = vec![StorageSlot::with_empty_map(storage_slot_name)]; - let bridge_component = bridge_out_component(storage_slots); - let account_builder = Account::builder(builder.rng_mut().random()) - .storage_mode(AccountStorageMode::Public) - .with_component(bridge_component); - let mut bridge_account = - builder.add_account_from_builder(Auth::IncrNonce, account_builder, AccountState::Exists)?; - - // CREATE B2AGG NOTE WITH ASSETS - // -------------------------------------------------------------------------------------------- +fn read_let_num_leaves(account: &Account) -> u64 { + let num_leaves_slot = AggLayerBridge::let_num_leaves_slot_name(); + let value = account + .storage() + .get_item(num_leaves_slot) + .expect("should be able to read LET num leaves"); + value.to_vec()[0].as_canonical_u64() +} - let amount = Felt::new(100); - let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); - let tag = NoteTag::new(0); - let note_type = NoteType::Public; // Use Public note type for network transaction +/// Tests that 32 sequential B2AGG note consumptions match all 32 Solidity MMR roots. +/// +/// This test exercises the complete bridge-out lifecycle: +/// 1. 
Creates a bridge account (empty faucet registry) and an agglayer faucet with conversion +/// metadata (origin token address, network, scale) +/// 2. Registers the faucet in the bridge's faucet registry via a CONFIG_AGG_BRIDGE note +/// 3. Creates a B2AGG note with assets from the agglayer faucet +/// 4. Consumes the B2AGG note against the bridge account — the bridge's `bridge_out` procedure: +/// - Validates the faucet is registered via `convert_asset` +/// - Calls the faucet's `asset_to_origin_asset` via FPI to get the scaled amount, origin token +/// address, and origin network +/// - Writes the leaf data and computes the Keccak hash for the MMR +/// - Creates a BURN note addressed to the faucet +/// 5. Verifies the BURN note was created with the correct asset, tag, and script +/// 6. Consumes the BURN note with the faucet to burn the tokens +#[tokio::test] +async fn bridge_out_consecutive() -> anyhow::Result<()> { + let vectors = &*SOLIDITY_MMR_FRONTIER_VECTORS; + let note_count = 32usize; + assert_eq!(vectors.amounts.len(), note_count, "amount vectors should contain 32 entries"); + assert_eq!(vectors.roots.len(), note_count, "root vectors should contain 32 entries"); + assert_eq!( + vectors.destination_networks.len(), + note_count, + "destination network vectors should contain 32 entries" + ); + assert_eq!( + vectors.destination_addresses.len(), + note_count, + "destination address vectors should contain 32 entries" + ); + + let mut builder = MockChain::builder(); - // Get the B2AGG note script - let b2agg_script = b2agg_script(); + // CREATE BRIDGE ADMIN ACCOUNT (sends CONFIG_AGG_BRIDGE notes) + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - // Create note inputs with destination network and address - // destination_network: u32 (AggLayer-assigned network ID) - // destination_address: 20 bytes (Ethereum address) split into 5 u32 values - let destination_network = Felt::new(1); // Example 
network ID - let destination_address = "0x1234567890abcdef1122334455667788990011aa"; - let eth_address = - EthAddressFormat::from_hex(destination_address).expect("Valid Ethereum address"); - let address_felts = eth_address.to_elements().to_vec(); + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - // Combine network ID and address felts into note inputs (6 felts total) - let mut input_felts = vec![destination_network]; - input_felts.extend(address_felts); + let mut bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; - let inputs = NoteInputs::new(input_felts.clone())?; + let expected_amounts = vectors + .amounts + .iter() + .map(|amount| amount.parse::().expect("valid amount decimal string")) + .collect::>(); + let total_burned: u64 = expected_amounts.iter().sum(); - // Create the B2AGG note with assets from the faucet - let b2agg_note_metadata = NoteMetadata::new(faucet.id(), note_type, tag); - let b2agg_note_assets = NoteAssets::new(vec![bridge_asset])?; - let serial_num = Word::from([1, 2, 3, 4u32]); - let b2agg_note_script = NoteScript::new(b2agg_script); - let b2agg_note_recipient = NoteRecipient::new(serial_num, b2agg_note_script, inputs); - let b2agg_note = Note::new(b2agg_note_assets, b2agg_note_metadata, b2agg_note_recipient); + // CREATE AGGLAYER FAUCET ACCOUNT (with conversion metadata for FPI) + // -------------------------------------------------------------------------------------------- + let origin_token_address = EthAddressFormat::from_hex(&vectors.origin_token_address) + .expect("valid shared origin token address"); + let origin_network = 64u32; + let scale = 0u8; + let faucet = create_existing_agglayer_faucet( + builder.rng_mut().draw_word(), + "AGG", + 8, + 
Felt::new(FungibleAsset::MAX_AMOUNT), + Felt::new(total_burned), + bridge_account.id(), + &origin_token_address, + origin_network, + scale, + ); + builder.add_account(faucet.clone())?; + + // CONFIG_AGG_BRIDGE note to register the faucet in the bridge (sent by bridge admin) + let config_note = ConfigAggBridgeNote::create( + faucet.id(), + bridge_admin.id(), + bridge_account.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(config_note.clone())); + + // CREATE ALL B2AGG NOTES UPFRONT (before building mock chain) + // -------------------------------------------------------------------------------------------- + let mut notes = Vec::with_capacity(note_count); + for (i, &amount) in expected_amounts.iter().enumerate().take(note_count) { + let destination_network = vectors.destination_networks[i]; + let eth_address = EthAddressFormat::from_hex(&vectors.destination_addresses[i]) + .expect("valid destination address"); + + let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount).unwrap().into(); + let note = B2AggNote::create( + destination_network, + eth_address, + NoteAssets::new(vec![bridge_asset])?, + bridge_account.id(), + faucet.id(), + builder.rng_mut(), + )?; + builder.add_output_note(RawOutputNote::Full(note.clone())); + notes.push(note); + } - // Add the B2AGG note to the mock chain - builder.add_output_note(OutputNote::Full(b2agg_note.clone())); let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; - // Get BURN note script to add to the transaction context - let burn_note_script: NoteScript = WellKnownNote::BURN.script(); - - // EXECUTE B2AGG NOTE AGAINST BRIDGE ACCOUNT (NETWORK TRANSACTION) + // STEP 1: REGISTER FAUCET VIA CONFIG_AGG_BRIDGE NOTE // -------------------------------------------------------------------------------------------- - let tx_context = mock_chain - .build_tx_context(bridge_account.id(), &[b2agg_note.id()], &[])? 
- .add_note_script(burn_note_script.clone()) - .build()?; - let executed_transaction = tx_context.execute().await?; + let config_executed = mock_chain + .build_tx_context(bridge_account.id(), &[config_note.id()], &[])? + .build()? + .execute() + .await?; + bridge_account.apply_delta(config_executed.account_delta())?; + mock_chain.add_pending_executed_transaction(&config_executed)?; + mock_chain.prove_next_block()?; - // VERIFY PUBLIC BURN NOTE WAS CREATED + // STEP 2: CONSUME 32 B2AGG NOTES AND VERIFY FRONTIER EVOLUTION + // -------------------------------------------------------------------------------------------- + let burn_note_script: NoteScript = StandardNote::BURN.script(); + let mut burn_note_ids = Vec::with_capacity(note_count); + + for (i, note) in notes.iter().enumerate() { + let foreign_account_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; + + let executed_tx = mock_chain + .build_tx_context(bridge_account.clone(), &[note.id()], &[])? + .add_note_script(burn_note_script.clone()) + .foreign_accounts(vec![foreign_account_inputs]) + .build()? 
+ .execute() + .await?; + + assert_eq!( + executed_tx.output_notes().num_notes(), + 1, + "Expected one BURN note after consume #{}", + i + 1 + ); + let burn_note = match executed_tx.output_notes().get_note(0) { + RawOutputNote::Full(note) => note, + _ => panic!("Expected OutputNote::Full variant for BURN note"), + }; + burn_note_ids.push(burn_note.id()); + + let expected_asset = Asset::from(FungibleAsset::new(faucet.id(), expected_amounts[i])?); + assert!( + burn_note.assets().iter().any(|asset| asset == &expected_asset), + "BURN note after consume #{} should contain the bridged asset", + i + 1 + ); + assert_eq!( + burn_note.metadata().note_type(), + NoteType::Public, + "BURN note should be public" + ); + let attachment = burn_note.metadata().attachment(); + let network_target = miden_standards::note::NetworkAccountTarget::try_from(attachment) + .expect("BURN note attachment should be a valid NetworkAccountTarget"); + assert_eq!( + network_target.target_id(), + faucet.id(), + "BURN note attachment should target the faucet" + ); + assert_eq!( + burn_note.recipient().script().root(), + StandardNote::BURN.script_root(), + "BURN note should use the BURN script" + ); + + bridge_account.apply_delta(executed_tx.account_delta())?; + assert_eq!( + read_let_num_leaves(&bridge_account), + (i + 1) as u64, + "LET leaf count should match consumed notes" + ); + + let expected_ler = + ExitRoot::new(hex_to_bytes(&vectors.roots[i]).expect("valid root hex")).to_elements(); + assert_eq!( + read_local_exit_root(&bridge_account), + expected_ler, + "Local Exit Root after {} leaves should match the Solidity-generated root", + i + 1 + ); + + mock_chain.add_pending_executed_transaction(&executed_tx)?; + mock_chain.prove_next_block()?; + } + + // STEP 3: CONSUME ALL BURN NOTES WITH THE AGGLAYER FAUCET // -------------------------------------------------------------------------------------------- - // The bridge_out component should create a PUBLIC BURN note addressed to the faucet + let 
initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); assert_eq!( - executed_transaction.output_notes().num_notes(), - 1, - "Expected one BURN note to be created" + initial_token_supply, + Felt::new(total_burned), + "Initial issuance should match all pending burns" ); - let output_note = executed_transaction.output_notes().get_note(0); + let mut faucet = faucet; + for burn_note_id in burn_note_ids { + let burn_executed_tx = mock_chain + .build_tx_context(faucet.id(), &[burn_note_id], &[])? + .build()? + .execute() + .await?; + assert_eq!( + burn_executed_tx.output_notes().num_notes(), + 0, + "Burn transaction should not create output notes" + ); + faucet.apply_delta(burn_executed_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&burn_executed_tx)?; + mock_chain.prove_next_block()?; + } + + let final_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); + assert_eq!( + final_token_supply, + Felt::new(initial_token_supply.as_canonical_u64() - total_burned), + "Token supply should decrease by the sum of 32 bridged amounts" + ); + + Ok(()) +} - // Extract the full note from the OutputNote enum - let burn_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for BURN note"), - }; +/// Tests that bridging out fails when the faucet is not registered in the bridge's registry. +/// +/// This test verifies the faucet allowlist check in bridge_out's `convert_asset` procedure: +/// 1. Creates a bridge account with an empty faucet registry (no faucets registered) +/// 2. Creates a B2AGG note with an asset from an agglayer faucet +/// 3. 
Attempts to consume the B2AGG note against the bridge — this should fail because +/// `convert_asset` checks the faucet registry and panics with ERR_FAUCET_NOT_REGISTERED when the +/// faucet is not found +#[tokio::test] +async fn test_bridge_out_fails_with_unregistered_faucet() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); - // Verify the BURN note is public - assert_eq!(burn_note.metadata().note_type(), NoteType::Public, "BURN note should be public"); + // CREATE BRIDGE ADMIN ACCOUNT + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - // Verify the BURN note contains the bridged asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; - let expected_asset_obj = Asset::from(expected_asset); - assert!( - burn_note.assets().iter().any(|asset| asset == &expected_asset_obj), - "BURN note should contain the bridged asset" - ); + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; - assert_eq!( - burn_note.metadata().tag(), - NoteTag::with_account_target(faucet.id()), - "BURN note should have the correct tag" + // CREATE BRIDGE ACCOUNT (empty faucet registry — no faucets registered) + // -------------------------------------------------------------------------------------------- + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), ); + builder.add_account(bridge_account.clone())?; - // Verify the BURN note uses the correct script - assert_eq!( - burn_note.recipient().script().root(), - burn_note_script.root(), - "BURN note should use the BURN script" + // CREATE AGGLAYER FAUCET ACCOUNT (NOT registered in the bridge) + // -------------------------------------------------------------------------------------------- + let origin_token_address = 
EthAddressFormat::new([0u8; 20]); + let faucet = create_existing_agglayer_faucet( + builder.rng_mut().draw_word(), + "AGG", + 8, + Felt::new(FungibleAsset::MAX_AMOUNT), + Felt::new(100), + bridge_account.id(), + &origin_token_address, + 0, // origin_network + 0, // scale ); + builder.add_account(faucet.clone())?; - // Apply the delta to the bridge account - bridge_account.apply_delta(executed_transaction.account_delta())?; + // CREATE B2AGG NOTE WITH ASSETS FROM THE UNREGISTERED FAUCET + // -------------------------------------------------------------------------------------------- + let amount = Felt::new(100); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_transaction)?; + let destination_address = "0x1234567890abcdef1122334455667788990011aa"; + let eth_address = + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); + + let b2agg_note = B2AggNote::create( + 1u32, // destination_network + eth_address, + NoteAssets::new(vec![bridge_asset])?, + bridge_account.id(), + faucet.id(), + builder.rng_mut(), + )?; + + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); + let mut mock_chain = builder.build()?; mock_chain.prove_next_block()?; - // CONSUME THE BURN NOTE WITH THE NETWORK FAUCET + // ATTEMPT TO BRIDGE OUT WITHOUT REGISTERING THE FAUCET (SHOULD FAIL) // -------------------------------------------------------------------------------------------- - // Check the initial token issuance before burning - let initial_issuance = faucet.get_token_issuance().unwrap(); - assert_eq!(initial_issuance, Felt::new(100), "Initial issuance should be 100"); - - // Execute the BURN note against the network faucet - let burn_tx_context = - mock_chain.build_tx_context(faucet.id(), &[burn_note.id()], &[])?.build()?; - let burn_executed_transaction = burn_tx_context.execute().await?; + 
let foreign_account_inputs = mock_chain.get_foreign_account_inputs(faucet.id())?; - // Verify the burn transaction was successful - no output notes should be created - assert_eq!( - burn_executed_transaction.output_notes().num_notes(), - 0, - "Burn transaction should not create output notes" - ); + let result = mock_chain + .build_tx_context(bridge_account.id(), &[b2agg_note.id()], &[])? + .foreign_accounts(vec![foreign_account_inputs]) + .build()? + .execute() + .await; - // Apply the delta to the faucet account and verify the token issuance decreased - let mut faucet = faucet; - faucet.apply_delta(burn_executed_transaction.account_delta())?; - let final_issuance = faucet.get_token_issuance().unwrap(); - assert_eq!( - final_issuance, - Felt::new(initial_issuance.as_int() - amount.as_int()), - "Token issuance should decrease by the burned amount" - ); + assert_transaction_executor_error!(result, ERR_FAUCET_NOT_REGISTERED); Ok(()) } @@ -203,7 +396,7 @@ async fn test_bridge_out_consumes_b2agg_note() -> anyhow::Result<()> { /// 4. The same user account consumes the B2AGG note (triggering reclaim branch) /// 5. 
Verifies that assets are added back to the account and no BURN note is created #[tokio::test] -async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { +async fn b2agg_note_reclaim_scenario() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create a network faucet owner account @@ -218,44 +411,54 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { let faucet = builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; + // Create a bridge admin account + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a GER manager account (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a bridge account (includes a `bridge` component) + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; + // Create a user account that will create and consume the B2AGG note - let mut user_account = builder.add_existing_wallet(Auth::BasicAuth)?; + let mut user_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // CREATE B2AGG NOTE WITH USER ACCOUNT AS SENDER // -------------------------------------------------------------------------------------------- - let amount = Felt::new(50); - let bridge_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); - let tag = NoteTag::new(0); - let note_type = NoteType::Public; - - // Get the B2AGG note script - let b2agg_script = b2agg_script(); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); - // Create note inputs with destination network and address - let destination_network = Felt::new(1); + let 
destination_network = 1u32; let destination_address = "0x1234567890abcdef1122334455667788990011aa"; let eth_address = - EthAddressFormat::from_hex(destination_address).expect("Valid Ethereum address"); - let address_felts = eth_address.to_elements().to_vec(); - - // Combine network ID and address felts into note inputs (6 felts total) - let mut input_felts = vec![destination_network]; - input_felts.extend(address_felts); - - let inputs = NoteInputs::new(input_felts.clone())?; - - // Create the B2AGG note with the USER ACCOUNT as the sender - // This is the key difference - the note sender will be the same as the consuming account - let b2agg_note_metadata = NoteMetadata::new(user_account.id(), note_type, tag); - let b2agg_note_assets = NoteAssets::new(vec![bridge_asset])?; - let serial_num = Word::from([1, 2, 3, 4u32]); - let b2agg_note_script = NoteScript::new(b2agg_script); - let b2agg_note_recipient = NoteRecipient::new(serial_num, b2agg_note_script, inputs); - let b2agg_note = Note::new(b2agg_note_assets, b2agg_note_metadata, b2agg_note_recipient); - - // Add the B2AGG note to the mock chain - builder.add_output_note(OutputNote::Full(b2agg_note.clone())); + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); + + let assets = NoteAssets::new(vec![bridge_asset])?; + + // Create the B2AGG note with the USER ACCOUNT as the sender. + // This is the key difference — the note sender will be the same as the consuming account. 
+ let b2agg_note = B2AggNote::create( + destination_network, + eth_address, + assets, + bridge_account.id(), + user_account.id(), + builder.rng_mut(), + )?; + + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); let mut mock_chain = builder.build()?; // Store the initial asset balance of the user account @@ -270,7 +473,6 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { // VERIFY NO BURN NOTE WAS CREATED (RECLAIM BRANCH) // -------------------------------------------------------------------------------------------- - // In the reclaim scenario, no BURN note should be created assert_eq!( executed_transaction.output_notes().num_notes(), 0, @@ -283,16 +485,113 @@ async fn test_b2agg_note_reclaim_scenario() -> anyhow::Result<()> { // VERIFY ASSETS WERE ADDED BACK TO THE ACCOUNT // -------------------------------------------------------------------------------------------- let final_balance = user_account.vault().get_balance(faucet.id()).unwrap_or(0u64); - let expected_balance = initial_balance + amount.as_int(); - assert_eq!( - final_balance, expected_balance, + final_balance, + initial_balance + amount.as_canonical_u64(), "User account should have received the assets back from the B2AGG note" ); - // Apply the transaction to the mock chain mock_chain.add_pending_executed_transaction(&executed_transaction)?; mock_chain.prove_next_block()?; Ok(()) } + +/// Tests that a non-target account cannot consume a B2AGG note (non-reclaim branch). +/// +/// This test covers the security check in the B2AGG note script that ensures only the +/// designated target account (specified in the note attachment) can consume the note +/// when not in reclaim mode. +/// +/// Test flow: +/// 1. Creates a network faucet to provide assets +/// 2. Creates a bridge account as the designated target for the B2AGG note +/// 3. Creates a user account as the sender (creator) of the B2AGG note +/// 4. Creates a "malicious" account with a bridge interface +/// 5. 
Attempts to consume the B2AGG note with the malicious account +/// 6. Verifies that the transaction fails with ERR_B2AGG_TARGET_ACCOUNT_MISMATCH +#[tokio::test] +async fn b2agg_note_non_target_account_cannot_consume() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // Create a network faucet owner account + let faucet_owner_account_id = AccountId::dummy( + [1; 15], + AccountIdVersion::Version0, + AccountType::RegularAccountImmutableCode, + AccountStorageMode::Private, + ); + + // Create a network faucet to provide assets for the B2AGG note + let faucet = + builder.add_existing_network_faucet("AGG", 1000, faucet_owner_account_id, Some(100))?; + + // Create a bridge admin account + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a GER manager account (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a bridge account as the designated TARGET for the B2AGG note + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; + + // Create a user account as the SENDER of the B2AGG note + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // Create a "malicious" account with a bridge interface + let malicious_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(malicious_account.clone())?; + + // CREATE B2AGG NOTE + // -------------------------------------------------------------------------------------------- + let amount = Felt::new(50); + let bridge_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); + + let 
destination_network = 1u32; + let destination_address = "0x1234567890abcdef1122334455667788990011aa"; + let eth_address = + EthAddressFormat::from_hex(destination_address).expect("valid Ethereum address"); + + let assets = NoteAssets::new(vec![bridge_asset])?; + + // Create the B2AGG note targeting the real bridge account + let b2agg_note = B2AggNote::create( + destination_network, + eth_address, + assets, + bridge_account.id(), + sender_account.id(), + builder.rng_mut(), + )?; + + builder.add_output_note(RawOutputNote::Full(b2agg_note.clone())); + let mock_chain = builder.build()?; + + // ATTEMPT TO CONSUME B2AGG NOTE WITH MALICIOUS ACCOUNT (SHOULD FAIL) + // -------------------------------------------------------------------------------------------- + let result = mock_chain + .build_tx_context(malicious_account.id(), &[], &[b2agg_note])? + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_B2AGG_TARGET_ACCOUNT_MISMATCH); + + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/config_bridge.rs b/crates/miden-testing/tests/agglayer/config_bridge.rs new file mode 100644 index 0000000000..8251755169 --- /dev/null +++ b/crates/miden-testing/tests/agglayer/config_bridge.rs @@ -0,0 +1,91 @@ +extern crate alloc; + +use miden_agglayer::{AggLayerBridge, ConfigAggBridgeNote, create_existing_bridge_account}; +use miden_protocol::Felt; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::account::{AccountId, AccountIdVersion, AccountStorageMode, AccountType}; +use miden_protocol::block::account_tree::AccountIdKey; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::transaction::RawOutputNote; +use miden_testing::{Auth, MockChain}; + +/// Tests that a CONFIG_AGG_BRIDGE note registers a faucet in the bridge's faucet registry. +/// +/// Flow: +/// 1. Create an admin (sender) account +/// 2. Create a bridge account with the admin as authorized operator +/// 3. 
Create a CONFIG_AGG_BRIDGE note carrying a faucet ID, sent by the admin +/// 4. Consume the note with the bridge account +/// 5. Verify the faucet is now in the bridge's faucet_registry map +#[tokio::test] +async fn test_config_agg_bridge_registers_faucet() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // CREATE BRIDGE ADMIN ACCOUNT (note sender) + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (not used in this test, but distinct from admin) + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT (starts with empty faucet registry) + let bridge_account = create_existing_bridge_account( + builder.rng_mut().draw_word(), + bridge_admin.id(), + ger_manager.id(), + ); + builder.add_account(bridge_account.clone())?; + + // Use a dummy faucet ID to register (any valid AccountId will do) + let faucet_to_register = AccountId::dummy( + [42; 15], + AccountIdVersion::Version0, + AccountType::FungibleFaucet, + AccountStorageMode::Network, + ); + + // Verify the faucet is NOT in the registry before registration + let registry_slot_name = AggLayerBridge::faucet_registry_slot_name(); + let key = AccountIdKey::new(faucet_to_register).as_word(); + let value_before = bridge_account.storage().get_map_item(registry_slot_name, key)?; + assert_eq!( + value_before, + [Felt::ZERO; 4].into(), + "Faucet should not be in registry before registration" + ); + + // CREATE CONFIG_AGG_BRIDGE NOTE + let config_note = ConfigAggBridgeNote::create( + faucet_to_register, + bridge_admin.id(), + bridge_account.id(), + builder.rng_mut(), + )?; + + builder.add_output_note(RawOutputNote::Full(config_note.clone())); + let mock_chain = builder.build()?; + + // CONSUME THE CONFIG_AGG_BRIDGE NOTE WITH THE BRIDGE ACCOUNT + let tx_context = mock_chain + .build_tx_context(bridge_account.id(), 
&[config_note.id()], &[])? + .build()?; + let executed_transaction = tx_context.execute().await?; + + // VERIFY FAUCET IS NOW REGISTERED + let mut updated_bridge = bridge_account.clone(); + updated_bridge.apply_delta(executed_transaction.account_delta())?; + + let value_after = updated_bridge.storage().get_map_item(registry_slot_name, key)?; + // TODO: use a getter helper on AggLayerBridge once available + // (see https://github.com/0xMiden/protocol/issues/2548) + let expected_value = [Felt::ONE, Felt::ZERO, Felt::ZERO, Felt::ZERO].into(); + assert_eq!( + value_after, expected_value, + "Faucet should be registered with value [1, 0, 0, 0]" + ); + + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/global_index.rs b/crates/miden-testing/tests/agglayer/global_index.rs new file mode 100644 index 0000000000..b84c017e1d --- /dev/null +++ b/crates/miden-testing/tests/agglayer/global_index.rs @@ -0,0 +1,96 @@ +extern crate alloc; + +use alloc::sync::Arc; + +use miden_agglayer::errors::{ + ERR_BRIDGE_NOT_MAINNET, + ERR_LEADING_BITS_NON_ZERO, + ERR_ROLLUP_INDEX_NON_ZERO, +}; +use miden_agglayer::{GlobalIndex, agglayer_library}; +use miden_assembly::{Assembler, DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_processor::Program; +use miden_testing::{ExecError, assert_execution_error}; + +use crate::agglayer::test_utils::execute_program_with_default_host; + +fn assemble_process_global_index_program(global_index: GlobalIndex) -> Program { + // Convert GlobalIndex to 8 field elements (big-endian: [0]=MSB, [7]=LSB) + let elements = global_index.to_elements(); + let [g0, g1, g2, g3, g4, g5, g6, g7] = elements.try_into().unwrap(); + + let script_code = format!( + r#" + use miden::core::sys + use miden::agglayer::bridge::bridge_in + + begin + push.{g7}.{g6}.{g5}.{g4}.{g3}.{g2}.{g1}.{g0} + exec.bridge_in::process_global_index_mainnet + exec.sys::truncate_stack + end + "# + ); + + Assembler::new(Arc::new(DefaultSourceManager::default())) + 
.with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_library()) + .unwrap() + .assemble_program(&script_code) + .unwrap() +} + +#[tokio::test] +async fn test_process_global_index_mainnet_returns_leaf_index() -> anyhow::Result<()> { + // Global index format (32 bytes, big-endian like Solidity uint256): + // - bytes[0..20]: leading zeros + // - bytes[20..24]: mainnet_flag = 1 (BE u32) + // - bytes[24..28]: rollup_index = 0 (BE u32) + // - bytes[28..32]: leaf_index = 2 (BE u32) + let mut bytes = [0u8; 32]; + bytes[23] = 1; // mainnet flag = 1 (BE: LSB at byte 23) + bytes[31] = 2; // leaf index = 2 (BE: LSB at byte 31) + let program = assemble_process_global_index_program(GlobalIndex::new(bytes)); + + let exec_output = execute_program_with_default_host(program, None).await?; + + assert_eq!(exec_output.stack[0].as_canonical_u64(), 2); + Ok(()) +} + +#[tokio::test] +async fn test_process_global_index_mainnet_rejects_non_zero_leading_bits() { + let mut bytes = [0u8; 32]; + bytes[3] = 1; // non-zero leading bits (BE: LSB of first u32 limb) + bytes[23] = 1; // mainnet flag = 1 + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program(GlobalIndex::new(bytes)); + + let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); + assert_execution_error!(err, ERR_LEADING_BITS_NON_ZERO); +} + +#[tokio::test] +async fn test_process_global_index_mainnet_rejects_flag_limb_upper_bits() { + let mut bytes = [0u8; 32]; + bytes[23] = 3; // mainnet flag limb = 3 (upper bits set, only lowest bit allowed) + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program(GlobalIndex::new(bytes)); + + let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); + assert_execution_error!(err, ERR_BRIDGE_NOT_MAINNET); +} + +#[tokio::test] +async fn test_process_global_index_mainnet_rejects_non_zero_rollup_index() { + let mut bytes = [0u8; 32]; + 
bytes[23] = 1; // mainnet flag = 1 + bytes[27] = 7; // rollup index = 7 (BE: LSB at byte 27) + bytes[31] = 2; // leaf index = 2 + let program = assemble_process_global_index_program(GlobalIndex::new(bytes)); + + let err = execute_program_with_default_host(program, None).await.map_err(ExecError::new); + assert_execution_error!(err, ERR_ROLLUP_INDEX_NON_ZERO); +} diff --git a/crates/miden-testing/tests/agglayer/leaf_utils.rs b/crates/miden-testing/tests/agglayer/leaf_utils.rs new file mode 100644 index 0000000000..6f40fb9b15 --- /dev/null +++ b/crates/miden-testing/tests/agglayer/leaf_utils.rs @@ -0,0 +1,199 @@ +extern crate alloc; + +use alloc::sync::Arc; +use alloc::vec::Vec; + +use miden_agglayer::agglayer_library; +use miden_agglayer::claim_note::Keccak256Output; +use miden_assembly::{Assembler, DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_crypto::SequentialCommit; +use miden_processor::advice::AdviceInputs; +use miden_processor::utils::packed_u32_elements_to_bytes; +use miden_protocol::{Felt, Word}; +use miden_tx::utils::hex_to_bytes; + +use super::test_utils::{ + LEAF_VALUE_VECTORS_JSON, + LeafValueVector, + execute_program_with_default_host, +}; + +// HELPER FUNCTIONS +// ================================================================================================ + +fn felts_to_le_bytes(limbs: &[Felt]) -> Vec { + let mut bytes = Vec::with_capacity(limbs.len() * 4); + for limb in limbs.iter() { + let u32_value = limb.as_canonical_u64() as u32; + bytes.extend_from_slice(&u32_value.to_le_bytes()); + } + bytes +} + +// TESTS +// ================================================================================================ + +/// Test that the `pack_leaf_data` procedure produces the correct byte layout. 
+#[tokio::test] +async fn pack_leaf_data() -> anyhow::Result<()> { + let vector: LeafValueVector = + serde_json::from_str(LEAF_VALUE_VECTORS_JSON).expect("failed to parse leaf value vector"); + + let leaf_data = vector.to_leaf_data(); + + // Build expected bytes + let mut expected_packed_bytes: Vec = Vec::new(); + expected_packed_bytes.push(0u8); + expected_packed_bytes.extend_from_slice(&leaf_data.origin_network.to_be_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.origin_token_address.as_bytes()); + expected_packed_bytes.extend_from_slice(&leaf_data.destination_network.to_be_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.destination_address.as_bytes()); + expected_packed_bytes.extend_from_slice(leaf_data.amount.as_bytes()); + let metadata_hash_bytes: [u8; 32] = hex_to_bytes(&vector.metadata_hash).unwrap(); + expected_packed_bytes.extend_from_slice(&metadata_hash_bytes); + assert_eq!(expected_packed_bytes.len(), 113); + + let agglayer_lib = agglayer_library(); + let leaf_data_elements = leaf_data.to_elements(); + let leaf_data_bytes: Vec = packed_u32_elements_to_bytes(&leaf_data_elements); + assert_eq!( + leaf_data_bytes.len(), + 128, + "expected 8 words * 4 felts * 4 bytes per felt = 128 bytes" + ); + assert_eq!(leaf_data_bytes[116..], vec![0; 12], "the last 3 felts are pure padding"); + assert_eq!(leaf_data_bytes[3], expected_packed_bytes[0], "the first byte is the leaf type"); + assert_eq!( + leaf_data_bytes[4..8], + expected_packed_bytes[1..5], + "the next 4 bytes are the origin network" + ); + assert_eq!( + leaf_data_bytes[8..28], + expected_packed_bytes[5..25], + "the next 20 bytes are the origin token address" + ); + assert_eq!( + leaf_data_bytes[28..32], + expected_packed_bytes[25..29], + "the next 4 bytes are the destination network" + ); + assert_eq!( + leaf_data_bytes[32..52], + expected_packed_bytes[29..49], + "the next 20 bytes are the destination address" + ); + assert_eq!( + leaf_data_bytes[52..84], + 
expected_packed_bytes[49..81], + "the next 32 bytes are the amount" + ); + assert_eq!( + leaf_data_bytes[84..116], + expected_packed_bytes[81..113], + "the next 32 bytes are the metadata hash" + ); + + assert_eq!(leaf_data_bytes[3..116], expected_packed_bytes, "byte packing is as expected"); + + let key: Word = leaf_data.to_commitment(); + let advice_inputs = AdviceInputs::default().with_map(vec![(key, leaf_data_elements.clone())]); + + let source = format!( + r#" + use miden::core::mem + use miden::agglayer::bridge::leaf_utils + + const LEAF_DATA_START_PTR = 0 + const LEAF_DATA_NUM_WORDS = 8 + + begin + push.{key} + + adv.push_mapval + push.LEAF_DATA_START_PTR push.LEAF_DATA_NUM_WORDS + exec.mem::pipe_preimage_to_memory drop + + exec.leaf_utils::pack_leaf_data + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, Some(advice_inputs)).await?; + + // Read packed elements from memory at addresses 0..29 + let ctx = miden_processor::ContextId::root(); + + let packed_elements: Vec = (0..29u32) + .map(|addr| { + exec_output + .memory + .read_element(ctx, Felt::from(addr)) + .expect("address should be valid") + }) + .collect(); + + let packed_bytes: Vec = felts_to_le_bytes(&packed_elements); + + // push 3 more zero bytes for packing, since `pack_leaf_data` should leave us with the last 3 + // bytes set to 0 (prep for hashing, where padding bytes must be 0) + expected_packed_bytes.extend_from_slice(&[0u8; 3]); + + assert_eq!( + &packed_bytes, &expected_packed_bytes, + "Packed bytes don't match expected Solidity encoding" + ); + + Ok(()) +} + +#[tokio::test] +async fn get_leaf_value() -> anyhow::Result<()> { + let vector: LeafValueVector = + serde_json::from_str(LEAF_VALUE_VECTORS_JSON).expect("failed to parse leaf 
value vector"); + + let leaf_data = vector.to_leaf_data(); + let key: Word = leaf_data.to_commitment(); + let advice_inputs = AdviceInputs::default().with_map(vec![(key, leaf_data.to_elements())]); + + let source = format!( + r#" + use miden::core::sys + use miden::agglayer::bridge::bridge_in + + begin + push.{key} + exec.bridge_in::get_leaf_value + exec.sys::truncate_stack + end + "# + ); + let agglayer_lib = agglayer_library(); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, Some(advice_inputs)).await?; + let computed_leaf_value: Vec = exec_output.stack[0..8].to_vec(); + let expected_leaf_value_bytes: [u8; 32] = + hex_to_bytes(&vector.leaf_value).expect("valid leaf value hex"); + let expected_leaf_value: Vec = + Keccak256Output::from(expected_leaf_value_bytes).to_elements(); + + assert_eq!(computed_leaf_value, expected_leaf_value); + Ok(()) +} diff --git a/crates/miden-testing/tests/agglayer/mmr_frontier.rs b/crates/miden-testing/tests/agglayer/mmr_frontier.rs new file mode 100644 index 0000000000..470a3b2f82 --- /dev/null +++ b/crates/miden-testing/tests/agglayer/mmr_frontier.rs @@ -0,0 +1,229 @@ +use alloc::format; +use alloc::string::ToString; + +use miden_agglayer::claim_note::SmtNode; +use miden_agglayer::{ExitRoot, agglayer_library}; +use miden_crypto::hash::keccak::{Keccak256, Keccak256Digest}; +use miden_protocol::utils::sync::LazyLock; +use miden_standards::code_builder::CodeBuilder; +use miden_testing::TransactionContextBuilder; +// KECCAK MMR FRONTIER +// ================================================================================================ + +static CANONICAL_ZEROS_32: LazyLock> = LazyLock::new(|| { + let mut zeros_by_height = Vec::with_capacity(32); + + // Push the zero of height 0 to the 
zeros vec. This is done separately because the zero of + // height 0 is just a plain zero array ([0u8; 32]), it doesn't require to perform any hashing. + zeros_by_height.push(Keccak256Digest::default()); + + // Compute the canonical zeros for each height from 1 to 32 + // Zero of height `n` is computed as: `ZERO_N = Keccak256::merge(ZERO_{N-1}, ZERO_{N-1})` + for _ in 1..32 { + let last_zero = zeros_by_height.last().expect("zeros vec should have at least one value"); + let current_height_zero = Keccak256::merge(&[*last_zero, *last_zero]); + zeros_by_height.push(current_height_zero); + } + + zeros_by_height +}); + +struct KeccakMmrFrontier32 { + num_leaves: u32, + frontier: [Keccak256Digest; TREE_HEIGHT], +} + +impl KeccakMmrFrontier32 { + pub fn new() -> Self { + Self { + num_leaves: 0, + frontier: [Keccak256Digest::default(); TREE_HEIGHT], + } + } + + pub fn append_and_update_frontier(&mut self, new_leaf: Keccak256Digest) -> Keccak256Digest { + let mut curr_hash = new_leaf; + let mut idx = self.num_leaves; + self.num_leaves += 1; + + for height in 0..TREE_HEIGHT { + if (idx & 1) == 0 { + // This height wasn't "occupied" yet: store cur as the subtree root at height h. + self.frontier[height] = curr_hash; + + // Pair it with the canonical zero subtree on the right at this height. + curr_hash = Keccak256::merge(&[curr_hash, CANONICAL_ZEROS_32[height]]); + } else { + // This height already had a subtree root stored in frontier[h], merge into parent. 
+ curr_hash = Keccak256::merge(&[self.frontier[height], curr_hash]) + } + + idx >>= 1; + } + + // curr_hash at this point is equal to the root of the full tree + curr_hash + } +} + +// TESTS +// ================================================================================================ + +#[tokio::test] +async fn test_append_and_update_frontier() -> anyhow::Result<()> { + let mut mmr_frontier = KeccakMmrFrontier32::<32>::new(); + + let mut source = "use miden::agglayer::bridge::mmr_frontier32_keccak begin".to_string(); + + for round in 0..32 { + // construct the leaf from the hex representation of the round number + let leaf = Keccak256Digest::try_from(format!("{:#066x}", round).as_str()).unwrap(); + let root = mmr_frontier.append_and_update_frontier(leaf); + let num_leaves = mmr_frontier.num_leaves; + + source.push_str(&leaf_assertion_code( + SmtNode::new(leaf.into()), + ExitRoot::new(root.into()), + num_leaves, + )); + } + + source.push_str("end"); + + let tx_script = CodeBuilder::new() + .with_statically_linked_library(&agglayer_library())? + .compile_tx_script(source)?; + + TransactionContextBuilder::with_existing_mock_account() + .tx_script(tx_script.clone()) + .build()? + .execute() + .await?; + + Ok(()) +} + +#[tokio::test] +async fn test_check_empty_mmr_root() -> anyhow::Result<()> { + let zero_leaf = Keccak256Digest::default(); + let zero_31 = *CANONICAL_ZEROS_32.get(31).expect("zeros should have 32 values total"); + let empty_mmr_root = Keccak256::merge(&[zero_31, zero_31]); + + let mut source = "use miden::agglayer::bridge::mmr_frontier32_keccak begin".to_string(); + + for round in 1..=32 { + // check that pushing the zero leaves into the MMR doesn't change its root + source.push_str(&leaf_assertion_code( + SmtNode::new(zero_leaf.into()), + ExitRoot::new(empty_mmr_root.into()), + round, + )); + } + + source.push_str("end"); + + let tx_script = CodeBuilder::new() + .with_statically_linked_library(&agglayer_library())? 
+ .compile_tx_script(source)?; + + TransactionContextBuilder::with_existing_mock_account() + .tx_script(tx_script.clone()) + .build()? + .execute() + .await?; + + Ok(()) +} + +// SOLIDITY COMPATIBILITY TESTS +// ================================================================================================ +// These tests verify that the Rust KeccakMmrFrontier32 implementation produces identical +// results to the Solidity DepositContractBase.sol implementation. +// Test vectors generated from: https://github.com/agglayer/agglayer-contracts +// Run `make generate-solidity-test-vectors` to regenerate the test vectors. + +use super::test_utils::{SOLIDITY_CANONICAL_ZEROS, SOLIDITY_MMR_FRONTIER_VECTORS}; + +/// Verifies that the Rust KeccakMmrFrontier32 produces the same canonical zeros as Solidity. +#[test] +fn test_solidity_canonical_zeros_compatibility() { + for (height, expected_hex) in SOLIDITY_CANONICAL_ZEROS.canonical_zeros.iter().enumerate() { + let expected = Keccak256Digest::try_from(expected_hex.as_str()).unwrap(); + let actual = CANONICAL_ZEROS_32[height]; + + assert_eq!( + actual, expected, + "canonical zero mismatch at height {}: expected {}, got {:?}", + height, expected_hex, actual + ); + } +} + +/// Verifies that the Rust KeccakMmrFrontier32 produces the same roots as Solidity's +/// DepositContractBase after adding each leaf. 
+// NOTE(review): this chunk is diff text whose exact intra-string whitespace was
+// collapsed by extraction; the MASM raw string below is re-indented plausibly —
+// confirm against the repository before relying on byte-exact content.
+#[test]
+fn test_solidity_mmr_frontier_compatibility() {
+    let v = &*SOLIDITY_MMR_FRONTIER_VECTORS;
+
+    // Validate parallel arrays have same length
+    assert_eq!(v.leaves.len(), v.roots.len());
+    assert_eq!(v.leaves.len(), v.counts.len());
+
+    // Tree height 32 matches Solidity's DepositContractBase depth.
+    let mut mmr_frontier = KeccakMmrFrontier32::<32>::new();
+
+    // Replay every Solidity-generated leaf and compare root + count after each append.
+    for i in 0..v.leaves.len() {
+        let leaf = Keccak256Digest::try_from(v.leaves[i].as_str()).unwrap();
+        let expected_root = Keccak256Digest::try_from(v.roots[i].as_str()).unwrap();
+
+        let actual_root = mmr_frontier.append_and_update_frontier(leaf);
+        let actual_count = mmr_frontier.num_leaves;
+
+        assert_eq!(
+            actual_count, v.counts[i],
+            "leaf count mismatch after adding leaf {}: expected {}, got {}",
+            v.leaves[i], v.counts[i], actual_count
+        );
+
+        assert_eq!(
+            actual_root, expected_root,
+            "root mismatch after adding leaf {} (count={}): expected {}, got {:?}",
+            v.leaves[i], v.counts[i], v.roots[i], actual_root
+        );
+    }
+}
+
+// HELPER FUNCTIONS
+// ================================================================================================
+
+// Builds a MASM snippet that appends `leaf` to the in-VM MMR frontier and asserts
+// the resulting root and leaf count. LO/HI word order matches what
+// `append_and_update_frontier` leaves on the stack (see stack comments below).
+fn leaf_assertion_code(leaf: SmtNode, expected_root: ExitRoot, num_leaves: u32) -> String {
+    let [leaf_lo, leaf_hi] = leaf.to_words();
+    let [root_lo, root_hi] = expected_root.to_words();
+
+    format!(
+        r#"
+        # load the provided leaf onto the stack
+        push.{leaf_hi}
+        push.{leaf_lo}
+
+        # add this leaf to the MMR frontier
+        exec.mmr_frontier32_keccak::append_and_update_frontier
+        # => [NEW_ROOT_LO, NEW_ROOT_HI, new_leaf_count]
+
+        # assert the root correctness after the first leaf was added
+        push.{root_lo}
+        push.{root_hi}
+        movdnw.3
+        # => [EXPECTED_ROOT_LO, NEW_ROOT_LO, NEW_ROOT_HI, EXPECTED_ROOT_HI, new_leaf_count]
+
+        assert_eqw.err="MMR root (LO) is incorrect"
+        # => [NEW_ROOT_HI, EXPECTED_ROOT_HI, new_leaf_count]
+
+        assert_eqw.err="MMR root (HI) is incorrect"
+        # => [new_leaf_count]
+
+        # assert the new number of leaves
+        push.{num_leaves}
+        assert_eq.err="new leaf count is incorrect"
+        "#
+    )
+}
diff --git
a/crates/miden-testing/tests/agglayer/mod.rs b/crates/miden-testing/tests/agglayer/mod.rs index 2a6d344c67..a497f74230 100644 --- a/crates/miden-testing/tests/agglayer/mod.rs +++ b/crates/miden-testing/tests/agglayer/mod.rs @@ -1,4 +1,10 @@ pub mod asset_conversion; mod bridge_in; mod bridge_out; +mod config_bridge; +mod global_index; +mod leaf_utils; +mod mmr_frontier; mod solidity_miden_address_conversion; +pub mod test_utils; +mod update_ger; diff --git a/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs b/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs index 2083a9dd36..33fa9597cc 100644 --- a/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs +++ b/crates/miden-testing/tests/agglayer/solidity_miden_address_conversion.rs @@ -5,8 +5,15 @@ use alloc::sync::Arc; use miden_agglayer::{EthAddressFormat, agglayer_library}; use miden_assembly::{Assembler, DefaultSourceManager}; use miden_core_lib::CoreLibrary; -use miden_processor::fast::{ExecutionOutput, FastProcessor}; -use miden_processor::{AdviceInputs, DefaultHost, ExecutionError, Program, StackInputs}; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ + DefaultHost, + ExecutionError, + ExecutionOutput, + FastProcessor, + Program, + StackInputs, +}; use miden_protocol::Felt; use miden_protocol::account::AccountId; use miden_protocol::address::NetworkId; @@ -36,10 +43,11 @@ async fn execute_program_with_default_host( let asset_conversion_lib = agglayer_library(); host.load_library(asset_conversion_lib.mast_forest()).unwrap(); - let stack_inputs = StackInputs::new(vec![]).unwrap(); + let stack_inputs = StackInputs::new(&[]).unwrap(); let advice_inputs = AdviceInputs::default(); - let processor = FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs); + let processor = + FastProcessor::new(stack_inputs).with_advice(advice_inputs).with_debugging(true); processor.execute(&program, &mut host).await } @@ -109,31 +117,31 @@ 
async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { let eth_address = EthAddressFormat::from_account_id(*original_account_id); let address_felts = eth_address.to_elements().to_vec(); - let le: Vec = address_felts + let limbs: Vec = address_felts .iter() .map(|f| { - let val = f.as_int(); + let val = f.as_canonical_u64(); assert!(val <= u32::MAX as u64, "felt value {} exceeds u32::MAX", val); val as u32 }) .collect(); - assert_eq!(le[4], 0, "test {}: expected msw limb (le[4]) to be zero", idx); + let limb0 = limbs[0]; + let limb1 = limbs[1]; + let limb2 = limbs[2]; + let limb3 = limbs[3]; + let limb4 = limbs[4]; - let addr0 = le[0]; - let addr1 = le[1]; - let addr2 = le[2]; - let addr3 = le[3]; - let addr4 = le[4]; + assert_eq!(limb0, 0, "test {}: expected msb limb (limb0) to be zero", idx); let account_id_felts: [Felt; 2] = (*original_account_id).into(); - let expected_prefix = account_id_felts[0].as_int(); - let expected_suffix = account_id_felts[1].as_int(); + let expected_prefix = account_id_felts[0]; + let expected_suffix = account_id_felts[1]; let script_code = format!( r#" use miden::core::sys - use miden::agglayer::eth_address + use miden::agglayer::common::eth_address begin push.{}.{}.{}.{}.{} @@ -141,7 +149,7 @@ async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { exec.sys::truncate_stack end "#, - addr4, addr3, addr2, addr1, addr0 + limb4, limb3, limb2, limb1, limb0 ); let program = Assembler::new(Arc::new(DefaultSourceManager::default())) @@ -154,14 +162,13 @@ async fn test_ethereum_address_to_account_id_in_masm() -> anyhow::Result<()> { let exec_output = execute_program_with_default_host(program).await?; - let actual_prefix = exec_output.stack[0].as_int(); - let actual_suffix = exec_output.stack[1].as_int(); + let actual_suffix = exec_output.stack[0]; + let actual_prefix = exec_output.stack[1]; assert_eq!(actual_prefix, expected_prefix, "test {}: prefix mismatch", idx); assert_eq!(actual_suffix, 
expected_suffix, "test {}: suffix mismatch", idx); - let reconstructed_account_id = - AccountId::try_from([Felt::new(actual_prefix), Felt::new(actual_suffix)])?; + let reconstructed_account_id = AccountId::try_from_elements(actual_suffix, actual_prefix)?; assert_eq!( reconstructed_account_id, *original_account_id, diff --git a/crates/miden-testing/tests/agglayer/test_utils.rs b/crates/miden-testing/tests/agglayer/test_utils.rs new file mode 100644 index 0000000000..734e8050c3 --- /dev/null +++ b/crates/miden-testing/tests/agglayer/test_utils.rs @@ -0,0 +1,343 @@ +extern crate alloc; + +use alloc::string::String; +use alloc::sync::Arc; +use alloc::vec::Vec; + +use miden_agglayer::claim_note::{Keccak256Output, ProofData, SmtNode}; +use miden_agglayer::{ + EthAddressFormat, + EthAmount, + ExitRoot, + GlobalIndex, + LeafData, + MetadataHash, + agglayer_library, +}; +use miden_assembly::{Assembler, DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_processor::advice::AdviceInputs; +use miden_processor::{ + DefaultHost, + ExecutionError, + ExecutionOutput, + FastProcessor, + Program, + StackInputs, +}; +use miden_protocol::transaction::TransactionKernel; +use miden_protocol::utils::sync::LazyLock; +use miden_tx::utils::hex_to_bytes; +use serde::Deserialize; + +// EMBEDDED TEST VECTOR JSON FILES +// ================================================================================================ + +/// Claim asset test vectors JSON — contains both LeafData and ProofData from a real claimAsset +/// transaction. +const CLAIM_ASSET_VECTORS_JSON: &str = include_str!( + "../../../miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_real_tx.json" +); + +/// Bridge asset test vectors JSON — contains test data for an L1 bridgeAsset transaction. 
+const BRIDGE_ASSET_VECTORS_JSON: &str = include_str!(
+    "../../../miden-agglayer/solidity-compat/test-vectors/claim_asset_vectors_local_tx.json"
+);
+
+/// Leaf data test vectors JSON from the Foundry-generated file.
+pub const LEAF_VALUE_VECTORS_JSON: &str =
+    include_str!("../../../miden-agglayer/solidity-compat/test-vectors/leaf_value_vectors.json");
+
+/// Merkle proof verification vectors JSON from the Foundry-generated file.
+pub const MERKLE_PROOF_VECTORS_JSON: &str =
+    include_str!("../../../miden-agglayer/solidity-compat/test-vectors/merkle_proof_vectors.json");
+
+/// Canonical zeros JSON from the Foundry-generated file.
+pub const CANONICAL_ZEROS_JSON: &str =
+    include_str!("../../../miden-agglayer/solidity-compat/test-vectors/canonical_zeros.json");
+
+/// MMR frontier vectors JSON from the Foundry-generated file.
+pub const MMR_FRONTIER_VECTORS_JSON: &str =
+    include_str!("../../../miden-agglayer/solidity-compat/test-vectors/mmr_frontier_vectors.json");
+
+// SERDE HELPERS
+// ================================================================================================
+
+/// Deserializes a JSON value that may be either a number or a string into a `String`.
+///
+/// Foundry's `vm.serializeUint` outputs JSON numbers for uint256 values.
+/// This deserializer accepts both `"100"` (string) and `100` (number) forms.
+// NOTE(review): the generic argument lists in this span were dropped by the text
+// extraction of this diff (`Result`, `Vec` appeared bare); they are restored here
+// from the documented contract and call sites.
+fn deserialize_uint_to_string<'de, D>(deserializer: D) -> Result<String, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    // Parse into a generic JSON value first so both scalar forms are accepted.
+    let value = serde_json::Value::deserialize(deserializer)?;
+    match value {
+        serde_json::Value::String(s) => Ok(s),
+        serde_json::Value::Number(n) => Ok(n.to_string()),
+        _ => Err(serde::de::Error::custom("expected a number or string for amount")),
+    }
+}
+
+/// Deserializes a JSON array of values that may be either numbers or strings into `Vec<String>`.
+///
+/// Array-level counterpart of [`deserialize_uint_to_string`].
+fn deserialize_uint_vec_to_strings<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+    D: serde::Deserializer<'de>,
+{
+    let values = Vec::<serde_json::Value>::deserialize(deserializer)?;
+    values
+        .into_iter()
+        .map(|v| match v {
+            serde_json::Value::String(s) => Ok(s),
+            serde_json::Value::Number(n) => Ok(n.to_string()),
+            _ => Err(serde::de::Error::custom("expected a number or string for amount")),
+        })
+        .collect()
+}
+
+// TEST VECTOR TYPES
+// ================================================================================================
+
+/// Deserialized leaf value test vector from Solidity-generated JSON.
+#[derive(Debug, Deserialize)]
+pub struct LeafValueVector {
+    pub origin_network: u32,
+    pub origin_token_address: String,
+    pub destination_network: u32,
+    pub destination_address: String,
+    #[serde(deserialize_with = "deserialize_uint_to_string")]
+    pub amount: String,
+    pub metadata_hash: String,
+    #[allow(dead_code)]
+    pub leaf_value: String,
+}
+
+impl LeafValueVector {
+    /// Converts this test vector into a `LeafData` instance.
+    ///
+    /// Panics (via `expect`) if any hex field or the amount string is malformed —
+    /// acceptable for test-vector parsing.
+    pub fn to_leaf_data(&self) -> LeafData {
+        LeafData {
+            origin_network: self.origin_network,
+            origin_token_address: EthAddressFormat::from_hex(&self.origin_token_address)
+                .expect("valid origin token address hex"),
+            destination_network: self.destination_network,
+            destination_address: EthAddressFormat::from_hex(&self.destination_address)
+                .expect("valid destination address hex"),
+            amount: EthAmount::from_uint_str(&self.amount).expect("valid amount uint string"),
+            metadata_hash: MetadataHash::new(
+                hex_to_bytes(&self.metadata_hash).expect("valid metadata hash hex"),
+            ),
+        }
+    }
+}
+
+/// Deserialized proof value test vector from Solidity-generated JSON.
+/// Contains SMT proofs, exit roots, global index, and expected global exit root.
+#[derive(Debug, Deserialize)] +pub struct ProofValueVector { + pub smt_proof_local_exit_root: Vec, + pub smt_proof_rollup_exit_root: Vec, + pub global_index: String, + pub mainnet_exit_root: String, + pub rollup_exit_root: String, + /// Expected global exit root: keccak256(mainnetExitRoot || rollupExitRoot) + #[allow(dead_code)] + pub global_exit_root: String, +} + +impl ProofValueVector { + /// Converts this test vector into a `ProofData` instance. + pub fn to_proof_data(&self) -> ProofData { + let smt_proof_local: [SmtNode; 32] = self + .smt_proof_local_exit_root + .iter() + .map(|s| SmtNode::new(hex_to_bytes(s).expect("valid smt proof hex"))) + .collect::>() + .try_into() + .expect("expected 32 SMT proof nodes for local exit root"); + + let smt_proof_rollup: [SmtNode; 32] = self + .smt_proof_rollup_exit_root + .iter() + .map(|s| SmtNode::new(hex_to_bytes(s).expect("valid smt proof hex"))) + .collect::>() + .try_into() + .expect("expected 32 SMT proof nodes for rollup exit root"); + + ProofData { + smt_proof_local_exit_root: smt_proof_local, + smt_proof_rollup_exit_root: smt_proof_rollup, + global_index: GlobalIndex::from_hex(&self.global_index) + .expect("valid global index hex"), + mainnet_exit_root: Keccak256Output::new( + hex_to_bytes(&self.mainnet_exit_root).expect("valid mainnet exit root hex"), + ), + rollup_exit_root: Keccak256Output::new( + hex_to_bytes(&self.rollup_exit_root).expect("valid rollup exit root hex"), + ), + } + } +} + +/// Deserialized claim asset test vector from Solidity-generated JSON. +/// Contains both LeafData and ProofData from a real claimAsset transaction. +#[derive(Debug, Deserialize)] +pub struct ClaimAssetVector { + #[serde(flatten)] + pub proof: ProofValueVector, + + #[serde(flatten)] + pub leaf: LeafValueVector, +} + +/// Deserialized Merkle proof vectors from Solidity DepositContractBase.sol. +/// Uses parallel arrays for leaves and roots. 
For each element from leaves/roots there are 32 +/// elements from merkle_paths, which represent the merkle path for that leaf + root. +#[derive(Debug, Deserialize)] +pub struct MerkleProofVerificationFile { + pub leaves: Vec, + pub roots: Vec, + pub merkle_paths: Vec, +} + +/// Deserialized canonical zeros from Solidity DepositContractBase.sol. +#[derive(Debug, Deserialize)] +pub struct CanonicalZerosFile { + pub canonical_zeros: Vec, +} + +/// Deserialized MMR frontier vectors from Solidity DepositContractV2. +/// +/// Each leaf is produced by `getLeafValue` using the same hardcoded fields as `bridge_out.masm` +/// (leafType=0, originNetwork=64, metadataHash=0), parametrised by +/// a shared `origin_token_address`, `amounts[i]`, and per-index +/// `destination_networks[i]` / `destination_addresses[i]`. +/// +/// Amounts are serialized as uint256 values (JSON numbers). +#[derive(Debug, Deserialize)] +pub struct MmrFrontierVectorsFile { + pub leaves: Vec, + pub roots: Vec, + pub counts: Vec, + #[serde(deserialize_with = "deserialize_uint_vec_to_strings")] + pub amounts: Vec, + pub origin_token_address: String, + pub destination_networks: Vec, + pub destination_addresses: Vec, +} + +// LAZY-PARSED TEST VECTORS +// ================================================================================================ + +/// Lazily parsed claim asset test vector from the JSON file. +pub static CLAIM_ASSET_VECTOR: LazyLock = LazyLock::new(|| { + serde_json::from_str(CLAIM_ASSET_VECTORS_JSON) + .expect("failed to parse claim asset vectors JSON") +}); + +/// Lazily parsed bridge asset test vector from the JSON file (locally simulated L1 transaction). +pub static CLAIM_ASSET_VECTOR_LOCAL: LazyLock = LazyLock::new(|| { + serde_json::from_str(BRIDGE_ASSET_VECTORS_JSON) + .expect("failed to parse bridge asset vectors JSON") +}); + +/// Lazily parsed Merkle proof vectors from the JSON file. 
+// NOTE(review): the `LazyLock<...>`, `Option<...>` and `Result<...>` generic
+// arguments in this span were dropped by the text extraction of this diff; they
+// are restored from the struct definitions above and the executor call sites.
+pub static SOLIDITY_MERKLE_PROOF_VECTORS: LazyLock<MerkleProofVerificationFile> =
+    LazyLock::new(|| {
+        serde_json::from_str(MERKLE_PROOF_VECTORS_JSON)
+            .expect("failed to parse Merkle proof vectors JSON")
+    });
+
+/// Lazily parsed canonical zeros from the JSON file.
+pub static SOLIDITY_CANONICAL_ZEROS: LazyLock<CanonicalZerosFile> = LazyLock::new(|| {
+    serde_json::from_str(CANONICAL_ZEROS_JSON).expect("failed to parse canonical zeros JSON")
+});
+
+/// Lazily parsed MMR frontier vectors from the JSON file.
+pub static SOLIDITY_MMR_FRONTIER_VECTORS: LazyLock<MmrFrontierVectorsFile> = LazyLock::new(|| {
+    serde_json::from_str(MMR_FRONTIER_VECTORS_JSON)
+        .expect("failed to parse MMR frontier vectors JSON")
+});
+
+// HELPER FUNCTIONS
+// ================================================================================================
+
+/// Identifies the source of claim data used in bridge-in tests.
+#[derive(Debug, Clone, Copy)]
+pub enum ClaimDataSource {
+    /// Real on-chain claimAsset data from claim_asset_vectors_real_tx.json.
+    Real,
+    /// Locally simulated bridgeAsset data from claim_asset_vectors_local_tx.json.
+    Simulated,
+}
+
+impl ClaimDataSource {
+    /// Returns the `(ProofData, LeafData, ExitRoot)` tuple for this data source.
+    pub fn get_data(self) -> (ProofData, LeafData, ExitRoot) {
+        let vector = match self {
+            ClaimDataSource::Real => &*CLAIM_ASSET_VECTOR,
+            ClaimDataSource::Simulated => &*CLAIM_ASSET_VECTOR_LOCAL,
+        };
+        let ger = ExitRoot::new(
+            hex_to_bytes(&vector.proof.global_exit_root).expect("valid global exit root hex"),
+        );
+        (vector.proof.to_proof_data(), vector.leaf.to_leaf_data(), ger)
+    }
+}
+
+/// Execute a program with a default host and optional advice inputs.
+///
+/// Loads the transaction kernel, core, and agglayer libraries into the host and
+/// runs the program on a debug-enabled `FastProcessor` with an empty input stack.
+pub async fn execute_program_with_default_host(
+    program: Program,
+    advice_inputs: Option<AdviceInputs>,
+) -> Result<ExecutionOutput, ExecutionError> {
+    let mut host = DefaultHost::default();
+
+    let test_lib = TransactionKernel::library();
+    host.load_library(test_lib.mast_forest()).unwrap();
+
+    let std_lib = CoreLibrary::default();
+    host.load_library(std_lib.mast_forest()).unwrap();
+
+    // Register the core library's event handlers (e.g. for hashing events).
+    for (event_name, handler) in std_lib.handlers() {
+        host.register_handler(event_name, handler)?;
+    }
+
+    let agglayer_lib = agglayer_library();
+    host.load_library(agglayer_lib.mast_forest()).unwrap();
+
+    let stack_inputs = StackInputs::new(&[]).unwrap();
+    let advice_inputs = advice_inputs.unwrap_or_default();
+
+    let processor =
+        FastProcessor::new(stack_inputs).with_advice(advice_inputs).with_debugging(true);
+    processor.execute(&program, &mut host).await
+}
+
+/// Execute a MASM script with the default host.
+///
+/// Assembles `script_code` against the core and agglayer libraries, then runs it
+/// via [`execute_program_with_default_host`] with no advice inputs.
+pub async fn execute_masm_script(script_code: &str) -> Result<ExecutionOutput, ExecutionError> {
+    let agglayer_lib = agglayer_library();
+
+    let program = Assembler::new(Arc::new(DefaultSourceManager::default()))
+        .with_dynamic_library(CoreLibrary::default())
+        .unwrap()
+        .with_dynamic_library(agglayer_lib)
+        .unwrap()
+        .assemble_program(script_code)
+        .unwrap();
+
+    execute_program_with_default_host(program, None).await
+}
+
+/// Helper to assert execution fails with a specific error message.
+pub async fn assert_execution_fails_with(script_code: &str, expected_error: &str) {
+    let result = execute_masm_script(script_code).await;
+    assert!(result.is_err(), "Expected execution to fail but it succeeded");
+    let error_msg = result.unwrap_err().to_string();
+    assert!(
+        error_msg.contains(expected_error),
+        "Expected error containing '{}', got: {}",
+        expected_error,
+        error_msg
+    );
+}
diff --git a/crates/miden-testing/tests/agglayer/update_ger.rs b/crates/miden-testing/tests/agglayer/update_ger.rs
new file mode 100644
index 0000000000..c6641dc00b
--- /dev/null
+++ b/crates/miden-testing/tests/agglayer/update_ger.rs
@@ -0,0 +1,285 @@
+extern
crate alloc; + +use alloc::string::String; +use alloc::sync::Arc; +use alloc::vec::Vec; + +use miden_agglayer::{ + AggLayerBridge, + ExitRoot, + UpdateGerNote, + agglayer_library, + create_existing_bridge_account, +}; +use miden_assembly::{Assembler, DefaultSourceManager}; +use miden_core_lib::CoreLibrary; +use miden_core_lib::handlers::keccak256::KeccakPreimage; +use miden_crypto::Felt; +use miden_crypto::hash::poseidon2::Poseidon2; +use miden_processor::utils::{bytes_to_packed_u32_elements, packed_u32_elements_to_bytes}; +use miden_protocol::Word; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::crypto::rand::FeltRng; +use miden_protocol::transaction::RawOutputNote; +use miden_protocol::utils::sync::LazyLock; +use miden_testing::{Auth, MockChain}; +use miden_tx::utils::hex_to_bytes; +use serde::Deserialize; + +use super::test_utils::execute_program_with_default_host; + +// EXIT ROOT TEST VECTORS +// ================================================================================================ +// Test vectors generated from Solidity's GlobalExitRootLib.calculateGlobalExitRoot +// Run `forge test --match-contract ExitRootsTestVectors` to regenerate. + +/// Exit roots JSON embedded at compile time from the Foundry-generated file. +const EXIT_ROOTS_JSON: &str = + include_str!("../../../miden-agglayer/solidity-compat/test-vectors/exit_roots.json"); + +/// Deserialized exit root vectors from Solidity GlobalExitRootLib +#[derive(Debug, Deserialize)] +struct ExitRootsFile { + mainnet_exit_roots: Vec, + rollup_exit_roots: Vec, + global_exit_roots: Vec, +} + +/// Lazily parsed exit root vectors from the JSON file. 
+static EXIT_ROOTS_VECTORS: LazyLock = LazyLock::new(|| { + serde_json::from_str(EXIT_ROOTS_JSON).expect("failed to parse exit roots JSON") +}); + +#[tokio::test] +async fn update_ger_note_updates_storage() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + // CREATE BRIDGE ADMIN ACCOUNT (not used in this test, but distinct from GER manager) + // -------------------------------------------------------------------------------------------- + let bridge_admin = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE GER MANAGER ACCOUNT (note sender) + // -------------------------------------------------------------------------------------------- + let ger_manager = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + // CREATE BRIDGE ACCOUNT + // -------------------------------------------------------------------------------------------- + let bridge_seed = builder.rng_mut().draw_word(); + let bridge_account = + create_existing_bridge_account(bridge_seed, bridge_admin.id(), ger_manager.id()); + builder.add_account(bridge_account.clone())?; + + // CREATE UPDATE_GER NOTE WITH 8 STORAGE ITEMS (NEW GER AS TWO WORDS) + // -------------------------------------------------------------------------------------------- + + let ger_bytes: [u8; 32] = [ + 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, + 0x77, 0x88, + ]; + let ger = ExitRoot::from(ger_bytes); + let update_ger_note = + UpdateGerNote::create(ger, ger_manager.id(), bridge_account.id(), builder.rng_mut())?; + + builder.add_output_note(RawOutputNote::Full(update_ger_note.clone())); + let mock_chain = builder.build()?; + + // EXECUTE UPDATE_GER NOTE AGAINST BRIDGE ACCOUNT + // 
-------------------------------------------------------------------------------------------- + let tx_context = mock_chain + .build_tx_context(bridge_account.id(), &[update_ger_note.id()], &[])? + .build()?; + let executed_transaction = tx_context.execute().await?; + + // VERIFY GER HASH WAS STORED IN MAP + // -------------------------------------------------------------------------------------------- + let mut updated_bridge_account = bridge_account.clone(); + updated_bridge_account.apply_delta(executed_transaction.account_delta())?; + + // Compute the expected GER hash: poseidon2::merge(GER_LOWER, GER_UPPER) + // The MASM loads GER_LOWER and GER_UPPER from note storage via mem_loadw_le, + // then calls poseidon2::merge which computes hash(GER_LOWER || GER_UPPER). + let ger_lower: Word = ger.to_elements()[0..4].try_into().unwrap(); + let ger_upper: Word = ger.to_elements()[4..8].try_into().unwrap(); + + let ger_hash = Poseidon2::merge(&[ger_lower, ger_upper]); + // TODO: use a helper getter on AggLayerBridge once available + // (see https://github.com/0xMiden/protocol/issues/2548) + let ger_storage_slot = AggLayerBridge::ger_map_slot_name(); + let stored_value = updated_bridge_account + .storage() + .get_map_item(ger_storage_slot, ger_hash) + .expect("GER hash should be stored in the map"); + + // The stored value should be [GER_KNOWN_FLAG, 0, 0, 0] = [1, 0, 0, 0] + let expected_value: Word = [Felt::ONE, Felt::ZERO, Felt::ZERO, Felt::ZERO].into(); + assert_eq!(stored_value, expected_value, "GER hash should map to [1, 0, 0, 0]"); + + Ok(()) +} + +/// Tests compute_ger with known mainnet and rollup exit roots. +/// +/// The GER (Global Exit Root) is computed as keccak256(mainnet_exit_root || rollup_exit_root). 
+#[tokio::test] +async fn compute_ger() -> anyhow::Result<()> { + let agglayer_lib = agglayer_library(); + let vectors = &*EXIT_ROOTS_VECTORS; + + for i in 0..vectors.mainnet_exit_roots.len() { + let mainnet_exit_root_bytes = + hex_to_bytes(vectors.mainnet_exit_roots[i].as_str()).expect("invalid hex string"); + let rollup_exit_root_bytes = + hex_to_bytes(vectors.rollup_exit_roots[i].as_str()).expect("invalid hex string"); + let expected_ger_bytes = + hex_to_bytes(vectors.global_exit_roots[i].as_str()).expect("invalid hex string"); + + // Convert expected GER to felts for comparison + let expected_ger_exit_root = ExitRoot::from(expected_ger_bytes); + let expected_ger_felts = expected_ger_exit_root.to_elements(); + + // Computed GER using keccak256 + let ger_preimage: Vec = + [mainnet_exit_root_bytes.as_ref(), rollup_exit_root_bytes.as_ref()].concat(); + let ger_preimage = KeccakPreimage::new(ger_preimage); + let computed_ger_felts: Vec = ger_preimage.digest().as_ref().to_vec(); + + assert_eq!( + computed_ger_felts, expected_ger_felts, + "Computed GER mismatch for test vector {}", + i + ); + + // Convert exit roots to packed u32 felts for memory initialization + let mainnet_felts = ExitRoot::from(mainnet_exit_root_bytes).to_elements(); + let rollup_felts = ExitRoot::from(rollup_exit_root_bytes).to_elements(); + + // Build memory initialization: mainnet at ptr 0, rollup at ptr 8 + let mem_init: Vec = mainnet_felts + .iter() + .chain(rollup_felts.iter()) + .enumerate() + .map(|(idx, f)| format!("push.{} mem_store.{}", f.as_canonical_u64(), idx)) + .collect(); + let mem_init_code = mem_init.join("\n"); + + let source = format!( + r#" + use miden::core::sys + use miden::agglayer::bridge::bridge_in + + begin + # Initialize memory with exit roots + {mem_init_code} + + # Call compute_ger with pointer to exit roots + push.0 + exec.bridge_in::compute_ger + exec.sys::truncate_stack + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + 
.with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, None).await?; + + let result_digest: Vec = exec_output.stack[0..8].to_vec(); + + assert_eq!(result_digest, expected_ger_felts, "GER mismatch for test vector {}", i); + } + + Ok(()) +} + +/// Tests compute_ger with known mainnet and rollup exit roots. +/// +/// The GER (Global Exit Root) is computed as keccak256(mainnet_exit_root || rollup_exit_root). +#[tokio::test] +async fn test_compute_ger_basic() -> anyhow::Result<()> { + let agglayer_lib = agglayer_library(); + + // Define test exit roots (32 bytes each) + let mainnet_exit_root: [u8; 32] = [ + 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, + 0x77, 0x88, + ]; + + let rollup_exit_root: [u8; 32] = [ + 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, + 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x88, 0x99, + ]; + + // Concatenate the two roots (64 bytes total) + let mut ger_preimage = Vec::with_capacity(64); + ger_preimage.extend_from_slice(&mainnet_exit_root); + ger_preimage.extend_from_slice(&rollup_exit_root); + + // Compute expected GER using keccak256 + let expected_ger_preimage = KeccakPreimage::new(ger_preimage.clone()); + let expected_ger_felts: [Felt; 8] = expected_ger_preimage.digest().as_ref().try_into().unwrap(); + + let ger_bytes: [u8; 32] = packed_u32_elements_to_bytes(&expected_ger_felts).try_into().unwrap(); + + let ger = ExitRoot::from(ger_bytes); + // sanity check + assert_eq!(ger.to_elements(), expected_ger_felts); + + // Convert exit roots to packed u32 felts for memory initialization + let mainnet_felts = bytes_to_packed_u32_elements(&mainnet_exit_root); 
+ let rollup_felts = bytes_to_packed_u32_elements(&rollup_exit_root); + + // Build memory initialization: mainnet at ptr 0, rollup at ptr 8 + let mem_init: Vec = mainnet_felts + .iter() + .chain(rollup_felts.iter()) + .enumerate() + .map(|(i, f)| format!("push.{} mem_store.{}", f.as_canonical_u64(), i)) + .collect(); + let mem_init_code = mem_init.join("\n"); + + let source = format!( + r#" + use miden::core::sys + use miden::agglayer::bridge::bridge_in + + begin + # Initialize memory with exit roots + {mem_init_code} + + # Call compute_ger with pointer to exit roots + push.0 + exec.bridge_in::compute_ger + exec.sys::truncate_stack + end + "# + ); + + let program = Assembler::new(Arc::new(DefaultSourceManager::default())) + .with_dynamic_library(CoreLibrary::default()) + .unwrap() + .with_dynamic_library(agglayer_lib.clone()) + .unwrap() + .assemble_program(&source) + .unwrap(); + + let exec_output = execute_program_with_default_host(program, None).await?; + + let result_digest: Vec = exec_output.stack[0..8].to_vec(); + + assert_eq!(result_digest, expected_ger_felts); + + Ok(()) +} diff --git a/crates/miden-testing/tests/auth/ecdsa_multisig.rs b/crates/miden-testing/tests/auth/hybrid_multisig.rs similarity index 69% rename from crates/miden-testing/tests/auth/ecdsa_multisig.rs rename to crates/miden-testing/tests/auth/hybrid_multisig.rs index c2ff6fad66..b4678ad8d0 100644 --- a/crates/miden-testing/tests/auth/ecdsa_multisig.rs +++ b/crates/miden-testing/tests/auth/hybrid_multisig.rs @@ -1,6 +1,6 @@ -use miden_processor::AdviceInputs; -use miden_processor::crypto::RpoRandomCoin; -use miden_protocol::account::auth::{AuthSecretKey, PublicKey}; +use miden_processor::advice::AdviceInputs; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey}; use miden_protocol::account::{ Account, AccountBuilder, @@ -14,19 +14,17 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, 
ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::vm::AdviceMap; use miden_protocol::{Felt, Hasher, Word}; -use miden_standards::account::auth::AuthEcdsaK256KeccakMultisig; -use miden_standards::account::components::ecdsa_k256_keccak_multisig_library; -use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; +use miden_standards::account::auth::AuthMultisig; +use miden_standards::account::components::multisig_library; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; -use miden_standards::errors::standards::ERR_TX_ALREADY_EXECUTED; -use miden_standards::note::create_p2id_note; +use miden_standards::note::P2idNote; use miden_standards::testing::account_interface::get_public_keys_from_account; use miden_testing::utils::create_spawn_note; -use miden_testing::{Auth, MockChainBuilder, assert_transaction_executor_error}; +use miden_testing::{Auth, MockChainBuilder}; use miden_tx::TransactionExecutorError; use miden_tx::auth::{BasicAuthenticator, SigningInputs, TransactionAuthenticator}; use rand::SeedableRng; @@ -36,7 +34,8 @@ use rand_chacha::ChaCha20Rng; // HELPER FUNCTIONS // ================================================================================================ -type MultisigTestSetup = (Vec, Vec, Vec); +type MultisigTestSetup = + (Vec, Vec, Vec, Vec); /// Sets up secret keys, public keys, and authenticators for multisig testing fn setup_keys_and_authenticators( @@ -47,15 +46,27 @@ fn setup_keys_and_authenticators( let mut rng = ChaCha20Rng::from_seed(seed); let mut secret_keys = Vec::new(); + let mut auth_schemes = Vec::new(); let mut public_keys = Vec::new(); let mut authenticators = Vec::new(); - for _ in 0..num_approvers { - let sec_key = AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng); + for index in 0..num_approvers { + let sec_key = if index % 2 == 0 { + 
AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng) + } else { + AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng) + }; let pub_key = sec_key.public_key(); secret_keys.push(sec_key); public_keys.push(pub_key); + + // Alternate scheme IDs between Falcon (2) and ECDSA (1) + if index % 2 == 0 { + auth_schemes.push(AuthScheme::Falcon512Poseidon2); + } else { + auth_schemes.push(AuthScheme::EcdsaK256Keccak); + } } // Create authenticators for required signers @@ -64,24 +75,23 @@ fn setup_keys_and_authenticators( authenticators.push(authenticator); } - Ok((secret_keys, public_keys, authenticators)) + Ok((secret_keys, auth_schemes, public_keys, authenticators)) } /// Creates a multisig account with the specified configuration fn create_multisig_account( threshold: u32, - public_keys: &[PublicKey], + approvers: &[(PublicKey, AuthScheme)], asset_amount: u64, proc_threshold_map: Vec<(Word, u32)>, ) -> anyhow::Result { - let approvers: Vec<_> = public_keys.iter().map(|pk| pk.to_commitment().into()).collect(); + let approvers = approvers + .iter() + .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme)) + .collect(); let multisig_account = AccountBuilder::new([0; 32]) - .with_auth_component(Auth::EcdsaK256KeccakMultisig { - threshold, - approvers, - proc_threshold_map, - }) + .with_auth_component(Auth::Multisig { threshold, approvers, proc_threshold_map }) .with_component(BasicWallet) .account_type(AccountType::RegularAccountUpdatableCode) .storage_mode(AccountStorageMode::Public) @@ -107,12 +117,19 @@ fn create_multisig_account( #[tokio::test] async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Setup keys and authenticators - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators(2, 2)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), 
*scheme)) + .collect::>(); // Create multisig account let multisig_starting_balance = 10u64; let mut multisig_account = - create_multisig_account(2, &public_keys, multisig_starting_balance, vec![])?; + create_multisig_account(2, &approvers, multisig_starting_balance, vec![])?; let output_note_asset = FungibleAsset::mock(0); @@ -137,7 +154,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Execute transaction without signatures - should fail let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt) .build()?; @@ -160,7 +177,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Execute transaction with signatures - should succeed let tx_context_execute = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg, sig_1) .add_signature(public_keys[1].to_commitment(), msg, sig_2) .auth_args(salt) @@ -194,10 +211,17 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { #[tokio::test] async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { // Setup keys and authenticators (4 approvers, all 4 can sign) - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(4, 4)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators(4, 4)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); // Create multisig account with 4 approvers but threshold of 2 - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) .unwrap() @@ -259,82 +283,6 @@ async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { Ok(()) } -/// Tests multisig replay protection to prevent transaction re-execution. -/// -/// This test verifies that a 2-of-3 multisig account properly prevents replay attacks -/// by rejecting attempts to execute the same transaction twice. The first execution -/// should succeed with valid signatures, but the second attempt with identical -/// parameters should fail with ERR_TX_ALREADY_EXECUTED. 
-/// -/// **Roles:** -/// - 3 Approvers (2 signers required) -/// - 1 Multisig Contract -#[tokio::test] -async fn test_multisig_replay_protection() -> anyhow::Result<()> { - // Setup keys and authenticators (3 approvers, but only 2 signers) - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(3, 2)?; - - // Create 2/3 multisig account - let multisig_account = create_multisig_account(2, &public_keys, 20, vec![])?; - - let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) - .unwrap() - .build() - .unwrap(); - - let salt = Word::from([Felt::new(3); 4]); - - // Execute transaction without signatures first to get tx summary - let tx_context_init = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .auth_args(salt) - .build()?; - - let tx_summary = match tx_context_init.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), - }; - - // Get signatures from 2 of the 3 approvers - let msg = tx_summary.as_ref().to_commitment(); - let tx_summary = SigningInputs::TransactionSummary(tx_summary); - - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary) - .await?; - let sig_2 = authenticators[1] - .get_signature(public_keys[1].to_commitment(), &tx_summary) - .await?; - - // Execute transaction with signatures - should succeed (first execution) - let tx_context_execute = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) - .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) - .auth_args(salt) - .build()?; - - let executed_tx = tx_context_execute.execute().await.expect("First transaction should succeed"); - - // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_tx)?; - mock_chain.prove_next_block()?; - - // Attempt to execute the same transaction again - should fail due to replay protection - let tx_context_replay = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .add_signature(public_keys[0].to_commitment(), msg, sig_1) - .add_signature(public_keys[1].to_commitment(), msg, sig_2) - .auth_args(salt) - .build()?; - - // This should fail due to replay protection - let result = tx_context_replay.execute().await; - assert_transaction_executor_error!(result, ERR_TX_ALREADY_EXECUTED); - - Ok(()) -} - /// Tests multisig signer update functionality. /// /// This test verifies that a multisig account can: @@ -349,10 +297,16 @@ async fn test_multisig_replay_protection() -> anyhow::Result<()> { /// - 1 Transaction Script calling multisig procedures #[tokio::test] async fn test_multisig_update_signers() -> anyhow::Result<()> { - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators(2, 2)?; - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; // SECTION 1: Execute a transaction script to update signers and threshold // ================================================================================ @@ -375,7 +329,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Setup 
new signers let mut advice_map = AdviceMap::default(); - let (_new_secret_keys, new_public_keys, _new_authenticators) = + let (_new_secret_keys, new_auth_schemes, new_public_keys, _new_authenticators) = setup_keys_and_authenticators(4, 4)?; let threshold = 3u64; @@ -390,10 +344,17 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { Felt::new(0), ]); - // Add each public key to the vector - for public_key in new_public_keys.iter().rev() { + for (public_key, auth_scheme) in new_public_keys.iter().rev().zip(new_auth_schemes.iter().rev()) + { let key_word: Word = public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(*auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); } // Hash the vector to create config hash @@ -405,12 +366,12 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::ecdsa_k256_keccak_multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(ecdsa_k256_keccak_multisig_library())? + .with_dynamically_linked_library(multisig_library())? 
.compile_tx_script(tx_script_code)?; let advice_inputs = AdviceInputs { @@ -475,7 +436,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - .get_map_item(AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), storage_key) + .get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) .unwrap(); let expected_word: Word = expected_key.to_commitment().into(); @@ -486,7 +447,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Verify the threshold was updated by checking the config storage slot let threshold_config_storage = updated_multisig_account .storage() - .get_item(AuthEcdsaK256KeccakMultisig::threshold_config_slot()) + .get_item(AuthMultisig::threshold_config_slot()) .unwrap(); assert_eq!( @@ -538,7 +499,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { } // Create a new output note for the second transaction with new signers - let output_note_new = create_p2id_note( + let output_note_new = P2idNote::create( updated_multisig_account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), vec![output_note_asset], @@ -555,13 +516,13 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Build the new mock chain with the updated account and notes let mut new_mock_chain_builder = MockChainBuilder::with_accounts([updated_multisig_account.clone()]).unwrap(); - new_mock_chain_builder.add_output_note(OutputNote::Full(input_note_new.clone())); + new_mock_chain_builder.add_output_note(RawOutputNote::Full(input_note_new.clone())); let new_mock_chain = new_mock_chain_builder.build().unwrap(); // Execute transaction without signatures first to get tx summary let tx_context_init_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt_new) .build()?; @@ -590,7 +551,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Execute transaction with new signatures - should succeed let tx_context_execute_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note_new)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_new)]) .add_signature(new_public_keys[0].to_commitment(), msg_new, sig_1_new) .add_signature(new_public_keys[1].to_commitment(), msg_new, sig_2_new) .add_signature(new_public_keys[2].to_commitment(), msg_new, sig_3_new) @@ -620,8 +581,14 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { #[tokio::test] async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Setup 5 original owners with threshold 4 - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(5, 5)?; - let multisig_account = create_multisig_account(4, &public_keys, 10, vec![])?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators(5, 5)?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + let multisig_account = create_multisig_account(4, &approvers, 10, vec![])?; // Build mock chain let mock_chain_builder = MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); @@ -629,6 +596,8 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Setup new signers (remove the last 3 owners, keeping first 2) let new_public_keys = &public_keys[0..2]; + let new_auth_schemes = &auth_schemes[0..2]; + let threshold = 1u64; let num_of_approvers = 2u64; @@ -636,10 +605,18 @@ async fn 
test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let mut config_and_pubkeys_vector = vec![Felt::new(threshold), Felt::new(num_of_approvers), Felt::new(0), Felt::new(0)]; - // Add public keys in reverse order - for public_key in new_public_keys.iter().rev() { + // Add each public key to the vector + for (public_key, auth_scheme) in new_public_keys.iter().rev().zip(new_auth_schemes.iter().rev()) + { let key_word: Word = public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(*auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); } // Create config hash and advice map @@ -649,10 +626,8 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Create transaction script let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(ecdsa_k256_keccak_multisig_library())? - .compile_tx_script( - "begin\n call.::ecdsa_k256_keccak_multisig::update_signers_and_threshold\nend", - )?; + .with_dynamically_linked_library(multisig_library())? 
+ .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; @@ -720,7 +695,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - .get_map_item(AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), storage_key) + .get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) .unwrap(); let expected_word: Word = expected_key.to_commitment().into(); assert_eq!(storage_item, expected_word, "Public key {} doesn't match", i); @@ -729,7 +704,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Verify threshold and num_approvers let threshold_config = updated_multisig_account .storage() - .get_item(AuthEcdsaK256KeccakMultisig::threshold_config_slot()) + .get_item(AuthMultisig::threshold_config_slot()) .unwrap(); assert_eq!(threshold_config[0], Felt::new(threshold), "Threshold not updated"); assert_eq!(threshold_config[1], Felt::new(num_of_approvers), "Num approvers not updated"); @@ -752,10 +727,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { [Felt::new(removed_idx), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let removed_owner_slot = updated_multisig_account .storage() - .get_map_item( - AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), - removed_owner_key, - ) + .get_map_item(AuthMultisig::approver_public_keys_slot(), removed_owner_key) .unwrap(); assert_eq!( removed_owner_slot, @@ -771,7 +743,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - 
.get_map_item(AuthEcdsaK256KeccakMultisig::approver_public_keys_slot(), storage_key) + .get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) .unwrap(); if storage_item != Word::empty() { @@ -806,10 +778,16 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // SECTION 1: Create a multisig account with 2 original approvers // ================================================================================ - let (_secret_keys, public_keys, _authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, _authenticators) = + setup_keys_and_authenticators(2, 2)?; - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) .unwrap() .build() @@ -824,7 +802,7 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // Setup new signers (these should NOT be able to sign the update transaction) let mut advice_map = AdviceMap::default(); - let (_new_secret_keys, new_public_keys, new_authenticators) = + let (_new_secret_keys, new_auth_schemes, new_public_keys, new_authenticators) = setup_keys_and_authenticators(4, 4)?; let threshold = 3u64; @@ -840,9 +818,17 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu ]); // Add each public key to the vector - for public_key in new_public_keys.iter().rev() { + for (public_key, auth_scheme) in new_public_keys.iter().rev().zip(new_auth_schemes.iter().rev()) + { let key_word: Word = public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(*auth_scheme as u64), + Felt::new(0), + 
Felt::new(0), + Felt::new(0), + ]); } // Hash the vector to create config hash @@ -854,12 +840,12 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::ecdsa_k256_keccak_multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(ecdsa_k256_keccak_multisig_library())? + .with_dynamically_linked_library(multisig_library())? .compile_tx_script(tx_script_code)?; let advice_inputs = AdviceInputs { @@ -923,161 +909,3 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu Ok(()) } - -/// Tests that 1-of-2 approvers can consume a note but 2-of-2 are required to send a note. -/// -/// This test verifies that a multisig account with 2 approvers and threshold 2, but a procedure -/// threshold of 1 for note consumption, can: -/// 1. Consume a note when only one approver signs the transaction -/// 2. Send a note only when both approvers sign the transaction (default threshold) -#[tokio::test] -async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { - // Setup keys and authenticators - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; - - let proc_threshold_map = vec![(BasicWallet::receive_asset_digest(), 1)]; - - // Create multisig account - let multisig_starting_balance = 10u64; - let mut multisig_account = - create_multisig_account(2, &public_keys, multisig_starting_balance, proc_threshold_map)?; - - // SECTION 1: Test note consumption with 1 signature - // ================================================================================ - - // 1. 
create a mock note from some random account - let mut mock_chain_builder = - MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); - - let note = mock_chain_builder.add_p2id_note( - multisig_account.id(), - multisig_account.id(), - &[FungibleAsset::mock(1)], - NoteType::Public, - )?; - - let mut mock_chain = mock_chain_builder.build()?; - - // 2. consume without signatures - let salt = Word::from([Felt::new(1); 4]); - let tx_context = mock_chain - .build_tx_context(multisig_account.id(), &[note.id()], &[])? - .auth_args(salt) - .build()?; - - let tx_summary = match tx_context.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_summary) => tx_summary, - error => panic!("expected abort with tx summary: {error:?}"), - }; - - // 3. get signature from one approver - let msg = tx_summary.as_ref().to_commitment(); - let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary.clone()); - let sig = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) - .await?; - - // 4. execute with signature - let tx_result = mock_chain - .build_tx_context(multisig_account.id(), &[note.id()], &[])? - .add_signature(public_keys[0].to_commitment(), msg, sig) - .auth_args(salt) - .build()? 
- .execute() - .await; - - assert!(tx_result.is_ok(), "Note consumption with 1 signature should succeed"); - - // Apply the transaction to the account - multisig_account.apply_delta(tx_result.as_ref().unwrap().account_delta())?; - mock_chain.add_pending_executed_transaction(&tx_result.unwrap())?; - mock_chain.prove_next_block()?; - - // SECTION 2: Test note sending requires 2 signatures - // ================================================================================ - - let salt2 = Word::from([Felt::new(2); 4]); - - // Create output note to send 5 units from the account - let output_note = create_p2id_note( - multisig_account.id(), - ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), - vec![FungibleAsset::mock(5)], - NoteType::Public, - Default::default(), - &mut RpoRandomCoin::new(Word::from([Felt::new(42); 4])), - )?; - let multisig_account_interface = AccountInterface::from_account(&multisig_account); - let send_note_transaction_script = - multisig_account_interface.build_send_notes_script(&[output_note.clone().into()], None)?; - - // Execute transaction without signatures to get tx summary - let tx_context_init = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) - .tx_script(send_note_transaction_script.clone()) - .auth_args(salt2) - .build()?; - - let tx_summary2 = match tx_context_init.execute().await.unwrap_err() { - TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), - }; - // Get signature from only ONE approver - let msg2 = tx_summary2.as_ref().to_commitment(); - let tx_summary2_signing = SigningInputs::TransactionSummary(tx_summary2.clone()); - - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary2_signing) - .await?; - - // Try to execute with only 1 signature - should FAIL - let tx_context_one_sig = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) - .add_signature(public_keys[0].to_commitment(), msg2, sig_1) - .tx_script(send_note_transaction_script.clone()) - .auth_args(salt2) - .build()?; - - let result = tx_context_one_sig.execute().await; - match result { - Err(TransactionExecutorError::Unauthorized(_)) => { - // Expected: transaction should fail with insufficient signatures - }, - _ => panic!( - "Transaction should fail with Unauthorized error when only 1 signature provided for note sending" - ), - } - - // Now get signatures from BOTH approvers - let sig_1 = authenticators[0] - .get_signature(public_keys[0].to_commitment(), &tx_summary2_signing) - .await?; - let sig_2 = authenticators[1] - .get_signature(public_keys[1].to_commitment(), &tx_summary2_signing) - .await?; - - // Execute with 2 signatures - should SUCCEED - let result = mock_chain - .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) - .add_signature(public_keys[0].to_commitment(), msg2, sig_1) - .add_signature(public_keys[1].to_commitment(), msg2, sig_2) - .auth_args(salt2) - .tx_script(send_note_transaction_script) - .build()? - .execute() - .await; - - assert!(result.is_ok(), "Transaction should succeed with 2 signatures for note sending"); - - // Apply the transaction to the account - multisig_account.apply_delta(result.as_ref().unwrap().account_delta())?; - mock_chain.add_pending_executed_transaction(&result.unwrap())?; - mock_chain.prove_next_block()?; - - assert_eq!(multisig_account.vault().get_balance(FungibleAsset::mock_issuer())?, 6); - - Ok(()) -} diff --git a/crates/miden-testing/tests/auth/mod.rs b/crates/miden-testing/tests/auth/mod.rs index e2619483f9..33d6f35bde 100644 --- a/crates/miden-testing/tests/auth/mod.rs +++ b/crates/miden-testing/tests/auth/mod.rs @@ -1,7 +1,7 @@ -mod rpo_falcon_acl; +mod singlesig_acl; mod multisig; -mod ecdsa_acl; +mod hybrid_multisig; -mod ecdsa_multisig; +mod multisig_psm; diff --git a/crates/miden-testing/tests/auth/multisig.rs b/crates/miden-testing/tests/auth/multisig.rs index e205cbfe3e..9145044843 100644 --- a/crates/miden-testing/tests/auth/multisig.rs +++ b/crates/miden-testing/tests/auth/multisig.rs @@ -1,6 +1,6 @@ -use miden_processor::AdviceInputs; -use miden_processor::crypto::RpoRandomCoin; -use miden_protocol::account::auth::{AuthSecretKey, PublicKey}; +use miden_processor::advice::AdviceInputs; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey}; use miden_protocol::account::{ Account, AccountBuilder, @@ -14,16 +14,19 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::vm::AdviceMap; use 
miden_protocol::{Felt, Hasher, Word}; -use miden_standards::account::auth::AuthFalcon512RpoMultisig; -use miden_standards::account::components::falcon_512_rpo_multisig_library; +use miden_standards::account::auth::AuthMultisig; +use miden_standards::account::components::multisig_library; use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::account::wallets::BasicWallet; use miden_standards::code_builder::CodeBuilder; -use miden_standards::errors::standards::ERR_TX_ALREADY_EXECUTED; -use miden_standards::note::create_p2id_note; +use miden_standards::errors::standards::{ + ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS, + ERR_TX_ALREADY_EXECUTED, +}; +use miden_standards::note::P2idNote; use miden_standards::testing::account_interface::get_public_keys_from_account; use miden_testing::utils::create_spawn_note; use miden_testing::{Auth, MockChainBuilder, assert_transaction_executor_error}; @@ -31,30 +34,41 @@ use miden_tx::TransactionExecutorError; use miden_tx::auth::{BasicAuthenticator, SigningInputs, TransactionAuthenticator}; use rand::SeedableRng; use rand_chacha::ChaCha20Rng; +use rstest::rstest; // ================================================================================================ // HELPER FUNCTIONS // ================================================================================================ -type MultisigTestSetup = (Vec, Vec, Vec); +type MultisigTestSetup = + (Vec, Vec, Vec, Vec); -/// Sets up secret keys, public keys, and authenticators for multisig testing -fn setup_keys_and_authenticators( +/// Sets up secret keys, public keys, and authenticators for multisig testing for the given scheme. 
+fn setup_keys_and_authenticators_with_scheme( num_approvers: usize, threshold: usize, + auth_scheme: AuthScheme, ) -> anyhow::Result { let seed: [u8; 32] = rand::random(); let mut rng = ChaCha20Rng::from_seed(seed); let mut secret_keys = Vec::new(); + let mut auth_schemes = Vec::new(); let mut public_keys = Vec::new(); let mut authenticators = Vec::new(); for _ in 0..num_approvers { - let sec_key = AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng); + let sec_key = match auth_scheme { + AuthScheme::EcdsaK256Keccak => AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng), + AuthScheme::Falcon512Poseidon2 => { + AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng) + }, + _ => anyhow::bail!("unsupported auth scheme for this test: {auth_scheme:?}"), + }; let pub_key = sec_key.public_key(); secret_keys.push(sec_key); + auth_schemes.push(auth_scheme); public_keys.push(pub_key); } @@ -64,17 +78,20 @@ fn setup_keys_and_authenticators( authenticators.push(authenticator); } - Ok((secret_keys, public_keys, authenticators)) + Ok((secret_keys, auth_schemes, public_keys, authenticators)) } /// Creates a multisig account with the specified configuration fn create_multisig_account( threshold: u32, - public_keys: &[PublicKey], + approvers: &[(PublicKey, AuthScheme)], asset_amount: u64, proc_threshold_map: Vec<(Word, u32)>, ) -> anyhow::Result { - let approvers: Vec<_> = public_keys.iter().map(|pk| pk.to_commitment().into()).collect(); + let approvers = approvers + .iter() + .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme)) + .collect(); let multisig_account = AccountBuilder::new([0; 32]) .with_auth_component(Auth::Multisig { threshold, approvers, proc_threshold_map }) @@ -100,15 +117,27 @@ fn create_multisig_account( /// **Roles:** /// - 2 Approvers (multisig signers) /// - 1 Multisig Contract +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn 
test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { +async fn test_multisig_2_of_2_with_note_creation( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { // Setup keys and authenticators - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); // Create multisig account let multisig_starting_balance = 10u64; let mut multisig_account = - create_multisig_account(2, &public_keys, multisig_starting_balance, vec![])?; + create_multisig_account(2, &approvers, multisig_starting_balance, vec![])?; let output_note_asset = FungibleAsset::mock(0); @@ -133,13 +162,13 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Execute transaction without signatures - should fail let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt) .build()?; let tx_summary = match tx_context_init.execute().await.unwrap_err() { TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), + error => anyhow::bail!("expected abort with tx effects: {error}"), }; // Get signatures from both approvers @@ -156,7 +185,7 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { // Execute transaction with signatures - should succeed let tx_context_execute = mock_chain .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg, sig_1) .add_signature(public_keys[1].to_commitment(), msg, sig_2) .auth_args(salt) @@ -187,13 +216,25 @@ async fn test_multisig_2_of_2_with_note_creation() -> anyhow::Result<()> { /// implementation correctly validates signatures from any valid subset. /// /// **Tested combinations:** (0,1), (0,2), (0,3), (1,2), (1,3), (2,3) +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { +async fn test_multisig_2_of_4_all_signer_combinations( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { // Setup keys and authenticators (4 approvers, all 4 can sign) - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(4, 4)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(4, 4, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); // Create multisig account with 4 approvers but threshold of 2 - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) .unwrap() @@ -221,7 +262,7 @@ async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { let tx_summary = match tx_context_init.execute().await.unwrap_err() { TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, - error => panic!("expected abort with tx effects: {error:?}"), + error => anyhow::bail!("expected abort with tx effects: {error}"), }; // Get signatures from the specific combination of signers 
@@ -265,13 +306,23 @@ async fn test_multisig_2_of_4_all_signer_combinations() -> anyhow::Result<()> { /// **Roles:** /// - 3 Approvers (2 signers required) /// - 1 Multisig Contract +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_replay_protection() -> anyhow::Result<()> { +async fn test_multisig_replay_protection(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { // Setup keys and authenticators (3 approvers, but only 2 signers) - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(3, 2)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(3, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); // Create 2/3 multisig account - let multisig_account = create_multisig_account(2, &public_keys, 20, vec![])?; + let multisig_account = create_multisig_account(2, &approvers, 20, vec![])?; let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) .unwrap() @@ -308,12 +359,12 @@ async fn test_multisig_replay_protection() -> anyhow::Result<()> { .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) .auth_args(salt) - .build()?; - - let executed_tx = tx_context_execute.execute().await.expect("First transaction should succeed"); + .build()? 
+ .execute() + .await?; // Apply the transaction to the mock chain - mock_chain.add_pending_executed_transaction(&executed_tx)?; + mock_chain.add_pending_executed_transaction(&tx_context_execute)?; mock_chain.prove_next_block()?; // Attempt to execute the same transaction again - should fail due to replay protection @@ -343,11 +394,21 @@ async fn test_multisig_replay_protection() -> anyhow::Result<()> { /// - 4 New Approvers (updated multisig signers) /// - 1 Multisig Contract /// - 1 Transaction Script calling multisig procedures +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_update_signers() -> anyhow::Result<()> { - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; +async fn test_multisig_update_signers(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; // SECTION 1: Execute a transaction script to update signers and threshold // ================================================================================ @@ -371,8 +432,8 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Setup new signers let mut advice_map = AdviceMap::default(); - let (_new_secret_keys, new_public_keys, _new_authenticators) = - setup_keys_and_authenticators(4, 4)?; + let (_new_secret_keys, _new_auth_schemes, new_public_keys, _new_authenticators) = + setup_keys_and_authenticators_with_scheme(4, 4, auth_scheme)?; let threshold = 3u64; let num_of_approvers = 4u64; @@ -390,6 +451,13 @@ async fn 
test_multisig_update_signers() -> anyhow::Result<()> { for public_key in new_public_keys.iter().rev() { let key_word: Word = public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); } // Hash the vector to create config hash @@ -401,12 +469,12 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::falcon_512_rpo_multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(falcon_512_rpo_multisig_library())? + .with_dynamically_linked_library(multisig_library())? .compile_tx_script(tx_script_code)?; let advice_inputs = AdviceInputs { @@ -453,8 +521,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { .extend_advice_inputs(advice_inputs) .build()? 
.execute() - .await - .unwrap(); + .await?; // Verify the transaction executed successfully assert_eq!(update_approvers_tx.account_delta().nonce_delta(), Felt::new(1)); @@ -471,7 +538,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - .get_map_item(AuthFalcon512RpoMultisig::approver_public_keys_slot(), storage_key) + .get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) .unwrap(); let expected_word: Word = expected_key.to_commitment().into(); @@ -482,7 +549,8 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Verify the threshold was updated by checking the config storage slot let threshold_config_storage = updated_multisig_account .storage() - .get_item(AuthFalcon512RpoMultisig::threshold_config_slot())?; + .get_item(AuthMultisig::threshold_config_slot()) + .unwrap(); assert_eq!( threshold_config_storage[0], @@ -533,7 +601,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { } // Create a new output note for the second transaction with new signers - let output_note_new = create_p2id_note( + let output_note_new = P2idNote::create( updated_multisig_account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), vec![output_note_asset], @@ -550,13 +618,13 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Build the new mock chain with the updated account and notes let mut new_mock_chain_builder = MockChainBuilder::with_accounts([updated_multisig_account.clone()]).unwrap(); - new_mock_chain_builder.add_output_note(OutputNote::Full(input_note_new.clone())); + new_mock_chain_builder.add_output_note(RawOutputNote::Full(input_note_new.clone())); let new_mock_chain = new_mock_chain_builder.build().unwrap(); // Execute transaction without signatures first to get tx summary let tx_context_init_new = new_mock_chain 
.build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .auth_args(salt_new) .build()?; @@ -585,7 +653,7 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { // Execute transaction with new signatures - should succeed let tx_context_execute_new = new_mock_chain .build_tx_context(updated_multisig_account.id(), &[input_note_new.id()], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note_new)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note_new)]) .add_signature(new_public_keys[0].to_commitment(), msg_new, sig_1_new) .add_signature(new_public_keys[1].to_commitment(), msg_new, sig_2_new) .add_signature(new_public_keys[2].to_commitment(), msg_new, sig_3_new) @@ -612,11 +680,24 @@ async fn test_multisig_update_signers() -> anyhow::Result<()> { /// - 2 Updated Approvers (after removing 3 owners) /// - 1 Multisig Contract /// - 1 Transaction Script calling multisig procedures +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { +async fn test_multisig_update_signers_remove_owner( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { // Setup 5 original owners with threshold 4 - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(5, 5)?; - let multisig_account = create_multisig_account(4, &public_keys, 10, vec![])?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(5, 5, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + let multisig_account = create_multisig_account(4, &approvers, 10, vec![])?; // Build mock 
chain let mock_chain_builder = MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); @@ -631,10 +712,17 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let mut config_and_pubkeys_vector = vec![Felt::new(threshold), Felt::new(num_of_approvers), Felt::new(0), Felt::new(0)]; - // Add public keys in reverse order + // Add each public key to the vector for public_key in new_public_keys.iter().rev() { let key_word: Word = public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); } // Create config hash and advice map @@ -644,10 +732,8 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Create transaction script let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(falcon_512_rpo_multisig_library())? - .compile_tx_script( - "begin\n call.::falcon_512_rpo_multisig::update_signers_and_threshold\nend", - )?; + .with_dynamically_linked_library(multisig_library())? + .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; @@ -697,8 +783,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { .extend_advice_inputs(advice_inputs) .build()? 
.execute() - .await - .unwrap(); + .await?; // Verify transaction success assert_eq!(update_approvers_tx.account_delta().nonce_delta(), Felt::new(1)); @@ -715,7 +800,8 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - .get_map_item(AuthFalcon512RpoMultisig::approver_public_keys_slot(), storage_key)?; + .get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) + .unwrap(); let expected_word: Word = expected_key.to_commitment().into(); assert_eq!(storage_item, expected_word, "Public key {} doesn't match", i); } @@ -723,7 +809,8 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { // Verify threshold and num_approvers let threshold_config = updated_multisig_account .storage() - .get_item(AuthFalcon512RpoMultisig::threshold_config_slot())?; + .get_item(AuthMultisig::threshold_config_slot()) + .unwrap(); assert_eq!(threshold_config[0], Felt::new(threshold), "Threshold not updated"); assert_eq!(threshold_config[1], Felt::new(num_of_approvers), "Num approvers not updated"); @@ -745,7 +832,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { [Felt::new(removed_idx), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let removed_owner_slot = updated_multisig_account .storage() - .get_map_item(AuthFalcon512RpoMultisig::approver_public_keys_slot(), removed_owner_key) + .get_map_item(AuthMultisig::approver_public_keys_slot(), removed_owner_key) .unwrap(); assert_eq!( removed_owner_slot, @@ -761,7 +848,7 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { let storage_key = [Felt::new(i as u64), Felt::new(0), Felt::new(0), Felt::new(0)].into(); let storage_item = updated_multisig_account .storage() - .get_map_item(AuthFalcon512RpoMultisig::approver_public_keys_slot(), storage_key) + 
.get_map_item(AuthMultisig::approver_public_keys_slot(), storage_key) .unwrap(); if storage_item != Word::empty() { @@ -780,6 +867,78 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { Ok(()) } +/// Tests that signer updates are rejected when stored procedure threshold overrides would become +/// unreachable for the new signer set. +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] +#[tokio::test] +async fn test_multisig_update_signers_rejects_unreachable_proc_thresholds( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (_secret_keys, auth_schemes, public_keys, _authenticators) = + setup_keys_and_authenticators_with_scheme(3, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + + // Configure a procedure override that is valid for the initial signer set (3-of-3), + // but invalid after updating to 2 signers. 
+ let multisig_account = + create_multisig_account(2, &approvers, 10, vec![(BasicWallet::receive_asset_digest(), 3)])?; + + let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + + let new_public_keys = &public_keys[0..2]; + let threshold = 2u64; + let num_of_approvers = 2u64; + + let mut config_and_pubkeys_vector = + vec![Felt::new(threshold), Felt::new(num_of_approvers), Felt::new(0), Felt::new(0)]; + + for public_key in new_public_keys.iter().rev() { + let key_word: Word = public_key.to_commitment().into(); + config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); + } + + let multisig_config_hash = Hasher::hash_elements(&config_and_pubkeys_vector); + let mut advice_map = AdviceMap::default(); + advice_map.insert(multisig_config_hash, config_and_pubkeys_vector); + + let tx_script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? + .compile_tx_script("begin\n call.::miden::standards::components::auth::multisig::update_signers_and_threshold\nend")?; + + let advice_inputs = AdviceInputs { map: advice_map, ..Default::default() }; + let salt = Word::from([Felt::new(8); 4]); + + let result = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(tx_script) + .tx_script_args(multisig_config_hash) + .extend_advice_inputs(advice_inputs) + .auth_args(salt) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(result, ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS); + + Ok(()) +} + /// Tests that newly added approvers cannot sign transactions before the signer update is executed. /// /// This is a regression test to ensure that unauthorized parties cannot add their own public keys @@ -791,14 +950,26 @@ async fn test_multisig_update_signers_remove_owner() -> anyhow::Result<()> { /// 2. 
Prepare a signer update transaction with new approvers /// 3. Try to sign the transaction with the NEW approvers (should fail) /// 4. Verify that only the CURRENT approvers can sign the update transaction +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Result<()> { +async fn test_multisig_new_approvers_cannot_sign_before_update( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { // SECTION 1: Create a multisig account with 2 original approvers // ================================================================================ - let (_secret_keys, public_keys, _authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, _authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; + + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); - let multisig_account = create_multisig_account(2, &public_keys, 10, vec![])?; + let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?; let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) .unwrap() @@ -814,8 +985,8 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // Setup new signers (these should NOT be able to sign the update transaction) let mut advice_map = AdviceMap::default(); - let (_new_secret_keys, new_public_keys, new_authenticators) = - setup_keys_and_authenticators(4, 4)?; + let (_new_secret_keys, _new_auth_schemes, new_public_keys, new_authenticators) = + setup_keys_and_authenticators_with_scheme(4, 4, auth_scheme)?; let threshold = 3u64; let num_of_approvers = 4u64; @@ -833,6 +1004,13 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu for public_key in new_public_keys.iter().rev() { let key_word: Word = 
public_key.to_commitment().into(); config_and_pubkeys_vector.extend_from_slice(key_word.as_elements()); + + config_and_pubkeys_vector.extend_from_slice(&[ + Felt::new(auth_scheme as u64), + Felt::new(0), + Felt::new(0), + Felt::new(0), + ]); } // Hash the vector to create config hash @@ -844,12 +1022,12 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu // Create a transaction script that calls the update_signers procedure let tx_script_code = " begin - call.::falcon_512_rpo_multisig::update_signers_and_threshold + call.::miden::standards::components::auth::multisig::update_signers_and_threshold end "; let tx_script = CodeBuilder::default() - .with_dynamically_linked_library(falcon_512_rpo_multisig_library())? + .with_dynamically_linked_library(multisig_library())? .compile_tx_script(tx_script_code)?; let advice_inputs = AdviceInputs { @@ -920,17 +1098,29 @@ async fn test_multisig_new_approvers_cannot_sign_before_update() -> anyhow::Resu /// threshold of 1 for note consumption, can: /// 1. Consume a note when only one approver signs the transaction /// 2. 
Send a note only when both approvers sign the transaction (default threshold) +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { +async fn test_multisig_proc_threshold_overrides( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { // Setup keys and authenticators - let (_secret_keys, public_keys, authenticators) = setup_keys_and_authenticators(2, 2)?; + let (_secret_keys, auth_schemes, public_keys, authenticators) = + setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?; let proc_threshold_map = vec![(BasicWallet::receive_asset_digest(), 1)]; + let approvers = public_keys + .iter() + .zip(auth_schemes.iter()) + .map(|(pk, scheme)| (pk.clone(), *scheme)) + .collect::>(); + // Create multisig account let multisig_starting_balance = 10u64; let mut multisig_account = - create_multisig_account(2, &public_keys, multisig_starting_balance, proc_threshold_map)?; + create_multisig_account(2, &approvers, multisig_starting_balance, proc_threshold_map)?; // SECTION 1: Test note consumption with 1 signature // ================================================================================ @@ -989,7 +1179,7 @@ async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { let salt2 = Word::from([Felt::new(2); 4]); // Create output note to send 5 units from the account - let output_note = create_p2id_note( + let output_note = P2idNote::create( multisig_account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(), vec![FungibleAsset::mock(5)], @@ -1004,7 +1194,7 @@ async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { // Execute transaction without signatures to get tx summary let tx_context_init = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? 
- .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .tx_script(send_note_transaction_script.clone()) .auth_args(salt2) .build()?; @@ -1024,7 +1214,7 @@ async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { // Try to execute with only 1 signature - should FAIL let tx_context_one_sig = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) .add_signature(public_keys[0].to_commitment(), msg2, sig_1) .tx_script(send_note_transaction_script.clone()) .auth_args(salt2) @@ -1051,7 +1241,7 @@ async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { // Execute with 2 signatures - should SUCCEED let result = mock_chain .build_tx_context(multisig_account.id(), &[], &[])? - .extend_expected_output_notes(vec![OutputNote::Full(output_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) .add_signature(public_keys[0].to_commitment(), msg2, sig_1) .add_signature(public_keys[1].to_commitment(), msg2, sig_2) .auth_args(salt2) @@ -1071,3 +1261,251 @@ async fn test_multisig_proc_threshold_overrides() -> anyhow::Result<()> { Ok(()) } + +/// Tests setting a per-procedure threshold override and clearing it via `proc_threshold == 0`. 
+#[rstest]
+#[case::ecdsa(AuthScheme::EcdsaK256Keccak)]
+#[case::falcon(AuthScheme::Falcon512Poseidon2)]
+#[tokio::test]
+async fn test_multisig_set_procedure_threshold(
+    #[case] auth_scheme: AuthScheme,
+) -> anyhow::Result<()> {
+    let (_secret_keys, auth_schemes, public_keys, authenticators) =
+        setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?;
+
+    let approvers = public_keys
+        .iter()
+        .zip(auth_schemes.iter())
+        .map(|(pk, scheme)| (pk.clone(), *scheme))
+        .collect::<Vec<_>>();
+
+    let mut multisig_account = create_multisig_account(2, &approvers, 10, vec![])?;
+    let mut mock_chain_builder =
+        MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap();
+    let one_sig_note = mock_chain_builder.add_p2id_note(
+        multisig_account.id(),
+        multisig_account.id(),
+        &[FungibleAsset::mock(1)],
+        NoteType::Public,
+    )?;
+    let clear_check_note = mock_chain_builder.add_p2id_note(
+        multisig_account.id(),
+        multisig_account.id(),
+        &[FungibleAsset::mock(1)],
+        NoteType::Public,
+    )?;
+    let mut mock_chain = mock_chain_builder.build().unwrap();
+    let proc_root = BasicWallet::receive_asset_digest();
+
+    let set_script_code = format!(
+        r#"
+        begin
+            push.{proc_root}
+            push.1
+            call.::miden::standards::components::auth::multisig::set_procedure_threshold
+            dropw
+            drop
+        end
+        "#
+    );
+    let set_script = CodeBuilder::default()
+        .with_dynamically_linked_library(multisig_library())?
+        .compile_tx_script(set_script_code)?;
+
+    // 1) Set override to 1 (requires default 2 signatures).
+    let set_salt = Word::from([Felt::new(50); 4]);
+
+    let set_init = mock_chain
+        .build_tx_context(multisig_account.id(), &[], &[])?
+ .tx_script(set_script.clone()) + .auth_args(set_salt) + .build()?; + let set_summary = match set_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let set_msg = set_summary.as_ref().to_commitment(); + let set_summary = SigningInputs::TransactionSummary(set_summary); + let set_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &set_summary) + .await?; + let set_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &set_summary) + .await?; + + let set_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(set_script) + .add_signature(public_keys[0].to_commitment(), set_msg, set_sig_1) + .add_signature(public_keys[1].to_commitment(), set_msg, set_sig_2) + .auth_args(set_salt) + .build()? + .execute() + .await?; + + multisig_account.apply_delta(set_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&set_tx)?; + mock_chain.prove_next_block()?; + + // 2) Verify receive_asset can now execute with one signature. + let one_sig_salt = Word::from([Felt::new(51); 4]); + + let one_sig_init = mock_chain + .build_tx_context(multisig_account.id(), &[one_sig_note.id()], &[])? + .auth_args(one_sig_salt) + .build()?; + let one_sig_summary = match one_sig_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let one_sig_msg = one_sig_summary.as_ref().to_commitment(); + let one_sig_summary = SigningInputs::TransactionSummary(one_sig_summary); + let one_sig = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &one_sig_summary) + .await?; + + let one_sig_tx = mock_chain + .build_tx_context(multisig_account.id(), &[one_sig_note.id()], &[])? 
+ .add_signature(public_keys[0].to_commitment(), one_sig_msg, one_sig) + .auth_args(one_sig_salt) + .build()? + .execute() + .await + .expect("override=1 should allow receive_asset with one signature"); + multisig_account.apply_delta(one_sig_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&one_sig_tx)?; + mock_chain.prove_next_block()?; + + // 3) Clear override by setting threshold to zero. + let clear_script_code = format!( + r#" + begin + push.{proc_root} + push.0 + call.::miden::standards::components::auth::multisig::set_procedure_threshold + dropw + drop + end + "# + ); + let clear_script = CodeBuilder::default() + .with_dynamically_linked_library(multisig_library())? + .compile_tx_script(clear_script_code)?; + let clear_salt = Word::from([Felt::new(52); 4]); + + let clear_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(clear_script.clone()) + .auth_args(clear_salt) + .build()?; + let clear_summary = match clear_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let clear_msg = clear_summary.as_ref().to_commitment(); + let clear_summary = SigningInputs::TransactionSummary(clear_summary); + let clear_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &clear_summary) + .await?; + let clear_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &clear_summary) + .await?; + + let clear_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(clear_script) + .add_signature(public_keys[0].to_commitment(), clear_msg, clear_sig_1) + .add_signature(public_keys[1].to_commitment(), clear_msg, clear_sig_2) + .auth_args(clear_salt) + .build()? 
+ .execute() + .await?; + + multisig_account.apply_delta(clear_tx.account_delta())?; + mock_chain.add_pending_executed_transaction(&clear_tx)?; + mock_chain.prove_next_block()?; + + // 4) After clear, one signature should no longer be sufficient for receive_asset. + let clear_check_salt = Word::from([Felt::new(53); 4]); + + let clear_check_init = mock_chain + .build_tx_context(multisig_account.id(), &[clear_check_note.id()], &[])? + .auth_args(clear_check_salt) + .build()?; + let clear_check_summary = match clear_check_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => panic!("expected abort with tx effects: {error:?}"), + }; + let clear_check_msg = clear_check_summary.as_ref().to_commitment(); + let clear_check_summary = SigningInputs::TransactionSummary(clear_check_summary); + let clear_check_sig = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &clear_check_summary) + .await?; + + let clear_check_result = mock_chain + .build_tx_context(multisig_account.id(), &[clear_check_note.id()], &[])? + .add_signature(public_keys[0].to_commitment(), clear_check_msg, clear_check_sig) + .auth_args(clear_check_salt) + .build()? + .execute() + .await; + + assert!( + matches!(clear_check_result, Err(TransactionExecutorError::Unauthorized(_))), + "override cleared via threshold=0 should restore default threshold requirements" + ); + + Ok(()) +} + +/// Tests setting an override threshold above num_approvers is rejected. 
+#[rstest]
+#[case::ecdsa(AuthScheme::EcdsaK256Keccak)]
+#[case::falcon(AuthScheme::Falcon512Poseidon2)]
+#[tokio::test]
+async fn test_multisig_set_procedure_threshold_rejects_exceeding_approvers(
+    #[case] auth_scheme: AuthScheme,
+) -> anyhow::Result<()> {
+    let (_secret_keys, auth_schemes, public_keys, _authenticators) =
+        setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?;
+
+    let approvers = public_keys
+        .iter()
+        .zip(auth_schemes.iter())
+        .map(|(pk, scheme)| (pk.clone(), *scheme))
+        .collect::<Vec<_>>();
+
+    let multisig_account = create_multisig_account(2, &approvers, 10, vec![])?;
+    let proc_root = BasicWallet::receive_asset_digest();
+
+    let script_code = format!(
+        r#"
+        begin
+            push.{proc_root}
+            push.3
+            call.::miden::standards::components::auth::multisig::set_procedure_threshold
+        end
+        "#
+    );
+    let script = CodeBuilder::default()
+        .with_dynamically_linked_library(multisig_library())?
+        .compile_tx_script(script_code)?;
+
+    let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()])
+        .unwrap()
+        .build()
+        .unwrap();
+    let salt = Word::from([Felt::new(54); 4]);
+
+    let tx_context_init = mock_chain
+        .build_tx_context(multisig_account.id(), &[], &[])?
+        .tx_script(script.clone())
+        .auth_args(salt)
+        .build()?;
+
+    let result = tx_context_init.execute().await;
+
+    assert_transaction_executor_error!(result, ERR_PROC_THRESHOLD_EXCEEDS_NUM_APPROVERS);
+
+    Ok(())
+}
diff --git a/crates/miden-testing/tests/auth/multisig_psm.rs b/crates/miden-testing/tests/auth/multisig_psm.rs
new file mode 100644
index 0000000000..31d090d460
--- /dev/null
+++ b/crates/miden-testing/tests/auth/multisig_psm.rs
@@ -0,0 +1,531 @@
+use miden_protocol::account::auth::{AuthScheme, AuthSecretKey, PublicKey};
+use miden_protocol::account::{
+    Account,
+    AccountBuilder,
+    AccountId,
+    AccountStorageMode,
+    AccountType,
+};
+use miden_protocol::asset::FungibleAsset;
+use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType};
+use miden_protocol::testing::account_id::{
+    ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET,
+    ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE,
+};
+use miden_protocol::transaction::RawOutputNote;
+use miden_protocol::{Felt, Word};
+use miden_standards::account::auth::{AuthMultisigPsm, AuthMultisigPsmConfig, PsmConfig};
+use miden_standards::account::components::multisig_psm_library;
+use miden_standards::account::wallets::BasicWallet;
+use miden_standards::code_builder::CodeBuilder;
+use miden_standards::errors::standards::{
+    ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE,
+    ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES,
+};
+use miden_testing::{MockChainBuilder, assert_transaction_executor_error};
+use miden_tx::TransactionExecutorError;
+use miden_tx::auth::{BasicAuthenticator, SigningInputs, TransactionAuthenticator};
+use rand::SeedableRng;
+use rand_chacha::ChaCha20Rng;
+use rstest::rstest;
+
+// ================================================================================================
+// HELPER FUNCTIONS
+// ================================================================================================
+
+type MultisigTestSetup =
+    (Vec<AuthSecretKey>, Vec<AuthScheme>, Vec<PublicKey>, Vec<BasicAuthenticator>);
+
+/// Sets up 
secret keys, public keys, and authenticators for multisig testing for the given scheme.
+fn setup_keys_and_authenticators_with_scheme(
+    num_approvers: usize,
+    threshold: usize,
+    auth_scheme: AuthScheme,
+) -> anyhow::Result<MultisigTestSetup> {
+    let seed: [u8; 32] = rand::random();
+    let mut rng = ChaCha20Rng::from_seed(seed);
+
+    let mut secret_keys = Vec::new();
+    let mut auth_schemes = Vec::new();
+    let mut public_keys = Vec::new();
+    let mut authenticators = Vec::new();
+
+    for _ in 0..num_approvers {
+        let sec_key = match auth_scheme {
+            AuthScheme::EcdsaK256Keccak => AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng),
+            AuthScheme::Falcon512Poseidon2 => {
+                AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng)
+            },
+            _ => anyhow::bail!("unsupported auth scheme for this test: {auth_scheme:?}"),
+        };
+        let pub_key = sec_key.public_key();
+
+        secret_keys.push(sec_key);
+        auth_schemes.push(auth_scheme);
+        public_keys.push(pub_key);
+    }
+
+    // Create authenticators for required signers
+    for secret_key in secret_keys.iter().take(threshold) {
+        let authenticator = BasicAuthenticator::new(core::slice::from_ref(secret_key));
+        authenticators.push(authenticator);
+    }
+
+    Ok((secret_keys, auth_schemes, public_keys, authenticators))
+}
+
+/// Creates a multisig account configured with a private state manager signer.
+fn create_multisig_account_with_psm(
+    threshold: u32,
+    approvers: &[(PublicKey, AuthScheme)],
+    psm: PsmConfig,
+    asset_amount: u64,
+    proc_threshold_map: Vec<(Word, u32)>,
+) -> anyhow::Result<Account> {
+    let approvers = approvers
+        .iter()
+        .map(|(pub_key, auth_scheme)| (pub_key.to_commitment(), *auth_scheme))
+        .collect();
+
+    let config = AuthMultisigPsmConfig::new(approvers, threshold, psm)?
+        .with_proc_thresholds(proc_threshold_map)?;
+
+    let multisig_account = AccountBuilder::new([0; 32])
+        .with_auth_component(AuthMultisigPsm::new(config)?)
+        .with_component(BasicWallet)
+        .account_type(AccountType::RegularAccountUpdatableCode)
+        .storage_mode(AccountStorageMode::Public)
+        .with_assets(vec![FungibleAsset::mock(asset_amount)])
+        .build_existing()?;
+
+    Ok(multisig_account)
+}
+
+// ================================================================================================
+// TESTS
+// ================================================================================================
+
+/// Tests that multisig authentication requires an additional PSM signature when
+/// configured.
+#[rstest]
+#[case::ecdsa(AuthScheme::EcdsaK256Keccak)]
+#[case::falcon(AuthScheme::Falcon512Poseidon2)]
+#[tokio::test]
+async fn test_multisig_psm_signature_required(
+    #[case] auth_scheme: AuthScheme,
+) -> anyhow::Result<()> {
+    let (_secret_keys, auth_schemes, public_keys, authenticators) =
+        setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?;
+    let approvers = public_keys
+        .iter()
+        .zip(auth_schemes.iter())
+        .map(|(pk, scheme)| (pk.clone(), *scheme))
+        .collect::<Vec<_>>();
+
+    let psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak();
+    let psm_public_key = psm_secret_key.public_key();
+    let psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&psm_secret_key));
+
+    let mut multisig_account = create_multisig_account_with_psm(
+        2,
+        &approvers,
+        PsmConfig::new(psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak),
+        10,
+        vec![],
+    )?;
+
+    let output_note_asset = FungibleAsset::mock(0);
+    let mut mock_chain_builder =
+        MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap();
+
+    let output_note = mock_chain_builder.add_p2id_note(
+        multisig_account.id(),
+        ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_UPDATABLE_CODE.try_into().unwrap(),
+        &[output_note_asset],
+        NoteType::Public,
+    )?;
+    let input_note = mock_chain_builder.add_spawn_note([&output_note])?;
+    let mut mock_chain = mock_chain_builder.build().unwrap();
+
+    let salt = Word::from([Felt::new(777); 4]);
+    let tx_context_init = 
mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + // Missing PSM signature must fail. + let without_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) + .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) + .auth_args(salt) + .build()? + .execute() + .await; + assert!(matches!(without_psm_result, Err(TransactionExecutorError::Unauthorized(_)))); + + let psm_signature = psm_authenticator + .get_signature(psm_public_key.to_commitment(), &tx_summary_signing) + .await?; + + // With PSM signature the transaction should succeed. + let tx_context_execute = mock_chain + .build_tx_context(multisig_account.id(), &[input_note.id()], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .add_signature(psm_public_key.to_commitment(), msg, psm_signature) + .auth_args(salt) + .build()? 
+        .execute()
+        .await?;
+
+    multisig_account.apply_delta(tx_context_execute.account_delta())?;
+
+    mock_chain.add_pending_executed_transaction(&tx_context_execute)?;
+    mock_chain.prove_next_block()?;
+
+    assert_eq!(
+        multisig_account
+            .vault()
+            .get_balance(AccountId::try_from(ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET)?)?,
+        10 - output_note_asset.unwrap_fungible().amount()
+    );
+
+    Ok(())
+}
+
+/// Tests that the PSM public key can be updated and then enforced.
+#[rstest]
+#[case::ecdsa(AuthScheme::EcdsaK256Keccak)]
+#[case::falcon(AuthScheme::Falcon512Poseidon2)]
+#[tokio::test]
+async fn test_multisig_update_psm_public_key(
+    #[case] auth_scheme: AuthScheme,
+) -> anyhow::Result<()> {
+    let (_secret_keys, auth_schemes, public_keys, authenticators) =
+        setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?;
+    let approvers = public_keys
+        .iter()
+        .zip(auth_schemes.iter())
+        .map(|(pk, scheme)| (pk.clone(), *scheme))
+        .collect::<Vec<_>>();
+
+    let old_psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak();
+    let old_psm_public_key = old_psm_secret_key.public_key();
+    let old_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&old_psm_secret_key));
+
+    let new_psm_secret_key = AuthSecretKey::new_falcon512_poseidon2();
+    let new_psm_public_key = new_psm_secret_key.public_key();
+    let new_psm_auth_scheme = new_psm_secret_key.auth_scheme();
+    let new_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&new_psm_secret_key));
+
+    let multisig_account = create_multisig_account_with_psm(
+        2,
+        &approvers,
+        PsmConfig::new(old_psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak),
+        10,
+        vec![],
+    )?;
+
+    let mut mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()])
+        .unwrap()
+        .build()
+        .unwrap();
+
+    let new_psm_key_word: Word = new_psm_public_key.to_commitment().into();
+    let new_psm_scheme_id = new_psm_auth_scheme as u32;
+    let update_psm_script = CodeBuilder::new()
+
.with_dynamically_linked_library(multisig_psm_library())? + .compile_tx_script(format!( + "begin\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend" + ))?; + + let update_salt = Word::from([Felt::new(991); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_script.clone()) + .auth_args(update_salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let update_msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + // PSM key rotation intentionally skips PSM signature for this update tx. + let update_psm_tx = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_script) + .add_signature(public_keys[0].to_commitment(), update_msg, sig_1) + .add_signature(public_keys[1].to_commitment(), update_msg, sig_2) + .auth_args(update_salt) + .build()? 
+ .execute() + .await?; + + let mut updated_multisig_account = multisig_account.clone(); + updated_multisig_account.apply_delta(update_psm_tx.account_delta())?; + let updated_psm_public_key = updated_multisig_account + .storage() + .get_map_item(AuthMultisigPsm::psm_public_key_slot(), Word::empty())?; + assert_eq!(updated_psm_public_key, Word::from(new_psm_public_key.to_commitment())); + let updated_psm_scheme_id = updated_multisig_account + .storage() + .get_map_item(AuthMultisigPsm::psm_scheme_id_slot(), Word::from([0u32, 0, 0, 0]))?; + assert_eq!( + updated_psm_scheme_id, + Word::from([new_psm_auth_scheme as u32, 0u32, 0u32, 0u32]) + ); + + mock_chain.add_pending_executed_transaction(&update_psm_tx)?; + mock_chain.prove_next_block()?; + + // Build one tx summary after key update. Old PSM must fail and new PSM must pass on this same + // transaction. + let next_salt = Word::from([Felt::new(992); 4]); + let tx_context_init_next = mock_chain + .build_tx_context(updated_multisig_account.id(), &[], &[])? + .auth_args(next_salt) + .build()?; + + let tx_summary_next = match tx_context_init_next.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + let next_msg = tx_summary_next.as_ref().to_commitment(); + let tx_summary_next_signing = SigningInputs::TransactionSummary(tx_summary_next); + + let next_sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_next_signing) + .await?; + let next_sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_next_signing) + .await?; + let old_psm_sig_next = old_psm_authenticator + .get_signature(old_psm_public_key.to_commitment(), &tx_summary_next_signing) + .await?; + let new_psm_sig_next = new_psm_authenticator + .get_signature(new_psm_public_key.to_commitment(), &tx_summary_next_signing) + .await?; + + // Old PSM signature must fail after key update. 
+    let with_old_psm_result = mock_chain
+        .build_tx_context(updated_multisig_account.id(), &[], &[])?
+        .add_signature(public_keys[0].to_commitment(), next_msg, next_sig_1.clone())
+        .add_signature(public_keys[1].to_commitment(), next_msg, next_sig_2.clone())
+        .add_signature(old_psm_public_key.to_commitment(), next_msg, old_psm_sig_next)
+        .auth_args(next_salt)
+        .build()?
+        .execute()
+        .await;
+    assert!(matches!(with_old_psm_result, Err(TransactionExecutorError::Unauthorized(_))));
+
+    // New PSM signature must pass.
+    mock_chain
+        .build_tx_context(updated_multisig_account.id(), &[], &[])?
+        .add_signature(public_keys[0].to_commitment(), next_msg, next_sig_1)
+        .add_signature(public_keys[1].to_commitment(), next_msg, next_sig_2)
+        .add_signature(new_psm_public_key.to_commitment(), next_msg, new_psm_sig_next)
+        .auth_args(next_salt)
+        .build()?
+        .execute()
+        .await?;
+
+    Ok(())
+}
+
+/// Tests that `update_psm_public_key` must be the only account action in the transaction.
+#[rstest]
+#[case::ecdsa(AuthScheme::EcdsaK256Keccak)]
+#[case::falcon(AuthScheme::Falcon512Poseidon2)]
+#[tokio::test]
+async fn test_multisig_update_psm_public_key_must_be_called_alone(
+    #[case] auth_scheme: AuthScheme,
+) -> anyhow::Result<()> {
+    let (_secret_keys, auth_schemes, public_keys, authenticators) =
+        setup_keys_and_authenticators_with_scheme(2, 2, auth_scheme)?;
+    let approvers = public_keys
+        .iter()
+        .zip(auth_schemes.iter())
+        .map(|(pk, scheme)| (pk.clone(), *scheme))
+        .collect::<Vec<_>>();
+
+    let old_psm_secret_key = AuthSecretKey::new_ecdsa_k256_keccak();
+    let old_psm_public_key = old_psm_secret_key.public_key();
+    let old_psm_authenticator = BasicAuthenticator::new(core::slice::from_ref(&old_psm_secret_key));
+
+    let new_psm_secret_key = AuthSecretKey::new_falcon512_poseidon2();
+    let new_psm_public_key = new_psm_secret_key.public_key();
+    let new_psm_auth_scheme = new_psm_secret_key.auth_scheme();
+
+    let multisig_account = create_multisig_account_with_psm(
+        2,
+
&approvers, + PsmConfig::new(old_psm_public_key.to_commitment(), AuthScheme::EcdsaK256Keccak), + 10, + vec![], + )?; + + let new_psm_key_word: Word = new_psm_public_key.to_commitment().into(); + let new_psm_scheme_id = new_psm_auth_scheme as u32; + let update_psm_script = CodeBuilder::new() + .with_dynamically_linked_library(multisig_psm_library())? + .compile_tx_script(format!( + "begin\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend" + ))?; + + let mut mock_chain_builder = + MockChainBuilder::with_accounts([multisig_account.clone()]).unwrap(); + let receive_asset_note = mock_chain_builder.add_p2id_note( + multisig_account.id(), + multisig_account.id(), + &[FungibleAsset::mock(1)], + NoteType::Public, + )?; + let mock_chain = mock_chain_builder.build().unwrap(); + + let salt = Word::from([Felt::new(993); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? + .tx_script(update_psm_script.clone()) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + let without_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? 
+ .tx_script(update_psm_script.clone()) + .add_signature(public_keys[0].to_commitment(), msg, sig_1.clone()) + .add_signature(public_keys[1].to_commitment(), msg, sig_2.clone()) + .auth_args(salt) + .build()? + .execute() + .await; + assert_transaction_executor_error!(without_psm_result, ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE); + + let old_psm_signature = old_psm_authenticator + .get_signature(old_psm_public_key.to_commitment(), &tx_summary_signing) + .await?; + + let with_psm_result = mock_chain + .build_tx_context(multisig_account.id(), &[receive_asset_note.id()], &[])? + .tx_script(update_psm_script) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .add_signature(old_psm_public_key.to_commitment(), msg, old_psm_signature) + .auth_args(salt) + .build()? + .execute() + .await; + + assert_transaction_executor_error!(with_psm_result, ERR_AUTH_PROCEDURE_MUST_BE_CALLED_ALONE); + + // Also reject rotation transactions that touch notes even when no other account procedure is + // called. + let note_script = CodeBuilder::default().compile_note_script("begin nop end")?; + let note_serial_num = Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]); + let note_recipient = + NoteRecipient::new(note_serial_num, note_script.clone(), NoteStorage::default()); + let output_note = Note::new( + NoteAssets::new(vec![])?, + NoteMetadata::new(multisig_account.id(), NoteType::Public), + note_recipient, + ); + + let new_psm_key_word: Word = new_psm_public_key.to_commitment().into(); + let new_psm_scheme_id = new_psm_auth_scheme as u32; + let update_psm_with_output_script = CodeBuilder::new() + .with_dynamically_linked_library(multisig_psm_library())? 
+ .compile_tx_script(format!( + "use miden::protocol::output_note\nbegin\n push.{recipient}\n push.{note_type}\n push.{tag}\n exec.output_note::create\n swapdw\n dropw\n dropw\n push.{new_psm_key_word}\n push.{new_psm_scheme_id}\n call.::miden::standards::components::auth::multisig_psm::update_psm_public_key\n drop\n dropw\nend", + recipient = output_note.recipient().digest(), + note_type = NoteType::Public as u8, + tag = Felt::from(output_note.metadata().tag()), + ))?; + + let mock_chain = MockChainBuilder::with_accounts([multisig_account.clone()]) + .unwrap() + .build() + .unwrap(); + + let salt = Word::from([Felt::new(994); 4]); + let tx_context_init = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_with_output_script.clone()) + .add_note_script(note_script.clone()) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note.clone())]) + .auth_args(salt) + .build()?; + + let tx_summary = match tx_context_init.execute().await.unwrap_err() { + TransactionExecutorError::Unauthorized(tx_effects) => tx_effects, + error => anyhow::bail!("expected abort with tx effects: {error}"), + }; + + let msg = tx_summary.as_ref().to_commitment(); + let tx_summary_signing = SigningInputs::TransactionSummary(tx_summary); + let sig_1 = authenticators[0] + .get_signature(public_keys[0].to_commitment(), &tx_summary_signing) + .await?; + let sig_2 = authenticators[1] + .get_signature(public_keys[1].to_commitment(), &tx_summary_signing) + .await?; + + let result = mock_chain + .build_tx_context(multisig_account.id(), &[], &[])? + .tx_script(update_psm_with_output_script) + .add_note_script(note_script) + .extend_expected_output_notes(vec![RawOutputNote::Full(output_note)]) + .add_signature(public_keys[0].to_commitment(), msg, sig_1) + .add_signature(public_keys[1].to_commitment(), msg, sig_2) + .auth_args(salt) + .build()? 
+ .execute() + .await; + + assert_transaction_executor_error!( + result, + ERR_AUTH_TRANSACTION_MUST_NOT_INCLUDE_INPUT_OR_OUTPUT_NOTES + ); + + Ok(()) +} diff --git a/crates/miden-testing/tests/auth/rpo_falcon_acl.rs b/crates/miden-testing/tests/auth/rpo_falcon_acl.rs deleted file mode 100644 index d6305848fb..0000000000 --- a/crates/miden-testing/tests/auth/rpo_falcon_acl.rs +++ /dev/null @@ -1,273 +0,0 @@ -use core::slice; - -use anyhow::Context; -use assert_matches::assert_matches; -use miden_protocol::account::{ - Account, - AccountBuilder, - AccountComponent, - AccountStorage, - AccountStorageMode, - AccountType, -}; -use miden_protocol::note::Note; -use miden_protocol::testing::storage::MOCK_VALUE_SLOT0; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, FieldElement, Word}; -use miden_standards::account::auth::AuthFalcon512RpoAcl; -use miden_standards::code_builder::CodeBuilder; -use miden_standards::testing::account_component::MockAccountComponent; -use miden_standards::testing::note::NoteBuilder; -use miden_testing::{Auth, MockChain}; -use miden_tx::TransactionExecutorError; - -// CONSTANTS -// ================================================================================================ - -const TX_SCRIPT_NO_TRIGGER: &str = r#" - use mock::account - begin - call.account::account_procedure_1 - drop - end - "#; - -// HELPER FUNCTIONS -// ================================================================================================ - -/// Sets up the basic components needed for Falcon RPO ACL tests. -/// Returns (account, mock_chain, note). 
-fn setup_rpo_falcon_acl_test( - allow_unauthorized_output_notes: bool, - allow_unauthorized_input_notes: bool, -) -> anyhow::Result<(Account, MockChain, Note)> { - let component: AccountComponent = - MockAccountComponent::with_slots(AccountStorage::mock_storage_slots()).into(); - - let get_item_proc_root = component - .get_procedure_root_by_path("mock::account::get_item") - .expect("get_item procedure should exist"); - let set_item_proc_root = component - .get_procedure_root_by_path("mock::account::set_item") - .expect("set_item procedure should exist"); - let auth_trigger_procedures = vec![get_item_proc_root, set_item_proc_root]; - - let (auth_component, _authenticator) = Auth::Acl { - auth_trigger_procedures: auth_trigger_procedures.clone(), - allow_unauthorized_output_notes, - allow_unauthorized_input_notes, - } - .build_component(); - - let account = AccountBuilder::new([0; 32]) - .with_auth_component(auth_component) - .with_component(component) - .account_type(AccountType::RegularAccountUpdatableCode) - .storage_mode(AccountStorageMode::Public) - .build_existing()?; - - let mut builder = MockChain::builder(); - builder.add_account(account.clone())?; - // Create a mock note to consume (needed to make the transaction non-empty) - let note = NoteBuilder::new(account.id(), &mut rand::rng()) - .build() - .expect("failed to create mock note"); - builder.add_output_note(OutputNote::Full(note.clone())); - let mock_chain = builder.build()?; - - Ok((account, mock_chain, note)) -} - -#[tokio::test] -async fn test_rpo_falcon_acl() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_rpo_falcon_acl_test(false, true)?; - - // We need to get the authenticator separately for this test - let component: AccountComponent = - MockAccountComponent::with_slots(AccountStorage::mock_storage_slots()).into(); - - let get_item_proc_root = component - .get_procedure_root_by_path("mock::account::get_item") - .expect("get_item procedure should exist"); - let 
set_item_proc_root = component - .get_procedure_root_by_path("mock::account::set_item") - .expect("set_item procedure should exist"); - let auth_trigger_procedures = vec![get_item_proc_root, set_item_proc_root]; - - let (_, authenticator) = Auth::Acl { - auth_trigger_procedures: auth_trigger_procedures.clone(), - allow_unauthorized_output_notes: false, - allow_unauthorized_input_notes: true, - } - .build_component(); - - let tx_script_with_trigger_1 = format!( - r#" - use mock::account - - const MOCK_VALUE_SLOT0 = word("{mock_value_slot0}") - - begin - push.MOCK_VALUE_SLOT0[0..2] - call.account::get_item - dropw - end - "#, - mock_value_slot0 = &*MOCK_VALUE_SLOT0, - ); - - let tx_script_with_trigger_2 = format!( - r#" - use mock::account - - const MOCK_VALUE_SLOT0 = word("{mock_value_slot0}") - - begin - push.1.2.3.4 - push.MOCK_VALUE_SLOT0[0..2] - call.account::set_item - dropw dropw - end - "#, - mock_value_slot0 = &*MOCK_VALUE_SLOT0, - ); - - let tx_script_trigger_1 = - CodeBuilder::with_mock_libraries().compile_tx_script(tx_script_with_trigger_1)?; - - let tx_script_trigger_2 = - CodeBuilder::with_mock_libraries().compile_tx_script(tx_script_with_trigger_2)?; - - let tx_script_no_trigger = - CodeBuilder::with_mock_libraries().compile_tx_script(TX_SCRIPT_NO_TRIGGER)?; - - // Test 1: Transaction WITH authenticator calling trigger procedure 1 (should succeed) - let tx_context_with_auth_1 = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? - .authenticator(authenticator.clone()) - .tx_script(tx_script_trigger_1.clone()) - .build()?; - - tx_context_with_auth_1 - .execute() - .await - .context("trigger 1 with auth should succeed")?; - - // Test 2: Transaction WITH authenticator calling trigger procedure 2 (should succeed) - let tx_context_with_auth_2 = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? 
- .authenticator(authenticator) - .tx_script(tx_script_trigger_2) - .build()?; - - tx_context_with_auth_2 - .execute() - .await - .context("trigger 2 with auth should succeed")?; - - // Test 3: Transaction WITHOUT authenticator calling trigger procedure (should fail) - let tx_context_no_auth = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? - .authenticator(None) - .tx_script(tx_script_trigger_1) - .build()?; - - let executed_tx_no_auth = tx_context_no_auth.execute().await; - - assert_matches!(executed_tx_no_auth, Err(TransactionExecutorError::MissingAuthenticator)); - - // Test 4: Transaction WITHOUT authenticator calling non-trigger procedure (should succeed) - let tx_context_no_trigger = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? - .authenticator(None) - .tx_script(tx_script_no_trigger) - .build()?; - - let executed = tx_context_no_trigger - .execute() - .await - .context("no trigger, no auth should succeed")?; - assert_eq!( - executed.account_delta().nonce_delta(), - Felt::ZERO, - "no auth but should still trigger nonce increment" - ); - - Ok(()) -} - -#[tokio::test] -async fn test_rpo_falcon_acl_with_allow_unauthorized_output_notes() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_rpo_falcon_acl_test(true, true)?; - - // Verify the storage layout includes both authorization flags - let config_slot = account - .storage() - .get_item(AuthFalcon512RpoAcl::config_slot()) - .expect("config storage slot access failed"); - // Config Slot should be [num_trigger_procs, allow_unauthorized_output_notes, - // allow_unauthorized_input_notes, 0] With 2 procedures, - // allow_unauthorized_output_notes=true, and allow_unauthorized_input_notes=true, this should be - // [2, 1, 1, 0] - assert_eq!(config_slot, Word::from([2u32, 1, 1, 0])); - - let tx_script_no_trigger = - CodeBuilder::with_mock_libraries().compile_tx_script(TX_SCRIPT_NO_TRIGGER)?; - - // Test: Transaction WITHOUT authenticator calling 
non-trigger procedure (should succeed) - // This tests that when allow_unauthorized_output_notes=true, transactions without - // authenticators can still succeed even if they create output notes - let tx_context_no_trigger = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? - .authenticator(None) - .tx_script(tx_script_no_trigger) - .build()?; - - let executed = tx_context_no_trigger - .execute() - .await - .expect("no trigger, no auth should succeed"); - assert_eq!( - executed.account_delta().nonce_delta(), - Felt::ZERO, - "no auth but should still trigger nonce increment" - ); - - Ok(()) -} - -#[tokio::test] -async fn test_rpo_falcon_acl_with_disallow_unauthorized_input_notes() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_rpo_falcon_acl_test(true, false)?; - - // Verify the storage layout includes both flags - let config_slot = account - .storage() - .get_item(AuthFalcon512RpoAcl::config_slot()) - .expect("config storage slot access failed"); - // Config Slot should be [num_trigger_procs, allow_unauthorized_output_notes, - // allow_unauthorized_input_notes, 0] With 2 procedures, - // allow_unauthorized_output_notes=true, and allow_unauthorized_input_notes=false, this should - // be [2, 1, 0, 0] - assert_eq!(config_slot, Word::from([2u32, 1, 0, 0])); - - let tx_script_no_trigger = - CodeBuilder::with_mock_libraries().compile_tx_script(TX_SCRIPT_NO_TRIGGER)?; - - // Test: Transaction WITHOUT authenticator calling non-trigger procedure but consuming input - // notes This should FAIL because allow_unauthorized_input_notes=false and we're consuming - // input notes - let tx_context_no_auth = mock_chain - .build_tx_context(account.id(), &[], slice::from_ref(¬e))? 
- .authenticator(None) - .tx_script(tx_script_no_trigger) - .build()?; - - let executed_tx_no_auth = tx_context_no_auth.execute().await; - - // This should fail with MissingAuthenticator error because input notes are being consumed - // and allow_unauthorized_input_notes is false - assert_matches!(executed_tx_no_auth, Err(TransactionExecutorError::MissingAuthenticator)); - - Ok(()) -} diff --git a/crates/miden-testing/tests/auth/ecdsa_acl.rs b/crates/miden-testing/tests/auth/singlesig_acl.rs similarity index 85% rename from crates/miden-testing/tests/auth/ecdsa_acl.rs rename to crates/miden-testing/tests/auth/singlesig_acl.rs index 9bdaf80ebc..04d97cd3d2 100644 --- a/crates/miden-testing/tests/auth/ecdsa_acl.rs +++ b/crates/miden-testing/tests/auth/singlesig_acl.rs @@ -1,6 +1,7 @@ use core::slice; use assert_matches::assert_matches; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{ Account, AccountBuilder, @@ -11,14 +12,15 @@ use miden_protocol::account::{ }; use miden_protocol::note::Note; use miden_protocol::testing::storage::MOCK_VALUE_SLOT0; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, FieldElement, Word}; -use miden_standards::account::auth::AuthEcdsaK256KeccakAcl; +use miden_protocol::transaction::RawOutputNote; +use miden_protocol::{Felt, Word}; +use miden_standards::account::auth::AuthSingleSigAcl; use miden_standards::code_builder::CodeBuilder; use miden_standards::testing::account_component::MockAccountComponent; use miden_standards::testing::note::NoteBuilder; use miden_testing::{Auth, MockChain}; use miden_tx::TransactionExecutorError; +use rstest::rstest; use crate::prove_and_verify_transaction; @@ -36,11 +38,12 @@ const TX_SCRIPT_NO_TRIGGER: &str = r#" // HELPER FUNCTIONS // ================================================================================================ -/// Sets up the basic components needed for ECDSA ACL tests. +/// Sets up the basic components needed for ACL tests. 
/// Returns (account, mock_chain, note). -fn setup_ecdsa_acl_test( +fn setup_acl_test( allow_unauthorized_output_notes: bool, allow_unauthorized_input_notes: bool, + auth_scheme: AuthScheme, ) -> anyhow::Result<(Account, MockChain, Note)> { let component: AccountComponent = MockAccountComponent::with_slots(AccountStorage::mock_storage_slots()).into(); @@ -53,10 +56,11 @@ fn setup_ecdsa_acl_test( .expect("set_item procedure should exist"); let auth_trigger_procedures = vec![get_item_proc_root, set_item_proc_root]; - let (auth_component, _authenticator) = Auth::EcdsaK256KeccakAcl { + let (auth_component, _authenticator) = Auth::Acl { auth_trigger_procedures: auth_trigger_procedures.clone(), allow_unauthorized_output_notes, allow_unauthorized_input_notes, + auth_scheme, } .build_component(); @@ -73,15 +77,18 @@ fn setup_ecdsa_acl_test( let note = NoteBuilder::new(account.id(), &mut rand::rng()) .build() .expect("failed to create mock note"); - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mock_chain = builder.build()?; Ok((account, mock_chain, note)) } +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_ecdsa_acl() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_ecdsa_acl_test(false, true)?; +async fn test_acl(#[case] auth_scheme: AuthScheme) -> anyhow::Result<()> { + let (account, mock_chain, note) = setup_acl_test(false, true, auth_scheme)?; // We need to get the authenticator separately for this test let component: AccountComponent = @@ -95,10 +102,11 @@ async fn test_ecdsa_acl() -> anyhow::Result<()> { .expect("set_item procedure should exist"); let auth_trigger_procedures = vec![get_item_proc_root, set_item_proc_root]; - let (_, authenticator) = Auth::EcdsaK256KeccakAcl { + let (_, authenticator) = Auth::Acl { auth_trigger_procedures: auth_trigger_procedures.clone(), 
allow_unauthorized_output_notes: false, allow_unauthorized_input_notes: true, + auth_scheme, } .build_component(); @@ -153,7 +161,7 @@ async fn test_ecdsa_acl() -> anyhow::Result<()> { .execute() .await .expect("trigger 1 with auth should succeed"); - prove_and_verify_transaction(executed_tx_with_auth_1)?; + prove_and_verify_transaction(executed_tx_with_auth_1).await?; // Test 2: Transaction WITH authenticator calling trigger procedure 2 (should succeed) let tx_context_with_auth_2 = mock_chain @@ -198,14 +206,19 @@ async fn test_ecdsa_acl() -> anyhow::Result<()> { Ok(()) } +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_ecdsa_acl_with_allow_unauthorized_output_notes() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_ecdsa_acl_test(true, true)?; +async fn test_acl_with_allow_unauthorized_output_notes( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (account, mock_chain, note) = setup_acl_test(true, true, auth_scheme)?; // Verify the storage layout includes both authorization flags let config_slot = account .storage() - .get_item(AuthEcdsaK256KeccakAcl::config_slot()) + .get_item(AuthSingleSigAcl::config_slot()) .expect("config storage slot access failed"); // Config Slot should be [num_trigger_procs, allow_unauthorized_output_notes, // allow_unauthorized_input_notes, 0] With 2 procedures, @@ -238,14 +251,19 @@ async fn test_ecdsa_acl_with_allow_unauthorized_output_notes() -> anyhow::Result Ok(()) } +#[rstest] +#[case::ecdsa(AuthScheme::EcdsaK256Keccak)] +#[case::falcon(AuthScheme::Falcon512Poseidon2)] #[tokio::test] -async fn test_ecdsa_acl_with_disallow_unauthorized_input_notes() -> anyhow::Result<()> { - let (account, mock_chain, note) = setup_ecdsa_acl_test(true, false)?; +async fn test_acl_with_disallow_unauthorized_input_notes( + #[case] auth_scheme: AuthScheme, +) -> anyhow::Result<()> { + let (account, mock_chain, note) = 
setup_acl_test(true, false, auth_scheme)?; // Verify the storage layout includes both flags let config_slot = account .storage() - .get_item(AuthEcdsaK256KeccakAcl::config_slot()) + .get_item(AuthSingleSigAcl::config_slot()) .expect("config storage slot access failed"); // Config Slot should be [num_trigger_procs, allow_unauthorized_output_notes, // allow_unauthorized_input_notes, 0] With 2 procedures, diff --git a/crates/miden-testing/tests/lib.rs b/crates/miden-testing/tests/lib.rs index 04df2369c4..b27b9a00d0 100644 --- a/crates/miden-testing/tests/lib.rs +++ b/crates/miden-testing/tests/lib.rs @@ -5,14 +5,14 @@ mod auth; mod scripts; mod wallet; -use miden_processor::utils::Deserializable; use miden_protocol::Word; use miden_protocol::account::AccountId; use miden_protocol::asset::FungibleAsset; use miden_protocol::crypto::utils::Serializable; -use miden_protocol::note::{Note, NoteAssets, NoteInputs, NoteMetadata, NoteRecipient, NoteType}; +use miden_protocol::note::{Note, NoteAssets, NoteMetadata, NoteRecipient, NoteStorage, NoteType}; use miden_protocol::testing::account_id::ACCOUNT_ID_SENDER; use miden_protocol::transaction::{ExecutedTransaction, ProvenTransaction}; +use miden_protocol::utils::serde::Deserializable; use miden_standards::code_builder::CodeBuilder; use miden_tx::{ LocalTransactionProver, @@ -25,7 +25,7 @@ use miden_tx::{ // ================================================================================================ #[cfg(test)] -pub fn prove_and_verify_transaction( +pub async fn prove_and_verify_transaction( executed_transaction: ExecutedTransaction, ) -> Result<(), TransactionVerifierError> { use miden_protocol::transaction::TransactionHeader; @@ -36,7 +36,7 @@ pub fn prove_and_verify_transaction( let proof_options = ProvingOptions::default(); let prover = LocalTransactionProver::new(proof_options); - let proven_transaction = prover.prove(executed_transaction).unwrap(); + let proven_transaction = 
prover.prove(executed_transaction).await.unwrap(); let proven_tx_header = TransactionHeader::from(&proven_transaction); assert_eq!(proven_transaction.id(), executed_transaction_id); @@ -62,8 +62,8 @@ pub fn get_note_with_fungible_asset_and_script( let sender_id = AccountId::try_from(ACCOUNT_ID_SENDER).unwrap(); let vault = NoteAssets::new(vec![fungible_asset.into()]).unwrap(); - let metadata = NoteMetadata::new(sender_id, NoteType::Public, 1.into()); - let inputs = NoteInputs::new(vec![]).unwrap(); + let metadata = NoteMetadata::new(sender_id, NoteType::Public).with_tag(1.into()); + let inputs = NoteStorage::new(vec![]).unwrap(); let recipient = NoteRecipient::new(serial_num, note_script, inputs); Note::new(vault, metadata, recipient) diff --git a/crates/miden-testing/tests/scripts/faucet.rs b/crates/miden-testing/tests/scripts/faucet.rs index aaca772c55..dd80e4f73e 100644 --- a/crates/miden-testing/tests/scripts/faucet.rs +++ b/crates/miden-testing/tests/scripts/faucet.rs @@ -3,7 +3,8 @@ extern crate alloc; use alloc::sync::Arc; use core::slice; -use miden_processor::crypto::RpoRandomCoin; +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{ Account, AccountId, @@ -18,30 +19,32 @@ use miden_protocol::note::{ NoteAssets, NoteAttachment, NoteId, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, }; use miden_protocol::testing::account_id::ACCOUNT_ID_PRIVATE_SENDER; -use miden_protocol::transaction::{ExecutedTransaction, OutputNote}; +use miden_protocol::transaction::{ExecutedTransaction, RawOutputNote}; use miden_protocol::{Felt, Word}; +use miden_standards::account::access::Ownable2Step; use miden_standards::account::faucets::{ BasicFungibleFaucet, - FungibleFaucetExt, NetworkFungibleFaucet, + TokenMetadata, }; use miden_standards::code_builder::CodeBuilder; use miden_standards::errors::standards::{ - 
ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED, + ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY, + ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY, ERR_SENDER_NOT_OWNER, }; -use miden_standards::note::{MintNoteInputs, WellKnownNote, create_burn_note, create_mint_note}; +use miden_standards::note::{BurnNote, MintNote, MintNoteStorage, StandardNote}; use miden_standards::testing::note::NoteBuilder; +use miden_testing::utils::create_p2id_note_exact; use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; -use crate::scripts::swap::create_p2id_note_exact; use crate::{get_note_with_fungible_asset_and_script, prove_and_verify_transaction}; // Shared test utilities for faucet tests @@ -108,7 +111,8 @@ pub fn verify_minted_output_note( faucet: &Account, params: &FaucetTestParams, ) -> anyhow::Result<()> { - let fungible_asset: Asset = FungibleAsset::new(faucet.id(), params.amount.into())?.into(); + let fungible_asset: Asset = + FungibleAsset::new(faucet.id(), params.amount.as_canonical_u64())?.into(); let output_note = executed_transaction.output_notes().get_note(0).clone(); let assets = NoteAssets::new(vec![fungible_asset])?; @@ -117,7 +121,7 @@ pub fn verify_minted_output_note( assert_eq!(output_note.id(), id); assert_eq!( output_note.metadata(), - &NoteMetadata::new(faucet.id(), params.note_type, params.tag) + &NoteMetadata::new(faucet.id(), params.note_type).with_tag(params.tag) ); Ok(()) @@ -130,7 +134,14 @@ pub fn verify_minted_output_note( #[tokio::test] async fn minting_fungible_asset_on_existing_faucet_succeeds() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let faucet = builder.add_existing_basic_faucet(Auth::BasicAuth, "TST", 200, None)?; + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 200, + None, + )?; let mut mock_chain = builder.build()?; let params = FaucetTestParams { @@ -147,12 +158,20 @@ async fn 
minting_fungible_asset_on_existing_faucet_succeeds() -> anyhow::Result< Ok(()) } +/// Tests that distribute fails when the minted amount would exceed the max supply. #[tokio::test] async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyhow::Result<()> { // CONSTRUCT AND EXECUTE TX (Failure) // -------------------------------------------------------------------------------------------- let mut builder = MockChain::builder(); - let faucet = builder.add_existing_basic_faucet(Auth::BasicAuth, "TST", 200, None)?; + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 200, + None, + )?; let mock_chain = builder.build()?; let recipient = Word::from([0, 1, 2, 3u32]); @@ -191,11 +210,7 @@ async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyho .execute() .await; - // Execute the transaction and get the witness - assert_transaction_executor_error!( - tx, - ERR_FUNGIBLE_ASSET_DISTRIBUTE_WOULD_CAUSE_MAX_SUPPLY_TO_BE_EXCEEDED - ); + assert_transaction_executor_error!(tx, ERR_FUNGIBLE_ASSET_DISTRIBUTE_AMOUNT_EXCEEDS_MAX_SUPPLY); Ok(()) } @@ -206,7 +221,13 @@ async fn faucet_contract_mint_fungible_asset_fails_exceeds_max_supply() -> anyho #[tokio::test] async fn minting_fungible_asset_on_new_faucet_succeeds() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let faucet = builder.create_new_faucet(Auth::BasicAuth, "TST", 200)?; + let faucet = builder.create_new_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 200, + )?; let mut mock_chain = builder.build()?; let params = FaucetTestParams { @@ -229,8 +250,18 @@ async fn minting_fungible_asset_on_new_faucet_succeeds() -> anyhow::Result<()> { /// Tests that burning a fungible asset on an existing faucet succeeds and proves the transaction. 
#[tokio::test] async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::Result<()> { + let max_supply = 200u32; + let token_supply = 100u32; + let mut builder = MockChain::builder(); - let faucet = builder.add_existing_basic_faucet(Auth::BasicAuth, "TST", 200, Some(100))?; + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + max_supply.into(), + Some(token_supply.into()), + )?; let fungible_asset = FungibleAsset::new(faucet.id(), 100).unwrap(); @@ -242,7 +273,7 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R # => [] call.::miden::standards::faucets::basic_fungible::burn - # => [ASSET] + # => [ASSET_VALUE] # truncate the stack dropw @@ -251,19 +282,18 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R let note = get_note_with_fungible_asset_and_script(fungible_asset, burn_note_script_code); - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mock_chain = builder.build()?; + let token_metadata = TokenMetadata::try_from(faucet.storage())?; + // Check that max_supply at the word's index 0 is 200. The remainder of the word is initialized // with the metadata of the faucet which we don't need to check. - assert_eq!( - faucet.storage().get_item(BasicFungibleFaucet::metadata_slot()).unwrap()[0], - Felt::new(200) - ); + assert_eq!(token_metadata.max_supply(), Felt::from(max_supply)); - // Check that the faucet reserved slot has been correctly initialized. + // Check that the faucet's token supply has been correctly initialized. // The already issued amount should be 100. 
- assert_eq!(faucet.get_token_issuance().unwrap(), Felt::new(100)); + assert_eq!(token_metadata.token_supply(), Felt::from(token_supply)); // CONSTRUCT AND EXECUTE TX (Success) // -------------------------------------------------------------------------------------------- @@ -275,13 +305,62 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R .await?; // Prove, serialize/deserialize and verify the transaction - prove_and_verify_transaction(executed_transaction.clone())?; + prove_and_verify_transaction(executed_transaction.clone()).await?; assert_eq!(executed_transaction.account_delta().nonce_delta(), Felt::new(1)); assert_eq!(executed_transaction.input_notes().get_note(0).id(), note.id()); Ok(()) } +/// Tests that burning a fungible asset fails when the amount exceeds the token supply. +#[tokio::test] +async fn faucet_burn_fungible_asset_fails_amount_exceeds_token_supply() -> anyhow::Result<()> { + let max_supply = 200u32; + let token_supply = 50u32; + + let mut builder = MockChain::builder(); + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + max_supply.into(), + Some(token_supply.into()), + )?; + + // Try to burn 100 tokens when only 50 have been issued + let burn_amount = 100u64; + let fungible_asset = FungibleAsset::new(faucet.id(), burn_amount).unwrap(); + + let burn_note_script_code = " + # burn the asset + begin + dropw + # => [] + + call.::miden::standards::faucets::basic_fungible::burn + # => [ASSET_VALUE] + + # truncate the stack + dropw + end + "; + + let note = get_note_with_fungible_asset_and_script(fungible_asset, burn_note_script_code); + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mock_chain = builder.build()?; + + let tx = mock_chain + .build_tx_context(faucet.id(), &[note.id()], &[])? + .build()? 
+ .execute() + .await; + + assert_transaction_executor_error!(tx, ERR_FAUCET_BURN_AMOUNT_EXCEEDS_TOKEN_SUPPLY); + Ok(()) +} + // TEST PUBLIC NOTE CREATION DURING NOTE CONSUMPTION // ================================================================================================ @@ -293,7 +372,14 @@ async fn prove_burning_fungible_asset_on_existing_faucet_succeeds() -> anyhow::R #[tokio::test] async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let faucet = builder.add_existing_basic_faucet(Auth::BasicAuth, "TST", 200, None)?; + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 200, + None, + )?; // Parameters for the PUBLIC note that will be created by the faucet let recipient_account_id = AccountId::try_from(ACCOUNT_ID_PRIVATE_SENDER)?; @@ -311,9 +397,9 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul let target_account_suffix = recipient_account_id.suffix(); let target_account_prefix = recipient_account_id.prefix().as_felt(); - // Use a length that is not a multiple of 8 (double word size) to make sure note inputs padding + // Use a length that is not a multiple of 8 (double word size) to make sure note storage padding // is correctly handled - let note_inputs = NoteInputs::new(vec![ + let note_storage = NoteStorage::new(vec![ target_account_suffix, target_account_prefix, Felt::new(0), @@ -324,12 +410,12 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul ])?; let note_recipient = - NoteRecipient::new(serial_num, output_note_script.clone(), note_inputs.clone()); + NoteRecipient::new(serial_num, output_note_script.clone(), note_storage.clone()); let output_script_root = note_recipient.script().root(); - let asset = FungibleAsset::new(faucet.id(), amount.into())?; - let metadata = NoteMetadata::new(faucet.id(), note_type, tag); + let asset = 
FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; + let metadata = NoteMetadata::new(faucet.id(), note_type).with_tag(tag); let expected_note = Note::new(NoteAssets::new(vec![asset.into()])?, metadata, note_recipient); let trigger_note_script_code = format!( @@ -337,14 +423,14 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul use miden::protocol::note begin - # Build recipient hash from SERIAL_NUM, SCRIPT_ROOT, and INPUTS_COMMITMENT + # Build recipient hash from SERIAL_NUM, SCRIPT_ROOT, and STORAGE_COMMITMENT push.{script_root} # => [SCRIPT_ROOT] push.{serial_num} # => [SERIAL_NUM, SCRIPT_ROOT] - # Store note inputs in memory + # Store note storage in memory push.{input0} mem_store.0 push.{input1} mem_store.1 push.{input2} mem_store.2 @@ -354,7 +440,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul push.{input6} mem_store.6 push.7 push.0 - # => [inputs_ptr, num_inputs = 7, SERIAL_NUM, SCRIPT_ROOT] + # => [storage_ptr, num_storage_items = 7, SERIAL_NUM, SCRIPT_ROOT] exec.note::build_recipient # => [RECIPIENT] @@ -373,13 +459,13 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul end ", note_type = note_type as u8, - input0 = note_inputs.values()[0], - input1 = note_inputs.values()[1], - input2 = note_inputs.values()[2], - input3 = note_inputs.values()[3], - input4 = note_inputs.values()[4], - input5 = note_inputs.values()[5], - input6 = note_inputs.values()[6], + input0 = note_storage.items()[0], + input1 = note_storage.items()[1], + input2 = note_storage.items()[2], + input3 = note_storage.items()[3], + input4 = note_storage.items()[4], + input5 = note_storage.items()[5], + input6 = note_storage.items()[6], script_root = output_script_root, serial_num = serial_num, tag = u32::from(tag), @@ -395,7 +481,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul .code(trigger_note_script_code) .build()?; - 
builder.add_output_note(OutputNote::Full(trigger_note.clone())); + builder.add_output_note(RawOutputNote::Full(trigger_note.clone())); let mock_chain = builder.build()?; // Execute the transaction - this should fetch the output note script from the data store. @@ -415,7 +501,7 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul // Extract the full note from the OutputNote enum let full_note = match output_note { - OutputNote::Full(note) => note, + RawOutputNote::Full(note) => note, _ => panic!("Expected OutputNote::Full variant"), }; @@ -423,23 +509,23 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul assert_eq!(full_note.metadata().note_type(), NoteType::Public); // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let expected_asset_obj = Asset::from(expected_asset); assert!(full_note.assets().iter().any(|asset| asset == &expected_asset_obj)); // Verify the note was created by the faucet assert_eq!(full_note.metadata().sender(), faucet.id()); - // Verify the note inputs commitment matches the expected commitment + // Verify the note storage commitment matches the expected commitment assert_eq!( - full_note.recipient().inputs().commitment(), - note_inputs.commitment(), - "Output note inputs commitment should match expected inputs commitment" + full_note.recipient().storage().commitment(), + note_storage.commitment(), + "Output note storage commitment should match expected storage commitment" ); assert_eq!( - full_note.recipient().inputs().num_values(), - note_inputs.num_values(), - "Output note inputs length should match expected inputs length" + full_note.recipient().storage().num_items(), + note_storage.num_items(), + "Output note number of storage items should match expected number of storage items" ); // Verify the output note ID 
matches the expected note ID @@ -457,6 +543,9 @@ async fn test_public_note_creation_with_script_from_datastore() -> anyhow::Resul /// Tests minting on network faucet #[tokio::test] async fn network_faucet_mint() -> anyhow::Result<()> { + let max_supply = 1000u64; + let token_supply = 50u64; + let mut builder = MockChain::builder(); let faucet_owner_account_id = AccountId::dummy( @@ -466,35 +555,42 @@ async fn network_faucet_mint() -> anyhow::Result<()> { AccountStorageMode::Private, ); - let faucet = - builder.add_existing_network_faucet("NET", 1000, faucet_owner_account_id, Some(50))?; + let faucet = builder.add_existing_network_faucet( + "NET", + max_supply, + faucet_owner_account_id, + Some(token_supply), + )?; // Create a target account to consume the minted note let mut target_account = builder.add_existing_wallet(Auth::IncrNonce)?; - // The Network Fungible Faucet component is added as the second component after auth, so its - // storage slot offset will be 2. Check that max_supply at the word's index 0 is 200. + // Check the Network Fungible Faucet's max supply. + let actual_max_supply = TokenMetadata::try_from(faucet.storage())?.max_supply(); + assert_eq!(actual_max_supply.as_canonical_u64(), max_supply); + + // Check that the creator account ID is stored in the ownership slot. 
+ // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + let stored_owner_id = faucet.storage().get_item(Ownable2Step::slot_name()).unwrap(); assert_eq!( - faucet.storage().get_item(NetworkFungibleFaucet::metadata_slot()).unwrap()[0], - Felt::new(1000) + stored_owner_id[0], + Felt::new(faucet_owner_account_id.suffix().as_canonical_u64()) ); + assert_eq!(stored_owner_id[1], faucet_owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner_id[2], Felt::new(0)); // no nominated owner + assert_eq!(stored_owner_id[3], Felt::new(0)); - // Check that the creator account ID is stored in slot 2 (second storage slot of the component) - // The owner_account_id is stored as Word [0, 0, suffix, prefix] - let stored_owner_id = - faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot()).unwrap(); - assert_eq!(stored_owner_id[3], faucet_owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner_id[2], Felt::new(faucet_owner_account_id.suffix().as_int())); - - // Check that the faucet reserved slot has been correctly initialized. + // Check that the faucet's token supply has been correctly initialized. // The already issued amount should be 50. 
- assert_eq!(faucet.get_token_issuance().unwrap(), Felt::new(50)); + let initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); + assert_eq!(initial_token_supply.as_canonical_u64(), token_supply); // CREATE MINT NOTE USING STANDARD NOTE // -------------------------------------------------------------------------------------------- let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let mint_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); let serial_num = Word::default(); let output_note_tag = NoteTag::with_account_target(target_account.id()); @@ -509,19 +605,19 @@ async fn network_faucet_mint() -> anyhow::Result<()> { let recipient = p2id_mint_output_note.recipient().digest(); // Create the MINT note using the helper function - let mint_inputs = MintNoteInputs::new_private(recipient, amount, output_note_tag.into()); + let mint_storage = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); - let mint_note = create_mint_note( + let mint_note = MintNote::create( faucet.id(), faucet_owner_account_id, - mint_inputs, + mint_storage, NoteAttachment::default(), &mut rng, )?; // Add the MINT note to the mock chain - builder.add_output_note(OutputNote::Full(mint_note.clone())); + builder.add_output_note(RawOutputNote::Full(mint_note.clone())); let mut mock_chain = builder.build()?; // EXECUTE MINT NOTE AGAINST NETWORK FAUCET @@ -534,7 +630,7 @@ async fn network_faucet_mint() -> anyhow::Result<()> { let output_note = executed_transaction.output_notes().get_note(0); // Verify the output note contains the minted fungible asset - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let assets = NoteAssets::new(vec![expected_asset.into()])?; let 
expected_note_id = NoteId::new(recipient, assets.commitment()); @@ -583,7 +679,7 @@ async fn test_network_faucet_owner_can_mint() -> anyhow::Result<()> { let mock_chain = builder.build()?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -595,10 +691,10 @@ async fn test_network_faucet_owner_can_mint() -> anyhow::Result<()> { )?; let recipient = p2id_note.recipient().digest(); - let mint_inputs = MintNoteInputs::new_private(recipient, amount, output_note_tag.into()); + let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); - let mint_note = create_mint_note( + let mint_note = MintNote::create( faucet.id(), owner_account_id, mint_inputs, @@ -638,7 +734,7 @@ async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { let mock_chain = builder.build()?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -650,11 +746,11 @@ async fn test_network_faucet_non_owner_cannot_mint() -> anyhow::Result<()> { )?; let recipient = p2id_note.recipient().digest(); - let mint_inputs = MintNoteInputs::new_private(recipient, amount, output_note_tag.into()); + let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); // Create mint note from NON-OWNER let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); - let mint_note = create_mint_note( + let mint_note = MintNote::create( faucet.id(), 
non_owner_account_id, mint_inputs, @@ -688,18 +784,20 @@ async fn test_network_faucet_owner_storage() -> anyhow::Result<()> { let _mock_chain = builder.build()?; // Verify owner is stored correctly - let stored_owner = faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; + let stored_owner = faucet.storage().get_item(Ownable2Step::slot_name())?; - // Storage format: [0, 0, suffix, prefix] - assert_eq!(stored_owner[3], owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner[2], Felt::new(owner_account_id.suffix().as_int())); - assert_eq!(stored_owner[1], Felt::new(0)); - assert_eq!(stored_owner[0], Felt::new(0)); + // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + assert_eq!(stored_owner[0], Felt::new(owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner[1], owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner[2], Felt::new(0)); // no nominated owner + assert_eq!(stored_owner[3], Felt::new(0)); Ok(()) } -/// Tests that transfer_ownership updates the owner correctly. +/// Tests that two-step transfer_ownership updates the owner correctly. +/// Step 1: Owner nominates a new owner via transfer_ownership. +/// Step 2: Nominated owner accepts via accept_ownership. 
#[tokio::test] async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let mut builder = MockChain::builder(); @@ -724,7 +822,7 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into())?.into(); + let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?.into(); let output_note_tag = NoteTag::with_account_target(target_account.id()); let p2id_note = create_p2id_note_exact( @@ -737,10 +835,10 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let recipient = p2id_note.recipient().digest(); // Sanity Check: Prove that the initial owner can mint assets - let mint_inputs = MintNoteInputs::new_private(recipient, amount, output_note_tag.into()); + let mint_inputs = MintNoteStorage::new_private(recipient, amount, output_note_tag.into()); let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); - let mint_note = create_mint_note( + let mint_note = MintNote::create( faucet.id(), initial_owner_account_id, mint_inputs.clone(), @@ -748,26 +846,24 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { &mut rng, )?; - // Action: Create transfer_ownership note script + // Step 1: Create transfer_ownership note script to nominate new owner let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = 
Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); let source_manager = Arc::new(DefaultSourceManager::default()); - let transfer_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; // Create the transfer note and add it to the builder so it exists on-chain let mut rng = RpoRandomCoin::new([Felt::from(200u32); 4].into()); @@ -779,7 +875,7 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { .build()?; // Add the transfer note to the builder before building the chain - builder.add_output_note(OutputNote::Full(transfer_note.clone())); + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); let mut mock_chain = builder.build()?; // Prove the block to make the transfer note exist on-chain @@ -790,10 +886,9 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { let executed_transaction = tx_context.execute().await?; assert_eq!(executed_transaction.output_notes().num_notes(), 1); - // Action: Execute transfer_ownership via note script + // Execute transfer_ownership via note script (nominates new owner) let tx_context = mock_chain .build_tx_context(faucet.id(), &[transfer_note.id()], &[])? 
- .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; @@ -802,48 +897,44 @@ async fn test_network_faucet_transfer_ownership() -> anyhow::Result<()> { mock_chain.add_pending_executed_transaction(&executed_transaction)?; mock_chain.prove_next_block()?; - // Apply the delta to the faucet account to reflect the ownership change let mut updated_faucet = faucet.clone(); updated_faucet.apply_delta(executed_transaction.account_delta())?; - // Validation 1: Try to mint using the old owner - should fail - let mut rng = RpoRandomCoin::new([Felt::from(300u32); 4].into()); - let mint_note_old_owner = create_mint_note( - updated_faucet.id(), - initial_owner_account_id, - mint_inputs.clone(), - NoteAttachment::default(), - &mut rng, - )?; + // Step 2: Accept ownership as the nominated owner + let accept_note_script_code = r#" + use miden::standards::access::ownable2step - // Use the note as an unauthenticated note (full note object) - it will be created in this - // transaction - let tx_context = mock_chain - .build_tx_context(updated_faucet.id(), &[], &[mint_note_old_owner])? 
- .build()?; - let result = tx_context.execute().await; - - // The distribute function uses ERR_ONLY_OWNER, which is "note sender is not the owner" - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + begin + repeat.16 push.0 end + call.ownable2step::accept_ownership + dropw dropw dropw dropw + end + "#; - // Validation 2: Try to mint using the new owner - should succeed let mut rng = RpoRandomCoin::new([Felt::from(400u32); 4].into()); - let mint_note_new_owner = create_mint_note( - updated_faucet.id(), - new_owner_account_id, - mint_inputs, - NoteAttachment::default(), - &mut rng, - )?; + let accept_note = NoteBuilder::new(new_owner_account_id, &mut rng) + .note_type(NoteType::Private) + .tag(NoteTag::default().into()) + .serial_number(Word::from([55, 66, 77, 88u32])) + .code(accept_note_script_code) + .build()?; let tx_context = mock_chain - .build_tx_context(updated_faucet.id(), &[], &[mint_note_new_owner])? + .build_tx_context(updated_faucet.clone(), &[], slice::from_ref(&accept_note))? 
+ .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; - // Verify that minting succeeded - assert_eq!(executed_transaction.output_notes().num_notes(), 1); + let mut final_faucet = updated_faucet.clone(); + final_faucet.apply_delta(executed_transaction.account_delta())?; + + // Verify that owner changed to new_owner and nominated was cleared + // Word: [owner_suffix, owner_prefix, nominated_suffix, nominated_prefix] + let stored_owner = final_faucet.storage().get_item(Ownable2Step::slot_name())?; + assert_eq!(stored_owner[0], Felt::new(new_owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner[1], new_owner_account_id.prefix().as_felt()); + assert_eq!(stored_owner[2], Felt::new(0)); // nominated cleared + assert_eq!(stored_owner[3], Felt::new(0)); Ok(()) } @@ -880,23 +971,21 @@ async fn test_network_faucet_only_owner_can_transfer() -> anyhow::Result<()> { // Create transfer ownership note script let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); let source_manager = Arc::new(DefaultSourceManager::default()); - let transfer_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; // Create a note from NON-OWNER that tries to transfer ownership let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); @@ -909,14 +998,11 @@ async fn test_network_faucet_only_owner_can_transfer() -> 
anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(faucet.id(), &[], &[transfer_note])? - .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let result = tx_context.execute().await; - // Verify that the transaction failed with ERR_ONLY_OWNER - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); Ok(()) } @@ -943,46 +1029,40 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { let faucet = builder.add_existing_network_faucet("NET", 1000, owner_account_id, Some(50))?; // Check stored value before renouncing - let stored_owner_before = - faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; - assert_eq!(stored_owner_before[3], owner_account_id.prefix().as_felt()); - assert_eq!(stored_owner_before[2], Felt::new(owner_account_id.suffix().as_int())); + let stored_owner_before = faucet.storage().get_item(Ownable2Step::slot_name())?; + assert_eq!(stored_owner_before[0], Felt::new(owner_account_id.suffix().as_canonical_u64())); + assert_eq!(stored_owner_before[1], owner_account_id.prefix().as_felt()); // Create renounce_ownership note script let renounce_note_script_code = r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin repeat.16 push.0 end - call.network_faucet::renounce_ownership + call.ownable2step::renounce_ownership dropw dropw dropw dropw end "#; let source_manager = Arc::new(DefaultSourceManager::default()); - let renounce_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(renounce_note_script_code)?; // Create transfer note script (will be used after renounce) let transfer_note_script_code = format!( r#" - use miden::standards::faucets::network_fungible->network_faucet + use miden::standards::access::ownable2step begin 
repeat.14 push.0 end - push.{new_owner_suffix} push.{new_owner_prefix} - call.network_faucet::transfer_ownership + push.{new_owner_suffix} + call.ownable2step::transfer_ownership dropw dropw dropw dropw end "#, new_owner_prefix = new_owner_account_id.prefix().as_felt(), - new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_int()), + new_owner_suffix = Felt::new(new_owner_account_id.suffix().as_canonical_u64()), ); - let transfer_note_script = CodeBuilder::with_source_manager(source_manager.clone()) - .compile_note_script(transfer_note_script_code.clone())?; - let mut rng = RpoRandomCoin::new([Felt::from(200u32); 4].into()); let renounce_note = NoteBuilder::new(owner_account_id, &mut rng) .note_type(NoteType::Private) @@ -999,15 +1079,14 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { .code(transfer_note_script_code.clone()) .build()?; - builder.add_output_note(OutputNote::Full(renounce_note.clone())); - builder.add_output_note(OutputNote::Full(transfer_note.clone())); + builder.add_output_note(RawOutputNote::Full(renounce_note.clone())); + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_next_block()?; // Execute renounce_ownership let tx_context = mock_chain .build_tx_context(faucet.id(), &[renounce_note.id()], &[])? 
- .add_note_script(renounce_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let executed_transaction = tx_context.execute().await?; @@ -1019,27 +1098,22 @@ async fn test_network_faucet_renounce_ownership() -> anyhow::Result<()> { updated_faucet.apply_delta(executed_transaction.account_delta())?; // Check stored value after renouncing - should be zero - let stored_owner_after = - updated_faucet.storage().get_item(NetworkFungibleFaucet::owner_config_slot())?; + let stored_owner_after = updated_faucet.storage().get_item(Ownable2Step::slot_name())?; assert_eq!(stored_owner_after[0], Felt::new(0)); assert_eq!(stored_owner_after[1], Felt::new(0)); assert_eq!(stored_owner_after[2], Felt::new(0)); assert_eq!(stored_owner_after[3], Felt::new(0)); // Try to transfer ownership - should fail because there's no owner - // The transfer note was already added to the builder, so we need to prove another block - // to make it available on-chain after the renounce transaction mock_chain.prove_next_block()?; let tx_context = mock_chain .build_tx_context(updated_faucet.id(), &[transfer_note.id()], &[])? 
- .add_note_script(transfer_note_script.clone()) .with_source_manager(source_manager.clone()) .build()?; let result = tx_context.execute().await; - let expected_error = ERR_SENDER_NOT_OWNER; - assert_transaction_executor_error!(result, expected_error); + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); Ok(()) } @@ -1081,7 +1155,7 @@ async fn network_faucet_burn() -> anyhow::Result<()> { // CREATE BURN NOTE // -------------------------------------------------------------------------------------------- let mut rng = RpoRandomCoin::new([Felt::from(99u32); 4].into()); - let note = create_burn_note( + let note = BurnNote::create( faucet_owner_account_id, faucet.id(), fungible_asset.into(), @@ -1089,13 +1163,13 @@ async fn network_faucet_burn() -> anyhow::Result<()> { &mut rng, )?; - builder.add_output_note(OutputNote::Full(note.clone())); + builder.add_output_note(RawOutputNote::Full(note.clone())); let mut mock_chain = builder.build()?; mock_chain.prove_next_block()?; // Check the initial token issuance before burning - let initial_issuance = faucet.get_token_issuance().unwrap(); - assert_eq!(initial_issuance, Felt::new(100)); + let initial_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); + assert_eq!(initial_token_supply, Felt::new(100)); // EXECUTE BURN NOTE AGAINST NETWORK FAUCET // -------------------------------------------------------------------------------------------- @@ -1111,8 +1185,11 @@ async fn network_faucet_burn() -> anyhow::Result<()> { // Apply the delta to the faucet account and verify the token issuance decreased faucet.apply_delta(executed_transaction.account_delta())?; - let final_issuance = faucet.get_token_issuance().unwrap(); - assert_eq!(final_issuance, Felt::new(initial_issuance.as_int() - burn_amount)); + let final_token_supply = TokenMetadata::try_from(faucet.storage())?.token_supply(); + assert_eq!( + final_token_supply, + Felt::new(initial_token_supply.as_canonical_u64() - burn_amount) + ); Ok(()) 
} @@ -1141,7 +1218,8 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow let target_account = builder.add_existing_wallet(Auth::IncrNonce)?; let amount = Felt::new(75); - let mint_asset: Asset = FungibleAsset::new(faucet.id(), amount.into()).unwrap().into(); + let mint_asset: Asset = + FungibleAsset::new(faucet.id(), amount.as_canonical_u64()).unwrap().into(); let serial_num = Word::from([1, 2, 3, 4u32]); // Create the expected P2ID output note @@ -1155,45 +1233,36 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow .unwrap(); // Create MINT note based on note type - let mint_inputs = match note_type { + let mint_storage = match note_type { NoteType::Private => { let output_note_tag = NoteTag::with_account_target(target_account.id()); let recipient = p2id_mint_output_note.recipient().digest(); - MintNoteInputs::new_private(recipient, amount, output_note_tag.into()) + MintNoteStorage::new_private(recipient, amount, output_note_tag.into()) }, NoteType::Public => { let output_note_tag = NoteTag::with_account_target(target_account.id()); - let p2id_script = WellKnownNote::P2ID.script(); - let p2id_inputs = + let p2id_script = StandardNote::P2ID.script(); + let p2id_storage = vec![target_account.id().suffix(), target_account.id().prefix().as_felt()]; - let note_inputs = NoteInputs::new(p2id_inputs)?; - let recipient = NoteRecipient::new(serial_num, p2id_script, note_inputs); - MintNoteInputs::new_public(recipient, amount, output_note_tag.into())? + let note_storage = NoteStorage::new(p2id_storage)?; + let recipient = NoteRecipient::new(serial_num, p2id_script, note_storage); + MintNoteStorage::new_public(recipient, amount, output_note_tag.into())? 
}, - NoteType::Encrypted => unreachable!("Encrypted note type not used in this test"), }; let mut rng = RpoRandomCoin::new([Felt::from(42u32); 4].into()); - let mint_note = create_mint_note( + let mint_note = MintNote::create( faucet.id(), faucet_owner_account_id, - mint_inputs.clone(), + mint_storage.clone(), NoteAttachment::default(), &mut rng, )?; - builder.add_output_note(OutputNote::Full(mint_note.clone())); + builder.add_output_note(RawOutputNote::Full(mint_note.clone())); let mut mock_chain = builder.build()?; - let mut tx_context_builder = - mock_chain.build_tx_context(faucet.id(), &[mint_note.id()], &[])?; - - if note_type == NoteType::Public { - let p2id_script = WellKnownNote::P2ID.script(); - tx_context_builder = tx_context_builder.add_note_script(p2id_script); - } - - let tx_context = tx_context_builder.build()?; + let tx_context = mock_chain.build_tx_context(faucet.id(), &[mint_note.id()], &[])?.build()?; let executed_transaction = tx_context.execute().await?; assert_eq!(executed_transaction.output_notes().num_notes(), 1); @@ -1209,13 +1278,12 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow NoteType::Public => { // For public notes, we get OutputNote::Full and can compare key properties let created_note = match output_note { - OutputNote::Full(note) => note, - _ => panic!("Expected OutputNote::Full variant for public note"), + RawOutputNote::Full(note) => note, + _ => panic!("Expected OutputNote::Full variant"), }; assert_eq!(created_note, &p2id_mint_output_note); }, - NoteType::Encrypted => unreachable!("Encrypted note type not used in this test"), } mock_chain.add_pending_executed_transaction(&executed_transaction)?; @@ -1230,9 +1298,100 @@ async fn test_mint_note_output_note_types(#[case] note_type: NoteType) -> anyhow target_account_mut.apply_delta(consume_executed_transaction.account_delta())?; - let expected_asset = FungibleAsset::new(faucet.id(), amount.into())?; + let expected_asset = 
FungibleAsset::new(faucet.id(), amount.as_canonical_u64())?; let balance = target_account_mut.vault().get_balance(faucet.id())?; assert_eq!(balance, expected_asset.amount()); Ok(()) } + +/// Tests that calling distribute multiple times in a single transaction produces output notes +/// with the correct individual amounts, not the cumulative vault totals. +#[tokio::test] +async fn multiple_distributes_in_single_tx_produce_correct_amounts() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + let faucet = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "TST", + 300, + None, + )?; + let mock_chain = builder.build()?; + + let recipient_1 = Word::from([0, 1, 2, 3u32]); + let recipient_2 = Word::from([4, 5, 6, 7u32]); + let tag = NoteTag::default(); + let note_type = NoteType::Private; + let amount_1: u64 = 100; + let amount_2: u64 = 50; + + let tx_script_code = format!( + " + begin + # --- First distribute: mint {amount_1} tokens to recipient_1 --- + padw padw push.0 + + push.{recipient_1} + push.{note_type} + push.{tag} + push.{amount_1} + # => [amount_1, tag, note_type, RECIPIENT_1, pad(9)] + + call.::miden::standards::faucets::basic_fungible::distribute + # => [note_idx, pad(15)] + + # clean up the stack before the second call + dropw dropw dropw dropw + + # --- Second distribute: mint {amount_2} tokens to recipient_2 --- + padw padw push.0 + + push.{recipient_2} + push.{note_type} + push.{tag} + push.{amount_2} + # => [amount_2, tag, note_type, RECIPIENT_2, pad(9)] + + call.::miden::standards::faucets::basic_fungible::distribute + # => [note_idx, pad(15)] + + # truncate the stack + dropw dropw dropw dropw + end + ", + note_type = note_type as u8, + tag = u32::from(tag), + ); + + let source_manager = Arc::new(DefaultSourceManager::default()); + let tx_script = CodeBuilder::with_source_manager(source_manager.clone()) + .compile_tx_script(tx_script_code)?; + let tx_context = mock_chain + 
.build_tx_context(faucet.clone(), &[], &[])? + .tx_script(tx_script) + .with_source_manager(source_manager) + .build()?; + + let executed_transaction = tx_context.execute().await?; + + // Verify two output notes were created + assert_eq!(executed_transaction.output_notes().num_notes(), 2); + + // Verify first note has exactly amount_1 tokens. + let expected_asset_1: Asset = FungibleAsset::new(faucet.id(), amount_1)?.into(); + let output_note_1 = executed_transaction.output_notes().get_note(0); + let assets_1 = NoteAssets::new(vec![expected_asset_1])?; + let expected_id_1 = NoteId::new(recipient_1, assets_1.commitment()); + assert_eq!(output_note_1.id(), expected_id_1); + + // Verify second note has exactly amount_2 tokens. + let expected_asset_2: Asset = FungibleAsset::new(faucet.id(), amount_2)?.into(); + let output_note_2 = executed_transaction.output_notes().get_note(1); + let assets_2 = NoteAssets::new(vec![expected_asset_2])?; + let expected_id_2 = NoteId::new(recipient_2, assets_2.commitment()); + assert_eq!(output_note_2.id(), expected_id_2); + + Ok(()) +} diff --git a/crates/miden-testing/tests/scripts/fee.rs b/crates/miden-testing/tests/scripts/fee.rs index 3fd41d7b78..144f445d08 100644 --- a/crates/miden-testing/tests/scripts/fee.rs +++ b/crates/miden-testing/tests/scripts/fee.rs @@ -45,7 +45,7 @@ async fn prove_account_creation_with_fees() -> anyhow::Result<()> { // account commitment should not be the empty word assert_ne!(tx.account_delta().to_commitment(), Word::empty()); - prove_and_verify_transaction(tx)?; + prove_and_verify_transaction(tx).await?; Ok(()) } diff --git a/crates/miden-testing/tests/scripts/mod.rs b/crates/miden-testing/tests/scripts/mod.rs index 58bf4152ad..8d15402744 100644 --- a/crates/miden-testing/tests/scripts/mod.rs +++ b/crates/miden-testing/tests/scripts/mod.rs @@ -1,5 +1,6 @@ mod faucet; mod fee; +mod ownable2step; mod p2id; mod p2ide; mod send_note; diff --git a/crates/miden-testing/tests/scripts/ownable2step.rs 
b/crates/miden-testing/tests/scripts/ownable2step.rs new file mode 100644 index 0000000000..df34b7de0d --- /dev/null +++ b/crates/miden-testing/tests/scripts/ownable2step.rs @@ -0,0 +1,491 @@ +extern crate alloc; + +use alloc::sync::Arc; + +use miden_processor::crypto::random::RpoRandomCoin; +use miden_protocol::Felt; +use miden_protocol::account::component::AccountComponentMetadata; +use miden_protocol::account::{ + Account, + AccountBuilder, + AccountComponent, + AccountId, + AccountStorageMode, + AccountType, + StorageSlot, +}; +use miden_protocol::assembly::DefaultSourceManager; +use miden_protocol::assembly::debuginfo::SourceManagerSync; +use miden_protocol::note::Note; +use miden_protocol::testing::account_id::AccountIdBuilder; +use miden_protocol::transaction::RawOutputNote; +use miden_standards::account::access::Ownable2Step; +use miden_standards::code_builder::CodeBuilder; +use miden_standards::errors::standards::{ + ERR_NO_NOMINATED_OWNER, + ERR_SENDER_NOT_NOMINATED_OWNER, + ERR_SENDER_NOT_OWNER, +}; +use miden_standards::testing::note::NoteBuilder; +use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; + +// HELPERS +// ================================================================================================ + +fn create_ownable_account( + owner: AccountId, + initial_storage: Vec, +) -> anyhow::Result { + let component_code = r#" + use miden::standards::access::ownable2step + pub use ownable2step::get_owner + pub use ownable2step::get_nominated_owner + pub use ownable2step::transfer_ownership + pub use ownable2step::accept_ownership + pub use ownable2step::renounce_ownership + "#; + let component_code_obj = + CodeBuilder::default().compile_component_code("test::ownable", component_code)?; + + let mut storage_slots = initial_storage; + storage_slots.push(Ownable2Step::new(owner).to_storage_slot()); + + let account = AccountBuilder::new([1; 32]) + .storage_mode(AccountStorageMode::Public) + .with_auth_component(Auth::IncrNonce) + 
.with_component({ + let metadata = AccountComponentMetadata::new("test::ownable", AccountType::all()); + AccountComponent::new(component_code_obj, storage_slots, metadata)? + }) + .build_existing()?; + Ok(account) +} + +fn get_owner_from_storage(account: &Account) -> anyhow::Result> { + let ownable = Ownable2Step::try_from_storage(account.storage())?; + Ok(ownable.owner()) +} + +fn get_nominated_owner_from_storage(account: &Account) -> anyhow::Result> { + let ownable = Ownable2Step::try_from_storage(account.storage())?; + Ok(ownable.nominated_owner()) +} + +fn create_transfer_note( + sender: AccountId, + new_owner: AccountId, + rng: &mut RpoRandomCoin, + source_manager: Arc, +) -> anyhow::Result { + let script = format!( + r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.14 push.0 end + push.{new_owner_prefix} + push.{new_owner_suffix} + call.test_account::transfer_ownership + dropw dropw dropw dropw + end + "#, + new_owner_prefix = new_owner.prefix().as_felt(), + new_owner_suffix = Felt::new(new_owner.suffix().as_canonical_u64()), + ); + + let note = NoteBuilder::new(sender, rng) + .source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +fn create_accept_note( + sender: AccountId, + rng: &mut RpoRandomCoin, + source_manager: Arc, +) -> anyhow::Result { + let script = r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.16 push.0 end + call.test_account::accept_ownership + dropw dropw dropw dropw + end + "#; + + let note = NoteBuilder::new(sender, rng) + .source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +fn create_renounce_note( + sender: AccountId, + rng: &mut RpoRandomCoin, + source_manager: Arc, +) -> anyhow::Result { + let script = r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.16 push.0 end + call.test_account::renounce_ownership + dropw dropw dropw dropw + end + "#; + + let note = NoteBuilder::new(sender, rng) + 
.source_manager(source_manager) + .code(script) + .build()?; + + Ok(note) +} + +// TESTS +// ================================================================================================ + +#[tokio::test] +async fn test_transfer_ownership_only_owner() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let non_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([3; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let note = create_transfer_note(non_owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!(result, ERR_SENDER_NOT_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_complete_ownership_transfer() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer ownership + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Verify intermediate state: owner unchanged, nominated set + assert_eq!(get_owner_from_storage(&updated)?, Some(owner)); + assert_eq!(get_nominated_owner_from_storage(&updated)?, Some(new_owner)); + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: accept ownership + let mut rng2 = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(new_owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&accept_note))? 
+ .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_owner_from_storage(&final_account)?, Some(new_owner)); + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + Ok(()) +} + +#[tokio::test] +async fn test_accept_ownership_only_nominated_owner() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + let wrong = AccountIdBuilder::new().build_with_seed([3; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: wrong account tries accept + let mut rng2 = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(wrong, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&accept_note))? 
+ .with_source_manager(source_manager) + .build()?; + let result = tx2.execute().await; + + assert_transaction_executor_error!(result, ERR_SENDER_NOT_NOMINATED_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_accept_ownership_no_nominated() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let accept_note = create_accept_note(owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(accept_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[accept_note.id()], &[])? + .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!(result, ERR_NO_NOMINATED_OWNER); + Ok(()) +} + +#[tokio::test] +async fn test_cancel_transfer() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? 
+ .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: cancel by transferring to self (owner) + let mut rng2 = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let cancel_note = create_transfer_note(owner, owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&cancel_note))? + .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + assert_eq!(get_owner_from_storage(&final_account)?, Some(owner)); + Ok(()) +} + +/// Tests that an owner can transfer to themselves when no nominated transfer exists. +/// This is a no-op but should succeed without errors. +#[tokio::test] +async fn test_transfer_to_self_no_nominated() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let note = create_transfer_note(owner, owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + assert_eq!(get_owner_from_storage(&updated)?, Some(owner)); + assert_eq!(get_nominated_owner_from_storage(&updated)?, None); + Ok(()) +} + +#[tokio::test] +async fn test_renounce_ownership() -> anyhow::Result<()> { + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + let new_owner = AccountIdBuilder::new().build_with_seed([2; 32]); + + let account = create_ownable_account(owner, vec![])?; + + // Step 1: transfer (to have nominated) + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let transfer_note = + create_transfer_note(owner, new_owner, &mut rng, Arc::clone(&source_manager))?; + + builder.add_output_note(RawOutputNote::Full(transfer_note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[transfer_note.id()], &[])? + .with_source_manager(Arc::clone(&source_manager)) + .build()?; + let executed = tx.execute().await?; + + let mut updated = account.clone(); + updated.apply_delta(executed.account_delta())?; + + // Commit step 1 to the chain + mock_chain.add_pending_executed_transaction(&executed)?; + mock_chain.prove_next_block()?; + + // Step 2: renounce + let mut rng2 = RpoRandomCoin::new([Felt::from(200u32); 4].into()); + let renounce_note = create_renounce_note(owner, &mut rng2, Arc::clone(&source_manager))?; + + let tx2 = mock_chain + .build_tx_context(updated.clone(), &[], std::slice::from_ref(&renounce_note))? 
+ .with_source_manager(source_manager) + .build()?; + let executed2 = tx2.execute().await?; + + let mut final_account = updated.clone(); + final_account.apply_delta(executed2.account_delta())?; + + assert_eq!(get_owner_from_storage(&final_account)?, None); + assert_eq!(get_nominated_owner_from_storage(&final_account)?, None); + Ok(()) +} + +/// Tests that transfer_ownership fails when the new owner account ID is invalid. +/// An invalid account ID has its suffix's lower 8 bits set to a non-zero value. +#[tokio::test] +async fn test_transfer_ownership_fails_with_invalid_account_id() -> anyhow::Result<()> { + use miden_protocol::errors::protocol::ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO; + + let owner = AccountIdBuilder::new().build_with_seed([1; 32]); + + let account = create_ownable_account(owner, vec![])?; + let mut builder = MockChain::builder(); + builder.add_account(account.clone())?; + + let invalid_prefix = owner.prefix().as_felt(); + let invalid_suffix = Felt::new(1); + + let script = format!( + r#" + use miden::standards::access::ownable2step->test_account + begin + repeat.14 push.0 end + push.{invalid_suffix} + push.{invalid_prefix} + call.test_account::transfer_ownership + dropw dropw dropw dropw + end + "#, + ); + + let source_manager: Arc = Arc::new(DefaultSourceManager::default()); + let mut rng = RpoRandomCoin::new([Felt::from(100u32); 4].into()); + let note = NoteBuilder::new(owner, &mut rng) + .source_manager(Arc::clone(&source_manager)) + .code(script) + .build()?; + + builder.add_output_note(RawOutputNote::Full(note.clone())); + let mut mock_chain = builder.build()?; + mock_chain.prove_next_block()?; + + let tx = mock_chain + .build_tx_context(account.id(), &[note.id()], &[])? 
+ .with_source_manager(source_manager) + .build()?; + let result = tx.execute().await; + + assert_transaction_executor_error!( + result, + ERR_ACCOUNT_ID_SUFFIX_LEAST_SIGNIFICANT_BYTE_MUST_BE_ZERO + ); + Ok(()) +} diff --git a/crates/miden-testing/tests/scripts/p2id.rs b/crates/miden-testing/tests/scripts/p2id.rs index 7d5e438a2a..94543e7a8e 100644 --- a/crates/miden-testing/tests/scripts/p2id.rs +++ b/crates/miden-testing/tests/scripts/p2id.rs @@ -1,7 +1,8 @@ use miden_protocol::account::Account; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::{Asset, AssetVault, FungibleAsset}; use miden_protocol::crypto::rand::RpoRandomCoin; -use miden_protocol::note::{NoteAttachment, NoteType}; +use miden_protocol::note::{NoteAttachment, NoteTag, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_2, @@ -9,11 +10,11 @@ use miden_protocol::testing::account_id::{ ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2, ACCOUNT_ID_SENDER, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::{Felt, Word}; use miden_standards::code_builder::CodeBuilder; use miden_standards::errors::standards::ERR_P2ID_TARGET_ACCT_MISMATCH; -use miden_standards::note::create_p2id_note; +use miden_standards::note::{P2idNote, P2idNoteStorage}; use miden_testing::{Auth, MockChain, assert_transaction_executor_error}; use crate::prove_and_verify_transaction; @@ -30,9 +31,15 @@ async fn p2id_script_multiple_assets() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create accounts - let sender_account = builder.create_new_wallet(Auth::BasicAuth)?; - let target_account = builder.add_existing_wallet(Auth::BasicAuth)?; - let malicious_account = builder.add_existing_wallet(Auth::BasicAuth)?; + let sender_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = 
builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let malicious_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder.add_p2id_note( @@ -63,8 +70,8 @@ async fn p2id_script_multiple_assets() -> anyhow::Result<()> { ); assert_eq!( - executed_transaction.final_account().commitment(), - target_account_after.commitment() + executed_transaction.final_account().to_commitment(), + target_account_after.to_commitment() ); // CONSTRUCT AND EXECUTE TX (Failure) @@ -92,8 +99,12 @@ async fn prove_consume_note_with_new_account() -> anyhow::Result<()> { let mut builder = MockChain::builder(); // Create accounts - let sender_account = builder.add_existing_wallet(Auth::BasicAuth)?; - let target_account = builder.create_new_wallet(Auth::BasicAuth)?; + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = builder.create_new_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; // Create the note let note = builder.add_p2id_note( @@ -125,10 +136,10 @@ async fn prove_consume_note_with_new_account() -> anyhow::Result<()> { ); assert_eq!( - executed_transaction.final_account().commitment(), - target_account_after.commitment() + executed_transaction.final_account().to_commitment(), + target_account_after.to_commitment() ); - prove_and_verify_transaction(executed_transaction)?; + prove_and_verify_transaction(executed_transaction).await?; Ok(()) } @@ -140,7 +151,9 @@ async fn prove_consume_multiple_notes() -> anyhow::Result<()> { let fungible_asset_2: Asset = FungibleAsset::mock(23); let mut builder = MockChain::builder(); - let mut account = builder.add_existing_wallet(Auth::BasicAuth)?; + let mut account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let note_1 = 
builder.add_p2id_note( ACCOUNT_ID_SENDER.try_into()?, account.id(), @@ -170,7 +183,7 @@ async fn prove_consume_multiple_notes() -> anyhow::Result<()> { panic!("Resulting asset should be fungible"); } - Ok(prove_and_verify_transaction(executed_transaction)?) + Ok(prove_and_verify_transaction(executed_transaction).await?) } /// Consumes two existing notes and creates two other notes in the same transaction @@ -178,8 +191,12 @@ async fn prove_consume_multiple_notes() -> anyhow::Result<()> { async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let mut account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, [FungibleAsset::mock(20)])?; + let mut account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [FungibleAsset::mock(20)], + )?; let input_note_faucet_id = ACCOUNT_ID_PRIVATE_FUNGIBLE_FAUCET.try_into()?; let input_note_asset_1: Asset = FungibleAsset::new(input_note_faucet_id, 11)?.into(); @@ -202,19 +219,22 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let mock_chain = builder.build()?; - let output_note_1 = create_p2id_note( + let asset_1 = FungibleAsset::mock(10); + let asset_2 = FungibleAsset::mock(5); + + let output_note_1 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE_2.try_into()?, - vec![FungibleAsset::mock(10)], + vec![asset_1], NoteType::Public, NoteAttachment::default(), &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), )?; - let output_note_2 = create_p2id_note( + let output_note_2 = P2idNote::create( account.id(), ACCOUNT_ID_REGULAR_PUBLIC_ACCOUNT_IMMUTABLE_CODE.try_into()?, - vec![FungibleAsset::mock(5)], + vec![asset_2], NoteType::Public, NoteAttachment::default(), &mut RpoRandomCoin::new(Word::from([4, 3, 2, 1u32])), @@ -229,7 +249,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { push.{tag_1} exec.output_note::create - 
push.{asset_1} + push.{ASSET_VALUE_1} + push.{ASSET_KEY_1} call.::miden::standards::wallets::basic::move_asset_to_note dropw dropw dropw dropw @@ -238,7 +259,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { push.{tag_2} exec.output_note::create - push.{asset_2} + push.{ASSET_VALUE_2} + push.{ASSET_KEY_2} call.::miden::standards::wallets::basic::move_asset_to_note dropw dropw dropw dropw end @@ -246,11 +268,13 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { recipient_1 = output_note_1.recipient().digest(), note_type_1 = NoteType::Public as u8, tag_1 = Felt::from(output_note_1.metadata().tag()), - asset_1 = Word::from(FungibleAsset::mock(10)), + ASSET_KEY_1 = asset_1.to_key_word(), + ASSET_VALUE_1 = asset_1.to_value_word(), recipient_2 = output_note_2.recipient().digest(), note_type_2 = NoteType::Public as u8, tag_2 = Felt::from(output_note_2.metadata().tag()), - asset_2 = Word::from(FungibleAsset::mock(5)), + ASSET_KEY_2 = asset_2.to_key_word(), + ASSET_VALUE_2 = asset_2.to_value_word(), ); let tx_script = CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -258,8 +282,8 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { let tx_context = mock_chain .build_tx_context(account.id(), &[input_note_1.id(), input_note_2.id()], &[])? .extend_expected_output_notes(vec![ - OutputNote::Full(output_note_1), - OutputNote::Full(output_note_2), + RawOutputNote::Full(output_note_1), + RawOutputNote::Full(output_note_2), ]) .tx_script(tx_script) .build()?; @@ -274,3 +298,103 @@ async fn test_create_consume_multiple_notes() -> anyhow::Result<()> { assert_eq!(account.vault().get_balance(FungibleAsset::mock_issuer())?, 5); Ok(()) } + +/// Tests the P2ID `new` MASM constructor procedure. +/// This test verifies that calling `p2id::new` from a transaction script creates an output note +/// with the same recipient as `P2idNote::build_recipient` would create. 
+#[tokio::test] +async fn test_p2id_new_constructor() -> anyhow::Result<()> { + let mut builder = MockChain::builder(); + + let sender_account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [FungibleAsset::mock(100)], + )?; + let target_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + + let mock_chain = builder.build()?; + + // Create a serial number for the note + let serial_num = Word::from([1u32, 2u32, 3u32, 4u32]); + + // Build the expected recipient using the Rust implementation + let expected_recipient = P2idNoteStorage::new(target_account.id()).into_recipient(serial_num); + + // Create a note tag for the target account + let tag = NoteTag::with_account_target(target_account.id()); + + // Build a transaction script that uses p2id::new to create a note + let tx_script_src = format!( + r#" + use miden::standards::notes::p2id + + begin + # Push inputs for p2id::new + push.{serial_num} + push.{note_type} + push.{tag} + push.{target_prefix} + push.{target_suffix} + # => [target_id_suffix, target_id_prefix, tag, note_type, SERIAL_NUM] + + exec.p2id::new + # => [note_idx] + + # Add an asset to the created note + push.{ASSET_VALUE} + push.{ASSET_KEY} + call.::miden::standards::wallets::basic::move_asset_to_note + + # Clean up stack + dropw dropw dropw dropw + end + "#, + target_prefix = target_account.id().prefix().as_felt(), + target_suffix = target_account.id().suffix(), + tag = Felt::from(tag), + note_type = NoteType::Public as u8, + serial_num = serial_num, + ASSET_KEY = FungibleAsset::mock(50).to_key_word(), + ASSET_VALUE = FungibleAsset::mock(50).to_value_word(), + ); + + let tx_script = CodeBuilder::default().compile_tx_script(&tx_script_src)?; + + // Build expected output note + let expected_output_note = P2idNote::create( + sender_account.id(), + target_account.id(), + vec![FungibleAsset::mock(50)], + NoteType::Public, + 
NoteAttachment::default(), + &mut RpoRandomCoin::new(serial_num), + )?; + + let tx_context = mock_chain + .build_tx_context(sender_account.id(), &[], &[])? + .extend_expected_output_notes(vec![RawOutputNote::Full(expected_output_note)]) + .tx_script(tx_script) + .build()?; + + let executed_transaction = tx_context.execute().await?; + + // Verify that one note was created + assert_eq!(executed_transaction.output_notes().num_notes(), 1); + + // Get the created note's recipient and verify it matches + let output_note = executed_transaction.output_notes().get_note(0); + let created_recipient = output_note.recipient().expect("output note should have recipient"); + + // Verify the recipient matches what we expected + assert_eq!( + created_recipient.digest(), + expected_recipient.digest(), + "The recipient created by p2id::new should match P2idNote::build_recipient" + ); + + Ok(()) +} diff --git a/crates/miden-testing/tests/scripts/p2ide.rs b/crates/miden-testing/tests/scripts/p2ide.rs index a7ac7aa0a1..67d1ea41c0 100644 --- a/crates/miden-testing/tests/scripts/p2ide.rs +++ b/crates/miden-testing/tests/scripts/p2ide.rs @@ -3,6 +3,7 @@ use core::slice; use anyhow::Context; use miden_protocol::Felt; use miden_protocol::account::Account; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::asset::{Asset, AssetVault, FungibleAsset}; use miden_protocol::block::BlockNumber; use miden_protocol::note::{Note, NoteType}; @@ -53,8 +54,8 @@ async fn p2ide_script_success_without_reclaim_or_timelock() -> anyhow::Result<() Felt::new(2), ); assert_eq!( - executed_transaction_2.final_account().commitment(), - target_account_after.commitment() + executed_transaction_2.final_account().to_commitment(), + target_account_after.to_commitment() ); Ok(()) @@ -91,8 +92,8 @@ async fn p2ide_script_success_timelock_unlock_before_reclaim_height() -> anyhow: Felt::new(2), ); assert_eq!( - executed_transaction_1.final_account().commitment(), - target_account_after.commitment() + 
executed_transaction_1.final_account().to_commitment(), + target_account_after.to_commitment() ); Ok(()) @@ -167,7 +168,7 @@ async fn p2ide_script_timelocked_reclaim_disabled() -> anyhow::Result<()> { Felt::new(2), ); - assert_eq!(final_tx.final_account().commitment(), target_after.commitment()); + assert_eq!(final_tx.final_account().to_commitment(), target_after.to_commitment()); Ok(()) } @@ -219,8 +220,8 @@ async fn p2ide_script_reclaim_fails_before_timelock_expiry() -> anyhow::Result<( ); assert_eq!( - executed_transaction_2.final_account().commitment(), - sender_account_after.commitment() + executed_transaction_2.final_account().to_commitment(), + sender_account_after.to_commitment() ); Ok(()) @@ -302,7 +303,7 @@ async fn p2ide_script_reclaimable_timelockable() -> anyhow::Result<()> { Felt::new(2), ); - assert_eq!(final_tx.final_account().commitment(), target_after.commitment()); + assert_eq!(final_tx.final_account().to_commitment(), target_after.to_commitment()); Ok(()) } @@ -348,7 +349,7 @@ async fn p2ide_script_reclaim_success_after_timelock() -> anyhow::Result<()> { Felt::new(2), ); - assert_eq!(final_tx.final_account().commitment(), sender_after.commitment()); + assert_eq!(final_tx.final_account().to_commitment(), sender_after.to_commitment()); Ok(()) } @@ -371,9 +372,15 @@ fn setup_p2ide_test( let mut builder = MockChain::builder(); // Create sender and target accounts - let sender_account = builder.add_existing_wallet(Auth::BasicAuth)?; - let target_account = builder.add_existing_wallet(Auth::BasicAuth)?; - let malicious_account = builder.add_existing_wallet(Auth::BasicAuth)?; + let sender_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let target_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; + let malicious_account = builder.add_existing_wallet(Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + })?; let 
p2ide_note = builder.add_p2ide_note( sender_account.id(), diff --git a/crates/miden-testing/tests/scripts/send_note.rs b/crates/miden-testing/tests/scripts/send_note.rs index f80a4c5a06..ae6dc8495e 100644 --- a/crates/miden-testing/tests/scripts/send_note.rs +++ b/crates/miden-testing/tests/scripts/send_note.rs @@ -1,37 +1,61 @@ use core::slice; use std::collections::BTreeMap; -use miden_protocol::asset::{Asset, FungibleAsset}; +use miden_protocol::account::auth::AuthScheme; +use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; use miden_protocol::crypto::rand::{FeltRng, RpoRandomCoin}; use miden_protocol::note::{ Note, NoteAssets, NoteAttachment, NoteAttachmentScheme, - NoteInputs, NoteMetadata, NoteRecipient, + NoteStorage, NoteTag, NoteType, PartialNote, }; -use miden_protocol::transaction::OutputNote; +use miden_protocol::transaction::RawOutputNote; use miden_protocol::{Felt, Word}; use miden_standards::account::interface::{AccountInterface, AccountInterfaceExt}; use miden_standards::code_builder::CodeBuilder; +use miden_testing::utils::create_p2any_note; use miden_testing::{Auth, MockChain}; /// Tests the execution of the generated send_note transaction script in case the sending account /// has the [`BasicWallet`][wallet] interface. /// +/// This tests consumes a SPAWN note first so that the note_idx in the send_note script is not zero +/// to make sure the note_idx is correctly kept on the stack. +/// +/// The test also sends two assets to make sure the generated script deals correctly with multiple +/// assets. 
+/// /// [wallet]: miden_standards::account::interface::AccountComponentInterface::BasicWallet #[tokio::test] async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { - let sent_asset = FungibleAsset::mock(10); + let total_asset = FungibleAsset::mock(100); + let sent_asset0 = NonFungibleAsset::mock(&[4, 5, 6]); + + let sent_asset1 = FungibleAsset::mock(10); + let sent_asset2 = FungibleAsset::mock(40); let mut builder = MockChain::builder(); - let sender_basic_wallet_account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, [FungibleAsset::mock(100)])?; + + let sender_basic_wallet_account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + [sent_asset0, total_asset], + )?; + let p2any_note = create_p2any_note( + sender_basic_wallet_account.id(), + NoteType::Private, + [sent_asset2], + &mut RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])), + ); + let spawn_note = builder.add_spawn_note([&p2any_note])?; let mock_chain = builder.build()?; let sender_account_interface = AccountInterface::from_account(&sender_basic_wallet_account); @@ -39,12 +63,13 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { let tag = NoteTag::with_account_target(sender_basic_wallet_account.id()); let elements = [9, 8, 7, 6, 5u32].map(Felt::from).to_vec(); let attachment = NoteAttachment::new_array(NoteAttachmentScheme::new(42), elements.clone())?; - let metadata = NoteMetadata::new(sender_basic_wallet_account.id(), NoteType::Public, tag) + let metadata = NoteMetadata::new(sender_basic_wallet_account.id(), NoteType::Public) + .with_tag(tag) .with_attachment(attachment.clone()); - let assets = NoteAssets::new(vec![sent_asset]).unwrap(); + let assets = NoteAssets::new(vec![sent_asset0, sent_asset1]).unwrap(); let note_script = CodeBuilder::default().compile_note_script("begin nop end").unwrap(); let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); - let recipient = 
NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let note = Note::new(assets.clone(), metadata, recipient); let partial_note: PartialNote = note.clone().into(); @@ -54,13 +79,10 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { .build_send_notes_script(slice::from_ref(&partial_note), Some(expiration_delta))?; let executed_transaction = mock_chain - .build_tx_context(sender_basic_wallet_account.id(), &[], &[]) + .build_tx_context(sender_basic_wallet_account.id(), &[spawn_note.id()], &[]) .expect("failed to build tx context") - // TODO: This shouldn't be necessary. The attachment should be included in the tx - // script's mast forest's advice map. - .extend_advice_map(vec![(attachment.content().to_word(), elements)]) .tx_script(send_note_transaction_script) - .extend_expected_output_notes(vec![OutputNote::Full(note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(note.clone())]) .build()? 
.execute() .await?; @@ -72,13 +94,22 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { .removed_assets() .map(|asset| (asset.vault_key(), asset)) .collect(); - assert_eq!(removed_assets.len(), 1, "one asset should have been removed"); + assert_eq!(removed_assets.len(), 2, "two assets should have been removed"); + assert_eq!( + removed_assets.remove(&sent_asset0.vault_key()).unwrap(), + sent_asset0, + "sent asset0 should be in removed assets" + ); + assert_eq!( + removed_assets.remove(&sent_asset1.vault_key()).unwrap(), + sent_asset1.unwrap_fungible().add(sent_asset2.unwrap_fungible())?.into(), + "sent asset1 + sent_asset2 should be in removed assets" + ); assert_eq!( - removed_assets.remove(&sent_asset.vault_key()).unwrap(), - sent_asset, - "sent asset should be in removed assets" + executed_transaction.output_notes().get_note(0), + &RawOutputNote::Partial(p2any_note.into()) ); - assert_eq!(executed_transaction.output_notes().get_note(0), &OutputNote::Full(note)); + assert_eq!(executed_transaction.output_notes().get_note(1), &RawOutputNote::Full(note)); Ok(()) } @@ -90,8 +121,14 @@ async fn test_send_note_script_basic_wallet() -> anyhow::Result<()> { #[tokio::test] async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { let mut builder = MockChain::builder(); - let sender_basic_fungible_faucet_account = - builder.add_existing_basic_faucet(Auth::BasicAuth, "POL", 200, None)?; + let sender_basic_fungible_faucet_account = builder.add_existing_basic_faucet( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + "POL", + 200, + None, + )?; let mock_chain = builder.build()?; let sender_account_interface = @@ -99,15 +136,15 @@ async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { let tag = NoteTag::with_account_target(sender_basic_fungible_faucet_account.id()); let attachment = NoteAttachment::new_word(NoteAttachmentScheme::new(100), Word::empty()); - let metadata = - 
NoteMetadata::new(sender_basic_fungible_faucet_account.id(), NoteType::Public, tag) - .with_attachment(attachment); + let metadata = NoteMetadata::new(sender_basic_fungible_faucet_account.id(), NoteType::Public) + .with_tag(tag) + .with_attachment(attachment); let assets = NoteAssets::new(vec![Asset::Fungible( FungibleAsset::new(sender_basic_fungible_faucet_account.id(), 10).unwrap(), )])?; let note_script = CodeBuilder::default().compile_note_script("begin nop end").unwrap(); let serial_num = RpoRandomCoin::new(Word::from([1, 2, 3, 4u32])).draw_word(); - let recipient = NoteRecipient::new(serial_num, note_script, NoteInputs::default()); + let recipient = NoteRecipient::new(serial_num, note_script, NoteStorage::default()); let note = Note::new(assets.clone(), metadata, recipient); let partial_note: PartialNote = note.clone().into(); @@ -120,12 +157,12 @@ async fn test_send_note_script_basic_fungible_faucet() -> anyhow::Result<()> { .build_tx_context(sender_basic_fungible_faucet_account.id(), &[], &[]) .expect("failed to build tx context") .tx_script(send_note_transaction_script) - .extend_expected_output_notes(vec![OutputNote::Full(note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(note.clone())]) .build()? 
.execute() .await?; - assert_eq!(executed_transaction.output_notes().get_note(0), &OutputNote::Full(note)); + assert_eq!(executed_transaction.output_notes().get_note(0), &RawOutputNote::Full(note)); Ok(()) } diff --git a/crates/miden-testing/tests/scripts/swap.rs b/crates/miden-testing/tests/scripts/swap.rs index 748b9110c2..0cd95695a9 100644 --- a/crates/miden-testing/tests/scripts/swap.rs +++ b/crates/miden-testing/tests/scripts/swap.rs @@ -1,17 +1,17 @@ use anyhow::Context; +use miden_protocol::Felt; +use miden_protocol::account::auth::AuthScheme; use miden_protocol::account::{Account, AccountId, AccountStorageMode, AccountType}; use miden_protocol::asset::{Asset, FungibleAsset, NonFungibleAsset}; -use miden_protocol::errors::NoteError; -use miden_protocol::note::{Note, NoteAssets, NoteDetails, NoteMetadata, NoteTag, NoteType}; +use miden_protocol::note::{Note, NoteDetails, NoteType}; use miden_protocol::testing::account_id::{ ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET, ACCOUNT_ID_PUBLIC_FUNGIBLE_FAUCET_1, AccountIdBuilder, }; -use miden_protocol::transaction::OutputNote; -use miden_protocol::{Felt, Word}; +use miden_protocol::transaction::RawOutputNote; use miden_standards::code_builder::CodeBuilder; -use miden_standards::note::utils; +use miden_testing::utils::create_p2id_note_exact; use miden_testing::{Auth, MockChain}; use crate::prove_and_verify_transaction; @@ -40,7 +40,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { push.{tag} exec.output_note::create - push.{asset} + push.{ASSET_VALUE} + push.{ASSET_KEY} call.::miden::standards::wallets::basic::move_asset_to_note dropw dropw dropw dropw end @@ -48,7 +49,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { recipient = swap_note.recipient().digest(), note_type = NoteType::Public as u8, tag = Felt::from(swap_note.metadata().tag()), - asset = Word::from(offered_asset), + ASSET_KEY = offered_asset.to_key_word(), + ASSET_VALUE = offered_asset.to_value_word(), ); let tx_script = 
CodeBuilder::default().compile_tx_script(tx_script_src)?; @@ -57,7 +59,7 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { .build_tx_context(sender_account.id(), &[], &[]) .context("failed to build tx context")? .tx_script(tx_script) - .extend_expected_output_notes(vec![OutputNote::Full(swap_note.clone())]) + .extend_expected_output_notes(vec![RawOutputNote::Full(swap_note.clone())]) .build()? .execute() .await?; @@ -79,8 +81,8 @@ pub async fn prove_send_swap_note() -> anyhow::Result<()> { ); let swap_output_note = create_swap_note_tx.output_notes().iter().next().unwrap(); - assert_eq!(swap_output_note.assets().unwrap().iter().next().unwrap(), &offered_asset); - assert!(prove_and_verify_transaction(create_swap_note_tx).is_ok()); + assert_eq!(swap_output_note.assets().iter().next().unwrap(), &offered_asset); + assert!(prove_and_verify_transaction(create_swap_note_tx).await.is_ok()); Ok(()) } @@ -118,7 +120,7 @@ async fn consume_swap_note_private_payback_note() -> anyhow::Result<()> { let output_payback_note = consume_swap_note_tx.output_notes().iter().next().unwrap().clone(); assert!(output_payback_note.id() == payback_note.id()); - assert_eq!(output_payback_note.assets().unwrap().iter().next().unwrap(), &requested_asset); + assert_eq!(output_payback_note.assets().iter().next().unwrap(), &requested_asset); assert!(target_account.vault().assets().count() == 1); assert!(target_account.vault().assets().any(|asset| asset == offered_asset)); @@ -146,9 +148,11 @@ async fn consume_swap_note_private_payback_note() -> anyhow::Result<()> { assert!(sender_account.vault().assets().any(|asset| asset == requested_asset)); prove_and_verify_transaction(consume_swap_note_tx) + .await .context("failed to prove/verify consume_swap_note_tx")?; prove_and_verify_transaction(consume_payback_tx) + .await .context("failed to prove/verify consume_payback_tx")?; Ok(()) @@ -187,7 +191,7 @@ async fn consume_swap_note_public_payback_note() -> anyhow::Result<()> { let 
consume_swap_note_tx = mock_chain .build_tx_context(target_account.id(), &[swap_note.id()], &[]) .context("failed to build tx context")? - .extend_expected_output_notes(vec![OutputNote::Full(payback_p2id_note)]) + .extend_expected_output_notes(vec![RawOutputNote::Full(payback_p2id_note)]) .build()? .execute() .await?; @@ -196,7 +200,7 @@ async fn consume_swap_note_public_payback_note() -> anyhow::Result<()> { let output_payback_note = consume_swap_note_tx.output_notes().iter().next().unwrap().clone(); assert!(output_payback_note.id() == payback_note.id()); - assert_eq!(output_payback_note.assets().unwrap().iter().next().unwrap(), &requested_asset); + assert_eq!(output_payback_note.assets().iter().next().unwrap(), &requested_asset); assert!(target_account.vault().assets().count() == 1); assert!(target_account.vault().assets().any(|asset| asset == offered_asset)); @@ -237,7 +241,12 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // CREATE ACCOUNT 1: Has asset A, wants asset B // -------------------------------------------------------------------------------------------- - let account_1 = builder.add_existing_wallet_with_assets(Auth::BasicAuth, vec![asset_a])?; + let account_1 = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + vec![asset_a], + )?; let payback_note_type = NoteType::Private; let (swap_note_1, payback_note_1) = @@ -245,8 +254,12 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // CREATE ACCOUNT 2: Has asset B, wants asset A // -------------------------------------------------------------------------------------------- - let account_2 = builder.add_existing_wallet_with_assets(Auth::BasicAuth, vec![asset_b])?; - + let account_2 = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + vec![asset_b], + )?; let (swap_note_2, payback_note_2) = builder.add_swap_note(account_2.id(), asset_b, asset_a, 
payback_note_type)?; @@ -254,8 +267,12 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { // -------------------------------------------------------------------------------------------- // TODO: matcher account should be able to fill both SWAP notes without holding assets A & B - let matcher_account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, vec![asset_a, asset_b])?; + let matcher_account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + vec![asset_a, asset_b], + )?; // Initial matching account balance should have two assets. assert_eq!(matcher_account.vault().assets().count(), 2); @@ -285,10 +302,10 @@ async fn settle_coincidence_of_wants() -> anyhow::Result<()> { .expect("Payback note 2 not found"); // Verify payback note 1 contains exactly the initially requested asset B for account 1 - assert_eq!(output_payback_1.assets().unwrap().iter().next().unwrap(), &asset_b); + assert_eq!(output_payback_1.assets().iter().next().unwrap(), &asset_b); // Verify payback note 2 contains exactly the initially requested asset A for account 2 - assert_eq!(output_payback_2.assets().unwrap().iter().next().unwrap(), &asset_a); + assert_eq!(output_payback_2.assets().iter().next().unwrap(), &asset_a); Ok(()) } @@ -313,16 +330,24 @@ fn setup_swap_test(payback_note_type: NoteType) -> anyhow::Result let requested_asset = NonFungibleAsset::mock(&[1, 2, 3, 4]); let mut builder = MockChain::builder(); - let sender_account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, vec![offered_asset])?; - let target_account = - builder.add_existing_wallet_with_assets(Auth::BasicAuth, vec![requested_asset])?; + let sender_account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: AuthScheme::Falcon512Poseidon2, + }, + vec![offered_asset], + )?; + let target_account = builder.add_existing_wallet_with_assets( + Auth::BasicAuth { + auth_scheme: 
AuthScheme::Falcon512Poseidon2, + }, + vec![requested_asset], + )?; let (swap_note, payback_note) = builder .add_swap_note(sender_account.id(), offered_asset, requested_asset, payback_note_type) .unwrap(); - builder.add_output_note(OutputNote::Full(swap_note.clone())); + builder.add_output_note(RawOutputNote::Full(swap_note.clone())); let mock_chain = builder.build()?; Ok(SwapTestSetup { @@ -335,21 +360,3 @@ fn setup_swap_test(payback_note_type: NoteType) -> anyhow::Result payback_note, }) } - -/// Generates a P2ID note - Pay-to-ID note with an exact serial number -pub fn create_p2id_note_exact( - sender: AccountId, - target: AccountId, - assets: Vec, - note_type: NoteType, - serial_num: Word, -) -> Result { - let recipient = utils::build_p2id_recipient(target, serial_num)?; - - let tag = NoteTag::with_account_target(target); - - let metadata = NoteMetadata::new(sender, note_type, tag); - let vault = NoteAssets::new(assets)?; - - Ok(Note::new(vault, metadata, recipient)) -} diff --git a/crates/miden-testing/tests/wallet/mod.rs b/crates/miden-testing/tests/wallet/mod.rs index 12386a14e3..0fff293ddf 100644 --- a/crates/miden-testing/tests/wallet/mod.rs +++ b/crates/miden-testing/tests/wallet/mod.rs @@ -1,6 +1,6 @@ use miden_protocol::Word; use miden_protocol::account::auth::AuthSecretKey; -use miden_standards::AuthScheme; +use miden_standards::AuthMethod; use miden_standards::account::wallets::create_basic_wallet; use rand_chacha::ChaCha20Rng; use rand_chacha::rand_core::SeedableRng; @@ -8,17 +8,18 @@ use rand_chacha::rand_core::SeedableRng; #[cfg(not(target_arch = "wasm32"))] #[test] fn wallet_creation() { - use miden_protocol::account::{AccountCode, AccountStorageMode, AccountType}; - use miden_standards::account::auth::AuthFalcon512Rpo; + use miden_protocol::account::{AccountCode, AccountStorageMode, AccountType, auth}; + use miden_standards::account::auth::AuthSingleSig; use miden_standards::account::wallets::BasicWallet; // we need a Falcon Public Key to create 
the wallet account let seed = [0_u8; 32]; let mut rng = ChaCha20Rng::from_seed(seed); - let sec_key = AuthSecretKey::new_falcon512_rpo_with_rng(&mut rng); + let sec_key = AuthSecretKey::new_falcon512_poseidon2_with_rng(&mut rng); + let auth_scheme = auth::AuthScheme::Falcon512Poseidon2; let pub_key = sec_key.public_key().to_commitment(); - let auth_scheme: AuthScheme = AuthScheme::Falcon512Rpo { pub_key }; + let auth_method: AuthMethod = AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }; // we need to use an initial seed to create the wallet account let init_seed: [u8; 32] = [ @@ -29,10 +30,10 @@ fn wallet_creation() { let account_type = AccountType::RegularAccountImmutableCode; let storage_mode = AccountStorageMode::Private; - let wallet = create_basic_wallet(init_seed, auth_scheme, account_type, storage_mode).unwrap(); + let wallet = create_basic_wallet(init_seed, auth_method, account_type, storage_mode).unwrap(); let expected_code = AccountCode::from_components( - &[AuthFalcon512Rpo::new(pub_key).into(), BasicWallet.into()], + &[AuthSingleSig::new(pub_key, auth_scheme).into(), BasicWallet.into()], AccountType::RegularAccountUpdatableCode, ) .unwrap(); @@ -41,7 +42,7 @@ fn wallet_creation() { assert!(wallet.is_regular_account()); assert_eq!(wallet.code().commitment(), expected_code_commitment); assert_eq!( - wallet.storage().get_item(AuthFalcon512Rpo::public_key_slot()).unwrap(), + wallet.storage().get_item(AuthSingleSig::public_key_slot()).unwrap(), Word::from(pub_key) ); } @@ -49,16 +50,17 @@ fn wallet_creation() { #[cfg(not(target_arch = "wasm32"))] #[test] fn wallet_creation_2() { - use miden_protocol::account::{AccountCode, AccountStorageMode, AccountType}; - use miden_standards::account::auth::AuthEcdsaK256Keccak; + use miden_protocol::account::{AccountCode, AccountStorageMode, AccountType, auth}; + use miden_standards::account::auth::AuthSingleSig; use miden_standards::account::wallets::BasicWallet; // we need a ECDSA Public Key to create the 
wallet account let seed = [0_u8; 32]; let mut rng = ChaCha20Rng::from_seed(seed); let sec_key = AuthSecretKey::new_ecdsa_k256_keccak_with_rng(&mut rng); + let auth_scheme = auth::AuthScheme::EcdsaK256Keccak; let pub_key = sec_key.public_key().to_commitment(); - let auth_scheme: AuthScheme = AuthScheme::EcdsaK256Keccak { pub_key }; + let auth_method: AuthMethod = AuthMethod::SingleSig { approver: (pub_key, auth_scheme) }; // we need to use an initial seed to create the wallet account let init_seed: [u8; 32] = [ @@ -69,10 +71,10 @@ fn wallet_creation_2() { let account_type = AccountType::RegularAccountImmutableCode; let storage_mode = AccountStorageMode::Private; - let wallet = create_basic_wallet(init_seed, auth_scheme, account_type, storage_mode).unwrap(); + let wallet = create_basic_wallet(init_seed, auth_method, account_type, storage_mode).unwrap(); let expected_code = AccountCode::from_components( - &[AuthEcdsaK256Keccak::new(pub_key).into(), BasicWallet.into()], + &[AuthSingleSig::new(pub_key, auth_scheme).into(), BasicWallet.into()], AccountType::RegularAccountUpdatableCode, ) .unwrap(); @@ -81,7 +83,7 @@ fn wallet_creation_2() { assert!(wallet.is_regular_account()); assert_eq!(wallet.code().commitment(), expected_code_commitment); assert_eq!( - wallet.storage().get_item(AuthEcdsaK256Keccak::public_key_slot()).unwrap(), + wallet.storage().get_item(AuthSingleSig::public_key_slot()).unwrap(), Word::from(pub_key) ); } diff --git a/crates/miden-tx/README.md b/crates/miden-tx/README.md index 9808957a3d..52edc143ad 100644 --- a/crates/miden-tx/README.md +++ b/crates/miden-tx/README.md @@ -6,7 +6,7 @@ This crate contains tool for creating, executing, and proving Miden blockchain t This crate exposes a few components to compile, run, and prove transactions. -The first requirement is to have a `DataStore` implementation. 
`DataStore` objects are responsible to load the data needed by the transactions executor, especially the account's code, the reference block data, and the note's inputs. +The first requirement is to have a `DataStore` implementation. `DataStore` objects are responsible to load the data needed by the transactions executor, especially the account's code, the reference block data, and the note's storage. ```rust let store = DataStore:new(); diff --git a/crates/miden-tx/src/auth/tx_authenticator.rs b/crates/miden-tx/src/auth/tx_authenticator.rs index 4845421445..877d29aa61 100644 --- a/crates/miden-tx/src/auth/tx_authenticator.rs +++ b/crates/miden-tx/src/auth/tx_authenticator.rs @@ -1,6 +1,7 @@ use alloc::boxed::Box; use alloc::collections::BTreeMap; use alloc::string::ToString; +use alloc::sync::Arc; use alloc::vec::Vec; use miden_processor::FutureMaybeSend; @@ -10,7 +11,13 @@ use miden_protocol::transaction::TransactionSummary; use miden_protocol::{Felt, Hasher, Word}; use crate::errors::AuthenticationError; -use crate::utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; +use crate::utils::serde::{ + ByteReader, + ByteWriter, + Deserializable, + DeserializationError, + Serializable, +}; // SIGNATURE DATA // ================================================================================================ @@ -145,7 +152,7 @@ pub trait TransactionAuthenticator { fn get_public_key( &self, pub_key_commitment: PublicKeyCommitment, - ) -> impl FutureMaybeSend>; + ) -> impl FutureMaybeSend>>; } /// A placeholder type for the generic trait bound of `TransactionAuthenticator<'_,'_,_,T>` @@ -171,7 +178,7 @@ impl TransactionAuthenticator for UnreachableAuth { fn get_public_key( &self, _pub_key_commitment: PublicKeyCommitment, - ) -> impl FutureMaybeSend> { + ) -> impl FutureMaybeSend>> { async { unreachable!("Type `UnreachableAuth` must not be instantiated") } } } @@ -183,7 +190,7 @@ impl TransactionAuthenticator for UnreachableAuth { 
#[derive(Clone, Debug)] pub struct BasicAuthenticator { /// pub_key |-> (secret_key, public_key) mapping - keys: BTreeMap, + keys: BTreeMap)>, } impl BasicAuthenticator { @@ -191,7 +198,7 @@ impl BasicAuthenticator { let mut key_map = BTreeMap::new(); for secret_key in keys { let pub_key = secret_key.public_key(); - key_map.insert(pub_key.to_commitment(), (secret_key.clone(), pub_key)); + key_map.insert(pub_key.to_commitment(), (secret_key.clone(), pub_key.into())); } BasicAuthenticator { keys: key_map } @@ -200,7 +207,10 @@ impl BasicAuthenticator { pub fn from_key_pairs(keys: &[(AuthSecretKey, PublicKey)]) -> Self { let mut key_map = BTreeMap::new(); for (secret_key, public_key) in keys { - key_map.insert(public_key.to_commitment(), (secret_key.clone(), public_key.clone())); + key_map.insert( + public_key.to_commitment(), + (secret_key.clone(), public_key.clone().into()), + ); } BasicAuthenticator { keys: key_map } @@ -210,7 +220,7 @@ impl BasicAuthenticator { /// /// Map keys represent the public key commitments, and values represent the (secret_key, /// public_key) pair that the authenticator would use to sign messages. 
- pub fn keys(&self) -> &BTreeMap { + pub fn keys(&self) -> &BTreeMap)> { &self.keys } } @@ -244,12 +254,12 @@ impl TransactionAuthenticator for BasicAuthenticator { fn get_public_key( &self, pub_key_commitment: PublicKeyCommitment, - ) -> impl FutureMaybeSend> { - async move { self.keys.get(&pub_key_commitment).map(|(_, pub_key)| pub_key) } + ) -> impl FutureMaybeSend>> { + async move { self.keys.get(&pub_key_commitment).map(|(_, pub_key)| pub_key.clone()) } } } -// HELPER FUNCTIONS +// EMPTY AUTHENTICATOR // ================================================================================================ impl TransactionAuthenticator for () { @@ -269,22 +279,25 @@ impl TransactionAuthenticator for () { fn get_public_key( &self, _pub_key_commitment: PublicKeyCommitment, - ) -> impl FutureMaybeSend> { + ) -> impl FutureMaybeSend>> { async { None } } } +// TESTS +// ================================================================================================ + #[cfg(test)] mod test { use miden_protocol::account::auth::AuthSecretKey; - use miden_protocol::utils::{Deserializable, Serializable}; + use miden_protocol::utils::serde::{Deserializable, Serializable}; use miden_protocol::{Felt, Word}; use super::SigningInputs; #[test] fn serialize_auth_key() { - let auth_key = AuthSecretKey::new_falcon512_rpo(); + let auth_key = AuthSecretKey::new_falcon512_poseidon2(); let serialized = auth_key.to_bytes(); let deserialized = AuthSecretKey::read_from_bytes(&serialized).unwrap(); diff --git a/crates/miden-tx/src/errors/mod.rs b/crates/miden-tx/src/errors/mod.rs index 0e0cc1dda2..f9727fcae2 100644 --- a/crates/miden-tx/src/errors/mod.rs +++ b/crates/miden-tx/src/errors/mod.rs @@ -3,9 +3,10 @@ use alloc::string::String; use alloc::vec::Vec; use core::error::Error; -use miden_processor::{DeserializationError, ExecutionError}; -use miden_protocol::account::AccountId; +use miden_processor::ExecutionError; +use miden_processor::serde::DeserializationError; use 
miden_protocol::account::auth::PublicKeyCommitment; +use miden_protocol::account::{AccountId, StorageMapKey}; use miden_protocol::assembly::diagnostics::reporting::PrintDiagnostic; use miden_protocol::asset::AssetVaultKey; use miden_protocol::block::BlockNumber; @@ -15,8 +16,10 @@ use miden_protocol::errors::{ AccountError, AssetError, NoteError, + OutputNoteError, ProvenTransactionError, TransactionInputError, + TransactionInputsExtractionError, TransactionOutputError, }; use miden_protocol::note::{NoteId, NoteMetadata}; @@ -72,6 +75,8 @@ impl From for TransactionExecutorError { #[derive(Debug, Error)] pub enum TransactionExecutorError { + #[error("failed to read fee asset from transaction inputs")] + FeeAssetRetrievalFailed(#[source] TransactionInputsExtractionError), #[error("failed to fetch transaction inputs from the data store")] FetchTransactionInputsFailed(#[source] DataStoreError), #[error("failed to fetch asset witnesses from the data store")] @@ -145,6 +150,8 @@ pub enum TransactionProverError { RemoveFeeAssetFromDelta(#[source] AccountDeltaError), #[error("failed to construct transaction outputs")] TransactionOutputConstructionFailed(#[source] TransactionOutputError), + #[error("failed to shrink output note")] + OutputNoteShrinkFailed(#[source] OutputNoteError), #[error("failed to build proven transaction")] ProvenTransactionBuildFailed(#[source] ProvenTransactionError), // Print the diagnostic directly instead of returning the source error. 
In the source error @@ -204,8 +211,8 @@ pub enum TransactionKernelError { AccountDeltaRemoveAssetFailed(#[source] AccountDeltaError), #[error("failed to add asset to note")] FailedToAddAssetToNote(#[source] NoteError), - #[error("note input data has hash {actual} but expected hash {expected}")] - InvalidNoteInputs { expected: Word, actual: Word }, + #[error("note storage has commitment {actual} but expected commitment {expected}")] + InvalidNoteStorage { expected: Word, actual: Word }, #[error( "failed to respond to signature requested since no authenticator is assigned to the host" )] @@ -222,9 +229,9 @@ pub enum TransactionKernelError { source: AssetError, }, #[error( - "note inputs data extracted from the advice map by the event handler is not well formed" + "note storage data extracted from the advice map by the event handler is not well formed" )] - MalformedNoteInputs(#[source] NoteError), + MalformedNoteStorage(#[source] NoteError), #[error( "note script data `{data:?}` extracted from the advice map by the event handler is not well formed" )] @@ -247,9 +254,9 @@ pub enum TransactionKernelError { )] NoteAttachmentArrayMismatch { actual: Word, provided: Word }, #[error( - "note input data in advice provider contains fewer elements ({actual}) than specified ({specified}) by its inputs length" + "note storage in advice provider contains fewer items ({actual}) than specified ({specified}) by its number of storage items" )] - TooFewElementsForNoteInputs { specified: u64, actual: u64 }, + TooFewElementsForNoteStorage { specified: u64, actual: u64 }, #[error("account procedure with procedure root {0} is not in the account procedure index map")] UnknownAccountProcedure(Word), #[error("code commitment {0} is not in the account procedure index map")] @@ -283,7 +290,7 @@ pub enum TransactionKernelError { )] GetStorageMapWitness { map_root: Word, - map_key: Word, + map_key: StorageMapKey, // thiserror will return this when calling Error::source on TransactionKernelError. 
source: DataStoreError, }, diff --git a/crates/miden-tx/src/executor/data_store.rs b/crates/miden-tx/src/executor/data_store.rs index 050aeb636d..e0525d4914 100644 --- a/crates/miden-tx/src/executor/data_store.rs +++ b/crates/miden-tx/src/executor/data_store.rs @@ -2,7 +2,7 @@ use alloc::collections::BTreeSet; use alloc::vec::Vec; use miden_processor::{FutureMaybeSend, MastForestStore, Word}; -use miden_protocol::account::{AccountId, PartialAccount, StorageMapWitness}; +use miden_protocol::account::{AccountId, PartialAccount, StorageMapKey, StorageMapWitness}; use miden_protocol::asset::{AssetVaultKey, AssetWitness}; use miden_protocol::block::{BlockHeader, BlockNumber}; use miden_protocol::note::NoteScript; @@ -67,7 +67,7 @@ pub trait DataStore: MastForestStore { &self, account_id: AccountId, map_root: Word, - map_key: Word, + map_key: StorageMapKey, ) -> impl FutureMaybeSend>; /// Returns a note script with the specified root, or `None` if not found. @@ -76,6 +76,9 @@ pub trait DataStore: MastForestStore { /// If the script is not found, it returns `Ok(None)` rather than an error, as "not found" /// is a valid, expected outcome. /// + /// **Note:** Data store implementers do not need to handle standard note scripts (e.g. P2ID). + /// These are resolved directly by the transaction executor and will not trigger this method. + /// /// # Errors /// Returns an error if the data store encountered an internal error while attempting to /// retrieve the script. 
diff --git a/crates/miden-tx/src/executor/exec_host.rs b/crates/miden-tx/src/executor/exec_host.rs index 1679190c92..4d19a2b3bb 100644 --- a/crates/miden-tx/src/executor/exec_host.rs +++ b/crates/miden-tx/src/executor/exec_host.rs @@ -3,21 +3,17 @@ use alloc::collections::{BTreeMap, BTreeSet}; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - AsyncHost, - BaseHost, - EventError, - FutureMaybeSend, - MastForest, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, ProcessorState}; use miden_protocol::account::auth::PublicKeyCommitment; use miden_protocol::account::{ AccountCode, AccountDelta, AccountId, PartialAccount, + StorageMapKey, StorageSlotId, StorageSlotName, }; @@ -26,16 +22,17 @@ use miden_protocol::assembly::{SourceFile, SourceManagerSync, SourceSpan}; use miden_protocol::asset::{AssetVaultKey, AssetWitness, FungibleAsset}; use miden_protocol::block::BlockNumber; use miden_protocol::crypto::merkle::smt::SmtProof; -use miden_protocol::note::{NoteInputs, NoteMetadata, NoteRecipient}; +use miden_protocol::note::{NoteMetadata, NoteRecipient, NoteScript, NoteStorage}; use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, + RawOutputNote, TransactionAdviceInputs, TransactionSummary, }; use miden_protocol::vm::AdviceMap; use miden_protocol::{Felt, Hasher, Word}; +use miden_standards::note::StandardNote; use crate::auth::{SigningInputs, TransactionAuthenticator}; use crate::errors::TransactionKernelError; @@ -239,7 +236,7 @@ where .account_delta_tracker() .vault_delta() .fungible() - .amount(&initial_fee_asset.faucet_id()) + .amount(&initial_fee_asset.vault_key()) .unwrap_or(0); // SAFETY: Initial native asset faucet ID should be a fungible faucet and amount should @@ -282,7 +279,7 @@ where &self, active_account_id: AccountId, map_root: Word, - map_key: Word, + map_key: 
StorageMapKey, ) -> Result, TransactionKernelError> { let storage_map_witness = self .base_host @@ -302,7 +299,7 @@ where let smt_proof = SmtProof::from(storage_map_witness); let map_ext = AdviceMutation::extend_map(AdviceMap::from_iter([( smt_proof.leaf().hash(), - smt_proof.leaf().to_elements(), + smt_proof.leaf().to_elements().collect::>(), )])); Ok(vec![merkle_store_ext, map_ext]) @@ -364,26 +361,48 @@ where Ok(asset_witnesses.into_iter().flat_map(asset_witness_to_advice_mutation).collect()) } - /// Handles a request for a [`NoteScript`] by querying the [`DataStore`]. + /// Handles a request for a [`NoteScript`] during transaction execution when the script is not + /// already in the advice provider. + /// + /// Standard note scripts (P2ID, etc.) are resolved directly from [`StandardNote`], avoiding a + /// data store round-trip. Non-standard scripts are fetched from the [`DataStore`]. /// - /// The script is fetched from the data store and used to build a [`NoteRecipient`], which is - /// then used to create an [`OutputNoteBuilder`]. This function is only called for public notes - /// where the script is not already available in the advice provider. + /// The resolved script is used to build a [`NoteRecipient`], which is then used to create + /// an [`OutputNoteBuilder`]. This function is only called for notes where the script is not + /// already in the advice provider. + /// + /// # Errors + /// Returns an error if: + /// - The note is public and the script is not found in the data store. + /// - Constructing the recipient with the fetched script does not match the expected recipient + /// digest. + /// - The data store returns an error when fetching the script. 
async fn on_note_script_requested( &mut self, note_idx: usize, recipient_digest: Word, script_root: Word, metadata: NoteMetadata, - note_inputs: NoteInputs, + note_storage: NoteStorage, serial_num: Word, ) -> Result, TransactionKernelError> { - let note_script_result = self.base_host.store().get_note_script(script_root).await; + // Resolve standard note scripts directly, avoiding a data store round-trip. + let note_script: Option = + if let Some(standard_note) = StandardNote::from_script_root(script_root) { + Some(standard_note.script()) + } else { + self.base_host.store().get_note_script(script_root).await.map_err(|err| { + TransactionKernelError::other_with_source( + "failed to retrieve note script from data store", + err, + ) + })? + }; - match note_script_result { - Ok(Some(note_script)) => { + match note_script { + Some(note_script) => { let script_felts: Vec = (¬e_script).into(); - let recipient = NoteRecipient::new(serial_num, note_script, note_inputs); + let recipient = NoteRecipient::new(serial_num, note_script, note_storage); if recipient.digest() != recipient_digest { return Err(TransactionKernelError::other(format!( @@ -399,7 +418,7 @@ where script_felts, )]))]) }, - Ok(None) if metadata.is_private() => { + None if metadata.is_private() => { self.base_host.output_note_from_recipient_digest( note_idx, metadata, @@ -408,13 +427,9 @@ where Ok(Vec::new()) }, - Ok(None) => Err(TransactionKernelError::other(format!( + None => Err(TransactionKernelError::other(format!( "note script with root {script_root} not found in data store for public note" ))), - Err(err) => Err(TransactionKernelError::other_with_source( - "failed to retrieve note script from data store", - err, - )), } } @@ -426,7 +441,7 @@ where ) -> ( AccountDelta, InputNotes, - Vec, + Vec, Vec, BTreeMap>, TransactionProgress, @@ -449,10 +464,10 @@ where // HOST IMPLEMENTATION // ================================================================================================ -impl BaseHost for 
TransactionExecutorHost<'_, '_, STORE, AUTH> +impl Host for TransactionExecutorHost<'_, '_, STORE, AUTH> where - STORE: DataStore, - AUTH: TransactionAuthenticator, + STORE: DataStore + Sync, + AUTH: TransactionAuthenticator + Sync, { fn get_label_and_source_file( &self, @@ -463,13 +478,7 @@ where let span = source_manager.location_to_span(location.clone()).unwrap_or_default(); (span, maybe_file) } -} -impl AsyncHost for TransactionExecutorHost<'_, '_, STORE, AUTH> -where - STORE: DataStore + Sync, - AUTH: TransactionAuthenticator + Sync, -{ fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { let mast_forest = self.base_host.get_mast_forest(node_digest); async move { mast_forest } @@ -477,7 +486,7 @@ where fn on_event( &mut self, - process: &ProcessState, + process: &ProcessorState, ) -> impl FutureMaybeSend, EventError>> { let core_lib_event_result = self.base_host.handle_core_lib_events(process); @@ -581,14 +590,14 @@ where recipient_digest, serial_num, script_root, - note_inputs, + note_storage, } => { self.on_note_script_requested( note_idx, recipient_digest, script_root, metadata, - note_inputs, + note_storage, serial_num, ) .await @@ -697,7 +706,7 @@ fn asset_witness_to_advice_mutation(asset_witness: AssetWitness) -> [AdviceMutat let smt_proof = SmtProof::from(asset_witness); let map_ext = AdviceMutation::extend_map(AdviceMap::from_iter([( smt_proof.leaf().hash(), - smt_proof.leaf().to_elements(), + smt_proof.leaf().to_elements().collect::>(), )])); [merkle_store_ext, map_ext] diff --git a/crates/miden-tx/src/executor/mod.rs b/crates/miden-tx/src/executor/mod.rs index eaa57746da..7ebfcd02e0 100644 --- a/crates/miden-tx/src/executor/mod.rs +++ b/crates/miden-tx/src/executor/mod.rs @@ -1,8 +1,8 @@ use alloc::collections::BTreeSet; use alloc::sync::Arc; -use miden_processor::fast::FastProcessor; -use miden_processor::{AdviceInputs, ExecutionError, StackInputs}; +use miden_processor::advice::AdviceInputs; +use 
miden_processor::{ExecutionError, FastProcessor, StackInputs}; pub use miden_processor::{ExecutionOptions, MastForestStore}; use miden_protocol::account::AccountId; use miden_protocol::assembly::DefaultSourceManager; @@ -80,6 +80,7 @@ where exec_options: ExecutionOptions::new( Some(MAX_TX_EXECUTION_CYCLES), MIN_TX_EXECUTION_CYCLES, + ExecutionOptions::DEFAULT_CORE_TRACE_FRAGMENT_SIZE, false, false, ) @@ -148,7 +149,7 @@ where /// stages of transaction execution take. #[must_use] pub fn with_tracing(mut self) -> Self { - self.exec_options = self.exec_options.with_tracing(); + self.exec_options = self.exec_options.with_tracing(true); self } @@ -185,13 +186,8 @@ where // instantiate the processor in debug mode only when debug mode is specified via execution // options; this is important because in debug mode execution is almost 100x slower - // TODO: the processor does not yet respect other execution options (e.g., max cycles); - // this will be fixed in v0.21 release of the VM - let processor = if self.exec_options.enable_debugging() { - FastProcessor::new_debug(stack_inputs.as_slice(), advice_inputs) - } else { - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs) - }; + let processor = + FastProcessor::new_with_options(stack_inputs, advice_inputs, self.exec_options); let output = processor .execute(&TransactionKernel::main(), &mut host) @@ -237,8 +233,7 @@ where let (mut host, stack_inputs, advice_inputs) = self.prepare_transaction(&tx_inputs).await?; - let processor = - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs); + let processor = FastProcessor::new(stack_inputs).with_advice(advice_inputs); let output = processor .execute(&TransactionKernel::tx_script_main(), &mut host) .await @@ -272,24 +267,34 @@ where .await .map_err(TransactionExecutorError::FetchTransactionInputsFailed)?; - // Add the vault key for the fee asset to the list of asset vault keys which will need to be - // accessed at the end of the 
transaction. + let native_account_vault_root = account.vault().root(); let fee_asset_vault_key = - AssetVaultKey::from_account_id(block_header.fee_parameters().native_asset_id()) + AssetVaultKey::new_fungible(block_header.fee_parameters().native_asset_id()) .expect("fee asset should be a fungible asset"); + + let mut tx_inputs = TransactionInputs::new(account, block_header, blockchain, input_notes) + .map_err(TransactionExecutorError::InvalidTransactionInputs)? + .with_tx_args(tx_args); + + // Add the vault key for the fee asset to the list of asset vault keys which will need to be + // accessed at the end of the transaction. asset_vault_keys.insert(fee_asset_vault_key); - // Fetch the witnesses for all asset vault keys. - let asset_witnesses = self - .data_store - .get_vault_asset_witnesses(account_id, account.vault().root(), asset_vault_keys) - .await - .map_err(TransactionExecutorError::FetchAssetWitnessFailed)?; + // filter out any asset vault keys for which we already have witnesses in the advice inputs + asset_vault_keys.retain(|asset_key| { + !tx_inputs.has_vault_asset_witness(native_account_vault_root, asset_key) + }); - let tx_inputs = TransactionInputs::new(account, block_header, blockchain, input_notes) - .map_err(TransactionExecutorError::InvalidTransactionInputs)? 
- .with_tx_args(tx_args) - .with_asset_witnesses(asset_witnesses); + // if any of the witnesses are missing, fetch them from the data store and add to tx_inputs + if !asset_vault_keys.is_empty() { + let asset_witnesses = self + .data_store + .get_vault_asset_witnesses(account_id, native_account_vault_root, asset_vault_keys) + .await + .map_err(TransactionExecutorError::FetchAssetWitnessFailed)?; + + tx_inputs = tx_inputs.with_asset_witnesses(asset_witnesses); + } Ok(tx_inputs) } @@ -306,14 +311,6 @@ where TransactionExecutorError, > { let (stack_inputs, tx_advice_inputs) = TransactionKernel::prepare_inputs(tx_inputs); - - // This reverses the stack inputs (even though it doesn't look like it does) because the - // fast processor expects the reverse order. - // - // Once we use the FastProcessor for execution and proving, we can change the way these - // inputs are constructed in TransactionKernel::prepare_inputs. - let stack_inputs = StackInputs::new(stack_inputs.iter().copied().collect()).unwrap(); - let input_notes = tx_inputs.input_notes(); let script_mast_store = ScriptMastForestStore::new( @@ -327,25 +324,23 @@ where AccountProcedureIndexMap::new([tx_inputs.account().code()]); let initial_fee_asset_balance = { + let vault_root = tx_inputs.account().vault().root(); let native_asset_id = tx_inputs.block_header().fee_parameters().native_asset_id(); - let fee_asset_vault_key = AssetVaultKey::from_account_id(native_asset_id) + let fee_asset_vault_key = AssetVaultKey::new_fungible(native_asset_id) .expect("fee asset should be a fungible asset"); - let fee_asset_witness = tx_inputs - .asset_witnesses() - .iter() - .find_map(|witness| witness.find(fee_asset_vault_key)); - - match fee_asset_witness { + let fee_asset = tx_inputs + .read_vault_asset(vault_root, fee_asset_vault_key) + .map_err(TransactionExecutorError::FeeAssetRetrievalFailed)?; + match fee_asset { Some(Asset::Fungible(fee_asset)) => fee_asset.amount(), Some(Asset::NonFungible(_)) => { return 
Err(TransactionExecutorError::FeeAssetMustBeFungible); }, - // If the witness does not contain the asset, its balance is zero. + // If the asset was not found, its balance is zero. None => 0, } }; - let host = TransactionExecutorHost::new( tx_inputs.account(), input_notes.clone(), diff --git a/crates/miden-tx/src/executor/notes_checker.rs b/crates/miden-tx/src/executor/notes_checker.rs index 9741e79824..0cbddbe041 100644 --- a/crates/miden-tx/src/executor/notes_checker.rs +++ b/crates/miden-tx/src/executor/notes_checker.rs @@ -1,7 +1,8 @@ use alloc::collections::BTreeMap; use alloc::vec::Vec; -use miden_processor::fast::FastProcessor; +use miden_processor::FastProcessor; +use miden_processor::advice::AdviceInputs; use miden_protocol::account::AccountId; use miden_protocol::block::BlockNumber; use miden_protocol::note::Note; @@ -12,8 +13,7 @@ use miden_protocol::transaction::{ TransactionInputs, TransactionKernel, }; -use miden_prover::AdviceInputs; -use miden_standards::note::{NoteConsumptionStatus, WellKnownNote}; +use miden_standards::note::{NoteConsumptionStatus, StandardNote}; use super::TransactionExecutor; use crate::auth::TransactionAuthenticator; @@ -120,8 +120,10 @@ where if num_notes == 0 || num_notes > MAX_NUM_CHECKER_NOTES { return Err(NoteCheckerError::InputNoteCountOutOfRange(num_notes)); } - // Ensure well-known notes are ordered first. - notes.sort_unstable_by_key(|note| WellKnownNote::from_note(note).is_none()); + // Ensure standard notes are ordered first. 
+ notes.sort_unstable_by_key(|note| { + StandardNote::from_script_root(note.script().root()).is_none() + }); let notes = InputNotes::from(notes); let tx_inputs = self @@ -152,10 +154,10 @@ where note: InputNote, tx_args: TransactionArgs, ) -> Result { - // return the consumption status if we manage to determine it from the well-known note - if let Some(well_known_note) = WellKnownNote::from_note(note.note()) + // Return the consumption status if we manage to determine it from the standard note + if let Some(standard_note) = StandardNote::from_script_root(note.note().script().root()) && let Some(consumption_status) = - well_known_note.is_consumable(note.note(), target_account_id, block_ref) + standard_note.is_consumable(note.note(), target_account_id, block_ref) { return Ok(consumption_status); } @@ -335,8 +337,7 @@ where .await .map_err(TransactionCheckerError::TransactionPreparation)?; - let processor = - FastProcessor::new_with_advice_inputs(stack_inputs.as_slice(), advice_inputs); + let processor = FastProcessor::new(stack_inputs).with_advice(advice_inputs); let result = processor .execute(&TransactionKernel::main(), &mut host) .await diff --git a/crates/miden-tx/src/host/account_delta_tracker.rs b/crates/miden-tx/src/host/account_delta_tracker.rs index f62e7996e8..889470b735 100644 --- a/crates/miden-tx/src/host/account_delta_tracker.rs +++ b/crates/miden-tx/src/host/account_delta_tracker.rs @@ -1,3 +1,4 @@ +use miden_protocol::Felt; use miden_protocol::account::{ AccountCode, AccountDelta, @@ -5,7 +6,6 @@ use miden_protocol::account::{ AccountVaultDelta, PartialAccount, }; -use miden_protocol::{Felt, FieldElement, ZERO}; use crate::host::storage_delta_tracker::StorageDeltaTracker; @@ -44,7 +44,7 @@ impl AccountDeltaTracker { storage: StorageDeltaTracker::new(account), vault: AccountVaultDelta::default(), code, - nonce_delta: ZERO, + nonce_delta: Felt::ZERO, } } diff --git a/crates/miden-tx/src/host/kernel_process.rs b/crates/miden-tx/src/host/kernel_process.rs 
index 5d948de35a..dc8ec218aa 100644 --- a/crates/miden-tx/src/host/kernel_process.rs +++ b/crates/miden-tx/src/host/kernel_process.rs @@ -1,9 +1,12 @@ -use miden_processor::{ExecutionError, Felt, ProcessState}; +use miden_processor::{ExecutionError, Felt, ProcessorState}; +use miden_protocol::Word; use miden_protocol::account::{AccountId, StorageSlotId, StorageSlotType}; -use miden_protocol::note::{NoteId, NoteInputs}; +use miden_protocol::note::{NoteId, NoteStorage}; use miden_protocol::transaction::memory::{ ACCOUNT_STACK_TOP_PTR, ACCT_CODE_COMMITMENT_OFFSET, + ACCT_ID_PREFIX_IDX, + ACCT_ID_SUFFIX_IDX, ACCT_STORAGE_SLOT_ID_PREFIX_OFFSET, ACCT_STORAGE_SLOT_ID_SUFFIX_OFFSET, ACCT_STORAGE_SLOT_TYPE_OFFSET, @@ -12,7 +15,6 @@ use miden_protocol::transaction::memory::{ NATIVE_NUM_ACCT_STORAGE_SLOTS_PTR, NUM_OUTPUT_NOTES_PTR, }; -use miden_protocol::{Hasher, Word}; use crate::errors::TransactionKernelError; @@ -47,12 +49,12 @@ pub(super) trait TransactionKernelProcess { fn read_note_recipient_info_from_adv_map( &self, recipient_digest: Word, - ) -> Result<(NoteInputs, Word, Word), TransactionKernelError>; + ) -> Result<(NoteStorage, Word, Word), TransactionKernelError>; - fn read_note_inputs_from_adv_map( + fn read_note_storage_from_adv_map( &self, - inputs_commitment: &Word, - ) -> Result; + storage_commitment: &Word, + ) -> Result; fn has_advice_map_entry(&self, key: Word) -> bool; @@ -65,20 +67,21 @@ pub(super) trait TransactionKernelProcess { ) -> Result; } -impl<'a> TransactionKernelProcess for ProcessState<'a> { +impl<'a> TransactionKernelProcess for ProcessorState<'a> { fn get_active_account_ptr(&self) -> Result { let account_stack_top_ptr = self.get_mem_value(self.ctx(), ACCOUNT_STACK_TOP_PTR).ok_or_else(|| { TransactionKernelError::other("account stack top ptr should be initialized") })?; - let account_stack_top_ptr = u32::try_from(account_stack_top_ptr).map_err(|_| { - TransactionKernelError::other("account stack top ptr should fit into a u32") - })?; + let 
account_stack_top_ptr = u32::try_from(account_stack_top_ptr.as_canonical_u64()) + .map_err(|_| { + TransactionKernelError::other("account stack top ptr should fit into a u32") + })?; let active_account_ptr = self .get_mem_value(self.ctx(), account_stack_top_ptr) .ok_or_else(|| TransactionKernelError::other("account id should be initialized"))?; - u32::try_from(active_account_ptr) + u32::try_from(active_account_ptr.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("active account ptr should fit into a u32")) } @@ -93,12 +96,15 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { TransactionKernelError::other("active account id should be initialized") })?; - AccountId::try_from([active_account_id_and_nonce[1], active_account_id_and_nonce[0]]) - .map_err(|_| { - TransactionKernelError::other( - "active account id ptr should point to a valid account ID", - ) - }) + AccountId::try_from_elements( + active_account_id_and_nonce[ACCT_ID_SUFFIX_IDX], + active_account_id_and_nonce[ACCT_ID_PREFIX_IDX], + ) + .map_err(|_| { + TransactionKernelError::other( + "active account id ptr should point to a valid account ID", + ) + }) } fn get_active_account_code_commitment(&self) -> Result { @@ -130,14 +136,14 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { NATIVE_NUM_ACCT_STORAGE_SLOTS_PTR, ))?; - Ok(num_storage_slots_felt.as_int()) + Ok(num_storage_slots_felt.as_canonical_u64()) } fn get_num_output_notes(&self) -> u64 { // Read the number from memory or default to 0 if the location hasn't been accessed // previously (e.g. when no notes have been created yet). 
self.get_mem_value(self.ctx(), NUM_OUTPUT_NOTES_PTR) - .map(|num_output_notes| num_output_notes.as_int()) + .map(|num_output_notes| num_output_notes.as_canonical_u64()) .unwrap_or(0) } @@ -155,7 +161,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { None => return Ok(None), }; // convert note address into u32 - let note_address = u32::try_from(note_address_felt).map_err(|_| { + let note_address = u32::try_from(note_address_felt.as_canonical_u64()).map_err(|_| { TransactionKernelError::other(format!( "failed to convert {note_address_felt} into a memory address (u32)" )) @@ -169,7 +175,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { .map_err(|err| { TransactionKernelError::other_with_source( "failed to read note address", - ExecutionError::MemoryError(err), + ExecutionError::MemoryErrorNoCtx(err), ) })? .map(NoteId::from_raw)) @@ -178,7 +184,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { /// Returns the vault root at the provided pointer. fn get_vault_root(&self, vault_root_ptr: Felt) -> Result { - let vault_root_ptr = u32::try_from(vault_root_ptr).map_err(|_err| { + let vault_root_ptr = u32::try_from(vault_root_ptr.as_canonical_u64()).map_err(|_err| { TransactionKernelError::other(format!( "vault root ptr should fit into a u32, but was {vault_root_ptr}" )) @@ -200,7 +206,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { &self, slot_ptr: Felt, ) -> Result<(StorageSlotId, StorageSlotType, Word), TransactionKernelError> { - let slot_ptr = u32::try_from(slot_ptr).map_err(|_err| { + let slot_ptr = u32::try_from(slot_ptr.as_canonical_u64()).map_err(|_err| { TransactionKernelError::other(format!( "slot ptr should fit into a u32, but was {slot_ptr}" )) @@ -234,7 +240,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { })?; let slot_type = slot_metadata[ACCT_STORAGE_SLOT_TYPE_OFFSET as usize]; - let slot_type = u8::try_from(slot_type).map_err(|err| { + let slot_type = 
u8::try_from(slot_type.as_canonical_u64()).map_err(|err| { TransactionKernelError::other(format!("failed to convert {slot_type} into u8: {err}")) })?; let slot_type = StorageSlotType::try_from(slot_type).map_err(|err| { @@ -254,52 +260,36 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { fn read_note_recipient_info_from_adv_map( &self, recipient_digest: Word, - ) -> Result<(NoteInputs, Word, Word), TransactionKernelError> { - let (sn_script_hash, inputs_commitment) = + ) -> Result<(NoteStorage, Word, Word), TransactionKernelError> { + let (sn_script_hash, storage_commitment) = read_double_word_from_adv_map(self, recipient_digest)?; let (sn_hash, script_root) = read_double_word_from_adv_map(self, sn_script_hash)?; let (serial_num, _) = read_double_word_from_adv_map(self, sn_hash)?; - let inputs = self.read_note_inputs_from_adv_map(&inputs_commitment)?; + let inputs = self.read_note_storage_from_adv_map(&storage_commitment)?; Ok((inputs, script_root, serial_num)) } - /// Extracts and validates note inputs from the advice provider. - fn read_note_inputs_from_adv_map( + /// Extracts and validates note storage from the advice provider. 
+ fn read_note_storage_from_adv_map( &self, - inputs_commitment: &Word, - ) -> Result { - let inputs_data = self.advice_provider().get_mapped_values(inputs_commitment); + storage_commitment: &Word, + ) -> Result { + let inputs_data = self.advice_provider().get_mapped_values(storage_commitment); match inputs_data { - None => Ok(NoteInputs::default()), - Some(inputs) => { - let inputs_commitment_hash = Hasher::hash_elements(inputs_commitment.as_elements()); - let num_inputs = self - .advice_provider() - .get_mapped_values(&inputs_commitment_hash) - .ok_or_else(|| { - TransactionKernelError::other( - "expected num_inputs to be present in advice provider", - ) - })?; - if num_inputs.len() != 1 { - return Err(TransactionKernelError::other( - "expected num_inputs advice entry to contain exactly one element", - )); - } - let num_inputs = num_inputs[0].as_int() as usize; - - let note_inputs = NoteInputs::new(inputs[0..num_inputs].to_vec()) - .map_err(TransactionKernelError::MalformedNoteInputs)?; + None => Ok(NoteStorage::default()), + Some(storage_items) => { + let note_storage = NoteStorage::new(storage_items.to_vec()) + .map_err(TransactionKernelError::MalformedNoteStorage)?; - if ¬e_inputs.commitment() == inputs_commitment { - Ok(note_inputs) + if ¬e_storage.commitment() == storage_commitment { + Ok(note_storage) } else { - Err(TransactionKernelError::InvalidNoteInputs { - expected: *inputs_commitment, - actual: note_inputs.commitment(), + Err(TransactionKernelError::InvalidNoteStorage { + expected: *storage_commitment, + actual: note_storage.commitment(), }) } }, @@ -335,7 +325,7 @@ impl<'a> TransactionKernelProcess for ProcessState<'a> { /// Returns an error if the key is not present in the advice map or if the data is malformed /// (not exactly 8 elements). 
fn read_double_word_from_adv_map( - process: &ProcessState, + process: &ProcessorState, key: Word, ) -> Result<(Word, Word), TransactionKernelError> { let data = process diff --git a/crates/miden-tx/src/host/link_map.rs b/crates/miden-tx/src/host/link_map.rs index 5024c86dfd..41353a30d1 100644 --- a/crates/miden-tx/src/host/link_map.rs +++ b/crates/miden-tx/src/host/link_map.rs @@ -1,8 +1,8 @@ use alloc::vec::Vec; use core::cmp::Ordering; -use miden_processor::fast::ExecutionOutput; -use miden_processor::{AdviceMutation, ContextId, ProcessState}; +use miden_processor::advice::AdviceMutation; +use miden_processor::{ContextId, ExecutionOutput, ProcessorState}; use miden_protocol::{Felt, LexicographicWord, Word, ZERO}; // LINK MAP @@ -30,7 +30,8 @@ impl<'process> LinkMap<'process> { /// Creates a new link map from the provided map_ptr in the provided process. pub fn new(map_ptr: Felt, mem: &'process MemoryViewer<'process>) -> Self { - let map_ptr: u32 = map_ptr.try_into().expect("map_ptr must be a valid u32"); + let map_ptr: u32 = + u32::try_from(map_ptr.as_canonical_u64()).expect("map_ptr must be a valid u32"); Self { map_ptr, mem } } @@ -41,32 +42,32 @@ impl<'process> LinkMap<'process> { /// Handles a `LINK_MAP_SET_EVENT` emitted from a VM. 
/// /// Expected operand stack state before: [map_ptr, KEY, NEW_VALUE] - /// Advice stack state after: [set_operation, entry_ptr] - pub fn handle_set_event(process: &ProcessState<'_>) -> Vec { + /// Advice stack state after: [entry_ptr, set_operation] + pub fn handle_set_event(process: &ProcessorState<'_>) -> Vec { let map_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); let mem_viewer = MemoryViewer::ProcessState(process); let link_map = LinkMap::new(map_ptr, &mem_viewer); let (set_op, entry_ptr) = link_map.compute_set_operation(LexicographicWord::from(map_key)); - vec![AdviceMutation::extend_stack([Felt::from(set_op as u8), Felt::from(entry_ptr)])] + vec![AdviceMutation::extend_stack([Felt::from(entry_ptr), Felt::from(set_op as u8)])] } /// Handles a `LINK_MAP_GET_EVENT` emitted from a VM. /// /// Expected operand stack state before: [map_ptr, KEY] - /// Advice stack state after: [get_operation, entry_ptr] - pub fn handle_get_event(process: &ProcessState<'_>) -> Vec { + /// Advice stack state after: [entry_ptr, get_operation] + pub fn handle_get_event(process: &ProcessorState<'_>) -> Vec { let map_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); let mem_viewer = MemoryViewer::ProcessState(process); let link_map = LinkMap::new(map_ptr, &mem_viewer); let (get_op, entry_ptr) = link_map.compute_get_operation(LexicographicWord::from(map_key)); - vec![AdviceMutation::extend_stack([Felt::from(get_op as u8), Felt::from(entry_ptr)])] + vec![AdviceMutation::extend_stack([Felt::from(entry_ptr), Felt::from(get_op as u8)])] } /// Returns `true` if the map is empty, `false` otherwise. 
@@ -94,7 +95,10 @@ impl<'process> LinkMap<'process> { if head_ptr == ZERO { None } else { - Some(u32::try_from(head_ptr).expect("head ptr should be a valid ptr")) + Some( + u32::try_from(head_ptr.as_canonical_u64()) + .expect("head ptr should be a valid ptr"), + ) } }) } @@ -142,16 +146,15 @@ impl<'process> LinkMap<'process> { self.mem.get_kernel_mem_word(entry_ptr).expect("entry pointer should be valid"); let map_ptr = entry_metadata[0]; - let map_ptr = map_ptr.try_into().expect("entry_ptr should point to a u32 map_ptr"); + let map_ptr = u32::try_from(map_ptr.as_canonical_u64()) + .expect("entry_ptr should point to a u32 map_ptr"); let prev_entry_ptr = entry_metadata[1]; - let prev_entry_ptr = prev_entry_ptr - .try_into() + let prev_entry_ptr = u32::try_from(prev_entry_ptr.as_canonical_u64()) .expect("entry_ptr should point to a u32 prev_entry_ptr"); let next_entry_ptr = entry_metadata[2]; - let next_entry_ptr = next_entry_ptr - .try_into() + let next_entry_ptr = u32::try_from(next_entry_ptr.as_canonical_u64()) .expect("entry_ptr should point to a u32 next_entry_ptr"); EntryMetadata { map_ptr, prev_entry_ptr, next_entry_ptr } @@ -293,14 +296,14 @@ enum SetOperation { /// A abstraction over ways to view a process' memory. /// -/// More specifically, it allows using a [`LinkMap`] both with a [`ProcessState`], i.e. a process +/// More specifically, it allows using a [`LinkMap`] both with a [`ProcessorState`], i.e. a process /// that is actively executing and also an [`ExecutionOutput`], i.e. a process that has finished /// execution. /// /// This should all go away again once we change a LinkMap's implementation to be based on an actual /// map type instead of viewing a process' memory directly. 
pub enum MemoryViewer<'mem> { - ProcessState(&'mem ProcessState<'mem>), + ProcessState(&'mem ProcessorState<'mem>), ExecutionOutputs(&'mem ExecutionOutput), } @@ -333,14 +336,13 @@ impl<'mem> MemoryViewer<'mem> { MemoryViewer::ExecutionOutputs(execution_output) => { let tx_kernel_context = ContextId::root(); let clk = 0u32; - let err_ctx = (); // Note that this never returns None even if the location is uninitialized, but the // link map does not rely on this. Some( execution_output .memory - .read_word(tx_kernel_context, Felt::from(addr), clk.into(), &err_ctx) + .read_word(tx_kernel_context, Felt::from(addr), clk.into()) .expect("expected address to be word-aligned"), ) }, diff --git a/crates/miden-tx/src/host/mod.rs b/crates/miden-tx/src/host/mod.rs index 019a3a0291..161636bd7c 100644 --- a/crates/miden-tx/src/host/mod.rs +++ b/crates/miden-tx/src/host/mod.rs @@ -28,15 +28,11 @@ use alloc::collections::BTreeMap; use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - EventError, - EventHandlerRegistry, - Felt, - MastForest, - MastForestStore, - ProcessState, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::{EventError, EventHandlerRegistry}; +use miden_processor::mast::MastForest; +use miden_processor::trace::RowIndex; +use miden_processor::{Felt, MastForestStore, ProcessorState}; use miden_protocol::Word; use miden_protocol::account::{ AccountCode, @@ -45,6 +41,7 @@ use miden_protocol::account::{ AccountId, AccountStorageHeader, PartialAccount, + StorageMapKey, StorageSlotHeader, StorageSlotId, StorageSlotName, @@ -54,12 +51,11 @@ use miden_protocol::note::{NoteAttachment, NoteId, NoteMetadata, NoteRecipient}; use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, - OutputNotes, + RawOutputNote, + RawOutputNotes, TransactionMeasurements, TransactionSummary, }; -use miden_protocol::vm::RowIndex; pub(crate) use tx_event::{RecipientData, TransactionEvent, TransactionProgressEvent}; pub 
use tx_progress::TransactionProgress; @@ -192,12 +188,12 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// Clones the inner [`OutputNoteBuilder`]s and returns the vector of created output notes that /// are tracked by this host. - pub fn build_output_notes(&self) -> Vec { + pub fn build_output_notes(&self) -> Vec { self.output_notes.values().cloned().map(|builder| builder.build()).collect() } /// Consumes `self` and returns the account delta, input and output notes. - pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { + pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { let output_notes = self.output_notes.into_values().map(|builder| builder.build()).collect(); (self.account_delta.into_delta(), self.input_notes, output_notes) @@ -269,7 +265,7 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// Returns `Some` if the event was handled, `None` otherwise. pub fn handle_core_lib_events( &self, - process: &ProcessState, + process: &ProcessorState, ) -> Result>, EventError> { let event_id = EventId::from_felt(process.get_stack_item(0)); if let Some(mutations) = self.core_lib_handlers.handle_event(event_id, process)? { @@ -358,7 +354,7 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { pub fn on_account_storage_after_set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, old_map_value: Word, new_map_value: Word, ) -> Result, TransactionKernelError> { @@ -405,15 +401,15 @@ impl<'store, STORE> TransactionBaseHost<'store, STORE> { /// provided commitments. 
pub(crate) fn build_tx_summary( &self, - salt: Word, - output_notes_commitment: Word, - input_notes_commitment: Word, account_delta_commitment: Word, + input_notes_commitment: Word, + output_notes_commitment: Word, + salt: Word, ) -> Result { let account_delta = self.build_account_delta(); let input_notes = self.input_notes(); let output_notes_vec = self.build_output_notes(); - let output_notes = OutputNotes::new(output_notes_vec).map_err(|err| { + let output_notes = RawOutputNotes::new(output_notes_vec).map_err(|err| { TransactionKernelError::TransactionSummaryConstructionFailed(Box::new(err)) })?; diff --git a/crates/miden-tx/src/host/note_builder.rs b/crates/miden-tx/src/host/note_builder.rs index eac4f8a006..d392c16b51 100644 --- a/crates/miden-tx/src/host/note_builder.rs +++ b/crates/miden-tx/src/host/note_builder.rs @@ -1,4 +1,7 @@ +use alloc::vec::Vec; + use miden_protocol::asset::Asset; +use miden_protocol::errors::NoteError; use miden_protocol::note::{ Note, NoteAssets, @@ -8,17 +11,21 @@ use miden_protocol::note::{ PartialNote, }; -use super::{OutputNote, Word}; +use super::{RawOutputNote, Word}; use crate::errors::TransactionKernelError; // OUTPUT NOTE BUILDER // ================================================================================================ /// Builder of an output note, provided primarily to enable adding assets to a note incrementally. +/// +/// Assets are accumulated in a `Vec` and the final `NoteAssets` is only constructed when +/// [`build`](Self::build) is called. This avoids recomputing the commitment hash on every asset +/// addition. 
#[derive(Debug, Clone)] pub struct OutputNoteBuilder { metadata: NoteMetadata, - assets: NoteAssets, + assets: Vec, recipient_digest: Word, recipient: Option, } @@ -50,7 +57,7 @@ impl OutputNoteBuilder { metadata, recipient_digest, recipient: None, - assets: NoteAssets::default(), + assets: Vec::new(), }) } @@ -60,7 +67,7 @@ impl OutputNoteBuilder { metadata, recipient_digest: recipient.digest(), recipient: Some(recipient), - assets: NoteAssets::default(), + assets: Vec::new(), } } @@ -78,9 +85,34 @@ impl OutputNoteBuilder { /// - Adding the asset to the note will push the list beyond the [NoteAssets::MAX_NUM_ASSETS] /// limit. pub fn add_asset(&mut self, asset: Asset) -> Result<(), TransactionKernelError> { - self.assets - .add_asset(asset) - .map_err(TransactionKernelError::FailedToAddAssetToNote)?; + // Check if an asset issued by the same faucet already exists in the list of assets. + if let Some(own_asset) = self.assets.iter_mut().find(|a| a.is_same(&asset)) { + match own_asset { + Asset::Fungible(f_own_asset) => { + // If a fungible asset issued by the same faucet is found, try to add the + // provided asset to it. + let new_asset = f_own_asset + .add(asset.unwrap_fungible()) + .map_err(NoteError::AddFungibleAssetBalanceError) + .map_err(TransactionKernelError::FailedToAddAssetToNote)?; + *own_asset = Asset::Fungible(new_asset); + }, + Asset::NonFungible(nf_asset) => { + return Err(TransactionKernelError::FailedToAddAssetToNote( + NoteError::DuplicateNonFungibleAsset(*nf_asset), + )); + }, + } + } else { + // If the asset is not in the list, add it to the list. + self.assets.push(asset); + if self.assets.len() > NoteAssets::MAX_NUM_ASSETS { + return Err(TransactionKernelError::FailedToAddAssetToNote( + NoteError::TooManyAssets(self.assets.len()), + )); + } + } + Ok(()) } @@ -91,17 +123,20 @@ impl OutputNoteBuilder { /// Converts this builder to an [OutputNote]. 
/// - /// Depending on the available information, this may result in [OutputNote::Full] or - /// [OutputNote::Partial] notes. - pub fn build(self) -> OutputNote { + /// Depending on the available information, this may result in [`OutputNote::Full`] or + /// [`OutputNote::Partial`] notes. + pub fn build(self) -> RawOutputNote { + let assets = NoteAssets::new(self.assets) + .expect("assets should be valid since add_asset validates them"); + match self.recipient { Some(recipient) => { - let note = Note::new(self.assets, self.metadata, recipient); - OutputNote::Full(note) + let note = Note::new(assets, self.metadata, recipient); + RawOutputNote::Full(note) }, None => { - let note = PartialNote::new(self.metadata, self.recipient_digest, self.assets); - OutputNote::Partial(note) + let note = PartialNote::new(self.metadata, self.recipient_digest, assets); + RawOutputNote::Partial(note) }, } } diff --git a/crates/miden-tx/src/host/storage_delta_tracker.rs b/crates/miden-tx/src/host/storage_delta_tracker.rs index 6270612130..86400615c7 100644 --- a/crates/miden-tx/src/host/storage_delta_tracker.rs +++ b/crates/miden-tx/src/host/storage_delta_tracker.rs @@ -6,6 +6,7 @@ use miden_protocol::account::{ AccountStorageDelta, AccountStorageHeader, PartialAccount, + StorageMapKey, StorageSlotDelta, StorageSlotHeader, StorageSlotName, @@ -32,7 +33,7 @@ pub struct StorageDeltaTracker { storage_header: AccountStorageHeader, /// A map from slot name to a map of key-value pairs where the key is a storage map key and /// the value represents the value of that key at the beginning of transaction execution. - init_maps: BTreeMap>, + init_maps: BTreeMap>, /// The account storage delta. 
delta: AccountStorageDelta, } @@ -111,7 +112,7 @@ impl StorageDeltaTracker { pub fn set_map_item( &mut self, slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, prev_value: Word, new_value: Word, ) { @@ -134,7 +135,12 @@ impl StorageDeltaTracker { /// Sets the initial value of the given key in the given slot to the given value, if no value is /// already tracked for that key. - fn set_init_map_item(&mut self, slot_name: StorageSlotName, key: Word, prev_value: Word) { + fn set_init_map_item( + &mut self, + slot_name: StorageSlotName, + key: StorageMapKey, + prev_value: Word, + ) { let slot_map = self.init_maps.entry(slot_name).or_default(); slot_map.entry(key).or_insert(prev_value); } diff --git a/crates/miden-tx/src/host/tx_event.rs b/crates/miden-tx/src/host/tx_event.rs index 760a6aff4e..b84a6491df 100644 --- a/crates/miden-tx/src/host/tx_event.rs +++ b/crates/miden-tx/src/host/tx_event.rs @@ -1,7 +1,15 @@ use alloc::vec::Vec; -use miden_processor::{AdviceMutation, AdviceProvider, ProcessState, RowIndex}; -use miden_protocol::account::{AccountId, StorageMap, StorageSlotName, StorageSlotType}; +use miden_processor::ProcessorState; +use miden_processor::advice::{AdviceMutation, AdviceProvider}; +use miden_processor::trace::RowIndex; +use miden_protocol::account::{ + AccountId, + StorageMap, + StorageMapKey, + StorageSlotName, + StorageSlotType, +}; use miden_protocol::asset::{Asset, AssetVault, AssetVaultKey, FungibleAsset}; use miden_protocol::note::{ NoteAttachment, @@ -10,10 +18,10 @@ use miden_protocol::note::{ NoteAttachmentKind, NoteAttachmentScheme, NoteId, - NoteInputs, NoteMetadata, NoteRecipient, NoteScript, + NoteStorage, NoteTag, NoteType, }; @@ -77,7 +85,7 @@ pub(crate) enum TransactionEvent { AccountStorageAfterSetMapItem { slot_name: StorageSlotName, - key: Word, + key: StorageMapKey, old_value: Word, new_value: Word, }, @@ -89,7 +97,7 @@ pub(crate) enum TransactionEvent { /// The root of the storage map for which a witness is requested. 
map_root: Word, /// The raw map key for which a witness is requested. - map_key: Word, + map_key: StorageMapKey, }, /// The data necessary to request an asset witness from the data store. @@ -167,7 +175,7 @@ impl TransactionEvent { /// handled, `None` otherwise. pub fn extract<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, ) -> Result, TransactionKernelError> { let event_id = EventId::from_felt(process.get_stack_item(0)); let tx_event_id = TransactionEventId::try_from(event_id).map_err(|err| { @@ -179,9 +187,10 @@ impl TransactionEvent { let tx_event = match tx_event_id { TransactionEventId::AccountBeforeForeignLoad => { - // Expected stack state: [event, account_id_prefix, account_id_suffix] - let account_id_word = process.get_stack_word_be(1); - let account_id = AccountId::try_from([account_id_word[3], account_id_word[2]]) + // Expected stack state: [event, account_id_suffix, account_id_prefix] + let account_id_suffix = process.get_stack_item(1); + let account_id_prefix = process.get_stack_item(2); + let account_id = AccountId::try_from_elements(account_id_suffix, account_id_prefix) .map_err(|err| { TransactionKernelError::other_with_source( "failed to convert account ID word into account ID", @@ -193,83 +202,71 @@ impl TransactionEvent { }, TransactionEventId::AccountVaultBeforeAddAsset | TransactionEventId::AccountVaultBeforeRemoveAsset => { - // Expected stack state: [event, ASSET, account_vault_root_ptr] - let asset_word = process.get_stack_word_be(1); - let asset = Asset::try_from(asset_word).map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_before_add_or_remove_asset", - source, - } - })?; - - let vault_root_ptr = process.get_stack_item(5); + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE, account_vault_root_ptr] + let asset_vault_key = process.get_stack_word(1); + let vault_root_ptr = process.get_stack_item(9); + + let 
asset_vault_key = + AssetVaultKey::try_from(asset_vault_key).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultBefore{Add,Remove}Asset", + source, + } + })?; let current_vault_root = process.get_vault_root(vault_root_ptr)?; on_account_vault_asset_accessed( base_host, process, - asset.vault_key(), + asset_vault_key, current_vault_root, )? }, TransactionEventId::AccountVaultAfterRemoveAsset => { - // Expected stack state: [event, ASSET] - let asset: Asset = process.get_stack_word_be(1).try_into().map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_after_remove_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultAfterRemoveAsset", + source, + } + })?; Some(TransactionEvent::AccountVaultAfterRemoveAsset { asset }) }, TransactionEventId::AccountVaultAfterAddAsset => { - // Expected stack state: [event, ASSET] - let asset: Asset = process.get_stack_word_be(1).try_into().map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_account_vault_after_add_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultAfterAddAsset", + source, + } + })?; Some(TransactionEvent::AccountVaultAfterAddAsset { asset }) }, - TransactionEventId::AccountVaultBeforeGetBalance => { + TransactionEventId::AccountVaultBeforeGetAsset => { // Expected stack state: - // [event, 
faucet_id_prefix, faucet_id_suffix, vault_root_ptr] - let stack_top = process.get_stack_word_be(1); - let faucet_id = - AccountId::try_from([stack_top[3], stack_top[2]]).map_err(|err| { - TransactionKernelError::other_with_source( - "failed to convert faucet ID word into faucet ID", - err, - ) - })?; - let vault_root_ptr = stack_top[1]; - let vault_root = process.get_vault_root(vault_root_ptr)?; - - let vault_key = AssetVaultKey::from_account_id(faucet_id).ok_or_else(|| { - TransactionKernelError::other(format!( - "provided faucet ID {faucet_id} is not valid for fungible assets" - )) - })?; + // [event, ASSET_KEY, vault_root_ptr] + let asset_key = process.get_stack_word(1); + let vault_root_ptr = process.get_stack_item(5); - on_account_vault_asset_accessed(base_host, process, vault_key, vault_root)? - }, - TransactionEventId::AccountVaultBeforeHasNonFungibleAsset => { - // Expected stack state: [event, ASSET, vault_root_ptr] - let asset_word = process.get_stack_word_be(1); - let asset = Asset::try_from(asset_word).map_err(|err| { - TransactionKernelError::other_with_source( - "provided asset is not a valid asset", - err, - ) + let asset_key = AssetVaultKey::try_from(asset_key).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "AccountVaultBeforeGetAsset", + source, + } })?; - - let vault_root_ptr = process.get_stack_item(5); let vault_root = process.get_vault_root(vault_root_ptr)?; - on_account_vault_asset_accessed(base_host, process, asset.vault_key(), vault_root)? + on_account_vault_asset_accessed(base_host, process, asset_key, vault_root)? 
}, TransactionEventId::AccountStorageBeforeSetItem => None, @@ -277,7 +274,7 @@ impl TransactionEvent { TransactionEventId::AccountStorageAfterSetItem => { // Expected stack state: [event, slot_ptr, VALUE] let slot_ptr = process.get_stack_item(1); - let new_value = process.get_stack_word_be(2); + let new_value = process.get_stack_word(2); let (slot_id, slot_type, _old_value) = process.get_storage_slot(slot_ptr)?; @@ -296,7 +293,8 @@ impl TransactionEvent { TransactionEventId::AccountStorageBeforeGetMapItem => { // Expected stack state: [event, slot_ptr, KEY] let slot_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); + let map_key = StorageMapKey::from_raw(map_key); on_account_storage_map_item_accessed(base_host, process, slot_ptr, map_key)? }, @@ -304,7 +302,8 @@ impl TransactionEvent { TransactionEventId::AccountStorageBeforeSetMapItem => { // Expected stack state: [event, slot_ptr, KEY] let slot_ptr = process.get_stack_item(1); - let map_key = process.get_stack_word_be(2); + let map_key = process.get_stack_word(2); + let map_key = StorageMapKey::from_raw(map_key); on_account_storage_map_item_accessed(base_host, process, slot_ptr, map_key)? }, @@ -312,10 +311,11 @@ impl TransactionEvent { TransactionEventId::AccountStorageAfterSetMapItem => { // Expected stack state: [event, slot_ptr, KEY, OLD_VALUE, NEW_VALUE] let slot_ptr = process.get_stack_item(1); - let key = process.get_stack_word_be(2); - let old_value = process.get_stack_word_be(6); - let new_value = process.get_stack_word_be(10); + let key = process.get_stack_word(2); + let old_value = process.get_stack_word(6); + let new_value = process.get_stack_word(10); + let key = StorageMapKey::from_raw(key); // Resolve slot ID to slot name. let (slot_id, ..) 
= process.get_storage_slot(slot_ptr)?; let slot_header = base_host.initial_account_storage_slot(slot_id)?; @@ -337,7 +337,7 @@ impl TransactionEvent { TransactionEventId::AccountPushProcedureIndex => { // Expected stack state: [event, PROC_ROOT] - let procedure_root = process.get_stack_word_be(1); + let procedure_root = process.get_stack_word(1); let code_commitment = process.get_active_account_code_commitment()?; Some(TransactionEvent::AccountPushProcedureIndex { @@ -350,7 +350,7 @@ impl TransactionEvent { // Expected stack state: [event, tag, note_type, RECIPIENT] let tag = process.get_stack_item(1); let note_type = process.get_stack_item(2); - let recipient_digest = process.get_stack_word_be(3); + let recipient_digest = process.get_stack_word(3); let sender = base_host.native_account_id(); let metadata = build_note_metadata(sender, note_type, tag)?; @@ -359,7 +359,7 @@ impl TransactionEvent { // try to read the full recipient from the advice provider let recipient_data = if process.has_advice_map_entry(recipient_digest) { - let (note_inputs, script_root, serial_num) = + let (note_storage, script_root, serial_num) = process.read_note_recipient_info_from_adv_map(recipient_digest)?; let note_script = process @@ -378,7 +378,7 @@ impl TransactionEvent { match note_script { Some(note_script) => { let recipient = - NoteRecipient::new(serial_num, note_script, note_inputs); + NoteRecipient::new(serial_num, note_script, note_storage); if recipient.digest() != recipient_digest { return Err(TransactionKernelError::other(format!( @@ -393,7 +393,7 @@ impl TransactionEvent { recipient_digest, serial_num, script_root, - note_inputs, + note_storage, }, } } else { @@ -406,16 +406,19 @@ impl TransactionEvent { TransactionEventId::NoteAfterCreated => None, TransactionEventId::NoteBeforeAddAsset => { - // Expected stack state: [event, ASSET, note_ptr, num_of_assets, note_idx] - let note_idx = process.get_stack_item(7).as_int() as usize; - - let asset_word = 
process.get_stack_word_be(1); - let asset = Asset::try_from(asset_word).map_err(|source| { - TransactionKernelError::MalformedAssetInEventHandler { - handler: "on_note_before_add_asset", - source, - } - })?; + // Expected stack state: [event, ASSET_KEY, ASSET_VALUE, note_idx] + let asset_key = process.get_stack_word(1); + let asset_value = process.get_stack_word(5); + let note_idx = process.get_stack_item(9); + + let asset = + Asset::from_key_value_words(asset_key, asset_value).map_err(|source| { + TransactionKernelError::MalformedAssetInEventHandler { + handler: "NoteBeforeAddAsset", + source, + } + })?; + let note_idx = note_idx.as_canonical_u64() as usize; Some(TransactionEvent::NoteBeforeAddAsset { note_idx, asset }) }, @@ -431,7 +434,7 @@ impl TransactionEvent { let attachment_scheme = process.get_stack_item(1); let attachment_kind = process.get_stack_item(2); let note_ptr = process.get_stack_item(3); - let attachment = process.get_stack_word_be(5); + let attachment = process.get_stack_word(5); let (note_idx, attachment) = extract_note_attachment( attachment_scheme, @@ -446,8 +449,8 @@ impl TransactionEvent { TransactionEventId::AuthRequest => { // Expected stack state: [event, MESSAGE, PUB_KEY] - let message = process.get_stack_word_be(1); - let pub_key_hash = process.get_stack_word_be(5); + let message = process.get_stack_word(1); + let pub_key_hash = process.get_stack_word(5); let signature_key = Hasher::merge(&[pub_key_hash, message]); let signature = process @@ -462,16 +465,18 @@ impl TransactionEvent { TransactionEventId::Unauthorized => { // Expected stack state: [event, MESSAGE] - let message = process.get_stack_word_be(1); + let message = process.get_stack_word(1); let tx_summary = extract_tx_summary(base_host, process, message)?; Some(TransactionEvent::Unauthorized { tx_summary }) }, TransactionEventId::EpilogueBeforeTxFeeRemovedFromAccount => { - // Expected stack state: [event, FEE_ASSET] - let fee_asset = process.get_stack_word_be(1); - let 
fee_asset = FungibleAsset::try_from(fee_asset) + // Expected stack state: [event, FEE_ASSET_KEY, FEE_ASSET_VALUE] + let fee_asset_key = process.get_stack_word(1); + let fee_asset_value = process.get_stack_word(5); + + let fee_asset = FungibleAsset::from_key_value_words(fee_asset_key, fee_asset_value) .map_err(TransactionKernelError::FailedToConvertFeeAsset)?; Some(TransactionEvent::EpilogueBeforeTxFeeRemovedFromAccount { fee_asset }) @@ -485,17 +490,17 @@ impl TransactionEvent { }), TransactionEventId::PrologueStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::PrologueStart(process.clk()), + TransactionProgressEvent::PrologueStart(process.clock()), )), TransactionEventId::PrologueEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::PrologueEnd(process.clk()), + TransactionProgressEvent::PrologueEnd(process.clock()), )), TransactionEventId::NotesProcessingStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::NotesProcessingStart(process.clk()), + TransactionProgressEvent::NotesProcessingStart(process.clock()), )), TransactionEventId::NotesProcessingEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::NotesProcessingEnd(process.clk()), + TransactionProgressEvent::NotesProcessingEnd(process.clock()), )), TransactionEventId::NoteExecutionStart => { @@ -505,36 +510,36 @@ impl TransactionEvent { Some(TransactionEvent::Progress(TransactionProgressEvent::NoteExecutionStart { note_id, - clk: process.clk(), + clk: process.clock(), })) }, TransactionEventId::NoteExecutionEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::NoteExecutionEnd(process.clk()), + TransactionProgressEvent::NoteExecutionEnd(process.clock()), )), TransactionEventId::TxScriptProcessingStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::TxScriptProcessingStart(process.clk()), + TransactionProgressEvent::TxScriptProcessingStart(process.clock()), )), TransactionEventId::TxScriptProcessingEnd => 
Some(TransactionEvent::Progress( - TransactionProgressEvent::TxScriptProcessingEnd(process.clk()), + TransactionProgressEvent::TxScriptProcessingEnd(process.clock()), )), TransactionEventId::EpilogueStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueStart(process.clk()), + TransactionProgressEvent::EpilogueStart(process.clock()), )), TransactionEventId::EpilogueEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueEnd(process.clk()), + TransactionProgressEvent::EpilogueEnd(process.clock()), )), TransactionEventId::EpilogueAuthProcStart => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAuthProcStart(process.clk()), + TransactionProgressEvent::EpilogueAuthProcStart(process.clock()), )), TransactionEventId::EpilogueAuthProcEnd => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAuthProcEnd(process.clk()), + TransactionProgressEvent::EpilogueAuthProcEnd(process.clock()), )), TransactionEventId::EpilogueAfterTxCyclesObtained => Some(TransactionEvent::Progress( - TransactionProgressEvent::EpilogueAfterTxCyclesObtained(process.clk()), + TransactionProgressEvent::EpilogueAfterTxCyclesObtained(process.clock()), )), }; @@ -557,7 +562,7 @@ pub(crate) enum RecipientData { recipient_digest: Word, serial_num: Word, script_root: Word, - note_inputs: NoteInputs, + note_storage: NoteStorage, }, } @@ -567,11 +572,12 @@ pub(crate) enum RecipientData { /// - If not, returns `Some` with all necessary data for requesting it. 
fn on_account_vault_asset_accessed<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, vault_key: AssetVaultKey, vault_root: Word, ) -> Result, TransactionKernelError> { - let leaf_index = Felt::new(vault_key.to_leaf_index().value()); + let leaf_index = + Felt::try_from(vault_key.to_leaf_index().value()).expect("expected key index to be a felt"); let active_account_id = process.get_active_account_id()?; // For the native account we need to explicitly request the initial vault root, while for @@ -604,9 +610,9 @@ fn on_account_vault_asset_accessed<'store, STORE>( /// - If not, returns `Some` with all necessary data for requesting it. fn on_account_storage_map_item_accessed<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, slot_ptr: Felt, - map_key: Word, + map_key: StorageMapKey, ) -> Result, TransactionKernelError> { let (slot_id, slot_type, current_map_root) = process.get_storage_slot(slot_ptr)?; @@ -617,7 +623,9 @@ fn on_account_storage_map_item_accessed<'store, STORE>( } let active_account_id = process.get_active_account_id()?; - let leaf_index: Felt = StorageMap::map_key_to_leaf_index(map_key) + let leaf_index: Felt = map_key + .hash() + .to_leaf_index() .value() .try_into() .expect("expected key index to be a felt"); @@ -664,7 +672,7 @@ fn on_account_storage_map_item_accessed<'store, STORE>( /// ``` fn extract_tx_summary<'store, STORE>( base_host: &TransactionBaseHost<'store, STORE>, - process: &ProcessState, + process: &ProcessorState, message: Word, ) -> Result { let Some(commitments) = process.advice_provider().get_mapped_values(&message) else { @@ -679,16 +687,16 @@ fn extract_tx_summary<'store, STORE>( )); } - let salt = extract_word(commitments, 0); - let output_notes_commitment = extract_word(commitments, 4); - let input_notes_commitment = extract_word(commitments, 8); - let account_delta_commitment = 
extract_word(commitments, 12); + let account_delta_commitment = extract_word(commitments, 0); + let input_notes_commitment = extract_word(commitments, 4); + let output_notes_commitment = extract_word(commitments, 8); + let salt = extract_word(commitments, 12); let tx_summary = base_host.build_tx_summary( - salt, - output_notes_commitment, - input_notes_commitment, account_delta_commitment, + input_notes_commitment, + output_notes_commitment, + salt, )?; if tx_summary.to_commitment() != message { @@ -709,7 +717,7 @@ fn build_note_metadata( note_type: Felt, tag: Felt, ) -> Result { - let note_type = u8::try_from(note_type) + let note_type = u8::try_from(note_type.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to decode note_type into u8")) .and_then(|note_type_byte| { NoteType::try_from(note_type_byte).map_err(|source| { @@ -720,11 +728,11 @@ fn build_note_metadata( }) })?; - let tag = u32::try_from(tag) + let tag = u32::try_from(tag.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to decode note tag into u32")) .map(NoteTag::new)?; - Ok(NoteMetadata::new(sender, note_type, tag)) + Ok(NoteMetadata::new(sender, note_type).with_tag(tag)) } fn extract_note_attachment( @@ -736,7 +744,7 @@ fn extract_note_attachment( ) -> Result<(usize, NoteAttachment), TransactionKernelError> { let note_idx = note_ptr_to_idx(note_ptr)?; - let attachment_kind = u8::try_from(attachment_kind) + let attachment_kind = u8::try_from(attachment_kind.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert attachment kind to u8")) .and_then(|attachment_kind| { NoteAttachmentKind::try_from(attachment_kind).map_err(|source| { @@ -747,7 +755,7 @@ fn extract_note_attachment( }) })?; - let attachment_scheme = u32::try_from(attachment_scheme) + let attachment_scheme = u32::try_from(attachment_scheme.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert attachment scheme to u32")) 
.map(NoteAttachmentScheme::new)?; @@ -806,7 +814,7 @@ fn extract_word(commitments: &[Felt], start: usize) -> Word { /// Converts the provided note ptr into the corresponding note index. fn note_ptr_to_idx(note_ptr: Felt) -> Result { - u32::try_from(note_ptr) + u32::try_from(note_ptr.as_canonical_u64()) .map_err(|_| TransactionKernelError::other("failed to convert note_ptr to u32")) .and_then(|note_ptr| { note_ptr diff --git a/crates/miden-tx/src/prover/mod.rs b/crates/miden-tx/src/prover/mod.rs index 60d13b4c52..ec2b54ec50 100644 --- a/crates/miden-tx/src/prover/mod.rs +++ b/crates/miden-tx/src/prover/mod.rs @@ -8,12 +8,11 @@ use miden_protocol::block::BlockNumber; use miden_protocol::transaction::{ InputNote, InputNotes, - OutputNote, ProvenTransaction, - ProvenTransactionBuilder, TransactionInputs, TransactionKernel, TransactionOutputs, + TxAccountUpdate, }; pub use miden_prover::ProvingOptions; use miden_prover::{ExecutionProof, Word, prove}; @@ -56,26 +55,17 @@ impl LocalTransactionProver { proof: ExecutionProof, ) -> Result { // erase private note information (convert private full notes to just headers) - let output_notes: Vec<_> = tx_outputs.output_notes.iter().map(OutputNote::shrink).collect(); + let output_notes: Vec<_> = tx_outputs + .output_notes + .iter() + .map(|note| note.to_output_note()) + .collect::, _>>() + .map_err(TransactionProverError::OutputNoteShrinkFailed)?; // Compute the commitment of the pre-fee delta, which goes into the proven transaction, // since it is the output of the transaction and so is needed for proof verification. 
let pre_fee_delta_commitment: Word = pre_fee_account_delta.to_commitment(); - let builder = ProvenTransactionBuilder::new( - account.id(), - account.initial_commitment(), - tx_outputs.account.commitment(), - pre_fee_delta_commitment, - ref_block_num, - ref_block_commitment, - tx_outputs.fee, - tx_outputs.expiration_block_num, - proof, - ) - .add_input_notes(input_notes) - .add_output_notes(output_notes); - // The full transaction delta is the pre fee delta with the fee asset removed. let mut post_fee_account_delta = pre_fee_account_delta; post_fee_account_delta @@ -83,18 +73,35 @@ impl LocalTransactionProver { .remove_asset(Asset::from(tx_outputs.fee)) .map_err(TransactionProverError::RemoveFeeAssetFromDelta)?; - let builder = match account.has_public_state() { - true => { - let account_update_details = AccountUpdateDetails::Delta(post_fee_account_delta); - builder.account_update_details(account_update_details) - }, - false => builder, + let account_update_details = if account.has_public_state() { + AccountUpdateDetails::Delta(post_fee_account_delta) + } else { + AccountUpdateDetails::Private }; - builder.build().map_err(TransactionProverError::ProvenTransactionBuildFailed) + let account_update = TxAccountUpdate::new( + account.id(), + account.initial_commitment(), + tx_outputs.account.to_commitment(), + pre_fee_delta_commitment, + account_update_details, + ) + .map_err(TransactionProverError::ProvenTransactionBuildFailed)?; + + ProvenTransaction::new( + account_update, + input_notes.iter(), + output_notes, + ref_block_num, + ref_block_commitment, + tx_outputs.fee, + tx_outputs.expiration_block_num, + proof, + ) + .map_err(TransactionProverError::ProvenTransactionBuildFailed) } - pub fn prove( + pub async fn prove( &self, tx_inputs: impl Into, ) -> Result { @@ -133,6 +140,7 @@ impl LocalTransactionProver { &mut host, self.proof_options.clone(), ) + .await .map_err(TransactionProverError::TransactionProgramExecutionFailed)?; // Extract transaction outputs and 
process transaction data. diff --git a/crates/miden-tx/src/prover/prover_host.rs b/crates/miden-tx/src/prover/prover_host.rs index db00cdf2d0..f990ccb098 100644 --- a/crates/miden-tx/src/prover/prover_host.rs +++ b/crates/miden-tx/src/prover/prover_host.rs @@ -1,25 +1,20 @@ use alloc::sync::Arc; use alloc::vec::Vec; -use miden_processor::{ - AdviceMutation, - BaseHost, - EventError, - MastForest, - MastForestStore, - ProcessState, - SyncHost, -}; +use miden_processor::advice::AdviceMutation; +use miden_processor::event::EventError; +use miden_processor::mast::MastForest; +use miden_processor::{FutureMaybeSend, Host, MastForestStore, ProcessorState}; use miden_protocol::Word; use miden_protocol::account::{AccountDelta, PartialAccount}; use miden_protocol::assembly::debuginfo::Location; use miden_protocol::assembly::{SourceFile, SourceSpan}; -use miden_protocol::transaction::{InputNote, InputNotes, OutputNote}; +use miden_protocol::transaction::{InputNote, InputNotes, RawOutputNote}; use crate::host::{RecipientData, ScriptMastForestStore, TransactionBaseHost, TransactionEvent}; use crate::{AccountProcedureIndexMap, TransactionKernelError}; -/// The transaction prover host is responsible for handling [`SyncHost`] requests made by the +/// The transaction prover host is responsible for handling [`Host`] requests made by the /// transaction kernel during proving. pub struct TransactionProverHost<'store, STORE> where @@ -59,7 +54,7 @@ where // -------------------------------------------------------------------------------------------- /// Consumes `self` and returns the account delta, input and output notes. 
- pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { + pub fn into_parts(self) -> (AccountDelta, InputNotes, Vec) { self.base_host.into_parts() } } @@ -67,7 +62,7 @@ where // HOST IMPLEMENTATION // ================================================================================================ -impl BaseHost for TransactionProverHost<'_, STORE> +impl Host for TransactionProverHost<'_, STORE> where STORE: MastForestStore, { @@ -80,17 +75,29 @@ where // is only used to improve error message quality which we shouldn't run into here. (SourceSpan::UNKNOWN, None) } + + fn get_mast_forest(&self, node_digest: &Word) -> impl FutureMaybeSend>> { + let result = self.base_host.get_mast_forest(node_digest); + async move { result } + } + + fn on_event( + &mut self, + process: &ProcessorState, + ) -> impl FutureMaybeSend, EventError>> { + let result = self.on_event_sync(process); + async move { result } + } } -impl SyncHost for TransactionProverHost<'_, STORE> +impl TransactionProverHost<'_, STORE> where STORE: MastForestStore, { - fn get_mast_forest(&self, node_digest: &Word) -> Option> { - self.base_host.get_mast_forest(node_digest) - } - - fn on_event(&mut self, process: &ProcessState) -> Result, EventError> { + fn on_event_sync( + &mut self, + process: &ProcessorState, + ) -> Result, EventError> { if let Some(advice_mutations) = self.base_host.handle_core_lib_events(process)? 
{ return Ok(advice_mutations); } diff --git a/deny.toml b/deny.toml index 3679142018..839727ffb2 100644 --- a/deny.toml +++ b/deny.toml @@ -12,6 +12,7 @@ ignore = [ "RUSTSEC-2024-0436", # paste is unmaintained but no alternative available "RUSTSEC-2025-0055", # tracing-subscriber vulnerability - will be fixed by upgrade "RUSTSEC-2025-0056", # adler is unmaintained but used by miniz_oxide + "RUSTSEC-2025-0141", # bincode is unmaintained, replace with wincode (https://github.com/0xMiden/miden-vm/issues/2550) ] yanked = "warn" @@ -22,7 +23,7 @@ allow = [ "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause", - "ISC", + "CC0-1.0", "MIT", "Unicode-3.0", "Zlib", @@ -54,14 +55,11 @@ skip-tree = [ { name = "rustc_version", version = "=0.2.*" }, # Allow unicode-width v0.1.x - used by miden-formatting vs textwrap conflict { name = "unicode-width", version = "=0.1.*" }, - # Allow windows-targets v0.48.x - older Windows target version - { name = "windows-targets", version = "=0.48.*" }, - # Allow windows-sys v0.48.x/v0.59.x - multiple Windows system libraries - { name = "windows-sys", version = "=0.48.*" }, - { name = "windows-sys", version = "=0.59.*" }, # Allow syn v1.x and v2.x - our derive macros need v1.x while ecosystem uses v2.x { name = "syn", version = "=1.0.109" }, - { name = "syn", version = "=2.0.111" }, + { name = "syn", version = "=2.0.117" }, + # Allow spin v0.9.x - legacy version used by some dependencies + { name = "spin", version = "=0.9.*" }, ] wildcards = "allow" diff --git a/docs/src/account/address.md b/docs/src/account/address.md index fe979e730e..9ceda4fa58 100644 --- a/docs/src/account/address.md +++ b/docs/src/account/address.md @@ -107,8 +107,8 @@ The encryption key routing parameter enables secure note payload encryption by a The supported **encryption schemes** are: - `X25519_XChaCha20Poly1305`: Curve25519-based key exchange with XChaCha20-Poly1305 authenticated encryption - `K256_XChaCha20Poly1305`: secp256k1-based key exchange with XChaCha20-Poly1305 
authenticated encryption -- `X25519_AeadRpo`: Curve25519-based key exchange with RPO-based authenticated encryption -- `K256_AeadRpo`: secp256k1-based key exchange with RPO-based authenticated encryption +- `X25519_AeadPoseidon2`: Curve25519-based key exchange with Poseidon2-based authenticated encryption +- `K256_AeadPoseidon2`: secp256k1-based key exchange with Poseidon2-based authenticated encryption The encryption key is optional in an address. If not provided, senders may use alternative encryption mechanisms or send unencrypted notes. diff --git a/docs/src/account/components.md b/docs/src/account/components.md index 300524086f..4adcbcb6e3 100644 --- a/docs/src/account/components.md +++ b/docs/src/account/components.md @@ -7,7 +7,7 @@ title: "Components" Account components are reusable units of functionality that define a part of an account's code and storage. Multiple account components can be merged together to form an account's final [code](./code) and [storage](./storage). -As an example, consider a typical wallet account, capable of holding a user's assets and requiring authentication whenever assets are added or removed. Such an account can be created by merging a `BasicWallet` component with an `Falcon512Rpo` authentication component. The basic wallet does not need any storage, but contains the code to move assets in and out of the account vault. The authentication component holds a user's public key in storage and additionally contains the code to verify a signature against that public key. Together, these components form a fully functional wallet account. +As an example, consider a typical wallet account, capable of holding a user's assets and requiring authentication whenever assets are added or removed. Such an account can be created by merging a `BasicWallet` component with a `Falcon512Poseidon2` authentication component. The basic wallet does not need any storage, but contains the code to move assets in and out of the account vault. 
The authentication component holds a user's public key in storage and additionally contains the code to verify a signature against that public key. Together, these components form a fully functional wallet account. ## Account Component schemas @@ -55,7 +55,7 @@ type = [ [[storage.slots]] name = "demo::owner_public_key" description = "This is a typed value supplied at instantiation and interpreted as a Falcon public key" -type = "miden::standards::auth::falcon512_rpo::pub_key" +type = "miden::standards::auth::pub_key" [[storage.slots]] name = "demo::protocol_version" @@ -98,7 +98,7 @@ In TOML, these are declared using dotted array keys: **Value-slot** entries describe their schema via `WordSchema`. A value type can be either: -- **Simple**: defined through the `type = ""` field, indicating the expected `SchemaTypeId` for the entire word. The value is supplied at instantiation time via `InitStorageData`. Felt types are stored as full words in the following layout: `[0, 0, 0, ]`. +- **Simple**: defined through the `type = ""` field, indicating the expected `SchemaType` for the entire word. The value is supplied at instantiation time via `InitStorageData`. Felt types are stored as full words in the following layout: `[0, 0, 0, ]`. - **Composite**: provided through `type = [ ... ]`, which contains exactly four `FeltSchema` descriptors. Each element is either a named typed field (optionally with `default-value`) or a `void` element for reserved/padding zeros. Composite schema entries reuse the existing TOML structure for four-element words, while simple schemas rely on `type`. In our example, the `token_metadata` slot uses a composite schema (`type = [...]`) mixing typed fields (`max_supply`, `decimals`) with defaults (`symbol`) and a reserved/padding `void` element. @@ -113,7 +113,7 @@ shape of the `type` field. 
##### Word types -Simple schemas accept `word` (default) and word-shaped types such as `miden::standards::auth::falcon512_rpo::pub_key` or `miden::standards::auth::ecdsa_k256_keccak::pub_key` (parsed from hexadecimal strings). +Simple schemas accept `word` (default) and word-shaped types such as `miden::standards::auth::pub_key` (parsed from hexadecimal strings). Simple schemas can also use any felt type (e.g. `u8`, `u16`, `u32`, `felt`, `miden::standards::fungible_faucets::metadata::token_symbol`, `void`). The value is parsed as a felt and stored as a word with the parsed felt in the last element and the remaining elements set to `0`. @@ -138,7 +138,7 @@ Valid field element types are `void`, `u8`, `u16`, `u32`, `felt` (default) and ` - `void` is a special type which always evaluates to `0` and does not produce an init requirement; it is intended for reserved or padding elements. - `u8`, `u16` and `u32` values can be parsed as decimal numbers and represent 8-bit, 16-bit and 32-bit unsigned integers. - `felt` values represent a field element, and can be parsed as decimal or hexadecimal numbers. -- `miden::standards::fungible_faucets::metadata::token_symbol` values represent basic fungible token symbols, parsed as 1–6 uppercase ASCII characters. +- `miden::standards::fungible_faucets::metadata::token_symbol` values represent basic fungible token symbols, parsed as 1–12 uppercase ASCII characters. ##### Value slots @@ -149,7 +149,7 @@ Single-slot entries are represented by `ValueSlotSchema` and occupy one slot (on - an array of 4 felt schema descriptors (composite slot schema). - `default-value` (optional): An overridable default for simple slots. If omitted, the slot is required at instantiation (unless `type = "void"`). -In our TOML example, the first entry defines a composite schema, while the second is an init-supplied value typed as `miden::standards::auth::falcon512_rpo::pub_key`. 
+In our TOML example, the first entry defines a composite schema, while the second is an init-supplied value typed as `miden::standards::auth::pub_key`. ##### Storage map slots @@ -173,10 +173,10 @@ You can type maps at the slot level via `type.key` and `type.value` (each a `Wor ```toml [[storage.slots]] name = "demo::typed_map" -type = { key = "word", value = "miden::standards::auth::falcon512_rpo::pub_key" } +type = { key = "word", value = "miden::standards::auth::pub_key" } ``` -This declares that all keys are `word` and all values are `miden::standards::auth::falcon512_rpo::pub_key`, regardless of whether the map contents come from `default-values = [...]` (static) or are supplied at instantiation via `InitStorageData`. +This declares that all keys are `word` and all values are `miden::standards::auth::pub_key`, regardless of whether the map contents come from `default-values = [...]` (static) or are supplied at instantiation via `InitStorageData`. `type.key` / `type.value` are validated when building map entries from `InitStorageData` (and when validating `default-values`). diff --git a/docs/src/note.md b/docs/src/note.md index b9ee08d714..cef4107b3b 100644 --- a/docs/src/note.md +++ b/docs/src/note.md @@ -22,7 +22,7 @@ These components are: 1. [Assets](#assets) 2. [Script](#script) -3. [Inputs](#inputs) +3. [Storage](#storage) 4. [Serial number](#serial-number) 5. [Metadata](#metadata) @@ -42,13 +42,13 @@ The code executed when the `Note` is consumed. Each `Note` has a script that defines the conditions under which it can be consumed. When accounts consume notes in transactions, `Note` scripts call the account’s interface functions. This enables all sorts of operations beyond simple asset transfers. The Miden VM’s Turing completeness allows for arbitrary logic, making `Note` scripts highly versatile. There is no limit to the amount of code a `Note` can hold. -### Inputs +### Storage :::note -Arguments passed to the `Note` script during execution. 
+The storage of the `Note` that it can access during execution. ::: -A `Note` can have up to 128 input values, which adds up to a maximum of 1 KB of data. The `Note` script can access these inputs. They can convey arbitrary parameters for `Note` consumption. +A `Note` can store up to 1024 items in its storage, which adds up to a maximum of 8 KB of data. The `Note` script can access storage during execution and it is used to parameterize a note's script. For instance, a P2ID note stores the ID of the target account that can consume the note. This makes the P2ID note script reusable by changing the target account ID. ### Serial number @@ -82,7 +82,7 @@ Example use cases for attachments are: - Communicate the note details of a private note in encrypted form. This means the encrypted note is attached publicly to the otherwise private note. - For [network transactions](./transaction.md#network-transaction), encode the ID of the network account that should consume the note. This is a standardized attachment scheme in miden-standards called `NetworkAccountTarget`. -- Communicate the details of a _private_ note to the receiver so they can derive the note. For example, the payback note of a partially fillable swap note can be private and the receiver already knows a few details: It is a P2ID note, the serial number is derived from the SWAP note's serial number and the note inputs are the account ID of the receiver. The receiver only needs to now the exact amount that was filled to derive the full note for consumption. This amount can be encoded in the public attachment of the payback note, which allows this use case to work with private notes and still not require a side-channel. +- Communicate the details of a _private_ note to the receiver so they can derive the note. 
For example, the payback note of a partially fillable swap note can be private and the receiver already knows a few details: It is a P2ID note, the serial number is derived from the SWAP note's serial number and the note storage is the account ID of the receiver. The receiver only needs to know the exact amount that was filled to derive the full note for consumption. This amount can be encoded in the public attachment of the payback note, which allows this use case to work with private notes and still not require a side-channel. ## Note Lifecycle @@ -139,7 +139,7 @@ Using `Note` tags strikes a balance between privacy and efficiency. Without tags ### Note consumption -To consume a `Note`, the consumer must know its data, including the inputs needed to compute the nullifier. Consumption occurs as part of a transaction. Upon successful consumption a nullifier is generated for the consumed notes. +To consume a `Note`, the consumer must know its data, including the note's storage which is needed to compute the nullifier. Consumption occurs as part of a transaction. Upon successful consumption a nullifier is generated for the consumed notes. Upon successful verification of the transaction: @@ -151,28 +151,28 @@ Upon successful verification of the transaction: Consumption of a `Note` can be restricted to certain accounts or entities. For instance, the P2ID and P2IDE `Note` scripts target a specific account ID. Alternatively, Miden defines a RECIPIENT (represented as 32 bytes) computed as: ```arduino -hash(hash(hash(serial_num, [0; 4]), script_root), input_commitment) +hash(hash(hash(serial_num, [0; 4]), script_root), storage_commitment) ``` Only those who know the RECIPIENT’s pre-image can consume the `Note`. For private notes, this ensures an additional layer of control and privacy, as only parties with the correct data can claim the `Note`. The [transaction prologue](transaction) requires all necessary data to compute the `Note` hash.
This setup allows scenario-specific restrictions on who may consume a `Note`. -For a practical example, refer to the [SWAP note script](https://github.com/0xMiden/miden-base/blob/next/crates/miden-standards/asm/standards/notes/swap.masm), where the RECIPIENT ensures that only a defined target can consume the swapped asset. +For a practical example, refer to the [SWAP note script](https://github.com/0xMiden/protocol/blob/next/crates/miden-standards/asm/standards/notes/swap.masm), where the RECIPIENT ensures that only a defined target can consume the swapped asset. #### Note nullifier ensuring private consumption The `Note` nullifier, computed as: ```arduino -hash(serial_num, script_root, input_commitment, vault_hash) +hash(serial_num, script_root, storage_commitment, vault_hash) ``` This achieves the following properties: - Every `Note` can be reduced to a single unique nullifier. - One cannot derive a note's hash from its nullifier. -- To compute the nullifier, one must know all components of the `Note`: serial_num, script_root, input_commitment, and vault_hash. +- To compute the nullifier, one must know all components of the `Note`: serial_num, script_root, storage_commitment, and vault_hash. That means if a `Note` is private and the operator stores only the note's hash, only those with the `Note` details know if this `Note` has been consumed already. Zcash first [introduced](https://zcash.github.io/orchard/design/nullifiers.html#nullifiers) this approach. @@ -182,7 +182,7 @@ That means if a `Note` is private and the operator stores only the note's hash, ## Standard Note Types -The miden-base repository provides several standard note scripts that implement common use cases for asset transfers and interactions. These pre-built note types offer secure, tested implementations for typical scenarios. +The `miden::standards` library provides several standard note scripts that implement common use cases for asset transfers and interactions. 
These pre-built note types offer secure, tested implementations for typical scenarios. ### P2ID (Pay-to-ID) @@ -191,7 +191,7 @@ The P2ID note script implements a simple pay-to-account-ID pattern. It adds all **Key characteristics:** - **Purpose:** Direct asset transfer to a specific account ID -- **Inputs:** Requires exactly 2 note inputs containing the target account ID +- **Storage:** Requires exactly 2 storage items containing the target account ID - **Validation:** Ensures the consuming account's ID matches the target account ID specified in the note - **Requirements:** Target account must expose the `miden::standards::wallets::basic::receive_asset` procedure @@ -204,7 +204,7 @@ The P2IDE note script extends P2ID with additional features including time-locki **Key characteristics:** - **Purpose:** Advanced asset transfer with time-lock and reclaim capabilities -- **Inputs:** Requires exactly 4 note inputs: +- **Storage:** Requires exactly 4 storage items: - Target account ID - Reclaim block height (when sender can reclaim) - Time-lock block height (when target can consume) @@ -226,7 +226,7 @@ The SWAP note script implements atomic asset swapping functionality. **Key characteristics:** - **Purpose:** Atomic asset exchange between two parties -- **Inputs:** Requires exactly 16 note inputs specifying: +- **Storage:** Requires exactly 16 storage items specifying: - Requested asset details - Payback note recipient information - Note creation parameters (type, tag, attachment) diff --git a/docs/src/protocol_library.md b/docs/src/protocol_library.md index d9cef7f02e..3076434d5c 100644 --- a/docs/src/protocol_library.md +++ b/docs/src/protocol_library.md @@ -33,20 +33,22 @@ Active account procedures can be used to read from storage, fetch or compute com | Procedure | Description | Context | | -------------------------------- | ----------------------------- | ----------------------------- | -| `get_id` | Returns the ID of the active account.

**Inputs:** `[]`
**Outputs:** `[account_id_prefix, account_id_suffix]` | Any | +| `get_id` | Returns the ID of the active account.

**Inputs:** `[]`
**Outputs:** `[account_id_suffix, account_id_prefix]` | Any | | `get_nonce` | Returns the nonce of the active account. Always returns the initial nonce as it can only be incremented in auth procedures.

**Inputs:** `[]`
**Outputs:** `[nonce]` | Any | | `get_initial_commitment` | Returns the active account commitment at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_COMMITMENT]` | Any | | `compute_commitment` | Computes and returns the account commitment from account data stored in memory.

**Inputs:** `[]`
**Outputs:** `[ACCOUNT_COMMITMENT]` | Any | | `get_code_commitment` | Gets the account code commitment of the active account.

**Inputs:** `[]`
**Outputs:** `[CODE_COMMITMENT]` | Account | | `get_initial_storage_commitment` | Returns the storage commitment of the active account at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_STORAGE_COMMITMENT]` | Any | | `compute_storage_commitment` | Computes the latest account storage commitment of the active account.

**Inputs:** `[]`
**Outputs:** `[STORAGE_COMMITMENT]` | Account | -| `get_item` | Gets an item from the account storage.

**Inputs:** `[slot_id_prefix, slot_id_suffix]`
**Outputs:** `[VALUE]` | Account | -| `get_initial_item` | Gets the initial item from the account storage slot as it was at the beginning of the transaction.

**Inputs:** `[slot_id_prefix, slot_id_suffix]`
**Outputs:** `[VALUE]` | Account | -| `get_map_item` | Returns the VALUE located under the specified KEY within the map contained in the given account storage slot.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY]`
**Outputs:** `[VALUE]` | Account | -| `get_initial_map_item` | Gets the initial VALUE from the account storage map as it was at the beginning of the transaction.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY]`
**Outputs:** `[VALUE]` | Account | -| `get_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix]`
**Outputs:** `[balance]` | Any | -| `get_initial_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault at the beginning of the transaction.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix]`
**Outputs:** `[init_balance]` | Any | -| `has_non_fungible_asset` | Returns a boolean indicating whether the non-fungible asset is present in the active account's vault.

**Inputs:** `[ASSET]`
**Outputs:** `[has_asset]` | Any | +| `get_item` | Gets an item from the account storage.

**Inputs:** `[slot_id_suffix, slot_id_prefix]`
**Outputs:** `[VALUE]` | Account | +| `get_initial_item` | Gets the initial item from the account storage slot as it was at the beginning of the transaction.

**Inputs:** `[slot_id_suffix, slot_id_prefix]`
**Outputs:** `[VALUE]` | Account | +| `get_map_item` | Returns the VALUE located under the specified KEY within the map contained in the given account storage slot.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY]`
**Outputs:** `[VALUE]` | Account | +| `get_initial_map_item` | Gets the initial VALUE from the account storage map as it was at the beginning of the transaction.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY]`
**Outputs:** `[VALUE]` | Account | +| `get_asset` | Returns the asset associated with the provided asset vault key in the active account's vault.

**Inputs:** `[ASSET_KEY]`
**Outputs:** `[ASSET_VALUE]` | Any | +| `get_initial_asset` | Returns the asset associated with the provided asset vault key in the active account's vault at the beginning of the transaction.

**Inputs:** `[ASSET_KEY]`
**Outputs:** `[ASSET_VALUE]` | Any | +| `get_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix]`
**Outputs:** `[balance]` | Any | +| `get_initial_balance` | Returns the balance of the fungible asset associated with the provided faucet_id in the active account's vault at the beginning of the transaction.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix]`
**Outputs:** `[init_balance]` | Any | +| `has_non_fungible_asset` | Returns a boolean indicating whether the non-fungible asset is present in the active account's vault.

**Inputs:** `[ASSET_VALUE]`
**Outputs:** `[has_asset]` | Any | | `get_initial_vault_root` | Returns the vault root of the active account at the beginning of the transaction.

**Inputs:** `[]`
**Outputs:** `[INIT_VAULT_ROOT]` | Any | | `get_vault_root` | Returns the vault root of the active account.

**Inputs:** `[]`
**Outputs:** `[VAULT_ROOT]` | Any | | `get_num_procedures` | Returns the number of procedures in the active account.

**Inputs:** `[]`
**Outputs:** `[num_procedures]` | Any | @@ -59,13 +61,13 @@ Native account procedures can be used to write to storage, add or remove assets | Procedure | Description | Context | | ------------------------------ | ------------------------------ | ------------------------------ | -| `get_id` | Returns the ID of the native account of the transaction.

**Inputs:** `[]`
**Outputs:** `[account_id_prefix, account_id_suffix]` | Any | +| `get_id` | Returns the ID of the native account of the transaction.

**Inputs:** `[]`
**Outputs:** `[account_id_suffix, account_id_prefix]` | Any | | `incr_nonce` | Increments the nonce of the native account by one and returns the new nonce. Can only be called from auth procedures.

**Inputs:** `[]`
**Outputs:** `[final_nonce]` | Auth | | `compute_delta_commitment` | Computes the commitment to the native account's delta. Can only be called from auth procedures.

**Inputs:** `[]`
**Outputs:** `[DELTA_COMMITMENT]` | Auth | -| `set_item` | Sets an item in the native account storage.

**Inputs:** `[slot_id_prefix, slot_id_suffix, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | -| `set_map_item` | Sets VALUE under the specified KEY within the map contained in the given native account storage slot.

**Inputs:** `[slot_id_prefix, slot_id_suffix, KEY, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | -| `add_asset` | Adds the specified asset to the vault. For fungible assets, returns the total after addition.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET']` | Native & Account | -| `remove_asset` | Removes the specified asset from the vault.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account | +| `set_item` | Sets an item in the native account storage.

**Inputs:** `[slot_id_suffix, slot_id_prefix, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | +| `set_map_item` | Sets VALUE under the specified KEY within the map contained in the given native account storage slot.

**Inputs:** `[slot_id_suffix, slot_id_prefix, KEY, VALUE]`
**Outputs:** `[OLD_VALUE]` | Native & Account | +| `add_asset` | Adds the specified asset to the vault. For fungible assets, returns the total after addition.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[ASSET_VALUE']` | Native & Account | +| `remove_asset` | Removes the specified asset from the vault.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[ASSET_VALUE]` | Native & Account | | `was_procedure_called` | Returns 1 if a native account procedure was called during transaction execution, and 0 otherwise.

**Inputs:** `[PROC_ROOT]`
**Outputs:** `[was_called]` | Any | ## Active Note Procedures (`miden::protocol::active_note`) @@ -76,9 +78,9 @@ Active note procedures can be used to fetch data from the note that is currently | ----------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | | `get_assets` | Writes the [assets](note.md#assets) of the active note into memory starting at the specified address.

**Inputs:** `[dest_ptr]`
**Outputs:** `[num_assets, dest_ptr]` | Note | | `get_recipient` | Returns the [recipient](note.md#note-recipient-restricting-consumption) of the active note.

**Inputs:** `[]`
**Outputs:** `[RECIPIENT]` | Note | -| `get_inputs` | Writes the note's [inputs](note.md#inputs) to the specified memory address.

**Inputs:** `[dest_ptr]`
**Outputs:** `[num_inputs, dest_ptr]` | Note | +| `get_storage` | Writes the note's [storage](note.md#inputs) to the specified memory address.

**Inputs:** `[dest_ptr]`
**Outputs:** `[num_storage_items, dest_ptr]` | Note | | `get_metadata` | Returns the [metadata](note.md#metadata) of the active note.

**Inputs:** `[]`
**Outputs:** `[METADATA]` | Note | -| `get_sender` | Returns the sender of the active note.

**Inputs:** `[]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Note | +| `get_sender` | Returns the sender of the active note.

**Inputs:** `[]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Note | | `get_serial_number` | Returns the [serial number](note.md#serial-number) of the active note.

**Inputs:** `[]`
**Outputs:** `[SERIAL_NUMBER]` | Note | | `get_script_root` | Returns the [script root](note.md#script) of the active note.

**Inputs:** `[]`
**Outputs:** `[SCRIPT_ROOT]` | Note | @@ -92,8 +94,8 @@ Input note procedures can be used to fetch data on input notes consumed by the t | `get_assets` | Writes the [assets](note.md#assets) of the input note with the specified index into memory starting at the specified address.

**Inputs:** `[dest_ptr, note_index]`
**Outputs:** `[num_assets, dest_ptr, note_index]` | Any | | `get_recipient` | Returns the [recipient](note.md#note-recipient-restricting-consumption) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[RECIPIENT]` | Any | | `get_metadata` | Returns the [metadata](note.md#metadata) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[METADATA]` | Any | -| `get_sender` | Returns the sender of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Any | -| `get_inputs_info` | Returns the [inputs](note.md#inputs) commitment and length of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[NOTE_INPUTS_COMMITMENT, num_inputs]` | Any | +| `get_sender` | Returns the sender of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Any | +| `get_storage_info` | Returns the [storage](note.md#inputs) commitment and length of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[NOTE_STORAGE_COMMITMENT, num_storage_items]` | Any | | `get_script_root` | Returns the [script root](note.md#script) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[SCRIPT_ROOT]` | Any | | `get_serial_number` | Returns the [serial number](note.md#serial-number) of the input note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[SERIAL_NUMBER]` | Any | @@ -106,7 +108,7 @@ Output note procedures can be used to fetch data on output notes created by the | `create` | Creates a new output note and returns its index.

**Inputs:** `[tag, note_type, RECIPIENT]`
**Outputs:** `[note_idx]` | Native & Account | | `get_assets_info` | Returns the information about assets in the output note with the specified index.

**Inputs:** `[note_index]`
**Outputs:** `[ASSETS_COMMITMENT, num_assets]` | Any | | `get_assets` | Writes the assets of the output note with the specified index into memory starting at the specified address.

**Inputs:** `[dest_ptr, note_index]`
**Outputs:** `[num_assets, dest_ptr, note_index]` | Any | -| `add_asset` | Adds the `ASSET` to the output note specified by the index.

**Inputs:** `[ASSET, note_idx]`
**Outputs:** `[]` | Native | +| `add_asset` | Adds the asset to the output note specified by the index.

**Inputs:** `[ASSET_KEY, ASSET_VALUE, note_idx]`
**Outputs:** `[]` | Native | | `set_attachment` | Sets the attachment of the note specified by the index.

If attachment_kind == Array, there must be an advice map entry for ATTACHMENT.

**Inputs:**
`Operand Stack: [note_idx, attachment_scheme, attachment_kind, ATTACHMENT]`
`Advice map: { ATTACHMENT?: [[ATTACHMENT_ELEMENTS]] }`
**Outputs:** `[]` | Native | | `set_array_attachment` | Sets the attachment of the note specified by the note index to the provided ATTACHMENT which commits to an array of felts.

**Inputs:**
`Operand Stack: [note_idx, attachment_scheme, ATTACHMENT]`
`Advice map: { ATTACHMENT: [[ATTACHMENT_ELEMENTS]] }`
**Outputs:** `[]` | Native | | `set_word_attachment` | Sets the attachment of the note specified by the note index to the provided word.

**Inputs:** `[note_idx, attachment_scheme, ATTACHMENT]`
**Outputs:** `[]` | @@ -119,11 +121,11 @@ Note utility procedures can be used to compute the required utility data or writ | Procedure | Description | Context | | --------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| `compute_inputs_commitment` | Computes the commitment to the output note inputs starting at the specified memory address.

**Inputs:** `[inputs_ptr, num_inputs]`
**Outputs:** `[INPUTS_COMMITMENT]` | Any | +| `compute_storage_commitment` | Computes the commitment to the output note storage starting at the specified memory address.

**Inputs:** `[storage_ptr, num_storage_items]`
**Outputs:** `[STORAGE_COMMITMENT]` | Any | | `write_assets_to_memory` | Writes the assets data stored in the advice map to the memory specified by the provided destination pointer.

**Inputs:** `[ASSETS_COMMITMENT, num_assets, dest_ptr]`
**Outputs:** `[num_assets, dest_ptr]` | Any | -| `build_recipient_hash` | Returns the `RECIPIENT` for a specified `SERIAL_NUM`, `SCRIPT_ROOT`, and inputs commitment.

**Inputs:** `[SERIAL_NUM, SCRIPT_ROOT, INPUT_COMMITMENT]`
**Outputs:** `[RECIPIENT]` | Any | -| `build_recipient` | Builds the recipient hash from note inputs, script root, and serial number.

**Inputs:** `[inputs_ptr, num_inputs, SERIAL_NUM, SCRIPT_ROOT]`
**Outputs:** `[RECIPIENT]` | Any | -| `extract_sender_from_metadata` | Extracts the sender ID from the provided metadata word.

**Inputs:** `[METADATA]`
**Outputs:** `[sender_id_prefix, sender_id_suffix]` | Any | +| `build_recipient_hash` | Returns the `RECIPIENT` for a specified `SERIAL_NUM`, `SCRIPT_ROOT`, and storage commitment.

**Inputs:** `[SERIAL_NUM, SCRIPT_ROOT, STORAGE_COMMITMENT]`
**Outputs:** `[RECIPIENT]` | Any | +| `build_recipient` | Builds the recipient hash from note storage, script root, and serial number.

**Inputs:** `[storage_ptr, num_storage_items, SERIAL_NUM, SCRIPT_ROOT]`
**Outputs:** `[RECIPIENT]` | Any | +| `extract_sender_from_metadata` | Extracts the sender ID from the provided metadata word.

**Inputs:** `[METADATA]`
**Outputs:** `[sender_id_suffix, sender_id_prefix]` | Any | ## Transaction Procedures (`miden::protocol::tx`) @@ -138,7 +140,7 @@ Transaction procedures manage transaction-level operations including note creati | `get_output_notes_commitment` | Returns the output notes commitment hash.

**Inputs:** `[]`
**Outputs:** `[OUTPUT_NOTES_COMMITMENT]` | Any | | `get_num_input_notes` | Returns the total number of input notes consumed by this transaction.

**Inputs:** `[]`
**Outputs:** `[num_input_notes]` | Any | | `get_num_output_notes` | Returns the current number of output notes created in this transaction.

**Inputs:** `[]`
**Outputs:** `[num_output_notes]` | Any | -| `execute_foreign_procedure` | Executes the provided procedure against the foreign account.

**Inputs:** `[foreign_account_id_prefix, foreign_account_id_suffix, FOREIGN_PROC_ROOT, , pad(n)]`
**Outputs:** `[]` | Any | +| `execute_foreign_procedure` | Executes the provided procedure against the foreign account.

**Inputs:** `[foreign_account_id_suffix, foreign_account_id_prefix, FOREIGN_PROC_ROOT, , pad(n)]`
**Outputs:** `[]` | Any | | `get_expiration_block_delta` | Returns the transaction expiration delta, or 0 if not set.

**Inputs:** `[]`
**Outputs:** `[block_height_delta]` | Any | | `update_expiration_block_delta` | Updates the transaction expiration delta.

**Inputs:** `[block_height_delta]`
**Outputs:** `[]` | Any | @@ -148,12 +150,11 @@ Faucet procedures allow reading and writing to faucet accounts to mint and burn | Procedure | Description | Context | | ------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------- | -| `create_fungible_asset` | Creates a fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[amount]`
**Outputs:** `[ASSET]` | Faucet | -| `create_non_fungible_asset` | Creates a non-fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[DATA_HASH]`
**Outputs:** `[ASSET]` | Faucet | -| `mint` | Mint an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account & Faucet | -| `burn` | Burn an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET]`
**Outputs:** `[ASSET]` | Native & Account & Faucet | -| `get_total_issuance` | Returns the total issuance of the fungible faucet the transaction is being executed against.

**Inputs:** `[]`
**Outputs:** `[total_issuance]` | Faucet | -| `is_non_fungible_asset_issued` | Returns a boolean indicating whether the provided non-fungible asset has been already issued by this faucet.

**Inputs:** `[ASSET]`
**Outputs:** `[is_issued]` | Faucet | +| `create_fungible_asset` | Creates a fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[amount]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Faucet | +| `create_non_fungible_asset` | Creates a non-fungible asset for the faucet the transaction is being executed against.

**Inputs:** `[DATA_HASH]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Faucet | +| `mint` | Mint an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[NEW_ASSET_VALUE]` | Native & Account & Faucet | +| `burn` | Burn an asset from the faucet the transaction is being executed against.

**Inputs:** `[ASSET_KEY, ASSET_VALUE]`
**Outputs:** `[ASSET_VALUE]` | Native & Account & Faucet | +| `has_callbacks` | Returns whether the active account defines callbacks.

**Inputs:** `[]`
**Outputs:** `[has_callbacks]` | Any | ## Asset Procedures (`miden::protocol::asset`) @@ -161,5 +162,5 @@ Asset procedures provide utilities for creating fungible and non-fungible assets | Procedure | Description | Context | | -------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------- | -| `build_fungible_asset` | Builds a fungible asset for the specified fungible faucet and amount.

**Inputs:** `[faucet_id_prefix, faucet_id_suffix, amount]`
**Outputs:** `[ASSET]` | Any | -| `build_non_fungible_asset` | Builds a non-fungible asset for the specified non-fungible faucet and data hash.

**Inputs:** `[faucet_id_prefix, DATA_HASH]`
**Outputs:** `[ASSET]` | Any | +| `create_fungible_asset` | Builds a fungible asset for the specified fungible faucet and amount.

**Inputs:** `[enable_callbacks, faucet_id_suffix, faucet_id_prefix, amount]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Any | +| `create_non_fungible_asset` | Builds a non-fungible asset for the specified non-fungible faucet and data hash.

**Inputs:** `[faucet_id_suffix, faucet_id_prefix, DATA_HASH]`
**Outputs:** `[ASSET_KEY, ASSET_VALUE]` | Any | diff --git a/docs/src/transaction.md b/docs/src/transaction.md index c4c9477cf5..06f4c83f0b 100644 --- a/docs/src/transaction.md +++ b/docs/src/transaction.md @@ -34,7 +34,7 @@ Every `Transaction` describes the process of an account changing its state. This A `Transaction` requires several inputs: - **Account**: A `Transaction` is always executed against a single account. The executor must have complete knowledge of the account's state. -- **Notes**: A `Transaction` can consume and output up to `1024` notes. The executor must have complete knowledge of the note data, including note inputs, before consumption. For private notes, the data cannot be fetched from the blockchain and must be received through an off-chain channel. +- **Notes**: A `Transaction` can consume and output up to `1024` notes. The executor must have complete knowledge of the note data, including note storage, before consumption. For private notes, the data cannot be fetched from the blockchain and must be received through an off-chain channel. - **Blockchain state**: The current reference block and information about the notes database used to authenticate notes to be consumed must be retrieved from the Miden operator before execution. Usually, notes to be consumed in a `Transaction` must have been created before the reference block. - **Transaction script (optional)**: The `Transaction` script is code defined by the executor. And like note scripts, they can invoke account methods, e.g., sign a transaction. There is no limit to the amount of code a `Transaction` script can hold. - **Transaction arguments (optional)**: For every note, the executor can inject transaction arguments that are present at runtime. If the note script — and therefore the note creator — allows, the note script can read those arguments to allow dynamic execution. See below for an example. 
@@ -64,11 +64,11 @@ To illustrate the `Transaction` protocol, we provide two examples for a basic `T ### Creating a P2ID note -Let's assume account A wants to create a P2ID note. P2ID notes are pay-to-ID notes that can only be consumed by a specified target account ID. Note creators can provide the target account ID using the [note inputs](note#inputs). +Let's assume account A wants to create a P2ID note. P2ID notes are pay-to-ID notes that can only be consumed by a specified target account ID. Note creators can provide the target account ID using the [note storage](note#inputs). -In this example, account A uses the basic wallet and the authentication component provided by `miden-standards`. The basic wallet component defines the methods `wallets::basic::create_note` and `wallets::basic::move_asset_to_note` to create notes with assets, and `wallets::basic::receive_asset` to receive assets. The authentication component exposes `auth::basic::auth_tx_falcon512_rpo` which allows for signing a transaction. Some account methods like `active_account::get_id` are always exposed. +In this example, account A uses the basic wallet and the single-sig authentication component provided by `miden-standards`. The basic wallet component defines the methods `wallets::basic::create_note` and `wallets::basic::move_asset_to_note` to create notes with assets, and `wallets::basic::receive_asset` to receive assets. The authentication component exposes `auth::singlesig::auth_tx` which allows for signing a transaction. Some account methods like `active_account::get_id` are always exposed. -The executor inputs to the Miden VM a `Transaction` script in which it places on the stack the data (tag, aux, note_type, execution_hint, RECIPIENT) of the note(s) that it wants to create using `wallets::basic::create_note` during the said `Transaction`. 
The [`NoteRecipient`](https://github.com/0xMiden/miden-base/blob/main/crates/miden-protocol/src/note/recipient.rs) is a value that describes under which condition a note can be consumed and is built using a `serial_number`, the `note_script` (in this case P2ID script) and the `note_inputs`. The Miden VM will execute the `Transaction` script and create the note(s). After having been created, the executor can use `wallets::basic::move_asset_to_note` to move assets from the account's vault to the notes vault. +The executor inputs to the Miden VM a `Transaction` script in which it places on the stack the data (tag, aux, note_type, execution_hint, RECIPIENT) of the note(s) that it wants to create using `wallets::basic::create_note` during the said `Transaction`. The [`NoteRecipient`](https://github.com/0xMiden/protocol/blob/next/crates/miden-protocol/src/note/recipient.rs) is a value that describes under which condition a note can be consumed and is built using a `serial_number`, the `note_script` (in this case P2ID script) and the `note_inputs`. The Miden VM will execute the `Transaction` script and create the note(s). After having been created, the executor can use `wallets::basic::move_asset_to_note` to move assets from the account's vault to the notes vault. After finalizing the `Transaction` the updated state and created note(s) can now be submitted to the Miden operator to be recorded on-chain. @@ -80,11 +80,11 @@ To start the transaction process, the executor fetches and prepares all the inpu In the transaction's prologue the data is being authenticated by re-hashing the provided values and comparing them to the blockchain's data (this is how private data can be used and verified during the execution of transaction without actually revealing it to the network). -Then the P2ID note script is being executed. The script starts by reading the note inputs `active_note::get_inputs` — in our case the account ID of the intended target account. 
It checks if the provided target account ID equals the account ID of the executing account. This is the first time the note invokes a method exposed by the `Transaction` kernel, `active_account::get_id`. +Then the P2ID note script is being executed. The script starts by reading the note storage `active_note::get_storage` — in our case the account ID of the intended target account. It checks if the provided target account ID equals the account ID of the executing account. This is the first time the note invokes a method exposed by the `Transaction` kernel, `active_account::get_id`. If the check passes, the note script pushes the assets it holds into the account's vault. For every asset the note contains, the script calls the `wallets::basic::receive_asset` method exposed by the account's wallet component. The `wallets::basic::receive_asset` procedure calls `native_account::add_asset`, which cannot be called from the note itself. This allows accounts to control what functionality to expose, e.g. whether the account supports receiving assets or not, and the note cannot bypass that. -After the assets are stored in the account's vault, the transaction script is being executed. The script calls `auth::basic::auth_tx_falcon512_rpo` which is explicitly exposed in the account interface. The method is used to verify a provided signature against a public key stored in the account's storage and a commitment to this specific transaction. If the signature can be verified, the method increments the nonce. +After the assets are stored in the account's vault, the transaction script is being executed. The script calls `auth::singlesig::auth_tx` which is explicitly exposed in the account interface. The method is used to verify a provided signature against a public key stored in the account's storage and a commitment to this specific transaction. If the signature can be verified, the method increments the nonce. 
The Epilogue finalizes the transaction by computing the final account hash, asserting the nonce increment and checking that no assets were created or destroyed in the transaction — that means the net sum of all assets must stay the same.