From 4794e003c51180fb44b86330253d4f10d6bd48a8 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Fri, 3 Oct 2025 20:19:42 +0200 Subject: [PATCH 01/15] init --- .../multisig/aiken-crowdfund/README.md | 1 + .../multisig/aiken-crowdfund/aiken.lock | 26 + .../multisig/aiken-crowdfund/aiken.toml | 23 + .../aiken-crowdfund/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 + .../packages/aiken-lang-stdlib/.gitattributes | 2 + .../workflows/continuous-integration.yml | 64 + .../packages/aiken-lang-stdlib/.gitignore | 3 + .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ++++++++++ .../build/packages/aiken-lang-stdlib/LICENSE | 201 +++ .../packages/aiken-lang-stdlib/README.md | 71 + .../packages/aiken-lang-stdlib/aiken.lock | 16 + .../packages/aiken-lang-stdlib/aiken.toml | 15 + .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ++++ .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ++++ .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 + .../lib/aiken/collection/dict.ak | 1174 ++++++++++++++ .../lib/aiken/collection/list.ak | 1411 +++++++++++++++++ .../lib/aiken/collection/pairs.ak | 833 ++++++++++ .../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 ++ .../lib/aiken/crypto/bls12_381/g1.ak | 115 ++ .../lib/aiken/crypto/bls12_381/g2.ak | 124 ++ .../lib/aiken/crypto/bls12_381/scalar.ak | 255 +++ .../aiken-lang-stdlib/lib/aiken/interval.ak | 680 ++++++++ .../aiken-lang-stdlib/lib/aiken/math.ak | 424 +++++ .../lib/aiken/math/rational.ak | 871 ++++++++++ .../lib/aiken/math/rational.tests.ak | 65 + .../aiken-lang-stdlib/lib/aiken/option.ak | 312 ++++ .../lib/aiken/primitive/bytearray.ak | 668 ++++++++ .../lib/aiken/primitive/int.ak | 156 ++ .../lib/aiken/primitive/string.ak | 139 ++ .../aiken-lang-stdlib/lib/cardano/address.ak | 86 + .../lib/cardano/address/credential.ak | 30 + .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 +++++++++++ .../lib/cardano/certificate.ak | 93 ++ .../lib/cardano/governance.ak | 109 ++ .../cardano/governance/protocol_parameters.ak | 360 +++++ .../lib/cardano/governance/voter.ak | 62 + .../lib/cardano/script_context.ak | 62 + .../lib/cardano/transaction.ak | 225 +++ .../cardano/transaction/output_reference.ak | 23 + .../lib/cardano/transaction/script_purpose.ak | 126 ++ .../build/packages/packages.toml | 9 + .../.github/workflows/build_docs.yml | 50 + .../.github/workflows/release.yml | 80 + .../build/packages/sidan-lab-vodka/.gitignore | 16 + .../build/packages/sidan-lab-vodka/LICENSE | 201 +++ .../build/packages/sidan-lab-vodka/README.md | 136 ++ .../build/packages/sidan-lab-vodka/aiken.lock | 15 + .../build/packages/sidan-lab-vodka/aiken.toml | 18 + .../packages/sidan-lab-vodka/assets/logo.png | Bin 0 -> 74727 bytes .../sidan-lab-vodka/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 + .../packages/aiken-lang-stdlib/.gitattributes | 2 + .../workflows/continuous-integration.yml | 64 + .../packages/aiken-lang-stdlib/.gitignore | 3 + .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ++++++++++ .../build/packages/aiken-lang-stdlib/LICENSE | 201 +++ .../packages/aiken-lang-stdlib/README.md | 71 + .../packages/aiken-lang-stdlib/aiken.lock | 16 + .../packages/aiken-lang-stdlib/aiken.toml | 15 + .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ++++ .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ++++ .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 + .../lib/aiken/collection/dict.ak | 1174 ++++++++++++++ .../lib/aiken/collection/list.ak | 1411 +++++++++++++++++ .../lib/aiken/collection/pairs.ak | 833 ++++++++++ 
.../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 ++ .../lib/aiken/crypto/bls12_381/g1.ak | 115 ++ .../lib/aiken/crypto/bls12_381/g2.ak | 124 ++ .../lib/aiken/crypto/bls12_381/scalar.ak | 255 +++ .../aiken-lang-stdlib/lib/aiken/interval.ak | 680 ++++++++ .../aiken-lang-stdlib/lib/aiken/math.ak | 424 +++++ .../lib/aiken/math/rational.ak | 871 ++++++++++ .../lib/aiken/math/rational.tests.ak | 65 + .../aiken-lang-stdlib/lib/aiken/option.ak | 312 ++++ .../lib/aiken/primitive/bytearray.ak | 668 ++++++++ .../lib/aiken/primitive/int.ak | 156 ++ .../lib/aiken/primitive/string.ak | 139 ++ .../aiken-lang-stdlib/lib/cardano/address.ak | 86 + .../lib/cardano/address/credential.ak | 30 + .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 +++++++++++ .../lib/cardano/certificate.ak | 93 ++ .../lib/cardano/governance.ak | 109 ++ .../cardano/governance/protocol_parameters.ak | 360 +++++ .../lib/cardano/governance/voter.ak | 62 + .../lib/cardano/script_context.ak | 62 + .../lib/cardano/transaction.ak | 225 +++ .../cardano/transaction/output_reference.ak | 23 + .../lib/cardano/transaction/script_purpose.ak | 126 ++ .../build/packages/packages.toml | 4 + .../build/packages/sidan-lab-vodka/lib/cip.ak | 73 + .../packages/sidan-lab-vodka/lib/cocktail.ak | 161 ++ .../lib/cocktail/vodka_address.ak | 56 + .../lib/cocktail/vodka_converter.ak | 40 + .../lib/cocktail/vodka_extra_signatories.ak | 46 + .../lib/cocktail/vodka_inputs.ak | 123 ++ .../lib/cocktail/vodka_mints.ak | 68 + .../lib/cocktail/vodka_outputs.ak | 90 ++ .../lib/cocktail/vodka_redeemers.ak | 45 + .../lib/cocktail/vodka_validity_range.ak | 34 + .../lib/cocktail/vodka_value.ak | 80 + .../packages/sidan-lab-vodka/lib/mocktail.ak | 776 +++++++++ .../lib/mocktail/virgin_address.ak | 57 + .../lib/mocktail/virgin_key_hash.ak | 47 + .../lib/mocktail/virgin_output_reference.ak | 16 + .../lib/mocktail/virgin_outputs.ak | 30 + .../lib/mocktail/virgin_validity_range.ak | 28 + .../packages/sidan-lab-vodka/plutus.json | 14 + .../multisig/aiken-crowdfund/lib/types.ak | 26 + .../multisig/aiken-crowdfund/lib/utils.ak | 39 + .../aiken-crowdfund/specs/1_auth_tokens.md | 15 + .../multisig/aiken-crowdfund/specs/2_proxy.md | 19 + .../aiken-crowdfund/specs/_scripts.md | 20 + .../specs/application_setup_doc.md | 13 + .../aiken-crowdfund/specs/user_action_doc.md | 6 + .../validators/auth_token/mint.ak | 27 + .../aiken-crowdfund/validators/proxy/spend.ak | 175 ++ .../validators/tests/auth_token/mint.ak | 63 + .../validators/tests/crowdfund/spend.ak | 1177 ++++++++++++++ .../tests/integration_test/contribute_fund.ak | 329 ++++ .../aiken-crowdfund/validators/tests/utils.ak | 64 + src/components/multisig/common.ts | 221 +++ 123 files changed, 27057 insertions(+) create mode 100644 src/components/multisig/aiken-crowdfund/README.md create mode 100644 src/components/multisig/aiken-crowdfund/aiken.lock create mode 100644 src/components/multisig/aiken-crowdfund/aiken.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/aiken-compile.lock create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore create mode 100644 
src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak create mode 100644 
src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/packages.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/assets/logo.png create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/aiken-compile.lock create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak create mode 100644 
src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak create mode 100644 
src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak create mode 100644 src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json create mode 100644 src/components/multisig/aiken-crowdfund/lib/types.ak create mode 100644 src/components/multisig/aiken-crowdfund/lib/utils.ak create mode 100644 src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md create mode 100644 src/components/multisig/aiken-crowdfund/specs/2_proxy.md create mode 100644 src/components/multisig/aiken-crowdfund/specs/_scripts.md create mode 100644 src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md create mode 100644 src/components/multisig/aiken-crowdfund/specs/user_action_doc.md create mode 100644 src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak create mode 100644 src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak create mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak create mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak create mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak create mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/utils.ak create mode 100644 
src/components/multisig/common.ts diff --git a/src/components/multisig/aiken-crowdfund/README.md b/src/components/multisig/aiken-crowdfund/README.md new file mode 100644 index 00000000..e76d2249 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/README.md @@ -0,0 +1 @@ +Implement in core mesh repo `mesh-contract` diff --git a/src/components/multisig/aiken-crowdfund/aiken.lock b/src/components/multisig/aiken-crowdfund/aiken.lock new file mode 100644 index 00000000..2e65bd16 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/aiken.lock @@ -0,0 +1,26 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[requirements]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +requirements = [] +source = "github" + +[[packages]] +name = "sidan-lab/vodka" +version = "0.1.13" +requirements = [] +source = "github" + +[etags] diff --git a/src/components/multisig/aiken-crowdfund/aiken.toml b/src/components/multisig/aiken-crowdfund/aiken.toml new file mode 100644 index 00000000..7a5ec5fe --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/aiken.toml @@ -0,0 +1,23 @@ +name = "multisig-Extension" +version = "0.0.0" +compiler = "v1.1.16" +plutus = "v3" +license = "Apache-2.0" +description = "Aiken contracts for project 'multisig-Extension'" + +[repository] +user = "QS" +project = "multisig" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[dependencies]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" + +[config] diff --git a/src/components/multisig/aiken-crowdfund/build/aiken-compile.lock b/src/components/multisig/aiken-crowdfund/build/aiken-compile.lock new file mode 100644 index 00000000..e69de29b diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig new file mode 100644 index 00000000..0759674c --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*.ak] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes new file mode 100644 index 00000000..99fefcf4 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes @@ -0,0 +1,2 @@ +# Temp hack to get some syntax highlighting on github +*.ak linguist-language=Gleam diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml new file mode 100644 index 00000000..b0081ac7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml @@ -0,0 +1,64 @@ +name: Continuous Integration + +on: + workflow_dispatch: + push: + branches: ["main"] + tags: ["*.*.*"] + pull_request: + branches: ["main"] + +env: + CARGO_TERM_COLOR: always + +permissions: + contents: read + pages: 
write
+  id-token: write
+
+concurrency:
+  group: "pages"
+  cancel-in-progress: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 📥 Checkout repository
+        uses: actions/checkout@v3
+
+      - name: 🧰 Setup Pages
+        uses: actions/configure-pages@v2
+
+      - name: 🧰 Install Aiken
+        uses: aiken-lang/setup-aiken@v1
+        with:
+          version: v1.1.9
+
+      - name: 📝 Run fmt
+        run: aiken fmt --check
+
+      - name: 🔬 Run tests
+        run: aiken check
+
+      - name: 📘 Generate documentation
+        shell: bash
+        working-directory: .
+        run: aiken docs -o docs
+
+      - name: 📦 Upload artifact
+        uses: actions/upload-pages-artifact@v2
+        with:
+          path: "docs/"
+
+  deploy:
+    if: ${{ startsWith(github.ref, 'refs/tags') }}
+    needs: build
+    runs-on: ubuntu-latest
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    steps:
+      - name: 🚀 Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v1
diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore
new file mode 100644
index 00000000..3a3d38e6
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore
@@ -0,0 +1,3 @@
+build/
+docs/
+.DS_Store
\ No newline at end of file
diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md
new file mode 100644
index 00000000..62345b32
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md
@@ -0,0 +1,805 @@
+# Changelog
+
+## v2.2.0 - 2024-12-13
+
+### Added
+
+- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes.
+- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting into pairs while specifying how to combine values on key conflict.
+
+## v2.1.0 - 2024-09-14
+
+### Added
+
+- Various new helper functions:
+  - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects.
+  - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionary in a single operation.
+  - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes.
+  - [`aiken/primitive/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/primitive/math/rational.html#pow): for computing (int) powers of rational numbers.
+  - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): for efficiently comparing two value-like structures.
+  - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): for extracting a subset of assets from a parent value.
+
+- Comparison functions for various Cardano types:
+  - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials.
+  - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governacen/voter.html#compare): for ordering voters.
+  - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references.
+  - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purposes.
+
+- New BLS12-381 crypto modules:
+  - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html)
+  - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html)
+  - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html)
+
+### Changed
+
+- N/A
+
+### Removed
+
+- N/A
+
+## v2.0.0 - 2024-09-01
+
+> [!NOTE]
+> Significant performance improvements (mostly on CPU) across the board, mostly due to the integration of Plutus V3.
+>
see benchmarks +> +> test | cpu | mem +> --- | --- | --- +> aiken/cbor.{serialise_1} | -38.20% | ±0.00% +> aiken/cbor.{serialise_2} | -38.20% | ±0.00% +> aiken/cbor.{serialise_3} | -37.25% | ±0.00% +> aiken/cbor.{serialise_4} | -41.95% | ±0.00% +> aiken/cbor.{serialise_5} | -42.77% | ±0.00% +> aiken/cbor.{serialise_6} | -42.63% | ±0.00% +> aiken/cbor.{serialise_7} | -40.51% | ±0.00% +> aiken/cbor.{serialise_8} | -37.25% | ±0.00% +> aiken/cbor.{serialise_9} | -41.95% | ±0.00% +> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% +> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% +> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% +> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% +> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% +> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% +> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% +> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% +> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% +> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% +> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% +> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% +> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% +> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% +> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% +> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% +> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% +> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% +> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% +> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% +> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% +> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% +> aiken/collection/dict.{find_1} | -20.63% | ±0.00% +> aiken/collection/dict.{find_2} | -20.43% | ±0.00% +> aiken/collection/dict.{find_3} | -22.03% | ±0.00% +> aiken/collection/dict.{find_4} | -22.53% | ±0.00% +> aiken/collection/dict.{get_1} | -20.63% | ±0.00% +> aiken/collection/dict.{get_2} | -22.72% | ±0.00% +> aiken/collection/dict.{get_3} | -23.26% | ±0.00% +> aiken/collection/dict.{get_4} | -26.91% | ±0.00% +> aiken/collection/dict.{get_5} | -26.30% | ±0.00% +> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% +> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% +> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% +> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% +> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% +> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% +> aiken/collection/dict.{size_1} | -37.90% | ±0.00% +> aiken/collection/dict.{size_2} | -32.34% | ±0.00% +> aiken/collection/dict.{size_3} | -27.97% | ±0.00% +> aiken/collection/dict.{values_1} | -20.30% | ±0.00% +> aiken/collection/dict.{values_2} | -17.58% | ±0.00% +> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% +> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% +> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% +> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% +> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% +> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% +> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% +> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% +> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% +> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% +> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% +> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% +> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% +> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% +> aiken/collection/dict.{map_1} | -19.56% | ±0.00% +> aiken/collection/dict.{map_2} | -23.66% | ±0.00% +> aiken/collection/dict.{union_1} | -17.91% | ±0.00% +> aiken/collection/dict.{union_2} | -8.67% | ±0.00% +> aiken/collection/dict.{union_3} | -22.82% | ±0.00% +> aiken/collection/dict.{union_4} | -22.77% | ±0.00% +> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% +> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% +> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% +> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% +> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% +> aiken/collection/list.{push_1} | -8.02% | ±0.00% +> aiken/collection/list.{push_2} | 1.25% | ±0.00% +> aiken/collection/list.{range_1} | -27.77% | ±0.00% +> aiken/collection/list.{range_2} | -27.39% | ±0.00% +> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% +> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% +> aiken/collection/list.{all_1} | -28.36% | ±0.00% +> aiken/collection/list.{all_2} | -27.59% | ±0.00% +> aiken/collection/list.{all_3} | -27.94% | ±0.00% +> aiken/collection/list.{any_1} | -28.23% | ±0.00% +> aiken/collection/list.{any_2} | -28.09% | ±0.00% +> aiken/collection/list.{any_3} | -26.95% | ±0.00% +> aiken/collection/list.{at_1} | -27.60% | ±0.00% +> aiken/collection/list.{at_2} | -19.96% | ±0.00% +> aiken/collection/list.{at_3} | -27.60% | ±0.00% +> aiken/collection/list.{at_4} | -20.77% | ±0.00% +> aiken/collection/list.{at_5} | -25.75% | ±0.00% +> aiken/collection/list.{count_empty} | -36.83% | ±0.00% +> aiken/collection/list.{count_all} | -32.37% | ±0.00% +> aiken/collection/list.{count_some} | -31.73% | ±0.00% +> aiken/collection/list.{count_none} | -30.44% | ±0.00% +> aiken/collection/list.{find_1} | -20.59% | ±0.00% +> aiken/collection/list.{find_2} | -25.53% | ±0.00% +> aiken/collection/list.{find_3} | -19.64% | ±0.00% +> aiken/collection/list.{has_1} | -27.88% | ±0.00% +> aiken/collection/list.{has_2} | -27.69% | ±0.00% +> aiken/collection/list.{has_3} | -26.95% | ±0.00% +> aiken/collection/list.{head_1} | -14.03% | ±0.00% +> aiken/collection/list.{head_2} | -16.90% | ±0.00% +> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% +> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% +> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% +> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% +> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% +> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% +> aiken/collection/list.{last_1} | -19.18% | ±0.00% +> aiken/collection/list.{last_2} | -16.26% | ±0.00% +> aiken/collection/list.{last_3} | -17.13% | ±0.00% +> aiken/collection/list.{length_1} | -37.90% | ±0.00% +> aiken/collection/list.{length_2} | -30.89% | ±0.00% +> aiken/collection/list.{delete_1} | -20.20% | ±0.00% +> aiken/collection/list.{delete_2} | -15.02% | ±0.00% +> aiken/collection/list.{delete_3} | -20.55% | ±0.00% +> aiken/collection/list.{delete_4} | -22.46% | ±0.00% +> aiken/collection/list.{drop_1} | -24.62% | ±0.00% +> aiken/collection/list.{drop_2} | -28.08% | ±0.00% +> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% +> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% +> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% +> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% +> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% +> aiken/collection/list.{filter_2} | -32.06% | ±0.00% +> aiken/collection/list.{filter_3} | -31.39% | ±0.00% +> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% +> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% +> aiken/collection/list.{init_1} | -19.64% | ±0.00% +> aiken/collection/list.{init_2} | -20.01% | ±0.00% +> aiken/collection/list.{init_3} | -13.72% | ±0.00% +> aiken/collection/list.{partition_1} | -14.63% | ±0.00% +> aiken/collection/list.{partition_2} | -16.85% | ±0.00% +> aiken/collection/list.{partition_3} | -16.63% | ±0.00% +> aiken/collection/list.{partition_4} | -16.87% | ±0.00% +> aiken/collection/list.{partition_5} | -22.94% | ±0.00% +> aiken/collection/list.{slice_1} | -29.08% | -2.81% +> aiken/collection/list.{slice_2} | -30.11% | -2.25% +> aiken/collection/list.{slice_3} | -30.29% | -1.46% +> aiken/collection/list.{slice_4} | -28.53% | -1.48% +> aiken/collection/list.{slice_5} | -29.73% | -1.64% +> aiken/collection/list.{slice_6} | -32.01% | -1.80% +> aiken/collection/list.{span_1} | -15.05% | ±0.00% +> aiken/collection/list.{span_2} | -18.03% | ±0.00% +> aiken/collection/list.{span_3} | -12.49% | ±0.00% +> aiken/collection/list.{span_4} | -18.13% | ±0.00% +> aiken/collection/list.{tail_1} | -8.88% | ±0.00% +> aiken/collection/list.{tail_2} | -16.90% | ±0.00% +> aiken/collection/list.{take_1} | -24.98% | ±0.00% +> aiken/collection/list.{take_2} | -24.35% | ±0.00% +> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% +> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% +> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% +> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% +> aiken/collection/list.{unique_1} | -20.20% | ±0.00% +> aiken/collection/list.{unique_2} | -24.34% | ±0.00% +> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% +> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% +> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% +> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% +> aiken/collection/list.{map_1} | -19.79% | ±0.00% +> aiken/collection/list.{map_2} | -16.75% | ±0.00% +> aiken/collection/list.{map2_1} | -20.10% | ±0.00% +> aiken/collection/list.{map2_2} | -17.46% | ±0.00% +> aiken/collection/list.{map2_3} | -15.92% | ±0.00% +> aiken/collection/list.{map3_1} | -20.39% | ±0.00% +> aiken/collection/list.{map3_2} | -19.22% | ±0.00% +> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% +> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% +> aiken/collection/list.{sort_1} | -22.31% | ±0.00% +> aiken/collection/list.{sort_2} | -17.93% | ±0.00% +> aiken/collection/list.{sort_3} | -23.09% | ±0.00% +> aiken/collection/list.{sort_4} | -20.20% | ±0.00% +> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% +> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% +> aiken/collection/list.{concat_1} | -6.56% | ±0.00% +> aiken/collection/list.{concat_2} | -11.25% | ±0.00% +> aiken/collection/list.{concat_3} | -9.35% | ±0.00% +> aiken/collection/list.{difference_1} | -24.23% | ±0.00% +> aiken/collection/list.{difference_2} | -22.59% | ±0.00% +> aiken/collection/list.{difference_3} | -10.64% | ±0.00% +> aiken/collection/list.{difference_4} | -21.68% | ±0.00% +> aiken/collection/list.{zip_1} | -20.10% | ±0.00% +> aiken/collection/list.{zip_2} | -19.17% | ±0.00% +> aiken/collection/list.{zip_3} | -10.35% | ±0.00% +> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% +> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% +> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% +> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% +> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% +> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% +> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% +> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% +> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% +> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% +> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% +> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% +> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% +> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% +> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% +> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% +> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% +> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% +> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% +> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% +> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% +> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% +> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% +> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% +> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% +> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% +> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% +> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% +> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% +> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% +> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% +> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% +> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% +> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% +> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% +> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% +> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% +> aiken/interval.{contains_1} | -21.08% | -4.01% +> aiken/interval.{contains_2} | -31.22% | -13.95% +> aiken/interval.{contains_3} | -26.80% | -10.08% +> aiken/interval.{contains_4} | -31.02% | -13.67% +> aiken/interval.{contains_5} | -32.32% | -13.59% +> aiken/interval.{contains_6} | -28.15% | -9.81% +> aiken/interval.{contains_7} | -32.11% | -13.32% +> aiken/interval.{contains_8} | -29.56% | 
-12.59% +> aiken/interval.{contains_9} | -29.68% | -12.78% +> aiken/interval.{contains_10} | -29.68% | -12.78% +> aiken/interval.{contains_11} | -35.17% | -17.77% +> aiken/interval.{contains_12} | -21.09% | -3.86% +> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% +> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% +> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% +> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% +> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% +> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% +> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% +> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% +> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% +> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% +> aiken/interval.{hull_1} | -21.51% | -0.73% +> aiken/interval.{hull_2} | -23.06% | -0.80% +> aiken/interval.{hull_3} | -22.00% | -0.86% +> aiken/interval.{intersection_1} | -21.51% | -0.73% +> aiken/interval.{intersection_2} | -21.51% | -0.73% +> aiken/interval.{intersection_3} | -26.55% | -4.65% +> aiken/interval.{intersection_4} | -26.45% | -4.51% +> aiken/interval.{intersection_5} | -22.87% | -0.76% +> aiken/interval.{intersection_6} | -19.73% | -0.98% +> aiken/math.{abs_1} | -61.39% | -21.07% +> aiken/math.{abs_2} | -70.90% | -34.84% +> aiken/math.{clamp_1} | -60.95% | -23.55% +> aiken/math.{clamp_2} | -60.95% | -23.55% +> aiken/math.{clamp_3} | -59.22% | -18.20% +> aiken/math.{gcd_test1} | -47.20% | ±0.00% +> aiken/math.{gcd_test2} | -47.81% | ±0.00% +> aiken/math.{gcd_test3} | -46.10% | ±0.00% +> aiken/math.{is_sqrt1} | -87.41% | -68.64% +> aiken/math.{is_sqrt2} | -87.41% | -68.64% +> aiken/math.{log_10_2} | -51.35% | -8.40% +> aiken/math.{log_42_2} | -51.46% | -8.24% +> aiken/math.{log_42_3} | -51.05% | -7.81% +> aiken/math.{log_5_0} | -54.05% | -12.92% +> aiken/math.{log_4_4} | -50.59% | -9.31% +> aiken/math.{log_4_43} | -49.14% | -7.28% +> aiken/math.{max_1} | -61.39% | -21.07% +> aiken/math.{max_2} | -61.39% | -21.07% +> aiken/math.{max_3} | -61.39% | -21.07% +> aiken/math.{min_1} | -61.39% | -21.07% +> aiken/math.{min_2} | -61.39% | -21.07% +> aiken/math.{min_3} | -61.39% | -21.07% +> aiken/math.{pow_3_5} | -46.34% | ±0.00% +> aiken/math.{pow_7_2} | -46.38% | ±0.00% +> aiken/math.{pow_3__4} | -43.50% | ±0.00% +> aiken/math.{pow_0_0} | -43.95% | ±0.00% +> aiken/math.{pow_513_3} | -45.80% | ±0.00% +> aiken/math.{pow_2_4} | -46.79% | ±0.00% +> aiken/math.{pow_2_42} | -46.77% | ±0.00% +> aiken/math.{pow2_neg} | -44.71% | ±0.00% +> aiken/math.{pow2_0} | -45.00% | ±0.00% +> aiken/math.{pow2_1} | -45.00% | ±0.00% +> aiken/math.{pow2_4} | -45.00% | ±0.00% +> aiken/math.{pow2_42} | -42.01% | ±0.00% +> aiken/math.{pow2_256} | -41.40% | ±0.00% +> aiken/math.{sqrt1} | -32.56% | -17.18% +> aiken/math.{sqrt2} | -32.56% | -17.18% +> aiken/math.{sqrt3} | -49.99% | -8.90% +> aiken/math.{sqrt4} | -51.76% | -3.90% +> aiken/math.{sqrt5} | -52.63% | -1.33% +> aiken/math.{sqrt6} | -28.16% | -15.41% +> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% +> aiken/math/rational.{new_1} | -22.98% | ±0.00% +> aiken/math/rational.{zero_1} | -8.08% | ±0.00% +> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% +> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% +> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% +> aiken/math/rational.{negate_1} | -15.39% | ±0.00% +> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% +> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% +> aiken/math/rational.{add_1} | -15.11% | ±0.00% +> aiken/math/rational.{add_2} | -15.11% | ±0.00% +> aiken/math/rational.{div_1} | -22.31% | -2.75% +> aiken/math/rational.{div_2} | -22.37% | -2.79% +> aiken/math/rational.{mul_1} | -13.37% | ±0.00% +> aiken/math/rational.{mul_2} | -13.37% | ±0.00% +> aiken/math/rational.{mul_3} | -26.25% | ±0.00% +> aiken/math/rational.{sub_1} | -15.11% | ±0.00% +> aiken/math/rational.{sub_2} | -15.11% | ±0.00% +> aiken/math/rational.{sub_3} | -15.11% | ±0.00% +> aiken/math/rational.{compare_1} | -21.70% | ±0.00% +> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% +> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% +> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% +> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% +> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% +> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% +> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% +> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% +> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% +> aiken/math/rational.{floor_1} | -29.49% | ±0.00% +> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% +> aiken/math/rational.{round_1} | -25.17% | ±0.00% +> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% +> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% +> aiken/option.{is_none_1} | -26.56% | ±0.00% +> aiken/option.{is_none_2} | -27.52% | ±0.00% +> aiken/option.{is_some_1} | -27.52% | ±0.00% +> aiken/option.{is_some_2} | -26.56% | ±0.00% +> aiken/option.{and_then_1} | -20.19% | ±0.00% +> aiken/option.{and_then_2} | -22.15% | ±0.00% +> aiken/option.{and_then_3} | -21.85% | ±0.00% +> aiken/option.{choice_1} | -17.11% | ±0.00% +> aiken/option.{choice_2} | -19.75% | ±0.00% +> aiken/option.{choice_3} | -18.68% | ±0.00% +> aiken/option.{flatten_1} | -12.25% | ±0.00% +> aiken/option.{flatten_2} | -15.41% | ±0.00% +> aiken/option.{flatten_3} | -19.46% | ±0.00% +> aiken/option.{flatten_4} | -14.31% | ±0.00% +> aiken/option.{map_1} | -19.89% | ±0.00% +> aiken/option.{map_2} | -18.18% | ±0.00% +> aiken/option.{map2_1} | -20.47% | ±0.00% +> aiken/option.{map2_2} | -19.93% | ±0.00% +> aiken/option.{map2_3} | -13.64% | ±0.00% +> aiken/option.{map3_1} | -20.74% | ±0.00% +> aiken/option.{map3_2} | -20.00% | ±0.00% +> aiken/option.{map3_3} | -19.90% | ±0.00% +> aiken/option.{or_try_1} | -14.36% | ±0.00% +> aiken/option.{or_try_2} | -14.36% | ±0.00% +> aiken/option.{or_else_1} | -38.16% | ±0.00% +> aiken/option.{or_else_2} | -27.62% | ±0.00% +> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% +> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% +> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% +> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% +> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% +> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% +> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% +> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% +> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% +> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% +> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% +> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% +> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% +> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% +> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% +> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% +> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% +> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% +> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% +> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% +> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% +> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% +> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% +> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% +> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% +> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% +> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% +> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% +> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% +> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% +> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% +> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% +> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% +> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% +> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% +> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% +> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% +> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% +> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% +> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% +> aiken/primitive/string.{concat_1} | -92.30% | -80.10% +> aiken/primitive/string.{concat_2} | -97.34% | -85.87% +> aiken/primitive/string.{concat_3} | -98.67% | -80.35% +> aiken/primitive/string.{join_1} | -42.87% | ±0.00% +> aiken/primitive/string.{join_2} | -37.65% | ±0.00% +> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% +> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% +> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% +> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% +> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% +> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% +> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% +> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% +> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% +> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% +> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% +> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% +> cardano/assets.{add_1} | -27.84% | ±0.00% +> cardano/assets.{add_2} | -27.56% | -0.54% +> cardano/assets.{add_3} | -26.39% | ±0.00% +> cardano/assets.{add_4} | -29.75% | -10.41% +> cardano/assets.{add_5} | -27.80% | ±0.00% +> cardano/assets.{merge_1} | -26.02% | ±0.00% +> cardano/assets.{merge_2} | -19.60% | ±0.00% +> cardano/assets.{merge_3} | -23.80% | ±0.00% +> cardano/assets.{merge_4} | -25.92% | ±0.00% +> cardano/assets.{merge_5} | -27.61% | -1.98% +> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% +> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% +> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% +> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% +> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% +> cardano/assets.{reduce_1} | -24.31% | ±0.00% +> cardano/assets.{reduce_2} | -20.89% | ±0.00% +> cardano/assets.{reduce_3} | -36.21% | ±0.00% +>
+ +### Added + +- New modules covering Conway-related features (i.e. governance) + - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) + - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) + +- New primitives in `aiken/collection/pairs`: + - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) + - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) + +- New primitives in `aiken/crypto`: + - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) + - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) + +- New primitives in `aiken/math`: + - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) + +- New primitives in `aiken/primitive/bytearray`: + - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) + - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) + - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) + - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) + - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) + +- New primitives in `aiken/primitive/int`: + - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) + - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) + +- New primitives in `aiken/crypto`: + - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) + - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) + +### Changed + +- Few modules have been relocated and better organized: + - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) + - **collections** + - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) + - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) + - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) + - **primitive** + - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) + - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) + - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) + - **cardano** + - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) + - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) + - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` + - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) + +- Several zero-argument functions have been turned into top-level constants + - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty)
+  - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty)
+  - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything)
+  - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero)
+  - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero)
+
+- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3.
+
+- The `ScriptContext` type has been split out of `cardano/transaction` (originally `aiken/transaction`), moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html), and adjusted to its new form as per Plutus V3.
+
+- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` to `VerificationKey` and `Script` respectively.
+
+- The functions `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last` respectively.
+
+- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`.
+
+### Removed
+
+- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere.
+
+- `MintedValue` (from `aiken/transaction/value` originally) and its associated functions are no longer needed and are, therefore, gone.
+
+## v1.9.0 - 2024-05-24
+
+### Added
+
+- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`).
+
+### Changed
+
+- **BREAKING-CHANGE**
+  Specialized all `Dict` keys to `ByteArray`, thus removing the need to pass an extra comparison function to many functions. `Dict` is, however, still specialized with a phantom type for keys.
+
+- **BREAKING-CHANGE**
+  A few functions from `Dict` have been renamed for consistency:
+  - `from_list` -> `from_pairs`
+  - `from_ascending_list` -> `from_ascending_pairs`
+  - `to_list` -> `to_pairs`
+
+### Removed
+
+N/A
+
+## v1.8.0 - 2024-03-28
+
+### Added
+
+- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements.
+
+- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a `Value` while enforcing the invariants expected of `Value`.
+
+- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`.
+
+### Changed
+
+- Disclaimers in documentation to [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding.
+
+### Removed
+
+N/A
+
+## v1.7.0 - 2023-11-07
+
+### Added
+
+- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a value's index in a list.
+- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions.
+- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null.
+
+### Changed
+
+- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contains a null quantity of Ada.
+
+### Removed
+
+N/A
+
+## v1.6.0 - 2023-09-08
+
+### Added
+
+- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentiation with powers of two.
+- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing whether a bit is set in a bytearray (MSB).
+
+## v1.5.0 - 2023-08-16
+
+### Removed
+
+- Retired `list.and` and `list.or` because of the new keywords for logical op chaining.
+
+## v1.4.0 - 2023-07-21
+
+### Changed
+
+- Fixed a missing null-check on `value.add`. Adding a null quantity of a token is now correctly a no-op.
+
+## v1.3.0 - 2023-06-30
+
+### Added
+
+- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method.
+- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#denominator): For accessing parts of a rational value.
+- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing the [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values (a usage sketch follows at the end of this entry).
+- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing the [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values.
+
+### Changed
+
+- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before this fix, removing all assets under a given policy could leave an empty dictionary of assets behind in the `Value`.
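To make the v1.3.0 additions above a little more concrete, here is a small, editor-added sketch. It assumes `math.sqrt` returns an `Option<Int>` (integer square root, `None` for negative inputs), that `rational.arithmetic_mean` takes a list of rationals and returns an `Option`, and that `rational.compare` is the right way to compare results; treat it as a sketch under those assumptions rather than documented behaviour.

```aiken
use aiken/math
use aiken/math/rational

test example_sqrt() {
  // Integer square root: 4 * 4 <= 17 < 5 * 5.
  math.sqrt(17) == Some(4)
}

test example_arithmetic_mean() {
  // The mean of 1/2 and 3/4 is 5/8. Compare with `rational.compare` rather
  // than `==`, since rationals are not guaranteed to be in reduced form.
  expect Some(a) = rational.new(1, 2)
  expect Some(b) = rational.new(3, 4)
  expect Some(mean) = rational.arithmetic_mean([a, b])
  expect Some(expected) = rational.new(5, 8)
  rational.compare(mean, expected) == Equal
}
```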
+
+## v1.2.0 - 2023-06-17
+
+### Added
+
+- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue)
+- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value`
+- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue`
+- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex-encoded `String`
+- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers.
+  - [x] `abs`
+  - [x] `add`
+  - [x] `ceil`
+  - [x] `compare`
+  - [x] `compare_with`
+  - [x] `div`
+  - [x] `floor`
+  - [x] `from_int`
+  - [x] `mul`
+  - [x] `negate`
+  - [x] `new`
+  - [x] `proper_fraction`
+  - [x] `reciprocal`
+  - [x] `reduce`
+  - [x] `round`
+  - [x] `round_even`
+  - [x] `sub`
+  - [x] `truncate`
+  - [x] `zero`
+
+### Removed
+
+- The `MintedValue` module was merged into `Value`.
+
+## v1.1.0 - 2023-06-06
+
+### Added
+
+- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate.
+
+- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a UTF-8 encoded `ByteArray`, when possible.
+
+- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): For left and right folds over dictionary elements in ascending key order.
+
+- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exists, the provided merge function is called.
+
+- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset.
+
+- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionary of dictionaries.
+
+- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. It simplifies part of the implementation of `Value`, which no longer needs to handle the special case where null-quantity tokens are present. It isn't possible to construct a `MintedValue` by hand; such values come entirely from the script context and are 'read-only'.
+
+- More documentation for the `dict` and `interval` modules.
+
+### Changed
+
+> **Warning**
+>
+> Most of these changes are breaking changes. Though, given we're still in an
+> alpha state, only the `minor` component of the version number is bumped.
+> Please forgive us.
+
+- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as last argument. For example:
+
+  ```
+  fn foldl(self: List<a>, with: fn(a, b) -> b, zero: b) -> b
+
+  ↓ becomes
+
+  fn foldl(self: List<a>, zero: b, with: fn(a, b) -> b) -> b
+  ```
+
+- Fixed the implementation of `bytearray.slice`; previously, `slice` behaved as if the second argument were an offset.
+ +- Rename `transaction/value.add` into `transaction/value.merge`. + +- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. + +- Fixed various examples from the documentation + +### Removed + +- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. + +## v1.0.0 - 2023-04-13 + +### Added + +N/A + +### Changed + +N/A + +### Removed + +N/A diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE new file mode 100644 index 00000000..4a1de273 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Lucas Rosa + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md new file mode 100644 index 00000000..4cd6fef2 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md @@ -0,0 +1,71 @@ +
+<!-- Aiken logo (images omitted) -->
+
+# Aiken Standard Library

+ +[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE) +[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml) + +
+
+ +## Getting started + +``` +aiken add aiken-lang/stdlib --version v2 +``` + +## Compatibility + +aiken's version | stdlib's version(s) +--- | --- +`v1.1.3`
<br/>`v1.1.4`<br/>`v1.1.5`<br/>`v1.1.6`<br/>`v1.1.7` | `>= 2.1.0`
+`v1.1.1`<br/>`v1.1.2` | `>= 2.0.0` && `< 2.1.0`
+`v1.0.29-alpha`<br/>
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` +`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` + +## Overview + +The official standard library for the [Aiken](https://aiken-lang.org) Cardano +smart-contract language. + +It extends the language builtins with useful data-types, functions, constants +and aliases that make using Aiken a bliss. + +```aiken +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash} +use cardano/transaction.{OutputReference, Transaction} + +pub type Datum { + owner: VerificationKeyHash, +} + +pub type Redeemer { + msg: ByteArray, +} + +/// A simple validator which replicates a basic public/private signature lock. +/// +/// - The key (hash) is set as datum when the funds are sent to the script address. +/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message +/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' +/// +validator hello_world { + spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { + expect Some(Datum { owner }) = datum + + let must_say_hello = redeemer.msg == "Hello, World!" + + let must_be_signed = list.has(self.extra_signatories, owner) + + and { + must_say_hello, + must_be_signed, + } + } +} +``` + +## Stats + +![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock new file mode 100644 index 00000000..769ac20f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock @@ -0,0 +1,16 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" + +[[packages]] +name = "aiken-lang/fuzz" +version = "v2" +requirements = [] +source = "github" + +[etags] +"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml new file mode 100644 index 00000000..cbc76a0b --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml @@ -0,0 +1,15 @@ +name = "aiken-lang/stdlib" +version = "2.2.0" +compiler = "v1.1.9" +plutus = "v3" +description = "The Aiken Standard Library" + +[repository] +user = "aiken-lang" +project = "stdlib" +platform = "github" + +[[dependencies]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak new file mode 100644 index 00000000..f0c66d69 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak @@ -0,0 +1,293 @@ +use aiken +use aiken/builtin.{decode_utf8, serialise_data} +use aiken/primitive/bytearray + +/// Obtain a String representation of _anything_. This is particularly (and only) useful for tracing +/// and debugging. This function is expensive and should not be used in any production code as it +/// will very likely explodes the validator's budget. 
+/// +/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) +/// of the underlying on-chain binary representation of the data. It's not as +/// easy to read as plain Aiken code, but it is handy for troubleshooting values +/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is +/// a good idea in the Cardano world. +/// +/// ```aiken +/// cbor.diagnostic(42) == "42" +/// cbor.diagnostic(#"a1b2") == "h'A1B2'" +/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" +/// cbor.diagnostic([]) == "[]" +/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" +/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" +/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" +/// cbor.diagnostic(Some(42)) == "121([_ 42])" +/// cbor.diagnostic(None) == "122([])" +/// ``` +pub fn diagnostic(self: Data) -> String { + aiken.diagnostic(self, #"") + |> decode_utf8 +} + +/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). +/// In particular, we have the following property: +/// +/// ```aiken +/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) +/// ``` +/// +/// > [!CAUTION] +/// > Unfortunately, this function isn't derived from a builtin primitive. It +/// > is therefore an order of magnitude more expensive than its counterpart +/// > and shall be used with care. +/// > +/// > In general, one might prefer avoiding deserialisation unless truly necessary. +/// > Yet, it may come in handy for testing and in rare scenarios. +pub fn deserialise(bytes: ByteArray) -> Option { + let length = bytearray.length(bytes) + + let peek = + fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(bytearray.at(bytes, length - cursor))(cursor - offset) + } + } + } + + let take = + fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(builtin.slice_bytearray(length - cursor, n, bytes))( + cursor - n, + ) + } + } + } + + if length == 0 { + None + } else { + let Pair(result, consumed) = decode_data(peek, take)(length) + if consumed != 0 { + None + } else { + Some(result) + } + } +} + +/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). +/// +/// This is particularly useful in combination with hashing functions, as a way +/// to obtain a byte representation that matches the serialised representation +/// used by the ledger in the context of on-chain code. +/// +/// Note that the output matches the output of [`diagnostic`](#diagnostic), +/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a +/// textual representation of the CBOR encoding that is human friendly and +/// useful for debugging. +/// +/// ```aiken +/// cbor.serialise(42) == #"182a" +/// cbor.serialise(#"a1b2") == #"42a1b2" +/// cbor.serialise([]) == #"80" +/// cbor.serialise((1, 2)) == #"9f0102ff" +/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" +/// cbor.serialise([(1, #"ff")]) == #"a10141ff" +/// cbor.serialise(Some(42)) == #"d8799f182aff" +/// cbor.serialise(None) == #"d87a80" +/// ``` +pub fn serialise(self: Data) -> ByteArray { + serialise_data(self) +} + +type Byte = + Int + +type Decoder
= + fn(Int) -> Pair + +type Peek = + fn(Int, fn(Byte) -> Decoder) -> Decoder + +type Take = + fn(Int, fn(ByteArray) -> Decoder) -> Decoder + +fn return(data: Data) -> Decoder { + fn(cursor) { Pair(data, cursor) } +} + +const deserialise_failure: Pair = { + let empty: Data = "" + Pair(empty, -1) + } + +const token_begin_bytes = 0x5f + +const token_begin_list = 0x9f + +const token_begin_map = 0xbf + +const token_break = 0xff + +fn decode_data(peek: Peek, take: Take) -> Decoder { + let next <- peek(1) + let major_type = next / 32 + if major_type <= 2 { + if major_type == 0 { + let i <- decode_uint(peek, take, next) + return(builtin.i_data(i)) + } else if major_type == 1 { + let i <- decode_uint(peek, take, next - 32) + return(builtin.i_data(-i - 1)) + } else { + if next == token_begin_bytes { + let b <- decode_chunks(peek, take) + return(builtin.b_data(b)) + } else { + let b <- decode_bytes(peek, take, next - 64) + return(builtin.b_data(b)) + } + } + } else if major_type == 6 { + let tag <- decode_uint(peek, take, next - 192) + let next <- peek(1) + if tag == 102 { + fn(_) { deserialise_failure } + } else { + let ix = + if tag >= 1280 { + tag - 1280 + 7 + } else { + tag - 121 + } + if next == token_begin_list { + let fields <- decode_indefinite(peek, take, decode_data) + return(builtin.constr_data(ix, fields)) + } else { + let size <- decode_uint(peek, take, next - 128) + let fields <- decode_definite(peek, take, decode_data, size) + return(builtin.constr_data(ix, fields)) + } + } + } else if major_type == 4 { + if next == token_begin_list { + let xs <- decode_indefinite(peek, take, decode_data) + return(builtin.list_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 128) + let xs <- decode_definite(peek, take, decode_data, size) + return(builtin.list_data(xs)) + } + } else if major_type == 5 { + if next == token_begin_map { + let xs <- decode_indefinite(peek, take, decode_pair) + return(builtin.map_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 160) + let xs <- decode_definite(peek, take, decode_pair, size) + return(builtin.map_data(xs)) + } + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_pair(peek: Peek, take: Take) -> Decoder> { + fn(cursor) { + let Pair(k, cursor) = decode_data(peek, take)(cursor) + let Pair(v, cursor) = decode_data(peek, take)(cursor) + Pair(Pair(k, v), cursor) + } +} + +fn decode_uint( + peek: Peek, + take: Take, + header: Int, + and_then: fn(Int) -> Decoder, +) -> Decoder { + if header < 24 { + and_then(header) + } else if header == 24 { + let payload <- peek(1) + and_then(payload) + } else if header < 28 { + let width = bytearray.at(#[2, 4, 8], header - 25) + let payload <- take(width) + and_then(bytearray.to_int_big_endian(payload)) + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_bytes( + peek: Peek, + take: Take, + header: Int, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let width <- decode_uint(peek, take, header) + let bytes <- take(width) + and_then(bytes) +} + +fn decode_chunks( + peek: Peek, + take: Take, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then("") + } else { + let chunk <- decode_bytes(peek, take, next - 64) + let chunks <- decode_chunks(peek, take) + and_then(builtin.append_bytearray(chunk, chunks)) + } +} + +fn decode_definite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + size: Int, + and_then: fn(List) -> Decoder, +) -> Decoder { + if size <= 0 { + and_then([]) + } else { + fn(cursor) 
{ + let Pair(elem, cursor) = decode_one(peek, take)(cursor) + { + let elems <- decode_definite(peek, take, decode_one, size - 1) + and_then([elem, ..elems]) + }(cursor) + } + } +} + +fn decode_indefinite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + and_then: fn(List) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then([]) + } else { + fn(cursor) { + let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) + { + let elems <- decode_indefinite(peek, take, decode_one) + and_then([elem, ..elems]) + }(cursor) + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak new file mode 100644 index 00000000..28d9f5bb --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak @@ -0,0 +1,297 @@ +use aiken/cbor.{deserialise, diagnostic, serialise} +use aiken/fuzz + +// ------------------------------------------------------------------ diagnostic + +test diagnostic_1() { + diagnostic(42) == @"42" +} + +test diagnostic_2() { + diagnostic(#"a1b2") == @"h'A1B2'" +} + +test diagnostic_3() { + diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" +} + +test diagnostic_4() { + diagnostic([]) == @"[]" +} + +test diagnostic_5() { + diagnostic((1, 2)) == @"[_ 1, 2]" +} + +test diagnostic_6() { + diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" +} + +test diagnostic_7() { + diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" +} + +test diagnostic_7_alt() { + diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" +} + +test diagnostic_8() { + diagnostic(Some(42)) == @"121([_ 42])" +} + +test diagnostic_9() { + diagnostic(None) == @"122([])" +} + +test diagnostic_10() { + let xs: List<(Int, Int)> = [] + diagnostic(xs) == @"[]" +} + +test diagnostic_10_alt() { + let xs: Pairs = [] + diagnostic(xs) == @"{}" +} + +type Foo { + foo: Bar, +} + +type Bar { + A + B(Int) +} + +test diagnostic_11() { + diagnostic(Foo { foo: A }) == @"121([_ 121([])])" +} + +test diagnostic_12() { + diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" +} + +type Baz { + a0: Int, + b0: ByteArray, +} + +test diagnostic_13() { + diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" +} + +test diagnostic_14() { + diagnostic([0]) == @"[_ 0]" +} + +test diagnostic_15() { + diagnostic(-42) == @"-42" +} + +test diagnostic_16() { + diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" +} + +// ------------------------------------------------------------------ serialise + +test serialise_1() { + serialise(42) == #"182a" +} + +test serialise_2() { + serialise(#"a1b2") == #"42a1b2" +} + +test serialise_3() { + serialise([]) == #"80" +} + +test serialise_4() { + serialise((1, 2)) == #"9f0102ff" +} + +test serialise_5() { + serialise((1, #"ff", 3)) == #"9f0141ff03ff" +} + +test serialise_6() { + serialise([(1, #"ff")]) == #"9f9f0141ffffff" +} + +test serialise_7() { + serialise(Some(42)) == #"d8799f182aff" +} + +test serialise_8() { + serialise(None) == #"d87a80" +} + +test serialise_9() { + serialise([Pair(1, #"ff")]) == #"a10141ff" +} + +// ------------------------------------------------------------------ deserialise + +type AnyData { + AnyInt(Int) + AnyByteArray(ByteArray) + AnyList(List) + AnyPairs(Pairs) + AnyUnaryConstr0(UnaryConstr0) + AnyUnaryConstr1(UnaryConstr1) + AnyUnaryConstr2(UnaryConstr2) + AnyBinaryConstr0(BinaryConstr0) + AnyBinaryConstr1(BinaryConstr1) +} + +type UnaryConstr0 { + 
UnaryConstr0 +} + +type UnaryConstr1 { + field0: String, +} + +type UnaryConstr2 { + field0: Int, + field1: List>, +} + +type BinaryConstr0 = + Bool + +type BinaryConstr1 = + Option + +fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { + let k <- fuzz.and_then(any_key) + let v <- fuzz.map(any_value) + Pair(k, v) +} + +fn any_data() -> Fuzzer { + fuzz.either6( + { + let i <- fuzz.map(fuzz.int()) + AnyInt(i) + }, + { + let bs <- fuzz.map(fuzz.bytearray()) + AnyByteArray(bs) + }, + { + let xs <- fuzz.map(fuzz.list(fuzz.int())) + AnyList(xs) + }, + { + let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) + AnyPairs(ps) + }, + fuzz.either3( + fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), + fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), + { + let i <- fuzz.and_then(fuzz.int()) + let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) + AnyUnaryConstr2(UnaryConstr2(i, xs)) + }, + ), + fuzz.either( + { + let b <- fuzz.map(fuzz.bool()) + AnyBinaryConstr0(b) + }, + { + let o <- fuzz.map(fuzz.option(fuzz.int())) + AnyBinaryConstr1(o) + }, + ), + ) +} + +test unit_deserialise_not_enough_bytes_1() { + expect None = deserialise(#"") +} + +test unit_deserialise_not_enough_bytes_2() { + expect None = deserialise(#"82") +} + +test unit_deserialise_non_empty_leftovers() { + expect None = deserialise(#"811442") +} + +test unit_deserialise_invalid_header() { + expect None = deserialise(#"f1") +} + +test unit_deserialise_invalid_uint() { + expect None = deserialise(#"1d0013bdae") +} + +/// A full script context with a minting policy and various assets. Meant to be +/// non-trivial and cover many things we might encounter in a transaction. +test bench_deserialise_script_context() { + expect Some(_) = + deserialise( + #"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad
086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", + ) +} + +test prop_deserialise_any_data(any via any_data()) { + when any is { + AnyInt(i) -> { + fuzz.label(@"Int") + expect Some(data) = deserialise(serialise(i)) + expect i_decoded: Int = data + i_decoded == i + } + AnyByteArray(bs) -> { + fuzz.label(@"ByteArray") + expect Some(data) = deserialise(serialise(bs)) + expect bs_decoded: ByteArray = data + bs_decoded == bs + } + AnyList(xs) -> { + fuzz.label(@"List") + expect Some(data) = deserialise(serialise(xs)) + expect xs_decoded: List = data + xs_decoded == xs + } + AnyPairs(ps) -> { + fuzz.label(@"Pairs") + expect Some(data) = deserialise(serialise(ps)) + expect ps_decoded: Pairs = data + ps_decoded == ps + } + AnyUnaryConstr0(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr0 = data + constr_decoded == constr + } + AnyUnaryConstr1(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr1 = data + constr_decoded == constr + } + AnyUnaryConstr2(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr2 = data + constr_decoded == constr + } + AnyBinaryConstr0(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr0 = data + constr_decoded == constr + } + AnyBinaryConstr1(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr1 = data + constr_decoded == constr + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak new file mode 100644 index 00000000..3d4d332e --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak @@ -0,0 +1,4 @@ +/// A non negative integer that materializes the position of an element in a +/// collection. +pub type Index = + Int diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak new file mode 100644 index 00000000..681d0bae --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak @@ -0,0 +1,1174 @@ +//// A module for working with bytearray dictionaries. +//// +//// +//// > [!IMPORTANT] +//// > +//// > Dictionaries are **ordered sets** of key-value pairs, which thus +//// > preserve some invariants. Specifically, each key is only present once in +//// > the dictionary and all keys are stored in ascending lexicographic order. +//// > +//// > These invariants allow for more optimized functions to operate on `Dict`, +//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` +//// > from an unknown `Data`, you must first recover an `Pairs` and use +//// > [`dict.from_ascending_list`](#from_ascending_list). + +use aiken/builtin + +/// An opaque `Dict`. 
The type is opaque because the module maintains some +/// invariant, namely: there's only one occurrence of a given key in the dictionary. +/// +/// Note that the `key` parameter is a phantom-type, and only present as a +/// means of documentation. Keys can be any type, yet will need to comparable +/// to use functions like `insert`. +/// +/// See for example: +/// +/// ```aiken +/// pub type Value = +/// Dict> +/// ``` +pub opaque type Dict { + inner: Pairs, +} + +// ## Constructing + +/// An empty dictionnary. +/// ```aiken +/// dict.to_pairs(dict.empty) == [] +/// ``` +pub const empty: Dict = Dict { inner: [] } + +const foo = #"666f6f" + +const bar = #"626172" + +const baz = #"62617a" + +const fixture_1 = + empty + |> insert(foo, 42) + |> insert(bar, 14) + +/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending +/// keys. This function fails (i.e. halts the program execution) if the list isn't +/// sorted. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// let result = +/// dict.from_ascending_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +/// +/// This is meant to be used to turn a list constructed off-chain into a `Dict` +/// which has taken care of maintaining interval invariants. This function still +/// performs a sanity check on all keys to avoid silly mistakes. It is, however, +/// considerably faster than ['from_pairs'](from_pairs) +pub fn from_ascending_pairs(xs: Pairs) -> Dict { + let Void = check_ascending_list(xs) + Dict { inner: xs } +} + +fn check_ascending_list(xs: Pairs) { + when xs is { + [] -> Void + [_] -> Void + [Pair(x0, _), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + check_ascending_list([e, ..rest]) + } else { + fail @"keys in associative list aren't in ascending order" + } + } +} + +/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** +/// value doesn't satisfy the predicate. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail +/// ``` +pub fn from_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) -> Dict { + let Void = check_ascending_pairs_with(xs, predicate) + Dict { inner: xs } +} + +fn check_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) { + when xs is { + [] -> Void + [Pair(_, v)] -> + if predicate(v) { + Void + } else { + fail @"value doesn't satisfy predicate" + } + [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + if predicate(v0) { + check_ascending_pairs_with([e, ..rest], predicate) + } else { + fail @"value doesn't satisfy predicate" + } + } else { + fail @"keys in pairs aren't in ascending order" + } + } +} + +test bench_from_ascending_pairs() { + let dict = + from_ascending_pairs( + [ + Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), + Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), + Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), + Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), + ], + ) + + size(dict) == 16 +} + +/// Construct a dictionary from a list of key-value pairs. Note that when a key is present +/// multiple times, the first occurrence prevails. 
+/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] +/// +/// let result = +/// dict.from_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn from_pairs(self: Pairs) -> Dict { + Dict { inner: do_from_pairs(self) } +} + +fn do_from_pairs(xs: Pairs) -> Pairs { + when xs is { + [] -> [] + [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) + } +} + +test from_list_1() { + from_pairs([]) == empty +} + +test from_list_2() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( + [Pair(bar, 14), Pair(foo, 42)], + ) +} + +test from_list_3() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 +} + +test from_list_4() { + from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 +} + +test bench_from_pairs() { + let dict = + from_pairs( + [ + Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), + Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), + Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), + Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), + ], + ) + + size(dict) == 16 +} + +// ## Inspecting + +/// Finds a value in the dictionary, and returns the first key found to have that value. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 42) +/// |> dict.insert(key: "b", value: 14) +/// |> dict.insert(key: "c", value: 42) +/// |> dict.find(42) +/// +/// result == Some("a") +/// ``` +pub fn find(self: Dict, value v: value) -> Option { + do_find(self.inner, v) +} + +fn do_find(self: Pairs, value v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + do_find(rest, v) + } + } +} + +test find_1() { + find(empty, foo) == None +} + +test find_2() { + find( + empty + |> insert(foo, 14), + 14, + ) == Some(foo) +} + +test find_3() { + find( + empty + |> insert(foo, 14), + 42, + ) == None +} + +test find_4() { + find( + empty + |> insert(foo, 14) + |> insert(bar, 42) + |> insert(baz, 14), + 14, + ) == Some(baz) +} + +/// Get a value in the dict by its key. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.get(key: "a") +/// +/// result == Some("Aiken") +/// ``` +pub fn get(self: Dict, key: ByteArray) -> Option { + do_get(self.inner, key) +} + +fn do_get(self: Pairs, key k: ByteArray) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + Some(v) + } else { + None + } + } else { + do_get(rest, k) + } + } +} + +test get_1() { + get(empty, foo) == None +} + +test get_2() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: foo) == Some("Aiken") +} + +test get_3() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: baz) == None +} + +test get_4() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "bcd") == None +} + +test get_5() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "kkk") == None +} + +/// Check if a key exists in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.has_key("a") +/// +/// result == True +/// ``` +pub fn has_key(self: Dict, key k: ByteArray) -> Bool { + do_has_key(self.inner, k) +} + +fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { + when self is { + [] -> False + [Pair(k2, _), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + k == k2 + } else { + do_has_key(rest, k) + } + } +} + +test has_key_1() { + !has_key(empty, foo) +} + +test has_key_2() { + has_key( + empty + |> insert(foo, 14), + foo, + ) +} + +test has_key_3() { + !has_key( + empty + |> insert(foo, 14), + bar, + ) +} + +test has_key_4() { + has_key( + empty + |> insert(foo, 14) + |> insert(bar, 42), + bar, + ) +} + +/// Efficiently checks whether a dictionary is empty. +/// ```aiken +/// dict.is_empty(dict.empty) == True +/// ``` +pub fn is_empty(self: Dict) -> Bool { + when self.inner is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty(empty) +} + +/// Extract all the keys present in a given `Dict`. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("a", 1337) +/// |> dict.keys() +/// +/// result == ["a", "b"] +/// ``` +pub fn keys(self: Dict) -> List { + do_keys(self.inner) +} + +fn do_keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] + } +} + +test keys_1() { + keys(empty) == [] +} + +test keys_2() { + keys( + empty + |> insert(foo, 0) + |> insert(bar, 0), + ) == [bar, foo] +} + +/// Return the number of key-value pairs in the dictionary. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.size() +/// +/// result == 3 +/// ``` +pub fn size(self: Dict) -> Int { + do_size(self.inner) +} + +fn do_size(self: Pairs) -> Int { + when self is { + [] -> 0 + [_, ..rest] -> 1 + do_size(rest) + } +} + +test size_1() { + size(empty) == 0 +} + +test size_2() { + size( + empty + |> insert(foo, 14), + ) == 1 +} + +test size_3() { + size( + empty + |> insert(foo, 14) + |> insert(bar, 42), + ) == 2 +} + +/// Extract all the values present in a given `Dict`. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("c", 1337) +/// |> dict.values() +/// +/// result == [14, 42, 1337] +/// ``` +pub fn values(self: Dict) -> List { + do_values(self.inner) +} + +fn do_values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..do_values(rest)] + } +} + +test values_1() { + values(empty) == [] +} + +test values_2() { + values( + empty + |> insert(foo, 3) + |> insert(bar, 4), + ) == [4, 3] +} + +// ## Modifying + +/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.delete(key: "a") +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200)] +/// ``` +pub fn delete(self: Dict, key: ByteArray) -> Dict { + Dict { inner: do_delete(self.inner, key) } +} + +fn do_delete( + self: Pairs, + key k: ByteArray, +) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + rest + } else { + self + } + } else { + [Pair(k2, v2), ..do_delete(rest, k)] + } + } +} + +test delete_1() { + delete(empty, foo) == empty +} + +test delete_2() { + let m = + empty + |> insert(foo, 14) + delete(m, foo) == empty +} + +test delete_3() { + let m = + empty + |> insert(foo, 14) + delete(m, bar) == m +} + +test delete_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + !has_key(delete(m, foo), foo) +} + +test delete_5() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + has_key(delete(m, bar), foo) +} + +test delete_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + delete(m, "bcd") == m +} + +/// Keep only the key-value pairs that pass the given predicate. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.filter(fn(k, _v) { k != "a" }) +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn filter( + self: Dict, + with: fn(ByteArray, value) -> Bool, +) -> Dict { + Dict { inner: do_filter(self.inner, with) } +} + +fn do_filter( + self: Pairs, + with: fn(ByteArray, value) -> Bool, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> + if with(k, v) { + [Pair(k, v), ..do_filter(rest, with)] + } else { + do_filter(rest, with) + } + } +} + +test filter_1() { + filter(empty, fn(_, _) { True }) == empty +} + +test filter_2() { + let expected = + empty + |> insert(foo, 42) + filter(fixture_1, fn(_, v) { v > 14 }) == expected +} + +test filter_3() { + let expected = + empty + |> insert(bar, 14) + filter(fixture_1, fn(k, _) { k == bar }) == expected +} + +/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 1) +/// |> dict.insert(key: "b", value: 2) +/// |> dict.insert(key: "a", value: 3) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 3), Pair("b", 2)] +/// ``` +pub fn insert( + self: Dict, + key k: ByteArray, + value v: value, +) -> Dict { + Dict { inner: do_insert(self.inner, k, v) } +} + +fn do_insert( + self: Pairs, + key k: ByteArray, + value v: value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..do_insert(rest, k, v)] + } + } + } +} + +test insert_1() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(foo, 14) + insert(m1, foo, 14) == m2 +} + +test insert_2() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(bar, 14) + insert(m1, bar, 14) == insert(m2, foo, 42) +} + +/// Insert a value in the dictionary at a given key. When the key already exist, the provided +/// merge function is called. The value existing in the dictionary is passed as the second argument +/// to the merge function, and the new value is passed as the third argument. 
+/// +/// ```aiken +/// let sum = +/// fn (_k, a, b) { Some(a + b) } +/// +/// let result = +/// dict.empty +/// |> dict.insert_with(key: "a", value: 1, with: sum) +/// |> dict.insert_with(key: "b", value: 2, with: sum) +/// |> dict.insert_with(key: "a", value: 3, with: sum) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 4), Pair("b", 2)] +/// ``` +pub fn insert_with( + self: Dict, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { + inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), + } +} + +test insert_with_1() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 1)] +} + +test insert_with_2() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> insert_with(key: "foo", value: 3, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 4)] +} + +test insert_with_3() { + let with = + fn(k, a, _b) { + if k == "foo" { + Some(a) + } else { + None + } + } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: with) + |> insert_with(key: "bar", value: 2, with: with) + |> insert_with(key: "foo", value: 3, with: with) + |> insert_with(key: "bar", value: 4, with: with) + |> to_pairs() + + result == [Pair("foo", 1)] +} + +/// Apply a function to all key-value pairs in a Dict. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.map(fn(_k, v) { v * 2 }) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] +/// ``` +pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { + Dict { inner: do_map(self.inner, with) } +} + +fn do_map( + self: Pairs, + with: fn(ByteArray, a) -> b, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] + } +} + +test map_1() { + let result = + fixture_1 + |> map(with: fn(k, _) { k }) + get(result, foo) == Some(foo) +} + +test map_2() { + let result = + fixture_1 + |> map(with: fn(_, v) { v + 1 }) + get(result, foo) == Some(43) && size(result) == size(fixture_1) +} + +/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
+/// +/// ```aiken +/// let (value, _) = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.pop(key: "a") +/// +/// result == 100 +/// ``` +pub fn pop( + self: Dict, + key: ByteArray, +) -> (Option, Dict) { + do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) }) +} + +fn do_pop( + self: Pairs, + key k: ByteArray, + return: fn(Option, Pairs) -> result, +) -> result { + when self is { + [] -> return(None, []) + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + return(Some(v2), rest) + } else { + return(None, self) + } + } else { + do_pop( + rest, + k, + fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) }, + ) + } + } +} + +test pop_1() { + pop(empty, foo) == (None, empty) +} + +test pop_2() { + let m = + empty + |> insert(foo, 14) + pop(m, foo) == (Some(14), empty) +} + +test pop_3() { + let m = + empty + |> insert(foo, 14) + pop(m, bar) == (None, m) +} + +test pop_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + pop(m, foo) == (Some(14), empty |> insert(bar, 14)) +} + +test pop_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + pop(m, "bcd") == (None, m) +} + +// ## Combining + +/// Combine two dictionaries. If the same key exist in both the left and +/// right dictionary, values from the left are preferred (i.e. left-biaised). +/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union(left_dict, right_dict) |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union( + left: Dict, + right: Dict, +) -> Dict { + Dict { inner: do_union(left.inner, right.inner) } +} + +fn do_union( + left: Pairs, + right: Pairs, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) + } +} + +test union_1() { + union(fixture_1, empty) == fixture_1 +} + +test union_2() { + union(empty, fixture_1) == fixture_1 +} + +test union_3() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(baz, 1337) + union(left, right) == from_pairs( + [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)], + ) +} + +test union_4() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)]) +} + +/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present +/// in both dictionaries. The first value received correspond to the value in the left +/// dictionnary, whereas the second argument corresponds to the value in the right dictionnary. +/// +/// When passing `None`, the value is removed and not present in the union. 
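+///
+/// For instance (illustrative), passing `fn(_k, _left, _right) { None }` drops
+/// every key present in both dictionaries from the result, while keys present
+/// in only one of them are kept as-is.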
+/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union_with( +/// left_dict, +/// right_dict, +/// fn(_k, v1, v2) { Some(v1 + v2) }, +/// ) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union_with( + left: Dict, + right: Dict, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { inner: do_union_with(left.inner, right.inner, with) } +} + +fn do_union_with( + left: Pairs, + right: Pairs, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> + do_union_with(rest, do_insert_with(right, k, v, with), with) + } +} + +fn do_insert_with( + self: Pairs, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + when with(k, v, v2) is { + Some(combined) -> [Pair(k, combined), ..rest] + None -> rest + } + } else { + [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] + } + } + } +} + +test union_with_1() { + let left = + empty + |> insert(foo, 14) + + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + + let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) + + result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) +} + +// ## Transforming + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldl(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldl( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldl(self.inner, zero, with) +} + +fn do_foldl( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) + } +} + +test fold_1() { + foldl(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test fold_2() { + foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from right-to-left. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldr(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldr( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldr(self.inner, zero, with) +} + +fn do_foldr( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Get the inner list holding the dictionary data. 
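+///
+/// Since a `Dict` keeps its entries ordered by ascending key, the returned
+/// pairs come out sorted by key.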
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn to_pairs(self: Dict) -> Pairs { + self.inner +} + +test to_list_1() { + to_pairs(empty) == [] +} + +test to_list_2() { + to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak new file mode 100644 index 00000000..b8e7f675 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak @@ -0,0 +1,1411 @@ +use aiken/builtin +use aiken/primitive/bytearray +use aiken/primitive/int + +// ## Constructing + +/// Add an element in front of the list. Sometimes useful when combined with +/// other functions. +/// +/// ```aiken +/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] +/// ``` +pub fn push(self: List, elem: a) -> List { + [elem, ..self] +} + +test push_1() { + push([], 0) == [0] +} + +test push_2() { + push([2, 3], 1) == [1, 2, 3] +} + +/// Construct a list of a integer from a given range. +/// +/// ```aiken +/// list.range(0, 3) == [0, 1, 2, 3] +/// list.range(-1, 1) == [-1, 0, 1] +/// ``` +pub fn range(from: Int, to: Int) -> List { + if from > to { + [] + } else { + [from, ..range(from + 1, to)] + } +} + +test range_1() { + range(0, 3) == [0, 1, 2, 3] +} + +test range_2() { + range(-1, 1) == [-1, 0, 1] +} + +/// Construct a list filled with n copies of a value. +/// +/// ```aiken +/// list.repeat("na", 3) == ["na", "na", "na"] +/// ``` +pub fn repeat(elem: a, n_times: Int) -> List { + if n_times <= 0 { + [] + } else { + [elem, ..repeat(elem, n_times - 1)] + } +} + +test repeat_1() { + repeat(42, 0) == [] +} + +test repeat_2() { + repeat(14, 3) == [14, 14, 14] +} + +// ## Inspecting + +/// Determine if all elements of the list satisfy the given predicate. +/// +/// Note: an empty list always satisfies the predicate. +/// +/// ```aiken +/// list.all([], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n == 2 }) == False +/// ``` +pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> True + [x, ..xs] -> predicate(x) && all(xs, predicate) + } +} + +test all_1() { + all([1, 2, 3], fn(n) { n > 0 }) == True +} + +test all_2() { + all([1, 2, 3], fn(n) { n > 42 }) == False +} + +test all_3() { + all([], fn(n) { n == 42 }) == True +} + +/// Determine if at least one element of the list satisfies the given predicate. +/// +/// Note: an empty list never satisfies the predicate. +/// +/// ```aiken +/// list.any([], fn(n) { n > 2 }) == False +/// list.any([1, 2, 3], fn(n) { n > 0 }) == True +/// list.any([1, 2, 3], fn(n) { n == 2 }) == True +/// list.any([1, 2, 3], fn(n) { n < 0 }) == False +/// ``` +pub fn any(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> False + [x, ..xs] -> predicate(x) || any(xs, predicate) + } +} + +test any_1() { + any([1, 2, 3], fn(n) { n > 0 }) == True +} + +test any_2() { + any([1, 2, 3], fn(n) { n > 42 }) == False +} + +test any_3() { + any([], fn(n) { n == 42 }) == False +} + +/// Return Some(item) at the index or None if the index is out of range. The index is 0-based. 
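+///
+/// Negative indices are treated as out of range and also yield `None`.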
+/// +/// ```aiken +/// list.at([1, 2, 3], 1) == Some(2) +/// list.at([1, 2, 3], 42) == None +/// ``` +pub fn at(self: List, index: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if index == 0 { + Some(x) + } else { + at(xs, index - 1) + } + } +} + +test at_1() { + at([1, 2, 3], -1) == None +} + +test at_2() { + at([], 0) == None +} + +test at_3() { + at([1, 2, 3], 3) == None +} + +test at_4() { + at([1], 0) == Some(1) +} + +test at_5() { + at([1, 2, 3], 2) == Some(3) +} + +/// Count how many items in the list satisfy the given predicate. +/// +/// ```aiken +/// list.count([], fn(a) { a > 2}) == 0 +/// list.count([1, 2, 3], fn(a) { n > 0 }) == 3 +/// list.count([1, 2, 3], fn(a) { n >= 2 }) == 2 +/// list.count([1, 2, 3], fn(a) { n > 5 }) == 0 +/// ``` +pub fn count(self: List, predicate: fn(a) -> Bool) -> Int { + foldr( + self, + 0, + fn(item, total) { + if predicate(item) { + total + 1 + } else { + total + } + }, + ) +} + +test count_empty() { + count([], fn(a) { a > 2 }) == 0 +} + +test count_all() { + count([1, 2, 3], fn(a) { a > 0 }) == 3 +} + +test count_some() { + count([1, 2, 3], fn(a) { a >= 2 }) == 2 +} + +test count_none() { + count([1, 2, 3], fn(a) { a > 5 }) == 0 +} + +/// Find the first element satisfying the given predicate, if any. +/// +/// ```aiken +/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2) +/// list.find([4, 5, 6], fn(x) { x == 2 }) == None +/// ``` +pub fn find(self: List, predicate: fn(a) -> Bool) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if predicate(x) { + Some(x) + } else { + find(xs, predicate) + } + } +} + +test find_1() { + find([1, 2, 3], fn(x) { x == 1 }) == Some(1) +} + +test find_2() { + find([1, 2, 3], fn(x) { x > 42 }) == None +} + +test find_3() { + find([], fn(_) { True }) == None +} + +/// Figures out whether a list contain the given element. +/// +/// ```aiken +/// list.has([1, 2, 3], 2) == True +/// list.has([1, 2, 3], 14) == False +/// list.has([], 14) == False +/// ``` +pub fn has(self: List, elem: a) -> Bool { + when self is { + [] -> False + [x, ..xs] -> + if x == elem { + True + } else { + has(xs, elem) + } + } +} + +test has_1() { + has([1, 2, 3], 1) == True +} + +test has_2() { + has([1, 2, 3], 14) == False +} + +test has_3() { + has([], 14) == False +} + +/// Get the first element of a list +/// +/// ```aiken +/// list.head([1, 2, 3]) == Some(1) +/// list.head([]) == None +/// ``` +pub fn head(self: List) -> Option { + when self is { + [] -> None + _ -> Some(builtin.head_list(self)) + } +} + +test head_1() { + head([1, 2, 3]) == Some(1) +} + +test head_2() { + head([]) == None +} + +/// Checks whether a list is empty. +/// +/// ```aiken +/// list.is_empty([]) == True +/// list.is_empty([1, 2, 3]) == False +/// ``` +pub fn is_empty(self: List) -> Bool { + when self is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty([]) == True +} + +test is_empty_2() { + is_empty([1, 2, 3]) == False +} + +/// Gets the index of an element of a list, if any. Otherwise, returns None. 
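+///
+/// When the element occurs several times, the index of its first occurrence
+/// is returned.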
+/// +/// ```aiken +/// list.index_of([1, 5, 2], 2) == Some(2) +/// list.index_of([1, 7, 3], 4) == None +/// list.index_of([1, 0, 9, 6], 6) == 3 +/// list.index_of([], 6) == None +/// ``` +pub fn index_of(self: List, elem: a) -> Option { + do_index_of(self, elem, 0) +} + +fn do_index_of(self: List, elem: a, i: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if x == elem { + Some(i) + } else { + do_index_of(xs, elem, i + 1) + } + } +} + +test index_of_1() { + index_of([1, 5, 2], 2) == Some(2) +} + +test index_of_2() { + index_of([1, 7, 3], 4) == None +} + +test index_of_3() { + index_of([1, 0, 9, 6], 6) == Some(3) +} + +test index_of_4() { + index_of([], 6) == None +} + +/// Get the last in the given list, if any. +/// +/// ```aiken +/// list.last([]) == None +/// list.last([1, 2, 3]) == Some(3) +/// ``` +pub fn last(self: List) -> Option { + when self is { + [] -> None + [x] -> Some(x) + [_, ..xs] -> last(xs) + } +} + +test last_1() { + last([]) == None +} + +test last_2() { + last([1]) == Some(1) +} + +test last_3() { + last([1, 2, 3, 4]) == Some(4) +} + +/// Get the number of elements in the given list. +/// +/// ```aiken +/// list.length([]) == 0 +/// list.length([1, 2, 3]) == 3 +/// ``` +pub fn length(self: List) -> Int { + when self is { + [] -> 0 + [_, ..xs] -> 1 + length(xs) + } +} + +test length_1() { + length([]) == 0 +} + +test length_2() { + length([1, 2, 3]) == 3 +} + +// ## Modifying + +// ### Extracting + +/// Remove the first occurrence of the given element from the list. +/// +/// ```aiken +/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] +/// list.delete([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn delete(self: List, elem: a) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if x == elem { + xs + } else { + [x, ..delete(xs, elem)] + } + } +} + +test delete_1() { + delete([], 42) == [] +} + +test delete_2() { + delete([1, 2, 3, 1], 1) == [2, 3, 1] +} + +test delete_3() { + delete([1, 2, 3], 14) == [1, 2, 3] +} + +test delete_4() { + delete([2], 2) == [] +} + +/// Drop the first `n` elements of a list. +/// +/// ```aiken +/// list.drop([1, 2, 3], 2) == [3] +/// list.drop([], 42) == [] +/// list.drop([1, 2, 3], 42) == [] +/// ``` +pub fn drop(self: List, n: Int) -> List { + if n <= 0 { + self + } else { + when self is { + [] -> [] + [_x, ..xs] -> drop(xs, n - 1) + } + } +} + +test drop_1() { + drop([], 42) == [] +} + +test drop_2() { + drop([1, 2, 3], 2) == [3] +} + +/// Returns the suffix of the given list after removing all elements that satisfy the predicate. +/// +/// ```aiken +/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] +/// list.drop_while([], fn(x) { x > 2 }) == [] +/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] +/// ``` +pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + drop_while(xs, predicate) + } else { + self + } + } +} + +test drop_while_1() { + drop_while([], fn(x) { x > 2 }) == [] +} + +test drop_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] +} + +test drop_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x == 42 }) == xs +} + +test drop_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x < 42 }) == [] +} + +/// Produce a list of elements that satisfy a predicate. 
+/// +/// ```aiken +/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] +/// list.filter([], fn(x) { x > 2 }) == [] +/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] +/// ``` +pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..filter(xs, predicate)] + } else { + filter(xs, predicate) + } + } +} + +test filter_1() { + filter([], fn(x) { x > 0 }) == [] +} + +test filter_2() { + let xs = [1, 2, 3, 4, 5, 6] + filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] +} + +test filter_3() { + let filter_foldr = + fn(xs, f) { + foldr( + xs, + [], + fn(x, ys) { + if f(x) { + [x, ..ys] + } else { + ys + } + }, + ) + } + + let is_odd = + fn(n) { builtin.mod_integer(n, 2) != 0 } + + filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) +} + +/// Produce a list of transformed elements that satisfy a predicate. +/// +/// ```aiken +/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } +/// list.filter_map([1, 2, 3], transform) == [3, 9] +/// ``` +pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when predicate(x) is { + None -> filter_map(xs, predicate) + Some(y) -> [y, ..filter_map(xs, predicate)] + } + } +} + +test filter_map_1() { + filter_map([], fn(_) { Some(42) }) == [] +} + +test filter_map_2() { + filter_map( + [1, 2, 3, 4, 5, 6], + fn(x) { + if builtin.mod_integer(x, 2) != 0 { + Some(3 * x) + } else { + None + } + }, + ) == [3, 9, 15] +} + +/// Return all elements except the last one. +/// +/// ```aiken +/// list.init([]) == None +/// list.init([1, 2, 3]) == Some([1, 2]) +/// ``` +pub fn init(self: List) -> Option> { + when self is { + [] -> None + _ -> Some(do_init(self)) + } +} + +fn do_init(self: List) -> List { + when self is { + [] -> fail @"unreachable" + [_] -> [] + [x, ..xs] -> [x, ..do_init(xs)] + } +} + +test init_1() { + init([]) == None +} + +test init_2() { + init([1]) == Some([]) +} + +test init_3() { + init([1, 2, 3, 4]) == Some([1, 2, 3]) +} + +/// Returns a tuple with all elements that satisfy the predicate at first +/// element, and the rest as second element. +/// +/// ```aiken +/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +/// ``` +pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> { + let (left, right) = partition(xs, predicate) + if predicate(x) { + ([x, ..left], right) + } else { + (left, [x, ..right]) + } + } + } +} + +test partition_1() { + partition([], fn(x) { x > 2 }) == ([], []) +} + +test partition_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) +} + +test partition_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x == 42 }) == ([], xs) +} + +test partition_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x < 42 }) == (xs, []) +} + +test partition_5() { + partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +} + +/// Extract a sublist from the given list using 0-based indexes. Negative +/// indexes wrap over, so `-1` refers to the last element of the list. 
+/// +/// ```aiken +/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +/// ``` +pub fn slice(self: List, from: Int, to: Int) -> List { + let (i, l) = + if from >= 0 { + (from, None) + } else { + let l = length(self) + (l + from, Some(l)) + } + + let j = + if to >= 0 { + to - i + 1 + } else { + when l is { + Some(l) -> l + to - i + 1 + None -> length(self) + to - i + 1 + } + } + + self + |> drop(i) + |> take(j) +} + +test slice_1() { + slice([1, 2, 3], 0, 2) == [1, 2, 3] +} + +test slice_2() { + slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +} + +test slice_3() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +} + +test slice_4() { + slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +} + +test slice_5() { + slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] +} + +test slice_6() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] +} + +/// Cut a list in two, such that the first list contains the given number of / +/// elements and the second list contains the rest. +/// +/// Fundamentally equivalent to (but more efficient): +/// +/// ```aiken +/// // span(xs, n) == (take(xs, n), drop(xs, n)) +/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) +/// ``` +pub fn span(self: List, n: Int) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> + if n <= 0 { + ([], self) + } else { + let (left, right) = span(xs, n - 1) + ([x, ..left], right) + } + } +} + +test span_1() { + span([], 2) == ([], []) +} + +test span_2() { + span([1, 2, 3], 2) == ([1, 2], [3]) +} + +test span_3() { + span([1, 2, 3], -1) == ([], [1, 2, 3]) +} + +test span_4() { + span([1, 2, 3], 42) == ([1, 2, 3], []) +} + +/// Get elements of a list after the first one, if any. +/// +/// ```aiken +/// list.tail([]) == None +/// list.tail([1, 2, 3]) == Some([2, 3]) +/// ``` +pub fn tail(self: List) -> Option> { + when self is { + [] -> None + [_, ..xs] -> Some(xs) + } +} + +test tail_1() { + tail([1, 2, 3]) == Some([2, 3]) +} + +test tail_2() { + tail([]) == None +} + +/// Get the first `n` elements of a list. +/// +/// ```aiken +/// list.take([1, 2, 3], 2) == [1, 2] +/// list.take([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn take(self: List, n: Int) -> List { + if n <= 0 { + [] + } else { + when self is { + [] -> [] + [x, ..xs] -> [x, ..take(xs, n - 1)] + } + } +} + +test take_1() { + take([], 42) == [] +} + +test take_2() { + take([1, 2, 3], 2) == [1, 2] +} + +/// Returns the longest prefix of the given list where all elements satisfy the predicate. +/// +/// ```aiken +/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] +/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] +/// ``` +pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..take_while(xs, predicate)] + } else { + [] + } + } +} + +test take_while_1() { + take_while([], fn(x) { x > 2 }) == [] +} + +test take_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] +} + +test take_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x == 42 }) == [] +} + +test take_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x < 42 }) == xs +} + +/// Removes duplicate elements from a list. 
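+///
+/// The first occurrence of each element is kept, so the relative order of the
+/// remaining elements is preserved.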
+/// +/// ```aiken +/// list.unique([1, 2, 3, 1]) == [1, 2, 3] +/// ``` +pub fn unique(self: List) -> List { + when self is { + [] -> [] + [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] + } +} + +test unique_1() { + unique([]) == [] +} + +test unique_2() { + let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] + unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] +} + +// ### Mapping + +/// Map elements of a list into a new list and flatten the result. +/// +/// ```aiken +/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] +/// ``` +pub fn flat_map(self: List, with: fn(a) -> List) -> List { + foldr(self, [], fn(x, xs) { concat(with(x), xs) }) +} + +test flat_map_1() { + flat_map([], fn(a) { [a] }) == [] +} + +test flat_map_2() { + flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] +} + +/// Perform an action for each element of a list. +/// +/// ```aiken +/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) +/// ``` +pub fn for_each(self: List, do: fn(a) -> Void) -> Void { + foldr(self, Void, fn(x, _) { do(x) }) +} + +test for_each_1() { + for_each( + [@"hello", @"world"], + do: fn(lbl) { + trace lbl + Void + }, + ) +} + +/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. +/// +/// ```aiken +/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] +/// ``` +pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { + do_indexed_map(0, self, with) +} + +fn do_indexed_map( + n: Int, + self: List, + with: fn(Int, a) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] + } +} + +test indexed_map_1() { + indexed_map([], fn(i, _n) { i }) == [] +} + +test indexed_map_2() { + indexed_map( + [4, 8, 13, 2], + fn(i, n) { + if n == 8 { + n + } else { + i + } + }, + ) == [0, 8, 2, 3] +} + +/// Apply a function to each element of a list. +/// +/// ```aiken +/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +/// ``` +pub fn map(self: List, with: fn(a) -> result) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(x), ..map(xs, with)] + } +} + +test map_1() { + map([], fn(n) { n + 1 }) == [] +} + +test map_2() { + map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +} + +/// Apply a function of two arguments, combining elements from two lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +/// ``` +pub fn map2( + self: List, + bs: List, + with: fn(a, b) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] + } + } +} + +test map2_1() { + map2([], [1, 2, 3], fn(a, b) { a + b }) == [] +} + +test map2_2() { + map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +} + +test map2_3() { + map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] +} + +/// Apply a function of three arguments, combining elements from three lists. +/// +/// Note: if one list is longer, the extra elements are dropped. 
+/// +/// ```aiken +/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +/// ``` +pub fn map3( + self: List, + bs: List, + cs: List, + with: fn(a, b, c) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> + when cs is { + [] -> [] + [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] + } + } + } +} + +test map3_1() { + map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] +} + +test map3_2() { + map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +} + +/// Return the list with its elements in the reserve order. +/// +/// ```aiken +/// list.reverse([1, 2, 3]) == [3, 2, 1] +/// ``` +pub fn reverse(self: List) -> List { + foldl(self, [], fn(x, xs) { [x, ..xs] }) +} + +test reverse_1() { + reverse([]) == [] +} + +test reverse_2() { + reverse([1, 2, 3]) == [3, 2, 1] +} + +/// Sort a list in ascending order using the given comparison function. +/// +/// ```aiken +/// use aiken/int +/// +/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] +/// sort([1, 2, 3], int.compare) == [1, 2, 3] +/// ``` +pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [] + [x, ..xs] -> insert(sort(xs, compare), x, compare) + } +} + +fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [e] + [x, ..xs] -> + if compare(e, x) == Less { + [e, ..self] + } else { + [x, ..insert(xs, e, compare)] + } + } +} + +test sort_1() { + let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_2() { + let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_3() { + let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_4() { + sort([], int.compare) == [] +} + +/// Decompose a list of tuples into a tuple of lists. +/// +/// ``` +/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +/// ``` +pub fn unzip(self: List<(a, b)>) -> (List, List) { + when self is { + [] -> ([], []) + [(a, b), ..xs] -> { + let (a_tail, b_tail) = unzip(xs) + ([a, ..a_tail], [b, ..b_tail]) + } + } +} + +test unzip_1() { + unzip([]) == ([], []) +} + +test unzip_2() { + unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +} + +// ## Combining + +/// Merge two lists together. +/// +/// ```aiken +/// list.concat([], []) == [] +/// list.concat([], [1, 2, 3]) == [1, 2, 3] +/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: List, right: List) -> List { + when left is { + [] -> right + [x, ..xs] -> [x, ..concat(xs, right)] + } +} + +test concat_1() { + concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +} + +test concat_2() { + concat([1, 2, 3], []) == [1, 2, 3] +} + +test concat_3() { + concat([], [1, 2, 3]) == [1, 2, 3] +} + +/// Remove the first occurrence of each element of the second list from the first one. 
+/// +/// ``` +/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +/// list.difference([1, 2, 3], []) == [1, 2, 3] +/// ``` +pub fn difference(self: List, with: List) -> List { + when with is { + [] -> self + [x, ..xs] -> difference(delete(self, x), xs) + } +} + +test difference_1() { + difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +} + +test difference_2() { + difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +} + +test difference_3() { + difference([1, 2, 3], []) == [1, 2, 3] +} + +test difference_4() { + difference([], [1, 2, 3]) == [] +} + +/// Combine two lists together. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +/// ``` +pub fn zip(self: List, bs: List) -> List<(a, b)> { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [(x, y), ..zip(xs, ys)] + } + } +} + +test zip_1() { + zip([], [1, 2, 3]) == [] +} + +test zip_2() { + zip([1, 2, 3], []) == [] +} + +test zip_3() { + zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +} + +// ## Transforming + +/// Reduce a list from left to right. +/// +/// ```aiken +/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] +/// ``` +pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> foldl(xs, with(x, zero), with) + } +} + +type Fold2 = + fn(a, b) -> result + +pub fn foldl2( + self: List, + zero_a: a, + zero_b: b, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> result { + do_foldl2(self, with, return)(zero_a, zero_b) +} + +fn do_foldl2( + self: List, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> Fold2 { + when self is { + [] -> return + [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) + } +} + +test foldl2_optimized() { + let + len, + sum, + <- + foldl2( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + 0, + 0, + fn(n, len, sum, return) { return(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_classic() { + let (len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + (0, 0), + fn(n, (len, sum)) { (len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +type Foo { + Foo(Int, Int) +} + +test foldl2_pair() { + let Pair(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Pair(0, 0), + fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_foo() { + let Foo(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Foo(0, 0), + fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl_1() { + foldl([], 0, fn(_, _) { 1 }) == 0 +} + +test foldl_2() { + foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldl_3() { + foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] +} + +/// Reduce a list from right to left. 
+/// +/// ```aiken +/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] +/// ``` +pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> with(x, foldr(xs, zero, with)) + } +} + +test foldr_1() { + foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldr_2() { + foldr( + [1, 2, 3], + "", + fn(n, _str) { + if builtin.mod_integer(n, 2) == 0 { + "foo" + } else { + "bar" + } + }, + ) == "bar" +} + +test foldr_3() { + foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] +} + +/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. +/// +/// ```aiken +/// let group = fn(i, x, xs) { [(i, x), ..xs] } +/// list.indexed_foldr(["a", "b", "c"], [], group) == [ +/// (0, "a"), +/// (1, "b"), +/// (2, "c") +/// ] +/// ``` +pub fn indexed_foldr( + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + do_indexed_foldr(0, self, zero, with) +} + +fn do_indexed_foldr( + n: Int, + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + when self is { + [] -> zero + [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) + } +} + +test indexed_foldr_1() { + indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 +} + +test indexed_foldr_2() { + let letters = ["a", "b", "c"] + indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ + (0, "a"), (1, "b"), (2, "c"), + ] +} + +/// Reduce a list from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] +/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True +/// ``` +pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce([], 0, fn(n, total) { n + total }) == 0 +} + +test reduce_2() { + reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +} + +test reduce_3() { + reduce([True, False, True], False, fn(left, right) { left || right }) == True +} + +test reduce_4() { + reduce( + [#[1], #[2], #[3]], + #[9], + fn(left, right) { bytearray.concat(left, right) }, + ) == #[9, 1, 2, 3] +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak new file mode 100644 index 00000000..01bfe763 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak @@ -0,0 +1,833 @@ +//// A module for working with associative lists (a.k.a `Pairs`). +//// +//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers +//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is +//// a type-alias to `List>`. +//// +//// > [!CAUTION] +//// > +//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption +//// > about the ordering of elements within the list. As a result, lookup +//// > functions do traverse the entire list when invoked. They are also not _sets_, +//// > and thus allow for duplicate keys. This is reflected in the functions used +//// > to interact with them. 
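+////
+//// For illustration, the following is a perfectly valid `Pairs`, even though
+//// it is unordered and contains a duplicate key; the lookup functions below
+//// simply traverse it from left to right:
+////
+//// ```aiken
+//// let fixture = [Pair("b", 2), Pair("a", 1), Pair("a", 3)]
+////
+//// pairs.get_all(fixture, "a") == [1, 3]
+//// ```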
+ +use aiken/builtin +use aiken/primitive/bytearray + +// ## Inspecting + +/// Get all values in the alist associated with a given key. +/// +/// ```aiken +/// pairs.get_all([], "a") == [] +/// pairs.get_all([Pair("a", 1)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +/// ``` +pub fn get_all(self: Pairs, key k: key) -> List { + when self is { + [] -> [] + [Pair(k2, v), ..rest] -> + if k == k2 { + [v, ..get_all(rest, k)] + } else { + get_all(rest, k) + } + } +} + +test get_all_1() { + get_all([], "a") == [] +} + +test get_all_2() { + get_all([Pair("a", 1)], "a") == [1] +} + +test get_all_3() { + get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +} + +test get_all_4() { + get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +} + +test get_all_5() { + get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the first one is returned. +/// +/// ```aiken +/// pairs.get_first([], "a") == None +/// pairs.get_first([Pair("a", 1)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +/// ``` +pub fn get_first(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + Some(v) + } else { + get_first(rest, k) + } + } +} + +test get_first_1() { + get_first([], "a") == None +} + +test get_first_2() { + get_first([Pair("a", 1)], "a") == Some(1) +} + +test get_first_3() { + get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_first_4() { + get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +} + +test get_first_5() { + get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the last one is returned. +/// +/// ```aiken +/// pairs.get_last([], "a") == None +/// pairs.get_last([Pair("a", 1)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +/// ``` +pub fn get_last(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + when get_last(rest, k) is { + None -> Some(v) + some -> some + } + } else { + get_last(rest, k) + } + } +} + +test get_last_1() { + get_last([], "a") == None +} + +test get_last_2() { + get_last([Pair("a", 1)], "a") == Some(1) +} + +test get_last_3() { + get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_last_4() { + get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +} + +test get_last_5() { + get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Finds all keys in the alist associated with a given value. 
+/// +/// ```aiken +/// pairs.find_all([], 1) == [] +/// pairs.find_all([Pair("a", 1)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] +/// ``` +pub fn find_all(self: Pairs, v: value) -> List { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if v == v2 { + [k2, ..find_all(rest, v)] + } else { + find_all(rest, v) + } + } +} + +test find_all_1() { + find_all([], "a") == [] +} + +test find_all_2() { + find_all([Pair("a", 14)], 14) == ["a"] +} + +test find_all_3() { + find_all([Pair("a", 14)], 42) == [] +} + +test find_all_4() { + find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] +} + +/// Finds the first key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_first([], 1) == None +/// pairs.find_first([Pair("a", 1)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") +/// ``` +pub fn find_first(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + find_first(rest, v) + } + } +} + +test find_first_1() { + find_first([], "a") == None +} + +test find_first_2() { + find_first([Pair("a", 14)], 14) == Some("a") +} + +test find_first_3() { + find_first([Pair("a", 14)], 42) == None +} + +test find_first_4() { + find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") +} + +/// Finds the last key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_last([], 1) == None +/// pairs.find_last([Pair("a", 1)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") +/// ``` +pub fn find_last(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + when find_last(rest, v) is { + None -> Some(k2) + some -> some + } + } else { + find_last(rest, v) + } + } +} + +test find_last_1() { + find_last([], "a") == None +} + +test find_last_2() { + find_last([Pair("a", 14)], 14) == Some("a") +} + +test find_last_3() { + find_last([Pair("a", 14)], 42) == None +} + +test find_last_4() { + find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") +} + +/// Check if a key exists in the pairs. +/// +/// ```aiken +/// pairs.has_key([], "a") == False +/// pairs.has_key([Pair("a", 1)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True +/// ``` +pub fn has_key(self: Pairs, k: key) -> Bool { + when self is { + [] -> False + // || is lazy so this is fine + [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) + } +} + +test has_key_1() { + !has_key([], "a") +} + +test has_key_2() { + has_key([Pair("a", 14)], "a") +} + +test has_key_3() { + !has_key([Pair("a", 14)], "b") +} + +test has_key_4() { + has_key([Pair("a", 14), Pair("b", 42)], "b") +} + +test has_key_5() { + has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") +} + +/// Extract all the keys present in a given `Pairs`. 
+/// +/// ```aiken +/// pairs.keys([]) == [] +/// pairs.keys([Pair("a", 1)]) == ["a"] +/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] +/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] +/// ``` +pub fn keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..keys(rest)] + } +} + +test keys_1() { + keys([]) == [] +} + +test keys_2() { + keys([Pair("a", 0)]) == ["a"] +} + +test keys_3() { + keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] +} + +/// Extract all the values present in a given `Pairs`. +/// +/// ```aiken +/// pairs.values([]) == [] +/// pairs.values([Pair("a", 1)]) == [1] +/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +/// ``` +pub fn values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..values(rest)] + } +} + +test values_1() { + values([]) == [] +} + +test values_2() { + values([Pair("a", 1)]) == [1] +} + +test values_3() { + values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +} + +test values_4() { + values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +} + +// ## Modifying + +/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. +/// +/// ```aiken +/// pairs.delete_all([], "a") == [] +/// pairs.delete_all([Pair("a", 1)], "a") == [] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] +/// ``` +pub fn delete_all(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + delete_all(rest, k) + } else { + [Pair(k2, v2), ..delete_all(rest, k)] + } + } +} + +test delete_all_1() { + delete_all([], "a") == [] +} + +test delete_all_2() { + delete_all([Pair("a", 14)], "a") == [] +} + +test delete_all_3() { + let fixture = [Pair("a", 14)] + delete_all(fixture, "b") == fixture +} + +test delete_all_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_all(fixture, "a") == [Pair("b", 2)] +} + +/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **first** key found is deleted. +/// +/// ```aiken +/// pairs.delete_first([], "a") == [] +/// pairs.delete_first([Pair("a", 1)], "a") == [] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] +/// ``` +pub fn delete_first(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + rest + } else { + [Pair(k2, v2), ..delete_first(rest, k)] + } + } +} + +test delete_first_1() { + delete_first([], "a") == [] +} + +test delete_first_2() { + delete_first([Pair("a", 14)], "a") == [] +} + +test delete_first_3() { + let fixture = [Pair("a", 14)] + delete_first(fixture, "b") == fixture +} + +test delete_first_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] +} + +/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
+/// +/// ```aiken +/// pairs.delete_last([], "a") == [] +/// pairs.delete_last([Pair("a", 1)], "a") == [] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] +/// ``` +pub fn delete_last(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + let tail = delete_last(rest, k) + if tail == rest { + rest + } else { + [Pair(k2, v2), ..tail] + } + } else { + [Pair(k2, v2), ..delete_last(rest, k)] + } + } +} + +test delete_last_1() { + delete_last([], "a") == [] +} + +test delete_last_2() { + delete_last([Pair("a", 14)], "a") == [] +} + +test delete_last_3() { + let fixture = [Pair("a", 14)] + delete_last(fixture, "b") == fixture +} + +test delete_last_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// the value is added in front. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] +/// ``` +pub fn insert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..self] + } else { + [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test insert_by_ascending_key_1() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14), Pair("foo", 42)] +} + +test insert_by_ascending_key_2() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("bar", 14, bytearray.compare) + |> insert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies +/// how to combine two values on a key conflict. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. 
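+///
+/// On a key conflict, the combining function receives the newly inserted
+/// value as its first argument and the value already present as its second
+/// argument.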
+/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let add_integer = fn(x, y) { x + y } +/// +/// let result = +/// [] +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) +/// +/// result == [Pair("bar", 2), Pair("foo", 4)] +/// ``` +pub fn insert_with_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, + with: fn(value, value) -> value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, with(v, v2)), ..rest] + } else { + [ + Pair(k2, v2), + ..insert_with_by_ascending_key(rest, k, v, compare, with) + ] + } + } + } +} + +test insert_with_by_ascending_key_1() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + + m == [Pair("foo" |> builtin.b_data, 56)] +} + +test insert_with_by_ascending_key_2() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "baz" |> builtin.b_data, + 1337, + compare_un_b_data, + builtin.add_integer, + ) + + m == [ + Pair("bar" |> builtin.b_data, 14), + Pair("baz" |> builtin.b_data, 1337), + Pair("foo" |> builtin.b_data, 42), + ] +} + +test insert_with_by_ascending_key_3() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let result = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 1, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 2, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 3, + compare_un_b_data, + builtin.add_integer, + ) + + result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] +} + +/// Apply a function to all key-value pairs in a alist, replacing the values. +/// +/// ```aiken +/// let fixture = [Pair("a", 100), Pair("b", 200)] +/// +/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] +/// ``` +pub fn map( + self: Pairs, + with: fn(key, value) -> result, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] + } +} + +test map_1() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] +} + +test map_2() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// its value is replaced. 
+/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3)] +/// ``` +pub fn repsert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test repsert_by_ascending_key_1() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14)] +} + +test repsert_by_ascending_key_2() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("bar", 14, bytearray.compare) + |> repsert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +// ## Transforming + +/// Fold over the key-value pairs in a pairs. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldl( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) + } +} + +test foldl_1() { + foldl([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldl_2() { + foldl( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +/// Fold over the key-value pairs in a Pairs. The fold direction follows the +/// order of elements in the Pairs and is done from right-to-left. 
+/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldr( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +test foldr_3() { + let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] + + foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak new file mode 100644 index 00000000..46a7dda5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak @@ -0,0 +1,147 @@ +use aiken/builtin + +pub type VerificationKey = + ByteArray + +pub type VerificationKeyHash = + Hash + +pub type Script = + ByteArray + +pub type ScriptHash = + Hash + +pub type Signature = + ByteArray + +pub type DataHash = + Hash + +/// A `Hash` is nothing more than a `ByteArray`, but it carries extra +/// information for readability. +/// +/// On-chain, any hash digest value is represented as a plain 'ByteArray'. +/// Though in practice, hashes come from different sources and have +/// different semantics. +/// +/// Hence, while this type-alias doesn't provide any strong type-guarantees, +/// it helps writing functions signatures with more meaningful types than mere +/// 'ByteArray'. +/// +/// Compare for example: +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(ByteArray) +/// Script(ByteArray) +/// } +/// ``` +/// +/// with +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(Hash) +/// Script(Hash) +/// } +/// ``` +/// +/// Both are strictly equivalent, but the second reads much better. +pub type Hash = + ByteArray + +// ## Hashing + +/// A blake2b-224 hash algorithm. +/// +/// Typically used for: +/// +/// - [`Credential`](../cardano/address.html#Credential) +/// - [`PolicyId`](../cardano/assets.html#PolicyId) +/// +/// Note: there's no function to calculate blake2b-224 hash digests on-chain. +pub opaque type Blake2b_224 { + Blake2b_224 +} + +/// Compute the blake2b-224 hash digest (28 bytes) of some data. +pub fn blake2b_224(bytes: ByteArray) -> Hash { + builtin.blake2b_224(bytes) +} + +/// A blake2b-256 hash algorithm. +/// +/// Typically used for: +/// +/// - [`TransactionId`](../cardano/transaction.html#TransactionId) +pub opaque type Blake2b_256 { + Blake2b_256 +} + +/// Compute the blake2b-256 hash digest (32 bytes) of some data. +pub fn blake2b_256(bytes: ByteArray) -> Hash { + builtin.blake2b_256(bytes) +} + +/// A Keccak-256 hash algorithm. +pub opaque type Keccak_256 { + Keccak_256 +} + +/// Compute the keccak-256 hash digest (32 bytes) of some data. +pub fn keccak_256(bytes: ByteArray) -> Hash { + builtin.keccak_256(bytes) +} + +/// A SHA2-256 hash algorithm. +pub opaque type Sha2_256 { + Sha2_256 +} + +/// Compute the sha2-256 hash digest (32 bytes) of some data. +pub fn sha2_256(bytes: ByteArray) -> Hash { + builtin.sha2_256(bytes) +} + +/// A SHA3-256 hash algorithm. 
+pub opaque type Sha3_256 {
+  Sha3_256
+}
+
+/// Compute the sha3-256 hash digest (32 bytes) of some data.
+pub fn sha3_256(bytes: ByteArray) -> Hash<Sha3_256, a> {
+  builtin.sha3_256(bytes)
+}
+
+// ## Verifying signatures
+
+/// Verify an ECDSA signature (over secp256k1) using the given verification key.
+/// Returns `True` when the signature is valid.
+pub fn verify_ecdsa_signature(
+  key: VerificationKey,
+  msg: ByteArray,
+  sig: Signature,
+) -> Bool {
+  builtin.verify_ecdsa_secp256k1_signature(key, msg, sig)
+}
+
+/// Verify an Ed25519 signature using the given verification key.
+/// Returns `True` when the signature is valid.
+pub fn verify_ed25519_signature(
+  key: VerificationKey,
+  msg: ByteArray,
+  sig: Signature,
+) -> Bool {
+  builtin.verify_ed25519_signature(key, msg, sig)
+}
+
+/// Verify a Schnorr signature (over secp256k1) using the given verification key.
+/// Returns `True` when the signature is valid.
pub fn verify_schnorr_signature(
+  key: VerificationKey,
+  msg: ByteArray,
+  sig: Signature,
+) -> Bool {
+  builtin.verify_schnorr_secp256k1_signature(key, msg, sig)
+}
diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak
new file mode 100644
index 00000000..d7b4cc19
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak
@@ -0,0 +1,115 @@
+//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve.
+////
+//// The key functionalities provided by this module include:
+//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations.
+//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic.
+//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form.
+//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction.
+//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography.
+//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols.
+////
+//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group.
+
+use aiken/builtin
+use aiken/crypto/bls12_381/scalar.{Scalar}
+
+/// The compressed generator of the G1 group of the BLS12-381 curve.
+/// This constant represents a fixed base point on the elliptic curve.
+/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points.
+/// More explicitly, any points in plutus data or scripts must be decompressed before usage onchain.
+pub const generator: G1Element = + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" + +test generator_1() { + builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G1 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G1Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", + ), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", + ), + } +} + +/// Compresses a point in the G1 group into a more compact representation. +/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g1_compress(point) +} + +test compress_1() { + compress( + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", + ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" +} + +/// Decompresses a point in the G1 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g1_uncompress(bytes) +} + +pub fn equal(left, right) { + builtin.bls12_381_g1_equal(left, right) +} + +test equal_1() { + equal(generator, generator) +} + +/// Adds two points in the G1 group. +pub fn add(left, right) { + builtin.bls12_381_g1_add(left, right) +} + +/// Subtracts one point in the G1 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G1 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
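+///
+/// For example, scaling by two is the same as adding a point to itself
+/// (this mirrors the `scale_1` test below):
+///
+/// ```aiken
+/// expect Some(two) = scalar.new(2)
+///
+/// g1.scale(g1.generator, two) == g1.add(g1.generator, g1.generator)
+/// ```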
+pub fn scale(point, e: Scalar) { + builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G1 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { + builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak new file mode 100644 index 00000000..7a2013db --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak @@ -0,0 +1,124 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G2 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const generator: G2Element = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + +test generator_1() { + builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G2 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. 
+/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G2Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", + ), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", + ), + } +} + +/// Compresses a point in the G2 group into a more compact representation. +/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g2_compress(point) +} + +test compress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" +} + +/// Decompresses a point in the G2 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g2_uncompress(bytes) +} + +test decompress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + generator == g2 +} + +pub fn equal(left, right) { + builtin.bls12_381_g2_equal(left, right) +} + +test equal_1() { + equal( + generator, + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", + ) +} + +/// Adds two points in the G2 group. 
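+///
+/// For example, adding the additive identity (`zero`) leaves a point unchanged:
+///
+/// ```aiken
+/// g2.equal(g2.add(g2.generator, g2.zero), g2.generator)
+/// ```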
+pub fn add(left, right) { + builtin.bls12_381_g2_add(left, right) +} + +/// Subtracts one point in the G2 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G2 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. +pub fn scale(point, e: Scalar) { + builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G2 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes, domain_separation_tag) { + builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak new file mode 100644 index 00000000..cf028ad7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak @@ -0,0 +1,255 @@ +//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. +//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. +//// +//// More explicitly, we have the identity: +//// +//// ```aiken +//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 +//// ``` +//// +//// where, +//// +//// ```aiken +//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 +//// ``` +//// +//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. +//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. + +use aiken/builtin + +/// The prime number defining the scalar field of the BLS12-381 curve. +pub const field_prime = + 52435875175126190479447740508185965837690552500527637822603658699938581184513 + +/// Represents the additive identity (zero) in the `Scalar` field. +pub const zero: Scalar = Scalar(0) + +/// Represents the multiplicative identity (one) in the `Scalar` field. +pub const one: Scalar = Scalar(1) + +/// Opaque type representing an element of the finite field `Scalar`. +pub opaque type Scalar { + integer: Int, +} + +// ## Constructing + +/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. +/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
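+///
+/// For example:
+///
+/// ```aiken
+/// scalar.new(-1) == None
+/// scalar.new(scalar.field_prime) == None
+/// ```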
+pub fn new(n: Int) -> Option<Scalar> {
+  if n >= 0 && n < field_prime {
+    Some(Scalar(n))
+  } else {
+    None
+  }
+}
+
+test new_1() {
+  and {
+    new(-1) == None,
+    new(field_prime) == None,
+    new(834884848) == Some(Scalar(834884848)),
+  }
+}
+
+/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`.
+pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option<Scalar> {
+  new(builtin.bytearray_to_integer(True, bytes))
+}
+
+test from_bytearray_big_endian_1() {
+  from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960))
+}
+
+/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`.
+pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option<Scalar> {
+  new(builtin.bytearray_to_integer(False, bytes))
+}
+
+test from_bytearray_little_endian_1() {
+  from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535))
+}
+
+// ## Modifying
+
+/// Exponentiates a `Scalar` element by a non-negative integer exponent, using repeated squaring.
+/// Note that this function returns `scalar.zero` for negative exponents.
+/// A dedicated builtin function for this is in the making, see CIP 109.
+pub fn scale(self: Scalar, e: Int) -> Scalar {
+  if e < 0 {
+    zero
+  } else if e == 0 {
+    one
+  } else if e % 2 == 0 {
+    scale(mul(self, self), e / 2)
+  } else {
+    mul(self, scale(mul(self, self), ( e - 1 ) / 2))
+  }
+}
+
+test scale_1() {
+  and {
+    scale(Scalar(834884848), -1) == zero,
+    scale(Scalar(834884848), 0) == one,
+    scale(Scalar(834884848), 1) == Scalar(834884848),
+    scale(Scalar(834884848), 2) == Scalar(697032709419983104),
+    scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192),
+    scale(Scalar(field_prime - 4), 200) == Scalar(
+      12843927705572658539565969578937286576443167978938369866871449552629978143484,
+    ),
+  }
+}
+
+/// A faster version of `scale` for the case where the exponent is a power of two.
+/// That is, the exponent `e = 2^k` for some non-negative integer `k`, which is used a lot in zk-SNARKs.
+pub fn scale2(self: Scalar, k: Int) -> Scalar {
+  if k < 0 {
+    zero
+  } else {
+    do_scale2(self, k)
+  }
+}
+
+fn do_scale2(self: Scalar, k: Int) -> Scalar {
+  if k == 0 {
+    self
+  } else {
+    do_scale2(mul(self, self), k - 1)
+  }
+}
+
+test scale2_1() {
+  and {
+    scale2(Scalar(834884848), -1) == zero,
+    scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1),
+    scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2),
+    scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4),
+    scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8),
+    scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16),
+  }
+}
+
+// ## Combining
+
+/// Adds two `Scalar` elements, ensuring the result stays within the finite field range.
+pub fn add(left: Scalar, right: Scalar) -> Scalar {
+  Scalar(( left.integer + right.integer ) % field_prime)
+}
+
+test add_1() {
+  and {
+    (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?,
+    (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?,
+    (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?,
+  }
+}
+
+/// Divides one `Scalar` element by another, returning `None` if the divisor is zero.
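+///
+/// For example (mirroring the `div_1` test below):
+///
+/// ```aiken
+/// expect Some(x) = scalar.new(834884848)
+///
+/// scalar.div(x, x) == scalar.new(1)
+/// scalar.div(x, scalar.zero) == None
+/// ```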
+pub fn div(left: Scalar, right: Scalar) -> Option { + if right == zero { + None + } else { + Some(mul(left, scale(right, field_prime - 2))) + } +} + +test div_1() { + and { + div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), + div(Scalar(834884848), zero) == None, + div(Scalar(field_prime - 1), Scalar(2)) == Some( + Scalar( + 26217937587563095239723870254092982918845276250263818911301829349969290592256, + ), + ), + } +} + +/// Multiplies two `Scalar` elements, with the result constrained within the finite field. +pub fn mul(left: Scalar, right: Scalar) -> Scalar { + Scalar(left.integer * right.integer % field_prime) +} + +test mul_1() { + and { + mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), + mul(zero, Scalar(834884848)) == zero, + mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699938581184511, + ), + } +} + +/// Calculates the additive inverse of a `Scalar` element. +pub fn neg(self: Scalar) -> Scalar { + // this is basicly sub(zero, self), but more efficient as it saves one modulo operation + if self.integer == 0 { + self + } else { + Scalar(field_prime - self.integer) + } +} + +test neg_1() { + and { + neg(Scalar(834884848)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699937746299665, + ), + neg(zero) == zero, + neg(one) == Scalar(field_prime - 1), + } +} + +/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. +pub fn recip(self: Scalar) -> Option { + div(one, self) +} + +test recip_1() { + and { + recip(Scalar(834884848)) == Some( + Scalar( + 35891248691642227249400403463796410930702563777316955162085759263735363466421, + ), + ), + recip(zero) == None, + } +} + +/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. +pub fn sub(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer - right.integer ) % field_prime) +} + +test sub_1() { + and { + (sub(Scalar(834884848), Scalar(834884848)) == zero)?, + (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, + } +} + +// ## Transforming + +/// Converts a `Scalar` element back to its integer representation. +pub fn to_int(self: Scalar) -> Int { + self.integer +} + +test to_int_1() { + to_int(Scalar(834884848)) == 834884848 +} + +/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. +pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self.integer) +} + +/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. +pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self.integer) +} + +test to_bytearray_1() { + to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak new file mode 100644 index 00000000..96179f9b --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak @@ -0,0 +1,680 @@ +//// In a eUTxO-based blockchain like Cardano, the management of time can be +//// finicky. 
+////
+//// Indeed, in order to maintain a complete determinism in the execution of
+//// scripts, it is impossible to introduce a notion of _"current time"_ since
+//// the execution would then depend on factors that are external to the
+//// transaction itself: the ineluctable stream of time flowing in our universe.
+////
+//// Hence, to work around that, we typically define time intervals, which give
+//// windows -- a.k.a. intervals -- within which the transaction can be executed.
+//// From within a script, it isn't possible to know when exactly the script is
+//// executed, but we can reason about the interval bounds to validate pieces of
+//// logic.
+
+// TODO: Replace 'Int' with a generic 'a' once we have comparable traits.
+
+/// A type to represent intervals of values. Intervals are inhabited by a type
+/// `a`, which is useful for non-infinite intervals that have a finite
+/// lower-bound and/or upper-bound.
+///
+/// This allows representing all kinds of mathematical intervals:
+///
+/// ```aiken
+/// // [1; 10]
+/// let i0: Interval<Int> = Interval
+///   { lower_bound:
+///       IntervalBound { bound_type: Finite(1), is_inclusive: True }
+///   , upper_bound:
+///       IntervalBound { bound_type: Finite(10), is_inclusive: True }
+///   }
+/// ```
+///
+/// ```aiken
+/// // (20; infinity)
+/// let i1: Interval<Int> = Interval
+///   { lower_bound:
+///       IntervalBound { bound_type: Finite(20), is_inclusive: False }
+///   , upper_bound:
+///       IntervalBound { bound_type: PositiveInfinity, is_inclusive: False }
+///   }
+/// ```
+pub type Interval<a> {
+  lower_bound: IntervalBound<a>,
+  upper_bound: IntervalBound<a>,
+}
+
+/// An interval bound, either inclusive or exclusive.
+pub type IntervalBound<a> {
+  bound_type: IntervalBoundType<a>,
+  is_inclusive: Bool,
+}
+
+/// A type of interval bound. Where finite, a value of type `a` must be
+/// provided. `a` will typically be an `Int`, representing a number of seconds or
+/// milliseconds.
+pub type IntervalBoundType<a> {
+  NegativeInfinity
+  Finite(a)
+  PositiveInfinity
+}
+
+// ## Constructing
+
+/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF)
+///
+/// ```aiken
+/// interval.after(10) == Interval {
+///   lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True },
+///   upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True },
+/// }
+/// ```
+pub fn after(lower_bound: a) -> Interval<a> {
+  Interval {
+    lower_bound: IntervalBound {
+      bound_type: Finite(lower_bound),
+      is_inclusive: True,
+    },
+    upper_bound: IntervalBound {
+      bound_type: PositiveInfinity,
+      is_inclusive: True,
+    },
+  }
+}
+
+/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF)
+///
+/// ```aiken
+/// interval.entirely_after(10) == Interval {
+///   lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False },
+///   upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True },
+/// }
+/// ```
+pub fn entirely_after(lower_bound: a) -> Interval<a> {
+  Interval {
+    lower_bound: IntervalBound {
+      bound_type: Finite(lower_bound),
+      is_inclusive: False,
+    },
+    upper_bound: IntervalBound {
+      bound_type: PositiveInfinity,
+      is_inclusive: True,
+    },
+  }
+}
+
+/// Create an interval that includes all values before (and including) the given bound.
i.e (-INF, upper_bound] +/// +/// ```aiken +/// interval.before(100) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) +/// +/// ```aiken +/// interval.entirely_before(10) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] +/// +/// ```aiken +/// interval.between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) +/// +/// ```aiken +/// interval.entirely_between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an empty interval that contains no value. +/// +/// ```aiken +/// interval.contains(empty, 0) == False +/// interval.contains(empty, 1000) == False +/// ``` +pub const empty: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + } + +/// Create an interval that contains every possible values. i.e. (-INF, +INF) +/// +/// ```aiken +/// interval.contains(everything, 0) == True +/// interval.contains(everything, 1000) == True +/// ``` +pub const everything: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } + +// ## Inspecting + +/// Checks whether an element is contained within the interval. 
+/// +/// ```aiken +/// let iv = +/// Interval { +/// lower_bound: IntervalBound { +/// bound_type: Finite(14), +/// is_inclusive: True +/// }, +/// upper_bound: IntervalBound { +/// bound_type: Finite(42), +/// is_inclusive: False +/// }, +/// } +/// +/// interval.contains(iv, 25) == True +/// interval.contains(iv, 0) == False +/// interval.contains(iv, 14) == True +/// interval.contains(iv, 42) == False +/// ``` +pub fn contains(self: Interval, elem: Int) -> Bool { + let is_greater_than_lower_bound = + when self.lower_bound.bound_type is { + NegativeInfinity -> True + Finite(lower_bound) -> + if self.lower_bound.is_inclusive { + elem >= lower_bound + } else { + elem > lower_bound + } + PositiveInfinity -> False + } + + let is_smaller_than_upper_bound = + when self.upper_bound.bound_type is { + NegativeInfinity -> False + Finite(upper_bound) -> + if self.upper_bound.is_inclusive { + elem <= upper_bound + } else { + elem < upper_bound + } + PositiveInfinity -> True + } + + is_greater_than_lower_bound && is_smaller_than_upper_bound +} + +test contains_1() { + let iv = everything + contains(iv, 14) +} + +test contains_2() { + let iv = entirely_before(15) + contains(iv, 14) +} + +test contains_3() { + let iv = before(14) + contains(iv, 14) +} + +test contains_4() { + let iv = entirely_before(14) + !contains(iv, 14) +} + +test contains_5() { + let iv = entirely_after(13) + contains(iv, 14) +} + +test contains_6() { + let iv = after(14) + contains(iv, 14) +} + +test contains_7() { + let iv = entirely_after(14) + !contains(iv, 14) +} + +test contains_8() { + let iv = between(42, 1337) + !contains(iv, 14) +} + +test contains_9() { + let iv = between(0, 42) + contains(iv, 14) +} + +test contains_10() { + let iv = between(0, 42) + contains(iv, 42) +} + +test contains_11() { + let iv = entirely_between(0, 42) + !contains(iv, 0) +} + +test contains_12() { + let iv = empty + !contains(iv, 14) +} + +/// Tells whether an interval is empty; i.e. that is contains no value. +/// +/// ```aiken +/// let iv1 = interval.empty +/// +/// let iv2 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// } +/// +/// let iv3 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// +/// interval.is_empty(iv1) == True +/// interval.is_empty(iv2) == True +/// interval.is_empty(iv3) == False +/// +/// // Note: Two empty intervals are not necessarily equal. 
+/// iv1 != iv2 +/// ``` +pub fn is_empty(self: Interval) -> Bool { + let ordering = + compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) + + when ordering is { + Greater -> True + Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) + Less -> { + let is_open_interval = + !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive + if is_open_interval { + when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { + (Finite(lower_bound), Finite(upper_bound)) -> + lower_bound + 1 == upper_bound + _ -> False + } + } else { + False + } + } + } +} + +/// Check whether the interval is entirely after the point "a" +/// +/// ```aiken +/// interval.is_entirely_after(interval.after(10), 5) == True +/// interval.is_entirely_after(interval.after(10), 10) == False +/// interval.is_entirely_after(interval.after(10), 15) == False +/// interval.is_entirely_after(interval.between(10, 20), 30) == False +/// interval.is_entirely_after(interval.between(10, 20), 5) == True +pub fn is_entirely_after(self: Interval, point: Int) -> Bool { + when self.lower_bound.bound_type is { + Finite(low) -> + if self.lower_bound.is_inclusive { + point < low + } else { + point <= low + } + _ -> False + } +} + +test is_entirely_after_1() { + is_entirely_after(after(10), 5) +} + +test is_entirely_after_2() { + !is_entirely_after(after(10), 10) +} + +test is_entirely_after_3() { + !is_entirely_after(after(10), 15) +} + +test is_entirely_after_4() { + !is_entirely_after(between(10, 20), 30) +} + +test is_entirely_after_5() { + is_entirely_after(between(10, 20), 5) +} + +test is_entirely_after_6() { + is_entirely_after(entirely_after(10), 10) +} + +test is_entirely_after_7() { + !is_entirely_after(before(10), 5) +} + +test is_entirely_after_8() { + !is_entirely_after(before(10), 15) +} + +test is_entirely_after_9() { + !is_entirely_after(entirely_before(10), 5) +} + +/// Check whether the interval is entirely before the point "a" +/// +/// ```aiken +/// interval.is_entirely_before(interval.before(10), 15) == True +/// interval.is_entirely_before(interval.before(10), 10) == False +/// interval.is_entirely_before(interval.before(10), 5) == False +/// interval.is_entirely_before(interval.between(10, 20), 30) == True +/// interval.is_entirely_before(interval.between(10, 20), 5) == False +pub fn is_entirely_before(self: Interval, point: Int) -> Bool { + when self.upper_bound.bound_type is { + Finite(hi) -> + if self.upper_bound.is_inclusive { + hi < point + } else { + hi <= point + } + _ -> False + } +} + +test is_entirely_before_1() { + is_entirely_before(before(10), 15) +} + +test is_entirely_before_2() { + !is_entirely_before(before(10), 10) +} + +test is_entirely_before_3() { + !is_entirely_before(before(10), 5) +} + +test is_entirely_before_4() { + is_entirely_before(between(10, 20), 30) +} + +test is_entirely_before_5() { + !is_entirely_before(between(10, 20), 5) +} + +test is_entirely_before_6() { + is_entirely_before(entirely_before(10), 10) +} + +test is_entirely_before_7() { + !is_entirely_before(after(10), 15) +} + +test is_entirely_before_8() { + !is_entirely_before(after(10), 5) +} + +test is_entirely_before_9() { + !is_entirely_before(entirely_after(10), 5) +} + +// ## Combining + +/// Computes the smallest interval containing the two given intervals, if any +/// +/// ```aiken +/// let iv1 = between(0, 10) +/// let iv2 = between(2, 14) +/// hull(iv1, iv2) == between(0, 14) +/// +/// let iv1 = between(5, 10) +/// let iv2 = before(0) +/// hull(iv1, iv2) == 
before(10)
+///
+/// let iv1 = entirely_after(0)
+/// let iv2 = between(10, 42)
+/// hull(iv1, iv2) == entirely_after(0)
+/// ```
+pub fn hull(iv1: Interval<Int>, iv2: Interval<Int>) -> Interval<Int> {
+  Interval {
+    lower_bound: min(iv1.lower_bound, iv2.lower_bound),
+    upper_bound: max(iv1.upper_bound, iv2.upper_bound),
+  }
+}
+
+test hull_1() {
+  let iv1 = between(0, 10)
+  let iv2 = between(2, 14)
+  hull(iv1, iv2) == between(0, 14)
+}
+
+test hull_2() {
+  let iv1 = between(5, 10)
+  let iv2 = before(0)
+  hull(iv1, iv2) == before(10)
+}
+
+test hull_3() {
+  let iv1 = entirely_after(0)
+  let iv2 = between(10, 42)
+  hull(iv1, iv2) == entirely_after(0)
+}
+
+/// Computes the largest interval contained in the two given intervals, if any.
+///
+/// ```aiken
+/// let iv1 = interval.between(0, 10)
+/// let iv2 = interval.between(2, 14)
+/// interval.intersection(iv1, iv2) == interval.between(2, 10)
+///
+/// let iv1 = interval.entirely_before(10)
+/// let iv2 = interval.entirely_after(0)
+/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10)
+///
+/// let iv1 = interval.between(0, 1)
+/// let iv2 = interval.between(2, 3)
+/// interval.intersection(iv1, iv2) |> interval.is_empty
+/// ```
+pub fn intersection(iv1: Interval<Int>, iv2: Interval<Int>) -> Interval<Int> {
+  Interval {
+    lower_bound: max(iv1.lower_bound, iv2.lower_bound),
+    upper_bound: min(iv1.upper_bound, iv2.upper_bound),
+  }
+}
+
+test intersection_1() {
+  let iv1 = between(0, 10)
+  let iv2 = between(2, 14)
+  intersection(iv1, iv2) == between(2, 10)
+}
+
+test intersection_2() {
+  let iv1 = between(0, 1)
+  let iv2 = between(1, 2)
+  intersection(iv1, iv2) == between(1, 1)
+}
+
+test intersection_3() {
+  let iv1 = between(0, 1)
+  let iv2 = entirely_between(1, 2)
+  intersection(iv1, iv2)
+    |> is_empty
+}
+
+test intersection_4() {
+  let iv1 = entirely_between(0, 1)
+  let iv2 = entirely_between(1, 2)
+  intersection(iv1, iv2)
+    |> is_empty
+}
+
+test intersection_5() {
+  let iv1 = between(0, 10)
+  let iv2 = before(4)
+  intersection(iv1, iv2) == between(0, 4)
+}
+
+test intersection_6() {
+  let iv1 = entirely_before(10)
+  let iv2 = entirely_after(0)
+  intersection(iv1, iv2) == entirely_between(0, 10)
+}
+
+/// Return the highest bound of the two.
+///
+/// ```aiken
+/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False }
+/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False }
+///
+/// interval.max(ib1, ib2) == ib2
+/// ```
+pub fn max(
+  left: IntervalBound<Int>,
+  right: IntervalBound<Int>,
+) -> IntervalBound<Int> {
+  when compare_bound(left, right) is {
+    Less -> right
+    Equal -> left
+    Greater -> left
+  }
+}
+
+/// Return the smallest bound of the two.
+/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.min(ib1, ib2) == ib1 +/// ``` +pub fn min( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> left + Equal -> left + Greater -> right + } +} + +fn compare_bound( + left: IntervalBound, + right: IntervalBound, +) -> Ordering { + when compare_bound_type(left.bound_type, right.bound_type) is { + Less -> Less + Greater -> Greater + Equal -> + if left.is_inclusive == right.is_inclusive { + Equal + } else if left.is_inclusive { + Greater + } else { + Less + } + } +} + +fn compare_bound_type( + left: IntervalBoundType, + right: IntervalBoundType, +) -> Ordering { + when left is { + NegativeInfinity -> + when right is { + NegativeInfinity -> Equal + _ -> Less + } + PositiveInfinity -> + when right is { + PositiveInfinity -> Equal + _ -> Greater + } + Finite(left) -> + when right is { + NegativeInfinity -> Greater + PositiveInfinity -> Less + Finite(right) -> + if left < right { + Less + } else if left == right { + Equal + } else { + Greater + } + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak new file mode 100644 index 00000000..dd575e7a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak @@ -0,0 +1,424 @@ +//// This module contains some basic Math utilities. Standard arithmetic +//// operations on integers are available through native operators: +//// +//// Operator | Description +//// --- | :--- +//// `+` | Arithmetic sum +//// `-` | Arithmetic difference +//// `/` | Whole division +//// `*` | Arithmetic multiplication +//// `%` | Remainder by whole division +//// +//// Here are a few examples: +//// +//// ```aiken +//// 1 + 1 // 2 +//// 10 - 2 // 8 +//// 40 / 14 // 2 +//// 3 * 4 // 12 +//// 10 % 3 // 1 + +use aiken/builtin + +/// Calculate the absolute value of an integer. +/// +/// ```aiken +/// math.abs(-42) == 42 +/// math.abs(14) == 14 +/// ``` +pub fn abs(self: Int) -> Int { + if self < 0 { + 0 - self + } else { + self + } +} + +test abs_1() { + abs(14) == 14 +} + +test abs_2() { + abs(-42) == 42 +} + +/// Restrict the value of an integer between two min and max bounds +/// +/// ```aiken +/// math.clamp(14, min: 0, max: 10) == 10 +/// ``` +pub fn clamp(self: Int, min: Int, max: Int) -> Int { + if self < min { + min + } else { + if self > max { + max + } else { + self + } + } +} + +test clamp_1() { + clamp(14, min: 0, max: 10) == 10 +} + +test clamp_2() { + clamp(7, min: 0, max: 10) == 7 +} + +test clamp_3() { + clamp(7, min: 10, max: 100) == 10 +} + +/// The greatest common divisor of two integers. +/// +/// ```aiken +/// math.gcd(42, 14) == 14 +/// math.gcd(14, 42) == 14 +/// math.gcd(0, 0) == 0 +/// ``` +pub fn gcd(x: Int, y: Int) -> Int { + abs(do_gcd(x, y)) +} + +fn do_gcd(x: Int, y: Int) -> Int { + when y is { + 0 -> x + _ -> do_gcd(y, x % y) + } +} + +test gcd_test1() { + gcd(10, 300) == 10 +} + +test gcd_test2() { + gcd(-10, 300) == 10 +} + +test gcd_test3() { + gcd(42, 14) == 14 +} + +/// Checks if an integer has a given integer square root x. +/// The check has constant time complexity $O(1)$. 
+/// +/// ```aiken +/// math.is_sqrt(0, 0) +/// math.is_sqrt(25, 5) +/// !math.is_sqrt(25, -5) +/// math.is_sqrt(44203, 210) +/// ``` +pub fn is_sqrt(self: Int, x: Int) -> Bool { + x * x <= self && ( x + 1 ) * ( x + 1 ) > self +} + +test is_sqrt1() { + is_sqrt(44203, 210) +} + +test is_sqrt2() { + is_sqrt(975461057789971041, 987654321) +} + +/// The logarithm in base `b` of an element using integer divisions. +/// +/// ```aiken +/// math.log(10, base: 2) == 3 +/// math.log(42, base: 2) == 5 +/// math.log(42, base: 3) == 3 +/// math.log(5, base: 0) == 0 +/// math.log(4, base: 4) == 1 +/// math.log(4, base: 42) == 0 +/// ``` +pub fn log(self: Int, base: Int) -> Int { + if base <= 0 { + 0 + } else if self == base { + 1 + } else if self < base { + 0 + } else { + 1 + log(self / base, base) + } +} + +test log_10_2() { + log(10, base: 2) == 3 +} + +test log_42_2() { + log(42, base: 2) == 5 +} + +test log_42_3() { + log(42, base: 3) == 3 +} + +test log_5_0() { + log(5, base: 0) == 0 +} + +test log_4_4() { + log(4, base: 4) == 1 +} + +test log_4_43() { + log(4, base: 43) == 0 +} + +/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. +/// +/// ```aiken +/// math.log2(1) == 0 +/// math.log2(2) == 1 +/// math.log2(3) == 1 +/// math.log2(4) == 2 +/// math.log2(256) == 8 +/// math.log2(257) == 8 +/// math.log2(511) == 8 +/// math.log2(1025) == 10 +/// ``` +pub fn log2(x: Int) -> Int { + expect x > 0 + let s = builtin.integer_to_bytearray(True, 0, x) + let len = builtin.length_of_bytearray(s) + let b = builtin.index_bytearray(s, 0) + len * 8 - if b < 2 { + 8 + } else if b < 4 { + 7 + } else if b < 8 { + 6 + } else if b < 16 { + 5 + } else if b < 32 { + 4 + } else if b < 64 { + 3 + } else if b < 128 { + 2 + } else { + 1 + } +} + +test log2_matrix() { + and { + log2(1) == 0, + log2(2) == 1, + log2(3) == 1, + log2(4) == 2, + log2(256) == 8, + log2(257) == 8, + log2(511) == 8, + log2(1025) == 10, + } +} + +/// Return the maximum of two integers. +pub fn max(a: Int, b: Int) -> Int { + if a > b { + a + } else { + b + } +} + +test max_1() { + max(0, 0) == 0 +} + +test max_2() { + max(14, 42) == 42 +} + +test max_3() { + max(42, 14) == 42 +} + +/// Return the minimum of two integers. +pub fn min(a: Int, b: Int) -> Int { + if a > b { + b + } else { + a + } +} + +test min_1() { + min(0, 0) == 0 +} + +test min_2() { + min(14, 42) == 14 +} + +test min_3() { + min(42, 14) == 14 +} + +/// Calculates a number to the power of `e` using the exponentiation by +/// squaring method. +/// +/// ```aiken +/// math.pow(3, 5) == 243 +/// math.pow(7, 2) == 49 +/// math.pow(3, -4) == 0 +/// math.pow(0, 0) == 1 +/// math.pow(513, 3) == 135005697 +/// ``` +pub fn pow(self: Int, e: Int) -> Int { + if e < 0 { + 0 + } else if e == 0 { + 1 + } else if e % 2 == 0 { + pow(self * self, e / 2) + } else { + self * pow(self * self, ( e - 1 ) / 2) + } +} + +test pow_3_5() { + pow(3, 5) == 243 +} + +test pow_7_2() { + pow(7, 2) == 49 +} + +test pow_3__4() { + // negative powers round to zero + pow(3, -4) == 0 +} + +test pow_0_0() { + // sorry math + pow(0, 0) == 1 +} + +test pow_513_3() { + pow(513, 3) == 135005697 +} + +test pow_2_4() { + pow(2, 4) == 16 +} + +test pow_2_42() { + pow(2, 42) == 4398046511104 +} + +/// Calculates the power of 2 for a given exponent `e`. Much cheaper than +/// using `pow(2, _)` for small exponents $0 < e < 256$. 
+/// +/// ```aiken +/// math.pow2(-2) == 0 +/// math.pow2(0) == 1 +/// math.pow2(1) == 2 +/// math.pow2(4) == 16 +/// math.pow2(42) == 4398046511104 +/// ``` +pub fn pow2(e: Int) -> Int { + // do_pow2(e, 1) + if e < 8 { + if e < 0 { + 0 + } else { + builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) + } + } else if e < 32 { + 256 * pow2(e - 8) + } else { + 4294967296 * pow2(e - 32) + } +} + +test pow2_neg() { + pow2(-2) == 0 +} + +test pow2_0() { + pow2(0) == 1 +} + +test pow2_1() { + pow2(1) == 2 +} + +test pow2_4() { + pow2(4) == 16 +} + +test pow2_42() { + pow2(42) == 4398046511104 +} + +test pow2_256() { + pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 +} + +/// Calculates the square root of an integer using the [Babylonian +/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer +/// nearest to the square root. +/// +/// Returns `None` for negative values. +/// +/// ```aiken +/// math.sqrt(0) == Some(0) +/// math.sqrt(25) == Some(5) +/// math.sqrt(44203) == Some(210) +/// math.sqrt(-42) == None +/// ``` +/// +/// > [!TIP] +/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. +pub fn sqrt(self: Int) -> Option { + if self < 0 { + None + } else if self <= 1 { + Some(self) + } else { + Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) + } +} + +// The basic idea is that if x is an overestimate to the square root of a +// non-negative real number S then S/x will be an underestimate, or vice versa, +// and so the average of these two numbers may reasonably be expected to provide a +// better approximation (though the formal proof of that assertion depends on the +// inequality of arithmetic and geometric means that shows this average is always +// an overestimate of the square root. +fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { + if y >= x { + x + } else { + sqrt_babylonian(self, y, ( y + self / y ) / 2) + } +} + +test sqrt1() { + sqrt(0) == Some(0) +} + +test sqrt2() { + sqrt(1) == Some(1) +} + +test sqrt3() { + sqrt(25) == Some(5) +} + +test sqrt4() { + sqrt(44203) == Some(210) +} + +test sqrt5() { + sqrt(975461057789971041) == Some(987654321) +} + +test sqrt6() { + sqrt(-42) == None +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak new file mode 100644 index 00000000..88fe7ab7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak @@ -0,0 +1,871 @@ +//// This module implements operations between rational numbers. +//// +//// > [!CAUTION] +//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. +//// > +//// > Thus, for example: +//// > +//// > ```aiken +//// > rational.new(2, 3) != rational.new(4, 6) +//// > ``` +//// > +//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. + +use aiken/builtin +use aiken/collection/list +use aiken/math +use aiken/option + +/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. +pub opaque type Rational { + numerator: Int, + denominator: Int, +} + +// ## Constructing + +/// Create a new `Rational` from an `Int`. 
+/// +/// ```aiken +/// Some(rational.from_int(14)) == rational.new(14, 1) +/// Some(rational.from_int(-5)) == rational.new(-5, 1) +/// Some(rational.from_int(0)) == rational.new(0, 1) +/// ``` +pub fn from_int(numerator: Int) -> Rational { + Rational { numerator, denominator: 1 } +} + +test from_int_1() { + and { + (from_int(14) == ratio(14, 1))?, + (from_int(-5) == ratio(-5, 1))?, + (from_int(0) == ratio(0, 1))?, + } +} + +/// An unsafe constructor for `Rational` values. Assumes that the following invariants are +/// enforced: +/// +/// - the denominator is positive (the sign is managed in the numerator); +/// - the denominator is not null. +/// +/// This function is mainly used as a quick way to construct rationals from literal values. +fn ratio(numerator: Int, denominator: Int) -> Rational { + Rational { numerator, denominator } +} + +/// Make a `Rational` number from the ratio of two integers. +/// +/// Returns `None` when the denominator is null. +/// +/// ```aiken +/// rational.new(14, 42) == Some(r) +/// rational.new(14, 0) == None +/// ``` +pub fn new(numerator: Int, denominator: Int) -> Option { + if denominator == 0 { + None + } else if denominator < 0 { + Some(Rational { numerator: -numerator, denominator: -denominator }) + } else { + Some(Rational { numerator, denominator }) + } +} + +test new_1() { + and { + (new(2, 0) == None)?, + (new(2, 3) == Some(ratio(2, 3)))?, + (new(-2, 3) == Some(ratio(-2, 3)))?, + (new(2, -3) == Some(ratio(-2, 3)))?, + (new(2, 4) == Some(ratio(2, 4)))?, + (new(-2, -3) == Some(ratio(2, 3)))?, + (new(-2, -4) == Some(ratio(2, 4)))?, + } +} + +/// A null `Rational`. +pub const zero: Rational = Rational { numerator: 0, denominator: 1 } + +test zero_1() { + zero == ratio(0, 1) +} + +// ## Inspecting + +/// Get the denominator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.denominator(x) == 3 +/// ``` +pub fn denominator(self: Rational) -> Int { + self.denominator +} + +test denominator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + and { + (denominator(x) == 3)?, + (denominator(y) == 3)?, + (denominator(z) == 3)?, + (denominator(w) == 3)?, + } +} + +/// Get the numerator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.numerator(x) == 2 +/// ``` +pub fn numerator(self: Rational) -> Int { + self.numerator +} + +test numerator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + + and { + (numerator(x) == 2)?, + (numerator(y) == -2)?, + (numerator(z) == -2)?, + (numerator(w) == 2)?, + } +} + +// ## Modifying + +/// Absolute value of a `Rational`. +/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.abs(x) == x +/// rational.abs(y) == x +/// ``` +pub fn abs(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + Rational { numerator: math.abs(a_n), denominator: a_d } +} + +test abs_examples() { + and { + (abs(ratio(5, 2)) == ratio(5, 2))?, + (abs(ratio(-5, 2)) == ratio(5, 2))?, + (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, + } +} + +/// Change the sign of a `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.negate(x) == y +/// rational.negate(y) == x +/// ``` +pub fn negate(a: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = a + Rational { numerator: -a_n, denominator: a_d } +} + +test negate_1() { + and { + (negate(ratio(5, 2)) == ratio(-5, 2))?, + (negate(ratio(-5, 2)) == ratio(5, 2))?, + (negate(negate(ratio(5, 2))) == ratio(5, 2))?, + } +} + +/// Reciprocal of a `Rational` number. That is, a new `Rational` where the +/// numerator and denominator have been swapped. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 5) +/// rational.reciprocal(x) == rational.new(5, 2) +/// +/// let y = rational.zero +/// rational.reciprocal(y) == None +/// ``` +pub fn reciprocal(self: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = self + if a_n < 0 { + Some(Rational { numerator: -a_d, denominator: -a_n }) + } else if a_n > 0 { + Some(Rational { numerator: a_d, denominator: a_n }) + } else { + None + } +} + +test reciprocal_1() { + and { + (reciprocal(ratio(5, 2)) == new(2, 5))?, + (reciprocal(ratio(-5, 2)) == new(-2, 5))?, + (reciprocal(ratio(0, 2)) == None)?, + (reciprocal(ratio(2, 3)) == new(3, 2))?, + (reciprocal(ratio(-2, 3)) == new(-3, 2))?, + } +} + +/// Reduce a rational to its irreducible form. This operation makes the +/// numerator and denominator coprime. +/// +/// ```aiken +/// expect Some(x) = rational.new(80, 200) +/// Some(rational.reduce(x)) == rational.new(2, 5) +/// ``` +pub fn reduce(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + let d = math.gcd(a_n, a_d) + Rational { numerator: a_n / d, denominator: a_d / d } +} + +test reduce_1() { + and { + (reduce(ratio(80, 200)) == ratio(2, 5))?, + (reduce(ratio(-5, 1)) == ratio(-5, 1))?, + (reduce(ratio(0, 3)) == ratio(0, 1))?, + } +} + +// ## Combining + +// ### Arithmetic operations + +/// Addition: sum of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.add(x, y)) == rational.new(17, 12) +/// ``` +pub fn add(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } +} + +test add_1() { + add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) +} + +test add_2() { + add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) +} + +/// Division: quotient of two rational values. Returns `None` when the second +/// value is null. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.div(x, y) == rational.new(8, 9) +/// ``` +pub fn div(left: Rational, right: Rational) -> Option { + reciprocal(right) |> option.map(mul(left, _)) +} + +test div_1() { + div(ratio(2, 3), ratio(3, 4)) == new(8, 9) +} + +test div_2() { + div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) +} + +/// Multiplication: the product of two rational values. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.mul(x, y)) == rational.new(6, 12) +/// ``` +pub fn mul(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_n, denominator: a_d * b_d } +} + +test mul_1() { + mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) +} + +test mul_2() { + mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) +} + +test mul_3() { + let result = + ratio(2, 5) + |> mul(ratio(1, 8)) + |> mul(ratio(3, 10)) + |> mul(ratio(21, 100)) + |> mul(ratio(3, 5)) + |> mul(ratio(2, 8)) + |> mul(ratio(4, 10)) + |> mul(ratio(22, 100)) + |> reduce + + result == ratio(2079, 50000000) +} + +/// Subtraction: difference of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.sub(x, y)) == rational.new(-1, 12) +/// ``` +pub fn sub(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } +} + +test sub_1() { + sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) +} + +test sub_2() { + sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) +} + +test sub_3() { + sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) +} + +// ### Ordering + +/// Compare two rationals for an ordering. This is safe to use even for +/// non-reduced rationals. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// expect Some(z) = rational.new(4, 6) +/// +/// compare(x, y) == Less +/// compare(y, x) == Greater +/// compare(x, x) == Equal +/// compare(x, z) == Equal +/// ``` +pub fn compare(left: Rational, right: Rational) -> Ordering { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + + let l = a_n * b_d + let r = b_n * a_d + + if l < r { + Less + } else if l > r { + Greater + } else { + Equal + } +} + +test compare_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + expect Some(z) = new(4, 6) + and { + compare(x, y) == Less, + compare(y, x) == Greater, + compare(x, x) == Equal, + compare(x, z) == Equal, + } +} + +/// Comparison of two rational values using a chosen heuristic. For example: +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.compare_with(x, >, y) == False +/// rational.compare_with(y, >, x) == True +/// rational.compare_with(x, >, x) == False +/// rational.compare_with(x, >=, x) == True +/// rational.compare_with(x, ==, x) == True +/// rational.compare_with(x, ==, y) == False +/// ``` +pub fn compare_with( + left: Rational, + with: fn(Int, Int) -> Bool, + right: Rational, +) -> Bool { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + with(a_n * b_d, b_n * a_d) +} + +// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. + +test compare_with_eq() { + let eq = + compare_with(_, fn(l, r) { l == r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
+} + +test compare_with_neq() { + let neq = + compare_with(_, fn(l, r) { l != r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + neq(x, y)? && neq(y, x)? && !neq(x, x)? +} + +test compare_with_gte() { + let gte = + compare_with(_, fn(l, r) { l >= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gte(x, y)? && gte(y, x)? && gte(x, x)? +} + +test compare_with_gt() { + let gt = + compare_with(_, fn(l, r) { l > r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gt(x, y)? && gt(y, x)? && !gt(x, x)? +} + +test compare_with_lte() { + let lte = + compare_with(_, fn(l, r) { l <= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lte(x, y)? && !lte(y, x)? && lte(x, x)? +} + +test compare_with_lt() { + let lt = + compare_with(_, fn(l, r) { l < r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lt(x, y)? && !lt(y, x)? && !lt(x, x)? +} + +// ### Means + +/// Calculate the arithmetic mean between two `Rational` values. +/// +/// ```aiken +/// let x = rational.from_int(0) +/// let y = rational.from_int(1) +/// let z = rational.from_int(2) +/// +/// expect Some(result) = rational.arithmetic_mean([x, y, z]) +/// +/// rational.compare(result, y) == Equal +/// ``` +pub fn arithmetic_mean(self: List) -> Option { + div(list.foldr(self, zero, add), from_int(list.length(self))) +} + +test arithmetic_mean_1() { + let x = ratio(1, 2) + let y = ratio(1, 2) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(1, 2) +} + +test arithmetic_mean_2() { + let x = ratio(1, 1) + let y = ratio(2, 1) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(3, 2) +} + +test arithmetic_mean_3() { + let xs = + [ + ratio(1, 1), + ratio(2, 1), + ratio(3, 1), + ratio(4, 1), + ratio(5, 1), + ratio(6, 1), + ] + expect Some(z) = arithmetic_mean(xs) + reduce(z) == ratio(7, 2) +} + +/// Calculate the geometric mean between two `Rational` values. This returns +/// either the exact result or the smallest integer nearest to the square root +/// for the numerator and denominator. 
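+/// +/// > [!NOTE] +/// > A minimal sketch of the degenerate case: when the product under either square root is negative (e.g. exactly one operand is negative), there is no integer square root and the result is `None`. +/// +/// ```aiken +/// expect Some(negative) = rational.new(-1, 2) +/// expect Some(positive) = rational.new(1, 2) +/// +/// rational.geometric_mean(negative, positive) == None +/// ```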
+/// +/// ```aiken +/// expect Some(x) = rational.new(1, 3) +/// expect Some(y) = rational.new(1, 6) +/// +/// rational.geometric_mean(x, y) == rational.new(1, 4) +/// ``` +pub fn geometric_mean(left: Rational, right: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + when math.sqrt(a_n * b_n) is { + Some(numerator) -> + when math.sqrt(a_d * b_d) is { + Some(denominator) -> Some(Rational { numerator, denominator }) + None -> None + } + None -> None + } +} + +test geometric_mean1() { + expect Some(x) = new(1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == new(1, 2) +} + +test geometric_mean2() { + expect Some(x) = new(-1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean3() { + expect Some(x) = new(1, 2) + expect Some(y) = new(-1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean4() { + expect Some(x) = new(1, 3) + expect Some(y) = new(1, 6) + geometric_mean(x, y) == new(1, 4) +} + +test geometric_mean5() { + expect Some(x) = new(67, 2500) + expect Some(y) = new(35331, 1000) + expect Some(yi) = reciprocal(y) + geometric_mean(x, yi) == new(258, 9398) +} + +// ## Transforming + +/// Returns the smallest `Int` not less than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.ceil(x) == 1 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.ceil(y) == 4 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.ceil(z) == -4 +/// ``` +pub fn ceil(self: Rational) -> Int { + let Rational { numerator, denominator } = self + if builtin.remainder_integer(numerator, denominator) > 0 { + builtin.quotient_integer(numerator, denominator) + 1 + } else { + builtin.quotient_integer(numerator, denominator) + } +} + +test ceil_1() { + and { + (ceil(ratio(13, 5)) == 3)?, + (ceil(ratio(15, 5)) == 3)?, + (ceil(ratio(16, 5)) == 4)?, + (ceil(ratio(-3, 5)) == 0)?, + (ceil(ratio(-5, 5)) == -1)?, + (ceil(ratio(-14, 3)) == -4)?, + (ceil(ratio(-14, 6)) == -2)?, + (ceil(ratio(44, 14)) == 4)?, + } +} + +/// Returns the greatest `Int` no greater than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.floor(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.floor(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.floor(z) == -5 +/// ``` +pub fn floor(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + a_n / a_d +} + +test floor_1() { + and { + (floor(ratio(5, 2)) == 2)?, + (floor(ratio(5, 3)) == 1)?, + (floor(ratio(5, 4)) == 1)?, + (floor(ratio(5, 5)) == 1)?, + (floor(ratio(5, 6)) == 0)?, + (floor(ratio(8, 3)) == 2)?, + (floor(ratio(-14, 3)) == -5)?, + } +} + +/// Computes the rational number x raised to the power y. Returns `None` for +/// invalid exponentiation. 
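+/// +/// > [!NOTE] +/// > A minimal sketch of the invalid case: the only rejected combination is a zero rational raised to a non-positive exponent. +/// +/// ```aiken +/// rational.pow(rational.zero, -1) == None +/// rational.pow(rational.zero, 0) == None +/// ```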
+/// +/// ```aiken +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) +/// +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) +/// ``` +pub fn pow(x: Rational, y: Int) -> Option { + let Rational { numerator: a, denominator: b } = x + + if a == 0 && y <= 0 { + None + } else if y > 0 { + Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) + } else if y < 0 { + Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) + } else { + Some(Rational { numerator: 1, denominator: 1 }) + } +} + +test pow_negative_exponent_non_zero_fraction() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, -3) + expect Some(expected_result) = new(125000, 1) + reduce(calculated_result) == expected_result +} + +test pow_positive_exponent() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, 3) + expect Some(expected_result) = new(1, 125000) + reduce(calculated_result) == expected_result +} + +test pow_exponent_zero() { + expect Some(base) = new(50, 2500) + pow(base, 0) == new(1, 1) +} + +test pow_rational_zero_exponent_zero() { + expect Some(base) = new(0, 1) + pow(base, 0) == None +} + +/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of +/// an `Int` and `Rational` (n, f) such that: +/// +/// - `r = n + f`; +/// - `n` and `f` have the same sign as `r`; +/// - `f` has an absolute value less than 1. +pub fn proper_fraction(self: Rational) -> (Int, Rational) { + let Rational { numerator, denominator } = self + ( + builtin.quotient_integer(numerator, denominator), + Rational { + numerator: builtin.remainder_integer(numerator, denominator), + denominator, + }, + ) +} + +test proper_fraction_1() { + let r = ratio(10, 7) + let (n, f) = proper_fraction(r) + and { + (n == 1)?, + (f == ratio(3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_2() { + let r = ratio(-10, 7) + let (n, f) = proper_fraction(r) + and { + (n == -1)?, + (f == ratio(-3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_3() { + let r = ratio(4, 2) + let (n, f) = proper_fraction(r) + and { + (n == 2)?, + (f == ratio(0, 2))?, + (r == add(from_int(n), f))?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, the greater value is returned (it +/// rounds half towards positive infinity). +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.round(x) == 1 +/// +/// expect Some(y) = rational.new(3, 2) +/// rational.round(y) == 2 +/// +/// expect Some(z) = rational.new(-3, 2) +/// rational.round(z) == -1 +/// ``` +/// +/// > [!CAUTION] +/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. +/// > If you need this behaviour, use [`round_even`](#round_even). 
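+/// +/// ```aiken +/// // A minimal sketch of the half-way contrast with `round_even`: +/// expect Some(half) = rational.new(5, 2) +/// +/// rational.round(half) == 3 +/// rational.round_even(half) == 2 +/// ```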
+pub fn round(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let is_negative = f.numerator < 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_negative { + n + } else { + n + 1 + } + Greater -> + if is_negative { + n - 1 + } else { + n + 1 + } + } +} + +test round_1() { + and { + (round(ratio(10, 7)) == 1)?, + (round(ratio(11, 7)) == 2)?, + (round(ratio(3, 2)) == 2)?, + (round(ratio(5, 2)) == 3)?, + (round(ratio(-3, 2)) == -1)?, + (round(ratio(-2, 3)) == -1)?, + (round(ratio(-10, 7)) == -1)?, + (round(ratio(4, 2)) == 2)?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, it returns the value that is even (it +/// rounds half to even, also known as 'banker's rounding'). +/// +/// ```aiken +/// expect Some(w) = rational.new(2, 3) +/// rational.round_even(w) == 1 +/// +/// expect Some(x) = rational.new(3, 2) +/// rational.round_even(x) == 2 +/// +/// expect Some(y) = rational.new(5, 2) +/// rational.round_even(y) == 2 +/// +/// expect Some(y) = rational.new(-3, 2) +/// rational.round_even(y) == -2 +/// ``` +pub fn round_even(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let m = + when compare(f, ratio(0, 1)) is { + Less -> -1 + _ -> 1 + } + + let is_even = n % 2 == 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_even { + n + } else { + n + m + } + Greater -> n + m + } +} + +test round_even_1() { + and { + (round_even(ratio(10, 7)) == 1)?, + (round_even(ratio(11, 7)) == 2)?, + (round_even(ratio(3, 2)) == 2)?, + (round_even(ratio(5, 2)) == 2)?, + (round_even(ratio(-3, 2)) == -2)?, + (round_even(ratio(-2, 3)) == -1)?, + (round_even(ratio(-10, 7)) == -1)?, + (round_even(ratio(4, 2)) == 2)?, + } +} + +/// Returns the nearest `Int` between zero and a given `Rational`. 
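+/// +/// ```aiken +/// // A minimal sketch: truncation rounds toward zero, unlike `floor`. +/// expect Some(x) = rational.new(-5, 2) +/// +/// rational.truncate(x) == -2 +/// rational.floor(x) == -3 +/// ```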
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.truncate(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.truncate(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.truncate(z) == -4 +/// ``` +pub fn truncate(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + builtin.quotient_integer(a_n, a_d) +} + +test truncate_1() { + and { + (truncate(ratio(5, 2)) == 2)?, + (truncate(ratio(5, 3)) == 1)?, + (truncate(ratio(5, 4)) == 1)?, + (truncate(ratio(5, 5)) == 1)?, + (truncate(ratio(5, 6)) == 0)?, + (truncate(ratio(8, 3)) == 2)?, + (truncate(ratio(-14, 3)) == -4)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak new file mode 100644 index 00000000..ab8cbc17 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak @@ -0,0 +1,65 @@ +use aiken/fuzz.{both, either, map} +use aiken/math/rational.{Rational, new, pow} + +const any_positive_rational: Fuzzer = + either( + map( + both(fuzz.int_at_least(1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_negative_rational: Fuzzer = + either( + map( + both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_non_zero_rational: Fuzzer = + either(any_negative_rational, any_positive_rational) + +test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 0) + expect Some(expected_result) = new(1, 1) + calculated_result == expected_result +} + +test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 1) + calculated_result == rational +} + +test prop_power_numerator_zero_exponent_negative_returns_none( + (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), +) { + expect Some(fraction) = new(0, denominator) + expect None = pow(fraction, exponent) +} + +test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { + expect Some(unit) = new(1, 1) + expect Some(calculated_result) = pow(unit, exponent) + calculated_result == unit +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak new file mode 100644 index 00000000..cf5ef7dc --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak @@ -0,0 +1,312 @@ +//// A type to capture optional results; useful for handling errors. +//// +//// Note that the `Option` type and its constructors are readily available in Aiken. They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. + +// ## Inspecting + +/// Asserts whether an option is `None`. 
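+/// +/// ```aiken +/// // A minimal sketch, mirroring the tests below: +/// option.is_none(None) == True +/// option.is_none(Some(42)) == False +/// ```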
+pub fn is_none(self: Option) -> Bool { + when self is { + Some(_) -> False + _ -> True + } +} + +test is_none_1() { + is_none(Some(0)) == False +} + +test is_none_2() { + is_none(None) == True +} + +/// Asserts whether an option is `Some`, irrespective of the value it contains. +pub fn is_some(self: Option) -> Bool { + when self is { + Some(_) -> True + _ -> False + } +} + +test is_some_1() { + is_some(Some(0)) == True +} + +test is_some_2() { + is_some(None) == False +} + +// ## Combining + +/// Chain together many computations that may fail. +/// +/// ```aiken +/// self +/// |> dict.get(policy_id) +/// |> option.and_then(dict.get(_, asset_name)) +/// |> option.or_else(0) +/// ``` +pub fn and_then( + self: Option, + then: fn(a) -> Option, +) -> Option { + when self is { + None -> None + Some(a) -> then(a) + } +} + +fn try_decrement(n: Int) -> Option { + if n > 0 { + Some(n - 1) + } else { + None + } +} + +test and_then_1() { + let result = + None + |> and_then(try_decrement) + result == None +} + +test and_then_2() { + let result = + Some(14) + |> and_then(try_decrement) + result == Some(13) +} + +test and_then_3() { + let result = + Some(0) + |> and_then(try_decrement) + result == None +} + +/// Picks the first element which is not None. If there's no such element, return None. +/// +/// ```aiken +/// option.choice([]) == None +/// option.choice([Some(14), Some(42)]) == Some(14) +/// option.choice([None, Some(42)]) == Some(42) +/// option.choice([None, None]) == None +/// ``` +pub fn choice(self: List>) -> Option { + when self is { + [] -> None + [head, ..others] -> + when head is { + None -> choice(others) + _ -> head + } + } +} + +test choice_1() { + Some(1) == choice([Some(1), Some(2)]) +} + +test choice_2() { + None == choice([]) +} + +test choice_3() { + Some(1) == choice([None, Some(1)]) +} + +/// Converts from `Option>` to `Option`. +/// +/// ```aiken +/// option.flatten(Some(Some(42))) == Some(42) +/// option.flatten(Some(None)) == None +/// option.flatten(None) == None +/// ``` +/// +/// Flattening only removes one level of nesting at a time: +/// +/// ```aiken +/// flatten(Some(Some(Some(42)))) == Some(Some(42)) +/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) +/// ``` +pub fn flatten(opt: Option>) -> Option { + when opt is { + Some(inner) -> inner + None -> None + } +} + +test flatten_1() { + let x: Option> = Some(Some(6)) + Some(6) == flatten(x) +} + +test flatten_2() { + let x: Option> = Some(None) + None == flatten(x) +} + +test flatten_3() { + let x: Option> = None + None == flatten(x) +} + +test flatten_4() { + let x: Option>> = Some(Some(Some(6))) + + let result = + x + |> flatten + |> flatten + + Some(6) == result +} + +/// Apply a function to the inner value of an [`Option`](#option) +/// +/// ```aiken +/// option.map(None, fn(n) { n * 2 }) == None +/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) +/// ``` +pub fn map(self: Option, with: fn(a) -> result) -> Option { + when self is { + None -> None + Some(a) -> Some(with(a)) + } +} + +test map_1() { + map(None, fn(_) { Void }) == None +} + +test map_2() { + map(Some(14), fn(n) { n + 1 }) == Some(15) +} + +/// Combine two [`Option`](#option) together. 
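+/// +/// ```aiken +/// // A minimal sketch: both operands must be `Some` for the result to be `Some`. +/// option.map2(Some(14), Some(42), fn(a, b) { a + b }) == Some(56) +/// option.map2(Some(14), None, fn(a, b) { a + b }) == None +/// ```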
+/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int) +/// } +/// +/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) +/// option.map2(None, Some(42), Foo) == None +/// option.map2(Some(14), None, Foo) == None +/// ``` +pub fn map2( + opt_a: Option, + opt_b: Option, + with: fn(a, b) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> Some(with(a, b)) + } + } +} + +test map2_1() { + map2(None, Some(42), fn(_, _) { 14 }) == None +} + +test map2_2() { + map2(Some(42), None, fn(_, _) { 14 }) == None +} + +test map2_3() { + map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) +} + +/// Combine three [`Option`](#option) together. +/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int, Int) +/// } +/// +/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) +/// option.map3(None, Some(42), Some(1337), Foo) == None +/// option.map3(Some(14), None, None, Foo) == None +/// ``` +pub fn map3( + opt_a: Option, + opt_b: Option, + opt_c: Option, + with: fn(a, b, c) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> + when opt_c is { + None -> None + Some(c) -> Some(with(a, b, c)) + } + } + } +} + +test map3_1() { + map3(None, Some(42), None, fn(_, _, _) { 14 }) == None +} + +test map3_2() { + map3(Some(42), None, None, fn(_, _, _) { 14 }) == None +} + +test map3_3() { + map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) +} + +/// Like [`or_else`](#or_else) but allows returning an `Option`. +/// This is effectively mapping the error branch. +/// +/// ```aiken +/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") +/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) +/// option.or_try(None, fn (_) { fail }) => 💥 +/// ``` +pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { + when self is { + None -> compute_default() + _ -> self + } +} + +test or_try_1() { + or_try(None, fn() { Some("aiken") }) == Some("aiken") +} + +test or_try_2() { + or_try(Some(42), fn() { fail }) == Some(42) +} + +// ## Transforming + +/// Provide a default value, turning an optional value into a normal value. +/// +/// ```aiken +/// option.or_else(None, "aiken") == "aiken" +/// option.or_else(Some(42), 14) == 42 +/// ``` +pub fn or_else(self: Option, default: a) -> a { + when self is { + None -> default + Some(a) -> a + } +} + +test or_else_1() { + or_else(None, "aiken") == "aiken" +} + +test or_else_2() { + or_else(Some(42), 14) == 42 +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak new file mode 100644 index 00000000..d2f125f5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak @@ -0,0 +1,668 @@ +use aiken/builtin +use aiken/math +use aiken/option + +pub type Byte = + Int + +// ## Constructing + +/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is left-padded with zeroes. 
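+/// +/// ```aiken +/// // A minimal sketch: encoding round-trips with `to_int_big_endian`. +/// bytearray.to_int_big_endian(bytearray.from_int_big_endian(1_000_000, 3)) == 1_000_000 +/// ```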
+/// +/// ```aiken +/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" +/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" +/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self) +} + +test from_int_big_endian_1() { + from_int_big_endian(1_000_000, 3) == #"0f4240" +} + +test from_int_big_endian_2() { + from_int_big_endian(1_000_000, 5) == #"00000f4240" +} + +test from_int_big_endian_3() { + from_int_big_endian(0, 8) == #"0000000000000000" +} + +test from_int_big_endian_4() fail { + from_int_big_endian(1_000_000, 1) == #"40" +} + +/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is right-padded with zeroes. +/// +/// ```aiken +/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" +/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" +/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self) +} + +test from_int_little_endian_1() { + from_int_little_endian(1_000_000, 3) == #"40420f" +} + +test from_int_little_endian_2() { + from_int_little_endian(1_000_000, 5) == #"40420f0000" +} + +test from_int_little_endian_3() { + from_int_little_endian(0, 8) == #"0000000000000000" +} + +test from_int_little_endian_4() fail { + from_int_little_endian(1_000_000, 1) == #"40" +} + +/// Convert a `String` into a `ByteArray`. +/// +/// ```aiken +/// bytearray.from_string(@"ABC") == #"414243" +/// ``` +pub fn from_string(str: String) -> ByteArray { + builtin.encode_utf8(str) +} + +test from_string_1() { + from_string(@"") == "" +} + +test from_string_2() { + from_string(@"ABC") == #"414243" +} + +/// Add a byte element in front of a `ByteArray`. When the given byte is +/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so +/// forth. +/// In PlutusV3 this will error instead of wrapping around. +/// +/// ```aiken +/// bytearray.push(#"", 0) == #"00" +/// bytearray.push(#"0203", 1) == #"010203" +/// bytearray.push(#"0203", 257) == #"010203" +/// ``` +pub fn push(self: ByteArray, byte: Byte) -> ByteArray { + builtin.cons_bytearray(byte, self) +} + +test push_1() { + push(#[], 0) == #[0] +} + +test push_2() { + push(#[2, 3], 1) == #[1, 2, 3] +} + +test push_3() fail { + let x = 257 + push(#[2, 3], x) == #[1, 2, 3] +} + +// ## Inspecting + +/// Get the `Byte` at the given index, or crash. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if there's no byte at the given index. +pub fn at(self: ByteArray, index: Int) -> Byte { + builtin.index_bytearray(self, index) +} + +/// Search the start and end positions of a sub-array in a `ByteArray`. 
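+/// +/// ```aiken +/// // A minimal sketch: the returned positions are 0-based and inclusive, so they feed `slice` directly. +/// expect Some((start, end)) = bytearray.index_of("Hello, World!", "World") +/// +/// bytearray.slice("Hello, World!", start, end) == "World" +/// ```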
+/// +/// ```aiken +/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) +/// bytearray.index_of("Hello, World!", "foo") == None +/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) +/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) +/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +/// ``` +pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { + let offset = length(bytes) + + do_index_of(self, bytes, 0, offset, length(self)) + |> option.map(fn(ix) { (ix, ix + offset - 1) }) +} + +fn do_index_of( + self: ByteArray, + bytes: ByteArray, + cursor: Int, + offset: Int, + size: Int, +) -> Option { + if cursor + offset > size { + None + } else { + if builtin.slice_bytearray(cursor, offset, self) == bytes { + Some(cursor) + } else { + do_index_of(self, bytes, cursor + 1, offset, size) + } + } +} + +test index_of_1() { + index_of("Hello, World!", "World") == Some((7, 11)) +} + +test index_of_2() { + index_of("Hello, World!", "foo") == None +} + +test index_of_3() { + index_of("Hello, World!", "!") == Some((12, 12)) +} + +test index_of_4() { + index_of("Hello, World!", "o") == Some((4, 4)) +} + +test index_of_5() { + index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +} + +/// Returns `True` when the given `ByteArray` is empty. +/// +/// ```aiken +/// bytearray.is_empty(#"") == True +/// bytearray.is_empty(#"00ff") == False +/// ``` +pub fn is_empty(self: ByteArray) -> Bool { + builtin.length_of_bytearray(self) == 0 +} + +test is_empty_1() { + is_empty(#"") == True +} + +test is_empty_2() { + is_empty(#"01") == False +} + +/// Returns the number of bytes in a `ByteArray`. +/// +/// ```aiken +/// bytearray.length(#[1, 2, 3]) == 3 +/// ``` +pub fn length(self: ByteArray) -> Int { + builtin.length_of_bytearray(self) +} + +test length_1() { + length(#"") == 0 +} + +test length_2() { + length(#"010203") == 3 +} + +/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. +/// +/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the +/// following bits sequence: +/// +/// `8` | `b` | `7` | `6` | `5` | `f` +/// --- | --- | --- | --- | --- | --- +/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` +/// +/// And thus, we have: +/// +/// ```aiken +/// test_bit(#"8b765f", 0) == True +/// test_bit(#"8b765f", 1) == False +/// test_bit(#"8b765f", 2) == False +/// test_bit(#"8b765f", 3) == False +/// test_bit(#"8b765f", 7) == True +/// test_bit(#"8b765f", 8) == False +/// test_bit(#"8b765f", 20) == True +/// test_bit(#"8b765f", 21) == True +/// test_bit(#"8b765f", 22) == True +/// test_bit(#"8b765f", 23) == True +/// ``` +pub fn test_bit(self: ByteArray, ix: Int) -> Bool { + builtin.less_than_equals_bytearray( + #[128], + builtin.cons_bytearray( + builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, + "", + ), + ) +} + +test test_bit_0() { + test_bit(#"8b765f", 0) +} + +test test_bit_1() { + !test_bit(#"8b765f", 1) +} + +test test_bit_2() { + !test_bit(#"8b765f", 2) +} + +test test_bit_3() { + !test_bit(#"8b765f", 3) +} + +test test_bit_7() { + test_bit(#"8b765f", 7) +} + +test test_bit_8() { + !test_bit(#"8b765f", 8) +} + +test test_bit_20_21_22_23() { + and { + test_bit(#"8b765f", 20), + test_bit(#"8b765f", 21), + test_bit(#"8b765f", 22), + test_bit(#"8b765f", 23), + } +} + +// ## Modifying + +/// Returns the suffix of a `ByteArray` after `n` elements. 
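+/// +/// ```aiken +/// // A minimal sketch: `take` and `drop` split a `ByteArray` at position n. +/// bytearray.concat(bytearray.take(#"01020304", 2), bytearray.drop(#"01020304", 2)) == #"01020304" +/// ```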
+/// +/// ```aiken +/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] +/// ``` +pub fn drop(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) +} + +test drop_1() { + let x = #"01020304050607" + drop(x, 2) == #"0304050607" +} + +test drop_2() { + let x = #"01020304050607" + drop(x, 0) == x +} + +test drop_3() { + let x = #"01" + drop(x, 1) == #"" +} + +test drop_4() { + let x = #"" + drop(x, 2) == #"" +} + +/// Extract a `ByteArray` as a slice of another `ByteArray`. +/// +/// Indexes are 0-based and inclusive. +/// +/// ```aiken +/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] +/// ``` +pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { + builtin.slice_bytearray(start, end - start + 1, self) +} + +test slice_1() { + slice(#"", 1, 2) == #"" +} + +test slice_2() { + slice(#"010203", 1, 2) == #"0203" +} + +test slice_3() { + slice(#"010203", 0, 42) == #"010203" +} + +test slice_4() { + slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] +} + +test slice_5() { + slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] +} + +/// Returns the n-length prefix of a `ByteArray`. +/// +/// ```aiken +/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] +/// ``` +pub fn take(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(0, n, self) +} + +test take_1() { + let x = #"01020304050607" + take(x, 2) == #"0102" +} + +test take_2() { + let x = #"01020304050607" + take(x, 0) == #"" +} + +test take_3() { + let x = #"01" + take(x, 1) == x +} + +test take_4() { + let x = #"010203" + take(x, 0) == #"" +} + +// ## Combining + +/// Combine two `ByteArray` together. +/// +/// ```aiken +/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { + builtin.append_bytearray(left, right) +} + +test concat_1() { + concat(#"", #"") == #"" +} + +test concat_2() { + concat(#"", #"01") == #"01" +} + +test concat_3() { + concat(#"0102", #"") == #"0102" +} + +test concat_4() { + concat(#"0102", #"0304") == #"01020304" +} + +/// Compare two bytearrays lexicographically. +/// +/// ```aiken +/// bytearray.compare(#"00", #"FF") == Less +/// bytearray.compare(#"42", #"42") == Equal +/// bytearray.compare(#"FF", #"00") == Greater +/// ``` +pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { + if builtin.less_than_bytearray(left, right) { + Less + } else if builtin.equals_bytearray(left, right) { + Equal + } else { + Greater + } +} + +// ## Transforming + +/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
+/// +/// ```aiken +/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] +/// ``` +pub fn foldl( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) +} + +fn do_foldl( + self: ByteArray, + zero: result, + len: Int, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor == len { + zero + } else { + do_foldl( + self, + with(builtin.index_bytearray(self, cursor), zero), + len, + cursor + 1, + with, + ) + } +} + +test foldl_1() { + foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldl_2() { + foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +} + +test foldl_3() { + foldl( + #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", + 0, + fn(byte, acc) { acc * 256 + byte }, + ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 +} + +test foldl_4() { + foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] +} + +/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. +/// +/// ```aiken +/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] +/// ``` +pub fn foldr( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) +} + +fn do_foldr( + self: ByteArray, + zero: result, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor < 0 { + zero + } else { + do_foldr( + self, + with(builtin.index_bytearray(self, cursor), zero), + cursor - 1, + with, + ) + } +} + +test foldr_1() { + foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldr_2() { + foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +} + +test foldr_3() { + foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] +} + +/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] +/// ``` +pub fn reduce( + self: ByteArray, + zero: result, + with: fn(result, Int) -> result, +) -> result { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce(#[], #[], push) == #[] +} + +test reduce_2() { + reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] +} + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_big_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(True, self) +} + +test to_int_big_endian_1() { + to_int_big_endian(#"0f4240") == 1_000_000 +} + +test to_int_big_endian_2() { + to_int_big_endian(#"00000f4240") == 1_000_000 +} + +test to_int_big_endian_3() { + to_int_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
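+/// +/// ```aiken +/// // A minimal sketch: the least-significant byte comes first. +/// bytearray.to_int_little_endian(#"40420f") == 1_000_000 +/// ```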
+/// +/// ```aiken +/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 +/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_little_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(False, self) +} + +test to_int_little_endian_1() { + to_int_little_endian(#"40420f") == 1_000_000 +} + +test to_int_little_endian_2() { + to_int_little_endian(#"40420f0000") == 1_000_000 +} + +test to_int_little_endian_3() { + to_int_little_endian(#"0000000000000000") == 0 +} + +/// Convert a `ByteArray` into a `String`. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). +/// +/// ```aiken +/// bytearray.to_string(#"414243") == "ABC" +/// bytearray.to_string(some_hash) => 💥 +/// ``` +pub fn to_string(self: ByteArray) -> String { + builtin.decode_utf8(self) +} + +test to_string_1() { + to_string("") == @"" +} + +test to_string_2() { + to_string("ABC") == @"ABC" +} + +/// Encode a `ByteArray` as a hexidecimal `String`. +/// +/// ```aiken +/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" +/// ``` +pub fn to_hex(self: ByteArray) -> String { + self + |> encode_base16(builtin.length_of_bytearray(self) - 1, "") + |> builtin.decode_utf8 +} + +test to_hex_1() { + to_hex("Hello world!") == @"48656C6C6F20776F726C6421" +} + +test to_hex_2() { + to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" +} + +/// Checks whether a `ByteArray` starts with a given prefix. +/// +/// ```aiken +/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True +/// bytearray.starts_with("", prefix: "") == True +/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False +/// ``` +pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { + let prefix_length = length(prefix) + if length(self) < prefix_length { + False + } else { + take(self, prefix_length) == prefix + } +} + +test starts_with_1() { + starts_with("", "") +} + +test starts_with_2() { + starts_with("Hello, World!", "Hello, World!") +} + +test starts_with_3() { + !starts_with("Hello, World!", "hello") +} + +test starts_with_4() { + !starts_with("", "World") +} + +test starts_with_5() { + starts_with("Hello, World", "Hello") +} + +test starts_with_6() { + !starts_with("foo", "foo_") +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak new file mode 100644 index 00000000..217749e9 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak @@ -0,0 +1,156 @@ +use aiken/builtin.{bytearray_to_integer, decode_utf8} +use aiken/math +use aiken/option +use aiken/primitive/bytearray + +// ## Combining + +/// Compare two integers. 
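+/// +/// ```aiken +/// // A minimal sketch, assuming `sort` from `aiken/collection/list` in this same package: +/// list.sort([42, 14, 1337], int.compare) == [14, 42, 1337] +/// ```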
+/// +/// ```aiken +/// int.compare(14, 42) == Less +/// int.compare(14, 14) == Equal +/// int.compare(42, 14) == Greater +/// ``` +pub fn compare(left: Int, right: Int) -> Ordering { + if left < right { + Less + } else if left > right { + Greater + } else { + Equal + } +} + +// ## Transforming + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_big_endian(self: ByteArray) -> Int { + bytearray_to_integer(True, self) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"0f4240") == 1_000_000 +} + +test from_bytearray_big_endian_2() { + from_bytearray_big_endian(#"00000f4240") == 1_000_000 +} + +test from_bytearray_big_endian_3() { + from_bytearray_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 +/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_little_endian(self: ByteArray) -> Int { + bytearray_to_integer(False, self) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"40420f") == 1_000_000 +} + +test from_bytearray_little_endian_2() { + from_bytearray_little_endian(#"40420f0000") == 1_000_000 +} + +test from_bytearray_little_endian_3() { + from_bytearray_little_endian(#"0000000000000000") == 0 +} + +/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. +/// +/// ```aiken +/// int.from_utf8("14") == Some(14) +/// int.from_utf8("-42") == Some(-42) +/// int.from_utf8("007") == Some(7) +/// int.from_utf8("foo") == None +/// int.from_utf8("1.0") == None +/// int.from_utf8("1-2") == None +/// ``` +pub fn from_utf8(bytes: ByteArray) -> Option { + bytes + |> bytearray.foldr( + Some((0, 0)), + fn(byte, st) { + when st is { + None -> None + Some((n, e)) -> + if byte < 48 || byte > 57 { + if byte == 45 { + Some((-n, 0)) + } else { + None + } + } else if n < 0 { + None + } else { + let digit = byte - 48 + Some((n + digit * math.pow(10, e), e + 1)) + } + } + }, + ) + |> option.map(fn(tuple) { tuple.1st }) +} + +test from_utf8_1() { + from_utf8("0017") == Some(17) +} + +test from_utf8_2() { + from_utf8("42") == Some(42) +} + +test from_utf8_3() { + from_utf8("1337") == Some(1337) +} + +test from_utf8_4() { + from_utf8("-14") == Some(-14) +} + +test from_utf8_5() { + from_utf8("foo") == None +} + +test from_utf8_6() { + from_utf8("1-2") == None +} + +/// Convert an `Int` to its `String` representation. 
+/// +/// ```aiken +/// int.to_string(42) == @"42" +/// ``` +pub fn to_string(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test to_string_1() { + to_string(0) == @"0" +} + +test to_string_2() { + to_string(5) == @"5" +} + +test to_string_3() { + to_string(42) == @"42" +} + +test to_string_4() { + to_string(200) == @"200" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak new file mode 100644 index 00000000..35fa5567 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak @@ -0,0 +1,139 @@ +use aiken/builtin.{ + append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, +} + +// ## Constructing + +/// Convert a `ByteArray` into a `String` +/// +/// > [!WARNING] +/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). +/// +/// ```aiken +/// string.from_bytearray("foo") == @"foo" +/// string.from_bytearray(#"666f6f") == @"foo" +/// string.from_bytearray(some_hash) -> fail +/// ``` +pub fn from_bytearray(bytes: ByteArray) -> String { + decode_utf8(bytes) +} + +test from_bytearray_1() { + from_bytearray(#[]) == @"" +} + +test from_bytearray_2() { + from_bytearray(#[65, 66, 67]) == @"ABC" +} + +test from_bytearray_3() { + from_bytearray("ABC") == @"ABC" +} + +/// Convert an `Int` to its `String` representation. +/// +/// ```aiken +/// string.from_int(42) == @"42" +/// ``` +pub fn from_int(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test from_int_1() { + from_int(0) == @"0" +} + +test from_int_2() { + from_int(5) == @"5" +} + +test from_int_3() { + from_int(42) == @"42" +} + +test from_int_4() { + from_int(200) == @"200" +} + +// ## Combining + +/// Combine two `String` together. +/// +/// ```aiken +/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" +/// ``` +pub fn concat(left: String, right: String) -> String { + append_string(left, right) +} + +test concat_1() { + concat(@"", @"") == @"" +} + +test concat_2() { + concat(@"", @"foo") == concat(@"foo", @"") +} + +test concat_3() { + concat(left: @"Hello", right: @", World!") == @"Hello, World!" +} + +/// Join a list of strings, separated by a given _delimiter_. 
+/// +/// ```aiken +/// string.join([], @"+") == @"" +/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" +/// ``` +pub fn join(list: List, delimiter: String) -> String { + do_join(list, encode_utf8(delimiter), #"") + |> decode_utf8 +} + +fn do_join(xs, delimiter, bytes) { + when xs is { + [] -> bytes + [x, ..rest] -> + do_join( + rest, + delimiter, + if length_of_bytearray(bytes) == 0 { + encode_utf8(x) + } else { + append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) + }, + ) + } +} + +test join_1() { + join([], @",") == @"" +} + +test join_2() { + join([@"a", @"b", @"c"], @",") == @"a,b,c" +} + +// ## Transforming + +/// Convert a `String` into a `ByteArray` +/// +/// ```aiken +/// string.to_bytearray(@"foo") == "foo" +/// ``` +pub fn to_bytearray(self: String) -> ByteArray { + encode_utf8(self) +} + +test to_bytearray_1() { + to_bytearray(@"") == "" +} + +test to_bytearray_2() { + to_bytearray(@"ABC") == #[65, 66, 67] +} + +test to_bytearray_3() { + to_bytearray(@"ABC") == "ABC" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak new file mode 100644 index 00000000..0167b90f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak @@ -0,0 +1,86 @@ +use aiken/crypto.{ + Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, +} + +/// A general structure for representing an on-chain `Credential`. +/// +/// Credentials are always one of two kinds: a direct public/private key +/// pair, or a script (native or Plutus). +pub type Credential { + VerificationKey(VerificationKeyHash) + Script(ScriptHash) +} + +// ## Constructing + +/// A Cardano `Address` typically holding one or two credential references. +/// +/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are +/// completely excluded from Plutus contexts. Thus, from an on-chain +/// perspective only exists addresses of type 00, 01, ..., 07 as detailed +/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). +pub type Address { + payment_credential: PaymentCredential, + stake_credential: Option, +} + +/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. +pub fn from_script(script: Hash) -> Address { + Address { payment_credential: Script(script), stake_credential: None } +} + +/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. +pub fn from_verification_key(vk: Hash) -> Address { + Address { payment_credential: VerificationKey(vk), stake_credential: None } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_key( + self: Address, + vk: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(VerificationKey(vk))), + } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). 
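+/// +/// ```aiken +/// // A minimal sketch; `payment_script` and `stake_script` are placeholder script hashes. +/// let addr = +///   address.from_script(payment_script) +///     |> address.with_delegation_script(stake_script) +/// +/// addr.stake_credential == Some(Inline(Script(stake_script))) +/// ```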
+pub fn with_delegation_script( + self: Address, + script: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(Script(script))), + } +} + +/// Represent a type of object that can be represented either inline (by hash) +/// or via a reference (i.e. a pointer to an on-chain location). +/// +/// This is mainly use for capturing pointers to a stake credential +/// registration certificate in the case of so-called pointer addresses. +pub type Referenced { + Inline(a) + Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } +} + +/// A `StakeCredential` represents the delegation and rewards withdrawal conditions +/// associated with some stake address / account. +/// +/// A `StakeCredential` is either provided inline, or, by reference using an +/// on-chain pointer. +/// +/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). +pub type StakeCredential = + Referenced + +/// A 'PaymentCredential' represents the spending conditions associated with +/// some output. Hence, +/// +/// - a `VerificationKey` captures an output locked by a public/private key pair; +/// - and a `Script` captures an output locked by a native or Plutus script. +/// +pub type PaymentCredential = + Credential diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak new file mode 100644 index 00000000..2ebeaa91 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak @@ -0,0 +1,30 @@ +use aiken/primitive/bytearray +use cardano/address.{Credential, Script, VerificationKey} + +pub fn compare(left: Credential, right: Credential) -> Ordering { + when left is { + Script(left) -> + when right is { + Script(right) -> bytearray.compare(left, right) + _ -> Less + } + VerificationKey(left) -> + when right is { + Script(_) -> Greater + VerificationKey(right) -> bytearray.compare(left, right) + } + } +} + +test compare_matrix() { + and { + (compare(Script(""), Script("")) == Equal)?, + (compare(VerificationKey(""), VerificationKey("")) == Equal)?, + (compare(Script(""), VerificationKey("")) == Less)?, + (compare(VerificationKey(""), Script("")) == Greater)?, + (compare(Script("01"), Script("02")) == Less)?, + (compare(Script("02"), Script("01")) == Greater)?, + (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, + (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak new file mode 100644 index 00000000..664a3983 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak @@ -0,0 +1,920 @@ +use aiken/builtin +use aiken/collection/dict.{Dict, from_ascending_pairs_with} +use aiken/collection/list +use aiken/crypto.{Blake2b_224, Hash, Script} +use aiken/option + +/// Lovelace is now a type wrapper for Int. +pub type Lovelace = + Int + +/// A type-alias for a `PolicyId`. A `PolicyId` is always 28-byte long +pub type PolicyId = + Hash + +/// Ada, the native currency, isn't associated with any `PolicyId` (it's not +/// possible to mint Ada!). 
+/// +/// By convention, it is an empty `ByteArray`. +pub const ada_policy_id = "" + +/// A type-alias for 'AssetName`, which are free-form byte-arrays between +/// 0 and 32 bytes. +pub type AssetName = + ByteArray + +/// Ada, the native currency, isn't associated with any `AssetName` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_asset_name = "" + +/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). +/// +/// > [!IMPORTANT] +/// > This type maintain some invariants by construction; in particular, a `Value` will never contain a +/// zero quantity of a particular token. +pub opaque type Value { + inner: Dict>, +} + +// ## Constructing + +/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) +/// and a given quantity. +pub fn from_asset( + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + zero + } else { + let asset = + dict.empty + |> dict.insert(asset_name, quantity) + dict.empty + |> dict.insert(policy_id, asset) + |> Value + } +} + +/// Promote an arbitrary list of assets into a `Value`. This function fails +/// (i.e. halts the program execution) if: +/// +/// - there's any duplicate amongst `PolicyId`; +/// - there's any duplicate amongst `AssetName`; +/// - the `AssetName` aren't sorted in ascending lexicographic order; or +/// - any asset quantity is null. +/// +/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, +/// while checking for internal invariants. +pub fn from_asset_list(xs: Pairs>) -> Value { + xs + |> list.foldr( + dict.empty, + fn(inner, acc) { + expect Pair(p, [_, ..] as x) = inner + x + |> from_ascending_pairs_with(fn(v) { v != 0 }) + |> dict.insert_with( + acc, + p, + _, + fn(_, _, _) { + fail @"Duplicate policy in the asset list." + }, + ) + }, + ) + |> Value +} + +test from_asset_list_1() { + let v = from_asset_list([]) + v == zero +} + +test from_asset_list_2() fail { + let v = from_asset_list([Pair(#"33", [])]) + v == zero +} + +test from_asset_list_3() fail { + let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) + v != zero +} + +test from_asset_list_4() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) + flatten(v) == [(#"33", #"", 1)] +} + +test from_asset_list_5() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) + flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] +} + +test from_asset_list_6() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + ], + ) + v != zero +} + +test from_asset_list_7() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), + ], + ) + v != zero +} + +test from_asset_list_8() { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +test from_asset_list_9() { + let v = + from_asset_list( + [ + Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +/// Construct a `Value` from a lovelace quantity. 
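+/// +/// ```aiken +/// // A minimal sketch: lovelace is simply the ada asset. +/// assets.from_lovelace(42) == assets.from_asset(assets.ada_policy_id, assets.ada_asset_name, 42) +/// ```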
+/// +/// Friendly reminder: 1 Ada = 1.000.000 Lovelace +pub fn from_lovelace(quantity: Int) -> Value { + from_asset(ada_policy_id, ada_asset_name, quantity) +} + +/// Construct an empty `Value` with nothing in it. +pub const zero: Value = Value { inner: dict.empty } + +// ## Inspecting + +/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. +pub fn is_zero(self: Value) -> Bool { + self == zero +} + +/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. +/// The second parameter is provided as `Data`, allowing to conveniently compare serialized +/// datums or similar structurually equivalent types (such as `Pairs>`). +/// +/// The third argument is a callback function to assert the left and right lovelace +/// quantities. Its first argument refers to the quantity of the first argument of +/// `match`, and the second argument of the callback to the quantity of the second +/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. +/// +/// ```aiken +/// const value: Value = +/// assets.from_lovelace(30) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// const datum: Data = +/// assets.from_lovelace(20) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// True == assets.match(value, datum, >=) +/// +/// False == assets.match(value, datum, ==) +/// +/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { +/// 2 * datum_lovelace >= value_lovelace +/// }) +/// ``` +pub fn match( + left: Value, + right: Data, + assert_lovelace: fn(Lovelace, Lovelace) -> Bool, +) -> Bool { + builtin.choose_data( + right, + False, + { + let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) + let left_assets: Data = left_assets + let left_lovelace = + when left_lovelace is { + Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd + None -> 0 + } + when builtin.un_map_data(right) is { + [] -> left_assets == right && assert_lovelace(left_lovelace, 0) + [first_asset, ..right_assets] -> + if first_asset.1st == builtin.b_data(ada_policy_id) { + and { + assert_lovelace( + left_lovelace, + builtin.un_i_data( + builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, + ), + ), + left_assets == builtin.map_data(right_assets), + } + } else { + and { + assert_lovelace(left_lovelace, 0), + left_assets == right, + } + } + } + }, + False, + False, + False, + ) +} + +const fixture_match_value: Value = + zero + |> add(ada_policy_id, ada_asset_name, 42) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_foo_02: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("bar", "01", 42) + +const fixture_match_data_altered_foo_01: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 14) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_bar: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + +const fixture_match_data_extra_policy: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + |> add("baz", "01", 1) + +const fixture_match_data_extra_asset: Data = + zero + |> 
add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("foo", "03", 1) + |> add("bar", "01", 42) + +const fixture_match_data_no_assets: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + +test match_1() { + match(fixture_match_value, fixture_match_data, fn(_, _) { True }) +} + +test match_2() { + !match( + fixture_match_value, + fixture_match_data, + fn(source, target) { source == target }, + ) +} + +test match_3() { + !match( + fixture_match_value, + fixture_match_data_missing_foo_02, + fn(_, _) { True }, + ) +} + +test match_4() { + !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) +} + +test match_5() { + !match( + fixture_match_value, + fixture_match_data_altered_foo_01, + fn(_, _) { True }, + ) +} + +test match_6() { + !match( + fixture_match_value, + fixture_match_data_extra_policy, + fn(_, _) { True }, + ) +} + +test match_7() { + !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) +} + +test match_8() { + !match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) +} + +test match_9() { + match(zero, zero, ==) +} + +test match_10() { + match( + without_lovelace(fixture_match_value), + without_lovelace(fixture_match_value), + fn(left, right) { left == 0 && right == 0 }, + ) +} + +test match_11() { + match( + without_lovelace(fixture_match_value), + fixture_match_value, + fn(left, right) { left == 0 && right > 0 }, + ) +} + +test match_12() { + match( + fixture_match_value, + without_lovelace(fixture_match_value), + fn(left, right) { left > 0 && right == 0 }, + ) +} + +test match_13() { + match( + zero |> add(ada_policy_id, ada_asset_name, 42), + zero, + fn(left, right) { left == 42 && right == 0 }, + ) +} + +test match_14() { + match( + zero, + zero |> add(ada_policy_id, ada_asset_name, 42), + fn(left, right) { left == 0 && right == 42 }, + ) +} + +const fixture_match_benchmark_left: Value = + zero + |> add(ada_policy_id, ada_asset_name, 1337) + |> add( + #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", + "MATTR", + 200, + ) + |> add( + #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", + "ProphecyPoster076", + 1, + ) + |> add( + #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", + "BISON", + 12_004_999_999, + ) + |> add( + #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", + "SNEK", + 1486, + ) + |> add( + #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", + "MAYZSilverFoundersEdition0035", + 1, + ) + |> add( + #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", + "CardanoSpaceSession", + 20, + ) + |> add( + #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", + "ADAOGovernanceToken", + 1, + ) + |> add( + #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", + "HOSKY", + 400_001_000, + ) + |> add( + #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", + "LQ", + 10_635_899, + ) + |> add( + #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", + "GrandmasterAdventurer659", + 1, + ) + |> add( + #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", + "AdventurerOfThiolden8105", + 1, + ) + |> add( + #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", + "DiscoSolaris3725", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld535", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1213", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + 
"CardaWorld1518", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1537", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld4199", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld3767", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae1", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae2", + 1, + ) + +const fixture_match_benchmark_right: Data = fixture_match_benchmark_left + +test match_benchmark() { + match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) +} + +test match_benchmark_vs() { + let data: Data = fixture_match_benchmark_right + expect pairs: Pairs> = data + fixture_match_benchmark_left == from_asset_list(pairs) +} + +/// A specialized version of `quantity_of` for the Ada currency. +pub fn lovelace_of(self: Value) -> Int { + quantity_of(self, ada_policy_id, ada_asset_name) +} + +/// A list of all token policies in that Value with non-zero tokens. +pub fn policies(self: Value) -> List { + dict.keys(self.inner) +} + +/// Extract the quantity of a given asset. +pub fn quantity_of( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, +) -> Int { + self.inner + |> dict.get(policy_id) + |> option.and_then(dict.get(_, asset_name)) + |> option.or_else(0) +} + +/// Get all tokens associated with a given policy. +pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { + self.inner + |> dict.get(policy_id) + |> option.or_else(dict.empty) +} + +// ## Combining + +/// Add a (positive or negative) quantity of a single token to a assets. +/// This is more efficient than [`merge`](#merge) for a single asset. +pub fn add( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + self + } else { + let helper = + fn(_, left, _right) { + let inner_result = + dict.insert_with( + left, + asset_name, + quantity, + fn(_k, ql, qr) { + let q = ql + qr + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(inner_result) { + None + } else { + Some(inner_result) + } + } + + Value( + dict.insert_with( + self.inner, + policy_id, + dict.from_ascending_pairs([Pair(asset_name, quantity)]), + helper, + ), + ) + } +} + +test add_1() { + let v = + zero + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -321) + v == zero +} + +test add_2() { + let v = + from_lovelace(123) + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -1 * 321) + v == from_lovelace(123) +} + +test add_3() { + let v = + from_lovelace(1) + |> add(ada_policy_id, ada_asset_name, 2) + |> add(ada_policy_id, ada_asset_name, 3) + v == from_lovelace(6) +} + +test add_4() { + let v = + zero + |> add(#"acab", #"beef", 0) + v == zero +} + +test add_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + |> add(#"acab", #"beef", 0) + v == zero +} + +/// Combine two `Value` together. 
+pub fn merge(left v0: Value, right v1: Value) -> Value { + Value( + dict.union_with( + v0.inner, + v1.inner, + fn(_, a0, a1) { + let result = + dict.union_with( + a0, + a1, + fn(_, q0, q1) { + let q = q0 + q1 + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(result) { + None + } else { + Some(result) + } + }, + ), + ) +} + +test merge_1() { + let v1 = from_lovelace(1) + let v2 = from_lovelace(-1) + merge(v1, v2) == zero +} + +test merge_2() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"01", #"", 2) + let v3 = from_asset(#"02", #"", 3) + let v = + from_lovelace(42) + |> merge(v3) + |> merge(v1) + |> merge(v2) + + flatten(v) == [ + (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), + ] +} + +test merge_3() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + let v3 = from_asset(#"01", #"", 1) + + let v = + zero + |> merge(v1) + |> merge(v2) + |> merge(v3) + + flatten(v) == [(#"01", #"", 1)] +} + +test merge_4() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + + merge(v1, v2) == zero +} + +test merge_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + + merge(zero, v) == zero +} + +/// Negates quantities of all tokens (including Ada) in that `Value`. +/// +/// ``` +/// v1 +/// |> assets.negate +/// |> assets.merge(v1) +/// |> assets.is_zero +/// // True +/// ``` +pub fn negate(self: Value) -> Value { + dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) + |> Value +} + +/// Get a subset of the assets restricted to the given policies. +pub fn restricted_to(self: Value, mask: List) -> Value { + list.foldr( + policies(self), + zero, + fn(policy_id, value) { + if list.has(mask, policy_id) { + dict.foldr( + tokens(self, policy_id), + value, + fn(asset_name, quantity, value) { + add(value, policy_id, asset_name, quantity) + }, + ) + } else { + value + } + }, + ) +} + +test restricted_to_1() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, []) == zero +} + +test restricted_to_2() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, [ada_policy_id]) == from_lovelace(42) +} + +test restricted_to_3() { + let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) + restricted_to(self, ["foo", "bar"]) == without_lovelace(self) +} + +test restricted_to_4() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, ["foo"]) == without_lovelace(self) +} + +test restricted_to_5() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, [ada_policy_id, "foo"]) == self +} + +/// Get a `Value` excluding Ada. +pub fn without_lovelace(self: Value) -> Value { + dict.delete(self.inner, ada_policy_id) + |> Value +} + +test without_lovelace_1() { + let v = from_lovelace(1000000) + without_lovelace(v) == zero +} + +test without_lovelace_2() { + let v = from_lovelace(1000000) + let v2 = from_lovelace(50000000) + without_lovelace(v) == without_lovelace(v2) +} + +test without_lovelace_3() { + let v = + from_asset(#"010203", #"040506", 100) + |> add(ada_policy_id, ada_asset_name, 100000000) + let v2 = from_asset(#"010203", #"040506", 100) + without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 +} + +// ## Transforming + +/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. +/// +/// Handy to manipulate values as uniform lists. 
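+///
+/// For example (a sketch with placeholder names; note that the Ada/Lovelace
+/// entry shows up under the empty policy id and asset name):
+///
+/// ```aiken
+/// let value =
+///   assets.from_lovelace(42)
+///     |> assets.add("foo", "bar", 1)
+///
+/// assets.flatten(value) == [(#"", #"", 42), ("foo", "bar", 1)]
+/// ```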
+pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + [(policy_id, asset_name, quantity), ..xs] + }, + ) + }, + ) +} + +/// Flatten a `Value` as a list of results, possibly discarding some along the way. +/// +/// When the transform function returns `None`, the result is discarded altogether. +pub fn flatten_with( + self: Value, + with: fn(PolicyId, AssetName, Int) -> Option, +) -> List { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + when with(policy_id, asset_name, quantity) is { + None -> xs + Some(x) -> [x, ..xs] + } + }, + ) + }, + ) +} + +test flatten_with_1() { + flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == [] +} + +test flatten_with_2() { + let v = + zero + |> add("a", "1", 14) + |> add("b", "", 42) + |> add("a", "2", 42) + + flatten_with( + v, + fn(p, a, q) { + if q == 42 { + Some((p, a)) + } else { + None + } + }, + ) == [("a", "2"), ("b", "")] +} + +/// Reduce a value into a single result +/// +/// ``` +/// assets.zero +/// |> assets.add("a", "1", 10) +/// |> assets.add("b", "2", 20) +/// |> assets.reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) +/// // 30 +/// ``` +pub fn reduce( + self: Value, + start: result, + with: fn(PolicyId, AssetName, Int, result) -> result, +) -> result { + dict.foldr( + self.inner, + start, + fn(policy_id, asset_list, result) { + dict.foldr(asset_list, result, with(policy_id, _, _, _)) + }, + ) +} + +test reduce_1() { + let v = + zero + |> add("a", "1", 10) + |> add("b", "2", 20) + let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) + result == 30 +} + +test reduce_2() { + let v = + zero + |> add("a", "1", 5) + |> add("a", "2", 15) + |> add("b", "", 10) + let result = + reduce( + v, + [], + fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] }, + ) + result == [("a", "1"), ("a", "2"), ("b", "")] +} + +test reduce_3() { + let v = zero + let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity }) + result == 1 +} + +/// Convert the value into a dictionary of dictionaries. +pub fn to_dict(self: Value) -> Dict> { + self.inner +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak new file mode 100644 index 00000000..f0b6d258 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak @@ -0,0 +1,93 @@ +use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} + +pub type StakePoolId = + Hash + +/// An on-chain certificate attesting of some operation. Publishing +/// certificates triggers different kind of rules; most of the time, +/// they require signatures from specific keys. +pub type Certificate { + /// Register a stake credential with an optional deposit amount. + /// The deposit is always present when using the new registration certificate + /// format available since the Conway era. + RegisterCredential { + credential: Credential, + /// > [!NOTE] + /// > The `deposit` ought to be an `Option`, but due to unfortunate + /// > circumstances it will always be instantiated to `None` even when set in + /// > the host transaction. 
This is what the `Never` type captures here. + deposit: Never, + } + /// Un-Register a stake credential with an optional refund amount + /// The deposit is always present when using the new de-registration certificate + /// format available since the Conway era. + UnregisterCredential { + credential: Credential, + /// > [!NOTE] + /// > The `refund` ought to be an `Option`, but due to unfortunate + /// > circumstances it will always be instantiated to `None` even when set in + /// > the host transaction. This is what the `Never` type captures here. + refund: Never, + } + /// Delegate stake to a [Delegate](#Delegate). + DelegateCredential { credential: Credential, delegate: Delegate } + /// Register and delegate staking credential to a Delegatee in one certificate. + RegisterAndDelegateCredential { + credential: Credential, + delegate: Delegate, + deposit: Lovelace, + } + /// Register a delegate representative (a.k.a DRep). The deposit is explicit and + /// is refunded when the delegate steps down (unregister). + RegisterDelegateRepresentative { + delegate_representative: Credential, + deposit: Lovelace, + } + /// Update a delegate representative (a.k.a DRep). The certificate also contains + /// metadata which aren't visible on-chain. + UpdateDelegateRepresentative { delegate_representative: Credential } + /// UnRegister a delegate representative, and refund back its past deposit. + UnregisterDelegateRepresentative { + delegate_representative: Credential, + refund: Lovelace, + } + /// Register a new stake pool + RegisterStakePool { + /// The hash digest of the stake pool's cold (public) key + stake_pool: StakePoolId, + /// The hash digest of the stake pool's VRF (public) key + vrf: VerificationKeyHash, + } + /// Retire a stake pool. 'at_epoch' indicates in which the retirement will take place + RetireStakePool { stake_pool: StakePoolId, at_epoch: Int } + /// Authorize a Hot credential for a specific Committee member's cold credential + AuthorizeConstitutionalCommitteeProxy { + constitutional_committee_member: Credential, + proxy: Credential, + } + /// Step down from the constitutional committee as a member. + RetireFromConstitutionalCommittee { + constitutional_committee_member: Credential, + } +} + +/// A type of stake delegation that can be either block-production, vote or +/// both. Note that delegation types aren't cancelling one another, so it is +/// possible to delegate block production in one transaction, and delegate vote +/// in another. This second delegation **does NOT** invalidate the first one. 
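+///
+/// As a rough illustration, a combined delegation could be expressed as
+/// follows (`my_pool_id` is an assumed placeholder, not a real pool):
+///
+/// ```aiken
+/// // `my_pool_id` stands for some `StakePoolId` known to the script.
+/// DelegateBoth {
+///   stake_pool: my_pool_id,
+///   delegate_representative: AlwaysAbstain,
+/// }
+/// ```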
+pub type Delegate { + DelegateBlockProduction { stake_pool: StakePoolId } + DelegateVote { delegate_representative: DelegateRepresentative } + DelegateBoth { + stake_pool: StakePoolId, + delegate_representative: DelegateRepresentative, + } +} + +pub type DelegateRepresentative { + Registered(Credential) + AlwaysAbstain + AlwaysNoConfidence +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak new file mode 100644 index 00000000..3ec96800 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak @@ -0,0 +1,109 @@ +use aiken/collection.{Index} +use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} +use aiken/math/rational.{Rational} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} +use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} + +pub type ProposalProcedure { + deposit: Lovelace, + return_address: Credential, + governance_action: GovernanceAction, +} + +pub type GovernanceAction { + ProtocolParameters { + /// The last governance action of type 'ProtocolParameters'. They must all + /// form a chain. + ancestor: Option, + /// The new proposed protocol parameters. Only values set to `Some` are relevant. + new_parameters: ProtocolParametersUpdate, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the new protocol parameters to be deemed valid. + guardrails: Option, + } + HardFork { + /// The last governance action of type `HardFork`. They must all + /// form a chain. + ancestor: Option, + /// The new proposed version. Few rules apply to proposing new versions: + /// + /// - The `major` component, if incremented, must be exactly one more than the current. + /// - The `minor` component, if incremented, must be exactly one more than the current. + /// - If the `major` component is incremented, `minor` must be set to `0`. + /// - Neither `minor` nor `major` can be decremented. + new_version: ProtocolVersion, + } + TreasuryWithdrawal { + /// A collection of beneficiaries, which can be plain verification key + /// hashes or script hashes (e.g. DAO). + beneficiaries: Pairs, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the withdrawals to be authorized. + guardrails: Option, + } + NoConfidence { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + } + ConstitutionalCommittee { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// Constitutional members to be removed. + evicted_members: List, + /// Constitutional members to be added. + added_members: Pairs, + /// The new quorum value, as a ratio of a numerator and a denominator. The + /// quorum specifies the threshold of 'Yes' votes necessary for the + /// constitutional committee to accept a proposal procedure. + quorum: Rational, + } + NewConstitution { + /// The last governance action of type `Constitution` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// The new proposed constitution. 
+ constitution: Constitution, + } + NicePoll +} + +pub type Vote { + No + Yes + Abstain +} + +pub type TransactionId = + Hash + +pub type GovernanceActionId { + transaction: TransactionId, + proposal_procedure: Index, +} + +pub type ProtocolVersion { + major: Int, + minor: Int, +} + +pub type Constitution { + guardrails: Option, +} + +/// An epoch number after which constitutional committee member +/// mandate expires. +pub type Mandate = + Int + +pub type Voter { + ConstitutionalCommitteeMember(Credential) + DelegateRepresentative(Credential) + StakePool(VerificationKeyHash) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak new file mode 100644 index 00000000..d9e7be95 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak @@ -0,0 +1,360 @@ +use aiken/math/rational.{Rational} +use cardano/assets.{Lovelace} + +pub opaque type ProtocolParametersUpdate { + inner: Pairs, +} + +pub type ScriptExecutionPrices { + memory: Rational, + cpu: Rational, +} + +pub type ExecutionUnits { + memory: Int, + cpu: Int, +} + +pub type StakePoolOperatorVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Rational, + Void, + Void, + Void, + Void, + >, +} + +pub type DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + constitution: Rational, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Void, + Rational, + Rational, + Rational, + Rational, + >, + treasury_withdrawal: Rational, +} + +pub type ProtocolParametersThresholds< + security, + network, + economic, + technical, + governance, +> { + security_group: security, + network_group: network, + economic_group: economic, + technical_group: technical, + governance_group: governance, +} + +pub type ConstitutionalCommitteeThresholds { + default: Rational, + under_no_confidence: Rational, +} + +/// The linear coefficient that intervenes in the transaction fee calculation. +/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. +pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 0, into_int) +} + +/// The constant factor that intervenes in the transaction fee calculation. It is +/// a flat cost of lovelace that is added to every fee calculation. +pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 1, into_int) +} + +/// The maximum size of a serialized block body, expressed in bytes. +pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 2, into_int) +} + +/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. +pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 3, into_int) +} + +/// The maximum size of a serialized block header, expressed in bytes. 
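+///
+/// For illustration, a guardrails-style check on a proposed update could look
+/// like this (a sketch; `update: ProtocolParametersUpdate` is assumed, and the
+/// `1_100` bound is an arbitrary example, not a ledger rule):
+///
+/// ```aiken
+/// when max_block_header_size(update) is {
+///   Some(new_size) -> new_size <= 1_100
+///   None -> True
+/// }
+/// ```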
+pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 4, into_int) +} + +/// The required deposit amount when registering stake credentials, expressed in Lovelace. +pub fn stake_credential_deposit( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 5, into_int) +} + +/// The required deposit amount when registering a stake pool, expressed in Lovelace. +pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 6, into_int) +} + +/// The maximum number of epoch in the future allowed for a stake pool retirement to be scheduled. +pub fn stake_pool_retirement_horizon( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 7, into_int) +} + +/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_. +pub fn desired_number_of_stake_pools( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 8, into_int) +} + +/// A parameter controlling the influence of an pool owner's pledge on the rewards. Also known as _'a0'_. +pub fn stake_pool_pledge_influence( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 9, into_rational) +} + +/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentivies model. Also known as _'ρ'_. +pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 10, into_rational) +} + +/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_. +pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 11, into_rational) +} + +/// Minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace. +pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 16, into_int) +} + +/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per Byte, and is also known as the 'coins per utxo byte' parameter. +pub fn min_utxo_deposit_coefficient( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 17, into_int) +} + +/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version. +pub fn cost_models(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 18, identity) +} + +/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts. +pub fn script_execution_prices( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 19, into_script_execution_prices) +} + +/// The maximum execution units allowed for a single transaction. +pub fn max_transaction_execution_units( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 20, into_execution_units) +} + +/// The maximum execution units allowed for a single block. +pub fn max_block_execution_units( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 21, into_execution_units) +} + +/// The maximum size of a serialized value in a transaction output. This effectively limits +/// the maximum kinds of assets that can be sent in a single output. 
It is expressed in bytes.
+pub fn max_value_size(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 22, into_int)
+}
+
+/// The scaling factor applied to the transaction cost for defining the minimum collateral
+/// amount. It is expressed in percent points (so 100 = 100%).
+pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 23, into_int)
+}
+
+/// The maximum number of collateral inputs allowed in the transaction.
+pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 24, into_int)
+}
+
+/// The various governance voting thresholds pertaining to stake pool operators.
+pub fn stake_pool_operator_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<StakePoolOperatorVotingThresholds> {
+  get_protocol_param(self.inner, 25, into_spo_voting_thresholds)
+}
+
+/// The various governance voting thresholds pertaining to delegate representatives
+/// (a.k.a. DReps).
+pub fn delegate_representative_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<DelegateRepresentativeVotingThresholds> {
+  get_protocol_param(self.inner, 26, into_drep_voting_thresholds)
+}
+
+/// The minimum number of members in the constitutional committee. Any update of the committee
+/// must leave at least this number of members.
+pub fn min_constitutional_committee_size(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 27, into_int)
+}
+
+/// The maximum mandate length of a constitutional committee member, expressed in number of epochs.
+pub fn max_constitutional_committee_mandate(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 28, into_int)
+}
+
+/// The lifetime of any governance proposal. An action that hasn't been approved beyond that
+/// period is considered inactive and discarded. It is expressed in number of epochs.
+pub fn governance_proposal_lifetime(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 29, into_int)
+}
+
+/// The required deposit amount for governance proposal procedures, expressed in Lovelace.
+pub fn governance_proposal_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 30, into_int)
+}
+
+/// The required deposit amount when registering as a delegate representative, expressed in
+/// Lovelace.
+pub fn delegate_representative_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 31, into_int)
+}
+
+/// The maximum number of epochs that a delegate representative can stay inactive (i.e. no
+/// voting) without becoming _inactive_ and removed from thresholds calculations.
+pub fn delegate_representative_max_idle_time(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 32, into_int)
+}
+
+/// The base tier fee coefficient for reference scripts. Reference scripts get increasingly
+/// more expensive every ~24KB; the base coefficient is a multiplying factor which grows
+/// exponentially with each tier.
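+///
+/// As an illustrative sketch, a rule might cap a proposed factor (assuming
+/// `update: ProtocolParametersUpdate` is the proposed update; the `50` ceiling
+/// below is an arbitrary example, not a ledger constant):
+///
+/// ```aiken
+/// when reference_scripts_tier_fee_initial_factor(update) is {
+///   Some(factor) -> rational.compare(factor, rational.from_int(50)) != Greater
+///   None -> True
+/// }
+/// ```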
+pub fn reference_scripts_tier_fee_initial_factor( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 33, into_rational) +} + +// Internals ------------------------------------------------------------------- + +type ProtocolParametersIndex = + Int + +fn get_protocol_param( + self: Pairs, + ix: ProtocolParametersIndex, + into: fn(Data) -> a, +) -> Option { + when self is { + [] -> None + [Pair(jx, param), ..tail] -> + if ix == jx { + Some(into(param)) + } else { + get_protocol_param(tail, ix, into) + } + } +} + +fn into_int(param: Data) -> Int { + expect param: Int = param + param +} + +fn into_rational(param: Data) -> Rational { + expect [numerator, denominator]: List = param + expect Some(r) = rational.new(numerator, denominator) + r +} + +fn into_execution_units(param: Data) -> ExecutionUnits { + expect [memory, cpu]: List = param + ExecutionUnits { memory, cpu } +} + +fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { + expect [memory, cpu]: List = param + let memory = into_rational(memory) + let cpu = into_rational(cpu) + ScriptExecutionPrices { memory, cpu } +} + +fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, hard_fork, + protocol_parameters_security_group, + ]: List = param + + StakePoolOperatorVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: into_rational(protocol_parameters_security_group), + network_group: Void, + economic_group: Void, + technical_group: Void, + governance_group: Void, + }, + } +} + +fn into_drep_voting_thresholds( + param: Data, +) -> DelegateRepresentativeVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, constitution, hard_fork, + protocol_parameters_network_group, protocol_parameters_economic_group, + protocol_parameters_technical_group, protocol_parameters_governance_group, + treasury_withdrawal, + ]: List = param + + DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + constitution: into_rational(constitution), + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: Void, + network_group: into_rational(protocol_parameters_network_group), + economic_group: into_rational(protocol_parameters_economic_group), + technical_group: into_rational(protocol_parameters_technical_group), + governance_group: into_rational(protocol_parameters_governance_group), + }, + treasury_withdrawal: into_rational(treasury_withdrawal), + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak new file mode 100644 index 00000000..e723e2d5 --- /dev/null +++ 
b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak @@ -0,0 +1,62 @@ +use aiken/primitive/bytearray +use cardano/address.{Script} +use cardano/address/credential +use cardano/governance.{ + ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, +} + +pub fn compare(left: Voter, right: Voter) -> Ordering { + when left is { + ConstitutionalCommitteeMember(left) -> + when right is { + ConstitutionalCommitteeMember(right) -> credential.compare(left, right) + _ -> Less + } + DelegateRepresentative(left) -> + when right is { + DelegateRepresentative(right) -> credential.compare(left, right) + ConstitutionalCommitteeMember(_) -> Greater + _ -> Less + } + StakePool(left) -> + when right is { + StakePool(right) -> bytearray.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let cc0 = ConstitutionalCommitteeMember(Script("0")) + let cc1 = ConstitutionalCommitteeMember(Script("1")) + + let drep0 = DelegateRepresentative(Script("0")) + let drep1 = DelegateRepresentative(Script("1")) + + let spo0 = StakePool("0") + let spo1 = StakePool("1") + + and { + (compare(cc0, cc0) == Equal)?, + (compare(cc0, cc1) == Less)?, + (compare(cc1, cc0) == Greater)?, + (compare(drep0, drep0) == Equal)?, + (compare(drep0, drep1) == Less)?, + (compare(drep1, drep0) == Greater)?, + (compare(spo0, spo0) == Equal)?, + (compare(spo0, spo1) == Less)?, + (compare(spo1, spo0) == Greater)?, + (compare(cc0, drep0) == Less)?, + (compare(cc0, drep1) == Less)?, + (compare(cc0, spo0) == Less)?, + (compare(cc0, spo1) == Less)?, + (compare(drep0, cc0) == Greater)?, + (compare(drep0, cc1) == Greater)?, + (compare(drep0, spo0) == Less)?, + (compare(drep0, spo1) == Less)?, + (compare(spo0, cc0) == Greater)?, + (compare(spo0, cc1) == Greater)?, + (compare(spo0, drep0) == Greater)?, + (compare(spo0, drep1) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak new file mode 100644 index 00000000..ff73836a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak @@ -0,0 +1,62 @@ +//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. +//// +//// > [!NOTE] +//// > Unless you know what you're doing, you should prefer using named handlers: +//// > +//// > - `mint` +//// > - `spend` +//// > - `withdraw` +//// > - `publish` +//// > - `vote` +//// > - `propose` + +use aiken/collection.{Index} +use cardano/address.{Credential} +use cardano/assets.{PolicyId} +use cardano/certificate.{Certificate} +use cardano/governance.{ProposalProcedure, Voter} +use cardano/transaction.{OutputReference, Redeemer, Transaction} + +/// A context given to a script by the Cardano ledger when being executed. +/// +/// The context contains information about the entire transaction that contains +/// the script. The transaction may also contain other scripts; to distinguish +/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a +/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what +/// purpose) the transaction is being executed. +pub type ScriptContext { + transaction: Transaction, + redeemer: Redeemer, + info: ScriptInfo, +} + +/// Characterizes the script information. 
The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option`. +pub type ScriptInfo { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Minting(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spending { output: OutputReference, datum: Option } + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdrawing(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publishing { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Voting(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Proposing { at: Index, proposal_procedure: ProposalProcedure } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak new file mode 100644 index 00000000..6511a596 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak @@ -0,0 +1,225 @@ +use aiken/builtin +use aiken/collection.{Index} +use aiken/collection/dict.{Dict} +use aiken/collection/list +use aiken/crypto.{ + Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, +} +use aiken/interval.{Interval} +use aiken/option +use cardano/address.{Address, Credential, Script, VerificationKey} +use cardano/assets.{Lovelace, PolicyId, Value} +use cardano/certificate.{Certificate} +use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} + +pub type TransactionId = + Hash + +/// Characterizes the script purpose. +pub type ScriptPurpose { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Mint(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spend(OutputReference) + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdraw(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publish { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. 
+ Vote(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Propose { at: Index, proposal_procedure: ProposalProcedure } +} + +/// A Cardano `Transaction`, as seen by on-chain scripts. +/// +/// Note that this is a representation of a transaction, and not the 1:1 +/// translation of the transaction as seen by the ledger. In particular, +/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs +/// to bootstrap addresses or just transaction metadata. +pub type Transaction { + inputs: List, + reference_inputs: List, + outputs: List, + fee: Lovelace, + mint: Value, + certificates: List, + /// > [!IMPORTANT] + /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + withdrawals: Pairs, + validity_range: ValidityRange, + extra_signatories: List, + /// > [!IMPORTANT] + /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). + redeemers: Pairs, + datums: Dict, + id: TransactionId, + /// > [!IMPORTANT] + /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + votes: Pairs>, + proposal_procedures: List, + current_treasury_amount: Option, + treasury_donation: Option, +} + +/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. +pub type ValidityRange = + Interval + +/// An `Input` made of an output reference and, the resolved value associated with that output. +pub type Input { + output_reference: OutputReference, + output: Output, +} + +/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` +/// corresponds to the position in the output list of the transaction (identified by its id) +/// that produced that output +pub type OutputReference { + transaction_id: Hash, + output_index: Int, +} + +/// A transaction `Output`, with an address, a value and optional datums and script references. +pub type Output { + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +} + +/// An output `Datum`. +pub type Datum { + NoDatum + /// A datum referenced by its hash digest. + DatumHash(DataHash) + /// A datum completely inlined in the output. + InlineDatum(Data) +} + +/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is +/// opaque because it is user-defined and it is the script's responsibility to +/// parse it into its expected form. +pub type Redeemer = + Data + +// ## Querying + +/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in +/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own +/// input. +/// +/// ```aiken +/// validator { +/// spend(datum, redeemer, my_output_reference, self) { +/// expect Some(input) = +/// self.inputs +/// |> transaction.find_input(my_output_reference) +/// } +/// } +/// ``` +pub fn find_input( + inputs: List, + output_reference: OutputReference, +) -> Option { + inputs + |> list.find(fn(input) { input.output_reference == output_reference }) +} + +/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for +/// datums in the witness set, and then for inline datums if it doesn't find any in +/// witnesses. +pub fn find_datum( + outputs: List, + datums: Dict, + datum_hash: DataHash, +) -> Option { + datums + |> dict.get(datum_hash) + |> option.or_try( + fn() { + outputs + |> list.filter_map( + fn(output) { + when output.datum is { + InlineDatum(data) -> + if blake2b_256(builtin.serialise_data(data)) == datum_hash { + Some(data) + } else { + None + } + _ -> None + } + }, + ) + |> list.head + }, + ) +} + +/// Find all outputs that are paying into the given script hash, if any. This is useful for +/// contracts running over multiple transactions. +pub fn find_script_outputs( + outputs: List, + script_hash: ScriptHash, +) -> List { + outputs + |> list.filter( + fn(output) { + when output.address.payment_credential is { + Script(addr_script_hash) -> script_hash == addr_script_hash + VerificationKey(_) -> False + } + }, + ) +} + +// ## Testing + +/// A placeholder / empty `Transaction` to serve as a base in a transaction +/// builder. This is particularly useful for constructing test transactions. 
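+///
+/// For instance, a test could start from `placeholder` and override only the
+/// fields it cares about using record-update syntax (a sketch; the fee below
+/// is an arbitrary value):
+///
+/// ```aiken
+/// let tx = Transaction { ..transaction.placeholder, fee: 2_000_000 }
+/// ```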
+/// +/// Every field is empty or null, and we have in particular: +/// +/// ```aiken +/// use aiken/interval +/// +/// transaction.placeholder.id == +/// #"0000000000000000000000000000000000000000000000000000000000000000" +/// +/// transaction.placeholder.validity_range == interval.everything +/// ``` +pub const placeholder: Transaction = + Transaction { + inputs: [], + reference_inputs: [], + outputs: [], + fee: 0, + mint: assets.zero, + certificates: [], + withdrawals: [], + validity_range: interval.everything, + extra_signatories: [], + redeemers: [], + datums: dict.empty, + id: #"0000000000000000000000000000000000000000000000000000000000000000", + votes: [], + proposal_procedures: [], + current_treasury_amount: None, + treasury_donation: None, + } diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak new file mode 100644 index 00000000..70b7550d --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak @@ -0,0 +1,23 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/transaction.{OutputReference} + +pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { + when bytearray.compare(left.transaction_id, right.transaction_id) is { + Equal -> int.compare(left.output_index, right.output_index) + ordering -> ordering + } +} + +test compare_matrix() { + and { + (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, + (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, + (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, + (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, + (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, + (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, + (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, + (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak new file mode 100644 index 00000000..4fef2cbe --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak @@ -0,0 +1,126 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/address.{Script, VerificationKey} +use cardano/address/credential +use cardano/certificate.{RegisterCredential} +use cardano/governance.{NicePoll, ProposalProcedure, StakePool} +use cardano/governance/voter +use cardano/transaction.{ + Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, +} +use cardano/transaction/output_reference + +pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { + when left is { + Mint(left) -> + when right is { + Mint(right) -> bytearray.compare(left, right) + _ -> Less + } + + Spend(left) -> + when right is { + Spend(right) -> output_reference.compare(left, right) + Mint(_) -> Greater + _ -> Less + } + + Withdraw(left) -> + when right is { + Withdraw(right) -> credential.compare(left, right) + Spend(_) | Mint(_) -> Greater + _ -> Less + } + + Publish(left, _) 
-> + when right is { + Publish(right, _) -> int.compare(left, right) + Spend(_) | Mint(_) | Withdraw(_) -> Greater + _ -> Less + } + + Vote(left) -> + when right is { + Vote(right) -> voter.compare(left, right) + Propose(..) -> Less + _ -> Greater + } + + Propose(left, _) -> + when right is { + Propose(right, _) -> int.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let mint0 = Mint("0") + let mint1 = Mint("1") + + let spend0 = Spend(OutputReference("", 0)) + let spend1 = Spend(OutputReference("", 1)) + + let withdraw0 = Withdraw(VerificationKey("0")) + let withdraw1 = Withdraw(VerificationKey("1")) + + let publish0 = Publish(0, RegisterCredential(Script(""), Never)) + let publish1 = Publish(1, RegisterCredential(Script(""), Never)) + + let vote0 = Vote(StakePool("0")) + let vote1 = Vote(StakePool("1")) + + let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) + let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) + + and { + (compare(mint0, mint0) == Equal)?, + (compare(mint0, mint1) == Less)?, + (compare(mint1, mint0) == Greater)?, + (compare(mint0, spend0) == Less)?, + (compare(mint0, withdraw0) == Less)?, + (compare(mint0, publish0) == Less)?, + (compare(mint0, vote0) == Less)?, + (compare(mint0, propose0) == Less)?, + (compare(spend0, spend0) == Equal)?, + (compare(spend0, spend1) == Less)?, + (compare(spend1, spend0) == Greater)?, + (compare(spend0, mint0) == Greater)?, + (compare(spend0, withdraw0) == Less)?, + (compare(spend0, publish0) == Less)?, + (compare(spend0, vote0) == Less)?, + (compare(spend0, propose0) == Less)?, + (compare(withdraw0, withdraw0) == Equal)?, + (compare(withdraw0, withdraw1) == Less)?, + (compare(withdraw1, withdraw0) == Greater)?, + (compare(withdraw0, mint0) == Greater)?, + (compare(withdraw0, spend0) == Greater)?, + (compare(withdraw0, publish0) == Less)?, + (compare(withdraw0, vote0) == Less)?, + (compare(withdraw0, propose0) == Less)?, + (compare(publish0, publish0) == Equal)?, + (compare(publish0, publish1) == Less)?, + (compare(publish1, publish0) == Greater)?, + (compare(publish0, mint0) == Greater)?, + (compare(publish0, spend0) == Greater)?, + (compare(publish0, withdraw0) == Greater)?, + (compare(publish0, vote0) == Less)?, + (compare(publish0, propose0) == Less)?, + (compare(vote0, vote0) == Equal)?, + (compare(vote0, vote1) == Less)?, + (compare(vote1, vote0) == Greater)?, + (compare(vote0, mint0) == Greater)?, + (compare(vote0, spend0) == Greater)?, + (compare(vote0, withdraw0) == Greater)?, + (compare(vote0, publish0) == Greater)?, + (compare(vote0, propose0) == Less)?, + (compare(propose0, propose0) == Equal)?, + (compare(propose0, propose1) == Less)?, + (compare(propose1, propose0) == Greater)?, + (compare(propose0, mint0) == Greater)?, + (compare(propose0, spend0) == Greater)?, + (compare(propose0, withdraw0) == Greater)?, + (compare(propose0, publish0) == Greater)?, + (compare(propose0, vote0) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/packages.toml b/src/components/multisig/aiken-crowdfund/build/packages/packages.toml new file mode 100644 index 00000000..a0f1cf2a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/packages.toml @@ -0,0 +1,9 @@ +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[packages]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml new file mode 100644 index 00000000..89cc4e58 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml @@ -0,0 +1,50 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Build CI + +on: + # Runs on pushes targeting the default branch + push: + branches: ["main"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Pages + uses: actions/configure-pages@v5 + + - uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + - run: aiken fmt --check + - run: aiken check -D + - run: aiken docs + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + # Upload entire repository + path: "./docs" + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml new file mode 100644 index 00000000..db43ff65 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml @@ -0,0 +1,80 @@ +name: Auto Release + +on: + pull_request: + types: + - closed + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + - run: aiken fmt --check + - run: aiken check -D + - run: aiken docs + + check-version: + runs-on: ubuntu-latest + if: github.event.pull_request.merged == true + outputs: + version-updated: ${{ steps.compare-versions.outputs.version-updated }} + version: ${{ steps.compare-versions.outputs.version }} + steps: + - name: Checkout main branch at commit before merge + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.base.sha }} + + - name: Get package version from main branch before merge + id: pre-merge-version + run: | + PRE_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') + echo "pre_merge_version=$PRE_MERGE_VERSION" >> "$GITHUB_OUTPUT" + + - name: Checkout main branch at commit after merge + uses: actions/checkout@v4 + with: + ref: "main" + + - name: Get package version from main branch after merge + id: post-merge-version + run: | + POST_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') + echo "post_merge_version=$POST_MERGE_VERSION" >> "$GITHUB_OUTPUT" + + - name: Compare versions + id: compare-versions + run: | + if [[ "${{ steps.pre-merge-version.outputs.pre_merge_version }}" != "${{ 
steps.post-merge-version.outputs.post_merge_version }}" ]]; then + echo "version-updated=true" >> "$GITHUB_OUTPUT" + echo "version=${{ steps.post-merge-version.outputs.post_merge_version }}" >> "$GITHUB_OUTPUT" + else + echo "version-updated=false" >> "$GITHUB_OUTPUT" + fi + + release: + needs: [build, check-version] + if: needs.check-version.outputs.version-updated == 'true' + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Create a Release in a GitHub Action + uses: comnoco/create-release-action@v2.0.5 + with: + tag_name: ${{ needs.check-version.outputs.version }} + release_name: ${{ needs.check-version.outputs.version }} + draft: false + prerelease: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore new file mode 100644 index 00000000..7b31be95 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore @@ -0,0 +1,16 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +docs \ No newline at end of file diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md new file mode 100644 index 00000000..7abc3ead --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md @@ -0,0 +1,136 @@ +
+
+

+# Aiken Vodka - Library for Aiken Development
+ +[![Licence](https://img.shields.io/github/license/sidan-lab/vodka)](https://github.com/sidan-lab/vodka/blob/main/LICENSE) +[![Continuous Integration](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml/badge.svg?branch=main)](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml) +[![Twitter/X](https://img.shields.io/badge/Follow%20us-@sidan__lab-blue?logo=x)](https://x.com/sidan_lab) + +
+
+
+Vodka is a library built for [Aiken](https://aiken-lang.org/) development. It offers:
+
+1. [Cocktail](https://sidan-lab.github.io/vodka/cocktail.html) - Validation utilities for writing on-chain code in Aiken
+2. [Mocktail](https://sidan-lab.github.io/vodka/mocktail.html) - Unit-test utilities for easily building mock values in unit tests
+
+## Start mixing
+
+Simply run
+
+```sh
+aiken add sidan-lab/vodka --version 0.1.13
+```
+
+or put the following in your `aiken.toml`:
+
+```toml
+[[dependencies]]
+name = "sidan-lab/vodka"
+version = "0.1.13"
+source = "github"
+```
+
+## Version
+
+The latest version of Vodka supports the latest PlutusV3. If you need an older release compatible with a legacy Aiken version, refer to the table below:
+
+| Vodka      | Aiken Compiler | `aiken-lang/stdlib` |
+| ---------- | -------------- | ------------------- |
+| 0.1.13     | ^v1.1.9        | v2.2.0              |
+| 0.1.6      | ^v1.1.5        | v2.1.0              |
+| 0.0.1-beta | v1.0.29-alpha  | v1.9.0              |
+
+## Vodka is pure and simple
+
+For your transaction:
+
+```rs
+let Transaction { inputs, outputs, extra_signatories, .. } = context.transaction
+```
+
+Locating inputs & outputs:
+
+```rs
+when (inputs_at(inputs, target_address), outputs_at(outputs, target_address)) is {
+  ([only_input], [only_output]) -> ...
+  _ -> False
+}
+```
+
+Checking a required signature:
+
+```rs
+key_signed(extra_signatories, key_hash_required)
+```
+
+A sketch that combines these checks into a single function is included at the end of this README.
+
+## Imports and function groups
+
+All on-chain utility functions can be imported from `cocktail` and are grouped into modules following a `vodka_` naming convention.
+
+```ak
+use cocktail.{}
+```
+
+| Type                                 | Naming Convention            |
+| ------------------------------------ | ---------------------------- |
+| Address                              | `vodka_address.{}`           |
+| Value                                | `vodka_value.{}`             |
+| transaction.extra_signatories        | `vodka_extra_signatories.{}` |
+| transaction.inputs                   | `vodka_inputs.{}`            |
+| transaction.mints                    | `vodka_mints.{}`             |
+| transaction.outputs                  | `vodka_outputs.{}`           |
+| transaction.redeemers                | `vodka_redeemers.{}`         |
+| transaction.validity_range           | `vodka_validity_range.{}`    |
+| ByteArray and Int conversion & utils | `vodka_converter.{}`         |
+
+## Taste it before the vodka cocktail - mocktail can be mixed, blended, and Meshed
+
+Building unit tests with Vodka also shows how you should build the same transaction in [whisky](https://whisky.sidan.io/) and [Mesh](https://meshjs.dev/).
+
+You can taste whether your transaction passes your Aiken contract validation:
+
+```rs
+// Mock transaction
+let mock_tx: Transaction = mocktail_tx()
+  ...
+  |> required_signer_hash(is_key_provided, mock_pub_key_hex(1))
+  |> complete()
+```
+
+Then move on to blend a whisky:
+
+```rs
+let mut tx = MeshTxBuilder::new_core()
+tx.spending_plutus_script_v2()
+  ...
+  .required_signer_hash(key_hash)
+  .complete(None)
+```
+
+Or Mesh:
+
+```ts
+const txBuilder = new MeshTxBuilder();
+await txBuilder
+  ...
+  .requiredSignerHash(keyHash)
+  .complete();
+```
+
+## CIP Support
+
+All CIP-supporting utilities can be imported under `cip`:
+
+```rs
+use cip.{cip68_100}
+
+let reference_token_name = cip68_100(asset_name)
+```
+
+## Documentation
+
+Please refer to the [hosted documentation](https://sidan-lab.github.io/vodka/).
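+
+## Putting it together
+
+As a quick recap, here is a minimal sketch that combines the checks shown above into one function. It is not part of the library: the import paths assume the `aiken-lang/stdlib` v2 layout bundled with this package, and every name other than `inputs_at`, `outputs_at`, and `key_signed` (the function name, parameters, and bindings) is an illustrative assumption.
+
+```ak
+use aiken/crypto.{VerificationKeyHash}
+use cardano/address.{Address}
+use cardano/transaction.{Transaction}
+use cocktail.{inputs_at, key_signed, outputs_at}
+
+// Sketch only: `target_address` and `required_key` would normally come from
+// the surrounding validator's datum or parameters.
+fn single_io_signed(
+  tx: Transaction,
+  target_address: Address,
+  required_key: VerificationKeyHash,
+) -> Bool {
+  let Transaction { inputs, outputs, extra_signatories, .. } = tx
+  // Exactly one input and one output sit at the target address...
+  when (inputs_at(inputs, target_address), outputs_at(outputs, target_address)) is {
+    ([_input], [_output]) ->
+      // ...and the required key has signed the transaction.
+      key_signed(extra_signatories, required_key)
+    _ -> False
+  }
+}
+```
+
+In a real validator such a check would be called from the handler, with the address and key hash supplied by the datum or by script parameters.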
+ +![Alt](https://repobeats.axiom.co/api/embed/54410212b620c3299be792bde8965a3371348895.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock new file mode 100644 index 00000000..31951300 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock @@ -0,0 +1,15 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +requirements = [] +source = "github" + +[etags] diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml new file mode 100644 index 00000000..2f35f2fb --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml @@ -0,0 +1,18 @@ +name = "sidan-lab/vodka" +version = "0.1.13" +compiler = "v1.1.9" +plutus = "v3" +license = "Apache-2.0" +description = "Aiken utils for project 'sidan-lab/vodka" + +[repository] +user = "sidan-lab" +project = "vodka" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[config] diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/assets/logo.png b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/assets/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..2c456908159575c5cef268b16780bba695dff570 GIT binary patch literal 74727 zcmYhicRbZ?{6BsMPGuztnW0p6Wn>+SkU}z&ot4ckdmM6y5<)4vZXvQ|k8?^H38jo< zos*e)PT?FJ=e&Q{>GS!1zrR0vxc|AYbB)({j@SF4nTY}Cq2q^OFc_zip`JMm#sdD6 z1$KZ9{K1IG#DhNv0}O3~U@*}S%ztpduff_dxMa4Go~}h$-r96%o@~E8fo^IUI<#?6 z^(?#50V63^V+8AA36FG^7q09+ygo=aA&y44i;pO7JT?U$f4wFg$Fk}4_&{UQsU+Da zIB}gzyDN*4!`<1-8xf3O*+DAT)~YtDa&|i%t2?M`exa7v$Cozbva>@+*Jn#|MUm9|$_`xw+i3y&oA_C`eR6@yGV# zxB}8wXd*;;qMvXm{CVu8v}4otPC~v71qd>yl%H%N}uxcZmbX3 z%*EzGI-@Io-Q6h!-|m|l9+P*+iC#nA(~VhCM=HK{j$!{aPB@JE@{9N;Ju4_TEEs~w z4255j2adc5mqtiPNX+(#;BLnDAVbaVYY4i*zZq&o`!e|rg4-Wx`4cT%S>kxgeenUw z)2x;{PTbAU&l-|Vhy{H^Rd5>aS>Kp#Y~qr#JItjVN<6-El!xbFHUb6~-Cj4d|AEd8b08X7br0j4Js}AL%%MynIJU ziUQ5CG}pfTQBtT@TfUN_uwb{#W=+FJ`p17xUZ2*;_;<{9&cR>K-&B@cXihZb#j4tA zzN28xi~4XF_<@y647>$?Puk3FtB}22)e_G*V_%6qoYN^*hkTS_u=F7OUa+(O3}ND| zniJ>iGMUVOy+>QIS7kAI#T(lX7f!f@^}b)V@6&quGBzp6M}ZZlr{|i>@#gBbBa3F{ z%6+H|=U8-G$N#!WC8A*&5Ao4**z;3Q#3n!fs^gWT;id$p)h` zV{x`e`a8OtRWEulUjB;?PDNtUuIlMY*&Tufd11NDsNsKUj{`~f zUJ%Y0#}F>gaJA=q!7qcWNO=Wgp5Z>VtZ*Bm$8i)3B z9tUBOe*>nYr*|jlVAw(O3EcNU&E%C?=;@m}qNx6MvqsFNQr$RDJEyu5*lz7l^0=By zo-waamiq9&J+9fb7rHCjr)r%ZcU|0cU)ph53S%cn;;P}?A+*zadU`82nJZt)LXdPD z_8126p`y=!2-AcoI(UfOTK2ufxvoG5LF90Hgy?R8KR!RY;EC{@{W6Fvw)yW@Fd!}I$ZGe!;$B!cL0ghfe>w9^~jg~SP7J{aqf zn?ZMMKx>p84p5sDwkhbk>vK^(q`0$1CpWTj=Z!tIiz@b-;rr*v zUgb`>>Y`Y1%Pg0eD=%ll3%H{+NAw}~vUB@ym;XqdyWaxcsN5F|%T=_o`#NQJ>#Mql zz``lP)FX;VZB`QEzhvvKj0>t|o^o@SYtr~q`m~t#v~QiQ8i9qk^X#ye$KslcPb4QL z$-ZX3<-I<$XPa1ou-$orY1PPZ-rLR>@`}-qRsvBubjQXPr6&Ta_Il!H4rRno{R4B$ zX_hcv3-cg;&X23NS-FT0qBU0nWLbhT-3bFrPyi>3|IoFh>fIaG8s%S(X zgVwU+f7yb6G!CV@!Zjpdcm%~YC)^j{no;C@zrjdb;HjT<(A;!>hVG(IPV;M7B{$E2 z0Ddw)Qz3TDY}%RlJPzXI@a8ho)0-D&ChEt;QnP2<73Ky_cn*i8Z4T|T(mBg8?G%F zc8092Lw!3EY7_*;ub=v{1{rzug#C98r)Cfl=JP#Xa<%S9N@yrp8@Iq8@>%M-{jBgQ zg=-_dl_xYO-`7hMIuGtBmwO`6?2o;;Ex_6df7k9kSuP97#B#G0jIQ3jK!u*F_|Zz& 
zFsgJxq~|Q`+Ooi!h?KCfxoftJ!rIfKWQ}4$*TGYx#o?Z(hnk?RzOkhn(>BD=I59+w zP}oa+JP4BO#mqR&b1U=vMs>Cb+7mk;+F#kr2>oGq!SqW+qXZ4@qOHqp*?*3p`ZU;Y6x?(QoDBJ#d%PxH49e!DGL_KY>cBLN8=z?&Gv3H5GY-L z1LdX|d(kdAI{J>tLoH?fms5Rqoo%+M@*7#;&VLAG z{HIoHh4a+Fd0g+Lyzx`oIa~gKC9N$&SVH2+DQ0jqP9~T6p((X_XGm!pL6cvPxkE1D z9mue96Ym%F!ljQ#;TrM+!s{p3L?)tCgFY3ryTv#+y^H@+tRHvgbIbcjlg%KakCiJU z8SG>sTzaDOqa;IDW;erYE5Njm?@Z#(?}>bV^puV+PFchHZ%m=pj|5UMqTbFb#D4Hm6oYVHep7~`REjwdCV<*`#7qAgi9p-u+;q5b=R9t^K< zWR=B96{5AAR%&tRf~bxJuSYm4(%(C92iydVc@rwuY*A`tRttW5Q>3TzlgCwckUeS2 zA0wkYvi%tnd7;|fcN_lnWW0CeL0X~_xCL2x$J3k1ZTcCnUx#aNZorAS>|D|x{%f6` zY&&Qi6^@13Z9{kU^j`ZjTbfS+6|OD1(}ruSXxp^HT5t|;+ab9xKn(+) z`}13|^vnJ4)8#@!eF7&R{1di($rF{`|L22QmT{z_MJQuOk;q1Nj4RZ?$pSX~hK*8nbpb(Y}T`LNKFsM|qFE-^1sn-q8sj$`cRkEZds$A5pTX-Py+_Cc(hk~Q* z^{mzkSyNeou+~Gwipe;P+U}k&v9#(&PazGzk6)&6lK{p%I*e z;F?Od^axcRLkgzgLf{i81DEq*JGXDos0DmlNdEMzF}Ya8RsPJOwfh>VyzPv>u`vY1 zz9UZ@Sxqqnjf>fK4f7amBkMHnl6ui+bdn`U5Y+Goi`FjqAQdR(wE1D&psIMIJFefq z?N+LPo5hR#FK!^r@@%%#32af_^Q#RwDgq6+`>kjJno81oB(qS|mBI)d?eHC*JVG*5 zImjkM6RRM03x-iJSs&HwzGT-)Mb1am1(7aihwt20|IB_~`1PPoTZr@!suLB)J2H)Q z_!0F~LSmSe83P}vCvY-xsz~T)74l$e3%I5l zi8}?5y0W9enzC2Vi@FY;^e*62Z_e~OG)#hHjgd9M+};|_T!pKl4my^|21)Oc4ksqhT^<8yz~@_uR(HRw5M z0fn7Oc#fdL2nY8MkWJ8OI(m9hH(0@a-V$Yx;X*)c6Ao3)=zeQ zY>j%-QhXxgBK##gZV8D&>}!(Ky}9i5^lraoJ~|fEfiuKl9Dz+wGyjbGNSr2iIl%@G^@7#B&M!+H4hZu>QMI2IgMWMmmozl4@*~k4`)9}tI_5!1pmAj~^JjWbwmf58tmyVM#!dc-Py3R6)3%z;u0|lKPaKaVkh2^*3{`>L2*zdJaA#3}w zof`<7qcRCTHC7C&bb?Zq9v+gpc4DCH2Ezh9G-ya~-Qr8EyxZV3YxZUQyjXw@Kw^Z?utE3q=q_TJdrAWnE#tqONA1KviUd#(xj~tQl%R*3tvPbq-xdsS3J10o1#V(dUF=h`Q)pF zx`YIe89?%`T4J30$I10L_7b$u!hg!-akr$^7Ut$$ zM-w%v2f_E~=hxH_)F7VE@%}|in6%cx9l!ENu|ri#*Lcz^nHZ$kYX-GrM6|@AsG3=~ z_XfEr`azIn(=r}odP&G7LyfJhJ~A<6$(QP}`19@Z*W1rM%Er>Qkvk`dO>w4M_?o2N z5_9I}YNe5A*!~G}nSF3;B_30`?-;+xs22bF3_>zAsLJp1mJBx&eT(JPS+ zj&fTz`Mo^bSBHqNsVIkofChZHf;`594`Al&mk?EQb(|`^u?2%Qpg)z4hVG(5qnH%OFW60o0gWG+C)>-ikBGct5#e+&`BtvVLup z?ZwDa4!AVcT_Ic{g1~0yr|1LNU=j1y4*1~S-= zKt;X3?Xvmo`1s0L+e22&Qpj!20N2o&2!D66&f9Y)@VBgRSZCf!D%`Qo>pN)3CQQx~ zrcDmQnIeZAf=jJT+L74a@)qNv5BW4gQXM37zb9Aj*8SX7;79>KI!M4LorrmGjF;$Glp*x zO!wUmExURd`)+WsGVcp2kI$zjxio*3!skapCoGxEKuzLA>}KcvqZ-IW86#4-BB=OI zSsET!K-WC>u9o8D-znz6wa-evj@xLhOiKF5JUL!PjAQ>8xiGFrcB-z6cROW>D@2Qm zVfO?_razGAbsiIb8uUaeRd*%hZDa_wlIkujjQ=@^d-k0bAtZZ1ZO9t#|MIq?nVzdR z6NF68nh@LK9FY#guTOx?$g6`KK8oZWl%ZhO*cb8FXv z5|%Gvj+)skS#jv0EkOQainIn%%cyFLr_5*N4{1Za%z3w|7HWWnWED=lccXV%Pf38byRo^A1F^Z zWJnD}pBs1xr~ibWraQ%7Dbq<2nW9v;b?0vLPpqyt2e;umkQ!nA9oa2`PKg@Gr3zp5 znTw>f@^95_j6AP=P?%GgEY_zSg9HipDw_MXw~}3EW90jvOS9->pBafV@7PyI->JRH zk1INTokBs?ZN_LGj>ZYz9Y12{E!k7(SJAi zhZX0DC{(14O=i=%{TlY<^~rWBM}AL1#M3lKkM9nXb~9ny*;tK$`3HjSyED3bz1yzM znXUieTVb4UVI>9Z`)uWGhtrdo8K?AOa2s-Pccqu!X-<0R{5a(_4fAM`L0eTwz~)y9 zx*QI@(Yzf>nsT*=8SkZ{>fZynP?WohsJJlNU3daIy5mB8a9!vKk7q9EyhmEG@O{Ch z#l6CYd_1JGm;6(LZG0cx-sPm?0sI9j!2=WgfNW>AieR=TZhPa9nGnLu)5hm#Y-;3V zOF_(F*+A`Auo9|GGDI`4&&-}BMqr1BSmh>!3h-^T05!TPy>=#P4I@6}=6!*Rt)0=1 zkBF)M_n1yj1-p~q+GM^eHi(!Yd46izrdZ#fREl8*^@+og^HI_cGx7fYRTMjhtiI*D zx&DRGb9n3Sz%lLTuk7#Ye_t-~fZBZOSBIgKK@dM)P%SmloxB*y7D5WBQzOb!{I1KPBh`dQM>|yTjYZN<~9oJUX+STaLzFPunjgl)g-GTvng`^wQH8M2kKyM%j zsy!NZ^^aK+@AyYjaD&6nbKt+gAx>*+=pNO7}wh4hGm+wAlIoFVPTexN+k#Yv- z2rnGRZYpL-knK6eM2C`@<`UJ~nf) z_lQAO>-Qd(J+(5WL!f&50hIKDf_9F0`7o(|fH2N`qmcJPe&OXS=dqwT9{S z-COiy^aSOmsN8jgT~7MWyuhP|X0?9jxoVDuobwQE&*7K}!DLBiMLn5nz3ybd{hI{G zAT;0RZs_R^GMn7qA2{gFg%(Y5;-*iB%s@kTc8fKAbb>~>g89twH||*vCVaY_GjoFY zImMOSNMmb>|1BO=T7taRm^U4pHl)!L^yci0wnzRnlOiz}Bro9J#+8nsS8r0#^D4Hl 
zfK=eiHdY2u^u*)43(&KiSBDUG)AeosEum&v#Q^!0i|Zu%f9Kf2#F5skjCs^cbVcS1JMB9VE(DEEYy7kpn3UpoC~Cg5C5)(?B1;u#|>-KE#TLp z;Ulw6IY-3*M;UGf$Aeh4Tf%RG!-%*+ZkEhv{FB0?vv4JMV9oucL61ARB$VSzfNOV~ zh6g{JkJ7JuZ;O`9MT8)e$oz*%Q_S{r?`Qv;tBk^bR!WskDR-I`%_)S!!Fni8^LeN5 zDj=zutd7-lw?wIbR3>R{p}37ymkTQRT#_IVvJKke!$VPO^>Zs|2RC819R*zIn9n6W zS2yNc=9&|GVlkl`syQ@*M^NS6^;w-ExAu0s{ZPCPn$Ef#fjUpc>gLM${1RT{_*;@Y zJ)Ed%Yj-^Y>YkaD!%8|2??e-q*MrYWOfs>6XMcT}U+7jgBjWJGDIxJ! zD)kLxCHLSWhxUEPilNU810y_i>p11RqYxfS0edbstcR|vp)zd|r~Jd0#6iOa$NlHe zq{&+tOdbaD?pUt}%SudM1zY>QPC%g7BzB_kNwPNLf~z57p|=Eyng4xepk@|*oI5C= z8KGsgF~+sSI0v+0LYsykqszbgkkg7I42`ra$G=YMz4*THWwPOg4Ok3(=ko*GkwFHn zkVbgKq|L8D6^EIIz%G+7Gl}1Cnc<-?tqagnjMhN}Y&n;3(XfhivS>OkNQ)UEh>?4( zOHHbv9iC!>Wo{CCyXem816!{fk?eo;NQ9PFozA=!oO8YL3Tk(ykK)}L|lMQelBR*Do^Gu!Zb=eS$15%SGD{t$EcIIQ( z1e)P^U$o>_`c zbv|5C!N@&wU4RbAU%JzKs}R(;=YEZ{G(v((LI1WorPZ_ASANW&0yLw+_u}~`sNUgK zR9{#qei}YO9Rb?PYf{|*f=_kXkwc#xga0J@x_(E|J?2@1qYhh+{%rBU@EB9Dd~qHXlZTBdXf~nEC&*q#)72n|9?UbJ zSu=j&fSG!RDxu8<5SL#cQf-$58!T910Yc=rID%~{I}hHExw&#}{+(drpkHgU6iw{C zz9Glvh$*$W1#)P`*R?>E?(YGuvZfHm68cqxxkAnv+{ox$E{M}QKJnXc^wLS(-a1{H zNQe-9kmPe3yrwH3fn^`p?&udxA=C>bi1oAfG-^LzS6M5?{;GotST6dwL1X7Q5%Uj> zpp}9+!PJ4}Wmv)3H5(UfHA_sLyQ)N;KJ$!jF)z+1(;H0j5MCst?tifV#uOw_(BF(a zb9)yv&6thpFg=8(VZ3Zn8pxj0s>``EEpy!u1WE619IWXyWo5vc+n6uf%v>%!B-Rn` zP4bor3yX%B_d4TBKX` z*>yV3@+(FzHq!wH1e&$&5Z-iOreOHxk45<>hIR$5>KVOo+F817OLl4vBl#I{(TWym z`oc^Fw%L)Cxi`cZQ?6tgN56}2nrgc#aJisZR0h7%w8>KLvSSc={9%&MVQ_Er6;vcc zAS}&I=q!jfjxPW8`;rML624n=9;891@8E;&BcB<_n1qJ5x#rI295XQlaZ}#U7vR== z>($*cY+-!lCa;h)5<<)ha&b8`jsW)~mC|x-aI79zK;$+BckV5Bei&f%knI|k96@tc z5%M2(yr=u9C8Z}9@KVBtiKl35*2c`HlZvQo)w|K}YAdF!FZOiR9eWJ)E|q{2U!!Xg zORc8W-mGB~nLs;-hWWo&*z{EgZO^i+own7&F}?9}zSE#?tuyia?+vGzH_eGli@C9+ zU!Ar*2h%~vLRf|n(c2pTs?|ZS99>7Iip6+bA)#+m5Z`GU)(`WoV+QU4b!cwzrU)Qy zZcl(NUzb=dJ;#n*iq^%o3P&9z6LOoRBx;#WxikN)MopdX0Hf)|o1STq1{Jp0{2y1A z|1W={N1N*J+3hXTW-)udIXW3mgb0FoOAIK#NNf|zXQyRr8CQ*XSokf;@G*!KydN6A z-_?nF^$c2KRyTT*lgI;skI~=CnRxmWJ)-ajhC*oa92?ZlQsAk=fsmqrI&O>Qsn~ND9;xeVEEPagBn%rxid1-#W#q1 zEmHj1dLDot-K{AZ+g5stR01_fu%Ffaz?gLDy)C;vKh5#o6_Ix>?Y}wu7z%y88mPAH zIr2CcGpM_6xGwLC>zG=?U};DP^**6w* ziC;{FEztc-|Mjz@oxvijQzB2N)C}4D+c0KQ2i=a~)@Z!CN%rNoP)wMAJWu){^YB_T z`DZ7f9|b*uA41%rNw5U3zxbo8hX{B2rr1=ej(A8tmol;Vr@Kj#h8Z0+>608A1~D`I z+r%o%qwqo1TbTQf8}q3783Cl-D1))v=Rnsa6a}Yux2r-|zL1a+^u{B=QtE<&zDuQ*-&}L)plRrG!K= zg!Yhsro@rs%ylMF2&{2eB;za%`w9p=C(Fk)$X@aIT<-v;y~)%Bjn zoI5N2m+K(T^OV8&gSfzZK6Opgs;4VYqFYWoDb&W|H6RaSf6fINiJw3rb3Gq@fy}Ob z+?;~BtYHD@n=g=du3%E?A(5{hPtZz_>#JL!^SOaFppb+}GS&AAfiJg?*7PgM-JK=C z9r5ibo`WeMO>>tdasPysiWZIV?eHD2F=icIY?*qphFz*c#Sc$W z19I$J3KrWmno1BXpv#c`;}rad-~Hv%=>vasaSD50J~HxZ?!Wc}Q5oMXDcIF~+PZ&n z$y54sR>1x*1KH#0Ekjw#I+c6v=huSUM*}K1CMprI9Tj474)uTNu4_=#Ve)D;_nlg> zA%AaiRx=8ks1nb6t-H0@y%l7GZm!>kDv<8cO(LlHFG_<(gb4q|JjtQq1mDJ{O$-Y| zH*-;6N|+hy3Dlm=PuN9<|7kbK^x+8_X^`IKlAEG>yD=P)Tn-EY_;Et`=@5uc@9%}i-27wxYg|`1agsI^pfa~>1T>I1#>fApVuhw9%%mc~u0F1_a>ewA z76Al)Hxb;(5g;3;FrL!6o91rSLAIOWGk=izD*l|WAJ+S0F;9Rz_1n*?5_Q;#4`$JH zV1FvdKFJUUV(Q^o&@c5(N1Cf49(pqA^PmoT-?-fNz1w99$OF&S1)c(>`Flu~`lI_> zKyS5Jg*Q8hZai8*pYS*lY8 zprr$(At9Bv)(MSim#jepB>zg3VpvanyoB+Y%H_^OPMm1AsH?Q;q$H-v_;G1EmVlJM zH{rcLjB$gS1_4!^Jg(+_4fw>qY?*ZbuO-hp>8o|C`MP~b>*2MZ-hE-zkCkxiMdupt ztzC5Nb7j&Ey;i`A4edlx`|<%TDd#gPn~5)U!2*D+bBAt-iB9^of2$klidc8)WuOGj z-V}qab%Rp-@WDrQ8qfpg@80d!KpF^ACN$98?eB7{^gg0M3eN^glTkRAS96&fZV=5! 
z{`kfsMzaZjH=e?(_^%oh{k{Hf{*+$$t&mxuQ?Bm;Za1h?AVq)d!ihcDUX!2xzh^vA z{COWOII68!E!CSj!3F9Q+W?(9JdouEsY`nULFne?J~ECzVM(!~$EE@m_lR2B7& z3QD%w#(zf6%7YORjn-9jy-^_Q8;%3PTk7i}3Io*nmo=7B?YotxG zTOs{Az$}I@IuSbtzM*yG7oA}E>n{OHS;KHJ{hB9q|B+K36{?`7&1H&SqCisK?{8-S zD`V~Mm$1q**mO0k;&vR)^a9CVFA6juDa}GuR6kxUMD%y{$<*d>7JO#q07HSVH)uKc z{}^AjW7i{ngIcy7Xe#my)r&N=GC1+K+P;nG0JD-+6V?}v-|rr7pzK{BYUJFLkzjy6 zc_b^1wG)~$ZIh=Hw$`v7z<(H^rxiP+>H5#%T2^g?O`4nP$FMa&OCH53)Ol^pdM>qs;Hk_NsDpe# z(i-31#d>F!!6*f&^BTu<1FbfzU=1UtT>IXG6`oQObRbPgaiqZ=rY${cfr@{)i~NlK z*C~{WUBgsokAgIl9bUsc8Cnp0~n1*69H=5yc>N3;XQhVPvli`)T8zHW32fOJ_$c zti7}_J;~4>RE{l@b*#sieege^d~JKiulzrzV`>}!UwdqM3;_yps9(3z{~f~z!$~6u6H!tUb@HIHwNf>SJ^X$^So(TLAm$#w zaDm=-GqTgEvNoKZbL+eQ6Vj$(p)ui3SVvnyO2hd8yUF@^;BM^>zPz{U@PfME3b_#3 z6%Q{wH+&7c%JQf^u3B#LbOLWJ=tdwp69VcXpuuAA*8sQT|3`Qtd3xZ-ZbiSbdDKEm zzi?4_BC2M^0;alI=>6LaK!XmPu@{%Fo)^%EU)xr*ZQN=G4N5oJd8g7o;3Ju+b(4b4 zWnAK!uTiFoB}qOR{xk*I)csHr&fc0ytUG6P+V?r$6P-3^9VZ>(X{470FwbiZG`nUH z{z_ayWd4oKwL9ds=OtC>f~WC|5+eULZkhlcH1R);-wtDW z@88jyyYjaLet6ekBl2r0X+9T%s9JzJ>j+ne5{8P(vfnwBv4F7sf+hnW;L=J%k*Yf@&u7IL3FfS3;b+KVUMkWOGR#Y`C$aB;y=n^xfGppwRSGsTe zYR#TYl}J#Ec(sN;+(QU89CFFJvw^M^CtlrmB}X^>F2?bP-My~2$b1ql{y0a6;JGIz z;)}^H3y{D$ljvrNlh=*kvVt3e+RIXLEW)j>BO z#??u6kkY>pPg}LhIi(GK&tv!@9Y&UcjrlKty^LrT@<|#04(66WMZao541fW#Z*yGp z9I8(l_ah32c)A2Sz$^>yyMS%X%U9ZIsb0Fcw~;E*J6FIk;V4$lX}vQ#&t+EPSOA_kMRMj<7WXy zHZb4)V}5t9iIwA*95Gc61jSS+hb#bD=VMTxo?}L+2Wy_{{clxQ&Bl`6U?f- zWE`7(y@r0Nq@pY`X|mWH_(Y1^-ZVMir=bnOb_*olc^vO*s)3#fXl5S^#n=jJ2f0oZ z>P#Impb+|JFH28)x5qsPHW%O@Nx)SBlWY0RZWcZ#ltJfSNGnbRP|9;?TkK==tPGTMTmUtqPKe!2a|6nXbw z5J@{h=V~vhuiiySDz(c6jO)nB)!+MM7XHE#HpCKBvlyT^&jLaxC-Ri?v9g1^EVEev zvM9M$%!PG4oNd{Xi#<`H7X)eel`O4c-Xdb;I|9x__>O{l=sTtUsAzM}sl=W>HRGZx z#ExyI0*|LSa~Mm=iCzcpy<<7*ah&TtFyIxj6hqVaW^r&0Owz*xID`=fYFx2zaoc@k z0d?y7eUFQ0N3Fw!yqQVrI4}f7EJoHHNy*66pN>FLim{IUpwUjsMI0%i+WQ+h0ud8#@H zeT>0SMm)|R=3k~H#lx&HB@AihV9=u3%VNe~8Q(D4@j!dj zdPnOZ1NEcCU+HlzAbh3|SE9ACKZV1jfgpiq+dzpTL|?Y{Vw);YG~jMniV2&ih!HVE zc*+f2BqFW!&|%GEVO(S%qNl|O+tP88{*ajLjgBELa|$-w&BgeLnr7oN()#vV*m=f_9*U(R91xnxUdP{W#z_ZXY!093 zy+!4H4Z__454l^3*xqrn$j}T>mh)>n8V$O18koehv^@E@aJxoB&%8`lCoXBnHG?t* z2-_#%P6=`RTs!zi*@*-5;HNcbd*i+KPZwjjn-`;>oGUr>dY{0HuD9Srkgh976T74# z$FLK;uHEWSUf%H@0KCIB(@M?h$@^zQM*je$;g7ww@vj4GjjOIstYgWZ=HYZcra)6L z1Y?Bci>%J@hC1gqV3wnsOg;fnEa}qmcgU>YCO1EdtwhG!whPHdb?2x)Z@DnE%DK;1 zGEsJq?qS+=?co04y!VQc#p;2m3uI?p8$8Xn%?)5E(9Q=cG#WpuU%8w)1sP@$VmxmN zT&~;Nf8BF7mamw4hO{Hp3H0(UGJ#y%8Q5})=m$Aox&?m0G| znwCo&Faqc#cqtllulYy_TdH@YN?Ysm`A~wDR8JA#3`(?$Xxl0$j(ZM=lFN_l=w*TG za~;wU65OlozTdA5H1Bn%K&@ zZ|3fJmbayFM2aRFl0lr?42+rQYbI2OFjY(YE&fL;b>ncY=O;(jDI5tObRPbkowqNz z9s@~RoTQ^QcBzbn&L8G-hbg?7)zvRs=|mK=@JhoQqHlgG2wB^rHOLG3X5#>{xzQuj zJJKj{%$LtC?MZ}l6 zYZci~2jV{H+-Lc2nt7$**`iwKUd3`%aCTk7) zqe6q)tU+d%xYQrZlLYgu=2L+!r{%^u-yn9vh$IAIk_s$dz{q<4f!av$ifciDX;(F< zL{G-9LH8Ij&gH`lPb+lxK5bqfGQH`xi(w7R9lOzcs`=19ZhrL~`5gFi!vh`~L6ZA_ zF&>gH7#jp;C9EkK^GYZc?40?r?3QZG!lFT$z-@kft>-3?lPvlz@BCrNy=GE0DISn%(Ehs4ZS*B#)RtUInm?8{_~ zjbCB8`h0S1DME~W|8#)D$4&gI295A{OlM%{c&?IJ7PHmnD?N=)DQetB?%=r$P8<=j zge85UzQQey&1l?bAN6W71P+Mrwt(cD*DpW+N;AE8!%U?99Pz_eqV!A$R!6xMN(|II za>ut^Z$W*8wrpuZYB4Q3tH3x~?(}_6edlXn5Ek$AnK34&ll5Ul$S7h?KG(6+jO&WG z75s~?1ttC;k!E_CjTF9IG0c{rW*i8Q)T0{t`EgY8f*5uE<-wO8O(&WN7elk0D zV4%$3vDNxD9`enjVXY$5Js^=x4TNjz+sKF08t=jzS2*F*Uod;eCst^3ey=I(x%w*1 z@86a>gm+f2A5WT_`APSg zY*Fgux5|lfv;BoIC}~A&x5eT+s}&I6Ft$sK#EQzqf>Wt;=>t`Erbd~mLrzg!D%h0L zSDxUv0FeJ@LY_Pq%{^NYQU@smouUqyXgek?H)v#cPl#ld$vX#O^$0oL>oi704<90615!U0@=WJvC z^kiee+SC5?^qbWP@D&b**!E_FO}y@m!@;N{WOEdO5KWb{r2E{W5Ii<(-JnvoY!|3^ 
z0`NMCeszaKFX>nBSF;o>yy8#)1v;Sl?g^rK(XrcVR^8{QSiT$@sd^wpGz$JWZ|;q( zARlXqEIzR6cJENE%_~CTe>sy-?Hj$DDg=MHqmFXJf<`R8gtIZ7p7wKAd8-A|9$Bdx zy{*NDR5SQz;fk02wjeeH6IXb4-$JA4@WW*AeB@UaZy%{1>0))iAOoRd*lq;zn`(@W z>^G>g*#-A^N-hcTvAWruQ@b&X@mu_rm$>IxcC`ro%qtj^?)%3Yhkkwg)xIOrdUCZK z0`~03z}S?6qSlkdl=zQRYBisn^3TG!t8YxZK*!^1sIblw2L*}T%WxPz$CdG^**VZM`YHcy@|3`|nw9OPH-K?F|C+?6cPrDF!X_;cKFM@~)T z&TZm^x&{~u!R~sESqNC&dM9slo2O)) z*xD&C_gIb29bg+5vB^ND{97_2J>h+K?VSE(h@3jf@aF9L?fB-Lf+NR1ej=U+?_>Q& z{%QeRl*p;Xl#$FEpYlQW7mrN#JP>s!vE6<5X1 z#|VV^6TuL7RZDemW6Pl^ElY7?ZrohB5ZiXk=&L6RHSuPv@O<niV6>*}+q|ni_G6pP|33_L=?4obaE#{Ncd&IuU=S z>u2YnGemmq5S;M=O43)EP$s^8bBukT5^?=BpwIKb0jmgH{G9UR=*eQ6rS+pg(KDM) zpKFZJdiW8jL3k@ic)Q2kU)O~yXn0wP13T0LWzsa$9%al04yp|qJ-To2G$yb%kl8tc z+Vvt{eshdU>j>_al~|fWMhTA8uJn|mA3JYgI7aN^w1ah;WT2tU;?#J_Tswm4BiTx- z&$hO`U$_u@o|zD!mQDWphHKindzy*3XL}jm8(0JOMTTh|ol9VmwI|#peaY4BVoNmP zkpFW*?uDT^Yna%9{fKGj&uu&AEEH${x%&5j222Ql@M5Pc&KyWzcxb zTS>>UwG8pK*0?bh#HFwHv7G~AoS(pSM*nBPB(rA2Ixd2*W$bpA+$>xlqmb9%h#l<_Lc z1W}C$(fzjq!%Fu>{SKtTeHt;jerEjEQkC4vpfK0OXs@qI2KKQlkI~C%|V@HaPb-$g6nfS(q0m zCu+(EyXyy>(DcG@9~Jkly6@t2f`1m~Mm^0udXhio#M{~&$Oa8-=RZs3KR7ak1d>)` zHQZ<2qgwhz#XdU78>#OJM!i|Q_rec#$R25LPvJ&t|F~=_@g7JJji0z@&zuzfbMT{F z;1@cz;}0~vs1mS%(a4^2Ji2)ILgU=eaVh=xRn8GVv3JUr*2J)2^oY0_huVL9OYim; zCob~l^^t5D@X=1S#1Mlfn%gi4!aDQY_|b>!U|yruYpawM?k2+wYhqWKm5CqoAb(x$ zcbZg8YqO(6&fL%5R0EexzLWeWZu|be_8u?-izfTdN$xJb^o@Pi5sUnx(j1AetpcO} z9Nc}uJ23^-*Zl}oyRf*YYwqre-u$(4ieqZZ-#r3BR^E;{@-Pux8fVN`>&yQutDqAs zbdsu@l$<#AZ`kiFb)_4DI+!E18LEl0Fiw#@ab3RmhzoNga{Hs9o4NkZsdH7zL7!+#_i_|{Ou~HwJ`lM$ZeEAk*kqkfesabLNKiHOCpe6jS zt`7bdXbU9~N|yK{e&$=$<%7?3ZFjLJE(iZ433vVM6&6dW5LsBfuf#E`_4D>0HOAwW z574SNm!;n9mL#qXS)I2#DO!5%49yV(>-3{Fz3Gq*o>IG3*YzqG?|NM4oh7UvMJ~Xh zeK6t@(;%2tMljo>!+^GH90l5!=L>oho8sduEF7td&oth~qleg;Ke7Q)a?jl=>DT8?c@MJW`+u)}B9^kRv@Ty@?d-@b&{GG_s?UF#0nO zA;E`>ks~6&;yIP|4D#qRR}-EH3Ps-X4%04&MXfd6mc)pKc@1pn-Dmz-4hBB;jVj*C zoW6cyx?$xkcWrSuEj8g(wk-L%d@I>M;KOEhY}(7#lP{8b!&4R5w0YH5I#)$~eZYsL zQ&46_Fo(hGF-qp^(REn#pKo+8->B-u0zE)lV|_B(Vlf+)%0TeL?PJbO0RpOXUzMn{XK0XxiPRa;x$*!V%Qy9j-KAo1^YN z?lX4o4Iy6Zs%;M&sWe041LZc}gUsq^)XC@AL@%0~^IcB{mUi2(p+ut=Og8`GuV3Sf zJ(ug}gf$$jD24Pe3}@Iea{ubKcE#>9bUh_zYAF`zjv3SCT z9ZSiiPn?g%H*NN~l&g;ONZ`9fy4&3jixmwAoXKDP5I(^JgB22Ry|91%e&QO!Te-cO z!h2d(_Gnh0{ZQUyF=?X5yU7&FbithvG|0n37#Z(@E>2U^dC4B!y|RcvuU)J^oClN54h~N08UEGJkqOa52@sQiaDXU8%db>f>Z1aOSu2j4qh$P z&%&(yV%0Q!B#bm=^7rm7sHKZG@#RP zKGrr!V+^zKh4 z#cGL{sV+MtLn4kcNYnIhT4R{P_+clpR4zwSTBJKz;pC`@3hB46Xp6MgjGP?nlz1Iy z%OoW1 z{j1yoC6`FnhCRAPcYzFY!<$Q`${>adpg@8I%;@t#nIG3f(5fUXPJMW1sjO4xC}6XE zpRk?=3F6(!t~!i=Pz3eisWg8qbQnRm_e*=`Y-XxwgXBxHjaZ3h-~dL8&!?1aw{bb5^fGM=!Bfww{tE9++)&5 z@|R=kuR=bK1x5F_O(aonRf+@kj<20O$jp}XJ3!Ikm6HyJSd`2w=lJymeoUUz`>B38 zKACc6{E>U1UZ)vI(0>1=kYct!mo@0V9*uu?|99g0dwaw+q?A+M zHVJ|K*l?~-mfF+5sJPsHdpjQuS`HkQq##LPiTt4cL{&%P*e#+EJXfz=m31L>cc#gd zUL1vzb>{?6x}+TFS>I-#JYi<3`-50wwB?T_e-$Ewtz! 
z#_@H;F}cCTHaK`qCTmQlwQAk%#uTO4BdHb_FmI7B3QN}>;oZ-c54VTwp|&~D$07Xp z4TChc-7dG=9<}*N?u-IapKj+}$M2oA@@gQ%w*=TF>x2|y&~!WJPmcts`REb1$~p+Y zUmT_e(OSevRS>HH%B9~*MD1WrMiwsB5@H$mDQdbE)787cdVVP{`p7Mb$5{C7V~{`g2wHA03=f*0j8 zt|K^&UDypuHVsQQ^_k9astJoaRVecp*Q%mg*Iy`-uD*tCwN5j187NF7R>tl(FA(d8?nvKrStv=XDmOhZX(w<9;GCDrQ(kSvP5r< zH1-!15o6RMVpFY7@2q=#tDHP_eP&He@)$#j)(D*fB_!j+X!;V)8{iDy_-N{6%J|^HUX;PYH{PmnBu{| z{+I4X;t9~5yrk*yf52aa57G=bi|w`yj-t03vn^S(}X-XGl#xR<-`{;CRjvsnJS&Y`bK=vey8YpC-nV_&dl z)tOxAnbJDfyD8bIuLE7QTm!+s1ql=$xA}@Q$2D&2#C(R6O z@S|BH5fKIUq(%JoxT_H>r!`N>cC=mz?~m>2m91dj2I~CU?Qq-Vo{zz=|3VVs&*2Jn zsY7JMY8~!x)I{pzJi}%XoSc$el*B3GW6H~XtRfn?)1)(~OJTK@Haw~l#ISI%CqqAudnw*Y{wy3!>NsQMIQdxpDBY-I`!r@bCgcq^lp<7FUFV)`;!!Cj z+FDg~QdnyJnAHm%Mrg_54fu3USK$ri0J(EY={99gyK7ov~AZ<*k@pT+zDNlFqWlmGs?{ z*JWFO^i&t3il#)(x{U3&$<)0j&Ud`r6q%n$Uv7)BPSMia()Vljv^cGCs*}-uxcy_f z1-Cv4A7TbA7J=-9q+Ub5@MY*I!|&s z7lJa2VFn5)B#df;E9{}TtXC{EfV)06k30y57^;DK=*8F6``)4#V&A-!kWvl6VU}eO z;lKS;g`12R&X*(!OvO|6ez1)X=1X%K|7y-rJ)6}L)A`Vg0Dg-Zfl+Yd|jP*$WG})P0@US@yLYU8um3WDu>A6$^+h zJcYl>8t(ACCLyn?BvxtOT91&#r5U*0Y{4swE65s|?ahvz@u8IG-lExzHspPWD2W*; zyk++YFnLvq%&DmqT&YuYtIcW6*T-9WF%u5;?_p4M@gvk|)u4ZSHqrU}>eJ8Bk{Sj5 zyDgT+kGBUa>~c#&WI2k-+!7t`D#ciUOgkO-dB*4*=(AMsI~D%6Vea_!m0;*~k85f+ z@n=5zWIM0W<`S;AG&{p7iH(-spF%+G$aCZ>sx3m~J?Y#^#w*6b;h3AcHzQ!srDu0h z0s7y<2#BfSQx-${9vy4HK zaR`3$H5{L-ptJv>;$GZWsZd43#DG9ItI&W@DUyn8{`MMNU@+AVfAQhvZ&oko)0p02 zRTBzB6Ef-dak?wQlXH69kPG>iA0Ox3EU$Wn!sX@MGfk_x+p&BKFwC}8HpC7_zn{m$ ztM#i@5Z2v+wLPh(8D(LfV&K{Dd0mP2$XkGyPW06g)sr1R;?q(LqQOlI_H^hhJJ!Jn=-d+64SH-pGEgYsiB2sG|3Z%gr=vmCXlEK?rQshWM6 zKft#}z61xIk}o8s->~qHXhG!MSscpIKY5=h?>^t`V`6&D6I6F16_MpGM`M7PscEguoE5Q zo**sbcbO7bO}M!&GhtBvQ~9|T$IWqNKp*{_-QVs&^}(YDgfN>$UXW@Kh+5`hD(NTy zNt7-_9|EMK)W(V5PJ*W}sH&@>EmppZBAuw8^(+^qkCG>v=#SqeZx$afv&UP@@e+q9 z&jx=I+<#RmckhnP)bDHupm?spe;CYhN0jnd$YPRO@VaxZ_r}`q1KwKK7T5T9&sq%U zU1^rHFttwZ)#KaG zP%_DQiJk1isIT5TXjk2A21T*GJ0)u6OoCOUeZj8L{^L-`D$*SkT06!M&b;-N&l zd27VlY_J0d0vNd7UFp?dg#|K+Cr3L6#m`2b{#*maf^rs;e1w$o^tvP$llRoREQIr` zoNx`9$yD3TIS=ds&mawOYZB&*h3OVF1(eRKQ^f4v;sJ9b!9$-wXW=`lb#))dhP2if za*PULzT!LA3OB1>OhU<^Ih1D{(ICvGKa8Tz9$E++jAbTn%a{-A^Q`996W-kXr9~qp6{IH|2E^T zT${vC>h{CG>rGXDoi4JC%O9hQl<=msuV}N@1dDClhW;67^?+(ghR78BJNUO`%|LZ= z8SruAKDpHYvC;Jt7ox)a`%#;dIgmoM24WlT2#wNypF?C*t)e!y_SLLVT0AiSSmA$z?}0>=r@|5PcLcGu{v~lN%7y(_qNq@^-<1h1 z1~flv)HjI)aa8E}ShHZ_^B5l%DRJjWk$s}G-_n;n!b!-npj78MoY^R9pAYs&SG%Oe zUM&Flb~7*Oqh{hKVo*a-_}lrhrTd+=1=@z&?!}Vt;02HwTHk*E#;{`mm4CCa{|c~j z-Y-^IApjLPxstKS>vv?!k}vU(6{gl|ByO4p_y~&Qm=RV77bA*4nEV%2_089XsYhw( zOSlZ*KcPGef78Z_M!TEok}hTEyEKk3%-Yjd#>`v{T*!wCT-mEbZATEbR{)RZMpR~c zxvDaDFzbitKSI4QC#vR=e#|Wp)o2HXf7Sz7s$1VhC)UmOGO@VMow>2C9E#3C`l+n~ zh*@|$nSj1-eF4>2O_qPjIM>bd_<8lbyq}nUiW7JQXZ|_gF=v86R&sp8_wQArM-KMQlgqyGSlHvHGm}f zpUSXgQG^pmbOf z^bCk6W2~S3tq$fXuc)p!P-JiU=^2B5XB(Kia#qv$r!zzNI2nyimQNb-@{!vhwp72J zd&3f`0ZqBcR#nUW4S?DXk8tk+FUJ?eQvWu^Mt<2xdif6+{ip;KrqNHgqB5*Jtrgw< zes2Bpd&9rC_s2zYPt3?<7pW^90+AH+h(qv+bP4Ly(jA6+SNzF1dj9@lXwQv(5N{%_ zm{$&5S;j`ERT+;juS_{e7CkR<@Brt(@yiGfuXag!N_u(L!1RYMWEz1iNUsTHMaJ4X3fX4$SIHD#2X@gy*piI_jD={*w^2?`A{AjaDvQE1or#XEt2?U zAQ-^Mdoo0ZKa3I>r4{g#={&8GeR5?jRuUDGDasx>YR>kp`$tsK{I$8o$*tC-DdNu8 zHsc9q;};w)C$Gnn1F7E>1(OPoa9`Ux5?P0T~hdpeuoW*Qf3us#& z_dN)J*AhW9tuf(e-?5mIp_0zCkQ64;yp_|IS`m{;Ex0zu+$uA*=mI>Yh`Q{NB>LZi z;-7DG!frOZBQwHfjvS@Z;yBuo>->+L%q)s!CcUI7Vcyg403hs6+NBK)_HLF;>XusC z3@Ae{>H%;g4t|@I?~Q0qxFuYyr4WayXAmO%D1v?Qg&b#j8DV2Zb#E$tojoPoKp6HC zE$l%4EP^pX#uPu-&=vKOc(+(ycT!nNHR265Khp}L-%)p#VE?sS#_uezXZh{+<3uK# zhL&vjCEjVc<&#V6Sct4j{_WhATa32r0eQ`NbI>_|C%joW49{E(cKDj#2cSB 
zh>p%MIukZ%d#q=!Ag(M`%=m0+BHh~6PUJRLxWYMS1d<7(dH1fN!3*1;pYdT~0lB`2 za%?m&^YLwnx`v~TgD9!JloNX^I#TM7OvTJCSEKePcf`o_vS?CI)y%gpD3se@FYf`g zlGSLOO6Y}$^j$6KGEMbqH$>`=&E2&(RgP`Ho(5mp;e|fK3FBX{-w008=f&k0uMo}T zzkhhC?0^-#a0}==YQo;CUT@hQo1&t+TJqlQ5YJNGHahlG^)%3>9>IA?yf*dIQZiK8 zQC*tfjq}h5A{fB9lK~n)lWJfWx^p2+r<|=u4qSKR@#d{SOtve;QoOll=ChF)r2;u9G^BFJVp>93dnv>`^b3=O-wNeC8xc>&qt2ay>w? zD$PppLz|8E)wajE(3BaWjVg!{00KAo7e`2B4k@j5Gf6vhCxG2weG+6%ETr?)=VYIp zk8HW2{z1LJm)^S4nDez7V@pcjC*D`dp{mItWnaCZe$j8usEO>j zDkpMScB$3vTUvU=^GOF*6uK*JsWTuoeZY zuA{pM{gz*B2%9^nuUHHgpNNYi=4xh>I2wvDX7ZI6_9|rawH|-x1u+BEk=y4Zg5KlX z4*jbZ&441WhMA}h#b=-F{GPvWu(Il%DxP4~gX=;nZ=!ie9Zj~e$i5YiCVOi;Tk7!d5~WG~j?Bxt+&~siH^E_Dd+8~(iS>Bj%R6jg_*YyB zS0M?aOGBy<%h@(K)JHjajN>$7G)&Z_&USP!_GSGOV^3c}w116|lgeZZ?L-?fnw{0i zL@`L8QOy~(q`?4m#&6T>_x%amgpHTu&Q&iq5Ixjaxq3lrU5qqom^1#~9(2g0^5 zL3Su2g6lL^p+WL`j?d%S#5tcDAOf!DB3X}r6sJ}}6u2JBe;23#QQ6?9b{LmQz;=l+ z!O<3|tpaiZtR;rSY`i91RH(A;>FA~|^vq{Y7fZuSYE!X}ocHWjyt3PIUy)%hcuu{s z3Xo1GOdq?D*}bcij9|<$ov>Nbg{U9?APv;+&AE>OoLtZBfnwr9*q?THhhadgk$9!! zQV@Y;mJn2P4H}EOKIbv9l|zkr#`|{>77L$w$3sV9>7qdvcD%TZc%>+ZYk~I(p8Y*2 zXT8l~X!fy$zF(A2g2cm7ZAVb$O^R{?YoZazk$@A3~;s> z#h|1<(dhB^9PBsv)`w)KaI4Qb5RP*l0@~I5(LX}2o5m{Xh}FN8PZtbT%MGdNv>{x%u74~BX@%w1(o^KJ&?XG3)kasmBDwH?-Egy;! zJzIp}jL`*&zp35#<22X*%6o4zUy|5{x(dSE|8_-Ybx?Z8Sses5QhjT(Qzv?mFWzG! z@(c=*T)nx}*BZEYEc}YxVRQWXVRJR4k|=%TF_s}=S#U4bCg?a1wo_)Vt^itE9V-#X z;wC&#eq`li14c&a+dt_~_Ge34P@hz>sVT4~Pka9SjieW3pCihQ@aX7>K8v=GXJapK zpuB&>#>QVCl0oP>Vbe)5@}=vt1j8C+;+N}PQ;Q2>#}DVC>KCsVz6JYKZ3`97tFon3 zEe62LboMeP8JP!`XruRzIPX9S38ul;#6&hByOwSxj1TQjq{6#_sIRqZitFeCREP&v?tg}`l8k0cv6{vJopf*dvF*e7Z3FuM0| z+bfPN%)x%*lkK}rbjQxkBx`-OBIRkQh=;9vwM?W9mjajn<9lUOk^uR%tJ&q4+E1(- zvZ#Ro^b!W5KwmpHt7J6nOSiq$_D{z+YR zmneeW4wE2}Xyh2p(_U*1`)66tZ85)YwT0U7%SLV84V0l3< z^NKP5(hrQT8Ujrv`=Q*?3Bc+6rV*NUOQdIWcSPPX(%n8TLPz)}9xJ)mpD(5~;KoZ( zl4pDixVc*&^l)41Yc10$;4>s`{u(R0aUdP5+e zV&9#a_mofyIFpYG-Ug!1z+LD8cj2qzeFwShv~A?o%|gwu1(Ub$2UZC3u(f{ae2vNu z_mhw9Pd<4HWDz{wGhPkG#NS*Q78O9B*Sqw|faK0DrxCpjV3*i;&t|mY_@ih~l5%C7 zZJYb6$A6^+IHFnK0}2^e)VNyQ@5$1m2%uOM%ysO?HSO7}ZB67j|0Ta-+ue7*uBM`W zO6tqEL)UQ&iesNMF1=v7-+6cOoy~X;AL{d{lcv=*5C{lzl^=qhJ1A@!TRx`KH&ZMF z&CqGJIJXkzQQrpSkE*-h?UJm!t9H`DNNtoENzZK2^S#>+94d=JJNw5y1d@0fL|5-t z-BIU+F65Qss6wA`p{v6CIj*AChu}A~Waf+d+<8Zd1vjpV%}Xgk*2m2%`iRCkVQeI4 zhfI%bhX_S(Wu7Q}@SyLU1p?!2^u*VwP_(|+w7tj(si`~g6$uUxp0jP1?k{Gu`IwT4 zw8Xul-5X7L2z2J{TS7??j+;OWQ`$L2JBxxnx@TuSIMxt5c)3!JUv5j-1$mXg^3-~? z?Y{%I&J!YB2gwD^+$*6~!l;xb_fInV6$$MG_V}m_%}`NYsBfL1#Q{*XSV#p{^p4it zCzM&J$kOm+)+f_m%y1VeTu$@(rlSHi=s%p1&t2o{J-l0kr;qug5%Q_zFlZ)ZCwo8m zO3!cP8_ns?ThdIARAu+=_sh90xOE|%BlVdH9p~C9t_bw3iDR@zKf%ZM-_G(a4e!adxT&Lf*AqN}^BXvIxW?ym*_6n8rw&lGbJehb8JTh^@YqgoKx9b!+)>V67O z!iJ)fVeAQ;TLk$7TFdibffM~hiYu~1@cggzLxoBzl_3Ko3{JCCNRqKuhS9^#V_zrl z!Pb*B+EEP#bTjOoLK3zmPdivmWx?Xh6ywJ{#hRwS8?>+M&|7nF@_XAI3bs{CF7@

E@E@a&0UPEjT$6Iy< zp;n(TgbIMvX1FQZpU)5@xQGg@@_M0sz>jv+pOjc{ACJ0E?n2VXvY4{#YcTnm4{}_z za{WpjINaS$`q6PXFdgR}f-MDmWY5k-Q!gDBlX}P%TYCK$;DX#Oju9f;ALThCwU4Y_xs|L-wxgheU%inI9^ycMi5*UU*J5BY3hiGE zIw;@iyBLsU?UxX)=fS|>4;<@{qmugUgmh2Yg@}hKmuz1U&Q1mABrpGwL4SBf%Jdje z2>tXZ;LWZy(Y$<=6L}nRcEJQcisP)L>4+4NxgY<#gz?oP^UyZ6`%yt`Ly0Z{&dJM^ z%D4#wv)q05KvT%4Ge;yeerTil$QDRY2MzI|Ugs&cH(3#OOBXkgMdS;WRJ9HSWbj^l zF|f#)(wLRhrkoLeI^`oO}jkq=eHS(<#(U*$Wt zjdzr~_7Z@V7St~V?i8^3_&+xVm4_~ZZT{li7*#3Pm%B-|V~T7|P2136QF)mz$s=ygFmaJ$>&z+= zDq~AWu7-$yAzld4)Mh~W0`%7LoPep zfM;c{;Fnzt1P9p6fm7@%{4YKTaWZ?{)IC~kcsZMtpbuuhyOG81+{^{&MNE9|PK|Sqr*VGXADR1KdnnE!*D}S-`Q>0Qs53 zBgJ}myCI1>-CUM-ti;d}Bor*Vq+ug~l!qtSxx)xT45Y6lHnL-FVpGkN+16y~_^)nt4 z&B5rz+XPxGii)_-5MxPfxJy?PgPJbqHtvOl;wvI6 zR+MPWQXNlgD_hHb>_P+DrMl^C{lg4umM2d+l}xjffe1_hHxCcT*hgcSw|`Ay8BNVG zAJP0|*|qo*x-tqE!0KjPW7>5Kc*-PS*v^s=Vz`TVa04US)qA{tHuEr?!!>GpMf!~})m6G2d*5?!|_~G6;C4aCLcPNiah7}xK z+C-(^>YIi?en76frZ^{xm_;(6eTM&L_ze9fdSVr?j6a}xVUAl0MGcH|xd8Ma)X`1t zNG$^dtA9oQ?wA2!JJ zl=oX{?Xm9w&y|>{t3kBi5<^k?RpWO%LPLQRx)s`D`oRlB%5UM4{i)`crl$KI{ugI^ z$;arrK>w=!e{_r_PJBcRaWWE;C7>Q>NISas3T#}U;>w>|wL`^?V=zwvK{c{!@tDb0 zJg~ph71Y5Z>rT=X-k6e==-_9G4&dL01~(o5H~qV6zk!+OsBQz9Q6$kn&&^(18cXQdlYFCV2i4_Q6Ubd zaS?f%*&yg6AbgD#!F>9nV18Ow2*FHkXG{Kw5&Z9q&nttOfe7FGI--|4@&8={YqKwR z>z7I!3i%-YcTvdEb@@}HQ?U}G6Gb1RGcA?sWay&|@TXj6sow>%E?YV=rpS!z3F|@x zn-=Bd)Eoh9jjC*@L3LaTxEhYnDBA6S%Ll6jpGou4!QpbB_z#EC1-e9I3eBRk7Cm-Y z^O&LQSNmPXD>247WI8dy(~EOHj`BN#Jp=?q`Nd;O{W(YSb4Qcu6wLAYRc`fcq!Una z%Zkc{jgND+RqQpprkYxma+iN(vhj4DP4ap~#DZj`_iyPcF3b(Ow)RUk`}(@)=ddv0 zA>)h$popHheK4Zku_|ZQrl%W_rSyK^gsiMOG9UIOKvgKl54Z-|uO3^xrsij(eFWFT zE@HUX@<@>JhfCrQbCA=fQH{w*Kcj)}EL(P=9&z-bIqczY4Fdh{>Ow!PrRs1D1Vr(o zSKE|(+WViOMx-y+UYfJs^tqM{Kw$Zci+u#{rMv?{_Y||?{4BRAlLO>;pLnXl)?&Pc zatRDx-fym5GbILDJ{67#>u8H;fXzKGK)@vE_Wa^Y%Vp54FC7aJ^Isi)eCD-iwA?dN z-YB^=7nFQ-WAeYx`{g!8+`^d;z2;DsvEUskUfy#g4^IpXFal*3-iC@1jIrLEy?0>r z(VO)8KjCMKyX|^2FC!|6aCGg}sSPYG*Hfrb5iGB8`SY1+_|2A^KSBEs^9iCQa}oll znINbWjT3R^l>-v)CW!AR107U@Sq@o^8-b;|0Qzh>Is+z;5j*sM0E2#e{j|TGNW(rn zEwi+LJIylryS0Xw(D)-Rtnqd=%F$i?ywlX}(?IE)mekvQt_OoTxHos0JbR930F&n6 zoZ|xkRxcocKRUf3Afw^9N`cHQY&tdBt(*D$3RX-I<`L_^5GW^0y9Oc2g&}Mnz>pEcL?WllCQKN#)ayRh_2x74|H9U6@<-Cif`pi+Do4BlJ77 zr|(gc#{X9SFPV4_J?T)jE||~G_XueB_H=V1HLq^~zj1HFYml zf?n|R4DzYs?cH7-YcNh{=#3*GMe{tMyt?YsyaKN;f&Klx@Kd2UQ>WwI`Tl$DE8mKu z?d$C&@G`#twKPgg=UA+susu+c07ND(Huy!(lV%@o^4?de?(m#=mKF7i%g{FTBUj+FPPMINc|CP7XcbNH?U_ned-ud zibPj^j*}IX*j|81c-VJmS!(W=iqiYJU4#%4%X0gA`11i*2Bo>5rzg>?u$_gi3;(Hl zVs?27XcNC>Vrp8;_mHU%o>I13iZ{7`5imP;HDeO|)K&$aDSm9!P11LN#f`vpo0mez z-DUTKS>SR9^js`UQB{&sBnB-mxzW?@Cz|ZK5G}(q#~5R98NC|#%TB;Z1Qs;2z6s)C zSH=x>J-Me2u6NNZpkcodiQYbSt!C?E=_N81Zav6;RuHW+#ioe+Xq~Z}R zuunFdpB_gp95l*2!08n&PID*Fj`gPC6b+A~@#!4fT$2*&rdNm9SwY2}3rVtOwscg@ zcx6qGotGs-K;Gu<*=1ukV@WdZ@`RaD0V4s>qhLH%B5HJ<&o{yB#zukxD+))7fK$FS zN<&AIg*7wq)+N(!e;pj0}Lkh!$q+Cs_Uje>C0ms~1clJ1G-!0@qt$X~E>h-v6wU7;Di0H6O zwVAmzD$P|ap^2F!Tf982<9(Q?!)XkQ`|@xLn(ejbe*eJMN9s0|WMBQVI+NrA@5Qpy z3xd6C=A&eDYQf3z=Tqj2*45y|8fFSU+-!RNI8o>MgHhUIwW3O|NeVwf6br5)=YjZh zw*lL76%~ISy7uzGSNh%qYI$lXLygZjt-{%IC>C{p6dRMxsm6m7vzw`KA|Zn;$w}{H zDcspVNK50=Zj^wETenWriZjuOSv@oeZ$O#(_l>0(QE@>cA?)EO)zqbLZMcKDO3vi| z7H7Ao6bZLEtisjfTC+__+iNl|&7JiL+YP2T(Sq-$$h23k)(mJhn)}ZdKlN(+V${An zwjv$XH+$&XQV8K`!l#8V;BWKL6@)G3A}@TC(-VGfmuJvC?WHSyi6>@wmbT;Un6fM$dH}Ao>U- z%psL+H~Wgyio^3SSrlKZPb8XjC*nDuRwuyR-<hz#G3|R8} zBWNrpARoWhlsJxb{3g)p%5Gp1(ccXEyzg;6BY(ed*1YnOu>Yo&%?P=A#omhbapYO$ zKc65eM#n=Sa89pJ^+Z1*JCt%-l5MAwQE4nak3Kwf^+5VkI4CHCukVg%vymq=YtlGa z6Y1N3miN+zO7(6y2>SX78TsEEm4gu75t(*r$!EM9Psy&t?9?4Mo2}y2?0+d&`@PNS 
zzms}R-Bh%L-I`@|zO%%JMYQY88z)&ndt=br3V`f8+Z=)m9uNF)v1m1Zozm4B zzdEe&hp0Erp?q0c`;p3x3o*#AglDrP7w;PW;`RW7pUYNsN&>nN(Zpv^yK744g-XNU znSl}+b;Lq-`gQ;zWsh=7DEWeS9W+No7L9G3a7CfgoNY-r2hWgT*zUZuuhGZi-Slk0 zvioxf*O?_%Bb$ASvQ2Dj%l2%)#}Ic*-m%$GPe& zJV()M9D6RqDBy9$*L&!}3q{`%UE3|>Cw$HFK!?@Z_ySQVt%CqyCzG1}o5_7RFvD1D zfvAnIYa;)%;!*o#h$i;U;2T*yvX~FZS=zBLv4KuRMFo*%=|#)Rt*9~u(VR#Wv7!G-iL;#+u4n* zMcK?&*7JP{E{ED=bE~_drer8aSp3cZK0NpxN+eH)KPtkim=3kpHCh_8M?X%w`L(6f z>)+5QR(4~A?fjL9VVME&s+{_-I9hGl4my1kW_S)I?sgE;d5rC0%^!z2IlK4Z@ zC-p3e`x?fS=yrpongO^UZ!(~V8?Fp!cw@T2@fTSl!K)k78EefC^cWe?+Y9ho!+n-X z5T+h85m&K9U~)lLM5qZx_;1#hhWa+Mb4Sw_ylq;E7^ee%My%g1dt`!%m@IQN%X~Yv z^C*g{t>;qAsDl(bC;_};rL3mpKXpFII-DQ6`888G=AR>W>++tZnXOab-4~{b2CFNY zaZef0zoZ-_z?*nJ{8nJUs2))*3UU@NPM>p%v4)>S#abQE)vn@l2LE%keBgx|7)uOY z59UDGFnyef2kP?rHyA6OQd3IWS=1SnsI~55Ass+Lzk>&X4)ArA4AVN!Zg$9SHvI4^ z6v?XC!3@$;6w%OuV6pIpGCq2fH zzH~l_@J!`4^kFS{vAFLB?A0R#&O|_6V3lPm(o%ixVSTNyr*G{;)ASfwJiNAaB@7ID zzIRk@&F1vvkn7v+=RUat-cLS9?KAXWYDmfRt-zf0rHr6fzL#TKpPE8VGsZD5L1Gv-G+5- zugU56%?oY89R=6wt+($!gBA(qd0=u6K@xLcUdz zK>?zqsAiU<>~uQY>WAGGB}1!a2ny-WF0dZxW(>4AuHxD;H!z8n&ch~3!h1nI*BzzU z7^k61Dp_9FifQ$S*e18QhAePHR-@|> zq*p73tddlGIIdT%zXm#1LX|BmZvsH#1Ey6$#YE2J=7rLH7Y*9Y%h>2QoIhyU`JT>K*^>+i1{d_h;cFAYH@Mg`~TKSz}UVp%WuN z9W)g|wKlAydPiYd@EfHjq>E0-$|dDipGfs49&f4sV%liaQg_s78`Pt|W7vNy1ZRnW65`Xl7+cwDIg zB1i*u_mg{2K@(d~c?RxdKGbgx(=EHJbcx@A`zVto{0Xz%LQX#zYIR@iNNBf=vko_2 zRVZ;coOkaXTN0jNjXEDO+z|qCJs3Bz>%YIoUDZb=_W3IxQPE?k)Qk1%VBL_>D&k#Y z>2QkFx)Ou+kF1+inoOaoQ$L&=Gnby1Yx(u!Kd~GQebnVHnzmLR(A<05O~<+n>vqU zntWLXPb202W==Md9*coWTx}8=MDybA@zbkrel-H#;q2@wc z-5r`xDUBio(c;I4;5Xx+NnE~5Uk3A!znPG4IZr(N*hx>0c|!A~eQJ{E{G0(jr~#Vh zJu|}TmHU3-;oyVaC>Ac}yMT!-_d;TgPlFw&{FKVWGFBYvEN?ZTJY^fr#|9qX3c(rB zDZXMwjQCZCxZMs$Qdiv8)7lwcSIpo;4FL@m_^uX6me>K6)yD6tn?7#Z>2L~(8jRth z5fn%Ysakni8p-%`^Cj8z$Z385&*!+1^O588>|eC?LP~A%5Hv-)P47NztD*PikHpHB z-PcUFvqsKFB|QEfY@xmnUf=|c^yE#UacOQ=#T zl2XLE3U0YsJvRIDGvo2w!U3(!_K9U@=;xymUrwvE^%Cl9JUqt5hr>16w1~dDNgsij z+RCaPF|AG+t8A1S?IFSiYBQCyF1^#%A;69*r@Z%)^%zGS(=}$3J`6B-P(Ed+a<(Tq z;l~2eU=1Deb1&gw5VU@-U6mmLr2akJYkJz2->Y$QQNm6EGvU6iBNRBEK8mG568Ik6gOxE5AFky1TBD?rS z2VvHsumS(V@CM@|rn4)c0G7v}Qw71WE)+*fk}36ph{DFc9A>d!Nv!50_D4UEQtMf# z_A<+uT{eR3uhE>3vv$TP9Fl$EsR$i+YegqXnO)CiWY%yf-Msz!xarBRsO9XB_VOv7_4Fg3PaG+wqF-lU{4g&2z>l# zQe>422x^@Fq>_ri54UXyLlJ;X@Zsm?RxLC|E3mbKQHj1c1R|&hMQ>uAg(d-bU8Wu6 zF~?OLY!j02CUT_&9zKbX;xoJ-$J^su$#ZST*se1Kh>r#gBgN&45dvCL%=j zSvY=cP}4pl@GxI1R~K@PVzC-kPyOw@7u_QkKn(%3o7?3gJ&_1@mgF(x$oo2Ee`Xb` zw%!VWnepXU_}PhpU$MuaTsC30X5lmUP#`WmI#_(%RQTdwFF{mQ)f2LBlaMYM@R$lP zfwWkebhMqB9^ARfmnL6^Pfw|40maT&og28Ybp-M(V*bq<>8i!Y*xBH8fe1Pe$Fo(f z*8r~Hngzm>WZnO4`rLwKxFFhxNI&|%cwBqie4pZ9h}h<}tvIk|aQL}z+nc#dL;9_V z`-8yO^!_&eynH`kpJODmzonOB>q`T;ByX<)t=-}6Yj^;~aG+>6#^sQNY6a}Td z9KV#y1TAXZnLV^Re*y4g^*SztMV*_(KDY0+LQXBgavO1r%%$ThZdA`x3qc@qbwQSp z+DSlgi^lty@llJ@zpspr;!i`QjF(|fK9bn3r>c5+5hRmG#VJj(oiYWGkA_CI)D~al zgx){pj4pC7$!^XETn>ML)efY=%R z(|Ne}%MJJYC72?zWUaoqw>#xD0WwZ<=*sMb@wpnCn0m|0YxMu_W8+IfgA+V_-tnc? zr^wBS6>1|&i?+Hq;Db^!K2#spzWrt2+H*-XP!%u7Lk0Wg z!@>wt&(4OD@7dU2Es1=7U}WxWM3Ommm0a18zS6(&nm%+m+*|rD@76FdkB9QPeLpSX zK1r;r?BDs+t zIdk3s@%}{XOhrVXnI~%qf_dEC?8~$l2}FL{-?cGlLOwQ2#CGc&*d(4^)sL>6rk4J< z@7s@Y)GkCt+&tl1CQA2Tht=AbL{Gv>{LHc_UsA;6Ksx-O)E3&Ahz{27)zy_mKPZ#Wqnfx+Yn^+xZ-;s0p* z3Wq44@9SrQ6$Jre ze1GpBu(LDI+&TB$d(YgNrM7n`x(50mhmXDWar=9H#^tsYRc_!b+4z1AOR0mkI5Pk&f4~3Q_gd0HlKcbp@}#2X7Hb)F0hsWfxP|@cLKOf@y)#E zEYs2FBPJ0;VU)XIY1ZvvgC5Jq=O5RbdXEf!8`qt4? 
zUB|cmV4*(WNCgExncjm$ZXSp0VXgIL%Q$+Y#3s{ijKVp6a_Ja$rIaSz{$Dl}3@Te2d ztq(p?&lgxO{Mx@O;b2RGw+@D0PK+T@j>Z)wYO%nzjE%~j;xlvMvkMGepy^=o;-OpA zMV_wfpQvIpm=kYCelKeT z_;}<7!1Dut$!PYAd#ge#{>N86*RvImXTPDfWSUcu>{zW7(=oq0#&_YP6bYb-1T44% zvH<#=X&0~Eynfu6`-_bqnD>PjeuZ3VONh*c)H1$j1 zyn&|X+oDH5KX@0Y{KiY@2I=!BB7C8`0(ktOpS}jnrGQ#G$@2J}Qh?YMkR-Gk%g7{i z0bQ>+wkuxKnabBtFc)y4^6W{q$^p+K?Oin&yP$(SS`SK=f?P(*vDTcU;aaV8ZPaO# zUz^<_;q0aW-516-9w2=SDEq3`%kl>OXl#9rE_Jy+N4KSoXbSfX*hr>$Vpwb*yud{# z--hX8Gb`O;ALqu4ayRgUz=+XFpWgFN7+S-+CJyx9y6^+fUMxFguWyduzu}H)E}Crb z$oX2j32x7yXk9BvySeyym|)b}mH7tS#aJTTA6gz9*XL3g5=NV=x;=gB~#Ii#E5# zT~{HS{YfNbZTxkjf47aY{qsJ)SC{|twdYjQ#lFd}({5cHGKq|>FJOjpfpcZS=AKsN zUDSNI<-@<$Zc<6g5+8q(%$8K<_aE}f-Eq7?fo~7sM%Bf5Xju3|C88y;J?EDvjKSee zU4P%&_PH=Y0x2g83%ZE;-Apj~6m>^6U(ARC zO^o$uKUB7__2Q*|Vd1d5I;7HSAZ9SroxEbUzBLHmMB^Z%aqrk=j8k~jGa{_LI%`(A zK0)B7pF7)H8Mk3MtWh)t+=BLzJ=PiAA+<<&)DjWbLd~{1X2JT83(_=g&9fu*c(+9a zv_KO->VFHluOXLdjiQ?v<33*?`cg18K4GZm^D>m151)NC0_yWWFOKzpPQFqU+gkPp z-&=G^wz54qz8p&A4GGXSRIahx?w`3Z7g5*aDlr#VJ`G{o^t_67C&a4~@6bLkw<$F$ z=|tru!?z^=BXCY|ju@=e?jt!*frrjC&}%B`v?MC%+Vu$A-9ZB{{YY#O5NS>DRh;hy zKGe6`t=HZ>6~0HBLz0>&{TsYp7Vdnp#5_pTinxp<2uG>{sN#wr>17-pXEB9K$>LV45G^$^{f>V(XFyOs0mqp=te) z=YP!K*x|a5Q3>cz`PzgrZ8D^_((z5Z;fvY_t{x7gk3xLQ)97?^3<3Gz);Xm{O@HB6 zr2Eaq=;%JB5y{5Ksj#hoP(A~&wqRK6#S^0CeY6CaSn|5{Hn;_iG;ju2fJWJ$BZgZt zya3PF-zq-q2_wF(mc|WE)9B=ol|6S47VxS)F@7pHVL`uP(diN{6o@QpG%m7&cT2gw z%+oHC92@*I=~DWaOc}#z(!UOUk^W7fRh>`d;Z}W-CRv@@%+Z4NEq)i}cW~F|>zL}w zcz#Pua7NClU5-jxg-#h>LBSzTE++`oRu6lDMmn&~(q4FI6AdfyN&)GU?{q)g7{o7WhhxllB(* zB(oH#9DozN{{&S94xiOacmd6BQ!MqxQW=1Bq~2ye(LQvz=jbtAQjeYgO>kN8+r5iUQ%jZg+psRQ zE9(H{X%&gi#h0t*lQ2&L)xN}Wr@NmQcr8#yhGl2PoZiJX+4O3NQ|DA%@ZS6v#n!<3 zb-Y`Y6j@Ici*nFN4#DWY_%~1Ql~$f}$awVC;LD>qzRRPk``a^H7a-)dpO<@F zU~+`K$GzV={Vn0g3J1opDN_^X5DleZ2{mU^b<#XlfwcsF^YKTclDkc=3@!I#t69K(;DO zs2Zqd!S$9{TOdp3aWY*}sqVpfm$lG#E);U=hw4It8^`av0as~EB6`gN@MON7Zzod2 zTWgNiAi6-zAx&<^;A_oFURmKfk0)TijlvSK>#sp9bKy{fCxI;Z>?^pf3mV^6I2zwD z?a>Q#uh{9N)_|25UW|s$y+;`L2K!?n20xW%$KR3U-??XkUB`n1QB1!#BnzoS5|h*p zf!zT==VR`JlM!6AJE>A;w1t1Ydfyx||425E3zcCo4!S?c$cTHNT$=@ zG&}A{$5-3KEuCkUO>cqr#kn3%e}tqfsbT|J3+*=DKOk!7*`x7wo7OyH`mMNyp8Jdz zeq_!1o4~TJcO%JABMku%D%@9QhOmuv@?v^Z@!&n8r`n{mnb2=&1#}qyB;;wUBnYyb zMQy8`5O~6+d^lp6y;m_&-$8HH?nwh#E`jjONH9uFlmQ^R{6r7DT&x?n{6VqW?)>Kj zi5Oi<(uh`d=lvtn43g9=v_Bl^j>lG|S-HS5V;RvL`x)9>QwJK06kYld==j`fW0Dg} zLXPIUw|xOp4{=iQ4B04v%6Gi3?o8Xd6>JFW2~V1CwRQ5iY8w=&_N6*z>6bbVY&fSY z2T)1mzzZ7aYf4$^0aeugH+=s6EBo{AmsvWtdbDbJ4#cQDzId@kn@K zEnLoq-jRs%jB6DmXr{rzrefbIV#f2Ad=O~rEmUQ+xl&_xC@YjslB&)2Mmgs1&2NZ{ zs&a4D2G9GAdLU1DlSuQ1p{1(GB2a`Y>`Ayg{Pu(yNHDSZTiW9AKNjp>J$>8GNX^{ zXlFzvoy15lL0s;pj8ggC9}R3kKKY8c-I-FHGhWWLDb2oje}X@HmBSD;_1s=OG|0O7 zK7wP_i>u*NH@@c0NYQ_V`rCUj#q7Sp`{|dJR-r-j=d50yn!p{!>`CPK@Va#6( z;PPa@-S%Sck0lMAwwObb=x&D-+@oFOe(N6j+Eis_cpJr6= z^bY~VtH`{GIl!=8>ORiqCK>jnUyxjAQg0wL8(h;#PS`7Sr(Eo5Fkdh^eE($I8fF95 zq)!Ht)pO1~MHlK%^J%Xm;)JR_d^D`%Th!vyo5zittnRpkWy0PS(~EC;n|3zCRzQ54 z@{QL7phc7yP9ZC;P{?l)wEXC%d0G z*&lsNZQa>hEgP!RBb|+n8?=-Kpi9IAiC?Me#HnltSt*h9^oOTuHpCG2bR9&@@;-LNk}d9O zq?)+iQ16YT=YGan3?X|sKCgg|r67>cEuRJ=X;vSA99+`?5H*Gl0>IEr&T?w7_)V?s{N9Sn5Ze$y5{p75 zJ9QNm`PzclXqp$cJM>NDNyFS9kZ>g!y4Sq_zSHj$y(FXcT2X) zujEs{7e|vXBt9^>vG$u_r?9v4?FJw4U_rfZb>uCI>5RHqq?<&Qd+yR;_JT&N+sP_b743b4$HB8b5~rzCep)(0D@P zH#4eD6&!KVX&sBK9)6#A{S$WKqS)$nn-nt8T1#kFI~Tkp&Qf%n)W<_NbeS#vO4>b1 zE_d?C`M+z*`&|ASITwU|V{$CWr6}>&2rA!b`>B>6^8ps5@n82GcpMKC*-ZV z>V6#A0KSKJF%5H~Vt>Q2Ql-`4HuYvl#rkvM_d%I3iZ1V+1 z4JJxr3Eq`xpwkBxh?2Q04RR-zEB_w!gRAWYQC0mnw>mgG@%6Q0VR1Q?>lj?_^wvX6 
z*S@%ype-u=4|i~srU`4Imh}DUf;)|O+z76eK{qE51rHnPnM1l!0V^&kjko5k&uCC23XD;NyrkJn2{qo?i-)gQs%UG|ea%JUWKKP#mD$Rel+3gwDFy zI9BWKK?Ucyk^X2k=v82SgA}a~5d{4{^+7brO4xWz@#MbNqEeaU>If*5_-gwRmp`%p zQD#~?{TqR0!Amqw8|u|wHQj)u)hVHuBm5eEX0qA%NV5?2S8M4UkIC*2ZZ$rmFwdys zx4S|kfD3=Uohl1K1gAx^xk5QV=@IvwO_}O+UfApLu@7j+v?Kq1a{a)GO8-nQMkn$` z)6LffcC$NJtO6dqdnV0`%z3p=#=W}#l#aGL#lzCB zOM1a`ZZKm#<2_Sbd9*4dSw>L7dsSlnA>|o(lxU5Mt=4^n&`A%A+*!^Hk9j~gZ<`KH z(hd8ZWFfqs)Urd6$03`fSh3FJ-gaTa{#Ztnlh4CPI$}psMkLI^U(Yk$49ah&7rshx zi~PMuX$hJgtIsrp+uM0|81bqipQQGFzvAOW(N$oX-ljni4c=zro|6GKF1_gWs_|u2 zs8i5b4Kmc*o&$5+m-BTsEBSiP#5B5kQ)s#Ahd`6l2~OxUJDAniZGr{{tt7=MoV{2crqRGJ zX9(PnuiY8$`f|xlJVDAMhFtVcuLFFhwNGqf^Th0j1+8^TL8&q*508ORBVpua~thK$;X{pLFSJUSvNu}f$JGc)XvZooWYn=(voQ)nd4!}(ZB2T) z9^zeQ@VOEg944ILWAe$>DZU`D z{0vXL5CfJY?|M=pNVEP%(f+W;#OB!@)Uwnj+We2L%{^7{La7M>WE7U~Xjl0Vk=z~n zfl$ftq_2xEtv~0!SXP3r;nU-qGCs*oH{FI&oiVT3o%Wxcs42feWZViN3FL4nX!SsQ zt78qn+9XH=v82GSY1Xnxzk`P=?)mV)fgr&X-bh?+W>T~USMKL~Oz|;tH@&r7XOOBl z|1px8WI^u^9N+8R0Lp?I!lu;A!B(z|7pKdl%tspJ1O_^X~0b+7(fy5<1^ z9Lgyy+iRte=bG2oie`@xdx3}tgV{>zqFY74W;gKN1q}q0Ln1Pr)B2^0X(R+51^BZU zpLWb6ODItj0Xib24MG2dheSi#Gvhcu2*`-yPf_WHheO)y{FITeO}TG#qF8$gPJ$1; zEWE=!R2VE)7wp!8XocqlSD1@XNVoH!y$n~2}x1JKjP?DL$@3PJm@*0a;)#;+qlyI zW|z$$=`xV!#Jo(FD!UtENIrQDc)=%c;h6 z!`d+$lkQ9rn>GI!Q$j!o#JZW>TLnNdqExGi#>BgX_pJv+2-sj)1XqkqzFLZauT9ihw0r`F{VGQtk_DqlZH$ zJ7EEb45vn-#lrbnN79^G(Bx<6y5Zy>@#{zhv~hSc4D#JQbfu@go75HZh5ZOe%9Ko& zGh}J7F>|a!(txCCFSCv%@W@J^`8h%)90nR$d6Zi>z1P6UkqlMlw0V==@Tu#OQRH~- zJ^B)z;<9oZf)Hm62>$GW2KjhiyJI2Wxu!>CD)z_ab^AyPrqv2yEo41PPD}zWOt~3Q zU45>9KO?}{rm#z^rAf8;M+kG1sIeB?NwXA!lSc}!io_7JD>AnyAT-_6$}IPdiljI< z`Mo2Dg-x8>4o&(UFnw$W|Y9e-94zgkAW zz35g{#QuHK`X;y-TBCy9;C{!Tc$UIgMGPHGEX$vzi;{g-#9PPnGF&aOUHMb@Ft0_-Ls2W>tfC^=tNSeKAs_Rt*%26SW=XlJ)ku zK@fZb#oP%Pd+Axdi4nuyg(OK*W}iSYqq%?b=oDx0S^HE{?yWAWGuh+{B+z5pN=?rC zhUSe#g>cilL6vGfAKVh*v)&cb+XK_InYssLVCVNV5cOLbS)Jmk?$VcxyC9=vw=W+rA8!zqWNKJ?ver zRHo;N9#n+Ybo(;T!a%#OjPQdd_1jv8 zoD}P7pgI$9{8fIx#634!e8l;?3o1^wz6Y-Os6IGlB$I~?+8qx>;sQ%hPWip71<=TF**=Drl%AEqM1_z{5x@mmPgo!=Lv3+n(za<=|XHsDI~5R>%6NkPD#i|C?O1GWYF)?853~+iOp4 zo1Hr5_M!9F8asEbAHa8%Tfh5Ipa^tb?6Nd*nbR=`JXC;_fMZO z+;@VYslzLgbsi&xAtFiVVVhVQN%4V((e{#`!lzP;ixP@loG5mEjNM2WWrNh!e$ERb z=!2ork%>W<-mQBR^5IQ^H&yBuEg(mHlP#aV5?tPwai9jIbrnB-h)8Qr#s?JE9{&ik za1$;d9xy>}AVWn-5iBM4onTK1IePv##n1gzDgg*{dP&sT#pV0BC&a()=SB zM|t@-OIt!u{|wKDKHvOqk_r7OspMSc|IgH7XYQ`XOh{ISV3|TTYH1|I2OKQJGK*u& z|3naI*~jTU__IC}ky4Jv~6qrPyWyn^3K z!$Gf*KlE%o`<6yDWx;?-MFcu^}GmOnwV5abQ#bss6oa&zMojx9!FUx+d3 z!ft%xZt+vr7t8%daH0-41Spzb>nQd{wz2K~D0K0<)Zimra9r!opm?xcVsM?g@m-6% z2sp%T6D?YYWB>3ji%9VzxqHqk-1xd7AIBG&D+O6kHYlaTC3^%URo2Lo`G|ypV0>6W z#L{eWL9Eo8O(KF!mj*vd3*_&Nnx87}34aTnVV|3qZ4Q#+SP^4V%JlPQ1oc&Gb4bW=3YM$ETNm4^IHm6i~&ipM#-J9stpvqLXp z$ZQ_3kL`ZDfe*}z!mrk}4Aw7FqK=h)(xG^lBondxUhCZD9)+q+V;7^lVC|dR06$7c zo25ScUtuI({~@3TJ;pBVfJw4oybvlpNK@7%N(yBRQ{~pFig~FhI=r9nanth9zVjJg zzG~Tvw?PsZ&bXRIy@C%2TSV@f+H6+ZP}eS<%c{N3mk{YgLGRyOyCUd$^|+6X^z4y>0y3i2Gu4%dA+L37W!^E& zON8CTvIDf`6~T{1!tS+KvyB`wM%tD5UWXw5o*#G$6THp~T+nT0%(1|euA-IMYUdeW znX>&zcb~mKS&}TPw9IjQ(*MA_6I`^_=4{`(O;b z0z|fOBY_h0)ltbILB0!0r3TF83ZDSB_=nDh)Z$2YZdELSrGs50CiOdJ^lZ}iHTu|-_R1(g3X@YYTSIBG zHz~nxxr72&Hg$ffX5&*{+uQPcs|NL`L*r z;fv2A4Q5&z_@wfKWCI^?=}%-$wggUSLyj$0!v&H2aR-$b`Hqtv6U0oJ=qa*Dbpz@C zFMm%l1jOj1oAIvlB&lxI-MTPIj#ZJ?Ig}1a17~he9;1e{t0NTe!q+1U1-VgAzj4l-=%mU@tq5P?$OgIa@N zLgn-Q>sTpuowVJ)w*oJ<1w9(SD~)ra;whLAXWzY)An3YpafE^6L!tPqG==zGa1Crz z3Ih(O`${aqNB!{Kqr?xKtmii=Y6)The7ykSn@YLw!f@el>bLLVoF?ZEjaqxFo98-PP7&-2paH=iow5sNJ!SRS!iAVbF9a;CZ56xM6K6b zB?%;iG|=VmyW5~;Fgkr5_W`72$P^NBNp?S2A8z>A-T_3y$`kLA#m>yDK{r0u>}f#K 
zGuX|jKj+oN&%C!kB{=I^>=|xGAH{FFi5Cq0@hd?TPjJ5g@fR=k4Zee2)1tG~4J+gw zNfam;rjjjhWhc+B)w6zU@1t(9+*F1N{bJp=E!?s)lT%n$>Bs9XLo3{OedluS8$P%_ zzIwBKiH^)?>-6}*YiN(P`0#5@Z;$Ws^|3FCxP)DxNCZKh;CYb`gIm#J)~~RUK(qp} zsTc!U4zGmESDStm$P+o%DWI1{1VBJ(l&G9dy*nRx3De3H1>IES(X(HC~TDnJ+;; z;tlx}$M)z6`^Wa2CE~Z`14*N+<=z zN#HRvc<3z0<%U%!Z@~vJ>!T!F!v?_&{X{~ekf~d`DxS}!Rt2xnsM1LgYuHC0d?MF} zAmt=woLtm*-vRI)TGRa1SN;PV_biC3{*9RzHOe|DSfx4xGEvd1mB<V zCD#8#A9)dYc%wEHIdnJDk9Gbd)OQ8EBI;p@?@1F{PI*kHrDQR55C0^eC+2@JPJ7IQ z`kOL`JDE6p@n!a`a zfW-iD9E;NgrG%Y-imcehqYuQWlH9*I&%PI@rUE7|UTaWVqce3zG-k2P zc~m%NNH+ZFC1_)k{x4yo4izkQplI!KzbHQBb+mc(9c~Hm3pzdBb4nB|_7=i$vyTdL z8Sm)8gM;Uc!!~N-d~M)65d@Z%Nyo~7CF~eFcqOD8I#shY@PoOw3*})E!JvroIu`Ab zzB|`O-f2q=Yk2_^V7Rw=?a5PbZeOpRbYyKhzx{ZU)h;3Aqx0ogqWrsT%A+sWfsX{2(AVv~CfK_s_XSo^!=>#w(6eniBBP(Bds! zR^RW>+;e|GAP$Xb$8+DV7C}xlkK#41-}_jZ4%INj3#NN!ocHWmB^JI5HIRadcH`ZS zvST&znz*)*pm@Z1-Ia!CB&pUQul*eLaiFU$Fq&$Hca@ao3pvyd;??>CP-kFbBHJ(q zbrzkQ>r>_MoG!V~;TIu_xL+tTqdyQz4jr$9X5TTz9e|tz_GjAG?|-^!X@!g-AIb_e z0=V3XF86eJBFfyoO?^1WBldEa?5nm+mNMXc|;hh92*qg#Hq-(<^D z-O_{!VsAbheg5TnwM4Hz3l$|rZ@0U`+0S}zf_U)nuABU9KIQ54ysNU&{YO5v8tA?@ za^f&GxWgYYIvVoM$8AoZ2H#bc?vF^ji7b? z=Qd)rvUiJr?5BXXQ1B|Y zE{D!6?hEtA#J?pLJ>`TpEDds@ot2kfK`{m|iKX%NPP%Xkl*M6NzQQi%H>HEHM_wSdGF~@60SjmL-HLb|Cy2(tRk0ybR)2yqH z-Wgs-^=Ku-3eJ(2n8IQ;wz+a|murL~p$X8YtYqSpm}(-Uqcw1Nqsg_+a?_@Gmrej@ z7=QK3`-?@1MjQym)>~O0B7gCOvOfbA4Ka?pHTT&kT4ZYknd~iGhw4c){E9r)LI%be86lDM<=ekps@mOHSmY)V2UeCVjJq}7`)NoXLU;KijHYNG-Zxy%aGZ|!@ z(6S>}U7=U;C^?i$AIpsoEBC!q=OO{0t-T52b$0#ofvvbw;eRypfRg4ETKT^Jl`49E zzy#pCg-$&g^7gu(|JUx^T+Zbl{3YQ84Ekqhy@5|u&bR2tEl;O^(#R*m=Ckzgejgg z`{bGLaubMm#Y>&d%W*AoK@(=t1fx?YgpoLZ(_?8$08RnhY_%+3jz?(sW7nAzUs*JHO>b9><`#?b&PGG6da?{>kUx-K8*7E!n zVrDR1jF}pU0@_oYY**Hmp1h)ERN6Q(vQ&DNcd+RsQQ{bGN7As6@KOKbn?DY2uh$R5 zmOtcl4j<5l#dY_mn7N8oq^m#j1%GSI8A`e&ytd1O5cp{tb9#w7_l4`*b?G z!=dT!!tiUh9Na^7-T|G$ITCImwZoqX$D{!2_uLzw(?BBd5_@^ zhOkXb;ex~EAFIk7^kG!TN}8Y+)@JJ`k6soQAVy{09-7}07kYOvy#sG9-C<&dheuMg zchfT&d=FD-I^huZ9Cb6v#l0&HhJU7%LR~16{UtR)t;Aa3w)P{26+Jj}z!g*X zaaKL~-D_+)3!IMZ5UL`U7-yS&eh=(S(~Cm+C1~6mM#Xfx}N&R9r+ARj5UB=yA5n`jrjHH7by7% zmw#SW0^C^VTmyCWZOvcBeROte^13Gz1aCkcuHrc}%0bx_pV{Am+`x{3kfWN3dRMRv zBTT){e{_9k?r{8+LwKmZK@~kRXa{En5tGwVYnd<0-!=U29|EHBT8yFUU7bt(Ye}E8 z-0#C@_l|${JI~^}&BY*SX4nMZPE~cYXungL13uefhYbxQFaHD<+3bcyB%X)804CX! z9CM&n&2%Gk9$=?jMHsB!q*u=L|IR)% z7xcLKoP>Fkvf)Ee5hEn6254~0D6+(|j2}>*Mm31He196@$BB~Swz4JV2nUR(ZEAwG z_HZ&K5j%KPe~eUrMW-*yAxC^STPX;}9!Fp)tlTj|?fU$8#y&km0V9OUmdL`6oP1k% zM7qV^{P)-@19I|IrrBg+xLi>mMxk(KeOs152jr~ucjRL0J$9VM&fgb+-TB#XX6Cx< zhAI{CX>>xsWabvazyE&L`~voG=mS{yn%6bV%zhI`)|B})+Mf|bZvA_)?_cWMTOHpk z6^5?00nl-2?0xH~iPGxFH!tT12x!Eymle3}2S536Q+C@{576Y#V4&aQw=h!t`IM;a zL{&(6Bwm%IL9UV0=E1ygWL;C!W$A1XpK#ax2o=$`GXHYV?LLD}-18N;Z~~9M7W2eW z+jx)x|LX=p7@IHH?ekk^53_@~*8EYxYI<%MHcCmoNH2y2X%cZi2WF*BI3T0Y?)`-k z^F!X?eCCa5{17o^23Tiz830uU{`Mx!9J0doNO^g^W;xAU(((^7vCq z>ok+xG@DQAdh3_!MtZ%6K*~wof5`Ky$p@cEe4W|0bX3}gJcU5%sT{$qY(4KnOVtYwtL zos9mBQ%n`WULz~sBPJK8#k=sn0)6^2L+g4qs9O|Nk3EuNrpJy%<1lBm4&XexbdM;W zz3hT5XY}umu36f2+D<^M$3k+AF0MR?Uzv~|QB|2x@FvK&3G+KfD)u?r88b2ulS6gw+sdX>VKe}=zit|v}T~FY|BtiaFHuF)J^7uK7oN! 
zHpMrQ>tXQ{QUM^Nwj56YNK7Qb&q=v%YpiaOm*INn2@k#r@4K}v0_R$0a%f`r3p?mZX_ZzH)`UkK8|q3k*B>Cg?Y+P6E0qos!L86W=f{(TdH zEP6#D{SFvUHcmHA))j#368J(05lLEYHIHP(@%`kjETwcxlwy-?uRq?NZvuwG{ba!O zvcbcnk_=6Wf#^hR7kF&+p~;?f^h}I)H(B93hGMQHW&jbZHcTUNGC7T((JPjEU)b+b zWgMmud}LeXCAPaUuDco)CX}!aOvOc|b^xavYfI7){N5m+j&eQ9Ml*jO$+J2|9!FpX z`g&Gv;iC_deJS2no$(_AW7zM4c7VEPb@bBBik8FddvgJxLfx>aWcI^~@M40F)EIX_ zBLV(fUyz$c9+U_7NjIjzPYnv)Vuz=LH1XMNuqm0qeL92qXhLl&%x(I09#mfLN;P0M zdB4-o|EagSLCi>hWlTOxQ!x~IeTfIfs<|jpwm*6bvXyh;aj=rmi&5m*u(M?6pLO{o7{ZSw@yIY~T?3`3 z3L#S9vv=X)Xl*E_ENR1Y%k6mgc{M%Z6mXWlyH5~?zPl8@c{u(UjVf`@&N=D8^<)92 zkwz_=4@Ig|Lh76}0xWi$fV_o=)fVDMIZ;39>M|=57mErJBdQQ@BE0Wu|6GdBYS5+? zQ>8ED3Nk1uJ^|Fbc}6)v*We;vnAQ|g8f**Q-n!hkE6A6amz&lcdiesU)yV-kKHOP8Y_7`?wcDQ1E3_ zrv$5Wk^a^a0dJ2WW=8)xp(ep9nXq@blQw*0nl!UP#4OC$-Jj0$`N#O@;`M|yqbkwZ zbXtsR)HF$d&VTb0=US)6948Zu{z?K#4kfL-0IebjcFEHx6dQeCe9$q9;oiaAPJ)n6sn&EDFt-xcGhtB3u>Fsp2|7`n;) zqx$vNO z<<-z6#shhbJbJQ89U24HEQwJ^;MXgqcCl|Q&GQ(d-HcwpwpYbJX>U2Z>r`F#F`Mg( zq^AL2`unXo})lvNoKqjfj$;>yJ7~UJZD=V(n_&j z@a31^N3Ve07N^q{<8R7<-hudGmgOx8^P2>^Pzn7Biq9;d=%-k^T!Q3}Vr7z&ayyC` zv4ycc*US9^>e~o0PKbMz!h`nK#TH4Q`9{90@ru=;Bj23ed!ikk;jEC44p2@0+VYen zl`S~`f}{7i@e{4&&{#b;2Q%R;ZexpfD0Wo1$N7(u)!W6ue z(V^i1V4DLdrD;JXLyCVjK_cX`8NitH!dpmrq!wDvM~W>{fIo!1b#ZpjSR^4P`zC(s zISSa0s@=dCrL>so6ey!ym%CJzbCiqI>m`t<=0uUo!zw_)Q*!Dv!?skxgkqwEP=TuG z?fkRZDt({f7xkKBR!eWxb?m)B4w+O4Mp5DwU$rLAC(QpXUUu0wn3Lt zN-Oqe2h`hSq}Y4c{ux***@RDHpeJBY3g%CgmTULmf($$+v20wvQbFh)q zBv7Z9Y@QGnTFzrlPS%DK0>n1#SdQBQ7Bz;8*7hmo;Npg)FKI>!4r~rHY_5LI^z(d<% z6^y;{)5WIYJI8v8{J@!&6Kp8MUgPMh69|~kC~N@uk8bU2AQw#c9`lFrDRuN`IBL6a z!o&a4bLLk$S0@eMAW0AdTT+mRw!)94zzfvGI8m}Uhvl2+v4FZLaKm@9Sh1b5BLps# zaLDLF#W~{ZX30jD-dNhCs#DEoR0q~~p`8X`h5ncb5XQsEdc#n)Y-k8LC6Tey$8Nmv zzH|CBa&7E(QI_xG35*D+SAf!!4g3Gdd+)a>lVEN5nE^y$1zia%N?1h@Nd|J9H6SBN z5s4B66eJ@sNFK$E2!aTbm89gHlOjQoEOAJRB#Dyqyw&WUbFS}i_~s|Bi)VWJsjjZR ztLm<5!I;)bz1z-fv4;=)Pd_xkcg`fY`QBF#KjZfDPRZA~loMHeqayf3j2D)-EwP)* z1u3_+{M>x0yL;LE6}tG}Is%Int{2Tim&45fv>et#dyzF!l|GGF5}OZtQHG#8yZ zPT7W@peTQ0!jNPz<#V9qcWB=Evc}sg*=o4E`}ydGdM~O7khxzi`pA-Si@WHs*l_!A zKUB#zaD)nQ`q#IHRJEx$_+_{rjHYWD+IudQg?u$43{tjx&0x#Pbrn>Tu*Hkh-Bi9b zrBvS}tWCt({^xMskyaM+Li^N*z{^#SQx{|SGcA8^;yyzx@BNI^F*QL&_gW37qqmJX zoTf9snOpkZna(Ql!$j;~yi*J3?EUmrpurn))o53m?Y zXCa?uzv6MMk@GRFJHYD8{hY*#3L<@qs1?}UyP7XD4!<5r9hD&S0#nEVQYWqH{?pia zc5U8dF7|R?x zpJNR51(fa=iCEjA$$Xg4jGp?oWX-nq)|uzrDRv=~+L$KB1;GmOg7o>3ulZEF<;sXg-eyFU?Vq@>^vM>yGUtXn^VIwM z6myEB@lTS!uhhycx2~}eR^N5ZE}z7=2(n<9WQEtHg;ja?!P^d7?*sKB_ZDvVaB4=^ zxumOos`gh3eLdO*%i;Xd_Y0NFOBqMY*>;g3_C`r=7>|#}pK^>tjK|6bvpix8;7#p- z%E_(2(Jq{!U1Ib(@nP$T@147Au|i370%Y^rpjTE5l^5EoUsqG@z^SS0s{iRTGNyD% z0IPkJE8G-6p3B8wzo_*^Hp(b+Erm)2{!+;_PHTWM%!_DlQLEq^gNHQcLs?3W~D zuZA|ulg3b;|F0Kr*^Ic)L7=}W5z22CG z5@x~lgs+(rD~cy^*Nhf#J#6i$y5l?%8`=D9{UFu_uGi1SnGgAM8=57r@eWz1nJBox zVBuh@m7?T3jdY;wPJIIszJED8s8Y`ym>$kuVqkhK?HsuXz8&-_BYIduYAzRk_uQ`I zJ*-}#692|QZ59v5N1#wBSuRp-du5xo*_pe78S*JXEq4V8^j9%$KLkS8rd7s;Gk}Mq%6zH?Lg1C^zO~k&s8gcx8OQDuvb>Z=`tz2uf zxN1&Z`e7Dd!QgBu$HJrJhKM-J;8A7UA5V1yiE~fq?hx&wkeLsB5fw~bs`!D^#7M)U z*1v=ByVGl$`}VSrYHE?J3VP)#i(qH-F)!^6Yxx;&!X;2W?=2GXOjivA zzgqo=HSZK2B?Ot4&%20&5%dqZRyt=3vS%qw+oUfSjujLY{0M=am-dX~z5!j&&R@m$ zZ;24OEjNyk)6uM#sKj;obkRhmHfKBMrOdw!U);S&e#M`dRcL4cp1e_^4;-0}(3ss<8nwv)ecRncE<>0Q$TM%xF&yUmJs|dkWakX;N(Yp%W@0sY*z0TT)Fz2Ur#Fp&asNlr;u zZ^xt|$6b8P0wgf58FBU9j3GOFn11&Bt6~7{Y@|555q>UiT3O=H95S-JV}pT~rNB4q9-Jdq0r`Ppvn zjagtqWWBX!s8lm@ttOrTw_%BKi-S0LhMh)K z8NUzqCeI6)809+0>x8e3!f2 zwO+M<>Mlmyvhd9`ab)Utun_IDz0@;TXvrl3UC}UKA2xY)oC6zSFF>KYGK`fPCCs`L z9m~01FSrTnup=D3UdQ|7%>Kh}tU8)-O7g|C@89d1EN;f`@lg?6==b=p8pkty&U(*7 
zhA6GWV==__NTsg(3hqziT`v_ImE0U+Ax!PD<^~sD;Dl_Rfi2eu-BRPz89Fh1DFJy3 zeiq9C(IDT?;#WI@>vfE@MiU_K9e1eqg0yJf_jK}FG~LiA(D2+bp0-1>p^`8CqkSs} z`yBcsl0LtIbK_hc!NS4`_vF1#muVYGax`IIyn;cGnER%z9Pa;+-ml180(Kn#Dd6P(OX{N*8L-&f77d|H@1Vy*M zr+0L=U5b0l*)J!e)x8d-FybgO9Q#m0oFi#z>93~-w5A2MX~REbSP0eMy*4+ydmi83 zXh;+Nu_6ULqVn00MT;sIW<;7Jcx)HP+ka>`(f`X(R!B*Qxqgk9HIYv)slJ3OR9^Fs zpyVyd(hWVNX86IN!3%=aH1+!85WKQBn2{&bB*tKT_xI_b^2+8|>=&fsb0-CP<%O>q z+!RhTkN=o9e3mBeovw$?BKz;r#zW6p0W9BVAy^qV_f+^(Ol2`HDao}J83Cv58#(h% za)eXwI`&x4u@H=wTDlEzP)wDdyV3P-A7d_h`SQWPCoEFrS}b4U3q3Et$lzvm3Z>u^ z#RaUB{23u0vS+~sHg8+HJas=+{=%C;R}8QB-bhz%naQkc8u<6Nzy=!vcY;k7K)@Giv5EdlXk`TOI?C<#*ET;MBuf1 z14l}QVd(xK--Pkn1OplPrQKlWx9e2I_>1CgLr8+irU=?F#CDioc)&tXpX%{B+&jJL`)vApZOTjvuUCPG6Q+O$o3IC3ZAz`h!={M>%Zz@Uvi0sg{|Xle{L6(3^ocrlE~ig^X6j~t;(%Sv>7>m zLkSd0D=kIiSNVv8mKxOKwalny5Sv)L7_?pM#jMY@`|e2W;r?!So*}z*`xo~oAR(@) z1}atD{bOJh3E}2U7WGN+=+fh3;dlp|?gZN z&CTRW_{TJrY7@EaoSxo$q$W3(Api?Q6{t`=e_$H=dd}z>0eU@yft-#iJ9{-j&#%iF;&b!xLJslD&9L zjtQrI9a~gN4li9!fL=2X@DyBeb@M%+G^MeWt-dGaOFVbLh$D||(9awq%F!jRmX3aA z*dEaS_~|@-n?2oDMXWQV`n<~n1XFY!-8@tM{YyX9V3(;L`s6*wA*GyOcD z%@yi(axFmNAEu7<4%71`kUA0T{FNVU71Q_ogK^NJfCY2?Z<=u_ki+`63emoM2`dTv z%*0i!6?X5+l?P#)Jy8`)g2C)21r1ZtoWwr|utY!1AKz{kz67ncp0!`PHU`9{V^9Z$uuj(+mE zC-!bEZy;9eiDnI$0w~se(!)1*;IEm=7X8dkv+ut@H{H5GzB*=> zh;h6K{v45!CKHaI`+ci@L6@u8%TMY65C68#wvLN1-B6rV&@wY+T@IdPeQ|`wa8^lPsFlqb5;gx z;DG0$r|?rhBw&#~`47tw#%fjrT`ub%w zw0}b3tRlzjEO6$@v~TMQpjfmM8$snq0L zv{<&3zMF6Y26fvwD9p4=NW$w5c*B!-PRSkafrrT@GNVj=VjK#zVNb*e-+(@>Z17oV?UM1yHfy^qbgEs#AQN_Nz7 zEUz{Gd%{5eIW5mASf?3%3X=oJ7Ps7;o9R$w>{b*xbXRI$OO+m$)f+Et$`I?JF>p3E zBc6Kf6vs zLGq_6bnNDhYv!`_Hi&etZy9XD5X&)k{|^c-TsmTto_a9kyB-$d1}grln z4s)=vyjb5#Shr#eWb?=~vIU$_&)Vpp&dEA_=wh{vLl3|huV3ONX1)L^Emy|flhw8unu`67(iDu4$4YMuHeVSZ#A>w?E zdrExsGpE1xJhGCbmt&xa<6XA^W?{IY#0xq5=P=4q7{yzf!jvh;JY;E83)i~Q94{-| zPqg?EGOZgW&&WfOVATY~;_e$icxC#T`@}GBnSm@H)leNLwdvJ+kEOEdaiTLDq3WYt z0`_E!MjnEH<1r&7%$iqiOsR{>uXQQzjzUB*{(T!yZ(h&(%Wp!<6Sriwho8(%9`kzf zm~kRJAb639*u`FvwkpRTj#qXtc3mL7N!%$~M&Xwx8dEBP8Sv2h#I%fj5 zFv&7$#-V_x@a8B2whi*pA=Wgh#NEq^W4|siRVma=(=9TxM|E4CZLx1Ax-ZSkk%x)WZmV~$ zd{xO=k=uy~?Ihy|_M^}ho3U+jwcm!fUuA#6I_2-xpS@#>kC55;8H9HLsV5g&;4;U! z$DyG+79G6!Md?J6Q{3dm>eW$4>4--E73mwK14Z(CHDMKu4IXP^-%YCq_~;8dnx`>~ zU~2y$TLrXmxMU|jm}eMp6^o>EcqC87z@7&>yZaFYd)z*Jge?*`{4~+1iDrDU-c!|W zTBScB@4?xJ>PM2W_-$MI_eJxwK-Er9!aBLNr^l2QP3bIfsf;oBA#ERw7nWnLl5(u9 zI8&*{a&^3;=pd6=R0w5o*Rc{A?v%X(=DH(g%CK#OW|vrwKgcoK?O)aF{Nt4w8X!Da~eQ z!GzkSP$;-2jcsdrN#TArNK~|t()R|d;a1^xr&>};EtrBKFt3=1d&x^9@u)){H%*zj z%C*=Q+FscjYYFciI1d@sw`%uO3Oai4s^OIT`#U$KH;&qxCCIEaDk#w69Wtr%nhu|Gv&eL|QUA&5w>)89cYGzT^ z&0K6{2_y2DO$@deE)Wo>^=WC>P(w0z%_e-0Nc%NTnjLRYg~f_U(HY#gw=G`A@@tLWtl z0z>T9+jokhdN#zmUqYBE6(a#SHsL>7=A}P=ZTRiteZdjk(>ZRzk_HD;zsicc$T3#J z+Q%<1MaCR1WWnWo%5+U*0V?jHX)~obB{{+lw5G7sfjQ9=(a^QCOwL zdX-?!fa@YP7e1J_^NOVr^=c{I-Z6B61G*o}xeG$NNLW$`^%{_NrIh*jW}WsyMchY)!n_sdP$%HE$9 z^9%4||5&)VtO}8LL)vry2L0uObQtDR>ZKj7GgxfyGM#xil4a69bxF>*wB<(Edfiab#>})c;LKkY)B=ovG>yv@;+rD;&SffHW0K&an3hnd@0? 
zzd}R{HD8C;oNoVA_^@^1&D~=no6odzbM|K2AD9v%Cr~aq$yPRf@d;z89IaUS$X}vB zJrxLL$z$CuK-uHF!(g(~b#98AU<-@)&6KxMWveh=N1l|I8T?kfozZodvbARC*+ZIG zW6kZp!dokDk#w1_dP!J@AT_1b)4+6FzP_mcHT`)FTrSTVOXV^NG)oF1Xt5GUZ)^XW zuDWNm)!p%l$zRM>0N;(Akao$W4oIMVq~wXm=*DMr4mtK%1yi*_bqny$r|9;@dO$ob z&7KYpt4bs@_2=<=Z^xKwmqP-K1T3_bE?n}EqS&>6WFfm)c8^PsJ2J$68tKz6q2TVP zZDu@^UwbaCeP{Y~f9>o!5drd5n2s1^QwT@fi`+I6eCInr14b7#Pv{Kvz$^rM+CRiS zVPy{p!M}f>HjRz(Q`6EjH?}+mmsl#-t3GY_@z6QcU)*ofiiBPRLbtS$ken^_)+%5B zt}spz=SUH)tIigXm~Xy4cgm&~n#kMX60eQKWp z)@VE^Ih;K_X(!l3B2Q=e3<{9-r`rmJO^)uTcTvNM3E&IoAhIl?kF*1`hl$R8(!Xu= zD~j1z#K>zpGg4#B@#w^tri&#z?%78cg7_8SnlY4dn%K^Bdv;K2d9&TvL+I;d>Gmz> zlhcFls;`j#l3P-m;wI~WX5781`{8(_tg%CaXGciY9R+zc#K9_56>1AVDZyRa3n`yJ z4+&=FV6V@Dy(SGA56$ie1E%&PZ{hOcr)~;oS(dJ1=xw3Eku0gy)_1A%7bh&J;@Vvs zjl>K7On*lFTsth-qnmu1h%=|~LG`Oj!H6r;TtxxQ?ag>`yy3E|woW$-A*K4+kOhvG zh}*Y8zmFC)#ONR~M>A+Ky*7g_L+-G(gs^7BE0+>AEl!bMbGg?U)7X;z(nSWIVhC3@ z-C0x8x+Y9V0EIt^UaLR|A9inGYqjG?2>rRnTi1dyH;vMuzpNeEFaa{$CanyWsde9b z!|mwb%dPR#Id~&)>LvdD15k%pkn-+N%HgbS4zaGVV^-6K->Tobxnsb*xrm`zqFEDqETK?e6Aq=Z*OoZ3Ytn z7hzr9tF;UQimTjI-_OB@m2a+J2hIV% z@9P3X=`wH!_kmcQz?MD%Uz}_S+&RnlF;W~99pa4rX!0e}eAmYuEcINK(%^QOk8ANu zU|4W>j80>X%bOW>>91c8HOKNI@&A5$DaJ_Q2`s`mEW(rI+_uW`TK=OO0=EXQzfb5n zpZdj3E=^sg%xc=>>f7KyG_kgI5;-lH3J|P>a!rmgwj=)RgYw9Va>#*Q_ru~5T(kuG z1Jqwid09*I z7qVD+dbkt6(hwfVSX`u%gnJZpy}~L zw!I;=mJ@r;^s^kfhn>afDs}%UdHx2!yWI=&+OOASCwZnIj(?W>$RVVPL^wBjSeuFK z-_l}iS8s_fN3_cz_dxf9-F}|5YYXXlx8>>fX?=w-Tq@uTULX{v7%<<5c&OmkDLC%} zp|LooIUm>S%2)Xzm+wv^rBK;zo?pP1(fL~k3*m?b?!iPk4FpD_==MSQHT-KRaeXvD z1CPCi1yycbK7%_Qb9b6U&<@V@L!pkQ1rzgn*oF?fwcj;bu*u$J-X=V%+Y-6XKn6S+ zgtYS+DQ1tuiL|HJm28CoB#3K&p1nwcR&-szKFfULfqtaE=t+P*gyiV<#3O|K;L-;* zL7(-oT*vN|L>?B`gHRE$QURS*d0f2S%OMK;G1%d{!H7HC%o%7d(nhitPOgRu}qme9%A9eC$E zFIA$k8UG}YwUSN%TYEF^J{e2ohE{)RY=T^yk3baau#aK|!Qr;_%6@e_;=N4fX>RXS zdAc{}%Yk1U$Lv9RMY%LdzW|%T{ zlK5YJXQovx-W{YB!C-*{r&|mm!c!S9bFh4;ZG}Q{Ua%Fxpz!0#W4vCM^bZt0|KBqr z_vziokJxOaorO1b)2Htg*0B8Nb$X%c0PhYnT$UhaFsyH%qU^cJ3XM@riN%EBmMis; zZ&ZhNIoCIyyBXc z(kEM7+4HMe{$o9^b9>$f94|7H4aIR7n>{dZBt>7v^70+Eej$ZG>E+k}E2I$jN8V7R zK)p|DUp{peyHRW}K-ZF;H>oN>H0OcXkR`*+Wh0+F2D^c6BeM#^p`Z)|kdDcRP8j);=gsmrbHPoJ+|KRkPiz+;H~P$$ z02zRC5y*$Jv(m74rJ$q`A)-vJ&3>WhM!jIKU9tQ`AOEOTeyCl^ zEByQ8Uv-5!cZc0(Fxk4%5nb`&zl2P>@nA%enxf-dd=&hHIChdzSP{iqh^iOy$nOov%*~| zRV;+CnFV!RAxj5Gz*qq%tnYg$oWs72m2>j=vXD!Z1+ZSN%io2_=b31o|6v%~#QbaL zU#32dL7_>E3HBg4!U>WX;hTBi5@}LK`Z)vxat0)z#2Ttkkr<-EGGNr?LC(r^rBa6S&!X-3S&PeR0~gI2d?Ta(Gl)Q? 
zN3RGCw}4=Sf(oTQd~_{k;6~BoZwak-4c{P#b%pg=dCSAP^6Z0k-KRcXLi0I1vi2MU zkH7ln^AG@D=}`~f%q2P+R4VnVpwo~{hM5v{J9aG7PR{bl@^u||Em_Fkka%KeNnR?r z)m6!B8PF&R+Xo?WYIkJYB)eJ?HA>E!7{`NkMBKf&)C9 zIlAGaw>SkrDESGR=Jk>S8gqp&6k%v};}`EI9KzM1&Opw=N=f_e$qw{*u<7JF^q^)OJ(#6 zWt>*t+N{@CYnx4te*2v{=0dax%e9#82X#K_4y9CZO`ubtZ7bgoPLAGAD~4FplEdj) zZn7Yt7PJRj(iv%>nCeE87_&Ls;6)>-?3#Iq~FZ4#QVd2TkwgXHi z&086-du?r2b~^vTF&i3U8k&X~9$3gM3v(5An9ix*@hO!tkiK$N(=;ZKVOcV<9Pbok z&i{;3+$cpibmHszq91zLQ?A|JCYNC%JD@%A#U%i4&vsHNVf2*guIc-#b6(3_>t8Dh zvshtID?^Jz6boV&qRxQ4Wk3BndOC|C6~F+%{zMf6zTqibOmSZ z$3rW+KsLT|6+1=Y4Z}ZiVIy}{$VO9->@r?z`kQVDx}|J!BGsZrZzyKl#}ue-(^rem zM`shEP!@85BR+Re`%~)`;RNglT92oI`>+Z3OirHp2dBYLfW~n`A?6LDmG2d@b*P>j+tUm2%Dn;=-*j;BYdBm^&n} z)3V*wa$DbN%(p?f5|KkM1pp>}Z+HuezPCNlD%t=nBbRPwsyEYGdTEZ9sKrz8#bQ$y z(Aw{xM=a!|nH+VvEvs$N`S71u3p&68WBdzt|8e9u3mFJ3T$HJaCoF_(5!8Vs7k}&S z1!n9Zl7$c z4un-`4rdh_)XgX#?rax0zY@$s$RU+nQ^lDnhGieND`H>ZkbI6LFN0+2d~tdjyRGq4hC`m&Q&y51 zwR$k6>%S1#5Chsme%>!qFJRQ3T#Iyg z>713&7B>$lZquXX<<34uXpj+frOO-J_W~&P z(sU-A=umRd6Cm|4w1`JTlZd@T;QaemxFTTi7%73`R;Fjno_%Ts{81b6o*pG=+sW<; z=9*hyiH2RWh!k%;BaH&(i_UU%LwRZijHf>qJXkFk9@^eHtpbo>y(vYUJi7|kG{C&v ze~b?VcYmmR5A4>ROcdFlf-{A2&#U1++gy!v)Hl{xxUZUegzR^ii8i%`4xU4XZ>}LZ z_gv4WY3rc^n)4AYvU$HO8DeS7fTe6ol$O7e=8?ogc%9{wa)T0zVNu}gVsfX0b|ro* zm9yN?ye>A#1cCfoa9ULjDkL;9m4h|gX_q+b-@m%jvs{f4l0bl1fO@@(`^WFilP{}` z+sb|@Q9_4JA@?$QpvcfxNC2xz&bTN*{=S7$KOYZ50*D7ud#J-(LZn@?%Df$PN3rqv-?k%lf@ufm~aXQz%v?cjx zC%=-&&JP22CwcOi>URNbx>BM~W3%ZIV>{(@YRyNRrvKVo!9O>sxJp@2(S4Qgb~_M* z7N4awlfa_x$l(a*?(w#howm3`3o@P2_fQ(B&4-q!1@D@oUqHtnttnmy2A;`oYGY0a)!KA~8~cP*6o*n>y+B0e zsC|1w!UA{KwtYI7;>+#}#dCJS@RI4)9MKV*#;n0U*ls^~KibafX>8(GQ+y5tFYt{v zh!)Rkl4@f{HS2FpbIS7J1JDA^53wh)DioYeKQes(h5;Y%mJqFrUs{^_i~sYArO(bm zAIpYbn%I?3^ZBnj+av6!0Ij^m)OelQ$U1MKty+Qg&x0EUXmh9`c=Jw;f}0t6L;D_b z`ltS<&W#Ho&Yh@Z&=y$mwEvcIM>n!S5@;9HiP+H>N^Fbw3)|K4$~X4B2Sf6sx=0oX zP+q;BbLuBZhfGpC!2{To&u-(^d7VW~;rL~Se(ff*19Z&!G|Cx;%exE@Q6nJIshFA! z4UCNSEr9BuPn;6Mntj+0sg6!6%n9;t3D~ANitfpKshn=0dhq`1A>viqd!Y7|pZuA`r&U4oxI4901WWV@w`Ab_isggr8Pb#PM^rmQ6jyv38q21CIX zL{&;(bd>T0&sWes9uqF{j2g^SW__&gfyR*m-@^ns5 zBdZCyn{QLR&D|AciGQQPQ9MbxFMGF%x*sOVH{0Hi%-D$O0ttN3#u`W4snai3B;dr? 
z@*K#1zSTVK;@^02v1LxcfAI$Fl|UdeV}ZChP5u$qgL+%CIx(${UpAZz3I~>N7Ik~M zlEeEF4?#?2Pt#-g@luzF-+CCU%$JkP_TpXQ{*9dSVxCGzy|m7tq4uJoo?h5Ll+6p~ z#|@dE_AN9>P2E+T@+SSudz+0<7{-eB z&U2w>SST+sqi{aePIZ6+Cq7eOJ|05fE%&laqks=)#=rv$FYfPhk$G=)$K~dfmTPDZ z2hkkf&=avJKHNZx2Xvm0-2tI1DpsA;Z0fj zujiIq4HT*=06oMJ02QVWxiakZEiEheV$Puh&CjqdO4)Hq*w79JgfYiWvGg7<=BpSJ zdNc^(snZ30OeQjgoIEdP`ur5AK%ihDco)!pl~`8X0V3#hlQAjcth|(TfxKBj6A#BY z8zApH29-6SNKyBnqjyq85DBPOnDhChe46f%5}Nb-e+!Xm&8Cc=3A4)l*8cVd3e{bv zs5d~lTZqUuoRi7pto+puL6ux@Wy3j8@E9)!p5|^Uv}0XOeFRZ2%o#2}z{bPCm_0eV zK^%Jn7K?|DXXNmPr0e3`xZd8zg>VB9eai(jo^Nq3XJzs>iTo2fsz=iWcK&@I9AM$8 z{JJyCK6djyI%ia1lEsih!ko2Wl2Uz~mA3Q9@DAx=M!c_p7s*2xXg@{m`xz4gcmaCd zVl6jp=-(%SbBx})NH~@;L;U`96l^>UH6*~CuZQN-ydOpS-p`Q&q3m0FHQN@*jE176 zWnT1>C2&Ih$Y%<^?N-3TpkHUk^d>(NeKzt%-QQ&Z9DzgbdDc9~gG?hRTp=}@qbDR{WIm#!O7&szC+VoqS^Y_C42%Tth8ZMTyl@NI9ET^xQVXSlu>UqmT z?q82cTxK!<^-bX>a>rwYQY`v58ih&wW65r)1)8Nj}87 z&Dl33Pl$J2gViNFq2Zg;?C9@Shgi6HZCV!_^l2GfOp%doOKBV%@>*j0Mp$@^2L zRHcMl%f(9T@3{^t0-peLKrq|nrYyOmAwwZ0W!Sh2u+a-58QN0!ZY z@;RFFrY!lTn^C~?!S`>GNV#HRwt=~-`0FpPy!9FTb+uT=yJBWkkM?d9DRK{i!6ZS= zzs8@D{%QYy$o+d3#oJrTT|Vy@LKWl)FL5{Dd|os9;?^>UAiM!$^k5d+eprUC06xBK zS7~e08C+j^xHGJR|AQuc_t6G<>KTt~=bSVGp2ysrXU4jr-4-?!d2_!>W)N+ivux$S z8QiZOQuCu6Or+kSxk5f_bFd>WNsxNgL#;oJ7l&O3V}qskyr!+o+?~0^b`OgA0$3+s zU<1WzK5PV1!McI}mZH1eh+>IP3P#=yLd7KX)$ryS=oMDlVZz?r<*6~hLhfG z(qL9^;raIuN@(OVr5KS<-I-kEm#e)Pm-prLvZ7+dCM2G)Z^$%V?)gO`hsGSk*`27` zQ&$A!Lx`0Cb+9(?ZAw@Hoz3iIJ$xgtOYF33Gaoe-#zu7?*!!E}d*nwJtZa4iuu%Zjchf1DY4BO0$S(ogH5vDhWpdKku#%0Ti1*H&6bukR#q9O*XS`s zJ;7S)5Fpl;5yqEFb{!%3me9nzfvai6w*dXh*J`#wIL+2IKe_)6EE3F=5)wyl65IHV zzIe!zUAmb9=Q)w9({@QblGS|v#g;~NYh9nb{9M2nMJyJv@-R_kFlG8Vc~FjdtYU4e z7?#x!>1+BZI!@F5+8!65vQk=PD0JvHnJ8=&;{}90QMPtY=@vxxp z&BCw*@q8K~O>F5IwkIvz^{pByi`vn?%?&vdY&X=;V?0mFK{Q1I#Hwj_|KA~EIDrpV4snRhR@A`w2fb8$P(|wqRB(+ z;P-9bBfm*)dVC&Mp3H$X_}w^#M1Bu#HG6j42#P!#*gwn)Db$lh9}4cs0NcrbjDlz) zWWR_kSCr3dz&H0H#7NNc>Sb!lV;Fy8@;$h;2RK6x`^hLedo5!KxT>9GD7F zp#>Wcj+;V>s3%(hO0jr^>39x~<u>wn(SaD5oqCq=2;H(^+St}3>rdo)da&drof4i6Uic(;pW%rQa@9Z;!h-cVJ!(_d~eG)`6lQ{<5Mb#td{c~PyDWa%W%YO7rdL_EFg>vf! 
z#m0SgzoVxFUFmlV%^a540l8ld<#!%LE)mvaz=nHtD^a6xQHe7^A^t;I;#&dnlbsV) z=QnlVysRL$S!iz+pMmJoUmFGCue-z~xLbqKOgRyVY}uofFcS*f$Wv9cVpE2bX?EN@ zIwc>PxF7s@agPs=2^jCFnr(hC#t$E7D1t-F4z4#r2SrPqYqEEr?Y_(q6Q#M;?J-#%tVjO+#xuwy=gPd>>A_0xRq9>BabN~lCsp3oj zVJ)EfTr6yIp$Gkvto!`_^-vQ zYFqS>6rys=b_e!Pf@nr-G?u8U{U-seJAWxTZ9mj20c(5o-E;eHGxJVZ#x zS{S{cJOj{y#BewD(xNj+J&tYagq#4ngiYNXk`j zH_qo$k;_J?K&rQ>ZVd3ZXNv{wYs??WZ0%Sr6~jTMeE@;~ofkhq9E8DR2a(}%PXSjd zQsv;}-kj%KG1F^`-bzQEblT-A3Lx;aGeK(`dMW!VohU%5@ulg$-#N?1ZRSyo1|^ZO zvaThVB%85&eiqgD``x)xBj?3k{*ATZ1!AacHaxG-f=iz?ncH7B z)piv;Eg^f!m9(Jp{As`URqww&ElX}4aw}UM7f{>9-AarW_yf*@o!8}HyFgBn;TULb z5K+ttqBS|fQGchI+AWvC&MhD3`;jy1r`}y077KTQf|$S5{W5Y>d+f_RaWgcg)7(K4 zXwY9|{k9@X+<&*W%lzdzzNqaZ*9Ey(Uyd#iWN}9KZAHM zel(y1v}MdFtRa!HCR3{K*wT+-3+I!iPVPh8NK!ihHx`#qv$`^rONE~}g$`fOx9~;6 zIUD*p3-M)Y&~TD7Qso`;DL@&hZvog_@pJVcC(}$|B1jr#$g#W7;K4N|iy8_WrUw!i zWMlO6bCwWl;L9%2i+a_C_Sa6z5R)q@uG!2r8|`Q6R|cAe;UZD(lY`C>R>{^H3CC^6d&x*;C2$qfMY)q z9|7A_4=oGO4CWEcC7(AG!XJi^$t%MCp}u-W^pBl$nfxG)4X5zEtzCv|J8_1MIVJNz z-qqciC#KH3`aO0L+f>trAE-k~MdsER#)8N}sBX{k%kMe*NFmqFAtPfMzdVVSDm5$iymBpW}0h66S*_ZB3 zG&ozV|EP|?epPLk>&=^T0oLMqwI`;}KZdopoM$YSUc11#SK<8|`L^8BPiR$Ck<57x zC?zm>#J-dI*4@`};IZP{$NY*JvN`ONl1ZZN3k#3Mx%%4c`hq#w@u{QB%4a;{RjPba z41$fn<+Wy5onC#D-9=_^eM$ZbPwm~hHhT+GvWhCScf&wE=NjwH z3^|0~Z9`;VpYmvT!iT&Q#(+Nv%t!&?6HAZm2Ygx(I|Y5l5spBg|Gx}#(rN>wS}8sHDWmbV zvAz3dx}wvKmCdzZRLL!O9c~aT3f5s-7Wtk+ijv*))DC0oTB@s;U6+|DMeO6?3e_th zE2y54YP?Wd^P1dehmg$=4C=}>x!P2m!r71R*#&M>5rmFYqPsNh$WaZA%%C(Be6b8D6cZ9d}7w#9WFKs~&SHy84FzsSjP|{cZ zaZx{JNK|J@hyOtY-&BKf=)DY1?{7rSsq9UUQ*FzOHC9(F^mugpqc2-t`76v?-VMHr zh{l-asX(#LHP-c)T&l~G)r+pxAMdG2BuM4!vAud-VYKbDHrbKG@VfebQT&@G{y#>O zsNgg2;DIb*{&h2X-H+VS+ulX)qVi!w_PtemKVLq!Ua`4Ryy@W?yY%Cm)qaz;Q?gsJ zC*iO{z>DYA$OB8>+wbTPY>w1#-=Qv7f06e{dl`J9gQZt@0SXt|`$QY+&__^#Ual*>jS>_QP-7>~G}SRse9 z%s10_ifyJ3ouuFB88*fgxZETBkg&DTBDYRPvo87UYKM>h3?aOEb^CsYba?jR+042!8ir0E=B4+<0vKO;&s@T-+_X!a<{K z{I)tCTH0O_&ff#@3CVNl-68!{-t1q@KX0$uuO#06e*q{1*Zi*s09e6`0svr@(PH(G zRevB~MvH&LBQ3V`001C~0ssKuJgdb?;*+*71VAGP;ue7bfD0%9001t!Bk5yWOdxoj z0PXg1g1}IVWWcf@u#7D) z+t`&4pl{5Ek7coJInke;{(AX;000`J0002+X;~0hwJZQkv?vOU^`IcInijAdK7gDp zSMrx1(t}m~xz;EAg*apr01yxb002N}%hk)(y~@j&^D-eY)PsV+^7pHe=xv1oCQ4|7 zx$x`NkLBOV01zDo002N6mXo84X#r_5`CoplCIwat1*`j{FD3|d z;r~psP{-=usZIZ1{+%jU{=b~$FIV+f|BU_m0Dxpr0001_X1Qv*$?AR2SBnFy1p{;8 z|I0MNYN24&#bf~mgKk-!ys!Q}xcK?izXyfZC;F=s`ad^XBQ{(`@TQRzcLk9<`F;3&?*IUuLID5(&_$~gJ85z2v|TQy1uh=mwpuh;{aF6lvX4#w-&IgpUhA%} zW1rZQ)J+rgI*+;dxow4%)!&yF9#?981^(jQB`!|xS*ZsA^d1EO0062d3pTk}B+!;$ zUrZkC`dICI->*0Qe%p2K;!vHAy7bokRZVh>H1)psi001=f{{yGKK*)4z R+-?8>002ovPDHLkV1hagpV|Nb literal 0 HcmV?d00001 diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/aiken-compile.lock b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/aiken-compile.lock new file mode 100644 index 00000000..e69de29b diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig new file mode 100644 index 00000000..0759674c --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*.ak] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes new file mode 100644 index 00000000..99fefcf4 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes @@ -0,0 +1,2 @@ +# Temp hack to get some syntax highlighting on github +*.ak linguist-language=Gleam diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml new file mode 100644 index 00000000..b0081ac7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml @@ -0,0 +1,64 @@ +name: Continuous Integration + +on: + workflow_dispatch: + push: + branches: ["main"] + tags: ["*.*.*"] + pull_request: + branches: ["main"] + +env: + CARGO_TERM_COLOR: always + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: 📥 Checkout repository + uses: actions/checkout@v3 + + - name: 🧰 Setup Pages + uses: actions/configure-pages@v2 + + - name: 🧰 Install Aiken + uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + + - name: 📝 Run fmt + run: aiken fmt --check + + - name: 🔬 Run tests + run: aiken check + + - name: 📘 Generate documentation + shell: bash + working-directory: . + run: aiken docs -o docs + + - name: 📦 Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: "docs/" + + deploy: + if: ${{ startsWith(github.ref, 'refs/tags') }} + needs: build + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: 🚀 Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore new file mode 100644 index 00000000..3a3d38e6 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore @@ -0,0 +1,3 @@ +build/ +docs/ +.DS_Store \ No newline at end of file diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md new file mode 100644 index 00000000..62345b32 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md @@ -0,0 +1,805 @@ +# Changelog + +## v2.2.0 - 2024-12-13 + +### Added + +- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes. 
+- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting into a list of pairs while specifying how to combine values on key conflict.
+
+## v2.1.0 - 2024-09-14
+
+### Added
+
+- Various new helper functions:
+  - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects.
+  - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionary in a single op.
+  - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes.
+  - [`aiken/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#pow): for computing (int) powers of rational numbers.
+  - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): for efficiently comparing two value-like structures.
+  - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): for extracting value subsets from a parent value.
+
+- Comparison functions for various Cardano types:
+  - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials.
+  - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governance/voter.html#compare): for ordering voters.
+  - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references.
+  - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purposes.
+
+- New BLS12-381 crypto modules:
+  - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html)
+  - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html)
+  - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html)
+
+### Changed
+
+- N/A
+
+### Removed
+
+- N/A
+
+## v2.0.0 - 2024-09-01
+
+> [!NOTE]
+> Significant performance improvements (mostly on CPU) across the board, mostly due to the integration of Plutus V3.
+>
+>

see benchmarks +> +> test | cpu | mem +> --- | --- | --- +> aiken/cbor.{serialise_1} | -38.20% | ±0.00% +> aiken/cbor.{serialise_2} | -38.20% | ±0.00% +> aiken/cbor.{serialise_3} | -37.25% | ±0.00% +> aiken/cbor.{serialise_4} | -41.95% | ±0.00% +> aiken/cbor.{serialise_5} | -42.77% | ±0.00% +> aiken/cbor.{serialise_6} | -42.63% | ±0.00% +> aiken/cbor.{serialise_7} | -40.51% | ±0.00% +> aiken/cbor.{serialise_8} | -37.25% | ±0.00% +> aiken/cbor.{serialise_9} | -41.95% | ±0.00% +> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% +> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% +> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% +> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% +> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% +> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% +> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% +> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% +> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% +> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% +> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% +> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% +> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% +> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% +> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% +> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% +> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% +> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% +> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% +> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% +> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% +> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% +> aiken/collection/dict.{find_1} | -20.63% | ±0.00% +> aiken/collection/dict.{find_2} | -20.43% | ±0.00% +> aiken/collection/dict.{find_3} | -22.03% | ±0.00% +> aiken/collection/dict.{find_4} | -22.53% | ±0.00% +> aiken/collection/dict.{get_1} | -20.63% | ±0.00% +> aiken/collection/dict.{get_2} | -22.72% | ±0.00% +> aiken/collection/dict.{get_3} | -23.26% | ±0.00% +> aiken/collection/dict.{get_4} | -26.91% | ±0.00% +> aiken/collection/dict.{get_5} | -26.30% | ±0.00% +> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% +> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% +> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% +> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% +> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% +> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% +> aiken/collection/dict.{size_1} | -37.90% | ±0.00% +> aiken/collection/dict.{size_2} | -32.34% | ±0.00% +> aiken/collection/dict.{size_3} | -27.97% | ±0.00% +> aiken/collection/dict.{values_1} | -20.30% | ±0.00% +> aiken/collection/dict.{values_2} | -17.58% | ±0.00% +> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% +> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% +> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% +> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% +> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% +> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% +> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% +> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% +> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% +> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% +> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% +> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% +> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% +> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% +> aiken/collection/dict.{map_1} | -19.56% | ±0.00% +> aiken/collection/dict.{map_2} | -23.66% | ±0.00% +> aiken/collection/dict.{union_1} | -17.91% | ±0.00% +> aiken/collection/dict.{union_2} | -8.67% | ±0.00% +> aiken/collection/dict.{union_3} | -22.82% | ±0.00% +> aiken/collection/dict.{union_4} | -22.77% | ±0.00% +> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% +> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% +> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% +> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% +> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% +> aiken/collection/list.{push_1} | -8.02% | ±0.00% +> aiken/collection/list.{push_2} | 1.25% | ±0.00% +> aiken/collection/list.{range_1} | -27.77% | ±0.00% +> aiken/collection/list.{range_2} | -27.39% | ±0.00% +> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% +> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% +> aiken/collection/list.{all_1} | -28.36% | ±0.00% +> aiken/collection/list.{all_2} | -27.59% | ±0.00% +> aiken/collection/list.{all_3} | -27.94% | ±0.00% +> aiken/collection/list.{any_1} | -28.23% | ±0.00% +> aiken/collection/list.{any_2} | -28.09% | ±0.00% +> aiken/collection/list.{any_3} | -26.95% | ±0.00% +> aiken/collection/list.{at_1} | -27.60% | ±0.00% +> aiken/collection/list.{at_2} | -19.96% | ±0.00% +> aiken/collection/list.{at_3} | -27.60% | ±0.00% +> aiken/collection/list.{at_4} | -20.77% | ±0.00% +> aiken/collection/list.{at_5} | -25.75% | ±0.00% +> aiken/collection/list.{count_empty} | -36.83% | ±0.00% +> aiken/collection/list.{count_all} | -32.37% | ±0.00% +> aiken/collection/list.{count_some} | -31.73% | ±0.00% +> aiken/collection/list.{count_none} | -30.44% | ±0.00% +> aiken/collection/list.{find_1} | -20.59% | ±0.00% +> aiken/collection/list.{find_2} | -25.53% | ±0.00% +> aiken/collection/list.{find_3} | -19.64% | ±0.00% +> aiken/collection/list.{has_1} | -27.88% | ±0.00% +> aiken/collection/list.{has_2} | -27.69% | ±0.00% +> aiken/collection/list.{has_3} | -26.95% | ±0.00% +> aiken/collection/list.{head_1} | -14.03% | ±0.00% +> aiken/collection/list.{head_2} | -16.90% | ±0.00% +> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% +> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% +> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% +> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% +> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% +> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% +> aiken/collection/list.{last_1} | -19.18% | ±0.00% +> aiken/collection/list.{last_2} | -16.26% | ±0.00% +> aiken/collection/list.{last_3} | -17.13% | ±0.00% +> aiken/collection/list.{length_1} | -37.90% | ±0.00% +> aiken/collection/list.{length_2} | -30.89% | ±0.00% +> aiken/collection/list.{delete_1} | -20.20% | ±0.00% +> aiken/collection/list.{delete_2} | -15.02% | ±0.00% +> aiken/collection/list.{delete_3} | -20.55% | ±0.00% +> aiken/collection/list.{delete_4} | -22.46% | ±0.00% +> aiken/collection/list.{drop_1} | -24.62% | ±0.00% +> aiken/collection/list.{drop_2} | -28.08% | ±0.00% +> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% +> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% +> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% +> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% +> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% +> aiken/collection/list.{filter_2} | -32.06% | ±0.00% +> aiken/collection/list.{filter_3} | -31.39% | ±0.00% +> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% +> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% +> aiken/collection/list.{init_1} | -19.64% | ±0.00% +> aiken/collection/list.{init_2} | -20.01% | ±0.00% +> aiken/collection/list.{init_3} | -13.72% | ±0.00% +> aiken/collection/list.{partition_1} | -14.63% | ±0.00% +> aiken/collection/list.{partition_2} | -16.85% | ±0.00% +> aiken/collection/list.{partition_3} | -16.63% | ±0.00% +> aiken/collection/list.{partition_4} | -16.87% | ±0.00% +> aiken/collection/list.{partition_5} | -22.94% | ±0.00% +> aiken/collection/list.{slice_1} | -29.08% | -2.81% +> aiken/collection/list.{slice_2} | -30.11% | -2.25% +> aiken/collection/list.{slice_3} | -30.29% | -1.46% +> aiken/collection/list.{slice_4} | -28.53% | -1.48% +> aiken/collection/list.{slice_5} | -29.73% | -1.64% +> aiken/collection/list.{slice_6} | -32.01% | -1.80% +> aiken/collection/list.{span_1} | -15.05% | ±0.00% +> aiken/collection/list.{span_2} | -18.03% | ±0.00% +> aiken/collection/list.{span_3} | -12.49% | ±0.00% +> aiken/collection/list.{span_4} | -18.13% | ±0.00% +> aiken/collection/list.{tail_1} | -8.88% | ±0.00% +> aiken/collection/list.{tail_2} | -16.90% | ±0.00% +> aiken/collection/list.{take_1} | -24.98% | ±0.00% +> aiken/collection/list.{take_2} | -24.35% | ±0.00% +> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% +> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% +> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% +> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% +> aiken/collection/list.{unique_1} | -20.20% | ±0.00% +> aiken/collection/list.{unique_2} | -24.34% | ±0.00% +> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% +> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% +> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% +> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% +> aiken/collection/list.{map_1} | -19.79% | ±0.00% +> aiken/collection/list.{map_2} | -16.75% | ±0.00% +> aiken/collection/list.{map2_1} | -20.10% | ±0.00% +> aiken/collection/list.{map2_2} | -17.46% | ±0.00% +> aiken/collection/list.{map2_3} | -15.92% | ±0.00% +> aiken/collection/list.{map3_1} | -20.39% | ±0.00% +> aiken/collection/list.{map3_2} | -19.22% | ±0.00% +> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% +> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% +> aiken/collection/list.{sort_1} | -22.31% | ±0.00% +> aiken/collection/list.{sort_2} | -17.93% | ±0.00% +> aiken/collection/list.{sort_3} | -23.09% | ±0.00% +> aiken/collection/list.{sort_4} | -20.20% | ±0.00% +> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% +> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% +> aiken/collection/list.{concat_1} | -6.56% | ±0.00% +> aiken/collection/list.{concat_2} | -11.25% | ±0.00% +> aiken/collection/list.{concat_3} | -9.35% | ±0.00% +> aiken/collection/list.{difference_1} | -24.23% | ±0.00% +> aiken/collection/list.{difference_2} | -22.59% | ±0.00% +> aiken/collection/list.{difference_3} | -10.64% | ±0.00% +> aiken/collection/list.{difference_4} | -21.68% | ±0.00% +> aiken/collection/list.{zip_1} | -20.10% | ±0.00% +> aiken/collection/list.{zip_2} | -19.17% | ±0.00% +> aiken/collection/list.{zip_3} | -10.35% | ±0.00% +> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% +> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% +> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% +> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% +> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% +> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% +> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% +> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% +> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% +> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% +> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% +> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% +> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% +> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% +> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% +> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% +> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% +> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% +> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% +> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% +> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% +> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% +> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% +> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% +> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% +> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% +> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% +> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% +> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% +> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% +> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% +> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% +> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% +> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% +> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% +> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% +> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% +> aiken/interval.{contains_1} | -21.08% | -4.01% +> aiken/interval.{contains_2} | -31.22% | -13.95% +> aiken/interval.{contains_3} | -26.80% | -10.08% +> aiken/interval.{contains_4} | -31.02% | -13.67% +> aiken/interval.{contains_5} | -32.32% | -13.59% +> aiken/interval.{contains_6} | -28.15% | -9.81% +> aiken/interval.{contains_7} | -32.11% | -13.32% +> aiken/interval.{contains_8} | -29.56% | 
-12.59% +> aiken/interval.{contains_9} | -29.68% | -12.78% +> aiken/interval.{contains_10} | -29.68% | -12.78% +> aiken/interval.{contains_11} | -35.17% | -17.77% +> aiken/interval.{contains_12} | -21.09% | -3.86% +> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% +> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% +> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% +> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% +> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% +> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% +> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% +> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% +> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% +> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% +> aiken/interval.{hull_1} | -21.51% | -0.73% +> aiken/interval.{hull_2} | -23.06% | -0.80% +> aiken/interval.{hull_3} | -22.00% | -0.86% +> aiken/interval.{intersection_1} | -21.51% | -0.73% +> aiken/interval.{intersection_2} | -21.51% | -0.73% +> aiken/interval.{intersection_3} | -26.55% | -4.65% +> aiken/interval.{intersection_4} | -26.45% | -4.51% +> aiken/interval.{intersection_5} | -22.87% | -0.76% +> aiken/interval.{intersection_6} | -19.73% | -0.98% +> aiken/math.{abs_1} | -61.39% | -21.07% +> aiken/math.{abs_2} | -70.90% | -34.84% +> aiken/math.{clamp_1} | -60.95% | -23.55% +> aiken/math.{clamp_2} | -60.95% | -23.55% +> aiken/math.{clamp_3} | -59.22% | -18.20% +> aiken/math.{gcd_test1} | -47.20% | ±0.00% +> aiken/math.{gcd_test2} | -47.81% | ±0.00% +> aiken/math.{gcd_test3} | -46.10% | ±0.00% +> aiken/math.{is_sqrt1} | -87.41% | -68.64% +> aiken/math.{is_sqrt2} | -87.41% | -68.64% +> aiken/math.{log_10_2} | -51.35% | -8.40% +> aiken/math.{log_42_2} | -51.46% | -8.24% +> aiken/math.{log_42_3} | -51.05% | -7.81% +> aiken/math.{log_5_0} | -54.05% | -12.92% +> aiken/math.{log_4_4} | -50.59% | -9.31% +> aiken/math.{log_4_43} | -49.14% | -7.28% +> aiken/math.{max_1} | -61.39% | -21.07% +> aiken/math.{max_2} | -61.39% | -21.07% +> aiken/math.{max_3} | -61.39% | -21.07% +> aiken/math.{min_1} | -61.39% | -21.07% +> aiken/math.{min_2} | -61.39% | -21.07% +> aiken/math.{min_3} | -61.39% | -21.07% +> aiken/math.{pow_3_5} | -46.34% | ±0.00% +> aiken/math.{pow_7_2} | -46.38% | ±0.00% +> aiken/math.{pow_3__4} | -43.50% | ±0.00% +> aiken/math.{pow_0_0} | -43.95% | ±0.00% +> aiken/math.{pow_513_3} | -45.80% | ±0.00% +> aiken/math.{pow_2_4} | -46.79% | ±0.00% +> aiken/math.{pow_2_42} | -46.77% | ±0.00% +> aiken/math.{pow2_neg} | -44.71% | ±0.00% +> aiken/math.{pow2_0} | -45.00% | ±0.00% +> aiken/math.{pow2_1} | -45.00% | ±0.00% +> aiken/math.{pow2_4} | -45.00% | ±0.00% +> aiken/math.{pow2_42} | -42.01% | ±0.00% +> aiken/math.{pow2_256} | -41.40% | ±0.00% +> aiken/math.{sqrt1} | -32.56% | -17.18% +> aiken/math.{sqrt2} | -32.56% | -17.18% +> aiken/math.{sqrt3} | -49.99% | -8.90% +> aiken/math.{sqrt4} | -51.76% | -3.90% +> aiken/math.{sqrt5} | -52.63% | -1.33% +> aiken/math.{sqrt6} | -28.16% | -15.41% +> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% +> aiken/math/rational.{new_1} | -22.98% | ±0.00% +> aiken/math/rational.{zero_1} | -8.08% | ±0.00% +> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% +> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% +> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% +> aiken/math/rational.{negate_1} | -15.39% | ±0.00% +> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% +> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% +> aiken/math/rational.{add_1} | -15.11% | ±0.00% +> aiken/math/rational.{add_2} | -15.11% | ±0.00% +> aiken/math/rational.{div_1} | -22.31% | -2.75% +> aiken/math/rational.{div_2} | -22.37% | -2.79% +> aiken/math/rational.{mul_1} | -13.37% | ±0.00% +> aiken/math/rational.{mul_2} | -13.37% | ±0.00% +> aiken/math/rational.{mul_3} | -26.25% | ±0.00% +> aiken/math/rational.{sub_1} | -15.11% | ±0.00% +> aiken/math/rational.{sub_2} | -15.11% | ±0.00% +> aiken/math/rational.{sub_3} | -15.11% | ±0.00% +> aiken/math/rational.{compare_1} | -21.70% | ±0.00% +> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% +> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% +> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% +> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% +> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% +> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% +> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% +> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% +> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% +> aiken/math/rational.{floor_1} | -29.49% | ±0.00% +> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% +> aiken/math/rational.{round_1} | -25.17% | ±0.00% +> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% +> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% +> aiken/option.{is_none_1} | -26.56% | ±0.00% +> aiken/option.{is_none_2} | -27.52% | ±0.00% +> aiken/option.{is_some_1} | -27.52% | ±0.00% +> aiken/option.{is_some_2} | -26.56% | ±0.00% +> aiken/option.{and_then_1} | -20.19% | ±0.00% +> aiken/option.{and_then_2} | -22.15% | ±0.00% +> aiken/option.{and_then_3} | -21.85% | ±0.00% +> aiken/option.{choice_1} | -17.11% | ±0.00% +> aiken/option.{choice_2} | -19.75% | ±0.00% +> aiken/option.{choice_3} | -18.68% | ±0.00% +> aiken/option.{flatten_1} | -12.25% | ±0.00% +> aiken/option.{flatten_2} | -15.41% | ±0.00% +> aiken/option.{flatten_3} | -19.46% | ±0.00% +> aiken/option.{flatten_4} | -14.31% | ±0.00% +> aiken/option.{map_1} | -19.89% | ±0.00% +> aiken/option.{map_2} | -18.18% | ±0.00% +> aiken/option.{map2_1} | -20.47% | ±0.00% +> aiken/option.{map2_2} | -19.93% | ±0.00% +> aiken/option.{map2_3} | -13.64% | ±0.00% +> aiken/option.{map3_1} | -20.74% | ±0.00% +> aiken/option.{map3_2} | -20.00% | ±0.00% +> aiken/option.{map3_3} | -19.90% | ±0.00% +> aiken/option.{or_try_1} | -14.36% | ±0.00% +> aiken/option.{or_try_2} | -14.36% | ±0.00% +> aiken/option.{or_else_1} | -38.16% | ±0.00% +> aiken/option.{or_else_2} | -27.62% | ±0.00% +> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% +> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% +> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% +> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% +> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% +> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% +> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% +> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% +> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% +> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% +> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% +> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% +> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% +> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% +> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% +> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% +> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% +> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% +> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% +> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% +> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% +> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% +> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% +> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% +> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% +> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% +> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% +> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% +> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% +> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% +> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% +> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% +> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% +> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% +> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% +> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% +> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% +> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% +> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% +> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% +> aiken/primitive/string.{concat_1} | -92.30% | -80.10% +> aiken/primitive/string.{concat_2} | -97.34% | -85.87% +> aiken/primitive/string.{concat_3} | -98.67% | -80.35% +> aiken/primitive/string.{join_1} | -42.87% | ±0.00% +> aiken/primitive/string.{join_2} | -37.65% | ±0.00% +> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% +> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% +> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% +> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% +> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% +> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% +> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% +> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% +> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% +> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% +> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% +> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% +> cardano/assets.{add_1} | -27.84% | ±0.00% +> cardano/assets.{add_2} | -27.56% | -0.54% +> cardano/assets.{add_3} | -26.39% | ±0.00% +> cardano/assets.{add_4} | -29.75% | -10.41% +> cardano/assets.{add_5} | -27.80% | ±0.00% +> cardano/assets.{merge_1} | -26.02% | ±0.00% +> cardano/assets.{merge_2} | -19.60% | ±0.00% +> cardano/assets.{merge_3} | -23.80% | ±0.00% +> cardano/assets.{merge_4} | -25.92% | ±0.00% +> cardano/assets.{merge_5} | -27.61% | -1.98% +> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% +> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% +> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% +> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% +> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% +> cardano/assets.{reduce_1} | -24.31% | ±0.00% +> cardano/assets.{reduce_2} | -20.89% | ±0.00% +> cardano/assets.{reduce_3} | -36.21% | ±0.00% +>
+ +### Added + +- New modules covering Conway-related features (i.e. governance) + - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) + - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) + +- New primitives in `aiken/collection/pairs`: + - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) + - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) + +- New primitives in `aiken/crypto`: + - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) + - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) + +- New primitives in `aiken/math`: + - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) + +- New primitives in `aiken/primitive/bytearray`: + - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) + - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) + - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) + - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) + - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) + +- New primitives in `aiken/primitive/int`: + - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) + - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) + +- New primitives in `aiken/crypto`: + - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) + - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) + +### Changed + +- Few modules have been relocated and better organized: + - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) + - **collections** + - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) + - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) + - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) + - **primitive** + - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) + - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) + - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) + - **cardano** + - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) + - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) + - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` + - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) + +- Several zero-argument functions have been turned into top-level constants + - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty)
+  - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty)
+  - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything)
+  - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero)
+  - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero)
+
+- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3.
+
+- The `ScriptContext` type has been split out of `cardano/transaction` (originally `aiken/transaction`), moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html), and adjusted to its new form as per Plutus V3.
+
+- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` to `VerificationKey` and `Script`, respectively.
+
+- The functions `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last`, respectively.
+
+- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`.
+
+### Removed
+
+- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere.
+
+- `MintedValue` (from `aiken/transaction/value` originally) and its associated functions are no longer needed and are, therefore, gone.
+
+## v1.9.0 - 2024-05-24
+
+### Added
+
+- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`).
+
+### Changed
+
+- **BREAKING-CHANGE**
+  Specialized all `Dict` keys to `ByteArray`, thus removing the need to pass an extra comparison function to many functions. `Dict` is, however, still specialized with a phantom type for keys.
+
+- **BREAKING-CHANGE**
+  A few functions from `Dict` have been renamed for consistency:
+  - `from_list` -> `from_pairs`
+  - `from_ascending_list` -> `from_ascending_pairs`
+  - `to_list` -> `to_pairs`
+
+### Removed
+
+N/A
+
+## v1.8.0 - 2024-03-28
+
+### Added
+
+- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements.
+
+- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a `Value` while enforcing the invariants expected of `Value`.
+
+- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`.
+
+### Changed
+
+- Added disclaimers to the documentation of [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding.
+
+### Removed
+
+N/A
+
+## v1.7.0 - 2023-11-07
+
+### Added
+
+- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a value's index in a list.
+- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions.
+- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null.
+
+### Changed
+
+- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contains a null quantity of Ada.
+
+### Removed
+
+N/A
+
+## v1.6.0 - 2023-09-08
+
+### Added
+
+- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentiation with powers of two.
+- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing if a bit is set in a bytearray (MSB).
+
+## v1.5.0 - 2023-08-16
+
+### Removed
+
+- Retired `list.and` and `list.or` because of the new keywords for logical op chaining.
+
+## v1.4.0 - 2023-07-21
+
+### Changed
+
+- Fixed a missing null-check on `value.add`. Adding a null quantity of a token is now correctly a no-op.
+
+## v1.3.0 - 2023-06-30
+
+### Added
+
+- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method.
+- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#denominator): For accessing parts of a rational value.
+- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing the [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values.
+- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing the [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values.
+
+### Changed
+
+- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before that fix, removing all assets from a given policy could leave an empty dictionary of assets still present in the `Value`.
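+
+> A rough usage sketch of the `aiken/math` and `aiken/math/rational` helpers
+> mentioned in the entries above. This is an illustrative note, not part of the
+> upstream changelog; it assumes the current module layout, the
+> `Option`-returning signatures of `math.sqrt` and `rational.new`, and a
+> made-up test name.
+>
+> ```
+> use aiken/math
+> use aiken/math/rational
+>
+> test math_and_rational_sketch() {
+>   // `sqrt` returns the integer (floor) square root, or `None` for negatives.
+>   expect Some(root) = math.sqrt(17)
+>   // `rational.new` returns `None` when the denominator is zero.
+>   expect Some(half) = rational.new(1, 2)
+>   and {
+>     root == 4,
+>     // `pow2(k)` computes 2 to the power of k.
+>     math.pow2(4) == 16,
+>     rational.numerator(half) == 1,
+>     rational.denominator(half) == 2
+>   }
+> }
+> ```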
+
+## v1.2.0 - 2023-06-17
+
+### Added
+
+- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue)
+- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value`
+- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue`
+- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex-encoded `String`
+- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers.
+  - [x] `abs`
+  - [x] `add`
+  - [x] `ceil`
+  - [x] `compare`
+  - [x] `compare_with`
+  - [x] `div`
+  - [x] `floor`
+  - [x] `from_int`
+  - [x] `mul`
+  - [x] `negate`
+  - [x] `new`
+  - [x] `proper_fraction`
+  - [x] `reciprocal`
+  - [x] `reduce`
+  - [x] `round`
+  - [x] `round_even`
+  - [x] `sub`
+  - [x] `truncate`
+  - [x] `zero`
+
+### Removed
+
+- The `MintedValue` module was merged with `Value`
+
+## v1.1.0 - 2023-06-06
+
+### Added
+
+- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate.
+
+- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a UTF-8 encoded `ByteArray`, when possible.
+
+- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): for left and right folds over dictionary elements in ascending key order.
+
+- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exists, the provided merge function is called.
+
+- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset.
+
+- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionary of dictionaries.
+
+- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. It makes it possible to simplify some of the implementation of `Value`, which no longer needs to handle the special case where null-quantity tokens would be present. It isn't possible to construct a `MintedValue` by hand; such values come entirely from the script context and are 'read-only'.
+
+- More documentation for the `dict` and `interval` modules.
+
+### Changed
+
+> **Warning**
+>
+> Most of those changes are breaking changes. Though, given we're still in an
+> alpha state, only the `minor` component is bumped from the version number.
+> Please forgive us.
+
+- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as the last argument. For example:
+
+  ```
+  fn foldl(self: List
, with: fn(a, b) -> b, zero: b) -> b + + ↓ becomes + + fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b + ``` + +- Fixed implementation of `bytearray.slice`; `slice` would otherwise behave as if the second argument were an offset. + +- Rename `transaction/value.add` into `transaction/value.merge`. + +- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. + +- Fixed various examples from the documentation + +### Removed + +- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. + +## v1.0.0 - 2023-04-13 + +### Added + +N/A + +### Changed + +N/A + +### Removed + +N/A diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE new file mode 100644 index 00000000..4a1de273 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Lucas Rosa + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md new file mode 100644 index 00000000..4cd6fef2 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md @@ -0,0 +1,71 @@ +
+<!-- centered header: Aiken logo -->
+<h2 align="center">Aiken Standard Library</h2>
+
+ +[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE) +[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml) + +
+
+ +## Getting started + +``` +aiken add aiken-lang/stdlib --version v2 +``` + +## Compatibility + +aiken's version | stdlib's version(s) +--- | --- +`v1.1.3`
<br/>`v1.1.4`<br/>`v1.1.5`<br/>`v1.1.6`<br/>`v1.1.7` | `>= 2.1.0`
+`v1.1.1`<br/>`v1.1.2` | `>= 2.0.0` && `< 2.1.0`
+`v1.0.29-alpha`<br/>
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` +`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` + +## Overview + +The official standard library for the [Aiken](https://aiken-lang.org) Cardano +smart-contract language. + +It extends the language builtins with useful data-types, functions, constants +and aliases that make using Aiken a bliss. + +```aiken +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash} +use cardano/transaction.{OutputReference, Transaction} + +pub type Datum { + owner: VerificationKeyHash, +} + +pub type Redeemer { + msg: ByteArray, +} + +/// A simple validator which replicates a basic public/private signature lock. +/// +/// - The key (hash) is set as datum when the funds are sent to the script address. +/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message +/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' +/// +validator hello_world { + spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { + expect Some(Datum { owner }) = datum + + let must_say_hello = redeemer.msg == "Hello, World!" + + let must_be_signed = list.has(self.extra_signatories, owner) + + and { + must_say_hello, + must_be_signed, + } + } +} +``` + +## Stats + +![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock new file mode 100644 index 00000000..769ac20f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock @@ -0,0 +1,16 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" + +[[packages]] +name = "aiken-lang/fuzz" +version = "v2" +requirements = [] +source = "github" + +[etags] +"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml new file mode 100644 index 00000000..cbc76a0b --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml @@ -0,0 +1,15 @@ +name = "aiken-lang/stdlib" +version = "2.2.0" +compiler = "v1.1.9" +plutus = "v3" +description = "The Aiken Standard Library" + +[repository] +user = "aiken-lang" +project = "stdlib" +platform = "github" + +[[dependencies]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak new file mode 100644 index 00000000..f0c66d69 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak @@ -0,0 +1,293 @@ +use aiken +use aiken/builtin.{decode_utf8, serialise_data} +use aiken/primitive/bytearray + +/// Obtain a 
String representation of _anything_. This is particularly (and only) useful for tracing +/// and debugging. This function is expensive and should not be used in any production code as it +/// will very likely explodes the validator's budget. +/// +/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) +/// of the underlying on-chain binary representation of the data. It's not as +/// easy to read as plain Aiken code, but it is handy for troubleshooting values +/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is +/// a good idea in the Cardano world. +/// +/// ```aiken +/// cbor.diagnostic(42) == "42" +/// cbor.diagnostic(#"a1b2") == "h'A1B2'" +/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" +/// cbor.diagnostic([]) == "[]" +/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" +/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" +/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" +/// cbor.diagnostic(Some(42)) == "121([_ 42])" +/// cbor.diagnostic(None) == "122([])" +/// ``` +pub fn diagnostic(self: Data) -> String { + aiken.diagnostic(self, #"") + |> decode_utf8 +} + +/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). +/// In particular, we have the following property: +/// +/// ```aiken +/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) +/// ``` +/// +/// > [!CAUTION] +/// > Unfortunately, this function isn't derived from a builtin primitive. It +/// > is therefore an order of magnitude more expensive than its counterpart +/// > and shall be used with care. +/// > +/// > In general, one might prefer avoiding deserialisation unless truly necessary. +/// > Yet, it may come in handy for testing and in rare scenarios. +pub fn deserialise(bytes: ByteArray) -> Option { + let length = bytearray.length(bytes) + + let peek = + fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(bytearray.at(bytes, length - cursor))(cursor - offset) + } + } + } + + let take = + fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(builtin.slice_bytearray(length - cursor, n, bytes))( + cursor - n, + ) + } + } + } + + if length == 0 { + None + } else { + let Pair(result, consumed) = decode_data(peek, take)(length) + if consumed != 0 { + None + } else { + Some(result) + } + } +} + +/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). +/// +/// This is particularly useful in combination with hashing functions, as a way +/// to obtain a byte representation that matches the serialised representation +/// used by the ledger in the context of on-chain code. +/// +/// Note that the output matches the output of [`diagnostic`](#diagnostic), +/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a +/// textual representation of the CBOR encoding that is human friendly and +/// useful for debugging. 
+/// +/// ```aiken +/// cbor.serialise(42) == #"182a" +/// cbor.serialise(#"a1b2") == #"42a1b2" +/// cbor.serialise([]) == #"80" +/// cbor.serialise((1, 2)) == #"9f0102ff" +/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" +/// cbor.serialise([(1, #"ff")]) == #"a10141ff" +/// cbor.serialise(Some(42)) == #"d8799f182aff" +/// cbor.serialise(None) == #"d87a80" +/// ``` +pub fn serialise(self: Data) -> ByteArray { + serialise_data(self) +} + +type Byte = + Int + +type Decoder
= + fn(Int) -> Pair + +type Peek = + fn(Int, fn(Byte) -> Decoder) -> Decoder + +type Take = + fn(Int, fn(ByteArray) -> Decoder) -> Decoder + +fn return(data: Data) -> Decoder { + fn(cursor) { Pair(data, cursor) } +} + +const deserialise_failure: Pair = { + let empty: Data = "" + Pair(empty, -1) + } + +const token_begin_bytes = 0x5f + +const token_begin_list = 0x9f + +const token_begin_map = 0xbf + +const token_break = 0xff + +fn decode_data(peek: Peek, take: Take) -> Decoder { + let next <- peek(1) + let major_type = next / 32 + if major_type <= 2 { + if major_type == 0 { + let i <- decode_uint(peek, take, next) + return(builtin.i_data(i)) + } else if major_type == 1 { + let i <- decode_uint(peek, take, next - 32) + return(builtin.i_data(-i - 1)) + } else { + if next == token_begin_bytes { + let b <- decode_chunks(peek, take) + return(builtin.b_data(b)) + } else { + let b <- decode_bytes(peek, take, next - 64) + return(builtin.b_data(b)) + } + } + } else if major_type == 6 { + let tag <- decode_uint(peek, take, next - 192) + let next <- peek(1) + if tag == 102 { + fn(_) { deserialise_failure } + } else { + let ix = + if tag >= 1280 { + tag - 1280 + 7 + } else { + tag - 121 + } + if next == token_begin_list { + let fields <- decode_indefinite(peek, take, decode_data) + return(builtin.constr_data(ix, fields)) + } else { + let size <- decode_uint(peek, take, next - 128) + let fields <- decode_definite(peek, take, decode_data, size) + return(builtin.constr_data(ix, fields)) + } + } + } else if major_type == 4 { + if next == token_begin_list { + let xs <- decode_indefinite(peek, take, decode_data) + return(builtin.list_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 128) + let xs <- decode_definite(peek, take, decode_data, size) + return(builtin.list_data(xs)) + } + } else if major_type == 5 { + if next == token_begin_map { + let xs <- decode_indefinite(peek, take, decode_pair) + return(builtin.map_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 160) + let xs <- decode_definite(peek, take, decode_pair, size) + return(builtin.map_data(xs)) + } + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_pair(peek: Peek, take: Take) -> Decoder> { + fn(cursor) { + let Pair(k, cursor) = decode_data(peek, take)(cursor) + let Pair(v, cursor) = decode_data(peek, take)(cursor) + Pair(Pair(k, v), cursor) + } +} + +fn decode_uint( + peek: Peek, + take: Take, + header: Int, + and_then: fn(Int) -> Decoder, +) -> Decoder { + if header < 24 { + and_then(header) + } else if header == 24 { + let payload <- peek(1) + and_then(payload) + } else if header < 28 { + let width = bytearray.at(#[2, 4, 8], header - 25) + let payload <- take(width) + and_then(bytearray.to_int_big_endian(payload)) + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_bytes( + peek: Peek, + take: Take, + header: Int, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let width <- decode_uint(peek, take, header) + let bytes <- take(width) + and_then(bytes) +} + +fn decode_chunks( + peek: Peek, + take: Take, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then("") + } else { + let chunk <- decode_bytes(peek, take, next - 64) + let chunks <- decode_chunks(peek, take) + and_then(builtin.append_bytearray(chunk, chunks)) + } +} + +fn decode_definite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + size: Int, + and_then: fn(List) -> Decoder, +) -> Decoder { + if size <= 0 { + and_then([]) + } else { + fn(cursor) 
{ + let Pair(elem, cursor) = decode_one(peek, take)(cursor) + { + let elems <- decode_definite(peek, take, decode_one, size - 1) + and_then([elem, ..elems]) + }(cursor) + } + } +} + +fn decode_indefinite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + and_then: fn(List) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then([]) + } else { + fn(cursor) { + let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) + { + let elems <- decode_indefinite(peek, take, decode_one) + and_then([elem, ..elems]) + }(cursor) + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak new file mode 100644 index 00000000..28d9f5bb --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak @@ -0,0 +1,297 @@ +use aiken/cbor.{deserialise, diagnostic, serialise} +use aiken/fuzz + +// ------------------------------------------------------------------ diagnostic + +test diagnostic_1() { + diagnostic(42) == @"42" +} + +test diagnostic_2() { + diagnostic(#"a1b2") == @"h'A1B2'" +} + +test diagnostic_3() { + diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" +} + +test diagnostic_4() { + diagnostic([]) == @"[]" +} + +test diagnostic_5() { + diagnostic((1, 2)) == @"[_ 1, 2]" +} + +test diagnostic_6() { + diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" +} + +test diagnostic_7() { + diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" +} + +test diagnostic_7_alt() { + diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" +} + +test diagnostic_8() { + diagnostic(Some(42)) == @"121([_ 42])" +} + +test diagnostic_9() { + diagnostic(None) == @"122([])" +} + +test diagnostic_10() { + let xs: List<(Int, Int)> = [] + diagnostic(xs) == @"[]" +} + +test diagnostic_10_alt() { + let xs: Pairs = [] + diagnostic(xs) == @"{}" +} + +type Foo { + foo: Bar, +} + +type Bar { + A + B(Int) +} + +test diagnostic_11() { + diagnostic(Foo { foo: A }) == @"121([_ 121([])])" +} + +test diagnostic_12() { + diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" +} + +type Baz { + a0: Int, + b0: ByteArray, +} + +test diagnostic_13() { + diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" +} + +test diagnostic_14() { + diagnostic([0]) == @"[_ 0]" +} + +test diagnostic_15() { + diagnostic(-42) == @"-42" +} + +test diagnostic_16() { + diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" +} + +// ------------------------------------------------------------------ serialise + +test serialise_1() { + serialise(42) == #"182a" +} + +test serialise_2() { + serialise(#"a1b2") == #"42a1b2" +} + +test serialise_3() { + serialise([]) == #"80" +} + +test serialise_4() { + serialise((1, 2)) == #"9f0102ff" +} + +test serialise_5() { + serialise((1, #"ff", 3)) == #"9f0141ff03ff" +} + +test serialise_6() { + serialise([(1, #"ff")]) == #"9f9f0141ffffff" +} + +test serialise_7() { + serialise(Some(42)) == #"d8799f182aff" +} + +test serialise_8() { + serialise(None) == #"d87a80" +} + +test serialise_9() { + serialise([Pair(1, #"ff")]) == #"a10141ff" +} + +// ------------------------------------------------------------------ deserialise + +type AnyData { + AnyInt(Int) + AnyByteArray(ByteArray) + AnyList(List) + AnyPairs(Pairs) + AnyUnaryConstr0(UnaryConstr0) + AnyUnaryConstr1(UnaryConstr1) + AnyUnaryConstr2(UnaryConstr2) + 
AnyBinaryConstr0(BinaryConstr0) + AnyBinaryConstr1(BinaryConstr1) +} + +type UnaryConstr0 { + UnaryConstr0 +} + +type UnaryConstr1 { + field0: String, +} + +type UnaryConstr2 { + field0: Int, + field1: List>, +} + +type BinaryConstr0 = + Bool + +type BinaryConstr1 = + Option + +fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { + let k <- fuzz.and_then(any_key) + let v <- fuzz.map(any_value) + Pair(k, v) +} + +fn any_data() -> Fuzzer { + fuzz.either6( + { + let i <- fuzz.map(fuzz.int()) + AnyInt(i) + }, + { + let bs <- fuzz.map(fuzz.bytearray()) + AnyByteArray(bs) + }, + { + let xs <- fuzz.map(fuzz.list(fuzz.int())) + AnyList(xs) + }, + { + let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) + AnyPairs(ps) + }, + fuzz.either3( + fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), + fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), + { + let i <- fuzz.and_then(fuzz.int()) + let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) + AnyUnaryConstr2(UnaryConstr2(i, xs)) + }, + ), + fuzz.either( + { + let b <- fuzz.map(fuzz.bool()) + AnyBinaryConstr0(b) + }, + { + let o <- fuzz.map(fuzz.option(fuzz.int())) + AnyBinaryConstr1(o) + }, + ), + ) +} + +test unit_deserialise_not_enough_bytes_1() { + expect None = deserialise(#"") +} + +test unit_deserialise_not_enough_bytes_2() { + expect None = deserialise(#"82") +} + +test unit_deserialise_non_empty_leftovers() { + expect None = deserialise(#"811442") +} + +test unit_deserialise_invalid_header() { + expect None = deserialise(#"f1") +} + +test unit_deserialise_invalid_uint() { + expect None = deserialise(#"1d0013bdae") +} + +/// A full script context with a minting policy and various assets. Meant to be +/// non-trivial and cover many things we might encounter in a transaction. 
+test bench_deserialise_script_context() { + expect Some(_) = + deserialise( + #"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", + ) +} + +test prop_deserialise_any_data(any via any_data()) { + when any is { + AnyInt(i) -> { + fuzz.label(@"Int") + expect Some(data) = deserialise(serialise(i)) + expect i_decoded: Int = data + i_decoded == i + } + AnyByteArray(bs) -> { + fuzz.label(@"ByteArray") + expect Some(data) = deserialise(serialise(bs)) + expect bs_decoded: ByteArray = data + bs_decoded == bs + } + AnyList(xs) -> { + fuzz.label(@"List") + expect Some(data) = deserialise(serialise(xs)) + expect xs_decoded: List = data + xs_decoded == xs + } + AnyPairs(ps) -> { + fuzz.label(@"Pairs") + expect Some(data) = deserialise(serialise(ps)) + expect ps_decoded: Pairs = data + ps_decoded == ps + } + AnyUnaryConstr0(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr0 = data + constr_decoded == constr + } + AnyUnaryConstr1(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr1 = data + constr_decoded == constr + } + AnyUnaryConstr2(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr2 = data + constr_decoded == constr + } + AnyBinaryConstr0(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr0 = data + constr_decoded == constr + } + AnyBinaryConstr1(constr) -> { + fuzz.label(@"(binary) Constr") + 
expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr1 = data + constr_decoded == constr + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak new file mode 100644 index 00000000..3d4d332e --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak @@ -0,0 +1,4 @@ +/// A non negative integer that materializes the position of an element in a +/// collection. +pub type Index = + Int diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak new file mode 100644 index 00000000..681d0bae --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak @@ -0,0 +1,1174 @@ +//// A module for working with bytearray dictionaries. +//// +//// +//// > [!IMPORTANT] +//// > +//// > Dictionaries are **ordered sets** of key-value pairs, which thus +//// > preserve some invariants. Specifically, each key is only present once in +//// > the dictionary and all keys are stored in ascending lexicographic order. +//// > +//// > These invariants allow for more optimized functions to operate on `Dict`, +//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` +//// > from an unknown `Data`, you must first recover an `Pairs` and use +//// > [`dict.from_ascending_list`](#from_ascending_list). + +use aiken/builtin + +/// An opaque `Dict`. The type is opaque because the module maintains some +/// invariant, namely: there's only one occurrence of a given key in the dictionary. +/// +/// Note that the `key` parameter is a phantom-type, and only present as a +/// means of documentation. Keys can be any type, yet will need to comparable +/// to use functions like `insert`. +/// +/// See for example: +/// +/// ```aiken +/// pub type Value = +/// Dict> +/// ``` +pub opaque type Dict { + inner: Pairs, +} + +// ## Constructing + +/// An empty dictionnary. +/// ```aiken +/// dict.to_pairs(dict.empty) == [] +/// ``` +pub const empty: Dict = Dict { inner: [] } + +const foo = #"666f6f" + +const bar = #"626172" + +const baz = #"62617a" + +const fixture_1 = + empty + |> insert(foo, 42) + |> insert(bar, 14) + +/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending +/// keys. This function fails (i.e. halts the program execution) if the list isn't +/// sorted. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// let result = +/// dict.from_ascending_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +/// +/// This is meant to be used to turn a list constructed off-chain into a `Dict` +/// which has taken care of maintaining interval invariants. This function still +/// performs a sanity check on all keys to avoid silly mistakes. 
It is, however, +/// considerably faster than ['from_pairs'](from_pairs) +pub fn from_ascending_pairs(xs: Pairs) -> Dict { + let Void = check_ascending_list(xs) + Dict { inner: xs } +} + +fn check_ascending_list(xs: Pairs) { + when xs is { + [] -> Void + [_] -> Void + [Pair(x0, _), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + check_ascending_list([e, ..rest]) + } else { + fail @"keys in associative list aren't in ascending order" + } + } +} + +/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** +/// value doesn't satisfy the predicate. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail +/// ``` +pub fn from_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) -> Dict { + let Void = check_ascending_pairs_with(xs, predicate) + Dict { inner: xs } +} + +fn check_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) { + when xs is { + [] -> Void + [Pair(_, v)] -> + if predicate(v) { + Void + } else { + fail @"value doesn't satisfy predicate" + } + [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + if predicate(v0) { + check_ascending_pairs_with([e, ..rest], predicate) + } else { + fail @"value doesn't satisfy predicate" + } + } else { + fail @"keys in pairs aren't in ascending order" + } + } +} + +test bench_from_ascending_pairs() { + let dict = + from_ascending_pairs( + [ + Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), + Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), + Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), + Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), + ], + ) + + size(dict) == 16 +} + +/// Construct a dictionary from a list of key-value pairs. Note that when a key is present +/// multiple times, the first occurrence prevails. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] +/// +/// let result = +/// dict.from_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn from_pairs(self: Pairs) -> Dict { + Dict { inner: do_from_pairs(self) } +} + +fn do_from_pairs(xs: Pairs) -> Pairs { + when xs is { + [] -> [] + [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) + } +} + +test from_list_1() { + from_pairs([]) == empty +} + +test from_list_2() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( + [Pair(bar, 14), Pair(foo, 42)], + ) +} + +test from_list_3() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 +} + +test from_list_4() { + from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 +} + +test bench_from_pairs() { + let dict = + from_pairs( + [ + Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), + Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), + Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), + Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), + ], + ) + + size(dict) == 16 +} + +// ## Inspecting + +/// Finds a value in the dictionary, and returns the first key found to have that value. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 42) +/// |> dict.insert(key: "b", value: 14) +/// |> dict.insert(key: "c", value: 42) +/// |> dict.find(42) +/// +/// result == Some("a") +/// ``` +pub fn find(self: Dict, value v: value) -> Option { + do_find(self.inner, v) +} + +fn do_find(self: Pairs, value v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + do_find(rest, v) + } + } +} + +test find_1() { + find(empty, foo) == None +} + +test find_2() { + find( + empty + |> insert(foo, 14), + 14, + ) == Some(foo) +} + +test find_3() { + find( + empty + |> insert(foo, 14), + 42, + ) == None +} + +test find_4() { + find( + empty + |> insert(foo, 14) + |> insert(bar, 42) + |> insert(baz, 14), + 14, + ) == Some(baz) +} + +/// Get a value in the dict by its key. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.get(key: "a") +/// +/// result == Some("Aiken") +/// ``` +pub fn get(self: Dict, key: ByteArray) -> Option { + do_get(self.inner, key) +} + +fn do_get(self: Pairs, key k: ByteArray) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + Some(v) + } else { + None + } + } else { + do_get(rest, k) + } + } +} + +test get_1() { + get(empty, foo) == None +} + +test get_2() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: foo) == Some("Aiken") +} + +test get_3() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: baz) == None +} + +test get_4() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "bcd") == None +} + +test get_5() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "kkk") == None +} + +/// Check if a key exists in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.has_key("a") +/// +/// result == True +/// ``` +pub fn has_key(self: Dict, key k: ByteArray) -> Bool { + do_has_key(self.inner, k) +} + +fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { + when self is { + [] -> False + [Pair(k2, _), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + k == k2 + } else { + do_has_key(rest, k) + } + } +} + +test has_key_1() { + !has_key(empty, foo) +} + +test has_key_2() { + has_key( + empty + |> insert(foo, 14), + foo, + ) +} + +test has_key_3() { + !has_key( + empty + |> insert(foo, 14), + bar, + ) +} + +test has_key_4() { + has_key( + empty + |> insert(foo, 14) + |> insert(bar, 42), + bar, + ) +} + +/// Efficiently checks whether a dictionary is empty. +/// ```aiken +/// dict.is_empty(dict.empty) == True +/// ``` +pub fn is_empty(self: Dict) -> Bool { + when self.inner is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty(empty) +} + +/// Extract all the keys present in a given `Dict`. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("a", 1337) +/// |> dict.keys() +/// +/// result == ["a", "b"] +/// ``` +pub fn keys(self: Dict) -> List { + do_keys(self.inner) +} + +fn do_keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] + } +} + +test keys_1() { + keys(empty) == [] +} + +test keys_2() { + keys( + empty + |> insert(foo, 0) + |> insert(bar, 0), + ) == [bar, foo] +} + +/// Return the number of key-value pairs in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.size() +/// +/// result == 3 +/// ``` +pub fn size(self: Dict) -> Int { + do_size(self.inner) +} + +fn do_size(self: Pairs) -> Int { + when self is { + [] -> 0 + [_, ..rest] -> 1 + do_size(rest) + } +} + +test size_1() { + size(empty) == 0 +} + +test size_2() { + size( + empty + |> insert(foo, 14), + ) == 1 +} + +test size_3() { + size( + empty + |> insert(foo, 14) + |> insert(bar, 42), + ) == 2 +} + +/// Extract all the values present in a given `Dict`. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("c", 1337) +/// |> dict.values() +/// +/// result == [14, 42, 1337] +/// ``` +pub fn values(self: Dict) -> List { + do_values(self.inner) +} + +fn do_values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..do_values(rest)] + } +} + +test values_1() { + values(empty) == [] +} + +test values_2() { + values( + empty + |> insert(foo, 3) + |> insert(bar, 4), + ) == [4, 3] +} + +// ## Modifying + +/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.delete(key: "a") +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200)] +/// ``` +pub fn delete(self: Dict, key: ByteArray) -> Dict { + Dict { inner: do_delete(self.inner, key) } +} + +fn do_delete( + self: Pairs, + key k: ByteArray, +) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + rest + } else { + self + } + } else { + [Pair(k2, v2), ..do_delete(rest, k)] + } + } +} + +test delete_1() { + delete(empty, foo) == empty +} + +test delete_2() { + let m = + empty + |> insert(foo, 14) + delete(m, foo) == empty +} + +test delete_3() { + let m = + empty + |> insert(foo, 14) + delete(m, bar) == m +} + +test delete_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + !has_key(delete(m, foo), foo) +} + +test delete_5() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + has_key(delete(m, bar), foo) +} + +test delete_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + delete(m, "bcd") == m +} + +/// Keep only the key-value pairs that pass the given predicate. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.filter(fn(k, _v) { k != "a" }) +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn filter( + self: Dict, + with: fn(ByteArray, value) -> Bool, +) -> Dict { + Dict { inner: do_filter(self.inner, with) } +} + +fn do_filter( + self: Pairs, + with: fn(ByteArray, value) -> Bool, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> + if with(k, v) { + [Pair(k, v), ..do_filter(rest, with)] + } else { + do_filter(rest, with) + } + } +} + +test filter_1() { + filter(empty, fn(_, _) { True }) == empty +} + +test filter_2() { + let expected = + empty + |> insert(foo, 42) + filter(fixture_1, fn(_, v) { v > 14 }) == expected +} + +test filter_3() { + let expected = + empty + |> insert(bar, 14) + filter(fixture_1, fn(k, _) { k == bar }) == expected +} + +/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 1) +/// |> dict.insert(key: "b", value: 2) +/// |> dict.insert(key: "a", value: 3) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 3), Pair("b", 2)] +/// ``` +pub fn insert( + self: Dict, + key k: ByteArray, + value v: value, +) -> Dict { + Dict { inner: do_insert(self.inner, k, v) } +} + +fn do_insert( + self: Pairs, + key k: ByteArray, + value v: value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..do_insert(rest, k, v)] + } + } + } +} + +test insert_1() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(foo, 14) + insert(m1, foo, 14) == m2 +} + +test insert_2() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(bar, 14) + insert(m1, bar, 14) == insert(m2, foo, 42) +} + +/// Insert a value in the dictionary at a given key. When the key already exist, the provided +/// merge function is called. The value existing in the dictionary is passed as the second argument +/// to the merge function, and the new value is passed as the third argument. 
+/// +/// ```aiken +/// let sum = +/// fn (_k, a, b) { Some(a + b) } +/// +/// let result = +/// dict.empty +/// |> dict.insert_with(key: "a", value: 1, with: sum) +/// |> dict.insert_with(key: "b", value: 2, with: sum) +/// |> dict.insert_with(key: "a", value: 3, with: sum) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 4), Pair("b", 2)] +/// ``` +pub fn insert_with( + self: Dict, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { + inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), + } +} + +test insert_with_1() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 1)] +} + +test insert_with_2() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> insert_with(key: "foo", value: 3, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 4)] +} + +test insert_with_3() { + let with = + fn(k, a, _b) { + if k == "foo" { + Some(a) + } else { + None + } + } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: with) + |> insert_with(key: "bar", value: 2, with: with) + |> insert_with(key: "foo", value: 3, with: with) + |> insert_with(key: "bar", value: 4, with: with) + |> to_pairs() + + result == [Pair("foo", 1)] +} + +/// Apply a function to all key-value pairs in a Dict. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.map(fn(_k, v) { v * 2 }) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] +/// ``` +pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { + Dict { inner: do_map(self.inner, with) } +} + +fn do_map( + self: Pairs, + with: fn(ByteArray, a) -> b, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] + } +} + +test map_1() { + let result = + fixture_1 + |> map(with: fn(k, _) { k }) + get(result, foo) == Some(foo) +} + +test map_2() { + let result = + fixture_1 + |> map(with: fn(_, v) { v + 1 }) + get(result, foo) == Some(43) && size(result) == size(fixture_1) +} + +/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
+/// +/// ```aiken +/// let (value, _) = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.pop(key: "a") +/// +/// result == 100 +/// ``` +pub fn pop( + self: Dict, + key: ByteArray, +) -> (Option, Dict) { + do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) }) +} + +fn do_pop( + self: Pairs, + key k: ByteArray, + return: fn(Option, Pairs) -> result, +) -> result { + when self is { + [] -> return(None, []) + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + return(Some(v2), rest) + } else { + return(None, self) + } + } else { + do_pop( + rest, + k, + fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) }, + ) + } + } +} + +test pop_1() { + pop(empty, foo) == (None, empty) +} + +test pop_2() { + let m = + empty + |> insert(foo, 14) + pop(m, foo) == (Some(14), empty) +} + +test pop_3() { + let m = + empty + |> insert(foo, 14) + pop(m, bar) == (None, m) +} + +test pop_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + pop(m, foo) == (Some(14), empty |> insert(bar, 14)) +} + +test pop_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + pop(m, "bcd") == (None, m) +} + +// ## Combining + +/// Combine two dictionaries. If the same key exist in both the left and +/// right dictionary, values from the left are preferred (i.e. left-biaised). +/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union(left_dict, right_dict) |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union( + left: Dict, + right: Dict, +) -> Dict { + Dict { inner: do_union(left.inner, right.inner) } +} + +fn do_union( + left: Pairs, + right: Pairs, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) + } +} + +test union_1() { + union(fixture_1, empty) == fixture_1 +} + +test union_2() { + union(empty, fixture_1) == fixture_1 +} + +test union_3() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(baz, 1337) + union(left, right) == from_pairs( + [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)], + ) +} + +test union_4() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)]) +} + +/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present +/// in both dictionaries. The first value received correspond to the value in the left +/// dictionnary, whereas the second argument corresponds to the value in the right dictionnary. +/// +/// When passing `None`, the value is removed and not present in the union. 
+/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union_with( +/// left_dict, +/// right_dict, +/// fn(_k, v1, v2) { Some(v1 + v2) }, +/// ) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union_with( + left: Dict, + right: Dict, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { inner: do_union_with(left.inner, right.inner, with) } +} + +fn do_union_with( + left: Pairs, + right: Pairs, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> + do_union_with(rest, do_insert_with(right, k, v, with), with) + } +} + +fn do_insert_with( + self: Pairs, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + when with(k, v, v2) is { + Some(combined) -> [Pair(k, combined), ..rest] + None -> rest + } + } else { + [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] + } + } + } +} + +test union_with_1() { + let left = + empty + |> insert(foo, 14) + + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + + let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) + + result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) +} + +// ## Transforming + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldl(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldl( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldl(self.inner, zero, with) +} + +fn do_foldl( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) + } +} + +test fold_1() { + foldl(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test fold_2() { + foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from right-to-left. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldr(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldr( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldr(self.inner, zero, with) +} + +fn do_foldr( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Get the inner list holding the dictionary data. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn to_pairs(self: Dict) -> Pairs { + self.inner +} + +test to_list_1() { + to_pairs(empty) == [] +} + +test to_list_2() { + to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak new file mode 100644 index 00000000..b8e7f675 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak @@ -0,0 +1,1411 @@ +use aiken/builtin +use aiken/primitive/bytearray +use aiken/primitive/int + +// ## Constructing + +/// Add an element in front of the list. Sometimes useful when combined with +/// other functions. +/// +/// ```aiken +/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] +/// ``` +pub fn push(self: List, elem: a) -> List { + [elem, ..self] +} + +test push_1() { + push([], 0) == [0] +} + +test push_2() { + push([2, 3], 1) == [1, 2, 3] +} + +/// Construct a list of a integer from a given range. +/// +/// ```aiken +/// list.range(0, 3) == [0, 1, 2, 3] +/// list.range(-1, 1) == [-1, 0, 1] +/// ``` +pub fn range(from: Int, to: Int) -> List { + if from > to { + [] + } else { + [from, ..range(from + 1, to)] + } +} + +test range_1() { + range(0, 3) == [0, 1, 2, 3] +} + +test range_2() { + range(-1, 1) == [-1, 0, 1] +} + +/// Construct a list filled with n copies of a value. +/// +/// ```aiken +/// list.repeat("na", 3) == ["na", "na", "na"] +/// ``` +pub fn repeat(elem: a, n_times: Int) -> List { + if n_times <= 0 { + [] + } else { + [elem, ..repeat(elem, n_times - 1)] + } +} + +test repeat_1() { + repeat(42, 0) == [] +} + +test repeat_2() { + repeat(14, 3) == [14, 14, 14] +} + +// ## Inspecting + +/// Determine if all elements of the list satisfy the given predicate. +/// +/// Note: an empty list always satisfies the predicate. +/// +/// ```aiken +/// list.all([], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n == 2 }) == False +/// ``` +pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> True + [x, ..xs] -> predicate(x) && all(xs, predicate) + } +} + +test all_1() { + all([1, 2, 3], fn(n) { n > 0 }) == True +} + +test all_2() { + all([1, 2, 3], fn(n) { n > 42 }) == False +} + +test all_3() { + all([], fn(n) { n == 42 }) == True +} + +/// Determine if at least one element of the list satisfies the given predicate. +/// +/// Note: an empty list never satisfies the predicate. 
+/// +/// ```aiken +/// list.any([], fn(n) { n > 2 }) == False +/// list.any([1, 2, 3], fn(n) { n > 0 }) == True +/// list.any([1, 2, 3], fn(n) { n == 2 }) == True +/// list.any([1, 2, 3], fn(n) { n < 0 }) == False +/// ``` +pub fn any(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> False + [x, ..xs] -> predicate(x) || any(xs, predicate) + } +} + +test any_1() { + any([1, 2, 3], fn(n) { n > 0 }) == True +} + +test any_2() { + any([1, 2, 3], fn(n) { n > 42 }) == False +} + +test any_3() { + any([], fn(n) { n == 42 }) == False +} + +/// Return Some(item) at the index or None if the index is out of range. The index is 0-based. +/// +/// ```aiken +/// list.at([1, 2, 3], 1) == Some(2) +/// list.at([1, 2, 3], 42) == None +/// ``` +pub fn at(self: List, index: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if index == 0 { + Some(x) + } else { + at(xs, index - 1) + } + } +} + +test at_1() { + at([1, 2, 3], -1) == None +} + +test at_2() { + at([], 0) == None +} + +test at_3() { + at([1, 2, 3], 3) == None +} + +test at_4() { + at([1], 0) == Some(1) +} + +test at_5() { + at([1, 2, 3], 2) == Some(3) +} + +/// Count how many items in the list satisfy the given predicate. +/// +/// ```aiken +/// list.count([], fn(a) { a > 2}) == 0 +/// list.count([1, 2, 3], fn(a) { n > 0 }) == 3 +/// list.count([1, 2, 3], fn(a) { n >= 2 }) == 2 +/// list.count([1, 2, 3], fn(a) { n > 5 }) == 0 +/// ``` +pub fn count(self: List, predicate: fn(a) -> Bool) -> Int { + foldr( + self, + 0, + fn(item, total) { + if predicate(item) { + total + 1 + } else { + total + } + }, + ) +} + +test count_empty() { + count([], fn(a) { a > 2 }) == 0 +} + +test count_all() { + count([1, 2, 3], fn(a) { a > 0 }) == 3 +} + +test count_some() { + count([1, 2, 3], fn(a) { a >= 2 }) == 2 +} + +test count_none() { + count([1, 2, 3], fn(a) { a > 5 }) == 0 +} + +/// Find the first element satisfying the given predicate, if any. +/// +/// ```aiken +/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2) +/// list.find([4, 5, 6], fn(x) { x == 2 }) == None +/// ``` +pub fn find(self: List, predicate: fn(a) -> Bool) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if predicate(x) { + Some(x) + } else { + find(xs, predicate) + } + } +} + +test find_1() { + find([1, 2, 3], fn(x) { x == 1 }) == Some(1) +} + +test find_2() { + find([1, 2, 3], fn(x) { x > 42 }) == None +} + +test find_3() { + find([], fn(_) { True }) == None +} + +/// Figures out whether a list contain the given element. +/// +/// ```aiken +/// list.has([1, 2, 3], 2) == True +/// list.has([1, 2, 3], 14) == False +/// list.has([], 14) == False +/// ``` +pub fn has(self: List, elem: a) -> Bool { + when self is { + [] -> False + [x, ..xs] -> + if x == elem { + True + } else { + has(xs, elem) + } + } +} + +test has_1() { + has([1, 2, 3], 1) == True +} + +test has_2() { + has([1, 2, 3], 14) == False +} + +test has_3() { + has([], 14) == False +} + +/// Get the first element of a list +/// +/// ```aiken +/// list.head([1, 2, 3]) == Some(1) +/// list.head([]) == None +/// ``` +pub fn head(self: List) -> Option { + when self is { + [] -> None + _ -> Some(builtin.head_list(self)) + } +} + +test head_1() { + head([1, 2, 3]) == Some(1) +} + +test head_2() { + head([]) == None +} + +/// Checks whether a list is empty. 
+/// +/// ```aiken +/// list.is_empty([]) == True +/// list.is_empty([1, 2, 3]) == False +/// ``` +pub fn is_empty(self: List) -> Bool { + when self is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty([]) == True +} + +test is_empty_2() { + is_empty([1, 2, 3]) == False +} + +/// Gets the index of an element of a list, if any. Otherwise, returns None. +/// +/// ```aiken +/// list.index_of([1, 5, 2], 2) == Some(2) +/// list.index_of([1, 7, 3], 4) == None +/// list.index_of([1, 0, 9, 6], 6) == 3 +/// list.index_of([], 6) == None +/// ``` +pub fn index_of(self: List, elem: a) -> Option { + do_index_of(self, elem, 0) +} + +fn do_index_of(self: List, elem: a, i: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if x == elem { + Some(i) + } else { + do_index_of(xs, elem, i + 1) + } + } +} + +test index_of_1() { + index_of([1, 5, 2], 2) == Some(2) +} + +test index_of_2() { + index_of([1, 7, 3], 4) == None +} + +test index_of_3() { + index_of([1, 0, 9, 6], 6) == Some(3) +} + +test index_of_4() { + index_of([], 6) == None +} + +/// Get the last in the given list, if any. +/// +/// ```aiken +/// list.last([]) == None +/// list.last([1, 2, 3]) == Some(3) +/// ``` +pub fn last(self: List) -> Option { + when self is { + [] -> None + [x] -> Some(x) + [_, ..xs] -> last(xs) + } +} + +test last_1() { + last([]) == None +} + +test last_2() { + last([1]) == Some(1) +} + +test last_3() { + last([1, 2, 3, 4]) == Some(4) +} + +/// Get the number of elements in the given list. +/// +/// ```aiken +/// list.length([]) == 0 +/// list.length([1, 2, 3]) == 3 +/// ``` +pub fn length(self: List) -> Int { + when self is { + [] -> 0 + [_, ..xs] -> 1 + length(xs) + } +} + +test length_1() { + length([]) == 0 +} + +test length_2() { + length([1, 2, 3]) == 3 +} + +// ## Modifying + +// ### Extracting + +/// Remove the first occurrence of the given element from the list. +/// +/// ```aiken +/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] +/// list.delete([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn delete(self: List, elem: a) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if x == elem { + xs + } else { + [x, ..delete(xs, elem)] + } + } +} + +test delete_1() { + delete([], 42) == [] +} + +test delete_2() { + delete([1, 2, 3, 1], 1) == [2, 3, 1] +} + +test delete_3() { + delete([1, 2, 3], 14) == [1, 2, 3] +} + +test delete_4() { + delete([2], 2) == [] +} + +/// Drop the first `n` elements of a list. +/// +/// ```aiken +/// list.drop([1, 2, 3], 2) == [3] +/// list.drop([], 42) == [] +/// list.drop([1, 2, 3], 42) == [] +/// ``` +pub fn drop(self: List, n: Int) -> List { + if n <= 0 { + self + } else { + when self is { + [] -> [] + [_x, ..xs] -> drop(xs, n - 1) + } + } +} + +test drop_1() { + drop([], 42) == [] +} + +test drop_2() { + drop([1, 2, 3], 2) == [3] +} + +/// Returns the suffix of the given list after removing all elements that satisfy the predicate. 
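A short, purely illustrative sketch (not from the upstream library) of how the 0-based position returned by `index_of` feeds back into `at`:

```aiken
use aiken/collection/list

test index_of_then_at_sketch() {
  let xs = [10, 20, 30]

  // index_of yields a 0-based position that can be fed straight back into at
  and {
    list.index_of(xs, 30) == Some(2),
    list.at(xs, 2) == Some(30),
  }
}
```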
+/// +/// ```aiken +/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] +/// list.drop_while([], fn(x) { x > 2 }) == [] +/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] +/// ``` +pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + drop_while(xs, predicate) + } else { + self + } + } +} + +test drop_while_1() { + drop_while([], fn(x) { x > 2 }) == [] +} + +test drop_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] +} + +test drop_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x == 42 }) == xs +} + +test drop_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x < 42 }) == [] +} + +/// Produce a list of elements that satisfy a predicate. +/// +/// ```aiken +/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] +/// list.filter([], fn(x) { x > 2 }) == [] +/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] +/// ``` +pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..filter(xs, predicate)] + } else { + filter(xs, predicate) + } + } +} + +test filter_1() { + filter([], fn(x) { x > 0 }) == [] +} + +test filter_2() { + let xs = [1, 2, 3, 4, 5, 6] + filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] +} + +test filter_3() { + let filter_foldr = + fn(xs, f) { + foldr( + xs, + [], + fn(x, ys) { + if f(x) { + [x, ..ys] + } else { + ys + } + }, + ) + } + + let is_odd = + fn(n) { builtin.mod_integer(n, 2) != 0 } + + filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) +} + +/// Produce a list of transformed elements that satisfy a predicate. +/// +/// ```aiken +/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } +/// list.filter_map([1, 2, 3], transform) == [3, 9] +/// ``` +pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when predicate(x) is { + None -> filter_map(xs, predicate) + Some(y) -> [y, ..filter_map(xs, predicate)] + } + } +} + +test filter_map_1() { + filter_map([], fn(_) { Some(42) }) == [] +} + +test filter_map_2() { + filter_map( + [1, 2, 3, 4, 5, 6], + fn(x) { + if builtin.mod_integer(x, 2) != 0 { + Some(3 * x) + } else { + None + } + }, + ) == [3, 9, 15] +} + +/// Return all elements except the last one. +/// +/// ```aiken +/// list.init([]) == None +/// list.init([1, 2, 3]) == Some([1, 2]) +/// ``` +pub fn init(self: List) -> Option> { + when self is { + [] -> None + _ -> Some(do_init(self)) + } +} + +fn do_init(self: List) -> List { + when self is { + [] -> fail @"unreachable" + [_] -> [] + [x, ..xs] -> [x, ..do_init(xs)] + } +} + +test init_1() { + init([]) == None +} + +test init_2() { + init([1]) == Some([]) +} + +test init_3() { + init([1, 2, 3, 4]) == Some([1, 2, 3]) +} + +/// Returns a tuple with all elements that satisfy the predicate at first +/// element, and the rest as second element. 
+/// +/// ```aiken +/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +/// ``` +pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> { + let (left, right) = partition(xs, predicate) + if predicate(x) { + ([x, ..left], right) + } else { + (left, [x, ..right]) + } + } + } +} + +test partition_1() { + partition([], fn(x) { x > 2 }) == ([], []) +} + +test partition_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) +} + +test partition_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x == 42 }) == ([], xs) +} + +test partition_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x < 42 }) == (xs, []) +} + +test partition_5() { + partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +} + +/// Extract a sublist from the given list using 0-based indexes. Negative +/// indexes wrap over, so `-1` refers to the last element of the list. +/// +/// ```aiken +/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +/// ``` +pub fn slice(self: List, from: Int, to: Int) -> List { + let (i, l) = + if from >= 0 { + (from, None) + } else { + let l = length(self) + (l + from, Some(l)) + } + + let j = + if to >= 0 { + to - i + 1 + } else { + when l is { + Some(l) -> l + to - i + 1 + None -> length(self) + to - i + 1 + } + } + + self + |> drop(i) + |> take(j) +} + +test slice_1() { + slice([1, 2, 3], 0, 2) == [1, 2, 3] +} + +test slice_2() { + slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +} + +test slice_3() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +} + +test slice_4() { + slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +} + +test slice_5() { + slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] +} + +test slice_6() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] +} + +/// Cut a list in two, such that the first list contains the given number of / +/// elements and the second list contains the rest. +/// +/// Fundamentally equivalent to (but more efficient): +/// +/// ```aiken +/// // span(xs, n) == (take(xs, n), drop(xs, n)) +/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) +/// ``` +pub fn span(self: List, n: Int) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> + if n <= 0 { + ([], self) + } else { + let (left, right) = span(xs, n - 1) + ([x, ..left], right) + } + } +} + +test span_1() { + span([], 2) == ([], []) +} + +test span_2() { + span([1, 2, 3], 2) == ([1, 2], [3]) +} + +test span_3() { + span([1, 2, 3], -1) == ([], [1, 2, 3]) +} + +test span_4() { + span([1, 2, 3], 42) == ([1, 2, 3], []) +} + +/// Get elements of a list after the first one, if any. +/// +/// ```aiken +/// list.tail([]) == None +/// list.tail([1, 2, 3]) == Some([2, 3]) +/// ``` +pub fn tail(self: List) -> Option> { + when self is { + [] -> None + [_, ..xs] -> Some(xs) + } +} + +test tail_1() { + tail([1, 2, 3]) == Some([2, 3]) +} + +test tail_2() { + tail([]) == None +} + +/// Get the first `n` elements of a list. 
+/// +/// ```aiken +/// list.take([1, 2, 3], 2) == [1, 2] +/// list.take([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn take(self: List, n: Int) -> List { + if n <= 0 { + [] + } else { + when self is { + [] -> [] + [x, ..xs] -> [x, ..take(xs, n - 1)] + } + } +} + +test take_1() { + take([], 42) == [] +} + +test take_2() { + take([1, 2, 3], 2) == [1, 2] +} + +/// Returns the longest prefix of the given list where all elements satisfy the predicate. +/// +/// ```aiken +/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] +/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] +/// ``` +pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..take_while(xs, predicate)] + } else { + [] + } + } +} + +test take_while_1() { + take_while([], fn(x) { x > 2 }) == [] +} + +test take_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] +} + +test take_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x == 42 }) == [] +} + +test take_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x < 42 }) == xs +} + +/// Removes duplicate elements from a list. +/// +/// ```aiken +/// list.unique([1, 2, 3, 1]) == [1, 2, 3] +/// ``` +pub fn unique(self: List) -> List { + when self is { + [] -> [] + [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] + } +} + +test unique_1() { + unique([]) == [] +} + +test unique_2() { + let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] + unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] +} + +// ### Mapping + +/// Map elements of a list into a new list and flatten the result. +/// +/// ```aiken +/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] +/// ``` +pub fn flat_map(self: List, with: fn(a) -> List) -> List { + foldr(self, [], fn(x, xs) { concat(with(x), xs) }) +} + +test flat_map_1() { + flat_map([], fn(a) { [a] }) == [] +} + +test flat_map_2() { + flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] +} + +/// Perform an action for each element of a list. +/// +/// ```aiken +/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) +/// ``` +pub fn for_each(self: List, do: fn(a) -> Void) -> Void { + foldr(self, Void, fn(x, _) { do(x) }) +} + +test for_each_1() { + for_each( + [@"hello", @"world"], + do: fn(lbl) { + trace lbl + Void + }, + ) +} + +/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. +/// +/// ```aiken +/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] +/// ``` +pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { + do_indexed_map(0, self, with) +} + +fn do_indexed_map( + n: Int, + self: List, + with: fn(Int, a) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] + } +} + +test indexed_map_1() { + indexed_map([], fn(i, _n) { i }) == [] +} + +test indexed_map_2() { + indexed_map( + [4, 8, 13, 2], + fn(i, n) { + if n == 8 { + n + } else { + i + } + }, + ) == [0, 8, 2, 3] +} + +/// Apply a function to each element of a list. 
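Since `take_while` and `drop_while` both split a list at the first element that fails the predicate, the two halves always reassemble into the original list. A hypothetical test spelling that out, using only functions defined in this module:

```aiken
use aiken/collection/list

test take_drop_while_sketch() {
  let xs = [1, 2, 3, 4, 1]
  let is_small = fn(n) { n < 3 }

  // take_while keeps the longest satisfying prefix, drop_while returns the rest,
  // so concatenating the two halves gives back the original list
  list.concat(list.take_while(xs, is_small), list.drop_while(xs, is_small)) == xs
}
```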
+/// +/// ```aiken +/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +/// ``` +pub fn map(self: List, with: fn(a) -> result) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(x), ..map(xs, with)] + } +} + +test map_1() { + map([], fn(n) { n + 1 }) == [] +} + +test map_2() { + map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +} + +/// Apply a function of two arguments, combining elements from two lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +/// ``` +pub fn map2( + self: List, + bs: List, + with: fn(a, b) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] + } + } +} + +test map2_1() { + map2([], [1, 2, 3], fn(a, b) { a + b }) == [] +} + +test map2_2() { + map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +} + +test map2_3() { + map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] +} + +/// Apply a function of three arguments, combining elements from three lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +/// ``` +pub fn map3( + self: List, + bs: List, + cs: List, + with: fn(a, b, c) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> + when cs is { + [] -> [] + [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] + } + } + } +} + +test map3_1() { + map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] +} + +test map3_2() { + map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +} + +/// Return the list with its elements in the reserve order. +/// +/// ```aiken +/// list.reverse([1, 2, 3]) == [3, 2, 1] +/// ``` +pub fn reverse(self: List) -> List { + foldl(self, [], fn(x, xs) { [x, ..xs] }) +} + +test reverse_1() { + reverse([]) == [] +} + +test reverse_2() { + reverse([1, 2, 3]) == [3, 2, 1] +} + +/// Sort a list in ascending order using the given comparison function. +/// +/// ```aiken +/// use aiken/int +/// +/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] +/// sort([1, 2, 3], int.compare) == [1, 2, 3] +/// ``` +pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [] + [x, ..xs] -> insert(sort(xs, compare), x, compare) + } +} + +fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [e] + [x, ..xs] -> + if compare(e, x) == Less { + [e, ..self] + } else { + [x, ..insert(xs, e, compare)] + } + } +} + +test sort_1() { + let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_2() { + let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_3() { + let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_4() { + sort([], int.compare) == [] +} + +/// Decompose a list of tuples into a tuple of lists. 
+/// +/// ``` +/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +/// ``` +pub fn unzip(self: List<(a, b)>) -> (List, List) { + when self is { + [] -> ([], []) + [(a, b), ..xs] -> { + let (a_tail, b_tail) = unzip(xs) + ([a, ..a_tail], [b, ..b_tail]) + } + } +} + +test unzip_1() { + unzip([]) == ([], []) +} + +test unzip_2() { + unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +} + +// ## Combining + +/// Merge two lists together. +/// +/// ```aiken +/// list.concat([], []) == [] +/// list.concat([], [1, 2, 3]) == [1, 2, 3] +/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: List, right: List) -> List { + when left is { + [] -> right + [x, ..xs] -> [x, ..concat(xs, right)] + } +} + +test concat_1() { + concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +} + +test concat_2() { + concat([1, 2, 3], []) == [1, 2, 3] +} + +test concat_3() { + concat([], [1, 2, 3]) == [1, 2, 3] +} + +/// Remove the first occurrence of each element of the second list from the first one. +/// +/// ``` +/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +/// list.difference([1, 2, 3], []) == [1, 2, 3] +/// ``` +pub fn difference(self: List, with: List) -> List { + when with is { + [] -> self + [x, ..xs] -> difference(delete(self, x), xs) + } +} + +test difference_1() { + difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +} + +test difference_2() { + difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +} + +test difference_3() { + difference([1, 2, 3], []) == [1, 2, 3] +} + +test difference_4() { + difference([], [1, 2, 3]) == [] +} + +/// Combine two lists together. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +/// ``` +pub fn zip(self: List, bs: List) -> List<(a, b)> { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [(x, y), ..zip(xs, ys)] + } + } +} + +test zip_1() { + zip([], [1, 2, 3]) == [] +} + +test zip_2() { + zip([1, 2, 3], []) == [] +} + +test zip_3() { + zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +} + +// ## Transforming + +/// Reduce a list from left to right. 
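One property worth keeping in mind: `unzip` and `zip` are inverses on lists of 2-tuples (up to truncation of the longer list). An illustrative sketch, not part of the vendored file:

```aiken
use aiken/collection/list

test unzip_then_zip_sketch() {
  let pairs = [(1, "a"), (2, "b")]

  // unzip splits a list of 2-tuples into two lists; zip puts them back together
  let (xs, ys) = list.unzip(pairs)
  list.zip(xs, ys) == pairs
}
```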
+/// +/// ```aiken +/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] +/// ``` +pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> foldl(xs, with(x, zero), with) + } +} + +type Fold2 = + fn(a, b) -> result + +pub fn foldl2( + self: List, + zero_a: a, + zero_b: b, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> result { + do_foldl2(self, with, return)(zero_a, zero_b) +} + +fn do_foldl2( + self: List, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> Fold2 { + when self is { + [] -> return + [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) + } +} + +test foldl2_optimized() { + let + len, + sum, + <- + foldl2( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + 0, + 0, + fn(n, len, sum, return) { return(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_classic() { + let (len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + (0, 0), + fn(n, (len, sum)) { (len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +type Foo { + Foo(Int, Int) +} + +test foldl2_pair() { + let Pair(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Pair(0, 0), + fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_foo() { + let Foo(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Foo(0, 0), + fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl_1() { + foldl([], 0, fn(_, _) { 1 }) == 0 +} + +test foldl_2() { + foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldl_3() { + foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] +} + +/// Reduce a list from right to left. +/// +/// ```aiken +/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] +/// ``` +pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> with(x, foldr(xs, zero, with)) + } +} + +test foldr_1() { + foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldr_2() { + foldr( + [1, 2, 3], + "", + fn(n, _str) { + if builtin.mod_integer(n, 2) == 0 { + "foo" + } else { + "bar" + } + }, + ) == "bar" +} + +test foldr_3() { + foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] +} + +/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. +/// +/// ```aiken +/// let group = fn(i, x, xs) { [(i, x), ..xs] } +/// list.indexed_foldr(["a", "b", "c"], [], group) == [ +/// (0, "a"), +/// (1, "b"), +/// (2, "c") +/// ] +/// ``` +pub fn indexed_foldr( + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + do_indexed_foldr(0, self, zero, with) +} + +fn do_indexed_foldr( + n: Int, + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + when self is { + [] -> zero + [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) + } +} + +test indexed_foldr_1() { + indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 +} + +test indexed_foldr_2() { + let letters = ["a", "b", "c"] + indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ + (0, "a"), (1, "b"), (2, "c"), + ] +} + +/// Reduce a list from left to right using the accumulator as left operand. 
+/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] +/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True +/// ``` +pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce([], 0, fn(n, total) { n + total }) == 0 +} + +test reduce_2() { + reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +} + +test reduce_3() { + reduce([True, False, True], False, fn(left, right) { left || right }) == True +} + +test reduce_4() { + reduce( + [#[1], #[2], #[3]], + #[9], + fn(left, right) { bytearray.concat(left, right) }, + ) == #[9, 1, 2, 3] +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak new file mode 100644 index 00000000..01bfe763 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak @@ -0,0 +1,833 @@ +//// A module for working with associative lists (a.k.a `Pairs`). +//// +//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers +//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is +//// a type-alias to `List>`. +//// +//// > [!CAUTION] +//// > +//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption +//// > about the ordering of elements within the list. As a result, lookup +//// > functions do traverse the entire list when invoked. They are also not _sets_, +//// > and thus allow for duplicate keys. This is reflected in the functions used +//// > to interact with them. + +use aiken/builtin +use aiken/primitive/bytearray + +// ## Inspecting + +/// Get all values in the alist associated with a given key. +/// +/// ```aiken +/// pairs.get_all([], "a") == [] +/// pairs.get_all([Pair("a", 1)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +/// ``` +pub fn get_all(self: Pairs, key k: key) -> List { + when self is { + [] -> [] + [Pair(k2, v), ..rest] -> + if k == k2 { + [v, ..get_all(rest, k)] + } else { + get_all(rest, k) + } + } +} + +test get_all_1() { + get_all([], "a") == [] +} + +test get_all_2() { + get_all([Pair("a", 1)], "a") == [1] +} + +test get_all_3() { + get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +} + +test get_all_4() { + get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +} + +test get_all_5() { + get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the first one is returned. 
+/// +/// ```aiken +/// pairs.get_first([], "a") == None +/// pairs.get_first([Pair("a", 1)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +/// ``` +pub fn get_first(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + Some(v) + } else { + get_first(rest, k) + } + } +} + +test get_first_1() { + get_first([], "a") == None +} + +test get_first_2() { + get_first([Pair("a", 1)], "a") == Some(1) +} + +test get_first_3() { + get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_first_4() { + get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +} + +test get_first_5() { + get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the last one is returned. +/// +/// ```aiken +/// pairs.get_last([], "a") == None +/// pairs.get_last([Pair("a", 1)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +/// ``` +pub fn get_last(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + when get_last(rest, k) is { + None -> Some(v) + some -> some + } + } else { + get_last(rest, k) + } + } +} + +test get_last_1() { + get_last([], "a") == None +} + +test get_last_2() { + get_last([Pair("a", 1)], "a") == Some(1) +} + +test get_last_3() { + get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_last_4() { + get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +} + +test get_last_5() { + get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Finds all keys in the alist associated with a given value. +/// +/// ```aiken +/// pairs.find_all([], 1) == [] +/// pairs.find_all([Pair("a", 1)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] +/// ``` +pub fn find_all(self: Pairs, v: value) -> List { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if v == v2 { + [k2, ..find_all(rest, v)] + } else { + find_all(rest, v) + } + } +} + +test find_all_1() { + find_all([], "a") == [] +} + +test find_all_2() { + find_all([Pair("a", 14)], 14) == ["a"] +} + +test find_all_3() { + find_all([Pair("a", 14)], 42) == [] +} + +test find_all_4() { + find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] +} + +/// Finds the first key in the alist associated with a given value, if any. 
+/// +/// ```aiken +/// pairs.find_first([], 1) == None +/// pairs.find_first([Pair("a", 1)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") +/// ``` +pub fn find_first(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + find_first(rest, v) + } + } +} + +test find_first_1() { + find_first([], "a") == None +} + +test find_first_2() { + find_first([Pair("a", 14)], 14) == Some("a") +} + +test find_first_3() { + find_first([Pair("a", 14)], 42) == None +} + +test find_first_4() { + find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") +} + +/// Finds the last key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_last([], 1) == None +/// pairs.find_last([Pair("a", 1)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") +/// ``` +pub fn find_last(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + when find_last(rest, v) is { + None -> Some(k2) + some -> some + } + } else { + find_last(rest, v) + } + } +} + +test find_last_1() { + find_last([], "a") == None +} + +test find_last_2() { + find_last([Pair("a", 14)], 14) == Some("a") +} + +test find_last_3() { + find_last([Pair("a", 14)], 42) == None +} + +test find_last_4() { + find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") +} + +/// Check if a key exists in the pairs. +/// +/// ```aiken +/// pairs.has_key([], "a") == False +/// pairs.has_key([Pair("a", 1)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True +/// ``` +pub fn has_key(self: Pairs, k: key) -> Bool { + when self is { + [] -> False + // || is lazy so this is fine + [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) + } +} + +test has_key_1() { + !has_key([], "a") +} + +test has_key_2() { + has_key([Pair("a", 14)], "a") +} + +test has_key_3() { + !has_key([Pair("a", 14)], "b") +} + +test has_key_4() { + has_key([Pair("a", 14), Pair("b", 42)], "b") +} + +test has_key_5() { + has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") +} + +/// Extract all the keys present in a given `Pairs`. +/// +/// ```aiken +/// pairs.keys([]) == [] +/// pairs.keys([Pair("a", 1)]) == ["a"] +/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] +/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] +/// ``` +pub fn keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..keys(rest)] + } +} + +test keys_1() { + keys([]) == [] +} + +test keys_2() { + keys([Pair("a", 0)]) == ["a"] +} + +test keys_3() { + keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] +} + +/// Extract all the values present in a given `Pairs`. 
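Because associative lists allow duplicate keys, the lookup helpers above differ only in which match they report. A minimal sketch, not from the upstream library, making that explicit:

```aiken
use aiken/collection/pairs

test duplicate_key_lookups_sketch() {
  let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)]

  // associative lists may hold the same key twice; each helper picks a different match
  and {
    pairs.get_all(fixture, "a") == [1, 3],
    pairs.get_first(fixture, "a") == Some(1),
    pairs.get_last(fixture, "a") == Some(3),
  }
}
```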
+/// +/// ```aiken +/// pairs.values([]) == [] +/// pairs.values([Pair("a", 1)]) == [1] +/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +/// ``` +pub fn values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..values(rest)] + } +} + +test values_1() { + values([]) == [] +} + +test values_2() { + values([Pair("a", 1)]) == [1] +} + +test values_3() { + values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +} + +test values_4() { + values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +} + +// ## Modifying + +/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. +/// +/// ```aiken +/// pairs.delete_all([], "a") == [] +/// pairs.delete_all([Pair("a", 1)], "a") == [] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] +/// ``` +pub fn delete_all(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + delete_all(rest, k) + } else { + [Pair(k2, v2), ..delete_all(rest, k)] + } + } +} + +test delete_all_1() { + delete_all([], "a") == [] +} + +test delete_all_2() { + delete_all([Pair("a", 14)], "a") == [] +} + +test delete_all_3() { + let fixture = [Pair("a", 14)] + delete_all(fixture, "b") == fixture +} + +test delete_all_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_all(fixture, "a") == [Pair("b", 2)] +} + +/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **first** key found is deleted. +/// +/// ```aiken +/// pairs.delete_first([], "a") == [] +/// pairs.delete_first([Pair("a", 1)], "a") == [] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] +/// ``` +pub fn delete_first(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + rest + } else { + [Pair(k2, v2), ..delete_first(rest, k)] + } + } +} + +test delete_first_1() { + delete_first([], "a") == [] +} + +test delete_first_2() { + delete_first([Pair("a", 14)], "a") == [] +} + +test delete_first_3() { + let fixture = [Pair("a", 14)] + delete_first(fixture, "b") == fixture +} + +test delete_first_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] +} + +/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
+/// +/// ```aiken +/// pairs.delete_last([], "a") == [] +/// pairs.delete_last([Pair("a", 1)], "a") == [] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] +/// ``` +pub fn delete_last(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + let tail = delete_last(rest, k) + if tail == rest { + rest + } else { + [Pair(k2, v2), ..tail] + } + } else { + [Pair(k2, v2), ..delete_last(rest, k)] + } + } +} + +test delete_last_1() { + delete_last([], "a") == [] +} + +test delete_last_2() { + delete_last([Pair("a", 14)], "a") == [] +} + +test delete_last_3() { + let fixture = [Pair("a", 14)] + delete_last(fixture, "b") == fixture +} + +test delete_last_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// the value is added in front. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] +/// ``` +pub fn insert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..self] + } else { + [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test insert_by_ascending_key_1() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14), Pair("foo", 42)] +} + +test insert_by_ascending_key_2() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("bar", 14, bytearray.compare) + |> insert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies +/// how to combine two values on a key conflict. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. 
+/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let add_integer = fn(x, y) { x + y } +/// +/// let result = +/// [] +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) +/// +/// result == [Pair("bar", 2), Pair("foo", 4)] +/// ``` +pub fn insert_with_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, + with: fn(value, value) -> value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, with(v, v2)), ..rest] + } else { + [ + Pair(k2, v2), + ..insert_with_by_ascending_key(rest, k, v, compare, with) + ] + } + } + } +} + +test insert_with_by_ascending_key_1() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + + m == [Pair("foo" |> builtin.b_data, 56)] +} + +test insert_with_by_ascending_key_2() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "baz" |> builtin.b_data, + 1337, + compare_un_b_data, + builtin.add_integer, + ) + + m == [ + Pair("bar" |> builtin.b_data, 14), + Pair("baz" |> builtin.b_data, 1337), + Pair("foo" |> builtin.b_data, 42), + ] +} + +test insert_with_by_ascending_key_3() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let result = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 1, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 2, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 3, + compare_un_b_data, + builtin.add_integer, + ) + + result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] +} + +/// Apply a function to all key-value pairs in a alist, replacing the values. +/// +/// ```aiken +/// let fixture = [Pair("a", 100), Pair("b", 200)] +/// +/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] +/// ``` +pub fn map( + self: Pairs, + with: fn(key, value) -> result, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] + } +} + +test map_1() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] +} + +test map_2() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// its value is replaced. 
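The ascending-key caution above matters in practice: as long as every insertion goes through `insert_by_ascending_key` (or its variants), the keys stay sorted, which is exactly the precondition these helpers assume. A hypothetical test illustrating the invariant, reusing `keys` and `bytearray.compare` from this library:

```aiken
use aiken/collection/pairs
use aiken/primitive/bytearray

test ascending_key_invariant_sketch() {
  // inserting only through insert_by_ascending_key keeps the keys sorted,
  // regardless of the order in which entries arrive
  let m =
    []
      |> pairs.insert_by_ascending_key("b", 2, bytearray.compare)
      |> pairs.insert_by_ascending_key("a", 1, bytearray.compare)
      |> pairs.insert_by_ascending_key("c", 3, bytearray.compare)

  pairs.keys(m) == ["a", "b", "c"]
}
```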
+/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3)] +/// ``` +pub fn repsert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test repsert_by_ascending_key_1() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14)] +} + +test repsert_by_ascending_key_2() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("bar", 14, bytearray.compare) + |> repsert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +// ## Transforming + +/// Fold over the key-value pairs in a pairs. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldl( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) + } +} + +test foldl_1() { + foldl([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldl_2() { + foldl( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +/// Fold over the key-value pairs in a Pairs. The fold direction follows the +/// order of elements in the Pairs and is done from right-to-left. 
+/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldr( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +test foldr_3() { + let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] + + foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak new file mode 100644 index 00000000..46a7dda5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak @@ -0,0 +1,147 @@ +use aiken/builtin + +pub type VerificationKey = + ByteArray + +pub type VerificationKeyHash = + Hash + +pub type Script = + ByteArray + +pub type ScriptHash = + Hash + +pub type Signature = + ByteArray + +pub type DataHash = + Hash + +/// A `Hash` is nothing more than a `ByteArray`, but it carries extra +/// information for readability. +/// +/// On-chain, any hash digest value is represented as a plain 'ByteArray'. +/// Though in practice, hashes come from different sources and have +/// different semantics. +/// +/// Hence, while this type-alias doesn't provide any strong type-guarantees, +/// it helps writing functions signatures with more meaningful types than mere +/// 'ByteArray'. +/// +/// Compare for example: +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(ByteArray) +/// Script(ByteArray) +/// } +/// ``` +/// +/// with +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(Hash) +/// Script(Hash) +/// } +/// ``` +/// +/// Both are strictly equivalent, but the second reads much better. +pub type Hash = + ByteArray + +// ## Hashing + +/// A blake2b-224 hash algorithm. +/// +/// Typically used for: +/// +/// - [`Credential`](../cardano/address.html#Credential) +/// - [`PolicyId`](../cardano/assets.html#PolicyId) +/// +/// Note: there's no function to calculate blake2b-224 hash digests on-chain. +pub opaque type Blake2b_224 { + Blake2b_224 +} + +/// Compute the blake2b-224 hash digest (28 bytes) of some data. +pub fn blake2b_224(bytes: ByteArray) -> Hash { + builtin.blake2b_224(bytes) +} + +/// A blake2b-256 hash algorithm. +/// +/// Typically used for: +/// +/// - [`TransactionId`](../cardano/transaction.html#TransactionId) +pub opaque type Blake2b_256 { + Blake2b_256 +} + +/// Compute the blake2b-256 hash digest (32 bytes) of some data. +pub fn blake2b_256(bytes: ByteArray) -> Hash { + builtin.blake2b_256(bytes) +} + +/// A Keccak-256 hash algorithm. +pub opaque type Keccak_256 { + Keccak_256 +} + +/// Compute the keccak-256 hash digest (32 bytes) of some data. +pub fn keccak_256(bytes: ByteArray) -> Hash { + builtin.keccak_256(bytes) +} + +/// A SHA2-256 hash algorithm. +pub opaque type Sha2_256 { + Sha2_256 +} + +/// Compute the sha2-256 hash digest (32 bytes) of some data. 
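As a concrete reminder of why Cardano key and script hashes are 28 bytes, here is an illustrative sketch (not part of the upstream file) checking the digest length of `blake2b_224` with `builtin.length_of_bytearray`:

```aiken
use aiken/builtin
use aiken/crypto

test blake2b_224_digest_size_sketch() {
  // credentials on Cardano are 28 bytes because they are blake2b-224 digests
  builtin.length_of_bytearray(crypto.blake2b_224("some verification key bytes")) == 28
}
```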
+pub fn sha2_256(bytes: ByteArray) -> Hash { + builtin.sha2_256(bytes) +} + +/// A SHA3-256 hash algorithm. +pub opaque type Sha3_256 { + Sha3_256 +} + +/// Compute the sha3-256 hash digest (32 bytes) of some data. +pub fn sha3_256(bytes: ByteArray) -> Hash { + builtin.sha3_256(bytes) +} + +// ## Verifying signatures + +/// Verify an ECDCA signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ecdsa_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ecdsa_secp256k1_signature(key, msg, sig) +} + +/// Verify an Ed25519 signature using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ed25519_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ed25519_signature(key, msg, sig) +} + +/// Verify a Schnorr signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_schnorr_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_schnorr_secp256k1_signature(key, msg, sig) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak new file mode 100644 index 00000000..d7b4cc19 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak @@ -0,0 +1,115 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G1 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. 
+pub const generator: G1Element = + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" + +test generator_1() { + builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G1 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G1Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", + ), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", + ), + } +} + +/// Compresses a point in the G1 group into a more compact representation. +/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g1_compress(point) +} + +test compress_1() { + compress( + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", + ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" +} + +/// Decompresses a point in the G1 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g1_uncompress(bytes) +} + +pub fn equal(left, right) { + builtin.bls12_381_g1_equal(left, right) +} + +test equal_1() { + equal(generator, generator) +} + +/// Adds two points in the G1 group. +pub fn add(left, right) { + builtin.bls12_381_g1_add(left, right) +} + +/// Subtracts one point in the G1 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G1 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
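A small sketch, for orientation only and not from the upstream module, of the group law the constants above encode: the point at infinity (`zero`) is the additive identity for `add` and `sub`:

```aiken
use aiken/crypto/bls12_381/g1

test zero_is_identity_sketch() {
  // adding or subtracting the point at infinity leaves any point unchanged
  and {
    g1.equal(g1.add(g1.generator, g1.zero), g1.generator),
    g1.equal(g1.sub(g1.generator, g1.zero), g1.generator),
  }
}
```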
+pub fn scale(point, e: Scalar) { + builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G1 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { + builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak new file mode 100644 index 00000000..7a2013db --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak @@ -0,0 +1,124 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G2 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const generator: G2Element = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + +test generator_1() { + builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G2 group. 
+/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G2Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", + ), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", + ), + } +} + +/// Compresses a point in the G2 group into a more compact representation. +/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g2_compress(point) +} + +test compress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" +} + +/// Decompresses a point in the G2 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g2_uncompress(bytes) +} + +test decompress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + generator == g2 +} + +pub fn equal(left, right) { + builtin.bls12_381_g2_equal(left, right) +} + +test equal_1() { + equal( + generator, + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", + ) +} + +/// Adds two points in the G2 group. 
+pub fn add(left, right) { + builtin.bls12_381_g2_add(left, right) +} + +/// Subtracts one point in the G2 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G2 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. +pub fn scale(point, e: Scalar) { + builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G2 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes, domain_separation_tag) { + builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak new file mode 100644 index 00000000..cf028ad7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak @@ -0,0 +1,255 @@ +//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. +//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. +//// +//// More explicitly, we have the identity: +//// +//// ```aiken +//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 +//// ``` +//// +//// where, +//// +//// ```aiken +//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 +//// ``` +//// +//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. +//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. + +use aiken/builtin + +/// The prime number defining the scalar field of the BLS12-381 curve. +pub const field_prime = + 52435875175126190479447740508185965837690552500527637822603658699938581184513 + +/// Represents the additive identity (zero) in the `Scalar` field. +pub const zero: Scalar = Scalar(0) + +/// Represents the multiplicative identity (one) in the `Scalar` field. +pub const one: Scalar = Scalar(1) + +/// Opaque type representing an element of the finite field `Scalar`. +pub opaque type Scalar { + integer: Int, +} + +// ## Constructing + +/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. +/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
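+///
+/// For example, only values in the half-open range `[0, field_prime)` are accepted;
+/// the prime itself is already out of range:
+///
+/// ```aiken
+/// scalar.new(1) == Some(scalar.one)
+/// scalar.new(field_prime) == None
+/// ```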
+pub fn new(n: Int) -> Option { + if n >= 0 && n < field_prime { + Some(Scalar(n)) + } else { + None + } +} + +test new_1() { + and { + new(-1) == None, + new(field_prime) == None, + new(834884848) == Some(Scalar(834884848)), + } +} + +/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`. +pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(True, bytes)) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960)) +} + +/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`. +pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(False, bytes)) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535)) +} + +// ## Modifying + +/// Exponentiates an `Scalar` element by a non-negative integer exponent, using repeated squaring. +/// Note that this function returns `scalar.zero` for negative exponents. +/// A dedicated builtin function for this is in the making, see CIP 109. +pub fn scale(self: Scalar, e: Int) -> Scalar { + if e < 0 { + zero + } else if e == 0 { + one + } else if e % 2 == 0 { + scale(mul(self, self), e / 2) + } else { + mul(self, scale(mul(self, self), ( e - 1 ) / 2)) + } +} + +test scale_1() { + and { + scale(Scalar(834884848), -1) == zero, + scale(Scalar(834884848), 0) == one, + scale(Scalar(834884848), 1) == Scalar(834884848), + scale(Scalar(834884848), 2) == Scalar(697032709419983104), + scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192), + scale(Scalar(field_prime - 4), 200) == Scalar( + 12843927705572658539565969578937286576443167978938369866871449552629978143484, + ), + } +} + +/// A faster version of `scale` for the case where the exponent is a power of two. +/// That is, the exponent `e = 2^k` for some non-negative integer `k`. Which is used alot in zk-SNARKs. +pub fn scale2(self: Scalar, k: Int) -> Scalar { + if k < 0 { + zero + } else { + do_scale2(self, k) + } +} + +fn do_scale2(self: Scalar, k: Int) -> Scalar { + if k == 0 { + self + } else { + do_scale2(mul(self, self), k - 1) + } +} + +test scale2_1() { + and { + scale2(Scalar(834884848), -1) == zero, + scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1), + scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2), + scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4), + scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8), + scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16), + } +} + +// ## Combining + +/// Adds two `Scalar` elements, ensuring the result stays within the finite field range. +pub fn add(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer + right.integer ) % field_prime) +} + +test add_1() { + and { + (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?, + (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?, + (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?, + } +} + +/// Divides one `Scalar` element by another, returning `None` if the divisor is zero. 
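+///
+/// For example, dividing by [`zero`](#zero) is the only failing case:
+///
+/// ```aiken
+/// scalar.div(scalar.one, scalar.one) == Some(scalar.one)
+/// scalar.div(scalar.one, scalar.zero) == None
+/// ```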
+pub fn div(left: Scalar, right: Scalar) -> Option { + if right == zero { + None + } else { + Some(mul(left, scale(right, field_prime - 2))) + } +} + +test div_1() { + and { + div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), + div(Scalar(834884848), zero) == None, + div(Scalar(field_prime - 1), Scalar(2)) == Some( + Scalar( + 26217937587563095239723870254092982918845276250263818911301829349969290592256, + ), + ), + } +} + +/// Multiplies two `Scalar` elements, with the result constrained within the finite field. +pub fn mul(left: Scalar, right: Scalar) -> Scalar { + Scalar(left.integer * right.integer % field_prime) +} + +test mul_1() { + and { + mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), + mul(zero, Scalar(834884848)) == zero, + mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699938581184511, + ), + } +} + +/// Calculates the additive inverse of a `Scalar` element. +pub fn neg(self: Scalar) -> Scalar { + // this is basicly sub(zero, self), but more efficient as it saves one modulo operation + if self.integer == 0 { + self + } else { + Scalar(field_prime - self.integer) + } +} + +test neg_1() { + and { + neg(Scalar(834884848)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699937746299665, + ), + neg(zero) == zero, + neg(one) == Scalar(field_prime - 1), + } +} + +/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. +pub fn recip(self: Scalar) -> Option { + div(one, self) +} + +test recip_1() { + and { + recip(Scalar(834884848)) == Some( + Scalar( + 35891248691642227249400403463796410930702563777316955162085759263735363466421, + ), + ), + recip(zero) == None, + } +} + +/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. +pub fn sub(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer - right.integer ) % field_prime) +} + +test sub_1() { + and { + (sub(Scalar(834884848), Scalar(834884848)) == zero)?, + (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, + } +} + +// ## Transforming + +/// Converts a `Scalar` element back to its integer representation. +pub fn to_int(self: Scalar) -> Int { + self.integer +} + +test to_int_1() { + to_int(Scalar(834884848)) == 834884848 +} + +/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. +pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self.integer) +} + +/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. +pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self.integer) +} + +test to_bytearray_1() { + to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak new file mode 100644 index 00000000..96179f9b --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak @@ -0,0 +1,680 @@ +//// In a eUTxO-based blockchain like Cardano, the management of time can be +//// finicky. 
+//// +//// Indeed, in order to maintain a complete determinism in the execution of +//// scripts, it is impossible to introduce a notion of _"current time"_ since +//// the execution would then depend on factor that are external to the +//// transaction itself: the ineluctable stream of time flowing in our universe. +//// +//// Hence, to work around that, we typically define time intervals, which gives +//// window -- a.k.a intervals -- within which the transaction can be executed. +//// From within a script, it isn't possible to know when exactly the script is +//// executed, but we can reason about the interval bounds to validate pieces of +//// logic. + +// TODO: Replace 'Int' with a generic 'a' once we have comparable traits. + +/// A type to represent intervals of values. Interval are inhabited by a type +/// `a` which is useful for non-infinite intervals that have a finite +/// lower-bound and/or upper-bound. +/// +/// This allows to represent all kind of mathematical intervals: +/// +/// ```aiken +/// // [1; 10] +/// let i0: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(1), is_inclusive: True } +/// , upper_bound: +/// IntervalBound { bound_type: Finite(10), is_inclusive: True } +/// } +/// ``` +/// +/// ```aiken +/// // (20; infinity) +/// let i1: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(20), is_inclusive: False } +/// , upper_bound: +/// IntervalBound { bound_type: PositiveInfinity, is_inclusive: False } +/// } +/// ``` +pub type Interval { + lower_bound: IntervalBound, + upper_bound: IntervalBound, +} + +/// An interval bound, either inclusive or exclusive. +pub type IntervalBound { + bound_type: IntervalBoundType, + is_inclusive: Bool, +} + +/// A type of interval bound. Where finite, a value of type `a` must be +/// provided. `a` will typically be an `Int`, representing a number of seconds or +/// milliseconds. +pub type IntervalBoundType { + NegativeInfinity + Finite(a) + PositiveInfinity +} + +// ## Constructing + +/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF) +/// +/// ```aiken +/// interval.after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF) +/// +/// ```aiken +/// interval.entirely_after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn entirely_after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and including) the given bound. 
i.e (-INF, upper_bound] +/// +/// ```aiken +/// interval.before(100) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) +/// +/// ```aiken +/// interval.entirely_before(10) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] +/// +/// ```aiken +/// interval.between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) +/// +/// ```aiken +/// interval.entirely_between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an empty interval that contains no value. +/// +/// ```aiken +/// interval.contains(empty, 0) == False +/// interval.contains(empty, 1000) == False +/// ``` +pub const empty: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + } + +/// Create an interval that contains every possible values. i.e. (-INF, +INF) +/// +/// ```aiken +/// interval.contains(everything, 0) == True +/// interval.contains(everything, 1000) == True +/// ``` +pub const everything: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } + +// ## Inspecting + +/// Checks whether an element is contained within the interval. 
+/// +/// ```aiken +/// let iv = +/// Interval { +/// lower_bound: IntervalBound { +/// bound_type: Finite(14), +/// is_inclusive: True +/// }, +/// upper_bound: IntervalBound { +/// bound_type: Finite(42), +/// is_inclusive: False +/// }, +/// } +/// +/// interval.contains(iv, 25) == True +/// interval.contains(iv, 0) == False +/// interval.contains(iv, 14) == True +/// interval.contains(iv, 42) == False +/// ``` +pub fn contains(self: Interval, elem: Int) -> Bool { + let is_greater_than_lower_bound = + when self.lower_bound.bound_type is { + NegativeInfinity -> True + Finite(lower_bound) -> + if self.lower_bound.is_inclusive { + elem >= lower_bound + } else { + elem > lower_bound + } + PositiveInfinity -> False + } + + let is_smaller_than_upper_bound = + when self.upper_bound.bound_type is { + NegativeInfinity -> False + Finite(upper_bound) -> + if self.upper_bound.is_inclusive { + elem <= upper_bound + } else { + elem < upper_bound + } + PositiveInfinity -> True + } + + is_greater_than_lower_bound && is_smaller_than_upper_bound +} + +test contains_1() { + let iv = everything + contains(iv, 14) +} + +test contains_2() { + let iv = entirely_before(15) + contains(iv, 14) +} + +test contains_3() { + let iv = before(14) + contains(iv, 14) +} + +test contains_4() { + let iv = entirely_before(14) + !contains(iv, 14) +} + +test contains_5() { + let iv = entirely_after(13) + contains(iv, 14) +} + +test contains_6() { + let iv = after(14) + contains(iv, 14) +} + +test contains_7() { + let iv = entirely_after(14) + !contains(iv, 14) +} + +test contains_8() { + let iv = between(42, 1337) + !contains(iv, 14) +} + +test contains_9() { + let iv = between(0, 42) + contains(iv, 14) +} + +test contains_10() { + let iv = between(0, 42) + contains(iv, 42) +} + +test contains_11() { + let iv = entirely_between(0, 42) + !contains(iv, 0) +} + +test contains_12() { + let iv = empty + !contains(iv, 14) +} + +/// Tells whether an interval is empty; i.e. that is contains no value. +/// +/// ```aiken +/// let iv1 = interval.empty +/// +/// let iv2 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// } +/// +/// let iv3 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// +/// interval.is_empty(iv1) == True +/// interval.is_empty(iv2) == True +/// interval.is_empty(iv3) == False +/// +/// // Note: Two empty intervals are not necessarily equal. 
+/// iv1 != iv2 +/// ``` +pub fn is_empty(self: Interval) -> Bool { + let ordering = + compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) + + when ordering is { + Greater -> True + Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) + Less -> { + let is_open_interval = + !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive + if is_open_interval { + when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { + (Finite(lower_bound), Finite(upper_bound)) -> + lower_bound + 1 == upper_bound + _ -> False + } + } else { + False + } + } + } +} + +/// Check whether the interval is entirely after the point "a" +/// +/// ```aiken +/// interval.is_entirely_after(interval.after(10), 5) == True +/// interval.is_entirely_after(interval.after(10), 10) == False +/// interval.is_entirely_after(interval.after(10), 15) == False +/// interval.is_entirely_after(interval.between(10, 20), 30) == False +/// interval.is_entirely_after(interval.between(10, 20), 5) == True +pub fn is_entirely_after(self: Interval, point: Int) -> Bool { + when self.lower_bound.bound_type is { + Finite(low) -> + if self.lower_bound.is_inclusive { + point < low + } else { + point <= low + } + _ -> False + } +} + +test is_entirely_after_1() { + is_entirely_after(after(10), 5) +} + +test is_entirely_after_2() { + !is_entirely_after(after(10), 10) +} + +test is_entirely_after_3() { + !is_entirely_after(after(10), 15) +} + +test is_entirely_after_4() { + !is_entirely_after(between(10, 20), 30) +} + +test is_entirely_after_5() { + is_entirely_after(between(10, 20), 5) +} + +test is_entirely_after_6() { + is_entirely_after(entirely_after(10), 10) +} + +test is_entirely_after_7() { + !is_entirely_after(before(10), 5) +} + +test is_entirely_after_8() { + !is_entirely_after(before(10), 15) +} + +test is_entirely_after_9() { + !is_entirely_after(entirely_before(10), 5) +} + +/// Check whether the interval is entirely before the point "a" +/// +/// ```aiken +/// interval.is_entirely_before(interval.before(10), 15) == True +/// interval.is_entirely_before(interval.before(10), 10) == False +/// interval.is_entirely_before(interval.before(10), 5) == False +/// interval.is_entirely_before(interval.between(10, 20), 30) == True +/// interval.is_entirely_before(interval.between(10, 20), 5) == False +pub fn is_entirely_before(self: Interval, point: Int) -> Bool { + when self.upper_bound.bound_type is { + Finite(hi) -> + if self.upper_bound.is_inclusive { + hi < point + } else { + hi <= point + } + _ -> False + } +} + +test is_entirely_before_1() { + is_entirely_before(before(10), 15) +} + +test is_entirely_before_2() { + !is_entirely_before(before(10), 10) +} + +test is_entirely_before_3() { + !is_entirely_before(before(10), 5) +} + +test is_entirely_before_4() { + is_entirely_before(between(10, 20), 30) +} + +test is_entirely_before_5() { + !is_entirely_before(between(10, 20), 5) +} + +test is_entirely_before_6() { + is_entirely_before(entirely_before(10), 10) +} + +test is_entirely_before_7() { + !is_entirely_before(after(10), 15) +} + +test is_entirely_before_8() { + !is_entirely_before(after(10), 5) +} + +test is_entirely_before_9() { + !is_entirely_before(entirely_after(10), 5) +} + +// ## Combining + +/// Computes the smallest interval containing the two given intervals, if any +/// +/// ```aiken +/// let iv1 = between(0, 10) +/// let iv2 = between(2, 14) +/// hull(iv1, iv2) == between(0, 14) +/// +/// let iv1 = between(5, 10) +/// let iv2 = before(0) +/// hull(iv1, iv2) == 
before(10) +/// +/// let iv1 = entirely_after(0) +/// let iv2 = between(10, 42) +/// hull(iv1, iv2) = entirely_after(0) +/// ``` +pub fn hull(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: min(iv1.lower_bound, iv2.lower_bound), + upper_bound: max(iv1.upper_bound, iv2.upper_bound), + } +} + +test hull_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + hull(iv1, iv2) == between(0, 14) +} + +test hull_2() { + let iv1 = between(5, 10) + let iv2 = before(0) + hull(iv1, iv2) == before(10) +} + +test hull_3() { + let iv1 = entirely_after(0) + let iv2 = between(10, 42) + hull(iv1, iv2) == entirely_after(0) +} + +/// Computes the largest interval contains in the two given intervals, if any. +/// +/// ```aiken +/// let iv1 = interval.between(0, 10) +/// let iv2 = interval.between(2, 14) +/// interval.intersection(iv1, iv2) == interval.between(2, 10) +/// +/// let iv1 = interval.entirely_before(10) +/// let iv2 = interval.entirely_after(0) +/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10) +/// +/// let iv1 = interval.between(0, 1) +/// let iv2 = interval.between(2, 3) +/// interval.intersection(iv1, iv2) |> interval.is_empty +/// ``` +pub fn intersection(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: max(iv1.lower_bound, iv2.lower_bound), + upper_bound: min(iv1.upper_bound, iv2.upper_bound), + } +} + +test intersection_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + intersection(iv1, iv2) == between(2, 10) +} + +test intersection_2() { + let iv1 = between(0, 1) + let iv2 = between(1, 2) + intersection(iv1, iv2) == between(1, 1) +} + +test intersection_3() { + let iv1 = between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_4() { + let iv1 = entirely_between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_5() { + let iv1 = between(0, 10) + let iv2 = before(4) + intersection(iv1, iv2) == between(0, 4) +} + +test intersection_6() { + let iv1 = entirely_before(10) + let iv2 = entirely_after(0) + intersection(iv1, iv2) == entirely_between(0, 10) +} + +/// Return the highest bound of the two. +/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.max(ib1, ib2) == ib2 +/// ``` +pub fn max( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> right + Equal -> left + Greater -> left + } +} + +/// Return the smallest bound of the two. 
+/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.min(ib1, ib2) == ib1 +/// ``` +pub fn min( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> left + Equal -> left + Greater -> right + } +} + +fn compare_bound( + left: IntervalBound, + right: IntervalBound, +) -> Ordering { + when compare_bound_type(left.bound_type, right.bound_type) is { + Less -> Less + Greater -> Greater + Equal -> + if left.is_inclusive == right.is_inclusive { + Equal + } else if left.is_inclusive { + Greater + } else { + Less + } + } +} + +fn compare_bound_type( + left: IntervalBoundType, + right: IntervalBoundType, +) -> Ordering { + when left is { + NegativeInfinity -> + when right is { + NegativeInfinity -> Equal + _ -> Less + } + PositiveInfinity -> + when right is { + PositiveInfinity -> Equal + _ -> Greater + } + Finite(left) -> + when right is { + NegativeInfinity -> Greater + PositiveInfinity -> Less + Finite(right) -> + if left < right { + Less + } else if left == right { + Equal + } else { + Greater + } + } + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak new file mode 100644 index 00000000..dd575e7a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak @@ -0,0 +1,424 @@ +//// This module contains some basic Math utilities. Standard arithmetic +//// operations on integers are available through native operators: +//// +//// Operator | Description +//// --- | :--- +//// `+` | Arithmetic sum +//// `-` | Arithmetic difference +//// `/` | Whole division +//// `*` | Arithmetic multiplication +//// `%` | Remainder by whole division +//// +//// Here are a few examples: +//// +//// ```aiken +//// 1 + 1 // 2 +//// 10 - 2 // 8 +//// 40 / 14 // 2 +//// 3 * 4 // 12 +//// 10 % 3 // 1 + +use aiken/builtin + +/// Calculate the absolute value of an integer. +/// +/// ```aiken +/// math.abs(-42) == 42 +/// math.abs(14) == 14 +/// ``` +pub fn abs(self: Int) -> Int { + if self < 0 { + 0 - self + } else { + self + } +} + +test abs_1() { + abs(14) == 14 +} + +test abs_2() { + abs(-42) == 42 +} + +/// Restrict the value of an integer between two min and max bounds +/// +/// ```aiken +/// math.clamp(14, min: 0, max: 10) == 10 +/// ``` +pub fn clamp(self: Int, min: Int, max: Int) -> Int { + if self < min { + min + } else { + if self > max { + max + } else { + self + } + } +} + +test clamp_1() { + clamp(14, min: 0, max: 10) == 10 +} + +test clamp_2() { + clamp(7, min: 0, max: 10) == 7 +} + +test clamp_3() { + clamp(7, min: 10, max: 100) == 10 +} + +/// The greatest common divisor of two integers. +/// +/// ```aiken +/// math.gcd(42, 14) == 14 +/// math.gcd(14, 42) == 14 +/// math.gcd(0, 0) == 0 +/// ``` +pub fn gcd(x: Int, y: Int) -> Int { + abs(do_gcd(x, y)) +} + +fn do_gcd(x: Int, y: Int) -> Int { + when y is { + 0 -> x + _ -> do_gcd(y, x % y) + } +} + +test gcd_test1() { + gcd(10, 300) == 10 +} + +test gcd_test2() { + gcd(-10, 300) == 10 +} + +test gcd_test3() { + gcd(42, 14) == 14 +} + +/// Checks if an integer has a given integer square root x. +/// The check has constant time complexity $O(1)$. 
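+/// More precisely, it verifies that `x * x <= self` and `self < (x + 1) * (x + 1)`,
+/// which is also why a negative candidate such as `-5` is rejected for `25`.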
+/// +/// ```aiken +/// math.is_sqrt(0, 0) +/// math.is_sqrt(25, 5) +/// !math.is_sqrt(25, -5) +/// math.is_sqrt(44203, 210) +/// ``` +pub fn is_sqrt(self: Int, x: Int) -> Bool { + x * x <= self && ( x + 1 ) * ( x + 1 ) > self +} + +test is_sqrt1() { + is_sqrt(44203, 210) +} + +test is_sqrt2() { + is_sqrt(975461057789971041, 987654321) +} + +/// The logarithm in base `b` of an element using integer divisions. +/// +/// ```aiken +/// math.log(10, base: 2) == 3 +/// math.log(42, base: 2) == 5 +/// math.log(42, base: 3) == 3 +/// math.log(5, base: 0) == 0 +/// math.log(4, base: 4) == 1 +/// math.log(4, base: 42) == 0 +/// ``` +pub fn log(self: Int, base: Int) -> Int { + if base <= 0 { + 0 + } else if self == base { + 1 + } else if self < base { + 0 + } else { + 1 + log(self / base, base) + } +} + +test log_10_2() { + log(10, base: 2) == 3 +} + +test log_42_2() { + log(42, base: 2) == 5 +} + +test log_42_3() { + log(42, base: 3) == 3 +} + +test log_5_0() { + log(5, base: 0) == 0 +} + +test log_4_4() { + log(4, base: 4) == 1 +} + +test log_4_43() { + log(4, base: 43) == 0 +} + +/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. +/// +/// ```aiken +/// math.log2(1) == 0 +/// math.log2(2) == 1 +/// math.log2(3) == 1 +/// math.log2(4) == 2 +/// math.log2(256) == 8 +/// math.log2(257) == 8 +/// math.log2(511) == 8 +/// math.log2(1025) == 10 +/// ``` +pub fn log2(x: Int) -> Int { + expect x > 0 + let s = builtin.integer_to_bytearray(True, 0, x) + let len = builtin.length_of_bytearray(s) + let b = builtin.index_bytearray(s, 0) + len * 8 - if b < 2 { + 8 + } else if b < 4 { + 7 + } else if b < 8 { + 6 + } else if b < 16 { + 5 + } else if b < 32 { + 4 + } else if b < 64 { + 3 + } else if b < 128 { + 2 + } else { + 1 + } +} + +test log2_matrix() { + and { + log2(1) == 0, + log2(2) == 1, + log2(3) == 1, + log2(4) == 2, + log2(256) == 8, + log2(257) == 8, + log2(511) == 8, + log2(1025) == 10, + } +} + +/// Return the maximum of two integers. +pub fn max(a: Int, b: Int) -> Int { + if a > b { + a + } else { + b + } +} + +test max_1() { + max(0, 0) == 0 +} + +test max_2() { + max(14, 42) == 42 +} + +test max_3() { + max(42, 14) == 42 +} + +/// Return the minimum of two integers. +pub fn min(a: Int, b: Int) -> Int { + if a > b { + b + } else { + a + } +} + +test min_1() { + min(0, 0) == 0 +} + +test min_2() { + min(14, 42) == 14 +} + +test min_3() { + min(42, 14) == 14 +} + +/// Calculates a number to the power of `e` using the exponentiation by +/// squaring method. +/// +/// ```aiken +/// math.pow(3, 5) == 243 +/// math.pow(7, 2) == 49 +/// math.pow(3, -4) == 0 +/// math.pow(0, 0) == 1 +/// math.pow(513, 3) == 135005697 +/// ``` +pub fn pow(self: Int, e: Int) -> Int { + if e < 0 { + 0 + } else if e == 0 { + 1 + } else if e % 2 == 0 { + pow(self * self, e / 2) + } else { + self * pow(self * self, ( e - 1 ) / 2) + } +} + +test pow_3_5() { + pow(3, 5) == 243 +} + +test pow_7_2() { + pow(7, 2) == 49 +} + +test pow_3__4() { + // negative powers round to zero + pow(3, -4) == 0 +} + +test pow_0_0() { + // sorry math + pow(0, 0) == 1 +} + +test pow_513_3() { + pow(513, 3) == 135005697 +} + +test pow_2_4() { + pow(2, 4) == 16 +} + +test pow_2_42() { + pow(2, 42) == 4398046511104 +} + +/// Calculates the power of 2 for a given exponent `e`. Much cheaper than +/// using `pow(2, _)` for small exponents $0 < e < 256$. 
+/// +/// ```aiken +/// math.pow2(-2) == 0 +/// math.pow2(0) == 1 +/// math.pow2(1) == 2 +/// math.pow2(4) == 16 +/// math.pow2(42) == 4398046511104 +/// ``` +pub fn pow2(e: Int) -> Int { + // do_pow2(e, 1) + if e < 8 { + if e < 0 { + 0 + } else { + builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) + } + } else if e < 32 { + 256 * pow2(e - 8) + } else { + 4294967296 * pow2(e - 32) + } +} + +test pow2_neg() { + pow2(-2) == 0 +} + +test pow2_0() { + pow2(0) == 1 +} + +test pow2_1() { + pow2(1) == 2 +} + +test pow2_4() { + pow2(4) == 16 +} + +test pow2_42() { + pow2(42) == 4398046511104 +} + +test pow2_256() { + pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 +} + +/// Calculates the square root of an integer using the [Babylonian +/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer +/// nearest to the square root. +/// +/// Returns `None` for negative values. +/// +/// ```aiken +/// math.sqrt(0) == Some(0) +/// math.sqrt(25) == Some(5) +/// math.sqrt(44203) == Some(210) +/// math.sqrt(-42) == None +/// ``` +/// +/// > [!TIP] +/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. +pub fn sqrt(self: Int) -> Option { + if self < 0 { + None + } else if self <= 1 { + Some(self) + } else { + Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) + } +} + +// The basic idea is that if x is an overestimate to the square root of a +// non-negative real number S then S/x will be an underestimate, or vice versa, +// and so the average of these two numbers may reasonably be expected to provide a +// better approximation (though the formal proof of that assertion depends on the +// inequality of arithmetic and geometric means that shows this average is always +// an overestimate of the square root. +fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { + if y >= x { + x + } else { + sqrt_babylonian(self, y, ( y + self / y ) / 2) + } +} + +test sqrt1() { + sqrt(0) == Some(0) +} + +test sqrt2() { + sqrt(1) == Some(1) +} + +test sqrt3() { + sqrt(25) == Some(5) +} + +test sqrt4() { + sqrt(44203) == Some(210) +} + +test sqrt5() { + sqrt(975461057789971041) == Some(987654321) +} + +test sqrt6() { + sqrt(-42) == None +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak new file mode 100644 index 00000000..88fe7ab7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak @@ -0,0 +1,871 @@ +//// This module implements operations between rational numbers. +//// +//// > [!CAUTION] +//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. +//// > +//// > Thus, for example: +//// > +//// > ```aiken +//// > rational.new(2, 3) != rational.new(4, 6) +//// > ``` +//// > +//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. + +use aiken/builtin +use aiken/collection/list +use aiken/math +use aiken/option + +/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. 
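+///
+/// For example, any sign on the denominator is normalised into the numerator by [`new`](#new):
+///
+/// ```aiken
+/// rational.new(2, -3) == rational.new(-2, 3)
+/// ```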
+pub opaque type Rational { + numerator: Int, + denominator: Int, +} + +// ## Constructing + +/// Create a new `Rational` from an `Int`. +/// +/// ```aiken +/// Some(rational.from_int(14)) == rational.new(14, 1) +/// Some(rational.from_int(-5)) == rational.new(-5, 1) +/// Some(rational.from_int(0)) == rational.new(0, 1) +/// ``` +pub fn from_int(numerator: Int) -> Rational { + Rational { numerator, denominator: 1 } +} + +test from_int_1() { + and { + (from_int(14) == ratio(14, 1))?, + (from_int(-5) == ratio(-5, 1))?, + (from_int(0) == ratio(0, 1))?, + } +} + +/// An unsafe constructor for `Rational` values. Assumes that the following invariants are +/// enforced: +/// +/// - the denominator is positive (the sign is managed in the numerator); +/// - the denominator is not null. +/// +/// This function is mainly used as a quick way to construct rationals from literal values. +fn ratio(numerator: Int, denominator: Int) -> Rational { + Rational { numerator, denominator } +} + +/// Make a `Rational` number from the ratio of two integers. +/// +/// Returns `None` when the denominator is null. +/// +/// ```aiken +/// rational.new(14, 42) == Some(r) +/// rational.new(14, 0) == None +/// ``` +pub fn new(numerator: Int, denominator: Int) -> Option { + if denominator == 0 { + None + } else if denominator < 0 { + Some(Rational { numerator: -numerator, denominator: -denominator }) + } else { + Some(Rational { numerator, denominator }) + } +} + +test new_1() { + and { + (new(2, 0) == None)?, + (new(2, 3) == Some(ratio(2, 3)))?, + (new(-2, 3) == Some(ratio(-2, 3)))?, + (new(2, -3) == Some(ratio(-2, 3)))?, + (new(2, 4) == Some(ratio(2, 4)))?, + (new(-2, -3) == Some(ratio(2, 3)))?, + (new(-2, -4) == Some(ratio(2, 4)))?, + } +} + +/// A null `Rational`. +pub const zero: Rational = Rational { numerator: 0, denominator: 1 } + +test zero_1() { + zero == ratio(0, 1) +} + +// ## Inspecting + +/// Get the denominator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.denominator(x) == 3 +/// ``` +pub fn denominator(self: Rational) -> Int { + self.denominator +} + +test denominator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + and { + (denominator(x) == 3)?, + (denominator(y) == 3)?, + (denominator(z) == 3)?, + (denominator(w) == 3)?, + } +} + +/// Get the numerator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.numerator(x) == 2 +/// ``` +pub fn numerator(self: Rational) -> Int { + self.numerator +} + +test numerator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + + and { + (numerator(x) == 2)?, + (numerator(y) == -2)?, + (numerator(z) == -2)?, + (numerator(w) == 2)?, + } +} + +// ## Modifying + +/// Absolute value of a `Rational`. +/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.abs(x) == x +/// rational.abs(y) == x +/// ``` +pub fn abs(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + Rational { numerator: math.abs(a_n), denominator: a_d } +} + +test abs_examples() { + and { + (abs(ratio(5, 2)) == ratio(5, 2))?, + (abs(ratio(-5, 2)) == ratio(5, 2))?, + (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, + } +} + +/// Change the sign of a `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.negate(x) == y +/// rational.negate(y) == x +/// ``` +pub fn negate(a: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = a + Rational { numerator: -a_n, denominator: a_d } +} + +test negate_1() { + and { + (negate(ratio(5, 2)) == ratio(-5, 2))?, + (negate(ratio(-5, 2)) == ratio(5, 2))?, + (negate(negate(ratio(5, 2))) == ratio(5, 2))?, + } +} + +/// Reciprocal of a `Rational` number. That is, a new `Rational` where the +/// numerator and denominator have been swapped. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 5) +/// rational.reciprocal(x) == rational.new(5, 2) +/// +/// let y = rational.zero +/// rational.reciprocal(y) == None +/// ``` +pub fn reciprocal(self: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = self + if a_n < 0 { + Some(Rational { numerator: -a_d, denominator: -a_n }) + } else if a_n > 0 { + Some(Rational { numerator: a_d, denominator: a_n }) + } else { + None + } +} + +test reciprocal_1() { + and { + (reciprocal(ratio(5, 2)) == new(2, 5))?, + (reciprocal(ratio(-5, 2)) == new(-2, 5))?, + (reciprocal(ratio(0, 2)) == None)?, + (reciprocal(ratio(2, 3)) == new(3, 2))?, + (reciprocal(ratio(-2, 3)) == new(-3, 2))?, + } +} + +/// Reduce a rational to its irreducible form. This operation makes the +/// numerator and denominator coprime. +/// +/// ```aiken +/// expect Some(x) = rational.new(80, 200) +/// Some(rational.reduce(x)) == rational.new(2, 5) +/// ``` +pub fn reduce(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + let d = math.gcd(a_n, a_d) + Rational { numerator: a_n / d, denominator: a_d / d } +} + +test reduce_1() { + and { + (reduce(ratio(80, 200)) == ratio(2, 5))?, + (reduce(ratio(-5, 1)) == ratio(-5, 1))?, + (reduce(ratio(0, 3)) == ratio(0, 1))?, + } +} + +// ## Combining + +// ### Arithmetic operations + +/// Addition: sum of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.add(x, y)) == rational.new(17, 12) +/// ``` +pub fn add(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } +} + +test add_1() { + add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) +} + +test add_2() { + add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) +} + +/// Division: quotient of two rational values. Returns `None` when the second +/// value is null. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.div(x, y) == rational.new(8, 9) +/// ``` +pub fn div(left: Rational, right: Rational) -> Option { + reciprocal(right) |> option.map(mul(left, _)) +} + +test div_1() { + div(ratio(2, 3), ratio(3, 4)) == new(8, 9) +} + +test div_2() { + div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) +} + +/// Multiplication: the product of two rational values. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.mul(x, y)) == rational.new(6, 12) +/// ``` +pub fn mul(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_n, denominator: a_d * b_d } +} + +test mul_1() { + mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) +} + +test mul_2() { + mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) +} + +test mul_3() { + let result = + ratio(2, 5) + |> mul(ratio(1, 8)) + |> mul(ratio(3, 10)) + |> mul(ratio(21, 100)) + |> mul(ratio(3, 5)) + |> mul(ratio(2, 8)) + |> mul(ratio(4, 10)) + |> mul(ratio(22, 100)) + |> reduce + + result == ratio(2079, 50000000) +} + +/// Subtraction: difference of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.sub(x, y)) == rational.new(-1, 12) +/// ``` +pub fn sub(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } +} + +test sub_1() { + sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) +} + +test sub_2() { + sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) +} + +test sub_3() { + sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) +} + +// ### Ordering + +/// Compare two rationals for an ordering. This is safe to use even for +/// non-reduced rationals. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// expect Some(z) = rational.new(4, 6) +/// +/// compare(x, y) == Less +/// compare(y, x) == Greater +/// compare(x, x) == Equal +/// compare(x, z) == Equal +/// ``` +pub fn compare(left: Rational, right: Rational) -> Ordering { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + + let l = a_n * b_d + let r = b_n * a_d + + if l < r { + Less + } else if l > r { + Greater + } else { + Equal + } +} + +test compare_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + expect Some(z) = new(4, 6) + and { + compare(x, y) == Less, + compare(y, x) == Greater, + compare(x, x) == Equal, + compare(x, z) == Equal, + } +} + +/// Comparison of two rational values using a chosen heuristic. For example: +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.compare_with(x, >, y) == False +/// rational.compare_with(y, >, x) == True +/// rational.compare_with(x, >, x) == False +/// rational.compare_with(x, >=, x) == True +/// rational.compare_with(x, ==, x) == True +/// rational.compare_with(x, ==, y) == False +/// ``` +pub fn compare_with( + left: Rational, + with: fn(Int, Int) -> Bool, + right: Rational, +) -> Bool { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + with(a_n * b_d, b_n * a_d) +} + +// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. + +test compare_with_eq() { + let eq = + compare_with(_, fn(l, r) { l == r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
+} + +test compare_with_neq() { + let neq = + compare_with(_, fn(l, r) { l != r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + neq(x, y)? && neq(y, x)? && !neq(x, x)? +} + +test compare_with_gte() { + let gte = + compare_with(_, fn(l, r) { l >= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gte(x, y)? && gte(y, x)? && gte(x, x)? +} + +test compare_with_gt() { + let gt = + compare_with(_, fn(l, r) { l > r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gt(x, y)? && gt(y, x)? && !gt(x, x)? +} + +test compare_with_lte() { + let lte = + compare_with(_, fn(l, r) { l <= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lte(x, y)? && !lte(y, x)? && lte(x, x)? +} + +test compare_with_lt() { + let lt = + compare_with(_, fn(l, r) { l < r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lt(x, y)? && !lt(y, x)? && !lt(x, x)? +} + +// ### Means + +/// Calculate the arithmetic mean between two `Rational` values. +/// +/// ```aiken +/// let x = rational.from_int(0) +/// let y = rational.from_int(1) +/// let z = rational.from_int(2) +/// +/// expect Some(result) = rational.arithmetic_mean([x, y, z]) +/// +/// rational.compare(result, y) == Equal +/// ``` +pub fn arithmetic_mean(self: List) -> Option { + div(list.foldr(self, zero, add), from_int(list.length(self))) +} + +test arithmetic_mean_1() { + let x = ratio(1, 2) + let y = ratio(1, 2) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(1, 2) +} + +test arithmetic_mean_2() { + let x = ratio(1, 1) + let y = ratio(2, 1) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(3, 2) +} + +test arithmetic_mean_3() { + let xs = + [ + ratio(1, 1), + ratio(2, 1), + ratio(3, 1), + ratio(4, 1), + ratio(5, 1), + ratio(6, 1), + ] + expect Some(z) = arithmetic_mean(xs) + reduce(z) == ratio(7, 2) +} + +/// Calculate the geometric mean between two `Rational` values. This returns +/// either the exact result or the smallest integer nearest to the square root +/// for the numerator and denominator. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(1, 3) +/// expect Some(y) = rational.new(1, 6) +/// +/// rational.geometric_mean(x, y) == rational.new(1, 4) +/// ``` +pub fn geometric_mean(left: Rational, right: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + when math.sqrt(a_n * b_n) is { + Some(numerator) -> + when math.sqrt(a_d * b_d) is { + Some(denominator) -> Some(Rational { numerator, denominator }) + None -> None + } + None -> None + } +} + +test geometric_mean1() { + expect Some(x) = new(1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == new(1, 2) +} + +test geometric_mean2() { + expect Some(x) = new(-1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean3() { + expect Some(x) = new(1, 2) + expect Some(y) = new(-1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean4() { + expect Some(x) = new(1, 3) + expect Some(y) = new(1, 6) + geometric_mean(x, y) == new(1, 4) +} + +test geometric_mean5() { + expect Some(x) = new(67, 2500) + expect Some(y) = new(35331, 1000) + expect Some(yi) = reciprocal(y) + geometric_mean(x, yi) == new(258, 9398) +} + +// ## Transforming + +/// Returns the smallest `Int` not less than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.ceil(x) == 1 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.ceil(y) == 4 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.ceil(z) == -4 +/// ``` +pub fn ceil(self: Rational) -> Int { + let Rational { numerator, denominator } = self + if builtin.remainder_integer(numerator, denominator) > 0 { + builtin.quotient_integer(numerator, denominator) + 1 + } else { + builtin.quotient_integer(numerator, denominator) + } +} + +test ceil_1() { + and { + (ceil(ratio(13, 5)) == 3)?, + (ceil(ratio(15, 5)) == 3)?, + (ceil(ratio(16, 5)) == 4)?, + (ceil(ratio(-3, 5)) == 0)?, + (ceil(ratio(-5, 5)) == -1)?, + (ceil(ratio(-14, 3)) == -4)?, + (ceil(ratio(-14, 6)) == -2)?, + (ceil(ratio(44, 14)) == 4)?, + } +} + +/// Returns the greatest `Int` no greater than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.floor(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.floor(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.floor(z) == -5 +/// ``` +pub fn floor(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + a_n / a_d +} + +test floor_1() { + and { + (floor(ratio(5, 2)) == 2)?, + (floor(ratio(5, 3)) == 1)?, + (floor(ratio(5, 4)) == 1)?, + (floor(ratio(5, 5)) == 1)?, + (floor(ratio(5, 6)) == 0)?, + (floor(ratio(8, 3)) == 2)?, + (floor(ratio(-14, 3)) == -5)?, + } +} + +/// Computes the rational number x raised to the power y. Returns `None` for +/// invalid exponentiation. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) +/// +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) +/// ``` +pub fn pow(x: Rational, y: Int) -> Option { + let Rational { numerator: a, denominator: b } = x + + if a == 0 && y <= 0 { + None + } else if y > 0 { + Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) + } else if y < 0 { + Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) + } else { + Some(Rational { numerator: 1, denominator: 1 }) + } +} + +test pow_negative_exponent_non_zero_fraction() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, -3) + expect Some(expected_result) = new(125000, 1) + reduce(calculated_result) == expected_result +} + +test pow_positive_exponent() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, 3) + expect Some(expected_result) = new(1, 125000) + reduce(calculated_result) == expected_result +} + +test pow_exponent_zero() { + expect Some(base) = new(50, 2500) + pow(base, 0) == new(1, 1) +} + +test pow_rational_zero_exponent_zero() { + expect Some(base) = new(0, 1) + pow(base, 0) == None +} + +/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of +/// an `Int` and `Rational` (n, f) such that: +/// +/// - `r = n + f`; +/// - `n` and `f` have the same sign as `r`; +/// - `f` has an absolute value less than 1. +pub fn proper_fraction(self: Rational) -> (Int, Rational) { + let Rational { numerator, denominator } = self + ( + builtin.quotient_integer(numerator, denominator), + Rational { + numerator: builtin.remainder_integer(numerator, denominator), + denominator, + }, + ) +} + +test proper_fraction_1() { + let r = ratio(10, 7) + let (n, f) = proper_fraction(r) + and { + (n == 1)?, + (f == ratio(3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_2() { + let r = ratio(-10, 7) + let (n, f) = proper_fraction(r) + and { + (n == -1)?, + (f == ratio(-3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_3() { + let r = ratio(4, 2) + let (n, f) = proper_fraction(r) + and { + (n == 2)?, + (f == ratio(0, 2))?, + (r == add(from_int(n), f))?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, the greater value is returned (it +/// rounds half towards positive infinity). +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.round(x) == 1 +/// +/// expect Some(y) = rational.new(3, 2) +/// rational.round(y) == 2 +/// +/// expect Some(z) = rational.new(-3, 2) +/// rational.round(z) == -1 +/// ``` +/// +/// > [!CAUTION] +/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. +/// > If you need this behaviour, use [`round_even`](#round_even). 
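+///
+/// For example, on the half-way value `5/2` the two rounding functions differ:
+///
+/// ```aiken
+/// expect Some(x) = rational.new(5, 2)
+/// rational.round(x) == 3
+/// rational.round_even(x) == 2
+/// ```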
+pub fn round(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let is_negative = f.numerator < 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_negative { + n + } else { + n + 1 + } + Greater -> + if is_negative { + n - 1 + } else { + n + 1 + } + } +} + +test round_1() { + and { + (round(ratio(10, 7)) == 1)?, + (round(ratio(11, 7)) == 2)?, + (round(ratio(3, 2)) == 2)?, + (round(ratio(5, 2)) == 3)?, + (round(ratio(-3, 2)) == -1)?, + (round(ratio(-2, 3)) == -1)?, + (round(ratio(-10, 7)) == -1)?, + (round(ratio(4, 2)) == 2)?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, it returns the value that is even (it +/// rounds half to even, also known as 'banker's rounding'). +/// +/// ```aiken +/// expect Some(w) = rational.new(2, 3) +/// rational.round_even(w) == 1 +/// +/// expect Some(x) = rational.new(3, 2) +/// rational.round_even(x) == 2 +/// +/// expect Some(y) = rational.new(5, 2) +/// rational.round_even(y) == 2 +/// +/// expect Some(y) = rational.new(-3, 2) +/// rational.round_even(y) == -2 +/// ``` +pub fn round_even(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let m = + when compare(f, ratio(0, 1)) is { + Less -> -1 + _ -> 1 + } + + let is_even = n % 2 == 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_even { + n + } else { + n + m + } + Greater -> n + m + } +} + +test round_even_1() { + and { + (round_even(ratio(10, 7)) == 1)?, + (round_even(ratio(11, 7)) == 2)?, + (round_even(ratio(3, 2)) == 2)?, + (round_even(ratio(5, 2)) == 2)?, + (round_even(ratio(-3, 2)) == -2)?, + (round_even(ratio(-2, 3)) == -1)?, + (round_even(ratio(-10, 7)) == -1)?, + (round_even(ratio(4, 2)) == 2)?, + } +} + +/// Returns the nearest `Int` between zero and a given `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.truncate(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.truncate(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.truncate(z) == -4 +/// ``` +pub fn truncate(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + builtin.quotient_integer(a_n, a_d) +} + +test truncate_1() { + and { + (truncate(ratio(5, 2)) == 2)?, + (truncate(ratio(5, 3)) == 1)?, + (truncate(ratio(5, 4)) == 1)?, + (truncate(ratio(5, 5)) == 1)?, + (truncate(ratio(5, 6)) == 0)?, + (truncate(ratio(8, 3)) == 2)?, + (truncate(ratio(-14, 3)) == -4)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak new file mode 100644 index 00000000..ab8cbc17 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak @@ -0,0 +1,65 @@ +use aiken/fuzz.{both, either, map} +use aiken/math/rational.{Rational, new, pow} + +const any_positive_rational: Fuzzer = + either( + map( + both(fuzz.int_at_least(1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_negative_rational: Fuzzer = + either( + map( + both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_non_zero_rational: Fuzzer = + either(any_negative_rational, any_positive_rational) + +test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 0) + expect Some(expected_result) = new(1, 1) + calculated_result == expected_result +} + +test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 1) + calculated_result == rational +} + +test prop_power_numerator_zero_exponent_negative_returns_none( + (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), +) { + expect Some(fraction) = new(0, denominator) + expect None = pow(fraction, exponent) +} + +test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { + expect Some(unit) = new(1, 1) + expect Some(calculated_result) = pow(unit, exponent) + calculated_result == unit +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak new file mode 100644 index 00000000..cf5ef7dc --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak @@ -0,0 +1,312 @@ +//// A type to capture optional results; useful for handling errors. +//// +//// Note that the `Option` type and its constructors are readily available in Aiken. 
They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. + +// ## Inspecting + +/// Asserts whether an option is `None`. +pub fn is_none(self: Option) -> Bool { + when self is { + Some(_) -> False + _ -> True + } +} + +test is_none_1() { + is_none(Some(0)) == False +} + +test is_none_2() { + is_none(None) == True +} + +/// Asserts whether an option is `Some`, irrespective of the value it contains. +pub fn is_some(self: Option) -> Bool { + when self is { + Some(_) -> True + _ -> False + } +} + +test is_some_1() { + is_some(Some(0)) == True +} + +test is_some_2() { + is_some(None) == False +} + +// ## Combining + +/// Chain together many computations that may fail. +/// +/// ```aiken +/// self +/// |> dict.get(policy_id) +/// |> option.and_then(dict.get(_, asset_name)) +/// |> option.or_else(0) +/// ``` +pub fn and_then( + self: Option, + then: fn(a) -> Option, +) -> Option { + when self is { + None -> None + Some(a) -> then(a) + } +} + +fn try_decrement(n: Int) -> Option { + if n > 0 { + Some(n - 1) + } else { + None + } +} + +test and_then_1() { + let result = + None + |> and_then(try_decrement) + result == None +} + +test and_then_2() { + let result = + Some(14) + |> and_then(try_decrement) + result == Some(13) +} + +test and_then_3() { + let result = + Some(0) + |> and_then(try_decrement) + result == None +} + +/// Picks the first element which is not None. If there's no such element, return None. +/// +/// ```aiken +/// option.choice([]) == None +/// option.choice([Some(14), Some(42)]) == Some(14) +/// option.choice([None, Some(42)]) == Some(42) +/// option.choice([None, None]) == None +/// ``` +pub fn choice(self: List>) -> Option { + when self is { + [] -> None + [head, ..others] -> + when head is { + None -> choice(others) + _ -> head + } + } +} + +test choice_1() { + Some(1) == choice([Some(1), Some(2)]) +} + +test choice_2() { + None == choice([]) +} + +test choice_3() { + Some(1) == choice([None, Some(1)]) +} + +/// Converts from `Option>` to `Option`. +/// +/// ```aiken +/// option.flatten(Some(Some(42))) == Some(42) +/// option.flatten(Some(None)) == None +/// option.flatten(None) == None +/// ``` +/// +/// Flattening only removes one level of nesting at a time: +/// +/// ```aiken +/// flatten(Some(Some(Some(42)))) == Some(Some(42)) +/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) +/// ``` +pub fn flatten(opt: Option>) -> Option { + when opt is { + Some(inner) -> inner + None -> None + } +} + +test flatten_1() { + let x: Option> = Some(Some(6)) + Some(6) == flatten(x) +} + +test flatten_2() { + let x: Option> = Some(None) + None == flatten(x) +} + +test flatten_3() { + let x: Option> = None + None == flatten(x) +} + +test flatten_4() { + let x: Option>> = Some(Some(Some(6))) + + let result = + x + |> flatten + |> flatten + + Some(6) == result +} + +/// Apply a function to the inner value of an [`Option`](#option) +/// +/// ```aiken +/// option.map(None, fn(n) { n * 2 }) == None +/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) +/// ``` +pub fn map(self: Option, with: fn(a) -> result) -> Option { + when self is { + None -> None + Some(a) -> Some(with(a)) + } +} + +test map_1() { + map(None, fn(_) { Void }) == None +} + +test map_2() { + map(Some(14), fn(n) { n + 1 }) == Some(15) +} + +/// Combine two [`Option`](#option) together. 
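+/// The result is `Some` only when both options are `Some`; otherwise it is `None`.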
+/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int) +/// } +/// +/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) +/// option.map2(None, Some(42), Foo) == None +/// option.map2(Some(14), None, Foo) == None +/// ``` +pub fn map2( + opt_a: Option, + opt_b: Option, + with: fn(a, b) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> Some(with(a, b)) + } + } +} + +test map2_1() { + map2(None, Some(42), fn(_, _) { 14 }) == None +} + +test map2_2() { + map2(Some(42), None, fn(_, _) { 14 }) == None +} + +test map2_3() { + map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) +} + +/// Combine three [`Option`](#option) together. +/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int, Int) +/// } +/// +/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) +/// option.map3(None, Some(42), Some(1337), Foo) == None +/// option.map3(Some(14), None, None, Foo) == None +/// ``` +pub fn map3( + opt_a: Option, + opt_b: Option, + opt_c: Option, + with: fn(a, b, c) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> + when opt_c is { + None -> None + Some(c) -> Some(with(a, b, c)) + } + } + } +} + +test map3_1() { + map3(None, Some(42), None, fn(_, _, _) { 14 }) == None +} + +test map3_2() { + map3(Some(42), None, None, fn(_, _, _) { 14 }) == None +} + +test map3_3() { + map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) +} + +/// Like [`or_else`](#or_else) but allows returning an `Option`. +/// This is effectively mapping the error branch. +/// +/// ```aiken +/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") +/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) +/// option.or_try(None, fn (_) { fail }) => 💥 +/// ``` +pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { + when self is { + None -> compute_default() + _ -> self + } +} + +test or_try_1() { + or_try(None, fn() { Some("aiken") }) == Some("aiken") +} + +test or_try_2() { + or_try(Some(42), fn() { fail }) == Some(42) +} + +// ## Transforming + +/// Provide a default value, turning an optional value into a normal value. +/// +/// ```aiken +/// option.or_else(None, "aiken") == "aiken" +/// option.or_else(Some(42), 14) == 42 +/// ``` +pub fn or_else(self: Option, default: a) -> a { + when self is { + None -> default + Some(a) -> a + } +} + +test or_else_1() { + or_else(None, "aiken") == "aiken" +} + +test or_else_2() { + or_else(Some(42), 14) == 42 +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak new file mode 100644 index 00000000..d2f125f5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak @@ -0,0 +1,668 @@ +use aiken/builtin +use aiken/math +use aiken/option + +pub type Byte = + Int + +// ## Constructing + +/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is left-padded with zeroes. 
+/// +/// ```aiken +/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" +/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" +/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self) +} + +test from_int_big_endian_1() { + from_int_big_endian(1_000_000, 3) == #"0f4240" +} + +test from_int_big_endian_2() { + from_int_big_endian(1_000_000, 5) == #"00000f4240" +} + +test from_int_big_endian_3() { + from_int_big_endian(0, 8) == #"0000000000000000" +} + +test from_int_big_endian_4() fail { + from_int_big_endian(1_000_000, 1) == #"40" +} + +/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is right-padded with zeroes. +/// +/// ```aiken +/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" +/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" +/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self) +} + +test from_int_little_endian_1() { + from_int_little_endian(1_000_000, 3) == #"40420f" +} + +test from_int_little_endian_2() { + from_int_little_endian(1_000_000, 5) == #"40420f0000" +} + +test from_int_little_endian_3() { + from_int_little_endian(0, 8) == #"0000000000000000" +} + +test from_int_little_endian_4() fail { + from_int_little_endian(1_000_000, 1) == #"40" +} + +/// Convert a `String` into a `ByteArray`. +/// +/// ```aiken +/// bytearray.from_string(@"ABC") == #"414243" +/// ``` +pub fn from_string(str: String) -> ByteArray { + builtin.encode_utf8(str) +} + +test from_string_1() { + from_string(@"") == "" +} + +test from_string_2() { + from_string(@"ABC") == #"414243" +} + +/// Add a byte element in front of a `ByteArray`. When the given byte is +/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so +/// forth. +/// In PlutusV3 this will error instead of wrapping around. +/// +/// ```aiken +/// bytearray.push(#"", 0) == #"00" +/// bytearray.push(#"0203", 1) == #"010203" +/// bytearray.push(#"0203", 257) == #"010203" +/// ``` +pub fn push(self: ByteArray, byte: Byte) -> ByteArray { + builtin.cons_bytearray(byte, self) +} + +test push_1() { + push(#[], 0) == #[0] +} + +test push_2() { + push(#[2, 3], 1) == #[1, 2, 3] +} + +test push_3() fail { + let x = 257 + push(#[2, 3], x) == #[1, 2, 3] +} + +// ## Inspecting + +/// Get the `Byte` at the given index, or crash. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if there's no byte at the given index. +pub fn at(self: ByteArray, index: Int) -> Byte { + builtin.index_bytearray(self, index) +} + +/// Search the start and end positions of a sub-array in a `ByteArray`. 
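+/// Returns `None` when the sub-array cannot be found. Both positions are 0-based and inclusive.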
+/// +/// ```aiken +/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) +/// bytearray.index_of("Hello, World!", "foo") == None +/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) +/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) +/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +/// ``` +pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { + let offset = length(bytes) + + do_index_of(self, bytes, 0, offset, length(self)) + |> option.map(fn(ix) { (ix, ix + offset - 1) }) +} + +fn do_index_of( + self: ByteArray, + bytes: ByteArray, + cursor: Int, + offset: Int, + size: Int, +) -> Option { + if cursor + offset > size { + None + } else { + if builtin.slice_bytearray(cursor, offset, self) == bytes { + Some(cursor) + } else { + do_index_of(self, bytes, cursor + 1, offset, size) + } + } +} + +test index_of_1() { + index_of("Hello, World!", "World") == Some((7, 11)) +} + +test index_of_2() { + index_of("Hello, World!", "foo") == None +} + +test index_of_3() { + index_of("Hello, World!", "!") == Some((12, 12)) +} + +test index_of_4() { + index_of("Hello, World!", "o") == Some((4, 4)) +} + +test index_of_5() { + index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +} + +/// Returns `True` when the given `ByteArray` is empty. +/// +/// ```aiken +/// bytearray.is_empty(#"") == True +/// bytearray.is_empty(#"00ff") == False +/// ``` +pub fn is_empty(self: ByteArray) -> Bool { + builtin.length_of_bytearray(self) == 0 +} + +test is_empty_1() { + is_empty(#"") == True +} + +test is_empty_2() { + is_empty(#"01") == False +} + +/// Returns the number of bytes in a `ByteArray`. +/// +/// ```aiken +/// bytearray.length(#[1, 2, 3]) == 3 +/// ``` +pub fn length(self: ByteArray) -> Int { + builtin.length_of_bytearray(self) +} + +test length_1() { + length(#"") == 0 +} + +test length_2() { + length(#"010203") == 3 +} + +/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. +/// +/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the +/// following bits sequence: +/// +/// `8` | `b` | `7` | `6` | `5` | `f` +/// --- | --- | --- | --- | --- | --- +/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` +/// +/// And thus, we have: +/// +/// ```aiken +/// test_bit(#"8b765f", 0) == True +/// test_bit(#"8b765f", 1) == False +/// test_bit(#"8b765f", 2) == False +/// test_bit(#"8b765f", 3) == False +/// test_bit(#"8b765f", 7) == True +/// test_bit(#"8b765f", 8) == False +/// test_bit(#"8b765f", 20) == True +/// test_bit(#"8b765f", 21) == True +/// test_bit(#"8b765f", 22) == True +/// test_bit(#"8b765f", 23) == True +/// ``` +pub fn test_bit(self: ByteArray, ix: Int) -> Bool { + builtin.less_than_equals_bytearray( + #[128], + builtin.cons_bytearray( + builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, + "", + ), + ) +} + +test test_bit_0() { + test_bit(#"8b765f", 0) +} + +test test_bit_1() { + !test_bit(#"8b765f", 1) +} + +test test_bit_2() { + !test_bit(#"8b765f", 2) +} + +test test_bit_3() { + !test_bit(#"8b765f", 3) +} + +test test_bit_7() { + test_bit(#"8b765f", 7) +} + +test test_bit_8() { + !test_bit(#"8b765f", 8) +} + +test test_bit_20_21_22_23() { + and { + test_bit(#"8b765f", 20), + test_bit(#"8b765f", 21), + test_bit(#"8b765f", 22), + test_bit(#"8b765f", 23), + } +} + +// ## Modifying + +/// Returns the suffix of a `ByteArray` after `n` elements. 
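+/// Dropping more elements than the `ByteArray` contains yields an empty `ByteArray`.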
+/// +/// ```aiken +/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] +/// ``` +pub fn drop(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) +} + +test drop_1() { + let x = #"01020304050607" + drop(x, 2) == #"0304050607" +} + +test drop_2() { + let x = #"01020304050607" + drop(x, 0) == x +} + +test drop_3() { + let x = #"01" + drop(x, 1) == #"" +} + +test drop_4() { + let x = #"" + drop(x, 2) == #"" +} + +/// Extract a `ByteArray` as a slice of another `ByteArray`. +/// +/// Indexes are 0-based and inclusive. +/// +/// ```aiken +/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] +/// ``` +pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { + builtin.slice_bytearray(start, end - start + 1, self) +} + +test slice_1() { + slice(#"", 1, 2) == #"" +} + +test slice_2() { + slice(#"010203", 1, 2) == #"0203" +} + +test slice_3() { + slice(#"010203", 0, 42) == #"010203" +} + +test slice_4() { + slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] +} + +test slice_5() { + slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] +} + +/// Returns the n-length prefix of a `ByteArray`. +/// +/// ```aiken +/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] +/// ``` +pub fn take(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(0, n, self) +} + +test take_1() { + let x = #"01020304050607" + take(x, 2) == #"0102" +} + +test take_2() { + let x = #"01020304050607" + take(x, 0) == #"" +} + +test take_3() { + let x = #"01" + take(x, 1) == x +} + +test take_4() { + let x = #"010203" + take(x, 0) == #"" +} + +// ## Combining + +/// Combine two `ByteArray` together. +/// +/// ```aiken +/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { + builtin.append_bytearray(left, right) +} + +test concat_1() { + concat(#"", #"") == #"" +} + +test concat_2() { + concat(#"", #"01") == #"01" +} + +test concat_3() { + concat(#"0102", #"") == #"0102" +} + +test concat_4() { + concat(#"0102", #"0304") == #"01020304" +} + +/// Compare two bytearrays lexicographically. +/// +/// ```aiken +/// bytearray.compare(#"00", #"FF") == Less +/// bytearray.compare(#"42", #"42") == Equal +/// bytearray.compare(#"FF", #"00") == Greater +/// ``` +pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { + if builtin.less_than_bytearray(left, right) { + Less + } else if builtin.equals_bytearray(left, right) { + Equal + } else { + Greater + } +} + +// ## Transforming + +/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
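+/// The accumulator is threaded from the leftmost byte to the rightmost one.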
+/// +/// ```aiken +/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] +/// ``` +pub fn foldl( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) +} + +fn do_foldl( + self: ByteArray, + zero: result, + len: Int, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor == len { + zero + } else { + do_foldl( + self, + with(builtin.index_bytearray(self, cursor), zero), + len, + cursor + 1, + with, + ) + } +} + +test foldl_1() { + foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldl_2() { + foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +} + +test foldl_3() { + foldl( + #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", + 0, + fn(byte, acc) { acc * 256 + byte }, + ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 +} + +test foldl_4() { + foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] +} + +/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. +/// +/// ```aiken +/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] +/// ``` +pub fn foldr( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) +} + +fn do_foldr( + self: ByteArray, + zero: result, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor < 0 { + zero + } else { + do_foldr( + self, + with(builtin.index_bytearray(self, cursor), zero), + cursor - 1, + with, + ) + } +} + +test foldr_1() { + foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldr_2() { + foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +} + +test foldr_3() { + foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] +} + +/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] +/// ``` +pub fn reduce( + self: ByteArray, + zero: result, + with: fn(result, Int) -> result, +) -> result { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce(#[], #[], push) == #[] +} + +test reduce_2() { + reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] +} + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_big_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(True, self) +} + +test to_int_big_endian_1() { + to_int_big_endian(#"0f4240") == 1_000_000 +} + +test to_int_big_endian_2() { + to_int_big_endian(#"00000f4240") == 1_000_000 +} + +test to_int_big_endian_3() { + to_int_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
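+/// Trailing zero bytes (the most-significant end) do not change the result.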
+/// +/// ```aiken +/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 +/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_little_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(False, self) +} + +test to_int_little_endian_1() { + to_int_little_endian(#"40420f") == 1_000_000 +} + +test to_int_little_endian_2() { + to_int_little_endian(#"40420f0000") == 1_000_000 +} + +test to_int_little_endian_3() { + to_int_little_endian(#"0000000000000000") == 0 +} + +/// Convert a `ByteArray` into a `String`. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). +/// +/// ```aiken +/// bytearray.to_string(#"414243") == "ABC" +/// bytearray.to_string(some_hash) => 💥 +/// ``` +pub fn to_string(self: ByteArray) -> String { + builtin.decode_utf8(self) +} + +test to_string_1() { + to_string("") == @"" +} + +test to_string_2() { + to_string("ABC") == @"ABC" +} + +/// Encode a `ByteArray` as a hexidecimal `String`. +/// +/// ```aiken +/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" +/// ``` +pub fn to_hex(self: ByteArray) -> String { + self + |> encode_base16(builtin.length_of_bytearray(self) - 1, "") + |> builtin.decode_utf8 +} + +test to_hex_1() { + to_hex("Hello world!") == @"48656C6C6F20776F726C6421" +} + +test to_hex_2() { + to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" +} + +/// Checks whether a `ByteArray` starts with a given prefix. +/// +/// ```aiken +/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True +/// bytearray.starts_with("", prefix: "") == True +/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False +/// ``` +pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { + let prefix_length = length(prefix) + if length(self) < prefix_length { + False + } else { + take(self, prefix_length) == prefix + } +} + +test starts_with_1() { + starts_with("", "") +} + +test starts_with_2() { + starts_with("Hello, World!", "Hello, World!") +} + +test starts_with_3() { + !starts_with("Hello, World!", "hello") +} + +test starts_with_4() { + !starts_with("", "World") +} + +test starts_with_5() { + starts_with("Hello, World", "Hello") +} + +test starts_with_6() { + !starts_with("foo", "foo_") +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak new file mode 100644 index 00000000..217749e9 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak @@ -0,0 +1,156 @@ +use aiken/builtin.{bytearray_to_integer, decode_utf8} +use aiken/math +use aiken/option +use aiken/primitive/bytearray + +// ## Combining + +/// Compare two integers. 
+/// +/// ```aiken +/// int.compare(14, 42) == Less +/// int.compare(14, 14) == Equal +/// int.compare(42, 14) == Greater +/// ``` +pub fn compare(left: Int, right: Int) -> Ordering { + if left < right { + Less + } else if left > right { + Greater + } else { + Equal + } +} + +// ## Transforming + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_big_endian(self: ByteArray) -> Int { + bytearray_to_integer(True, self) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"0f4240") == 1_000_000 +} + +test from_bytearray_big_endian_2() { + from_bytearray_big_endian(#"00000f4240") == 1_000_000 +} + +test from_bytearray_big_endian_3() { + from_bytearray_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 +/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_little_endian(self: ByteArray) -> Int { + bytearray_to_integer(False, self) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"40420f") == 1_000_000 +} + +test from_bytearray_little_endian_2() { + from_bytearray_little_endian(#"40420f0000") == 1_000_000 +} + +test from_bytearray_little_endian_3() { + from_bytearray_little_endian(#"0000000000000000") == 0 +} + +/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. +/// +/// ```aiken +/// int.from_utf8("14") == Some(14) +/// int.from_utf8("-42") == Some(-42) +/// int.from_utf8("007") == Some(7) +/// int.from_utf8("foo") == None +/// int.from_utf8("1.0") == None +/// int.from_utf8("1-2") == None +/// ``` +pub fn from_utf8(bytes: ByteArray) -> Option { + bytes + |> bytearray.foldr( + Some((0, 0)), + fn(byte, st) { + when st is { + None -> None + Some((n, e)) -> + if byte < 48 || byte > 57 { + if byte == 45 { + Some((-n, 0)) + } else { + None + } + } else if n < 0 { + None + } else { + let digit = byte - 48 + Some((n + digit * math.pow(10, e), e + 1)) + } + } + }, + ) + |> option.map(fn(tuple) { tuple.1st }) +} + +test from_utf8_1() { + from_utf8("0017") == Some(17) +} + +test from_utf8_2() { + from_utf8("42") == Some(42) +} + +test from_utf8_3() { + from_utf8("1337") == Some(1337) +} + +test from_utf8_4() { + from_utf8("-14") == Some(-14) +} + +test from_utf8_5() { + from_utf8("foo") == None +} + +test from_utf8_6() { + from_utf8("1-2") == None +} + +/// Convert an `Int` to its `String` representation. 
+/// +/// ```aiken +/// int.to_string(42) == @"42" +/// ``` +pub fn to_string(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test to_string_1() { + to_string(0) == @"0" +} + +test to_string_2() { + to_string(5) == @"5" +} + +test to_string_3() { + to_string(42) == @"42" +} + +test to_string_4() { + to_string(200) == @"200" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak new file mode 100644 index 00000000..35fa5567 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak @@ -0,0 +1,139 @@ +use aiken/builtin.{ + append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, +} + +// ## Constructing + +/// Convert a `ByteArray` into a `String` +/// +/// > [!WARNING] +/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). +/// +/// ```aiken +/// string.from_bytearray("foo") == @"foo" +/// string.from_bytearray(#"666f6f") == @"foo" +/// string.from_bytearray(some_hash) -> fail +/// ``` +pub fn from_bytearray(bytes: ByteArray) -> String { + decode_utf8(bytes) +} + +test from_bytearray_1() { + from_bytearray(#[]) == @"" +} + +test from_bytearray_2() { + from_bytearray(#[65, 66, 67]) == @"ABC" +} + +test from_bytearray_3() { + from_bytearray("ABC") == @"ABC" +} + +/// Convert an `Int` to its `String` representation. +/// +/// ```aiken +/// string.from_int(42) == @"42" +/// ``` +pub fn from_int(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test from_int_1() { + from_int(0) == @"0" +} + +test from_int_2() { + from_int(5) == @"5" +} + +test from_int_3() { + from_int(42) == @"42" +} + +test from_int_4() { + from_int(200) == @"200" +} + +// ## Combining + +/// Combine two `String` together. +/// +/// ```aiken +/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" +/// ``` +pub fn concat(left: String, right: String) -> String { + append_string(left, right) +} + +test concat_1() { + concat(@"", @"") == @"" +} + +test concat_2() { + concat(@"", @"foo") == concat(@"foo", @"") +} + +test concat_3() { + concat(left: @"Hello", right: @", World!") == @"Hello, World!" +} + +/// Join a list of strings, separated by a given _delimiter_. 
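+/// The delimiter is only inserted between elements; joining an empty list yields an empty string.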
+/// +/// ```aiken +/// string.join([], @"+") == @"" +/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" +/// ``` +pub fn join(list: List, delimiter: String) -> String { + do_join(list, encode_utf8(delimiter), #"") + |> decode_utf8 +} + +fn do_join(xs, delimiter, bytes) { + when xs is { + [] -> bytes + [x, ..rest] -> + do_join( + rest, + delimiter, + if length_of_bytearray(bytes) == 0 { + encode_utf8(x) + } else { + append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) + }, + ) + } +} + +test join_1() { + join([], @",") == @"" +} + +test join_2() { + join([@"a", @"b", @"c"], @",") == @"a,b,c" +} + +// ## Transforming + +/// Convert a `String` into a `ByteArray` +/// +/// ```aiken +/// string.to_bytearray(@"foo") == "foo" +/// ``` +pub fn to_bytearray(self: String) -> ByteArray { + encode_utf8(self) +} + +test to_bytearray_1() { + to_bytearray(@"") == "" +} + +test to_bytearray_2() { + to_bytearray(@"ABC") == #[65, 66, 67] +} + +test to_bytearray_3() { + to_bytearray(@"ABC") == "ABC" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak new file mode 100644 index 00000000..0167b90f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak @@ -0,0 +1,86 @@ +use aiken/crypto.{ + Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, +} + +/// A general structure for representing an on-chain `Credential`. +/// +/// Credentials are always one of two kinds: a direct public/private key +/// pair, or a script (native or Plutus). +pub type Credential { + VerificationKey(VerificationKeyHash) + Script(ScriptHash) +} + +// ## Constructing + +/// A Cardano `Address` typically holding one or two credential references. +/// +/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are +/// completely excluded from Plutus contexts. Thus, from an on-chain +/// perspective only exists addresses of type 00, 01, ..., 07 as detailed +/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). +pub type Address { + payment_credential: PaymentCredential, + stake_credential: Option, +} + +/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. +pub fn from_script(script: Hash) -> Address { + Address { payment_credential: Script(script), stake_credential: None } +} + +/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. +pub fn from_verification_key(vk: Hash) -> Address { + Address { payment_credential: VerificationKey(vk), stake_credential: None } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_key( + self: Address, + vk: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(VerificationKey(vk))), + } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. 
This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_script( + self: Address, + script: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(Script(script))), + } +} + +/// Represent a type of object that can be represented either inline (by hash) +/// or via a reference (i.e. a pointer to an on-chain location). +/// +/// This is mainly use for capturing pointers to a stake credential +/// registration certificate in the case of so-called pointer addresses. +pub type Referenced { + Inline(a) + Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } +} + +/// A `StakeCredential` represents the delegation and rewards withdrawal conditions +/// associated with some stake address / account. +/// +/// A `StakeCredential` is either provided inline, or, by reference using an +/// on-chain pointer. +/// +/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). +pub type StakeCredential = + Referenced + +/// A 'PaymentCredential' represents the spending conditions associated with +/// some output. Hence, +/// +/// - a `VerificationKey` captures an output locked by a public/private key pair; +/// - and a `Script` captures an output locked by a native or Plutus script. +/// +pub type PaymentCredential = + Credential diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak new file mode 100644 index 00000000..2ebeaa91 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak @@ -0,0 +1,30 @@ +use aiken/primitive/bytearray +use cardano/address.{Credential, Script, VerificationKey} + +pub fn compare(left: Credential, right: Credential) -> Ordering { + when left is { + Script(left) -> + when right is { + Script(right) -> bytearray.compare(left, right) + _ -> Less + } + VerificationKey(left) -> + when right is { + Script(_) -> Greater + VerificationKey(right) -> bytearray.compare(left, right) + } + } +} + +test compare_matrix() { + and { + (compare(Script(""), Script("")) == Equal)?, + (compare(VerificationKey(""), VerificationKey("")) == Equal)?, + (compare(Script(""), VerificationKey("")) == Less)?, + (compare(VerificationKey(""), Script("")) == Greater)?, + (compare(Script("01"), Script("02")) == Less)?, + (compare(Script("02"), Script("01")) == Greater)?, + (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, + (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak new file mode 100644 index 00000000..664a3983 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak @@ -0,0 +1,920 @@ +use aiken/builtin +use aiken/collection/dict.{Dict, from_ascending_pairs_with} +use aiken/collection/list +use aiken/crypto.{Blake2b_224, Hash, Script} +use 
aiken/option + +/// Lovelace is now a type wrapper for Int. +pub type Lovelace = + Int + +/// A type-alias for a `PolicyId`. A `PolicyId` is always 28-byte long +pub type PolicyId = + Hash + +/// Ada, the native currency, isn't associated with any `PolicyId` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_policy_id = "" + +/// A type-alias for 'AssetName`, which are free-form byte-arrays between +/// 0 and 32 bytes. +pub type AssetName = + ByteArray + +/// Ada, the native currency, isn't associated with any `AssetName` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_asset_name = "" + +/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). +/// +/// > [!IMPORTANT] +/// > This type maintain some invariants by construction; in particular, a `Value` will never contain a +/// zero quantity of a particular token. +pub opaque type Value { + inner: Dict>, +} + +// ## Constructing + +/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) +/// and a given quantity. +pub fn from_asset( + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + zero + } else { + let asset = + dict.empty + |> dict.insert(asset_name, quantity) + dict.empty + |> dict.insert(policy_id, asset) + |> Value + } +} + +/// Promote an arbitrary list of assets into a `Value`. This function fails +/// (i.e. halts the program execution) if: +/// +/// - there's any duplicate amongst `PolicyId`; +/// - there's any duplicate amongst `AssetName`; +/// - the `AssetName` aren't sorted in ascending lexicographic order; or +/// - any asset quantity is null. +/// +/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, +/// while checking for internal invariants. +pub fn from_asset_list(xs: Pairs>) -> Value { + xs + |> list.foldr( + dict.empty, + fn(inner, acc) { + expect Pair(p, [_, ..] as x) = inner + x + |> from_ascending_pairs_with(fn(v) { v != 0 }) + |> dict.insert_with( + acc, + p, + _, + fn(_, _, _) { + fail @"Duplicate policy in the asset list." 
+ }, + ) + }, + ) + |> Value +} + +test from_asset_list_1() { + let v = from_asset_list([]) + v == zero +} + +test from_asset_list_2() fail { + let v = from_asset_list([Pair(#"33", [])]) + v == zero +} + +test from_asset_list_3() fail { + let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) + v != zero +} + +test from_asset_list_4() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) + flatten(v) == [(#"33", #"", 1)] +} + +test from_asset_list_5() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) + flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] +} + +test from_asset_list_6() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + ], + ) + v != zero +} + +test from_asset_list_7() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), + ], + ) + v != zero +} + +test from_asset_list_8() { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +test from_asset_list_9() { + let v = + from_asset_list( + [ + Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +/// Construct a `Value` from a lovelace quantity. +/// +/// Friendly reminder: 1 Ada = 1.000.000 Lovelace +pub fn from_lovelace(quantity: Int) -> Value { + from_asset(ada_policy_id, ada_asset_name, quantity) +} + +/// Construct an empty `Value` with nothing in it. +pub const zero: Value = Value { inner: dict.empty } + +// ## Inspecting + +/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. +pub fn is_zero(self: Value) -> Bool { + self == zero +} + +/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. +/// The second parameter is provided as `Data`, allowing to conveniently compare serialized +/// datums or similar structurually equivalent types (such as `Pairs>`). +/// +/// The third argument is a callback function to assert the left and right lovelace +/// quantities. Its first argument refers to the quantity of the first argument of +/// `match`, and the second argument of the callback to the quantity of the second +/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. 
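+/// All assets other than lovelace must be strictly equal on both sides for `match` to return `True`.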
+/// +/// ```aiken +/// const value: Value = +/// assets.from_lovelace(30) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// const datum: Data = +/// assets.from_lovelace(20) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// True == assets.match(value, datum, >=) +/// +/// False == assets.match(value, datum, ==) +/// +/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { +/// 2 * datum_lovelace >= value_lovelace +/// }) +/// ``` +pub fn match( + left: Value, + right: Data, + assert_lovelace: fn(Lovelace, Lovelace) -> Bool, +) -> Bool { + builtin.choose_data( + right, + False, + { + let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) + let left_assets: Data = left_assets + let left_lovelace = + when left_lovelace is { + Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd + None -> 0 + } + when builtin.un_map_data(right) is { + [] -> left_assets == right && assert_lovelace(left_lovelace, 0) + [first_asset, ..right_assets] -> + if first_asset.1st == builtin.b_data(ada_policy_id) { + and { + assert_lovelace( + left_lovelace, + builtin.un_i_data( + builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, + ), + ), + left_assets == builtin.map_data(right_assets), + } + } else { + and { + assert_lovelace(left_lovelace, 0), + left_assets == right, + } + } + } + }, + False, + False, + False, + ) +} + +const fixture_match_value: Value = + zero + |> add(ada_policy_id, ada_asset_name, 42) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_foo_02: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("bar", "01", 42) + +const fixture_match_data_altered_foo_01: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 14) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_bar: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + +const fixture_match_data_extra_policy: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + |> add("baz", "01", 1) + +const fixture_match_data_extra_asset: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("foo", "03", 1) + |> add("bar", "01", 42) + +const fixture_match_data_no_assets: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + +test match_1() { + match(fixture_match_value, fixture_match_data, fn(_, _) { True }) +} + +test match_2() { + !match( + fixture_match_value, + fixture_match_data, + fn(source, target) { source == target }, + ) +} + +test match_3() { + !match( + fixture_match_value, + fixture_match_data_missing_foo_02, + fn(_, _) { True }, + ) +} + +test match_4() { + !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) +} + +test match_5() { + !match( + fixture_match_value, + fixture_match_data_altered_foo_01, + fn(_, _) { True }, + ) +} + +test match_6() { + !match( + fixture_match_value, + fixture_match_data_extra_policy, + fn(_, _) { True }, + ) +} + +test match_7() { + !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) +} + +test match_8() { + 
!match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) +} + +test match_9() { + match(zero, zero, ==) +} + +test match_10() { + match( + without_lovelace(fixture_match_value), + without_lovelace(fixture_match_value), + fn(left, right) { left == 0 && right == 0 }, + ) +} + +test match_11() { + match( + without_lovelace(fixture_match_value), + fixture_match_value, + fn(left, right) { left == 0 && right > 0 }, + ) +} + +test match_12() { + match( + fixture_match_value, + without_lovelace(fixture_match_value), + fn(left, right) { left > 0 && right == 0 }, + ) +} + +test match_13() { + match( + zero |> add(ada_policy_id, ada_asset_name, 42), + zero, + fn(left, right) { left == 42 && right == 0 }, + ) +} + +test match_14() { + match( + zero, + zero |> add(ada_policy_id, ada_asset_name, 42), + fn(left, right) { left == 0 && right == 42 }, + ) +} + +const fixture_match_benchmark_left: Value = + zero + |> add(ada_policy_id, ada_asset_name, 1337) + |> add( + #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", + "MATTR", + 200, + ) + |> add( + #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", + "ProphecyPoster076", + 1, + ) + |> add( + #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", + "BISON", + 12_004_999_999, + ) + |> add( + #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", + "SNEK", + 1486, + ) + |> add( + #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", + "MAYZSilverFoundersEdition0035", + 1, + ) + |> add( + #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", + "CardanoSpaceSession", + 20, + ) + |> add( + #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", + "ADAOGovernanceToken", + 1, + ) + |> add( + #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", + "HOSKY", + 400_001_000, + ) + |> add( + #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", + "LQ", + 10_635_899, + ) + |> add( + #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", + "GrandmasterAdventurer659", + 1, + ) + |> add( + #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", + "AdventurerOfThiolden8105", + 1, + ) + |> add( + #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", + "DiscoSolaris3725", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld535", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1213", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1518", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1537", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld4199", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld3767", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae1", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae2", + 1, + ) + +const fixture_match_benchmark_right: Data = fixture_match_benchmark_left + +test match_benchmark() { + match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) +} + +test match_benchmark_vs() { + let data: Data = fixture_match_benchmark_right + expect pairs: Pairs> = data + fixture_match_benchmark_left == from_asset_list(pairs) +} + +/// A specialized version of `quantity_of` for the Ada currency. 
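+///
+/// For instance (a small sketch using only helpers defined in this module):
+///
+/// ```aiken
+/// assets.lovelace_of(assets.from_lovelace(42)) == 42
+///
+/// assets.lovelace_of(assets.zero) == 0
+/// ```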
+pub fn lovelace_of(self: Value) -> Int { + quantity_of(self, ada_policy_id, ada_asset_name) +} + +/// A list of all token policies in that Value with non-zero tokens. +pub fn policies(self: Value) -> List { + dict.keys(self.inner) +} + +/// Extract the quantity of a given asset. +pub fn quantity_of( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, +) -> Int { + self.inner + |> dict.get(policy_id) + |> option.and_then(dict.get(_, asset_name)) + |> option.or_else(0) +} + +/// Get all tokens associated with a given policy. +pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { + self.inner + |> dict.get(policy_id) + |> option.or_else(dict.empty) +} + +// ## Combining + +/// Add a (positive or negative) quantity of a single token to a assets. +/// This is more efficient than [`merge`](#merge) for a single asset. +pub fn add( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + self + } else { + let helper = + fn(_, left, _right) { + let inner_result = + dict.insert_with( + left, + asset_name, + quantity, + fn(_k, ql, qr) { + let q = ql + qr + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(inner_result) { + None + } else { + Some(inner_result) + } + } + + Value( + dict.insert_with( + self.inner, + policy_id, + dict.from_ascending_pairs([Pair(asset_name, quantity)]), + helper, + ), + ) + } +} + +test add_1() { + let v = + zero + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -321) + v == zero +} + +test add_2() { + let v = + from_lovelace(123) + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -1 * 321) + v == from_lovelace(123) +} + +test add_3() { + let v = + from_lovelace(1) + |> add(ada_policy_id, ada_asset_name, 2) + |> add(ada_policy_id, ada_asset_name, 3) + v == from_lovelace(6) +} + +test add_4() { + let v = + zero + |> add(#"acab", #"beef", 0) + v == zero +} + +test add_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + |> add(#"acab", #"beef", 0) + v == zero +} + +/// Combine two `Value` together. +pub fn merge(left v0: Value, right v1: Value) -> Value { + Value( + dict.union_with( + v0.inner, + v1.inner, + fn(_, a0, a1) { + let result = + dict.union_with( + a0, + a1, + fn(_, q0, q1) { + let q = q0 + q1 + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(result) { + None + } else { + Some(result) + } + }, + ), + ) +} + +test merge_1() { + let v1 = from_lovelace(1) + let v2 = from_lovelace(-1) + merge(v1, v2) == zero +} + +test merge_2() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"01", #"", 2) + let v3 = from_asset(#"02", #"", 3) + let v = + from_lovelace(42) + |> merge(v3) + |> merge(v1) + |> merge(v2) + + flatten(v) == [ + (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), + ] +} + +test merge_3() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + let v3 = from_asset(#"01", #"", 1) + + let v = + zero + |> merge(v1) + |> merge(v2) + |> merge(v3) + + flatten(v) == [(#"01", #"", 1)] +} + +test merge_4() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + + merge(v1, v2) == zero +} + +test merge_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + + merge(zero, v) == zero +} + +/// Negates quantities of all tokens (including Ada) in that `Value`. 
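+/// Negating twice gives back the original value.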
+/// +/// ``` +/// v1 +/// |> assets.negate +/// |> assets.merge(v1) +/// |> assets.is_zero +/// // True +/// ``` +pub fn negate(self: Value) -> Value { + dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) + |> Value +} + +/// Get a subset of the assets restricted to the given policies. +pub fn restricted_to(self: Value, mask: List) -> Value { + list.foldr( + policies(self), + zero, + fn(policy_id, value) { + if list.has(mask, policy_id) { + dict.foldr( + tokens(self, policy_id), + value, + fn(asset_name, quantity, value) { + add(value, policy_id, asset_name, quantity) + }, + ) + } else { + value + } + }, + ) +} + +test restricted_to_1() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, []) == zero +} + +test restricted_to_2() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, [ada_policy_id]) == from_lovelace(42) +} + +test restricted_to_3() { + let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) + restricted_to(self, ["foo", "bar"]) == without_lovelace(self) +} + +test restricted_to_4() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, ["foo"]) == without_lovelace(self) +} + +test restricted_to_5() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, [ada_policy_id, "foo"]) == self +} + +/// Get a `Value` excluding Ada. +pub fn without_lovelace(self: Value) -> Value { + dict.delete(self.inner, ada_policy_id) + |> Value +} + +test without_lovelace_1() { + let v = from_lovelace(1000000) + without_lovelace(v) == zero +} + +test without_lovelace_2() { + let v = from_lovelace(1000000) + let v2 = from_lovelace(50000000) + without_lovelace(v) == without_lovelace(v2) +} + +test without_lovelace_3() { + let v = + from_asset(#"010203", #"040506", 100) + |> add(ada_policy_id, ada_asset_name, 100000000) + let v2 = from_asset(#"010203", #"040506", 100) + without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 +} + +// ## Transforming + +/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. +/// +/// Handy to manipulate values as uniform lists. +pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + [(policy_id, asset_name, quantity), ..xs] + }, + ) + }, + ) +} + +/// Flatten a `Value` as a list of results, possibly discarding some along the way. +/// +/// When the transform function returns `None`, the result is discarded altogether. 
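+///
+/// For instance, assuming `v` is some `Value`, one could keep only its non-Ada assets:
+///
+/// ```aiken
+/// assets.flatten_with(
+///   v,
+///   fn(policy_id, asset_name, quantity) {
+///     if policy_id == assets.ada_policy_id {
+///       None
+///     } else {
+///       Some((policy_id, asset_name, quantity))
+///     }
+///   },
+/// )
+/// ```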
+pub fn flatten_with( + self: Value, + with: fn(PolicyId, AssetName, Int) -> Option, +) -> List { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + when with(policy_id, asset_name, quantity) is { + None -> xs + Some(x) -> [x, ..xs] + } + }, + ) + }, + ) +} + +test flatten_with_1() { + flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == [] +} + +test flatten_with_2() { + let v = + zero + |> add("a", "1", 14) + |> add("b", "", 42) + |> add("a", "2", 42) + + flatten_with( + v, + fn(p, a, q) { + if q == 42 { + Some((p, a)) + } else { + None + } + }, + ) == [("a", "2"), ("b", "")] +} + +/// Reduce a value into a single result +/// +/// ``` +/// assets.zero +/// |> assets.add("a", "1", 10) +/// |> assets.add("b", "2", 20) +/// |> assets.reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) +/// // 30 +/// ``` +pub fn reduce( + self: Value, + start: result, + with: fn(PolicyId, AssetName, Int, result) -> result, +) -> result { + dict.foldr( + self.inner, + start, + fn(policy_id, asset_list, result) { + dict.foldr(asset_list, result, with(policy_id, _, _, _)) + }, + ) +} + +test reduce_1() { + let v = + zero + |> add("a", "1", 10) + |> add("b", "2", 20) + let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) + result == 30 +} + +test reduce_2() { + let v = + zero + |> add("a", "1", 5) + |> add("a", "2", 15) + |> add("b", "", 10) + let result = + reduce( + v, + [], + fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] }, + ) + result == [("a", "1"), ("a", "2"), ("b", "")] +} + +test reduce_3() { + let v = zero + let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity }) + result == 1 +} + +/// Convert the value into a dictionary of dictionaries. +pub fn to_dict(self: Value) -> Dict> { + self.inner +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak new file mode 100644 index 00000000..f0b6d258 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak @@ -0,0 +1,93 @@ +use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} + +pub type StakePoolId = + Hash + +/// An on-chain certificate attesting of some operation. Publishing +/// certificates triggers different kind of rules; most of the time, +/// they require signatures from specific keys. +pub type Certificate { + /// Register a stake credential with an optional deposit amount. + /// The deposit is always present when using the new registration certificate + /// format available since the Conway era. + RegisterCredential { + credential: Credential, + /// > [!NOTE] + /// > The `deposit` ought to be an `Option`, but due to unfortunate + /// > circumstances it will always be instantiated to `None` even when set in + /// > the host transaction. This is what the `Never` type captures here. + deposit: Never, + } + /// Un-Register a stake credential with an optional refund amount + /// The deposit is always present when using the new de-registration certificate + /// format available since the Conway era. 
+ UnregisterCredential { + credential: Credential, + /// > [!NOTE] + /// > The `refund` ought to be an `Option`, but due to unfortunate + /// > circumstances it will always be instantiated to `None` even when set in + /// > the host transaction. This is what the `Never` type captures here. + refund: Never, + } + /// Delegate stake to a [Delegate](#Delegate). + DelegateCredential { credential: Credential, delegate: Delegate } + /// Register and delegate staking credential to a Delegatee in one certificate. + RegisterAndDelegateCredential { + credential: Credential, + delegate: Delegate, + deposit: Lovelace, + } + /// Register a delegate representative (a.k.a DRep). The deposit is explicit and + /// is refunded when the delegate steps down (unregister). + RegisterDelegateRepresentative { + delegate_representative: Credential, + deposit: Lovelace, + } + /// Update a delegate representative (a.k.a DRep). The certificate also contains + /// metadata which aren't visible on-chain. + UpdateDelegateRepresentative { delegate_representative: Credential } + /// UnRegister a delegate representative, and refund back its past deposit. + UnregisterDelegateRepresentative { + delegate_representative: Credential, + refund: Lovelace, + } + /// Register a new stake pool + RegisterStakePool { + /// The hash digest of the stake pool's cold (public) key + stake_pool: StakePoolId, + /// The hash digest of the stake pool's VRF (public) key + vrf: VerificationKeyHash, + } + /// Retire a stake pool. 'at_epoch' indicates in which the retirement will take place + RetireStakePool { stake_pool: StakePoolId, at_epoch: Int } + /// Authorize a Hot credential for a specific Committee member's cold credential + AuthorizeConstitutionalCommitteeProxy { + constitutional_committee_member: Credential, + proxy: Credential, + } + /// Step down from the constitutional committee as a member. + RetireFromConstitutionalCommittee { + constitutional_committee_member: Credential, + } +} + +/// A type of stake delegation that can be either block-production, vote or +/// both. Note that delegation types aren't cancelling one another, so it is +/// possible to delegate block production in one transaction, and delegate vote +/// in another. This second delegation **does NOT** invalidate the first one. 
+pub type Delegate { + DelegateBlockProduction { stake_pool: StakePoolId } + DelegateVote { delegate_representative: DelegateRepresentative } + DelegateBoth { + stake_pool: StakePoolId, + delegate_representative: DelegateRepresentative, + } +} + +pub type DelegateRepresentative { + Registered(Credential) + AlwaysAbstain + AlwaysNoConfidence +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak new file mode 100644 index 00000000..3ec96800 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak @@ -0,0 +1,109 @@ +use aiken/collection.{Index} +use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} +use aiken/math/rational.{Rational} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} +use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} + +pub type ProposalProcedure { + deposit: Lovelace, + return_address: Credential, + governance_action: GovernanceAction, +} + +pub type GovernanceAction { + ProtocolParameters { + /// The last governance action of type 'ProtocolParameters'. They must all + /// form a chain. + ancestor: Option, + /// The new proposed protocol parameters. Only values set to `Some` are relevant. + new_parameters: ProtocolParametersUpdate, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the new protocol parameters to be deemed valid. + guardrails: Option, + } + HardFork { + /// The last governance action of type `HardFork`. They must all + /// form a chain. + ancestor: Option, + /// The new proposed version. Few rules apply to proposing new versions: + /// + /// - The `major` component, if incremented, must be exactly one more than the current. + /// - The `minor` component, if incremented, must be exactly one more than the current. + /// - If the `major` component is incremented, `minor` must be set to `0`. + /// - Neither `minor` nor `major` can be decremented. + new_version: ProtocolVersion, + } + TreasuryWithdrawal { + /// A collection of beneficiaries, which can be plain verification key + /// hashes or script hashes (e.g. DAO). + beneficiaries: Pairs, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the withdrawals to be authorized. + guardrails: Option, + } + NoConfidence { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + } + ConstitutionalCommittee { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// Constitutional members to be removed. + evicted_members: List, + /// Constitutional members to be added. + added_members: Pairs, + /// The new quorum value, as a ratio of a numerator and a denominator. The + /// quorum specifies the threshold of 'Yes' votes necessary for the + /// constitutional committee to accept a proposal procedure. 
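+    ///
+    /// For instance, a two-thirds quorum would be expressed as a `Rational`
+    /// (a sketch using `aiken/math/rational`):
+    ///
+    /// ```aiken
+    /// expect Some(two_thirds) = rational.new(2, 3)
+    /// ```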
+ quorum: Rational, + } + NewConstitution { + /// The last governance action of type `Constitution` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// The new proposed constitution. + constitution: Constitution, + } + NicePoll +} + +pub type Vote { + No + Yes + Abstain +} + +pub type TransactionId = + Hash + +pub type GovernanceActionId { + transaction: TransactionId, + proposal_procedure: Index, +} + +pub type ProtocolVersion { + major: Int, + minor: Int, +} + +pub type Constitution { + guardrails: Option, +} + +/// An epoch number after which constitutional committee member +/// mandate expires. +pub type Mandate = + Int + +pub type Voter { + ConstitutionalCommitteeMember(Credential) + DelegateRepresentative(Credential) + StakePool(VerificationKeyHash) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak new file mode 100644 index 00000000..d9e7be95 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak @@ -0,0 +1,360 @@ +use aiken/math/rational.{Rational} +use cardano/assets.{Lovelace} + +pub opaque type ProtocolParametersUpdate { + inner: Pairs, +} + +pub type ScriptExecutionPrices { + memory: Rational, + cpu: Rational, +} + +pub type ExecutionUnits { + memory: Int, + cpu: Int, +} + +pub type StakePoolOperatorVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Rational, + Void, + Void, + Void, + Void, + >, +} + +pub type DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + constitution: Rational, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Void, + Rational, + Rational, + Rational, + Rational, + >, + treasury_withdrawal: Rational, +} + +pub type ProtocolParametersThresholds< + security, + network, + economic, + technical, + governance, +> { + security_group: security, + network_group: network, + economic_group: economic, + technical_group: technical, + governance_group: governance, +} + +pub type ConstitutionalCommitteeThresholds { + default: Rational, + under_no_confidence: Rational, +} + +/// The linear coefficient that intervenes in the transaction fee calculation. +/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. +pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 0, into_int) +} + +/// The constant factor that intervenes in the transaction fee calculation. It is +/// a flat cost of lovelace that is added to every fee calculation. +pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 1, into_int) +} + +/// The maximum size of a serialized block body, expressed in bytes. +pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 2, into_int) +} + +/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. 
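+///
+/// Like the other getters in this module, it returns `None` when the proposed
+/// update leaves the parameter untouched. A guardrails-style check might look
+/// like this (a sketch; `update` is an assumed `ProtocolParametersUpdate` and
+/// the bound is hypothetical):
+///
+/// ```aiken
+/// when max_transaction_size(update) is {
+///   // The proposal does not change this parameter.
+///   None -> True
+///   // Reject proposals pushing the limit above an arbitrary bound.
+///   Some(new_size) -> new_size <= 32768
+/// }
+/// ```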
+pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 3, into_int) +} + +/// The maximum size of a serialized block header, expressed in bytes. +pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 4, into_int) +} + +/// The required deposit amount when registering stake credentials, expressed in Lovelace. +pub fn stake_credential_deposit( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 5, into_int) +} + +/// The required deposit amount when registering a stake pool, expressed in Lovelace. +pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 6, into_int) +} + +/// The maximum number of epoch in the future allowed for a stake pool retirement to be scheduled. +pub fn stake_pool_retirement_horizon( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 7, into_int) +} + +/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_. +pub fn desired_number_of_stake_pools( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 8, into_int) +} + +/// A parameter controlling the influence of an pool owner's pledge on the rewards. Also known as _'a0'_. +pub fn stake_pool_pledge_influence( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 9, into_rational) +} + +/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentivies model. Also known as _'ρ'_. +pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 10, into_rational) +} + +/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_. +pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 11, into_rational) +} + +/// Minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace. +pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 16, into_int) +} + +/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per Byte, and is also known as the 'coins per utxo byte' parameter. +pub fn min_utxo_deposit_coefficient( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 17, into_int) +} + +/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version. +pub fn cost_models(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 18, identity) +} + +/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts. +pub fn script_execution_prices( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 19, into_script_execution_prices) +} + +/// The maximum execution units allowed for a single transaction. +pub fn max_transaction_execution_units( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 20, into_execution_units) +} + +/// The maximum execution units allowed for a single block. 
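+///
+/// The result is decoded into an [`ExecutionUnits`](#ExecutionUnits) record, so
+/// both budgets can be inspected directly (a sketch; `update` is an assumed
+/// `ProtocolParametersUpdate`):
+///
+/// ```aiken
+/// when max_block_execution_units(update) is {
+///   None -> True
+///   Some(ExecutionUnits { memory, cpu }) -> memory > 0 && cpu > 0
+/// }
+/// ```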
+pub fn max_block_execution_units( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 21, into_execution_units) +} + +/// The maximum size of a serialized value in a transaction output. This effectively limits +/// the maximum kinds of assets that can be sent in a single output. It is expressed in bytes. +pub fn max_value_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 22, into_int) +} + +/// The scaling factor applied to the transaction cost for defining the minimum collateral +/// amount. It is expressed in percent points (so 100 = 100%). +pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 23, into_int) +} + +/// The maximum number of collateral inputs allowed in the transaction. +pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 24, into_int) +} + +/// The various governance voting thresholds pertaining to stake pool operators. +pub fn stake_pool_operator_voting_thresholds( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 25, into_spo_voting_thresholds) +} + +/// The various governance voting thresholds pertaining to delegate representatives +/// (a.k.a DReps). +pub fn delegate_representative_voting_thresholds( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 26, into_drep_voting_thresholds) +} + +/// The minimum number of members in the constitutional committee. Any updates of the committee +/// must leave at least this number of members. +pub fn min_constitutional_committee_size( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 27, into_int) +} + +/// The maximum length of a constitutional committee member, expressed in number of epochs. +pub fn max_constitutional_committee_mandate( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 28, into_int) +} + +/// The lifetime of any governance proposal. An action that hasn't been approved beyond that +/// period is considered inactive and discarded. It is expressed in number of epochs. +pub fn governance_proposal_lifetime( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 29, into_int) +} + +/// The required deposit amount for governance proposal procedures, expressed in Lovelace. +pub fn governance_proposal_deposit( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 30, into_int) +} + +/// The required deposit amount when registering as a delegate representative, expressed in +/// Lovelace. +pub fn delegate_representative_deposit( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 31, into_int) +} + +/// The maximum number of epochs that a delegate representative can stay inactive (i.e. no +/// voting) without becoming _inactive_ and removed from thresholds calculations. +pub fn delegate_representative_max_idle_time( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 32, into_int) +} + +/// The base tier fee coefficient for reference scripts. Reference scripts gets increasingly +/// more expensives every ~24KB, the base coefficient is a multiplicating factor which grows +/// exponentially with each tier. 
+pub fn reference_scripts_tier_fee_initial_factor( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 33, into_rational) +} + +// Internals ------------------------------------------------------------------- + +type ProtocolParametersIndex = + Int + +fn get_protocol_param( + self: Pairs, + ix: ProtocolParametersIndex, + into: fn(Data) -> a, +) -> Option { + when self is { + [] -> None + [Pair(jx, param), ..tail] -> + if ix == jx { + Some(into(param)) + } else { + get_protocol_param(tail, ix, into) + } + } +} + +fn into_int(param: Data) -> Int { + expect param: Int = param + param +} + +fn into_rational(param: Data) -> Rational { + expect [numerator, denominator]: List = param + expect Some(r) = rational.new(numerator, denominator) + r +} + +fn into_execution_units(param: Data) -> ExecutionUnits { + expect [memory, cpu]: List = param + ExecutionUnits { memory, cpu } +} + +fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { + expect [memory, cpu]: List = param + let memory = into_rational(memory) + let cpu = into_rational(cpu) + ScriptExecutionPrices { memory, cpu } +} + +fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, hard_fork, + protocol_parameters_security_group, + ]: List = param + + StakePoolOperatorVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: into_rational(protocol_parameters_security_group), + network_group: Void, + economic_group: Void, + technical_group: Void, + governance_group: Void, + }, + } +} + +fn into_drep_voting_thresholds( + param: Data, +) -> DelegateRepresentativeVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, constitution, hard_fork, + protocol_parameters_network_group, protocol_parameters_economic_group, + protocol_parameters_technical_group, protocol_parameters_governance_group, + treasury_withdrawal, + ]: List = param + + DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + constitution: into_rational(constitution), + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: Void, + network_group: into_rational(protocol_parameters_network_group), + economic_group: into_rational(protocol_parameters_economic_group), + technical_group: into_rational(protocol_parameters_technical_group), + governance_group: into_rational(protocol_parameters_governance_group), + }, + treasury_withdrawal: into_rational(treasury_withdrawal), + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak new file mode 
100644 index 00000000..e723e2d5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak @@ -0,0 +1,62 @@ +use aiken/primitive/bytearray +use cardano/address.{Script} +use cardano/address/credential +use cardano/governance.{ + ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, +} + +pub fn compare(left: Voter, right: Voter) -> Ordering { + when left is { + ConstitutionalCommitteeMember(left) -> + when right is { + ConstitutionalCommitteeMember(right) -> credential.compare(left, right) + _ -> Less + } + DelegateRepresentative(left) -> + when right is { + DelegateRepresentative(right) -> credential.compare(left, right) + ConstitutionalCommitteeMember(_) -> Greater + _ -> Less + } + StakePool(left) -> + when right is { + StakePool(right) -> bytearray.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let cc0 = ConstitutionalCommitteeMember(Script("0")) + let cc1 = ConstitutionalCommitteeMember(Script("1")) + + let drep0 = DelegateRepresentative(Script("0")) + let drep1 = DelegateRepresentative(Script("1")) + + let spo0 = StakePool("0") + let spo1 = StakePool("1") + + and { + (compare(cc0, cc0) == Equal)?, + (compare(cc0, cc1) == Less)?, + (compare(cc1, cc0) == Greater)?, + (compare(drep0, drep0) == Equal)?, + (compare(drep0, drep1) == Less)?, + (compare(drep1, drep0) == Greater)?, + (compare(spo0, spo0) == Equal)?, + (compare(spo0, spo1) == Less)?, + (compare(spo1, spo0) == Greater)?, + (compare(cc0, drep0) == Less)?, + (compare(cc0, drep1) == Less)?, + (compare(cc0, spo0) == Less)?, + (compare(cc0, spo1) == Less)?, + (compare(drep0, cc0) == Greater)?, + (compare(drep0, cc1) == Greater)?, + (compare(drep0, spo0) == Less)?, + (compare(drep0, spo1) == Less)?, + (compare(spo0, cc0) == Greater)?, + (compare(spo0, cc1) == Greater)?, + (compare(spo0, drep0) == Greater)?, + (compare(spo0, drep1) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak new file mode 100644 index 00000000..ff73836a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak @@ -0,0 +1,62 @@ +//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. +//// +//// > [!NOTE] +//// > Unless you know what you're doing, you should prefer using named handlers: +//// > +//// > - `mint` +//// > - `spend` +//// > - `withdraw` +//// > - `publish` +//// > - `vote` +//// > - `propose` + +use aiken/collection.{Index} +use cardano/address.{Credential} +use cardano/assets.{PolicyId} +use cardano/certificate.{Certificate} +use cardano/governance.{ProposalProcedure, Voter} +use cardano/transaction.{OutputReference, Redeemer, Transaction} + +/// A context given to a script by the Cardano ledger when being executed. +/// +/// The context contains information about the entire transaction that contains +/// the script. 
The transaction may also contain other scripts; to distinguish +/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a +/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what +/// purpose) the transaction is being executed. +pub type ScriptContext { + transaction: Transaction, + redeemer: Redeemer, + info: ScriptInfo, +} + +/// Characterizes the script information. The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option`. +pub type ScriptInfo { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Minting(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spending { output: OutputReference, datum: Option } + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdrawing(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publishing { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Voting(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Proposing { at: Index, proposal_procedure: ProposalProcedure } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak new file mode 100644 index 00000000..6511a596 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak @@ -0,0 +1,225 @@ +use aiken/builtin +use aiken/collection.{Index} +use aiken/collection/dict.{Dict} +use aiken/collection/list +use aiken/crypto.{ + Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, +} +use aiken/interval.{Interval} +use aiken/option +use cardano/address.{Address, Credential, Script, VerificationKey} +use cardano/assets.{Lovelace, PolicyId, Value} +use cardano/certificate.{Certificate} +use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} + +pub type TransactionId = + Hash + +/// Characterizes the script purpose. +pub type ScriptPurpose { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Mint(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spend(OutputReference) + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. 
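+    ///
+    /// A redeemer handler may match on this purpose directly; a minimal sketch
+    /// (`purpose` and `my_script_hash` are assumed to be in scope):
+    ///
+    /// ```aiken
+    /// when purpose is {
+    ///   Withdraw(Script(script_hash)) -> script_hash == my_script_hash
+    ///   _ -> False
+    /// }
+    /// ```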
+ Withdraw(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publish { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Vote(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Propose { at: Index, proposal_procedure: ProposalProcedure } +} + +/// A Cardano `Transaction`, as seen by on-chain scripts. +/// +/// Note that this is a representation of a transaction, and not the 1:1 +/// translation of the transaction as seen by the ledger. In particular, +/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs +/// to bootstrap addresses or just transaction metadata. +pub type Transaction { + inputs: List, + reference_inputs: List, + outputs: List, + fee: Lovelace, + mint: Value, + certificates: List, + /// > [!IMPORTANT] + /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + withdrawals: Pairs, + validity_range: ValidityRange, + extra_signatories: List, + /// > [!IMPORTANT] + /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). + redeemers: Pairs, + datums: Dict, + id: TransactionId, + /// > [!IMPORTANT] + /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + votes: Pairs>, + proposal_procedures: List, + current_treasury_amount: Option, + treasury_donation: Option, +} + +/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. +pub type ValidityRange = + Interval + +/// An `Input` made of an output reference and, the resolved value associated with that output. +pub type Input { + output_reference: OutputReference, + output: Output, +} + +/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` +/// corresponds to the position in the output list of the transaction (identified by its id) +/// that produced that output +pub type OutputReference { + transaction_id: Hash, + output_index: Int, +} + +/// A transaction `Output`, with an address, a value and optional datums and script references. +pub type Output { + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +} + +/// An output `Datum`. +pub type Datum { + NoDatum + /// A datum referenced by its hash digest. + DatumHash(DataHash) + /// A datum completely inlined in the output. + InlineDatum(Data) +} + +/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is +/// opaque because it is user-defined and it is the script's responsibility to +/// parse it into its expected form. +pub type Redeemer = + Data + +// ## Querying + +/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in +/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own +/// input. +/// +/// ```aiken +/// validator { +/// spend(datum, redeemer, my_output_reference, self) { +/// expect Some(input) = +/// self.inputs +/// |> transaction.find_input(my_output_reference) +/// } +/// } +/// ``` +pub fn find_input( + inputs: List, + output_reference: OutputReference, +) -> Option { + inputs + |> list.find(fn(input) { input.output_reference == output_reference }) +} + +/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for +/// datums in the witness set, and then for inline datums if it doesn't find any in +/// witnesses. +pub fn find_datum( + outputs: List, + datums: Dict, + datum_hash: DataHash, +) -> Option { + datums + |> dict.get(datum_hash) + |> option.or_try( + fn() { + outputs + |> list.filter_map( + fn(output) { + when output.datum is { + InlineDatum(data) -> + if blake2b_256(builtin.serialise_data(data)) == datum_hash { + Some(data) + } else { + None + } + _ -> None + } + }, + ) + |> list.head + }, + ) +} + +/// Find all outputs that are paying into the given script hash, if any. This is useful for +/// contracts running over multiple transactions. +pub fn find_script_outputs( + outputs: List, + script_hash: ScriptHash, +) -> List { + outputs + |> list.filter( + fn(output) { + when output.address.payment_credential is { + Script(addr_script_hash) -> script_hash == addr_script_hash + VerificationKey(_) -> False + } + }, + ) +} + +// ## Testing + +/// A placeholder / empty `Transaction` to serve as a base in a transaction +/// builder. This is particularly useful for constructing test transactions. 
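+///
+/// A test will typically start from the placeholder and override only the
+/// fields it cares about, using record update syntax (a sketch; `key_hash` is
+/// an assumed `VerificationKeyHash`):
+///
+/// ```aiken
+/// let tx = Transaction { ..placeholder, extra_signatories: [key_hash] }
+/// ```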
+/// +/// Every field is empty or null, and we have in particular: +/// +/// ```aiken +/// use aiken/interval +/// +/// transaction.placeholder.id == +/// #"0000000000000000000000000000000000000000000000000000000000000000" +/// +/// transaction.placeholder.validity_range == interval.everything +/// ``` +pub const placeholder: Transaction = + Transaction { + inputs: [], + reference_inputs: [], + outputs: [], + fee: 0, + mint: assets.zero, + certificates: [], + withdrawals: [], + validity_range: interval.everything, + extra_signatories: [], + redeemers: [], + datums: dict.empty, + id: #"0000000000000000000000000000000000000000000000000000000000000000", + votes: [], + proposal_procedures: [], + current_treasury_amount: None, + treasury_donation: None, + } diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak new file mode 100644 index 00000000..70b7550d --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak @@ -0,0 +1,23 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/transaction.{OutputReference} + +pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { + when bytearray.compare(left.transaction_id, right.transaction_id) is { + Equal -> int.compare(left.output_index, right.output_index) + ordering -> ordering + } +} + +test compare_matrix() { + and { + (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, + (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, + (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, + (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, + (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, + (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, + (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, + (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak new file mode 100644 index 00000000..4fef2cbe --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak @@ -0,0 +1,126 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/address.{Script, VerificationKey} +use cardano/address/credential +use cardano/certificate.{RegisterCredential} +use cardano/governance.{NicePoll, ProposalProcedure, StakePool} +use cardano/governance/voter +use cardano/transaction.{ + Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, +} +use cardano/transaction/output_reference + +pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { + when left is { + Mint(left) -> + when right is { + Mint(right) -> bytearray.compare(left, right) + _ -> Less + } + + Spend(left) -> + when right is { + Spend(right) -> output_reference.compare(left, right) + Mint(_) -> 
Greater + _ -> Less + } + + Withdraw(left) -> + when right is { + Withdraw(right) -> credential.compare(left, right) + Spend(_) | Mint(_) -> Greater + _ -> Less + } + + Publish(left, _) -> + when right is { + Publish(right, _) -> int.compare(left, right) + Spend(_) | Mint(_) | Withdraw(_) -> Greater + _ -> Less + } + + Vote(left) -> + when right is { + Vote(right) -> voter.compare(left, right) + Propose(..) -> Less + _ -> Greater + } + + Propose(left, _) -> + when right is { + Propose(right, _) -> int.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let mint0 = Mint("0") + let mint1 = Mint("1") + + let spend0 = Spend(OutputReference("", 0)) + let spend1 = Spend(OutputReference("", 1)) + + let withdraw0 = Withdraw(VerificationKey("0")) + let withdraw1 = Withdraw(VerificationKey("1")) + + let publish0 = Publish(0, RegisterCredential(Script(""), Never)) + let publish1 = Publish(1, RegisterCredential(Script(""), Never)) + + let vote0 = Vote(StakePool("0")) + let vote1 = Vote(StakePool("1")) + + let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) + let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) + + and { + (compare(mint0, mint0) == Equal)?, + (compare(mint0, mint1) == Less)?, + (compare(mint1, mint0) == Greater)?, + (compare(mint0, spend0) == Less)?, + (compare(mint0, withdraw0) == Less)?, + (compare(mint0, publish0) == Less)?, + (compare(mint0, vote0) == Less)?, + (compare(mint0, propose0) == Less)?, + (compare(spend0, spend0) == Equal)?, + (compare(spend0, spend1) == Less)?, + (compare(spend1, spend0) == Greater)?, + (compare(spend0, mint0) == Greater)?, + (compare(spend0, withdraw0) == Less)?, + (compare(spend0, publish0) == Less)?, + (compare(spend0, vote0) == Less)?, + (compare(spend0, propose0) == Less)?, + (compare(withdraw0, withdraw0) == Equal)?, + (compare(withdraw0, withdraw1) == Less)?, + (compare(withdraw1, withdraw0) == Greater)?, + (compare(withdraw0, mint0) == Greater)?, + (compare(withdraw0, spend0) == Greater)?, + (compare(withdraw0, publish0) == Less)?, + (compare(withdraw0, vote0) == Less)?, + (compare(withdraw0, propose0) == Less)?, + (compare(publish0, publish0) == Equal)?, + (compare(publish0, publish1) == Less)?, + (compare(publish1, publish0) == Greater)?, + (compare(publish0, mint0) == Greater)?, + (compare(publish0, spend0) == Greater)?, + (compare(publish0, withdraw0) == Greater)?, + (compare(publish0, vote0) == Less)?, + (compare(publish0, propose0) == Less)?, + (compare(vote0, vote0) == Equal)?, + (compare(vote0, vote1) == Less)?, + (compare(vote1, vote0) == Greater)?, + (compare(vote0, mint0) == Greater)?, + (compare(vote0, spend0) == Greater)?, + (compare(vote0, withdraw0) == Greater)?, + (compare(vote0, publish0) == Greater)?, + (compare(vote0, propose0) == Less)?, + (compare(propose0, propose0) == Equal)?, + (compare(propose0, propose1) == Less)?, + (compare(propose1, propose0) == Greater)?, + (compare(propose0, mint0) == Greater)?, + (compare(propose0, spend0) == Greater)?, + (compare(propose0, withdraw0) == Greater)?, + (compare(propose0, publish0) == Greater)?, + (compare(propose0, vote0) == Greater)?, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml new file mode 100644 index 00000000..f0c64bc6 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml @@ -0,0 
+1,4 @@ +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak new file mode 100644 index 00000000..a4ca9d6f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak @@ -0,0 +1,73 @@ +//// Vodka provides utility functions to build CIP onchain code +//// +//// ### Example usage +//// ```aiken +//// use cip +//// use cardano/assets.{AssetName} +//// +//// let cip68_100_asset_name = cip.cip68_100(asset_name) +//// let cip68_222_asset_name = cip.cip68_222(asset_name) +//// let cip68_333_asset_name = cip.cip68_333(asset_name) +//// let cip68_444_asset_name = cip.cip68_444(asset_name) + +use aiken/primitive/bytearray.{concat} +use cardano/assets.{AssetName} + +/// The byte prefix for CIP-68 asset - Reference Token +pub const cip68_100_prefix: ByteArray = #"000643b0" + +/// The byte prefix for CIP-68 asset - Non-Fungible Token +pub const cip68_222_prefix: ByteArray = #"000de140" + +/// The byte prefix for CIP-68 asset - Fungible Token +pub const cip68_333_prefix: ByteArray = #"0014df10" + +/// The byte prefix for CIP-68 asset - Rich-Fungible Token +pub const cip68_444_prefix: ByteArray = #"001bc280" + +pub fn drop_cip68_prefix(cip_68_asset_name: AssetName) -> AssetName { + cip_68_asset_name |> bytearray.drop(4) +} + +test test_drop_cip68_prefix() { + let cip68_100_asset_name = #"000643b012345678" + drop_cip68_prefix(cip68_100_asset_name) == #"12345678" +} + +/// Obtain the asset name for CIP-68 asset - Reference Token +/// ```aiken +/// let cip68_100_asset_name = cip68_100(asset_name) +/// ``` +pub fn cip68_100(asset_name: AssetName) -> AssetName { + concat(cip68_100_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Non-Fungible Token +/// ```aiken +/// let cip68_222_asset_name = cip68_222(asset_name) +/// ``` +pub fn cip68_222(asset_name: AssetName) -> AssetName { + concat(cip68_222_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Fungible Token +/// ```aiken +/// let cip68_333_asset_name = cip68_333(asset_name) +/// ``` +pub fn cip68_333(asset_name: AssetName) -> AssetName { + concat(cip68_333_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Rich-Fungible Token +/// ```aiken +/// let cip68_444_asset_name = cip68_444(asset_name) +/// ``` +pub fn cip68_444(asset_name: AssetName) -> AssetName { + concat(cip68_444_prefix, asset_name) +} + +/// The metadata attached with CIP-68 reference token (100) +pub type CIP68Metadata { + metadata: Pairs, + version: Int, +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak new file mode 100644 index 00000000..f67cb2cc --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak @@ -0,0 +1,161 @@ +//// Vodka cocktail provides utility functions to build Aiken onchain code +//// +//// All onchain utility functions are grouped with a naming convention of `vodka_`, +//// and all can be imported directly with `use cocktail` +//// +//// ### Cardano data types +//// - `Address` - [`use vodka_address`](./cocktail/vodka_address.html) +//// - `Value` - [`use vodka_value`](./cocktail/vodka_value.html) +//// +//// ### Transaction types +//// - `extra_signatories` - 
[Documentation](./cocktail/vodka_extra_signatories.html) +//// - `inputs` - [Documentation](./cocktail/vodka_inputs.html) +//// - `outputs` - [Documentation](./cocktail/vodka_outputs.html) +//// - `mints` - [Documentation](./cocktail/vodka_mints.html) +//// - `validity_range` - [Documentation](./cocktail/vodka_validity_range.html) +//// - `Redeemers` - [Documentation](./cocktail/vodka_redeemers.html) + +use cocktail/vodka_address +use cocktail/vodka_converter +use cocktail/vodka_extra_signatories +use cocktail/vodka_inputs +use cocktail/vodka_mints +use cocktail/vodka_outputs +use cocktail/vodka_redeemers +use cocktail/vodka_validity_range +use cocktail/vodka_value + +// Address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const compare_script_address = vodka_address.compare_script_address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const compare_address = vodka_address.compare_address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_payment_key = vodka_address.address_payment_key + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_pub_key = vodka_address.address_pub_key + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_script_hash = vodka_address.address_script_hash + +// Converter + +/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) +pub const convert_int_to_bytes = vodka_converter.convert_int_to_bytes + +/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) +pub const get_number_digit = vodka_converter.get_number_digit + +// Extra Signatories + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const key_signed = vodka_extra_signatories.key_signed + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const one_of_keys_signed = vodka_extra_signatories.one_of_keys_signed + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const all_key_signed = vodka_extra_signatories.all_key_signed + +// Inputs + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const input_inline_datum = vodka_inputs.input_inline_datum + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const only_input_datum_with = vodka_inputs.only_input_datum_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at = vodka_inputs.inputs_at + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_with = vodka_inputs.inputs_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_with_policy = vodka_inputs.inputs_with_policy + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at_with = vodka_inputs.inputs_at_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at_with_policy = vodka_inputs.inputs_at_with_policy + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_token_quantity = vodka_inputs.inputs_token_quantity + +// Mints + +/// Documentation please refer to 
[`vodka_mints`](./cocktail/vodka_mints.html) +pub const check_policy_only_burn = vodka_mints.check_policy_only_burn + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const only_minted_token = vodka_mints.only_minted_token + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const policy_only_minted_token = vodka_mints.policy_only_minted_token + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const token_minted = vodka_mints.token_minted + +// Outputs + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const output_inline_datum = vodka_outputs.output_inline_datum + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at = vodka_outputs.outputs_at + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_with = vodka_outputs.outputs_with + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_with_policy = vodka_outputs.outputs_with_policy + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at_with = vodka_outputs.outputs_at_with + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at_with_policy = vodka_outputs.outputs_at_with_policy + +// Redeemers + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const redeemer_from = vodka_redeemers.redeemer_from + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const withdrawal_redeemer = vodka_redeemers.withdrawal_redeemer + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const compare_output_reference = vodka_redeemers.compare_output_reference + +// Validity Range + +/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) +pub const valid_after = vodka_validity_range.valid_after + +/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) +pub const valid_before = vodka_validity_range.valid_before + +// Value + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_length = vodka_value.value_length + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const get_all_value_to = vodka_value.get_all_value_to + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const get_all_value_from = vodka_value.get_all_value_from + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_geq = vodka_value.value_geq + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_policy_info = vodka_value.value_policy_info + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_tokens = vodka_value.value_tokens diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak new file mode 100644 index 00000000..5bbf8a3d --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak @@ -0,0 +1,56 @@ +use aiken/crypto.{Blake2b_224, Hash, 
ScriptHash, VerificationKeyHash} +use aiken/primitive/bytearray +use cardano/address.{Address, Script, VerificationKey} + +/// Compare two script addresses +pub fn compare_script_address(x: Address, y: Address) -> Ordering { + expect Script(x_hash) = x.payment_credential + expect Script(y_hash) = y.payment_credential + bytearray.compare(x_hash, y_hash) +} + +/// Compare two addresses +pub fn compare_address(x: Address, y: Address) -> Ordering { + let x_hash = x.payment_credential + let y_hash = y.payment_credential + when (x_hash, y_hash) is { + (Script(x_script_hash), Script(y_script_hash)) -> + bytearray.compare(x_script_hash, y_script_hash) + (VerificationKey(x_key_hash), VerificationKey(y_key_hash)) -> + bytearray.compare(x_key_hash, y_key_hash) + _ -> Equal + } +} + +/// Obtain the payment key of an address, it can be either a script hash or a verification key +/// ```aiken +/// let payment_key_hash = address_payment_key(address) +/// ``` +pub fn address_payment_key(address: Address) -> Hash { + when address.payment_credential is { + Script(hash) -> hash + VerificationKey(key_hash) -> key_hash + } +} + +/// Obtain the verification key of an address, None if it is a script address +/// ```aiken +/// expect Some(pub_key_hash) = address_pub_key(address) +/// ``` +pub fn address_pub_key(address: Address) -> Option { + when address.payment_credential is { + VerificationKey(key_hash) -> Some(key_hash) + _ -> None + } +} + +/// Obtain the script hash of an address, None if it is a verification key address +/// ```aiken +/// expect Some(script_hash) = address_pub_key(address) +/// ``` +pub fn address_script_hash(address: Address) -> Option { + when address.payment_credential is { + Script(script_hash) -> Some(script_hash) + _ -> None + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak new file mode 100644 index 00000000..f13c84aa --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak @@ -0,0 +1,40 @@ +use aiken/primitive/bytearray + +/// Convert an integer to a "stringify" ByteArray value +/// ```aiken +/// let int_bytes = convert_int_to_bytes(123) +/// let this_is_true = int_bytes == "123" +/// ``` +pub fn convert_int_to_bytes(i: Int) -> ByteArray { + convert_int_to_bytes_go(i, get_number_digit(i)) +} + +fn convert_int_to_bytes_go(newInt: Int, digit: Int) -> ByteArray { + if digit == 1 { + bytearray.push("", newInt + 48) + } else { + bytearray.push( + convert_int_to_bytes_go(newInt % digit, digit / 10), + newInt / digit + 48, + ) + } +} + +/// Get the number of digits in an integer +pub fn get_number_digit(i: Int) -> Int { + go_get_number_digit(i, 1) +} + +fn go_get_number_digit(newInt: Int, digit: Int) -> Int { + if newInt < 10 { + digit + } else { + go_get_number_digit(newInt / 10, digit * 10) + } +} + +test byte_conversion() { + convert_int_to_bytes(1) == "1" && convert_int_to_bytes(123) == "123" && convert_int_to_bytes( + 672912, + ) == "672912" +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak new file mode 100644 index 00000000..17eb3b20 --- /dev/null +++ 
b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak @@ -0,0 +1,46 @@ +use aiken/collection/list + +/// Check if a key is signed by any of the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let key_to_test_1 = "key2" +/// let this_is_true = key_signed(extra_signatories, key_to_test_1) +/// +/// let key_to_test_2 = "key4" +/// let this_is_false = key_signed(extra_signatories, key_to_test_2) +/// ``` +pub fn key_signed(extra_signatories: List, key: ByteArray) { + list.has(extra_signatories, key) +} + +/// Check if any of the keys are signed by the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let keys_to_test_1 = ["key4", "key2", "key5"] +/// let this_is_true = one_of_keys_signed(extra_signatories, keys_to_test_1) +/// +/// let keys_to_test_2 = ["key4", "key5"] +/// let this_is_false = one_of_keys_signed(extra_signatories, keys_to_test_2) +/// ``` +pub fn one_of_keys_signed( + extra_signatories: List, + keys: List, +) { + list.any(keys, fn(key) { key_signed(extra_signatories, key) }) +} + +/// Check if all of the keys are signed by the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let keys_to_test_1 = ["key2", "key3"] +/// let this_is_true = all_keys_signed(extra_signatories, keys_to_test_1) +/// +/// let keys_to_test_2 = ["key2", "key4"] +/// let this_is_false = all_keys_signed(extra_signatories, keys_to_test_2) +/// ``` +pub fn all_key_signed(extra_signatories: List, keys: List) { + list.all(keys, fn(key) { key_signed(extra_signatories, key) }) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak new file mode 100644 index 00000000..5ddaaa7f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak @@ -0,0 +1,123 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} +use cardano/transaction.{InlineDatum, Input} + +/// Extracts the inline datum from an input. +/// ```aiken +/// expect inline_datum: MyDatumType = input_inline_datum(input) +/// ``` +pub fn input_inline_datum(input: Input) { + expect InlineDatum(raw_datum) = input.output.datum + raw_datum +} + +/// Extract the inline datum by locating the first input in a list of inputs by Address and PolicyId. +/// ```aiken +/// expect inline_datum: MyDatumType = only_input_datum_with(inputs, policy, name) +/// ``` +pub fn only_input_datum_with( + inputs: List, + policy: PolicyId, + name: AssetName, +) { + expect Some(input) = + list.find( + inputs, + fn(input) { quantity_of(input.output.value, policy, name) == 1 }, + ) + input_inline_datum(input) +} + +/// Filters inputs by Address. +/// ```aiken +/// let filtered_inputs = inputs_at(inputs, address) +/// ``` +pub fn inputs_at(inputs: List, address: Address) -> List { + list.filter(inputs, fn(input) { input.output.address == address }) +} + +/// Filters inputs by PolicyId and AssetName. +/// ```aiken +/// let filtered_inputs = inputs_with(inputs, policy, name) +/// ``` +pub fn inputs_with( + inputs: List, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + inputs, + fn(input) { quantity_of(input.output.value, policy, name) == 1 }, + ) +} + +/// Filters inputs by token policy. 
+/// ```aiken +/// let filtered_inputs = inputs_with_policy(inputs, policy) +/// ``` +pub fn inputs_with_policy(inputs: List, policy: PolicyId) -> List { + list.filter( + inputs, + fn(input) { + list.any(flatten(input.output.value), fn(token) { token.1st == policy }) + }, + ) +} + +/// Filters inputs by Address, PolicyId, and AssetName. +/// ```aiken +/// let filtered_inputs = inputs_at_with(inputs, address, policy, name) +/// ``` +pub fn inputs_at_with( + inputs: List, + address: Address, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + inputs, + fn(input) { + input.output.address == address && quantity_of( + input.output.value, + policy, + name, + ) == 1 + }, + ) +} + +/// Filters inputs by Address and PolicyId. +/// ```aiken +/// let filtered_inputs = inputs_at_with_policy(inputs, address, policy) +/// ``` +pub fn inputs_at_with_policy( + inputs: List, + address: Address, + policy: PolicyId, +) -> List { + list.filter( + inputs, + fn(input) { + input.output.address == address && list.any( + flatten(input.output.value), + fn(token) { token.1st == policy }, + ) + }, + ) +} + +/// Calculate the total quantity of a token in a list of inputs. +/// ```aiken +/// let total_quantity = inputs_token_quantity(inputs, token) +/// ``` +pub fn inputs_token_quantity( + inputs: List, + token: (PolicyId, AssetName), +) -> Int { + list.map( + inputs, + fn(input) { quantity_of(input.output.value, token.1st, token.2nd) }, + ) + |> list.foldr(0, fn(n, total) { n + total }) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak new file mode 100644 index 00000000..aa7dfe9a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak @@ -0,0 +1,68 @@ +use aiken/collection/list +use cardano/assets.{AssetName, PolicyId, Value, flatten} + +/// Check if a certain PolicyId is burning only if exists in the minted value. +/// ```aiken +/// let is_policy_only_burn = check_policy_only_burn(minted_value, policy) +/// ``` +pub fn check_policy_only_burn(mint: Value, policy: PolicyId) -> Bool { + let mint_value = flatten(mint) + list.all( + mint_value, + fn(x) { + if x.1st == policy { + x.3rd < 0 + } else { + True + } + }, + ) +} + +/// Check if a certain policy has only minted this token. +/// ```aiken +/// let is_policy_only_minted = check_policy_only_minted(minted_value, policy, name, quantity) +/// ``` +pub fn policy_only_minted_token( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + when flatten(mint) |> list.filter(fn(asset) { asset.1st == policy }) is { + [(_, minted_asset_name, minted_quantity)] -> + minted_asset_name == name && minted_quantity == quantity + _ -> False + } +} + +/// Check if the minted value contains only one distinct asset with particular PolicyId. +/// ```aiken +/// let is_only_minted_token = only_minted_token(minted_value, policy, name, quantity) +/// ``` +pub fn only_minted_token( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + when flatten(mint) is { + [(minted_policy, minted_asset_name, minted_quantity)] -> + minted_policy == policy && minted_asset_name == name && minted_quantity == quantity + _ -> False + } +} + +/// Check if the minted value contains a token with particular PolicyId, AssetName, and quantity. 
+/// ```aiken +/// let is_token_minted = token_minted(minted_value, policy, name, quantity) +/// ``` +pub fn token_minted( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + flatten(mint) + |> list.any(fn(x) { x.1st == policy && x.2nd == name && x.3rd == quantity }) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak new file mode 100644 index 00000000..d83ffaf5 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak @@ -0,0 +1,90 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} +use cardano/transaction.{InlineDatum, Output} + +/// Extracts the inline datum from an output. +/// ```aiken +/// expect inline_datum: MyDatumType = output_inline_datum(output) +/// ``` +pub fn output_inline_datum(output: Output) { + expect InlineDatum(raw_datum) = output.datum + raw_datum +} + +/// Filters outputs by Address. +/// ```aiken +/// let filtered_outputs = outputs_at(outputs, address) +/// ``` +pub fn outputs_at(outputs: List, address: Address) -> List { + list.filter(outputs, fn(output) { output.address == address }) +} + +/// Filters outputs by PolicyId and AssetName. +/// ```aiken +/// let filtered_outputs = outputs_with(outputs, policy, name) +/// ``` +pub fn outputs_with( + outputs: List, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + outputs, + fn(output) { quantity_of(output.value, policy, name) == 1 }, + ) +} + +/// Filters outputs by token policy. +/// ```aiken +/// let filtered_outputs = outputs_with_policy(outputs, policy) +/// ``` +pub fn outputs_with_policy( + outputs: List, + policy: PolicyId, +) -> List { + list.filter( + outputs, + fn(output) { + list.any(flatten(output.value), fn(token) { token.1st == policy }) + }, + ) +} + +/// Filters outputs by Address, PolicyId, and AssetName. +/// ```aiken +/// let filtered_outputs = outputs_at_with(outputs, address, policy, name) +/// ``` +pub fn outputs_at_with( + outputs: List, + address: Address, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + outputs, + fn(output) { + output.address == address && quantity_of(output.value, policy, name) == 1 + }, + ) +} + +/// Filters outputs by Address and PolicyId. 
+/// ```aiken +/// let filtered_outputs = outputs_at_with_policy(outputs, address, policy) +/// ``` +pub fn outputs_at_with_policy( + outputs: List, + address: Address, + policy: PolicyId, +) -> List { + list.filter( + outputs, + fn(output) { + output.address == address && list.any( + flatten(output.value), + fn(token) { token.1st == policy }, + ) + }, + ) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak new file mode 100644 index 00000000..df3d69e9 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak @@ -0,0 +1,45 @@ +use aiken/collection/pairs +use aiken/primitive/bytearray +use cardano/address.{Address, Script} +use cardano/transaction.{ + Input, OutputReference, Redeemer, ScriptPurpose, Spend, Withdraw, find_input, +} + +/// Obtain the redeemer for a given output reference and address +/// ```aiken +/// expect Some(redeemer) = redeemer_from(redeemers, inputs, output_reference, input_address) +/// ``` +pub fn redeemer_from( + redeemers: Pairs, + inputs: List, + output_reference: OutputReference, + input_address: Address, +) -> Option { + expect Some(redeemer) = redeemers |> pairs.get_first(Spend(output_reference)) + expect Some(input) = find_input(inputs, output_reference) + if input.output.address == input_address { + Some(redeemer) + } else { + None + } +} + +/// Obtain the first redeemer for a given withdrawal script hash +/// ```aiken +/// expect Some(raw_redeemer) = withdrawal_redeemer(redeemers, withdrawal_script_hash) +/// expect my_redeemer: MyRedeemerType = raw_redeemer; +/// ``` +pub fn withdrawal_redeemer( + redeemers: Pairs, + withdrawal_script_hash: ByteArray, +) -> Option { + redeemers + |> pairs.get_first(Withdraw(Script(withdrawal_script_hash))) +} + +/// Compare the output reference of two spend transactions +pub fn compare_output_reference(x, y) { + expect Spend(out_ref_x) = x + expect Spend(out_ref_y) = y + bytearray.compare(out_ref_x.transaction_id, out_ref_y.transaction_id) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak new file mode 100644 index 00000000..498bc600 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak @@ -0,0 +1,34 @@ +use aiken/interval.{Finite, IntervalBound} +use cardano/transaction.{ValidityRange} + +/// Check if the validity range is after the required timestamp. +/// ```aiken +/// let is_valid_after = valid_after(transaction.validity_range, required_timestamp) +/// ``` +pub fn valid_after( + validity_range: ValidityRange, + required_timestamp: Int, +) -> Bool { + let IntervalBound { bound_type, is_inclusive } = validity_range.lower_bound + when (bound_type, is_inclusive) is { + (Finite(lower_bound), True) -> lower_bound > required_timestamp + (Finite(lower_bound), False) -> lower_bound >= required_timestamp + _ -> False + } +} + +/// Check if the validity range is before the required timestamp. 
+/// ```aiken +/// let is_valid_before = valid_before(transaction.validity_range, required_timestamp) +/// ``` +pub fn valid_before( + validity_range: ValidityRange, + required_timestamp: Int, +) -> Bool { + let IntervalBound { bound_type, is_inclusive } = validity_range.upper_bound + when (bound_type, is_inclusive) is { + (Finite(upper_bound), True) -> upper_bound < required_timestamp + (Finite(upper_bound), False) -> upper_bound <= required_timestamp + _ -> False + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak new file mode 100644 index 00000000..770ddbcc --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak @@ -0,0 +1,80 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{ + AssetName, PolicyId, Value, flatten, merge, quantity_of, zero, +} +use cardano/transaction.{Input, Output} + +/// Calulate the length of a value +/// ```aiken +/// let value_length = value_length(value) +/// ``` +pub fn value_length(value: Value) -> Int { + list.length(flatten(value)) +} + +/// Get the value send to a particular address in a list of outputs +/// ```aiken +/// let value_to = get_all_value_to(outputs, address) +/// ``` +pub fn get_all_value_to(outputs: List, address: Address) -> Value { + list.foldr( + outputs, + zero, + fn(output, acc_value) { + if output.address == address { + merge(acc_value, output.value) + } else { + acc_value + } + }, + ) +} + +/// Get the value coming from a particular address in a list of inputs +/// ```aiken +/// let value_from = get_all_value_from(inputs, address) +/// ``` +pub fn get_all_value_from(inputs: List, address: Address) -> Value { + list.foldr( + inputs, + zero, + fn(input, acc_value) { + if input.output.address == address { + merge(acc_value, input.output.value) + } else { + acc_value + } + }, + ) +} + +/// Check if the first value provided is greater than or equal to the second value +/// ```aiken +/// let is_geq = value_geq(supposed_greater, supposed_smaller) +/// ``` +pub fn value_geq(greater: Value, smaller: Value) -> Bool { + list.all( + flatten(smaller), + fn(token) { quantity_of(greater, token.1st, token.2nd) >= token.3rd }, + ) +} + +/// Obtain the information (i.e. flattened value) of a policy in a value +/// ```aiken +/// expect Some((policyId, assetName, quantity)) = value_policy_info(value, policy) +/// ``` +pub fn value_policy_info( + value: Value, + policy: ByteArray, +) -> Option<(ByteArray, ByteArray, Int)> { + list.find(flatten(value), fn(t) { t.1st == policy }) +} + +/// Obtain the non-lovelace information (i.e. flattened value) of a policy in a value +/// ```aiken +/// let tokens = value_tokens(value) +/// ``` +pub fn value_tokens(value: Value) -> List<(PolicyId, AssetName, Int)> { + list.filter(flatten(value), fn(t) { t.1st != "" }) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak new file mode 100644 index 00000000..79a8e7c7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak @@ -0,0 +1,776 @@ +//// Mocktail contains a set of functions to build transactions for testing purposes. +//// +//// To use Mocktail Tx, there are 4 steps +//// 1. 
Starts with [`mocktail_tx()`](./mocktail.html#mocktail_tx) to create a new transaction builder. +//// 2. Use tx building methods similar to MeshJS lower level APIs to build the transaction. +//// 3. Call [`complete`](./mocktail.html#complete) to complete building transaction. +//// 4. Finally, if there is any whole items to be added to the transaction, use the `add` functions. +//// +//// Mocktail is built with devex and multiple test cases compatibility in mind. +//// 1. It is pipable. +//// 2. For every tx building and adding methods, it takes first param as condition. that function will only run when this condition is `True`. +//// +//// ## Example +//// ```aiken +//// let tx: Transaction = +//// mocktail_tx() +//// |> required_signer_hash(is_signature_provided, mock_pub_key_hash(0)) +//// |> script_withdrawal(True, mock_script_hash(0), 0) +//// |> script_withdrawal(True, mock_script_hash(1), 0) +//// |> required_signer_hash(True, mock_pub_key_hash(1)) +//// |> complete() +//// |> add_reference_input(True, mock_oracle_ref_input_1()) +//// |> add_reference_input(True, mock_oracle_ref_input_2()) +//// |> add_output(True, mock_pub_key_output(mock_fee_collection_address, mock_fee)) +//// ``` + +use aiken/cbor +use aiken/collection/dict +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash, blake2b_256} +use aiken/interval.{Finite, Interval} +use cardano/address.{Address, Credential, Script} +use cardano/assets.{Value, from_asset, merge, zero} +use cardano/certificate.{Certificate} +use cardano/transaction.{ + InlineDatum, Input, Output, OutputReference, Redeemer, ScriptPurpose, + Transaction, TransactionId, placeholder, +} +use mocktail/virgin_address +use mocktail/virgin_key_hash +use mocktail/virgin_output_reference +use mocktail/virgin_outputs +use mocktail/virgin_validity_range + +/// A mock transaction builder. It can be initialized with `mocktail_tx()`. +pub type MocktailTx { + tx: Transaction, + queue_input: Option, + queue_output: Option, + queue_ref_input: Option, +} + +/// Initialize a new mock transaction builder, and output a built transaction wiht [`.complete().`](./mocktail.html#complete) +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn mocktail_tx() -> MocktailTx { + MocktailTx { + tx: placeholder, + queue_input: None, + queue_output: None, + queue_ref_input: None, + } +} + +/// Tx building method - Add an input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_in( + mocktail_tx: MocktailTx, + condition: Bool, + tx_hash: ByteArray, + tx_index: Int, + amount: Value, + address: Address, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_input = + Some( + Input { + output_reference: OutputReference { + transaction_id: tx_hash, + output_index: tx_index, + }, + output: mock_pub_key_output(address, amount), + }, + ) + when mocktail_tx.queue_input is { + Some(input) -> + MocktailTx { + ..mocktail_tx, + queue_input: queue_input, + tx: mocktail_tx.tx |> add_input(True, input), + } + None -> MocktailTx { ..mocktail_tx, queue_input: queue_input } + } + } +} + +/// Tx building method - Add an input with inline datum to the transaction. +/// This can only be used right after [`tx_in`](./mocktail.html#tx_in). +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_in(condition, tx_hash, tx_index, amount, address) +/// |> tx_in_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_in_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_input = + when mocktail_tx.queue_input is { + Some(input) -> { + let Input { output_reference, output } = input + Some( + Input { + output_reference, + output: Output { ..output, datum: InlineDatum(datum) }, + }, + ) + } + None -> + Some( + Input { + output_reference: mock_utxo_ref(0, 0), + output: mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + }, + ) + } + MocktailTx { ..mocktail_tx, queue_input: queue_input } + } +} + +/// Tx building method - Add an output to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_out(condition, address, amount) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_out( + mocktail_tx: MocktailTx, + condition: Bool, + address: Address, + amount: Value, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_output = Some(mock_pub_key_output(address, amount)) + when mocktail_tx.queue_output is { + Some(output) -> + MocktailTx { + ..mocktail_tx, + queue_output: queue_output, + tx: mocktail_tx.tx |> add_output(True, output), + } + None -> MocktailTx { ..mocktail_tx, queue_output: queue_output } + } + } +} + +/// Tx building method - Add an output with inline datum to the transaction. +/// This can only be used right after [`tx_out`](./mocktail.html#tx_out). +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_out(condition, address, amount) +/// |> tx_out_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_out_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_output = + when mocktail_tx.queue_output is { + Some(output) -> Some(Output { ..output, datum: InlineDatum(datum) }) + None -> + Some( + mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + ) + } + MocktailTx { ..mocktail_tx, queue_output: queue_output } + } +} + +/// Tx building method - Add a mint to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> mint(condition, quantity, policy_id, token_name) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn mint( + mocktail_tx: MocktailTx, + condition: Bool, + quantity: Int, + policy_id: ByteArray, + token_name: ByteArray, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx + |> add_mint(True, from_asset(policy_id, token_name, quantity)), + } + } +} + +/// Tx building method - Add a reference input to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn ref_tx_in( + mocktail_tx: MocktailTx, + condition: Bool, + tx_hash: ByteArray, + tx_index: Int, + amount: Value, + address: Address, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_ref_input = + Some( + Input { + output_reference: OutputReference { + transaction_id: tx_hash, + output_index: tx_index, + }, + output: mock_pub_key_output(address, amount), + }, + ) + when mocktail_tx.queue_ref_input is { + Some(input) -> + MocktailTx { + ..mocktail_tx, + queue_ref_input: queue_ref_input, + tx: mocktail_tx.tx |> add_reference_input(True, input), + } + None -> MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } + } + } +} + +/// Tx building method - Add an inline datum to last reference input in the transaction. +/// This can only be used right after [`ref_tx_in`](./mocktail.html#ref_tx_in). +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ref_tx_in_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn ref_tx_in_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_ref_input = + when mocktail_tx.queue_ref_input is { + Some(input) -> { + let Input { output_reference, output } = input + Some( + Input { + output_reference, + output: Output { ..output, datum: InlineDatum(datum) }, + }, + ) + } + None -> + Some( + Input { + output_reference: mock_utxo_ref(0, 0), + output: mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + }, + ) + } + MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } + } +} + +/// Tx building method - Add a a lower bound validity range to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> valid_hereafter(condition, time) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn invalid_before( + mocktail_tx: MocktailTx, + condition: Bool, + time: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let tx = mocktail_tx.tx + let upper_bound = + when tx.validity_range.upper_bound.bound_type is { + Finite(x) -> Some(x) + _ -> None + } + MocktailTx { + ..mocktail_tx, + tx: Transaction { + ..tx, + validity_range: mock_interval(Some(time), upper_bound), + }, + } + } +} + +/// Tx building method - Add a a upper bound validity range to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> valid_hereafter(condition, time) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn invalid_hereafter( + mocktail_tx: MocktailTx, + condition: Bool, + time: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let tx = mocktail_tx.tx + let lower_bound = + when tx.validity_range.lower_bound.bound_type is { + Finite(x) -> Some(x) + _ -> None + } + MocktailTx { + ..mocktail_tx, + tx: Transaction { + ..tx, + validity_range: mock_interval(lower_bound, Some(time)), + }, + } + } +} + +/// Tx building method - Add a required signer hash to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> required_signer_hash(condition, key) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn required_signer_hash( + mocktail_tx: MocktailTx, + condition: Bool, + key: ByteArray, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx |> add_extra_signatory(True, key), + } + } +} + +/// Tx building method - Add a script withdrawal to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> script_withdrawal(condition, script_hash, withdrawal_amount) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn script_withdrawal( + mocktail_tx: MocktailTx, + condition: Bool, + script_hash: ByteArray, + withdrawal_amount: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx + |> add_withdrawal(True, Pair(Script(script_hash), withdrawal_amount)), + } + } +} + +/// Tx building method - Conclude the transaction building process, and return the built transaction. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +pub fn complete(mocktail_tx: MocktailTx) -> Transaction { + let tx = mocktail_tx.tx + let tx = + when mocktail_tx.queue_input is { + Some(input) -> tx |> add_input(True, input) + None -> tx + } + let tx = + when mocktail_tx.queue_output is { + Some(output) -> tx |> add_output(True, output) + None -> tx + } + let tx = + when mocktail_tx.queue_ref_input is { + Some(input) -> tx |> add_reference_input(True, input) + None -> tx + } + tx +} + +/// Tx maniputlator - Add an input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_input(condition, input) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_input(tx: Transaction, condition: Bool, input: Input) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, inputs: tx.inputs |> list.concat([input]) } + } +} + +/// Tx maniputlator - Add a reference input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_reference_input(condition, input) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_reference_input( + tx: Transaction, + condition: Bool, + input: Input, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + reference_inputs: tx.reference_inputs |> list.concat([input]), + } + } +} + +/// Tx maniputlator - Add an output to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let t = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_output(condition, output) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_output( + tx: Transaction, + condition: Bool, + output: Output, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, outputs: tx.outputs |> list.concat([output]) } + } +} + +/// Tx maniputlator - Set a fee to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> set_fee(condition, lovelace_fee) +/// |> ...other tx maniputlator methods +/// ``` +pub fn set_fee( + tx: Transaction, + condition: Bool, + lovelace_fee: Int, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, fee: lovelace_fee } + } +} + +/// Tx maniputlator - Add a mint to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_mint(condition, mint) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_mint(tx: Transaction, condition: Bool, mint: Value) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + mint: tx.mint + |> merge(mint), + } + } +} + +/// Tx maniputlator - Add a certificate to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_certificate(condition, certificate) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_certificate( + tx: Transaction, + condition: Bool, + certificate: Certificate, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + certificates: tx.certificates |> list.concat([certificate]), + } + } +} + +/// Tx maniputlator - Add a withdrawal to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_withdrawal(condition, stake_credential, amount) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_withdrawal( + tx: Transaction, + condition: Bool, + withdrawal: Pair, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + withdrawals: tx.withdrawals |> list.concat([withdrawal]), + } + } +} + +/// Tx maniputlator - Add an extra signatory to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_extra_signatory(condition, signatory) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_extra_signatory( + tx: Transaction, + condition: Bool, + signatory: VerificationKeyHash, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + extra_signatories: tx.extra_signatories |> list.concat([signatory]), + } + } +} + +/// Tx maniputlator - Add a redeemer to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_redeemer(condition, redeemer) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_redeemer( + tx: Transaction, + condition: Bool, + redeemer: Pair, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, redeemers: tx.redeemers |> list.concat([redeemer]) } + } +} + +/// Tx maniputlator - Add a datum to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_datum(condition, datum) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_datum(tx: Transaction, condition: Bool, datum: Data) -> Transaction { + if !condition { + tx + } else { + let datum_hash = blake2b_256(cbor.serialise(datum)) + Transaction { ..tx, datums: tx.datums |> dict.insert(datum_hash, datum) } + } +} + +/// Tx maniputlator - Set the transaction id. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> set_transaction_id(condition, transaction_id) +/// |> ...other tx maniputlator methods +/// ``` +pub fn set_transaction_id( + tx: Transaction, + condition: Bool, + transaction_id: TransactionId, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, id: transaction_id } + } +} + +// Address + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_verfication_key_credential = + virgin_address.mock_verfication_key_credential + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_pub_key_address = virgin_address.mock_pub_key_address + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_script_credential = virgin_address.mock_script_credential + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_script_address = virgin_address.mock_script_address + +// Key hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_key_hash = virgin_key_hash.mock_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_policy_id = virgin_key_hash.mock_policy_id + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_pub_key_hash = virgin_key_hash.mock_pub_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_script_hash = virgin_key_hash.mock_script_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_stake_key_hash = virgin_key_hash.mock_stake_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_script_stake_key_hash = + virgin_key_hash.mock_script_stake_key_hash + +// Output reference + +/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) +pub const mock_tx_hash = virgin_output_reference.mock_tx_hash + +/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) +pub const mock_utxo_ref = virgin_output_reference.mock_utxo_ref + +// Outputs + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_output = virgin_outputs.mock_output + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_pub_key_output = virgin_outputs.mock_pub_key_output + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_script_output = virgin_outputs.mock_script_output + +// Validity range + +/// Documentation please refer to [`virgin_validity_range`](./mocktail/virgin_validity_range.html) +pub const mock_interval = 
virgin_validity_range.mock_interval + +test preserve_tx_in_order() { + let tx: Transaction = + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, zero, mock_pub_key_address(0, None)) + |> tx_in(True, mock_tx_hash(1), 1, zero, mock_pub_key_address(1, None)) + |> complete() + expect [input1, input2] = tx.inputs + and { + input1.output_reference.transaction_id == mock_tx_hash(0), + input1.output_reference.output_index == 0, + input2.output_reference.transaction_id == mock_tx_hash(1), + input2.output_reference.output_index == 1, + } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak new file mode 100644 index 00000000..30a6e40b --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak @@ -0,0 +1,57 @@ +use cardano/address.{ + Address, Credential, Inline, Script, StakeCredential, VerificationKey, +} +use mocktail/virgin_key_hash.{ + mock_pub_key_hash, mock_script_hash, mock_script_stake_key_hash, + mock_stake_key_hash, +} + +/// Mock a pub key credential +/// `variation` same the same index as `mock_pub_key_hash` +pub fn mock_verfication_key_credential(variation: Int) -> Credential { + VerificationKey(mock_pub_key_hash(variation)) +} + +/// Mock a pub key address +/// `variation` same the same index as `mock_pub_key_hash` +/// `stake_credential` is optional +pub fn mock_pub_key_address( + variation: Int, + stake_credential: Option, +) -> Address { + Address { + payment_credential: mock_verfication_key_credential(variation), + stake_credential, + } +} + +/// Mock a script credential +/// `variation` same the same index as `mock_script_hash` +pub fn mock_script_credential(variation: Int) -> Credential { + Script(mock_script_hash(variation)) +} + +/// Mock a script address +/// `variation` same the same index as `mock_script_hash` +/// `stake_credential` is optional +pub fn mock_script_address( + variation: Int, + stake_credential: Option, +) -> Address { + Address { + payment_credential: mock_script_credential(variation), + stake_credential, + } +} + +/// Mock a pub key stake credential +/// `variation` same the same index as `mock_stake_key_hash` +pub fn mock_pub_key_stake_cred(variation: Int) -> StakeCredential { + Inline(VerificationKey(mock_stake_key_hash(variation))) +} + +/// Mock a script stake credential +/// `variation` same the same index as `mock_script_stake_key_hash` +pub fn mock_script_stake_cred(variation: Int) -> StakeCredential { + Inline(Script(mock_script_stake_key_hash(variation))) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak new file mode 100644 index 00000000..9a32dd06 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak @@ -0,0 +1,47 @@ +use aiken/cbor.{serialise} +use aiken/crypto.{ScriptHash, VerificationKeyHash, blake2b_224} +use aiken/primitive/bytearray.{concat} +use cardano/assets.{PolicyId} + +pub const root_hash = + #"a2c20c77887ace1cd986193e4e75babd8993cfd56995cd5cfce609c2" + +/// Mock a key in hexadecimal format +pub fn mock_key_hash(variation: Int) -> ByteArray { + serialise(variation) |> concat(root_hash) |> blake2b_224() +} + +/// Mock a PolicyID +/// The variation is used to 
distinguish between different PolicyIDs +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_policy_id(variation: Int) -> PolicyId { + mock_key_hash(variation) +} + +/// Mock a public key hash +/// The variation is used to distinguish between different public keys +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_pub_key_hash(variation: Int) -> VerificationKeyHash { + mock_key_hash(variation + 1000) +} + +/// Mock a script hash +/// The variation is used to distinguish between different scripts +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_script_hash(variation: Int) -> ScriptHash { + mock_key_hash(variation + 2000) +} + +/// Mock a stake key hash +/// The variation is used to distinguish between different stake keys +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_stake_key_hash(variation: Int) -> VerificationKeyHash { + mock_key_hash(variation + 3000) +} + +/// Mock a script stake key hash +/// The variation is used to distinguish between different scripts +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_script_stake_key_hash(variation: Int) -> ScriptHash { + mock_key_hash(variation + 4000) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak new file mode 100644 index 00000000..03ec9c02 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak @@ -0,0 +1,16 @@ +use aiken/cbor.{serialise} +use aiken/crypto.{Blake2b_256, Hash, blake2b_256} +use aiken/primitive/bytearray.{concat} +use cardano/transaction.{OutputReference, Transaction} + +const root_hash = + #"5a077cbcdffb88b104f292aacb9687ce93e2191e103a30a0cc5505c18b719f98" + +pub fn mock_tx_hash(variation: Int) -> Hash { + serialise(variation) |> concat(root_hash) |> blake2b_256() +} + +/// Mock an output reference +pub fn mock_utxo_ref(variation: Int, output_index: Int) -> OutputReference { + OutputReference { transaction_id: mock_tx_hash(variation), output_index } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak new file mode 100644 index 00000000..b75181ba --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak @@ -0,0 +1,30 @@ +use aiken/crypto.{ScriptHash} +use cardano/address.{Address} +use cardano/assets.{Value} +use cardano/transaction.{Datum, NoDatum, Output} + +/// Mock an output +pub fn mock_output( + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +) -> Output { + Output { address, value, datum, reference_script } +} + +/// Mock an output with a public key address +/// `datum` and `reference_script` is omitted as it is seldom used in practice +pub fn mock_pub_key_output(address: Address, value: Value) -> Output { + mock_output(address, value, NoDatum, reference_script: None) +} + +/// Mock an output with a script address +/// `reference_script` is omitted as it is seldom used in practice +pub fn mock_script_output( + address: Address, + value: Value, + datum: Datum, +) -> Output { + mock_output(address, 
value, datum, reference_script: None) +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak new file mode 100644 index 00000000..c11a249d --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak @@ -0,0 +1,28 @@ +use aiken/interval.{ + Finite, Interval, IntervalBound, NegativeInfinity, PositiveInfinity, +} +use cardano/transaction.{ValidityRange} + +/// Mock a validity range with the given lower and upper bounds. +pub fn mock_interval(lower: Option, upper: Option) -> ValidityRange { + let lower_bound = + when lower is { + Some(lower_bound_number) -> + IntervalBound { + bound_type: Finite(lower_bound_number), + is_inclusive: True, + } + None -> IntervalBound { bound_type: NegativeInfinity, is_inclusive: True } + } + let upper_bound = + when upper is { + Some(upper_bound_number) -> + IntervalBound { + bound_type: Finite(upper_bound_number), + is_inclusive: True, + } + None -> IntervalBound { bound_type: PositiveInfinity, is_inclusive: True } + } + + Interval { lower_bound, upper_bound } +} diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json new file mode 100644 index 00000000..ebc0bcfa --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json @@ -0,0 +1,14 @@ +{ + "preamble": { + "title": "sidan-lab/vodka", + "description": "Aiken utils for project 'sidan-lab/vodka", + "version": "0.1.13", + "plutusVersion": "v3", + "compiler": { + "name": "Aiken", + "version": "v1.1.9+2217206" + }, + "license": "Apache-2.0" + }, + "validators": [] +} \ No newline at end of file diff --git a/src/components/multisig/aiken-crowdfund/lib/types.ak b/src/components/multisig/aiken-crowdfund/lib/types.ak new file mode 100644 index 00000000..0232fc7a --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/lib/types.ak @@ -0,0 +1,26 @@ +use cardano/address.{Address} + +pub type MintPolarity { + RMint + RBurn +} + +pub type CrowdfundRedeemer { + ContributeFund + CompleteCrowdfund + ContributorWithdrawal + RemoveEmptyInstance +} + +pub type CrowdfundDatum { + completion_script: ByteArray, + share_token: ByteArray, + crowdfund_address: Address, + fundraise_target: Int, + current_fundraised_amount: Int, + allow_over_subscription: Bool, + deadline: Int, + expiry_buffer: Int, + fee_address: Address, + min_charge: Int, +} diff --git a/src/components/multisig/aiken-crowdfund/lib/utils.ak b/src/components/multisig/aiken-crowdfund/lib/utils.ak new file mode 100644 index 00000000..75d90a27 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/lib/utils.ak @@ -0,0 +1,39 @@ +use aiken/collection/list +use aiken/collection/pairs +use cardano/address.{Address} +use cardano/assets.{Lovelace, from_lovelace} +use cardano/transaction.{Input, Output, Redeemer, ScriptPurpose, Spend} + +pub fn redeemer_with_input( + redeemers: Pairs, + input: Input, +) -> Option { + let output_reference = input.output_reference + redeemers |> pairs.get_first(Spend(output_reference)) +} + +pub fn check_fundraise_target( + allow_over_subscription: Bool, + fundraise_target: Int, + current_fundraised_amount: Int, +) -> Bool { + if allow_over_subscription { + True + } else { + current_fundraised_amount <= fundraise_target + } +} + 
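+// A minimal usage sketch for `check_fundraise_target`, using assumed example
+// figures (a 100 ADA target, expressed in lovelace): over-subscription always
+// passes, otherwise the running total must stay at or below the target.
+test check_fundraise_target_examples() {
+  and {
+    // over-subscription allowed: exceeding the target is accepted
+    check_fundraise_target(True, 100_000_000, 150_000_000),
+    // exactly at the target is still accepted
+    check_fundraise_target(False, 100_000_000, 100_000_000),
+    // above the target without over-subscription is rejected
+    !check_fundraise_target(False, 100_000_000, 100_000_001),
+  }
+}
+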
+pub fn outputs_at_with_lovelace(
+  outputs: List<Output>,
+  address: Address,
+  lovelace: Lovelace,
+) -> List<Output> {
+  list.filter(
+    outputs,
+    fn(output) {
+      let is_lovelace_match = output.value == from_lovelace(lovelace)
+      is_lovelace_match && output.address == address
+    },
+  )
+}
diff --git a/src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md b/src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md
new file mode 100644
index 00000000..78afb6a1
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md
@@ -0,0 +1,15 @@
+# Auth Tokens - One Shot
+
+## Parameter
+
+- `utxo_ref`: UTxO to be spent at minting
+
+## User Action
+
+1. Mint - Redeemer `RMint`
+
+   - The parameterized `utxo_ref` is consumed as an input
+
+2. Burn - Redeemer `RBurn`
+
+   - The current policy id has only negative minting values in the transaction body.
diff --git a/src/components/multisig/aiken-crowdfund/specs/2_proxy.md b/src/components/multisig/aiken-crowdfund/specs/2_proxy.md
new file mode 100644
index 00000000..1f9abb9d
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/specs/2_proxy.md
@@ -0,0 +1,19 @@
+# Specification - Crowdfund
+
+## Parameter
+
+- `auth_token`: The policy id of `AuthToken`
+
+## User Action
+
+1. SpendFunds
+
+2. RegisterDrep
+
+3. DeregisterDrep
+
+4. VoteAsDrep
+
+5. RemoveEmptyInstance
+
+   - `auth_token` from current input is burnt
diff --git a/src/components/multisig/aiken-crowdfund/specs/_scripts.md b/src/components/multisig/aiken-crowdfund/specs/_scripts.md
new file mode 100644
index 00000000..c44c42f3
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/specs/_scripts.md
@@ -0,0 +1,20 @@
+# Aiken Crowdfunding
+
+## 1. Auth Tokens
+
+The tokens are held in a native script multisig wallet and have to be included in every transaction.
+
+
+## 2. Proxy
+
+The validator that represents the actual treasury / drep.
+
+## Param dependency tree
+
+1. First layer
+
+   - `auth_tokens` - `utxo_ref`
+
+2. Second layer
+
+   - `proxy` - param `auth_tokens`
diff --git a/src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md b/src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md
new file mode 100644
index 00000000..743db0a5
--- /dev/null
+++ b/src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md
@@ -0,0 +1,13 @@
+# Application Setup Documentation
+
+## Setup
+
+There are 2 steps to setting up the application:
+
+1. Minting `auth_token`, a one-time minting policy with an empty token name and a quantity decided by the user.
+
+   - Validation: 1.1
+
+2.
Sending the the `auth_token` to the owner multisig + + - Validation: N/A \ No newline at end of file diff --git a/src/components/multisig/aiken-crowdfund/specs/user_action_doc.md b/src/components/multisig/aiken-crowdfund/specs/user_action_doc.md new file mode 100644 index 00000000..dcb542e7 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/specs/user_action_doc.md @@ -0,0 +1,6 @@ +# User Actions Documentation + +## Multisig Users + + + diff --git a/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak b/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak new file mode 100644 index 00000000..57706664 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak @@ -0,0 +1,27 @@ +use aiken/collection/dict +use aiken/collection/list +use cardano/assets.{PolicyId} +use cardano/transaction.{OutputReference, Transaction} +use types.{MintPolarity, RBurn, RMint} + +validator auth_token(utxo_ref: OutputReference) { + mint(redeemer: MintPolarity, policy_id: PolicyId, self: Transaction) { + expect [Pair(_asset_name, quantity)] = + self.mint + |> assets.tokens(policy_id) + |> dict.to_pairs() + let Transaction { inputs, .. } = self + when redeemer is { + RMint -> { + let is_output_consumed = + list.any(inputs, fn(input) { input.output_reference == utxo_ref }) + is_output_consumed? && quantity == 1 + } + RBurn -> quantity == -1 + } + } + + else(_) { + fail + } +} diff --git a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak new file mode 100644 index 00000000..544ee930 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak @@ -0,0 +1,175 @@ +use aiken/collection/pairs.{has_key} +use cardano/address.{Credential, Script} +use cardano/assets.{PolicyId, lovelace_of} +use cardano/transaction.{OutputReference, Transaction, find_input} +use cocktail.{ + inputs_at_with_policy, key_signed, only_minted_token, output_inline_datum, + outputs_at_with_policy, policy_only_minted_token, valid_after, valid_before, + value_length, +} +use types.{ + CompleteCrowdfund, ContributeFund, ContributorWithdrawal, CrowdfundDatum, + CrowdfundRedeemer, RemoveEmptyInstance, +} +use utils.{check_fundraise_target, outputs_at_with_lovelace} + +validator crowdfund(auth_token: PolicyId, proposer_key_hash: ByteArray) { + spend( + datum_opt: Option, + redeemer: CrowdfundRedeemer, + input: OutputReference, + self: Transaction, + ) { + let Transaction { + inputs, + validity_range, + mint, + outputs, + withdrawals, + extra_signatories, + .. + } = self + + expect Some(own_input) = find_input(inputs, input) + expect Some(auth_input_datum) = datum_opt + + let current_address = own_input.output.address + + // check only 1 auth toke input from current address + expect [auth_input] = + inputs_at_with_policy(inputs, current_address, auth_token) + + let CrowdfundDatum { + current_fundraised_amount, + fundraise_target, + allow_over_subscription, + deadline, + completion_script, + share_token, + min_charge, + fee_address, + expiry_buffer, + .. 
+ } = auth_input_datum + when redeemer is { + ContributeFund -> { + expect [auth_output] = + outputs_at_with_policy(outputs, current_address, auth_token) + + let lovelace_from_auth_input = auth_input.output.value |> lovelace_of() + let lovelace_from_auth_output = auth_output.value |> lovelace_of() + expect auth_output_datum: CrowdfundDatum = + output_inline_datum(auth_output) + + let fundraise_added = + lovelace_from_auth_output - lovelace_from_auth_input + let fundraise_check = + (fundraise_added == auth_output_datum.current_fundraised_amount - current_fundraised_amount)? && (fundraise_added >= 2000000)? + + let fundraise_target_check = + check_fundraise_target( + allow_over_subscription, + fundraise_target, + auth_output_datum.current_fundraised_amount, + ) + + let validity_check = valid_before(validity_range, deadline) + + let output_datum_check = + auth_output_datum == CrowdfundDatum { + ..auth_input_datum, + current_fundraised_amount: current_fundraised_amount + fundraise_added, + } + + let is_auth_output_value_clean = value_length(auth_output.value) == 2 + fundraise_check? && fundraise_target_check? && validity_check? && output_datum_check? && is_auth_output_value_clean? && only_minted_token( + mint, + share_token, + completion_script, + fundraise_added, + )? + } + + CompleteCrowdfund -> { + let input_lovelace_check = + lovelace_of(auth_input.output.value) >= min_charge + current_fundraised_amount + + expect [_] = outputs_at_with_lovelace(outputs, fee_address, min_charge) + + let fundraise_check = current_fundraised_amount >= fundraise_target + let completion_script_withdrawal_credential: Credential = + Script(completion_script) + let withdrawal_script_check = + withdrawals + |> has_key(completion_script_withdrawal_credential) + fundraise_check? && withdrawal_script_check? && input_lovelace_check? && policy_only_minted_token( + mint, + auth_token, + completion_script, + -1, + )? + } + + ContributorWithdrawal -> { + let validity_check = + valid_after(validity_range, deadline + expiry_buffer) + let fund_check = current_fundraised_amount <= fundraise_target + + expect [auth_output] = + outputs_at_with_policy(outputs, current_address, auth_token) + + let lovelace_from_auth_input = auth_input.output.value |> lovelace_of() + let lovelace_from_auth_output = auth_output.value |> lovelace_of() + + let lovelace_withdrawn = + lovelace_from_auth_output - lovelace_from_auth_input + + let lovelace_withdrawn_check = lovelace_withdrawn < 0 + + expect auth_output_datum: CrowdfundDatum = + output_inline_datum(auth_output) + let output_datum_check = + auth_output_datum == CrowdfundDatum { + ..auth_input_datum, + current_fundraised_amount: current_fundraised_amount + lovelace_withdrawn, + } + + let is_auth_output_value_clean = value_length(auth_output.value) == 2 + (validity_check || fund_check)? && lovelace_withdrawn_check? && output_datum_check? && is_auth_output_value_clean? && only_minted_token( + mint, + share_token, + completion_script, + lovelace_withdrawn, + )? + } + + RemoveEmptyInstance -> { + let validity_check = valid_after(validity_range, deadline) + + let token_burnt_check = + if current_fundraised_amount > 0 { + policy_only_minted_token( + mint, + share_token, + completion_script, + -current_fundraised_amount, + )? && policy_only_minted_token( + mint, + auth_token, + completion_script, + -1, + )? + } else { + only_minted_token(mint, auth_token, completion_script, -1)? + } + let proposer_key_signed_check = + key_signed(extra_signatories, proposer_key_hash) + validity_check? 
&& token_burnt_check? && proposer_key_signed_check? + } + } + } + + else(_) { + fail + } +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak b/src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak new file mode 100644 index 00000000..b113dd1c --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak @@ -0,0 +1,63 @@ +use auth_token/mint as auth_token_mint +use cardano/assets.{add, from_asset, zero} +use cardano/transaction.{Transaction, placeholder} +use mocktail.{ + complete, mint, mock_policy_id, mock_pub_key_address, mock_tx_hash, + mock_utxo_ref, mocktail_tx, tx_in, +} +use tests/utils.{mock_auth_token} +use types.{RBurn, RMint} + +test s1_mint_success_mint() { + let redeemer = RMint + let input_utxo = mock_utxo_ref(0, 1) + let policy_id = mock_auth_token + + let tx = + mocktail_tx() + |> mint(True, 1, policy_id, "") + |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) + |> complete() + + auth_token_mint.auth_token.mint(input_utxo, redeemer, policy_id, tx) +} + +test s1_mint_fail_mint_no_utxo_ref_supply() { + let redeemer = RMint + let policy_id = mock_auth_token + + let tx = + mocktail_tx() + |> mint(True, 1, policy_id, "") + |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) + |> complete() + !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_success_burn() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", -1) } + auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_success_burn_with_other_minting() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = + Transaction { + ..placeholder, + mint: from_asset(policy_id, "", -1) |> add(mock_policy_id(999), "", 1), + } + auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_fail_burn_with_mint() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", 1) } + !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak b/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak new file mode 100644 index 00000000..2b68446f --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak @@ -0,0 +1,1177 @@ +use cardano/assets.{add, from_lovelace} +use cardano/transaction.{Transaction} +use crowdfund/spend as crowdfund_spend +use mocktail.{ + add_extra_signatory, complete, invalid_before, invalid_hereafter, mint, + mock_policy_id, mock_pub_key_address, mock_tx_hash, mock_utxo_ref, mocktail_tx, + script_withdrawal, tx_in, tx_in_inline_datum, tx_out, tx_out_inline_datum, +} +use tests/utils.{ + mock_auth_token, mock_completion_script, mock_contribute_min_fundraised_amount, + mock_contribute_over_fundraised_amount, mock_crowdfund_address, + mock_crowdfund_datum, mock_current_fundraised_amount, mock_deadline, + mock_expiry_buffer, mock_extra_fundraised_amount, mock_fee_address, + mock_fundraise_target, mock_min_charge, mock_proposer_key_hash, + mock_share_token, +} +use types.{ + CompleteCrowdfund, ContributeFund, ContributorWithdrawal, RemoveEmptyInstance, +} + +type ContributeFundTestCase { + is_only_one_auth_inputed: Bool, + 
is_only_one_auth_outputed: Bool, + is_auth_output_datum_correct: Bool, + is_auth_output_value_clean: Bool, + is_deadline_not_passed: Bool, + is_shares_minted: Bool, +} + +fn mock_contribute_fund_tx( + test_case: ContributeFundTestCase, + current_fundraised_amount: Int, + contribute_amount: Int, + allow_over_subscription: Bool, +) -> Transaction { + let ContributeFundTestCase { + is_only_one_auth_inputed, + is_only_one_auth_outputed, + is_auth_output_datum_correct, + is_auth_output_value_clean, + is_deadline_not_passed, + is_shares_minted, + } = test_case + + let input_value = + from_lovelace(current_fundraised_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + let output_value = + from_lovelace(current_fundraised_amount + contribute_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + let auth_correct_output_datum = + mock_crowdfund_datum( + current_fundraised_amount + contribute_amount, + allow_over_subscription, + ) + + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) + |> tx_in_inline_datum( + True, + mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), + ) + |> tx_in( + !is_only_one_auth_inputed, + mock_tx_hash(0), + 1, + input_value, + mock_crowdfund_address, + ) + |> tx_in( + True, + mock_tx_hash(1), + 0, + from_lovelace(contribute_amount), + mock_pub_key_address(0, None), + ) + |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) + |> tx_out( + !is_auth_output_value_clean, + mock_crowdfund_address, + output_value |> add(mock_policy_id(999), mock_completion_script, 1), + ) + |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) + |> tx_out_inline_datum( + !is_auth_output_datum_correct, + mock_crowdfund_datum( + current_fundraised_amount + contribute_amount + 9999999, + allow_over_subscription, + ), + ) + |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) + |> invalid_hereafter(is_deadline_not_passed, mock_deadline - 3600 * 24) + |> invalid_hereafter(!is_deadline_not_passed, mock_deadline + 3600 * 24) + |> mint( + is_shares_minted, + contribute_amount, + mock_share_token, + mock_completion_script, + ) + |> mint( + !is_shares_minted, + contribute_amount + 9999999, + mock_share_token, + mock_completion_script, + ) + |> complete() +} + +test s3_spend_success_contribute_fund_with_not_allow_over_subscription() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_contribute_fund_with_allow_over_subscription() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + True, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, 
True)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_contribute_fund_with_allow_over_subscription_and_over_fundraised() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_over_fundraised_amount, + True, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, True)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_not_allow_over_subscription_but_over_fundraised() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_over_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_over_current_fundraised() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_more_than_one_auth_inputed() fail { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: False, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_more_than_one_auth_outputed() fail { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: False, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_incorrect_auth_output_datum() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + 
is_auth_output_datum_correct: False, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_auth_output_not_clean() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: False, + is_deadline_not_passed: True, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_deadline_passed() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: False, + is_shares_minted: True, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contribute_fund_with_wrong_shares_minted() { + let tx = + mock_contribute_fund_tx( + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: False, + }, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + ContributeFund, + mock_utxo_ref(0, 0), + tx, + ) +} + +type CompleteCrowdfundTestCase { + is_only_one_auth_inputed: Bool, + is_output_to_fee_address_correct: Bool, + is_auth_burnt: Bool, + is_completion_script_executed: Bool, +} + +fn mock_complete_crowdfund_tx( + test_case: CompleteCrowdfundTestCase, + current_fundraised_amount: Int, + allow_over_subscription: Bool, +) -> Transaction { + let CompleteCrowdfundTestCase { + is_only_one_auth_inputed, + is_output_to_fee_address_correct, + is_auth_burnt, + is_completion_script_executed, + } = test_case + + let input_value = + from_lovelace(current_fundraised_amount + mock_min_charge) + |> add(mock_auth_token, mock_completion_script, 1) + + let output_value = from_lovelace(mock_min_charge) + + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) + |> tx_in_inline_datum( + True, + mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), + ) + |> tx_in( + !is_only_one_auth_inputed, + mock_tx_hash(0), + 1, + input_value, + mock_crowdfund_address, + ) + |> tx_out(is_output_to_fee_address_correct, mock_fee_address, output_value) + |> tx_out( + !is_output_to_fee_address_correct, + mock_fee_address, + from_lovelace(mock_extra_fundraised_amount - 10), + ) + |> 
script_withdrawal( + is_completion_script_executed, + mock_completion_script, + 2_000_000, + ) + |> mint(is_auth_burnt, -1, mock_auth_token, mock_completion_script) + |> complete() +} + +test s3_spend_success_complete_crowdfund_with_amount_equal_to_target() { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: True, + is_auth_burnt: True, + is_completion_script_executed: True, + }, + mock_fundraise_target, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_complete_crowdfund_with_amount_larger_than_target() { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: True, + is_auth_burnt: True, + is_completion_script_executed: True, + }, + mock_contribute_over_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_complete_crowdfund_with_amount_less_than_target() { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: True, + is_auth_burnt: True, + is_completion_script_executed: True, + }, + mock_fundraise_target - 1, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target - 1, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_complete_crowdfund_with_more_than_one_auth_inputed() fail { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: False, + is_output_to_fee_address_correct: True, + is_auth_burnt: True, + is_completion_script_executed: True, + }, + mock_fundraise_target, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_complete_crowdfund_with_incorrect_fee_output() fail { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: False, + is_auth_burnt: True, + is_completion_script_executed: True, + }, + mock_fundraise_target, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_complete_crowdfund_with_no_auth_burnt() { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: True, + is_auth_burnt: False, + is_completion_script_executed: True, + }, + mock_fundraise_target, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_complete_crowdfund_with_no_completion_script_executed() { + let tx = + mock_complete_crowdfund_tx( + CompleteCrowdfundTestCase { + 
is_only_one_auth_inputed: True, + is_output_to_fee_address_correct: True, + is_auth_burnt: True, + is_completion_script_executed: False, + }, + mock_fundraise_target, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_fundraise_target, False)), + CompleteCrowdfund, + mock_utxo_ref(0, 0), + tx, + ) +} + +type ContributorWithdrawalTestCase { + is_only_one_auth_inputed: Bool, + is_only_one_auth_outputed: Bool, + is_auth_output_datum_correct: Bool, + is_auth_output_value_clean: Bool, + is_auth_unlock_value_correct: Bool, + is_deadline_passed: Bool, + is_shares_burnt: Bool, +} + +fn mock_contributor_withdrawal_tx( + test_case: ContributorWithdrawalTestCase, + current_fundraised_amount: Int, + withdraw_amount: Int, + allow_over_subscription: Bool, +) -> Transaction { + let ContributorWithdrawalTestCase { + is_only_one_auth_inputed, + is_only_one_auth_outputed, + is_auth_output_datum_correct, + is_auth_output_value_clean, + is_auth_unlock_value_correct, + is_deadline_passed, + is_shares_burnt, + } = test_case + + let input_value = + from_lovelace(current_fundraised_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + let output_value = + if is_auth_unlock_value_correct { + from_lovelace(current_fundraised_amount - withdraw_amount) + |> add(mock_auth_token, mock_completion_script, 1) + } else { + from_lovelace(current_fundraised_amount - withdraw_amount + 10) + |> add(mock_auth_token, mock_completion_script, 1) + } + + let auth_correct_output_datum = + mock_crowdfund_datum( + current_fundraised_amount - withdraw_amount, + allow_over_subscription, + ) + + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) + |> tx_in_inline_datum( + True, + mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), + ) + |> tx_in( + !is_only_one_auth_inputed, + mock_tx_hash(0), + 1, + input_value, + mock_crowdfund_address, + ) + |> tx_out(True, mock_pub_key_address(0, None), from_lovelace(withdraw_amount)) + |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) + |> tx_out( + !is_auth_output_value_clean, + mock_crowdfund_address, + output_value |> add(mock_policy_id(999), mock_completion_script, 1), + ) + |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) + |> tx_out_inline_datum( + !is_auth_output_datum_correct, + mock_crowdfund_datum( + current_fundraised_amount - withdraw_amount + 9999999, + allow_over_subscription, + ), + ) + |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) + |> invalid_before( + is_deadline_passed, + mock_deadline + mock_expiry_buffer + 3600 * 24, + ) + |> invalid_before( + !is_deadline_passed, + mock_deadline + mock_expiry_buffer - 3600 * 24, + ) + |> mint( + is_shares_burnt, + -withdraw_amount, + mock_share_token, + mock_completion_script, + ) + |> mint( + !is_shares_burnt, + -withdraw_amount + 9999999, + mock_share_token, + mock_completion_script, + ) + |> complete() +} + +test s3_spend_success_contributor_withdraw_with_deadline_passed_but_fundraised_reach_target() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + 
+ crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_contributor_withdraw_with_deadline_passed_and_fundraised_less_than_target() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_current_fundraised_amount * 2, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount * 2, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_contributor_withdraw_with_deadline_not_passed_but_fundraised_less_than_target() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: False, + is_shares_burnt: True, + }, + mock_current_fundraised_amount * 2, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount * 2, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_deadline_not_passed_and_fundraised_reached_target() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: False, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_more_thanone_auth_inputed() fail { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: False, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_more_thanone_auth_outputed() fail { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: False, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + 
mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_incorrect_output_datum() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: False, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_auth_output_value_not_clean() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: False, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_incorrect_auth_unlock_value() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: False, + is_deadline_passed: True, + is_shares_burnt: True, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_contributor_withdraw_with_incorrect_shares_burnt() { + let tx = + mock_contributor_withdrawal_tx( + ContributorWithdrawalTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_auth_unlock_value_correct: True, + is_deadline_passed: True, + is_shares_burnt: False, + }, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), + ContributorWithdrawal, + mock_utxo_ref(0, 0), + tx, + ) +} + +type RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: Bool, + is_deadline_passed: Bool, + is_shares_burnt: Bool, + is_auth_burnt: Bool, + is_proposer_signed: Bool, +} + +fn mock_remove_empty_instance_tx( + test_case: RemoveEmptyInstanceTestCase, + current_fundraised_amount: Int, + allow_over_subscription: Bool, +) -> Transaction { + let RemoveEmptyInstanceTestCase { + 
is_only_one_auth_inputed, + is_deadline_passed, + is_shares_burnt, + is_auth_burnt, + is_proposer_signed, + } = test_case + + let input_value = + from_lovelace(current_fundraised_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) + |> tx_in_inline_datum( + True, + mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), + ) + |> tx_in( + !is_only_one_auth_inputed, + mock_tx_hash(0), + 1, + input_value, + mock_crowdfund_address, + ) + |> invalid_before(is_deadline_passed, mock_deadline + 3600 * 24) + |> invalid_before(!is_deadline_passed, mock_deadline - 3600 * 24) + |> mint( + is_shares_burnt, + -current_fundraised_amount, + mock_share_token, + mock_completion_script, + ) + |> mint( + !is_shares_burnt, + -current_fundraised_amount + 9999999, + mock_share_token, + mock_completion_script, + ) + |> mint(is_auth_burnt, -1, mock_auth_token, mock_completion_script) + |> complete() + |> add_extra_signatory(is_proposer_signed, mock_proposer_key_hash) +} + +test s3_spend_success_remove_empty_instance_wih_zero_fund() { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: True, + is_deadline_passed: True, + is_shares_burnt: True, + is_auth_burnt: True, + is_proposer_signed: True, + }, + 0, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(0, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_success_remove_empty_instance_wih_some_fund() { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: True, + is_deadline_passed: True, + is_shares_burnt: True, + is_auth_burnt: True, + is_proposer_signed: True, + }, + mock_current_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_remove_empty_instance_wih_more_than_one_auth_inputed() fail { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: False, + is_deadline_passed: True, + is_shares_burnt: True, + is_auth_burnt: True, + is_proposer_signed: True, + }, + mock_current_fundraised_amount, + False, + ) + + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_remove_empty_instance_wih_deadline_not_passed() { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: True, + is_deadline_passed: False, + is_shares_burnt: True, + is_auth_burnt: True, + is_proposer_signed: True, + }, + mock_current_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_remove_empty_instance_wih_share_not_burnt() { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: True, + is_deadline_passed: True, + is_shares_burnt: False, + is_auth_burnt: True, + is_proposer_signed: True, + }, + mock_current_fundraised_amount, + False, + ) + + 
!crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +test s3_spend_fail_remove_empty_instance_wih_authe_not_burnt() { + let tx = + mock_remove_empty_instance_tx( + RemoveEmptyInstanceTestCase { + is_only_one_auth_inputed: True, + is_deadline_passed: True, + is_shares_burnt: True, + is_auth_burnt: False, + is_proposer_signed: True, + }, + mock_current_fundraised_amount, + False, + ) + + !crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak b/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak new file mode 100644 index 00000000..1bbcc7e2 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak @@ -0,0 +1,329 @@ +use cardano/assets.{add, from_lovelace} +use cardano/transaction.{OutputReference, Spend, Transaction} +use crowdfund/spend as crowdfund_spend +use mocktail.{ + add_redeemer, complete, invalid_hereafter, mint, mock_policy_id, + mock_pub_key_address, mock_tx_hash, mock_utxo_ref, mocktail_tx, tx_in, + tx_in_inline_datum, tx_out, tx_out_inline_datum, +} +use share_token/mint as share_token_mint +use tests/utils.{ + mock_auth_token, mock_completion_script, mock_contribute_min_fundraised_amount, + mock_contribute_over_fundraised_amount, mock_crowdfund_address, + mock_crowdfund_datum, mock_current_fundraised_amount, mock_deadline, + mock_proposer_key_hash, mock_share_token, +} +use types.{CompleteCrowdfund, ContributeFund, CrowdfundRedeemer, RMint} + +type ContributeFundTestCase { + is_only_one_auth_inputed: Bool, + is_only_one_auth_outputed: Bool, + is_auth_output_datum_correct: Bool, + is_auth_output_value_clean: Bool, + is_deadline_not_passed: Bool, + is_shares_minted: Bool, +} + +fn mock_contribute_fund_tx( + test_case: ContributeFundTestCase, + current_fundraised_amount: Int, + contribute_amount: Int, + allow_over_subscription: Bool, + auth_token_redeemer: CrowdfundRedeemer, +) -> Transaction { + let ContributeFundTestCase { + is_only_one_auth_inputed, + is_only_one_auth_outputed, + is_auth_output_datum_correct, + is_auth_output_value_clean, + is_deadline_not_passed, + is_shares_minted, + } = test_case + let auth_token_redeemer_data: Data = auth_token_redeemer + + let input_value = + from_lovelace(current_fundraised_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + let output_value = + from_lovelace(current_fundraised_amount + contribute_amount) + |> add(mock_auth_token, mock_completion_script, 1) + + let auth_correct_output_datum = + mock_crowdfund_datum( + current_fundraised_amount + contribute_amount, + allow_over_subscription, + ) + + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) + |> tx_in_inline_datum( + True, + mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), + ) + |> tx_in( + !is_only_one_auth_inputed, + mock_tx_hash(0), + 1, + input_value, + mock_crowdfund_address, + ) + |> tx_in( + True, + mock_tx_hash(1), + 0, + from_lovelace(contribute_amount), + mock_pub_key_address(0, None), + ) + |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) + |> tx_out( + 
!is_auth_output_value_clean, + mock_crowdfund_address, + output_value |> add(mock_policy_id(999), mock_completion_script, 1), + ) + |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) + |> tx_out_inline_datum( + !is_auth_output_datum_correct, + mock_crowdfund_datum( + current_fundraised_amount + contribute_amount + 9999999, + allow_over_subscription, + ), + ) + |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) + |> invalid_hereafter(is_deadline_not_passed, mock_deadline - 3600 * 24) + |> invalid_hereafter(!is_deadline_not_passed, mock_deadline + 3600 * 24) + |> mint( + is_shares_minted, + contribute_amount, + mock_share_token, + mock_completion_script, + ) + |> mint( + !is_shares_minted, + contribute_amount + 9999999, + mock_share_token, + mock_completion_script, + ) + |> complete() + |> add_redeemer( + True, + Pair( + Spend( + OutputReference { transaction_id: mock_tx_hash(0), output_index: 0 }, + ), + auth_token_redeemer_data, + ), + ) +} + +fn check_all_scripts( + test_case: ContributeFundTestCase, + current_fundraised_amount: Int, + contribute_amount: Int, + allow_over_subscription: Bool, + auth_token_redeemer: CrowdfundRedeemer, +) { + let tx = + mock_contribute_fund_tx( + test_case, + current_fundraised_amount, + contribute_amount, + allow_over_subscription, + auth_token_redeemer, + ) + + let check_auth_spend = + crowdfund_spend.crowdfund.spend( + mock_auth_token, + mock_proposer_key_hash, + Some( + mock_crowdfund_datum( + mock_current_fundraised_amount, + allow_over_subscription, + ), + ), + auth_token_redeemer, + mock_utxo_ref(0, 0), + tx, + ) + let check_share_mint = + share_token_mint.share_token.mint( + mock_auth_token, + RMint, + mock_share_token, + tx, + ) + + check_auth_spend? && check_share_mint? 
+} + +test contribute_fund_fail_with_not_allow_over_subscription_but_over_fundraised() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + !check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_over_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_over_current_fundraised() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + !check_all_scripts( + test_case, + mock_contribute_over_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_wrong_redeemer() fail { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + CompleteCrowdfund, + ) +} + +test contribute_fund_fail_with_more_than_one_auth_inputed() fail { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: False, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_more_than_one_auth_outputed() fail { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: False, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_incorrect_auth_output_datum() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: False, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: True, + } + !check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_auth_output_not_clean() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: False, + is_deadline_not_passed: True, + is_shares_minted: True, + } + !check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_deadline_passed() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: False, + is_shares_minted: True, + } + 
!check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} + +test contribute_fund_fail_with_wrong_shares_minted() { + let test_case = + ContributeFundTestCase { + is_only_one_auth_inputed: True, + is_only_one_auth_outputed: True, + is_auth_output_datum_correct: True, + is_auth_output_value_clean: True, + is_deadline_not_passed: True, + is_shares_minted: False, + } + !check_all_scripts( + test_case, + mock_current_fundraised_amount, + mock_contribute_min_fundraised_amount, + False, + ContributeFund, + ) +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/utils.ak b/src/components/multisig/aiken-crowdfund/validators/tests/utils.ak new file mode 100644 index 00000000..c2ee6a99 --- /dev/null +++ b/src/components/multisig/aiken-crowdfund/validators/tests/utils.ak @@ -0,0 +1,64 @@ +use cardano/address.{from_script} +use mocktail.{ + mock_policy_id, mock_pub_key_hash, mock_script_hash, + mock_script_stake_key_hash, +} +use types.{CrowdfundDatum} + +pub const mock_auth_token = mock_policy_id(0) + +pub const mock_share_token = mock_policy_id(1) + +pub const mock_completion_script = mock_script_hash(0) + +pub const mock_crowdfund_spend_script_hash = mock_script_hash(1) + +pub const mock_crowdfund_stake_script_hash = mock_script_stake_key_hash(0) + +pub const mock_crowdfund_address = from_script(mock_crowdfund_spend_script_hash) + +pub const mock_fee_address = from_script("fee_address") + +pub const mock_fundraise_target = 100000000000 + +pub const mock_deadline = 1750735607 + +pub const mock_expiry_buffer = 3600 * 24 + +pub const mock_min_charge = 10 + +pub fn mock_crowdfund_datum( + current_fundraised_amount: Int, + allow_over_subscription: Bool, +) { + CrowdfundDatum { + completion_script: mock_completion_script, + share_token: mock_share_token, + crowdfund_address: mock_crowdfund_address, + fundraise_target: mock_fundraise_target, + current_fundraised_amount, + allow_over_subscription, + deadline: mock_deadline, + expiry_buffer: mock_expiry_buffer, + fee_address: mock_fee_address, + min_charge: mock_min_charge, + } +} + +pub const mock_current_fundraised_amount = 2000000 + +pub const mock_extra_fundraised_amount = 4000000 + +pub const mock_contribute_less_than_min_fundraised_amount = 1999999 + +pub const mock_contribute_min_fundraised_amount = 2000000 + +pub const mock_contribute_over_fundraised_amount = 100000000001 + +pub const auth_allow_over_subscription = + mock_crowdfund_datum(mock_current_fundraised_amount, True) + +pub const auth_not_allow_over_subscription = + mock_crowdfund_datum(mock_current_fundraised_amount, False) + +pub const mock_proposer_key_hash = mock_pub_key_hash(0) diff --git a/src/components/multisig/common.ts b/src/components/multisig/common.ts new file mode 100644 index 00000000..e4ce1fc0 --- /dev/null +++ b/src/components/multisig/common.ts @@ -0,0 +1,221 @@ +import { + BrowserWallet, + IFetcher, + IWallet, + LanguageVersion, + MeshTxBuilder, + MeshWallet, + serializePlutusScript, + UTxO, +} from "@meshsdk/core"; + +export type MeshTxInitiatorInput = { + mesh: MeshTxBuilder; + fetcher?: IFetcher; + wallet?: IWallet; + networkId?: number; + stakeCredential?: string; + version?: number; +}; + +export class MeshTxInitiator { + mesh: MeshTxBuilder; + fetcher?: IFetcher; + wallet?: IWallet; + stakeCredential?: string; + networkId = 0; + version = 2; + languageVersion: LanguageVersion = "V2"; + + constructor({ + mesh, + fetcher, + wallet, + networkId = 0, + stakeCredential = 
"c08f0294ead5ab7ae0ce5471dd487007919297ba95230af22f25e575", + version = 2, + }: MeshTxInitiatorInput) { + this.mesh = mesh; + if (fetcher) { + this.fetcher = fetcher; + } + if (wallet) { + this.wallet = wallet; + } + + this.networkId = networkId; + switch (this.networkId) { + case 1: + this.mesh.setNetwork("mainnet"); + break; + default: + this.mesh.setNetwork("preprod"); + } + + this.version = version; + switch (this.version) { + case 1: + this.languageVersion = "V2"; + break; + default: + this.languageVersion = "V3"; + } + + if (stakeCredential) { + this.stakeCredential = stakeCredential; + } + } + + getScriptAddress = (scriptCbor: string) => { + const { address } = serializePlutusScript( + { code: scriptCbor, version: this.languageVersion }, + this.stakeCredential, + this.networkId, + ); + return address; + }; + + protected signSubmitReset = async () => { + const signedTx = this.mesh.completeSigning(); + const txHash = await this.mesh.submitTx(signedTx); + this.mesh.reset(); + return txHash; + }; + + protected queryUtxos = async (walletAddress: string): Promise => { + if (this.fetcher) { + const utxos = await this.fetcher.fetchAddressUTxOs(walletAddress); + return utxos; + } + return []; + }; + + protected getWalletDappAddress = async () => { + if (this.wallet) { + const usedAddresses = await this.wallet.getUsedAddresses(); + if (usedAddresses.length > 0) { + return usedAddresses[0]; + } + const unusedAddresses = await this.wallet.getUnusedAddresses(); + if (unusedAddresses.length > 0) { + return unusedAddresses[0]; + } + } + return ""; + }; + + protected getWalletCollateral = async (): Promise => { + if (this.wallet) { + const utxos = await this.wallet.getCollateral(); + return utxos[0]; + } + return undefined; + }; + + protected getWalletUtxosWithMinLovelace = async ( + lovelace: number, + providedUtxos: UTxO[] = [], + ) => { + let utxos: UTxO[] = providedUtxos; + if (this.wallet && (!providedUtxos || providedUtxos.length === 0)) { + utxos = await this.wallet.getUtxos(); + } + return utxos.filter((u) => { + const lovelaceAmount = u.output.amount.find( + (a: any) => a.unit === "lovelace", + )?.quantity; + return Number(lovelaceAmount) > lovelace; + }); + }; + + protected getWalletUtxosWithToken = async ( + assetHex: string, + userUtxos: UTxO[] = [], + ) => { + let utxos: UTxO[] = userUtxos; + if (this.wallet && userUtxos.length === 0) { + utxos = await this.wallet.getUtxos(); + } + return utxos.filter((u) => { + const assetAmount = u.output.amount.find( + (a: any) => a.unit === assetHex, + )?.quantity; + return Number(assetAmount) >= 1; + }); + }; + + protected getAddressUtxosWithMinLovelace = async ( + walletAddress: string, + lovelace: number, + providedUtxos: UTxO[] = [], + ) => { + let utxos: UTxO[] = providedUtxos; + if (this.fetcher && (!providedUtxos || providedUtxos.length === 0)) { + utxos = await this.fetcher.fetchAddressUTxOs(walletAddress); + } + return utxos.filter((u) => { + const lovelaceAmount = u.output.amount.find( + (a: any) => a.unit === "lovelace", + )?.quantity; + return Number(lovelaceAmount) > lovelace; + }); + }; + + protected getAddressUtxosWithToken = async ( + walletAddress: string, + assetHex: string, + userUtxos: UTxO[] = [], + ) => { + let utxos: UTxO[] = userUtxos; + if (this.fetcher && userUtxos.length === 0) { + utxos = await this.fetcher.fetchAddressUTxOs(walletAddress); + } + return utxos.filter((u) => { + const assetAmount = u.output.amount.find( + (a: any) => a.unit === assetHex, + )?.quantity; + return Number(assetAmount) >= 1; + }); + }; + + 
protected getWalletInfoForTx = async () => { + const utxos = await this.wallet?.getUtxos(); + const collateral = await this.getWalletCollateral(); + const walletAddress = await this.getWalletDappAddress(); + if (!utxos || utxos?.length === 0) { + throw new Error("No utxos found"); + } + if (!collateral) { + throw new Error("No collateral found"); + } + if (!walletAddress) { + throw new Error("No wallet address found"); + } + return { utxos, collateral, walletAddress }; + }; + + protected _getUtxoByTxHash = async ( + txHash: string, + scriptCbor?: string, + ): Promise => { + if (this.fetcher) { + const utxos = await this.fetcher?.fetchUTxOs(txHash); + let scriptUtxo = utxos[0]; + + if (scriptCbor) { + const scriptAddr = serializePlutusScript( + { code: scriptCbor, version: this.languageVersion }, + this.stakeCredential, + this.networkId, + ).address; + scriptUtxo = + utxos.filter((utxo) => utxo.output.address === scriptAddr)[0] || + utxos[0]; + } + + return scriptUtxo; + } + + return undefined; + }; +} From ff69f36741eeb4cb61d0b13b09cf962ac6765a91 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Mon, 6 Oct 2025 20:34:54 +0200 Subject: [PATCH 02/15] s01 --- .../validators/auth_token/mint.ak | 4 +-- .../aiken-crowdfund/validators/proxy/spend.ak | 35 ++----------------- 2 files changed, 4 insertions(+), 35 deletions(-) diff --git a/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak b/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak index 57706664..11e14cf2 100644 --- a/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak +++ b/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak @@ -15,9 +15,9 @@ validator auth_token(utxo_ref: OutputReference) { RMint -> { let is_output_consumed = list.any(inputs, fn(input) { input.output_reference == utxo_ref }) - is_output_consumed? && quantity == 1 + is_output_consumed? && quantity == 10 } - RBurn -> quantity == -1 + RBurn -> quantity == -10 } } diff --git a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak index 544ee930..04f2fc04 100644 --- a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak +++ b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak @@ -39,18 +39,6 @@ validator crowdfund(auth_token: PolicyId, proposer_key_hash: ByteArray) { expect [auth_input] = inputs_at_with_policy(inputs, current_address, auth_token) - let CrowdfundDatum { - current_fundraised_amount, - fundraise_target, - allow_over_subscription, - deadline, - completion_script, - share_token, - min_charge, - fee_address, - expiry_buffer, - .. - } = auth_input_datum when redeemer is { ContributeFund -> { expect [auth_output] = @@ -144,27 +132,8 @@ validator crowdfund(auth_token: PolicyId, proposer_key_hash: ByteArray) { } RemoveEmptyInstance -> { - let validity_check = valid_after(validity_range, deadline) - - let token_burnt_check = - if current_fundraised_amount > 0 { - policy_only_minted_token( - mint, - share_token, - completion_script, - -current_fundraised_amount, - )? && policy_only_minted_token( - mint, - auth_token, - completion_script, - -1, - )? - } else { - only_minted_token(mint, auth_token, completion_script, -1)? - } - let proposer_key_signed_check = - key_signed(extra_signatories, proposer_key_hash) - validity_check? && token_burnt_check? && proposer_key_signed_check? + + let token_burnt_check = only_minted_token(mint, auth_token, completion_script, -10)? 
} } } From 2cce24671eccf5f46f69d345d16e1d2db2b296d0 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 7 Oct 2025 09:21:17 +0200 Subject: [PATCH 03/15] s02 --- .../multisig/aiken-crowdfund/lib/types.ak | 16 +-- .../aiken-crowdfund/validators/proxy/spend.ak | 103 +----------------- 2 files changed, 4 insertions(+), 115 deletions(-) diff --git a/src/components/multisig/aiken-crowdfund/lib/types.ak b/src/components/multisig/aiken-crowdfund/lib/types.ak index 0232fc7a..1df217f3 100644 --- a/src/components/multisig/aiken-crowdfund/lib/types.ak +++ b/src/components/multisig/aiken-crowdfund/lib/types.ak @@ -6,21 +6,7 @@ pub type MintPolarity { } pub type CrowdfundRedeemer { - ContributeFund - CompleteCrowdfund - ContributorWithdrawal + RemoveEmptyInstance } -pub type CrowdfundDatum { - completion_script: ByteArray, - share_token: ByteArray, - crowdfund_address: Address, - fundraise_target: Int, - current_fundraised_amount: Int, - allow_over_subscription: Bool, - deadline: Int, - expiry_buffer: Int, - fee_address: Address, - min_charge: Int, -} diff --git a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak index 04f2fc04..b51fad5f 100644 --- a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak +++ b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak @@ -13,9 +13,8 @@ use types.{ } use utils.{check_fundraise_target, outputs_at_with_lovelace} -validator crowdfund(auth_token: PolicyId, proposer_key_hash: ByteArray) { +validator proxy(auth_token: PolicyId) { spend( - datum_opt: Option, redeemer: CrowdfundRedeemer, input: OutputReference, self: Transaction, @@ -30,106 +29,10 @@ validator crowdfund(auth_token: PolicyId, proposer_key_hash: ByteArray) { .. } = self - expect Some(own_input) = find_input(inputs, input) - expect Some(auth_input_datum) = datum_opt - - let current_address = own_input.output.address - - // check only 1 auth toke input from current address - expect [auth_input] = - inputs_at_with_policy(inputs, current_address, auth_token) - when redeemer is { - ContributeFund -> { - expect [auth_output] = - outputs_at_with_policy(outputs, current_address, auth_token) - - let lovelace_from_auth_input = auth_input.output.value |> lovelace_of() - let lovelace_from_auth_output = auth_output.value |> lovelace_of() - expect auth_output_datum: CrowdfundDatum = - output_inline_datum(auth_output) - - let fundraise_added = - lovelace_from_auth_output - lovelace_from_auth_input - let fundraise_check = - (fundraise_added == auth_output_datum.current_fundraised_amount - current_fundraised_amount)? && (fundraise_added >= 2000000)? - - let fundraise_target_check = - check_fundraise_target( - allow_over_subscription, - fundraise_target, - auth_output_datum.current_fundraised_amount, - ) - - let validity_check = valid_before(validity_range, deadline) - - let output_datum_check = - auth_output_datum == CrowdfundDatum { - ..auth_input_datum, - current_fundraised_amount: current_fundraised_amount + fundraise_added, - } - - let is_auth_output_value_clean = value_length(auth_output.value) == 2 - fundraise_check? && fundraise_target_check? && validity_check? && output_datum_check? && is_auth_output_value_clean? && only_minted_token( - mint, - share_token, - completion_script, - fundraise_added, - )? 
- } - - CompleteCrowdfund -> { - let input_lovelace_check = - lovelace_of(auth_input.output.value) >= min_charge + current_fundraised_amount - expect [_] = outputs_at_with_lovelace(outputs, fee_address, min_charge) - - let fundraise_check = current_fundraised_amount >= fundraise_target - let completion_script_withdrawal_credential: Credential = - Script(completion_script) - let withdrawal_script_check = - withdrawals - |> has_key(completion_script_withdrawal_credential) - fundraise_check? && withdrawal_script_check? && input_lovelace_check? && policy_only_minted_token( - mint, - auth_token, - completion_script, - -1, - )? - } - - ContributorWithdrawal -> { - let validity_check = - valid_after(validity_range, deadline + expiry_buffer) - let fund_check = current_fundraised_amount <= fundraise_target - - expect [auth_output] = - outputs_at_with_policy(outputs, current_address, auth_token) - - let lovelace_from_auth_input = auth_input.output.value |> lovelace_of() - let lovelace_from_auth_output = auth_output.value |> lovelace_of() - - let lovelace_withdrawn = - lovelace_from_auth_output - lovelace_from_auth_input - - let lovelace_withdrawn_check = lovelace_withdrawn < 0 - - expect auth_output_datum: CrowdfundDatum = - output_inline_datum(auth_output) - let output_datum_check = - auth_output_datum == CrowdfundDatum { - ..auth_input_datum, - current_fundraised_amount: current_fundraised_amount + lovelace_withdrawn, - } - - let is_auth_output_value_clean = value_length(auth_output.value) == 2 - (validity_check || fund_check)? && lovelace_withdrawn_check? && output_datum_check? && is_auth_output_value_clean? && only_minted_token( - mint, - share_token, - completion_script, - lovelace_withdrawn, - )? - } + + RemoveEmptyInstance -> { From 019d3d82f971e5919715815311f3269b25c2d5e3 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 7 Oct 2025 09:39:00 +0200 Subject: [PATCH 04/15] s03 --- src/components/multisig/aiken-crowdfund/lib/types.ak | 2 +- .../multisig/aiken-crowdfund/validators/proxy/spend.ak | 9 +++------ 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/src/components/multisig/aiken-crowdfund/lib/types.ak b/src/components/multisig/aiken-crowdfund/lib/types.ak index 1df217f3..e799db0a 100644 --- a/src/components/multisig/aiken-crowdfund/lib/types.ak +++ b/src/components/multisig/aiken-crowdfund/lib/types.ak @@ -5,7 +5,7 @@ pub type MintPolarity { RBurn } -pub type CrowdfundRedeemer { +pub type ProxyRedeemer { RemoveEmptyInstance } diff --git a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak index b51fad5f..d72261d5 100644 --- a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak +++ b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak @@ -8,14 +8,12 @@ use cocktail.{ value_length, } use types.{ - CompleteCrowdfund, ContributeFund, ContributorWithdrawal, CrowdfundDatum, - CrowdfundRedeemer, RemoveEmptyInstance, + ProxyRedeemer, RemoveEmptyInstance, } -use utils.{check_fundraise_target, outputs_at_with_lovelace} validator proxy(auth_token: PolicyId) { spend( - redeemer: CrowdfundRedeemer, + redeemer: ProxyRedeemer, input: OutputReference, self: Transaction, ) { @@ -31,11 +29,10 @@ validator proxy(auth_token: PolicyId) { when redeemer is { - + RemoveEmptyInstance -> { - let token_burnt_check = only_minted_token(mint, auth_token, completion_script, -10)? 
} } From 7bff59d4eea14def73cd020e1edd17f095fee21b Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 7 Oct 2025 15:23:35 +0200 Subject: [PATCH 05/15] x04 --- .../multisig/aiken-crowdfund/lib/utils.ak | 39 - .../aiken-crowdfund/validators/proxy/spend.ak | 44 - .../validators/tests/crowdfund/spend.ak | 1177 ----------------- .../tests/integration_test/contribute_fund.ak | 329 ----- .../README.md | 0 .../aiken.lock | 0 .../aiken.toml | 6 +- .../build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 0 .../packages/aiken-lang-stdlib/.gitattributes | 0 .../workflows/continuous-integration.yml | 0 .../packages/aiken-lang-stdlib/.gitignore | 0 .../packages/aiken-lang-stdlib/CHANGELOG.md | 0 .../build/packages/aiken-lang-stdlib/LICENSE | 0 .../packages/aiken-lang-stdlib/README.md | 0 .../packages/aiken-lang-stdlib/aiken.lock | 0 .../packages/aiken-lang-stdlib/aiken.toml | 0 .../aiken-lang-stdlib/lib/aiken/cbor.ak | 0 .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 0 .../aiken-lang-stdlib/lib/aiken/collection.ak | 0 .../lib/aiken/collection/dict.ak | 0 .../lib/aiken/collection/list.ak | 0 .../lib/aiken/collection/pairs.ak | 0 .../aiken-lang-stdlib/lib/aiken/crypto.ak | 0 .../lib/aiken/crypto/bls12_381/g1.ak | 0 .../lib/aiken/crypto/bls12_381/g2.ak | 0 .../lib/aiken/crypto/bls12_381/scalar.ak | 0 .../aiken-lang-stdlib/lib/aiken/interval.ak | 0 .../aiken-lang-stdlib/lib/aiken/math.ak | 0 .../lib/aiken/math/rational.ak | 0 .../lib/aiken/math/rational.tests.ak | 0 .../aiken-lang-stdlib/lib/aiken/option.ak | 0 .../lib/aiken/primitive/bytearray.ak | 0 .../lib/aiken/primitive/int.ak | 0 .../lib/aiken/primitive/string.ak | 0 .../aiken-lang-stdlib/lib/cardano/address.ak | 0 .../lib/cardano/address/credential.ak | 0 .../aiken-lang-stdlib/lib/cardano/assets.ak | 0 .../lib/cardano/certificate.ak | 0 .../lib/cardano/governance.ak | 0 .../cardano/governance/protocol_parameters.ak | 0 .../lib/cardano/governance/voter.ak | 0 .../lib/cardano/script_context.ak | 0 .../lib/cardano/transaction.ak | 0 .../cardano/transaction/output_reference.ak | 0 .../lib/cardano/transaction/script_purpose.ak | 0 .../build/packages/packages.toml | 0 .../.github/workflows/build_docs.yml | 0 .../.github/workflows/release.yml | 0 .../build/packages/sidan-lab-vodka/.gitignore | 0 .../build/packages/sidan-lab-vodka/LICENSE | 0 .../build/packages/sidan-lab-vodka/README.md | 0 .../build/packages/sidan-lab-vodka/aiken.lock | 0 .../build/packages/sidan-lab-vodka/aiken.toml | 0 .../packages/sidan-lab-vodka/assets/logo.png | Bin .../sidan-lab-vodka/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 0 .../packages/aiken-lang-stdlib/.gitattributes | 0 .../workflows/continuous-integration.yml | 0 .../packages/aiken-lang-stdlib/.gitignore | 0 .../packages/aiken-lang-stdlib/CHANGELOG.md | 0 .../build/packages/aiken-lang-stdlib/LICENSE | 0 .../packages/aiken-lang-stdlib/README.md | 0 .../packages/aiken-lang-stdlib/aiken.lock | 0 .../packages/aiken-lang-stdlib/aiken.toml | 0 .../aiken-lang-stdlib/lib/aiken/cbor.ak | 0 .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 0 .../aiken-lang-stdlib/lib/aiken/collection.ak | 0 .../lib/aiken/collection/dict.ak | 0 .../lib/aiken/collection/list.ak | 0 .../lib/aiken/collection/pairs.ak | 0 .../aiken-lang-stdlib/lib/aiken/crypto.ak | 0 .../lib/aiken/crypto/bls12_381/g1.ak | 0 .../lib/aiken/crypto/bls12_381/g2.ak | 0 .../lib/aiken/crypto/bls12_381/scalar.ak | 0 .../aiken-lang-stdlib/lib/aiken/interval.ak | 0 .../aiken-lang-stdlib/lib/aiken/math.ak | 0 
.../lib/aiken/math/rational.ak | 0 .../lib/aiken/math/rational.tests.ak | 0 .../aiken-lang-stdlib/lib/aiken/option.ak | 0 .../lib/aiken/primitive/bytearray.ak | 0 .../lib/aiken/primitive/int.ak | 0 .../lib/aiken/primitive/string.ak | 0 .../aiken-lang-stdlib/lib/cardano/address.ak | 0 .../lib/cardano/address/credential.ak | 0 .../aiken-lang-stdlib/lib/cardano/assets.ak | 0 .../lib/cardano/certificate.ak | 0 .../lib/cardano/governance.ak | 0 .../cardano/governance/protocol_parameters.ak | 0 .../lib/cardano/governance/voter.ak | 0 .../lib/cardano/script_context.ak | 0 .../lib/cardano/transaction.ak | 0 .../cardano/transaction/output_reference.ak | 0 .../lib/cardano/transaction/script_purpose.ak | 0 .../build/packages/packages.toml | 0 .../build/packages/sidan-lab-vodka/lib/cip.ak | 0 .../packages/sidan-lab-vodka/lib/cocktail.ak | 0 .../lib/cocktail/vodka_address.ak | 0 .../lib/cocktail/vodka_converter.ak | 0 .../lib/cocktail/vodka_extra_signatories.ak | 0 .../lib/cocktail/vodka_inputs.ak | 0 .../lib/cocktail/vodka_mints.ak | 0 .../lib/cocktail/vodka_outputs.ak | 0 .../lib/cocktail/vodka_redeemers.ak | 0 .../lib/cocktail/vodka_validity_range.ak | 0 .../lib/cocktail/vodka_value.ak | 0 .../packages/sidan-lab-vodka/lib/mocktail.ak | 0 .../lib/mocktail/virgin_address.ak | 0 .../lib/mocktail/virgin_key_hash.ak | 0 .../lib/mocktail/virgin_output_reference.ak | 0 .../lib/mocktail/virgin_outputs.ak | 0 .../lib/mocktail/virgin_validity_range.ak | 0 .../packages/sidan-lab-vodka/plutus.json | 0 .../lib/types.ak | 7 +- .../multisig/aiken-proxy/plutus.json | 163 +++ .../specs/1_auth_tokens.md | 0 .../specs/2_proxy.md | 0 .../specs/_scripts.md | 0 .../specs/application_setup_doc.md | 0 .../specs/user_action_doc.md | 0 .../validators/auth_token/mint.ak | 0 .../aiken-proxy/validators/proxy/spend.ak | 34 + .../validators/tests/auth_token/mint.ak | 0 .../validators/tests/proxy/spend.ak | 65 + .../validators/tests/utils.ak | 0 125 files changed, 267 insertions(+), 1597 deletions(-) delete mode 100644 src/components/multisig/aiken-crowdfund/lib/utils.ak delete mode 100644 src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak delete mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak delete mode 100644 src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/README.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/aiken.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/aiken.toml (72%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/aiken-compile.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/.editorconfig (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/.gitattributes (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/.gitignore (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/CHANGELOG.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/LICENSE (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/README.md (100%) rename 
src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/aiken.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/aiken.toml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/math.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/option.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/address.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak (100%) rename 
src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/packages.toml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/.github/workflows/release.yml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/.gitignore (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/LICENSE (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/README.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/aiken.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/aiken.toml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/assets/logo.png (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/aiken-compile.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak (100%) rename 
src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak (100%) rename src/components/multisig/{aiken-crowdfund => 
aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/build/packages/packages.toml (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cip.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/build/packages/sidan-lab-vodka/plutus.json (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/lib/types.ak (70%) create mode 100644 src/components/multisig/aiken-proxy/plutus.json rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/specs/1_auth_tokens.md (100%) rename 
src/components/multisig/{aiken-crowdfund => aiken-proxy}/specs/2_proxy.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/specs/_scripts.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/specs/application_setup_doc.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/specs/user_action_doc.md (100%) rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/validators/auth_token/mint.ak (100%) create mode 100644 src/components/multisig/aiken-proxy/validators/proxy/spend.ak rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/validators/tests/auth_token/mint.ak (100%) create mode 100644 src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak rename src/components/multisig/{aiken-crowdfund => aiken-proxy}/validators/tests/utils.ak (100%) diff --git a/src/components/multisig/aiken-crowdfund/lib/utils.ak b/src/components/multisig/aiken-crowdfund/lib/utils.ak deleted file mode 100644 index 75d90a27..00000000 --- a/src/components/multisig/aiken-crowdfund/lib/utils.ak +++ /dev/null @@ -1,39 +0,0 @@ -use aiken/collection/list -use aiken/collection/pairs -use cardano/address.{Address} -use cardano/assets.{Lovelace, from_lovelace} -use cardano/transaction.{Input, Output, Redeemer, ScriptPurpose, Spend} - -pub fn redeemer_with_input( - redeemers: Pairs, - input: Input, -) -> Option { - let output_reference = input.output_reference - redeemers |> pairs.get_first(Spend(output_reference)) -} - -pub fn check_fundraise_target( - allow_over_subscription: Bool, - fundraise_target: Int, - current_fundraised_amount: Int, -) -> Bool { - if allow_over_subscription { - True - } else { - current_fundraised_amount <= fundraise_target - } -} - -pub fn outputs_at_with_lovelace( - outputs: List, - address: Address, - lovelace: Lovelace, -) -> List { - list.filter( - outputs, - fn(output) { - let is_lovelace_match = output.value == from_lovelace(lovelace) - is_lovelace_match && output.address == address - }, - ) -} diff --git a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak b/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak deleted file mode 100644 index d72261d5..00000000 --- a/src/components/multisig/aiken-crowdfund/validators/proxy/spend.ak +++ /dev/null @@ -1,44 +0,0 @@ -use aiken/collection/pairs.{has_key} -use cardano/address.{Credential, Script} -use cardano/assets.{PolicyId, lovelace_of} -use cardano/transaction.{OutputReference, Transaction, find_input} -use cocktail.{ - inputs_at_with_policy, key_signed, only_minted_token, output_inline_datum, - outputs_at_with_policy, policy_only_minted_token, valid_after, valid_before, - value_length, -} -use types.{ - ProxyRedeemer, RemoveEmptyInstance, -} - -validator proxy(auth_token: PolicyId) { - spend( - redeemer: ProxyRedeemer, - input: OutputReference, - self: Transaction, - ) { - let Transaction { - inputs, - validity_range, - mint, - outputs, - withdrawals, - extra_signatories, - .. - } = self - - when redeemer is { - - - - - RemoveEmptyInstance -> { - let token_burnt_check = only_minted_token(mint, auth_token, completion_script, -10)? 
- } - } - } - - else(_) { - fail - } -} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak b/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak deleted file mode 100644 index 2b68446f..00000000 --- a/src/components/multisig/aiken-crowdfund/validators/tests/crowdfund/spend.ak +++ /dev/null @@ -1,1177 +0,0 @@ -use cardano/assets.{add, from_lovelace} -use cardano/transaction.{Transaction} -use crowdfund/spend as crowdfund_spend -use mocktail.{ - add_extra_signatory, complete, invalid_before, invalid_hereafter, mint, - mock_policy_id, mock_pub_key_address, mock_tx_hash, mock_utxo_ref, mocktail_tx, - script_withdrawal, tx_in, tx_in_inline_datum, tx_out, tx_out_inline_datum, -} -use tests/utils.{ - mock_auth_token, mock_completion_script, mock_contribute_min_fundraised_amount, - mock_contribute_over_fundraised_amount, mock_crowdfund_address, - mock_crowdfund_datum, mock_current_fundraised_amount, mock_deadline, - mock_expiry_buffer, mock_extra_fundraised_amount, mock_fee_address, - mock_fundraise_target, mock_min_charge, mock_proposer_key_hash, - mock_share_token, -} -use types.{ - CompleteCrowdfund, ContributeFund, ContributorWithdrawal, RemoveEmptyInstance, -} - -type ContributeFundTestCase { - is_only_one_auth_inputed: Bool, - is_only_one_auth_outputed: Bool, - is_auth_output_datum_correct: Bool, - is_auth_output_value_clean: Bool, - is_deadline_not_passed: Bool, - is_shares_minted: Bool, -} - -fn mock_contribute_fund_tx( - test_case: ContributeFundTestCase, - current_fundraised_amount: Int, - contribute_amount: Int, - allow_over_subscription: Bool, -) -> Transaction { - let ContributeFundTestCase { - is_only_one_auth_inputed, - is_only_one_auth_outputed, - is_auth_output_datum_correct, - is_auth_output_value_clean, - is_deadline_not_passed, - is_shares_minted, - } = test_case - - let input_value = - from_lovelace(current_fundraised_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - let output_value = - from_lovelace(current_fundraised_amount + contribute_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - let auth_correct_output_datum = - mock_crowdfund_datum( - current_fundraised_amount + contribute_amount, - allow_over_subscription, - ) - - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) - |> tx_in_inline_datum( - True, - mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), - ) - |> tx_in( - !is_only_one_auth_inputed, - mock_tx_hash(0), - 1, - input_value, - mock_crowdfund_address, - ) - |> tx_in( - True, - mock_tx_hash(1), - 0, - from_lovelace(contribute_amount), - mock_pub_key_address(0, None), - ) - |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) - |> tx_out( - !is_auth_output_value_clean, - mock_crowdfund_address, - output_value |> add(mock_policy_id(999), mock_completion_script, 1), - ) - |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) - |> tx_out_inline_datum( - !is_auth_output_datum_correct, - mock_crowdfund_datum( - current_fundraised_amount + contribute_amount + 9999999, - allow_over_subscription, - ), - ) - |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) - |> invalid_hereafter(is_deadline_not_passed, mock_deadline - 3600 * 24) - |> invalid_hereafter(!is_deadline_not_passed, mock_deadline + 3600 * 24) - |> mint( - is_shares_minted, - contribute_amount, - mock_share_token, - mock_completion_script, - ) - |> mint( - !is_shares_minted, - 
contribute_amount + 9999999, - mock_share_token, - mock_completion_script, - ) - |> complete() -} - -test s3_spend_success_contribute_fund_with_not_allow_over_subscription() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_contribute_fund_with_allow_over_subscription() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - True, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, True)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_contribute_fund_with_allow_over_subscription_and_over_fundraised() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_over_fundraised_amount, - True, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, True)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_not_allow_over_subscription_but_over_fundraised() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_over_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_over_current_fundraised() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_more_than_one_auth_inputed() fail { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: False, - is_only_one_auth_outputed: 
True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_more_than_one_auth_outputed() fail { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: False, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_incorrect_auth_output_datum() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: False, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_auth_output_not_clean() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: False, - is_deadline_not_passed: True, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_deadline_passed() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: False, - is_shares_minted: True, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contribute_fund_with_wrong_shares_minted() { - let tx = - mock_contribute_fund_tx( - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: False, - }, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - 
Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - ContributeFund, - mock_utxo_ref(0, 0), - tx, - ) -} - -type CompleteCrowdfundTestCase { - is_only_one_auth_inputed: Bool, - is_output_to_fee_address_correct: Bool, - is_auth_burnt: Bool, - is_completion_script_executed: Bool, -} - -fn mock_complete_crowdfund_tx( - test_case: CompleteCrowdfundTestCase, - current_fundraised_amount: Int, - allow_over_subscription: Bool, -) -> Transaction { - let CompleteCrowdfundTestCase { - is_only_one_auth_inputed, - is_output_to_fee_address_correct, - is_auth_burnt, - is_completion_script_executed, - } = test_case - - let input_value = - from_lovelace(current_fundraised_amount + mock_min_charge) - |> add(mock_auth_token, mock_completion_script, 1) - - let output_value = from_lovelace(mock_min_charge) - - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) - |> tx_in_inline_datum( - True, - mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), - ) - |> tx_in( - !is_only_one_auth_inputed, - mock_tx_hash(0), - 1, - input_value, - mock_crowdfund_address, - ) - |> tx_out(is_output_to_fee_address_correct, mock_fee_address, output_value) - |> tx_out( - !is_output_to_fee_address_correct, - mock_fee_address, - from_lovelace(mock_extra_fundraised_amount - 10), - ) - |> script_withdrawal( - is_completion_script_executed, - mock_completion_script, - 2_000_000, - ) - |> mint(is_auth_burnt, -1, mock_auth_token, mock_completion_script) - |> complete() -} - -test s3_spend_success_complete_crowdfund_with_amount_equal_to_target() { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: True, - is_auth_burnt: True, - is_completion_script_executed: True, - }, - mock_fundraise_target, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_complete_crowdfund_with_amount_larger_than_target() { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: True, - is_auth_burnt: True, - is_completion_script_executed: True, - }, - mock_contribute_over_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_complete_crowdfund_with_amount_less_than_target() { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: True, - is_auth_burnt: True, - is_completion_script_executed: True, - }, - mock_fundraise_target - 1, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target - 1, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_complete_crowdfund_with_more_than_one_auth_inputed() fail { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: False, - is_output_to_fee_address_correct: True, - is_auth_burnt: True, - is_completion_script_executed: True, - }, - mock_fundraise_target, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - 
mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_complete_crowdfund_with_incorrect_fee_output() fail { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: False, - is_auth_burnt: True, - is_completion_script_executed: True, - }, - mock_fundraise_target, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_complete_crowdfund_with_no_auth_burnt() { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: True, - is_auth_burnt: False, - is_completion_script_executed: True, - }, - mock_fundraise_target, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_complete_crowdfund_with_no_completion_script_executed() { - let tx = - mock_complete_crowdfund_tx( - CompleteCrowdfundTestCase { - is_only_one_auth_inputed: True, - is_output_to_fee_address_correct: True, - is_auth_burnt: True, - is_completion_script_executed: False, - }, - mock_fundraise_target, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_fundraise_target, False)), - CompleteCrowdfund, - mock_utxo_ref(0, 0), - tx, - ) -} - -type ContributorWithdrawalTestCase { - is_only_one_auth_inputed: Bool, - is_only_one_auth_outputed: Bool, - is_auth_output_datum_correct: Bool, - is_auth_output_value_clean: Bool, - is_auth_unlock_value_correct: Bool, - is_deadline_passed: Bool, - is_shares_burnt: Bool, -} - -fn mock_contributor_withdrawal_tx( - test_case: ContributorWithdrawalTestCase, - current_fundraised_amount: Int, - withdraw_amount: Int, - allow_over_subscription: Bool, -) -> Transaction { - let ContributorWithdrawalTestCase { - is_only_one_auth_inputed, - is_only_one_auth_outputed, - is_auth_output_datum_correct, - is_auth_output_value_clean, - is_auth_unlock_value_correct, - is_deadline_passed, - is_shares_burnt, - } = test_case - - let input_value = - from_lovelace(current_fundraised_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - let output_value = - if is_auth_unlock_value_correct { - from_lovelace(current_fundraised_amount - withdraw_amount) - |> add(mock_auth_token, mock_completion_script, 1) - } else { - from_lovelace(current_fundraised_amount - withdraw_amount + 10) - |> add(mock_auth_token, mock_completion_script, 1) - } - - let auth_correct_output_datum = - mock_crowdfund_datum( - current_fundraised_amount - withdraw_amount, - allow_over_subscription, - ) - - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) - |> tx_in_inline_datum( - True, - mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), - ) - |> tx_in( - !is_only_one_auth_inputed, - mock_tx_hash(0), - 1, - input_value, - mock_crowdfund_address, - ) - |> tx_out(True, mock_pub_key_address(0, None), from_lovelace(withdraw_amount)) - |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) - |> tx_out( - !is_auth_output_value_clean, - mock_crowdfund_address, - 
output_value |> add(mock_policy_id(999), mock_completion_script, 1), - ) - |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) - |> tx_out_inline_datum( - !is_auth_output_datum_correct, - mock_crowdfund_datum( - current_fundraised_amount - withdraw_amount + 9999999, - allow_over_subscription, - ), - ) - |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) - |> invalid_before( - is_deadline_passed, - mock_deadline + mock_expiry_buffer + 3600 * 24, - ) - |> invalid_before( - !is_deadline_passed, - mock_deadline + mock_expiry_buffer - 3600 * 24, - ) - |> mint( - is_shares_burnt, - -withdraw_amount, - mock_share_token, - mock_completion_script, - ) - |> mint( - !is_shares_burnt, - -withdraw_amount + 9999999, - mock_share_token, - mock_completion_script, - ) - |> complete() -} - -test s3_spend_success_contributor_withdraw_with_deadline_passed_but_fundraised_reach_target() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_contributor_withdraw_with_deadline_passed_and_fundraised_less_than_target() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_current_fundraised_amount * 2, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount * 2, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_contributor_withdraw_with_deadline_not_passed_but_fundraised_less_than_target() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: False, - is_shares_burnt: True, - }, - mock_current_fundraised_amount * 2, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount * 2, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_deadline_not_passed_and_fundraised_reached_target() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: False, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - 
!crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_more_thanone_auth_inputed() fail { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: False, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_more_thanone_auth_outputed() fail { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: False, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_incorrect_output_datum() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: False, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_auth_output_value_not_clean() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: False, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_incorrect_auth_unlock_value() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: False, - is_deadline_passed: True, - is_shares_burnt: True, - }, - mock_contribute_over_fundraised_amount, - 
mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_contributor_withdraw_with_incorrect_shares_burnt() { - let tx = - mock_contributor_withdrawal_tx( - ContributorWithdrawalTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_auth_unlock_value_correct: True, - is_deadline_passed: True, - is_shares_burnt: False, - }, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_contribute_over_fundraised_amount, False)), - ContributorWithdrawal, - mock_utxo_ref(0, 0), - tx, - ) -} - -type RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: Bool, - is_deadline_passed: Bool, - is_shares_burnt: Bool, - is_auth_burnt: Bool, - is_proposer_signed: Bool, -} - -fn mock_remove_empty_instance_tx( - test_case: RemoveEmptyInstanceTestCase, - current_fundraised_amount: Int, - allow_over_subscription: Bool, -) -> Transaction { - let RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed, - is_deadline_passed, - is_shares_burnt, - is_auth_burnt, - is_proposer_signed, - } = test_case - - let input_value = - from_lovelace(current_fundraised_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) - |> tx_in_inline_datum( - True, - mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), - ) - |> tx_in( - !is_only_one_auth_inputed, - mock_tx_hash(0), - 1, - input_value, - mock_crowdfund_address, - ) - |> invalid_before(is_deadline_passed, mock_deadline + 3600 * 24) - |> invalid_before(!is_deadline_passed, mock_deadline - 3600 * 24) - |> mint( - is_shares_burnt, - -current_fundraised_amount, - mock_share_token, - mock_completion_script, - ) - |> mint( - !is_shares_burnt, - -current_fundraised_amount + 9999999, - mock_share_token, - mock_completion_script, - ) - |> mint(is_auth_burnt, -1, mock_auth_token, mock_completion_script) - |> complete() - |> add_extra_signatory(is_proposer_signed, mock_proposer_key_hash) -} - -test s3_spend_success_remove_empty_instance_wih_zero_fund() { - let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: True, - is_deadline_passed: True, - is_shares_burnt: True, - is_auth_burnt: True, - is_proposer_signed: True, - }, - 0, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(0, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_success_remove_empty_instance_wih_some_fund() { - let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: True, - is_deadline_passed: True, - is_shares_burnt: True, - is_auth_burnt: True, - is_proposer_signed: True, - }, - mock_current_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_remove_empty_instance_wih_more_than_one_auth_inputed() fail { 
- let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: False, - is_deadline_passed: True, - is_shares_burnt: True, - is_auth_burnt: True, - is_proposer_signed: True, - }, - mock_current_fundraised_amount, - False, - ) - - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_remove_empty_instance_wih_deadline_not_passed() { - let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: True, - is_deadline_passed: False, - is_shares_burnt: True, - is_auth_burnt: True, - is_proposer_signed: True, - }, - mock_current_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_remove_empty_instance_wih_share_not_burnt() { - let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: True, - is_deadline_passed: True, - is_shares_burnt: False, - is_auth_burnt: True, - is_proposer_signed: True, - }, - mock_current_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -test s3_spend_fail_remove_empty_instance_wih_authe_not_burnt() { - let tx = - mock_remove_empty_instance_tx( - RemoveEmptyInstanceTestCase { - is_only_one_auth_inputed: True, - is_deadline_passed: True, - is_shares_burnt: True, - is_auth_burnt: False, - is_proposer_signed: True, - }, - mock_current_fundraised_amount, - False, - ) - - !crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some(mock_crowdfund_datum(mock_current_fundraised_amount, False)), - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak b/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak deleted file mode 100644 index 1bbcc7e2..00000000 --- a/src/components/multisig/aiken-crowdfund/validators/tests/integration_test/contribute_fund.ak +++ /dev/null @@ -1,329 +0,0 @@ -use cardano/assets.{add, from_lovelace} -use cardano/transaction.{OutputReference, Spend, Transaction} -use crowdfund/spend as crowdfund_spend -use mocktail.{ - add_redeemer, complete, invalid_hereafter, mint, mock_policy_id, - mock_pub_key_address, mock_tx_hash, mock_utxo_ref, mocktail_tx, tx_in, - tx_in_inline_datum, tx_out, tx_out_inline_datum, -} -use share_token/mint as share_token_mint -use tests/utils.{ - mock_auth_token, mock_completion_script, mock_contribute_min_fundraised_amount, - mock_contribute_over_fundraised_amount, mock_crowdfund_address, - mock_crowdfund_datum, mock_current_fundraised_amount, mock_deadline, - mock_proposer_key_hash, mock_share_token, -} -use types.{CompleteCrowdfund, ContributeFund, CrowdfundRedeemer, RMint} - -type ContributeFundTestCase { - is_only_one_auth_inputed: Bool, - is_only_one_auth_outputed: Bool, - is_auth_output_datum_correct: Bool, - is_auth_output_value_clean: Bool, - is_deadline_not_passed: Bool, - is_shares_minted: Bool, -} - -fn mock_contribute_fund_tx( - test_case: 
ContributeFundTestCase, - current_fundraised_amount: Int, - contribute_amount: Int, - allow_over_subscription: Bool, - auth_token_redeemer: CrowdfundRedeemer, -) -> Transaction { - let ContributeFundTestCase { - is_only_one_auth_inputed, - is_only_one_auth_outputed, - is_auth_output_datum_correct, - is_auth_output_value_clean, - is_deadline_not_passed, - is_shares_minted, - } = test_case - let auth_token_redeemer_data: Data = auth_token_redeemer - - let input_value = - from_lovelace(current_fundraised_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - let output_value = - from_lovelace(current_fundraised_amount + contribute_amount) - |> add(mock_auth_token, mock_completion_script, 1) - - let auth_correct_output_datum = - mock_crowdfund_datum( - current_fundraised_amount + contribute_amount, - allow_over_subscription, - ) - - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, input_value, mock_crowdfund_address) - |> tx_in_inline_datum( - True, - mock_crowdfund_datum(current_fundraised_amount, allow_over_subscription), - ) - |> tx_in( - !is_only_one_auth_inputed, - mock_tx_hash(0), - 1, - input_value, - mock_crowdfund_address, - ) - |> tx_in( - True, - mock_tx_hash(1), - 0, - from_lovelace(contribute_amount), - mock_pub_key_address(0, None), - ) - |> tx_out(is_auth_output_value_clean, mock_crowdfund_address, output_value) - |> tx_out( - !is_auth_output_value_clean, - mock_crowdfund_address, - output_value |> add(mock_policy_id(999), mock_completion_script, 1), - ) - |> tx_out_inline_datum(is_auth_output_datum_correct, auth_correct_output_datum) - |> tx_out_inline_datum( - !is_auth_output_datum_correct, - mock_crowdfund_datum( - current_fundraised_amount + contribute_amount + 9999999, - allow_over_subscription, - ), - ) - |> tx_out(!is_only_one_auth_outputed, mock_crowdfund_address, output_value) - |> invalid_hereafter(is_deadline_not_passed, mock_deadline - 3600 * 24) - |> invalid_hereafter(!is_deadline_not_passed, mock_deadline + 3600 * 24) - |> mint( - is_shares_minted, - contribute_amount, - mock_share_token, - mock_completion_script, - ) - |> mint( - !is_shares_minted, - contribute_amount + 9999999, - mock_share_token, - mock_completion_script, - ) - |> complete() - |> add_redeemer( - True, - Pair( - Spend( - OutputReference { transaction_id: mock_tx_hash(0), output_index: 0 }, - ), - auth_token_redeemer_data, - ), - ) -} - -fn check_all_scripts( - test_case: ContributeFundTestCase, - current_fundraised_amount: Int, - contribute_amount: Int, - allow_over_subscription: Bool, - auth_token_redeemer: CrowdfundRedeemer, -) { - let tx = - mock_contribute_fund_tx( - test_case, - current_fundraised_amount, - contribute_amount, - allow_over_subscription, - auth_token_redeemer, - ) - - let check_auth_spend = - crowdfund_spend.crowdfund.spend( - mock_auth_token, - mock_proposer_key_hash, - Some( - mock_crowdfund_datum( - mock_current_fundraised_amount, - allow_over_subscription, - ), - ), - auth_token_redeemer, - mock_utxo_ref(0, 0), - tx, - ) - let check_share_mint = - share_token_mint.share_token.mint( - mock_auth_token, - RMint, - mock_share_token, - tx, - ) - - check_auth_spend? && check_share_mint? 
-} - -test contribute_fund_fail_with_not_allow_over_subscription_but_over_fundraised() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - !check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_over_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_over_current_fundraised() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - !check_all_scripts( - test_case, - mock_contribute_over_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_wrong_redeemer() fail { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - CompleteCrowdfund, - ) -} - -test contribute_fund_fail_with_more_than_one_auth_inputed() fail { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: False, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_more_than_one_auth_outputed() fail { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: False, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_incorrect_auth_output_datum() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: False, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: True, - } - !check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_auth_output_not_clean() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: False, - is_deadline_not_passed: True, - is_shares_minted: True, - } - !check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_deadline_passed() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: False, - is_shares_minted: True, - } - 
!check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} - -test contribute_fund_fail_with_wrong_shares_minted() { - let test_case = - ContributeFundTestCase { - is_only_one_auth_inputed: True, - is_only_one_auth_outputed: True, - is_auth_output_datum_correct: True, - is_auth_output_value_clean: True, - is_deadline_not_passed: True, - is_shares_minted: False, - } - !check_all_scripts( - test_case, - mock_current_fundraised_amount, - mock_contribute_min_fundraised_amount, - False, - ContributeFund, - ) -} diff --git a/src/components/multisig/aiken-crowdfund/README.md b/src/components/multisig/aiken-proxy/README.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/README.md rename to src/components/multisig/aiken-proxy/README.md diff --git a/src/components/multisig/aiken-crowdfund/aiken.lock b/src/components/multisig/aiken-proxy/aiken.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/aiken.lock rename to src/components/multisig/aiken-proxy/aiken.lock diff --git a/src/components/multisig/aiken-crowdfund/aiken.toml b/src/components/multisig/aiken-proxy/aiken.toml similarity index 72% rename from src/components/multisig/aiken-crowdfund/aiken.toml rename to src/components/multisig/aiken-proxy/aiken.toml index 7a5ec5fe..c8b0955c 100644 --- a/src/components/multisig/aiken-crowdfund/aiken.toml +++ b/src/components/multisig/aiken-proxy/aiken.toml @@ -1,9 +1,9 @@ -name = "multisig-Extension" +name = "mesh/proxy" version = "0.0.0" -compiler = "v1.1.16" +compiler = "v1.1.17" plutus = "v3" license = "Apache-2.0" -description = "Aiken contracts for project 'multisig-Extension'" +description = "Aiken contracts for project 'aiken-proxy'" [repository] user = "QS" diff --git a/src/components/multisig/aiken-crowdfund/build/aiken-compile.lock b/src/components/multisig/aiken-proxy/build/aiken-compile.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/aiken-compile.lock rename to src/components/multisig/aiken-proxy/build/aiken-compile.lock diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.editorconfig rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitattributes rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore 
b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/.gitignore rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/CHANGELOG.md rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/LICENSE rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/README.md rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.lock rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/aiken.toml rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak similarity index 100% rename from 
src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/option.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak 
similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak rename to src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/packages.toml b/src/components/multisig/aiken-proxy/build/packages/packages.toml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/packages.toml rename to src/components/multisig/aiken-proxy/build/packages/packages.toml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.github/workflows/release.yml rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/.gitignore rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/LICENSE rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/README.md rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.lock rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/aiken.toml rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/assets/logo.png b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/assets/logo.png similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/assets/logo.png rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/assets/logo.png diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/aiken-compile.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/aiken-compile.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/aiken-compile.lock rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/aiken-compile.lock diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml rename to 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak similarity index 100% rename from 
src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak rename to 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak rename to 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak rename to 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak rename to 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/build/packages/packages.toml rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cip.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak 
b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak diff --git 
a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak similarity index 100% rename from 
src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak diff --git a/src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json similarity index 100% rename from src/components/multisig/aiken-crowdfund/build/packages/sidan-lab-vodka/plutus.json rename to src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json diff --git a/src/components/multisig/aiken-crowdfund/lib/types.ak b/src/components/multisig/aiken-proxy/lib/types.ak similarity index 70% rename from src/components/multisig/aiken-crowdfund/lib/types.ak rename to src/components/multisig/aiken-proxy/lib/types.ak index e799db0a..eafe990b 100644 --- a/src/components/multisig/aiken-crowdfund/lib/types.ak +++ b/src/components/multisig/aiken-proxy/lib/types.ak @@ -1,12 +1,9 @@ -use cardano/address.{Address} - pub type MintPolarity { RMint RBurn } pub type ProxyRedeemer { - + SpendFunds RemoveEmptyInstance -} - +} \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/plutus.json b/src/components/multisig/aiken-proxy/plutus.json new file mode 100644 index 00000000..ee53388c --- /dev/null +++ b/src/components/multisig/aiken-proxy/plutus.json @@ -0,0 +1,163 @@ +{ + "preamble": { + "title": "mesh/proxy", + "description": "Aiken contracts for project 'aiken-proxy'", + "version": "0.0.0", + "plutusVersion": "v3", + "compiler": { + "name": "Aiken", + "version": "v1.1.17+c3a7fba" + }, + "license": "Apache-2.0" + }, + "validators": [ + { + "title": "auth_token/mint.auth_token.mint", + "redeemer": { + "title": "redeemer", + "schema": { + "$ref": "#/definitions/types~1MintPolarity" + } + }, + "parameters": [ + { + "title": "utxo_ref", + "schema": { + "$ref": "#/definitions/cardano~1transaction~1OutputReference" + } + } + ], + "compiledCode": "59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", + "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" + }, + { + "title": "auth_token/mint.auth_token.else", + "redeemer": { + "schema": {} + }, + "parameters": [ + { + "title": "utxo_ref", + "schema": { + "$ref": "#/definitions/cardano~1transaction~1OutputReference" + } + } + ], + "compiledCode": 
"59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", + "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" + }, + { + "title": "proxy/spend.proxy.spend", + "datum": { + "title": "_datum", + "schema": { + "$ref": "#/definitions/Data" + } + }, + "redeemer": { + "title": "redeemer", + "schema": { + "$ref": "#/definitions/types~1ProxyRedeemer" + } + }, + "parameters": [ + { + "title": "auth_token", + "schema": { + "$ref": "#/definitions/cardano~1assets~1PolicyId" + } + } + ], + "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", + "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" + }, + { + "title": "proxy/spend.proxy.else", + "redeemer": { + "schema": {} + }, + "parameters": [ + { + "title": "auth_token", + "schema": { + "$ref": "#/definitions/cardano~1assets~1PolicyId" + } + } + ], + "compiledCode": 
"590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", + "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" + } + ], + "definitions": { + "ByteArray": { + "title": "ByteArray", + "dataType": "bytes" + }, + "Data": { + "title": "Data", + "description": "Any Plutus data." + }, + "Int": { + "dataType": "integer" + }, + "cardano/assets/PolicyId": { + "title": "PolicyId", + "dataType": "bytes" + }, + "cardano/transaction/OutputReference": { + "title": "OutputReference", + "description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", + "anyOf": [ + { + "title": "OutputReference", + "dataType": "constructor", + "index": 0, + "fields": [ + { + "title": "transaction_id", + "$ref": "#/definitions/ByteArray" + }, + { + "title": "output_index", + "$ref": "#/definitions/Int" + } + ] + } + ] + }, + "types/MintPolarity": { + "title": "MintPolarity", + "anyOf": [ + { + "title": "RMint", + "dataType": "constructor", + "index": 0, + "fields": [] + }, + { + "title": "RBurn", + "dataType": "constructor", + "index": 1, + "fields": [] + } + ] + }, + "types/ProxyRedeemer": { + "title": "ProxyRedeemer", + "anyOf": [ + { + "title": "SpendFunds", + "dataType": "constructor", + "index": 0, + "fields": [] + }, + { + "title": "RemoveEmptyInstance", + "dataType": "constructor", + "index": 1, + "fields": [] + } + ] + } + } +} \ No newline at end of file diff --git a/src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md b/src/components/multisig/aiken-proxy/specs/1_auth_tokens.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/specs/1_auth_tokens.md rename to src/components/multisig/aiken-proxy/specs/1_auth_tokens.md diff --git a/src/components/multisig/aiken-crowdfund/specs/2_proxy.md b/src/components/multisig/aiken-proxy/specs/2_proxy.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/specs/2_proxy.md rename to src/components/multisig/aiken-proxy/specs/2_proxy.md diff --git a/src/components/multisig/aiken-crowdfund/specs/_scripts.md b/src/components/multisig/aiken-proxy/specs/_scripts.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/specs/_scripts.md rename to src/components/multisig/aiken-proxy/specs/_scripts.md diff --git 
a/src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md b/src/components/multisig/aiken-proxy/specs/application_setup_doc.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/specs/application_setup_doc.md rename to src/components/multisig/aiken-proxy/specs/application_setup_doc.md diff --git a/src/components/multisig/aiken-crowdfund/specs/user_action_doc.md b/src/components/multisig/aiken-proxy/specs/user_action_doc.md similarity index 100% rename from src/components/multisig/aiken-crowdfund/specs/user_action_doc.md rename to src/components/multisig/aiken-proxy/specs/user_action_doc.md diff --git a/src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak b/src/components/multisig/aiken-proxy/validators/auth_token/mint.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/validators/auth_token/mint.ak rename to src/components/multisig/aiken-proxy/validators/auth_token/mint.ak diff --git a/src/components/multisig/aiken-proxy/validators/proxy/spend.ak b/src/components/multisig/aiken-proxy/validators/proxy/spend.ak new file mode 100644 index 00000000..8cc68e1a --- /dev/null +++ b/src/components/multisig/aiken-proxy/validators/proxy/spend.ak @@ -0,0 +1,34 @@ +use aiken/collection/list +use cardano/assets.{PolicyId} +use cardano/transaction.{OutputReference, Transaction} +use cocktail.{only_minted_token, outputs_with_policy} +use types.{ProxyRedeemer, RemoveEmptyInstance, SpendFunds} + +validator proxy(auth_token: PolicyId) { + spend( + _datum: Option, + redeemer: ProxyRedeemer, + _input: OutputReference, + self: Transaction, + ) { + let Transaction { mint, outputs, .. } = self + + when redeemer is { + // check if one auth token is moved in the transaction + SpendFunds -> { + // Check if any token from the auth_token policy is present in the outputs + // This means the token is being moved/transferred + let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) + // The token is considered "moved" if any auth token appears in outputs + // (meaning it's being transferred somewhere) + list.length(auth_tokens_in_outputs) > 0 + } + + RemoveEmptyInstance -> only_minted_token(mint, auth_token, "", -10)? 
+ } + } + + else(_) { + fail + } +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak b/src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/validators/tests/auth_token/mint.ak rename to src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak diff --git a/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak b/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak new file mode 100644 index 00000000..05e836fc --- /dev/null +++ b/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak @@ -0,0 +1,65 @@ +use cardano/assets.{add, from_lovelace} +use mocktail.{ + complete, mint, mock_policy_id, mock_pub_key_address, mock_utxo_ref, + mocktail_tx, tx_out, +} +use proxy/spend as proxy_spend +use types.{RemoveEmptyInstance, SpendFunds} + +const auth_token = mock_policy_id(0) + +// SpendFunds: succeeds when any token of auth_token policy appears in outputs +test p_proxy_spend_success_spend_funds_when_auth_token_in_outputs() { + let value = from_lovelace(2_000_000) |> add(auth_token, "", 1) + let tx = + mocktail_tx() + |> tx_out(True, mock_pub_key_address(0, None), value) + |> complete() + + proxy_spend.proxy.spend(auth_token, None, SpendFunds, mock_utxo_ref(0, 0), tx) +} + +// SpendFunds: fails when no auth_token policy appears in outputs +test p_proxy_spend_fail_spend_funds_when_no_auth_token_in_outputs() { + let value = from_lovelace(2_000_000) + let tx = + mocktail_tx() + |> tx_out(True, mock_pub_key_address(1, None), value) + |> complete() + + !proxy_spend.proxy.spend( + auth_token, + None, + SpendFunds, + mock_utxo_ref(0, 0), + tx, + ) +} + +// RemoveEmptyInstance: succeeds when exact burn of -10 under auth_token policy +test p_proxy_spend_success_remove_empty_instance_with_auth_burnt() { + let tx = + mocktail_tx() + |> mint(True, -10, auth_token, "") + |> complete() + + proxy_spend.proxy.spend( + auth_token, + None, + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +// RemoveEmptyInstance: fails when no burn occurs +test p_proxy_spend_fail_remove_empty_instance_without_burn() { + let tx = mocktail_tx() |> complete() + !proxy_spend.proxy.spend( + auth_token, + None, + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} diff --git a/src/components/multisig/aiken-crowdfund/validators/tests/utils.ak b/src/components/multisig/aiken-proxy/validators/tests/utils.ak similarity index 100% rename from src/components/multisig/aiken-crowdfund/validators/tests/utils.ak rename to src/components/multisig/aiken-proxy/validators/tests/utils.ak From 4e4686706409518a52467de01bdfb7c2d662510a Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 7 Oct 2025 15:23:47 +0200 Subject: [PATCH 06/15] s05 --- src/components/multisig/aiken-proxy/lib/types.ak | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/components/multisig/aiken-proxy/lib/types.ak b/src/components/multisig/aiken-proxy/lib/types.ak index eafe990b..3084fe32 100644 --- a/src/components/multisig/aiken-proxy/lib/types.ak +++ b/src/components/multisig/aiken-proxy/lib/types.ak @@ -6,4 +6,4 @@ pub type MintPolarity { pub type ProxyRedeemer { SpendFunds RemoveEmptyInstance -} \ No newline at end of file +} From acdc0a1e3413b24789486cc4802a05f89ad6898a Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Wed, 8 Oct 2025 13:55:24 +0200 Subject: [PATCH 07/15] s06 --- src/components/multisig/README.md | 0 .../multisig/proxy/aiken-workspace/README.md | 1 
+ .../multisig/proxy/aiken-workspace/aiken.lock | 26 + .../multisig/proxy/aiken-workspace/aiken.toml | 23 + .../aiken-workspace/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 + .../packages/aiken-lang-stdlib/.gitattributes | 2 + .../workflows/continuous-integration.yml | 64 + .../packages/aiken-lang-stdlib/.gitignore | 3 + .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ++++++++++ .../build/packages/aiken-lang-stdlib/LICENSE | 201 +++ .../packages/aiken-lang-stdlib/README.md | 71 + .../packages/aiken-lang-stdlib/aiken.lock | 16 + .../packages/aiken-lang-stdlib/aiken.toml | 15 + .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ++++ .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ++++ .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 + .../lib/aiken/collection/dict.ak | 1174 ++++++++++++++ .../lib/aiken/collection/list.ak | 1411 +++++++++++++++++ .../lib/aiken/collection/pairs.ak | 833 ++++++++++ .../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 ++ .../lib/aiken/crypto/bls12_381/g1.ak | 115 ++ .../lib/aiken/crypto/bls12_381/g2.ak | 124 ++ .../lib/aiken/crypto/bls12_381/scalar.ak | 255 +++ .../aiken-lang-stdlib/lib/aiken/interval.ak | 680 ++++++++ .../aiken-lang-stdlib/lib/aiken/math.ak | 424 +++++ .../lib/aiken/math/rational.ak | 871 ++++++++++ .../lib/aiken/math/rational.tests.ak | 65 + .../aiken-lang-stdlib/lib/aiken/option.ak | 312 ++++ .../lib/aiken/primitive/bytearray.ak | 668 ++++++++ .../lib/aiken/primitive/int.ak | 156 ++ .../lib/aiken/primitive/string.ak | 139 ++ .../aiken-lang-stdlib/lib/cardano/address.ak | 86 + .../lib/cardano/address/credential.ak | 30 + .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 +++++++++++ .../lib/cardano/certificate.ak | 93 ++ .../lib/cardano/governance.ak | 109 ++ .../cardano/governance/protocol_parameters.ak | 360 +++++ .../lib/cardano/governance/voter.ak | 62 + .../lib/cardano/script_context.ak | 62 + .../lib/cardano/transaction.ak | 225 +++ .../cardano/transaction/output_reference.ak | 23 + .../lib/cardano/transaction/script_purpose.ak | 126 ++ .../build/packages/packages.toml | 9 + .../.github/workflows/build_docs.yml | 50 + .../.github/workflows/release.yml | 80 + .../build/packages/sidan-lab-vodka/.gitignore | 16 + .../build/packages/sidan-lab-vodka/LICENSE | 201 +++ .../build/packages/sidan-lab-vodka/README.md | 136 ++ .../build/packages/sidan-lab-vodka/aiken.lock | 15 + .../build/packages/sidan-lab-vodka/aiken.toml | 18 + .../packages/sidan-lab-vodka/assets/logo.png | Bin 0 -> 74727 bytes .../sidan-lab-vodka/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 + .../packages/aiken-lang-stdlib/.gitattributes | 2 + .../workflows/continuous-integration.yml | 64 + .../packages/aiken-lang-stdlib/.gitignore | 3 + .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ++++++++++ .../build/packages/aiken-lang-stdlib/LICENSE | 201 +++ .../packages/aiken-lang-stdlib/README.md | 71 + .../packages/aiken-lang-stdlib/aiken.lock | 16 + .../packages/aiken-lang-stdlib/aiken.toml | 15 + .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ++++ .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ++++ .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 + .../lib/aiken/collection/dict.ak | 1174 ++++++++++++++ .../lib/aiken/collection/list.ak | 1411 +++++++++++++++++ .../lib/aiken/collection/pairs.ak | 833 ++++++++++ .../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 ++ .../lib/aiken/crypto/bls12_381/g1.ak | 115 ++ .../lib/aiken/crypto/bls12_381/g2.ak | 124 ++ .../lib/aiken/crypto/bls12_381/scalar.ak | 255 +++ 
.../aiken-lang-stdlib/lib/aiken/interval.ak | 680 ++++++++ .../aiken-lang-stdlib/lib/aiken/math.ak | 424 +++++ .../lib/aiken/math/rational.ak | 871 ++++++++++ .../lib/aiken/math/rational.tests.ak | 65 + .../aiken-lang-stdlib/lib/aiken/option.ak | 312 ++++ .../lib/aiken/primitive/bytearray.ak | 668 ++++++++ .../lib/aiken/primitive/int.ak | 156 ++ .../lib/aiken/primitive/string.ak | 139 ++ .../aiken-lang-stdlib/lib/cardano/address.ak | 86 + .../lib/cardano/address/credential.ak | 30 + .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 +++++++++++ .../lib/cardano/certificate.ak | 93 ++ .../lib/cardano/governance.ak | 109 ++ .../cardano/governance/protocol_parameters.ak | 360 +++++ .../lib/cardano/governance/voter.ak | 62 + .../lib/cardano/script_context.ak | 62 + .../lib/cardano/transaction.ak | 225 +++ .../cardano/transaction/output_reference.ak | 23 + .../lib/cardano/transaction/script_purpose.ak | 126 ++ .../build/packages/packages.toml | 4 + .../build/packages/sidan-lab-vodka/lib/cip.ak | 73 + .../packages/sidan-lab-vodka/lib/cocktail.ak | 161 ++ .../lib/cocktail/vodka_address.ak | 56 + .../lib/cocktail/vodka_converter.ak | 40 + .../lib/cocktail/vodka_extra_signatories.ak | 46 + .../lib/cocktail/vodka_inputs.ak | 123 ++ .../lib/cocktail/vodka_mints.ak | 68 + .../lib/cocktail/vodka_outputs.ak | 90 ++ .../lib/cocktail/vodka_redeemers.ak | 45 + .../lib/cocktail/vodka_validity_range.ak | 34 + .../lib/cocktail/vodka_value.ak | 80 + .../packages/sidan-lab-vodka/lib/mocktail.ak | 776 +++++++++ .../lib/mocktail/virgin_address.ak | 57 + .../lib/mocktail/virgin_key_hash.ak | 47 + .../lib/mocktail/virgin_output_reference.ak | 16 + .../lib/mocktail/virgin_outputs.ak | 30 + .../lib/mocktail/virgin_validity_range.ak | 28 + .../packages/sidan-lab-vodka/plutus.json | 14 + .../proxy/aiken-workspace/lib/types.ak | 9 + .../proxy/aiken-workspace/plutus.json | 163 ++ .../aiken-workspace/specs/1_auth_tokens.md | 15 + .../proxy/aiken-workspace/specs/2_proxy.md | 19 + .../proxy/aiken-workspace/specs/_scripts.md | 20 + .../specs/application_setup_doc.md | 13 + .../aiken-workspace/specs/user_action_doc.md | 6 + .../validators/auth_token/mint.ak | 27 + .../aiken-workspace/validators/proxy/spend.ak | 34 + .../validators/tests/auth_token/mint.ak | 63 + .../validators/tests/proxy/spend.ak | 65 + .../aiken-workspace/validators/tests/utils.ak | 64 + src/components/multisig/{ => proxy}/common.ts | 0 src/components/multisig/proxy/index.ts | 1 + src/components/multisig/proxy/offchain.ts | 234 +++ 125 files changed, 25596 insertions(+) create mode 100644 src/components/multisig/README.md create mode 100644 src/components/multisig/proxy/aiken-workspace/README.md create mode 100644 src/components/multisig/proxy/aiken-workspace/aiken.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/aiken.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/aiken-compile.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.editorconfig create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitattributes create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitignore create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/CHANGELOG.md create mode 100644 
src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/LICENSE create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/README.md create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/option.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak create mode 100644 
src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/packages.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/release.yml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.gitignore create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/LICENSE create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/README.md create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/assets/logo.png create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/aiken-compile.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak create mode 100644 
src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak create mode 100644 
src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/packages.toml create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cip.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/plutus.json create mode 100644 src/components/multisig/proxy/aiken-workspace/lib/types.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/plutus.json create mode 100644 src/components/multisig/proxy/aiken-workspace/specs/1_auth_tokens.md create mode 100644 src/components/multisig/proxy/aiken-workspace/specs/2_proxy.md create mode 100644 src/components/multisig/proxy/aiken-workspace/specs/_scripts.md create mode 100644 src/components/multisig/proxy/aiken-workspace/specs/application_setup_doc.md create mode 100644 
src/components/multisig/proxy/aiken-workspace/specs/user_action_doc.md create mode 100644 src/components/multisig/proxy/aiken-workspace/validators/auth_token/mint.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak create mode 100644 src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak rename src/components/multisig/{ => proxy}/common.ts (100%) create mode 100644 src/components/multisig/proxy/index.ts create mode 100644 src/components/multisig/proxy/offchain.ts diff --git a/src/components/multisig/README.md b/src/components/multisig/README.md new file mode 100644 index 00000000..e69de29b diff --git a/src/components/multisig/proxy/aiken-workspace/README.md b/src/components/multisig/proxy/aiken-workspace/README.md new file mode 100644 index 00000000..e76d2249 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/README.md @@ -0,0 +1 @@ +Implement in core mesh repo `mesh-contract` diff --git a/src/components/multisig/proxy/aiken-workspace/aiken.lock b/src/components/multisig/proxy/aiken-workspace/aiken.lock new file mode 100644 index 00000000..2e65bd16 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/aiken.lock @@ -0,0 +1,26 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[requirements]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +requirements = [] +source = "github" + +[[packages]] +name = "sidan-lab/vodka" +version = "0.1.13" +requirements = [] +source = "github" + +[etags] diff --git a/src/components/multisig/proxy/aiken-workspace/aiken.toml b/src/components/multisig/proxy/aiken-workspace/aiken.toml new file mode 100644 index 00000000..c8b0955c --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/aiken.toml @@ -0,0 +1,23 @@ +name = "mesh/proxy" +version = "0.0.0" +compiler = "v1.1.17" +plutus = "v3" +license = "Apache-2.0" +description = "Aiken contracts for project 'aiken-proxy'" + +[repository] +user = "QS" +project = "multisig" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[dependencies]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" + +[config] diff --git a/src/components/multisig/proxy/aiken-workspace/build/aiken-compile.lock b/src/components/multisig/proxy/aiken-workspace/build/aiken-compile.lock new file mode 100644 index 00000000..e69de29b diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.editorconfig new file mode 100644 index 00000000..0759674c --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*.ak] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitattributes new file mode 100644 index 
00000000..99fefcf4 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitattributes @@ -0,0 +1,2 @@ +# Temp hack to get some syntax highlighting on github +*.ak linguist-language=Gleam diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml new file mode 100644 index 00000000..b0081ac7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml @@ -0,0 +1,64 @@ +name: Continuous Integration + +on: + workflow_dispatch: + push: + branches: ["main"] + tags: ["*.*.*"] + pull_request: + branches: ["main"] + +env: + CARGO_TERM_COLOR: always + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: 📥 Checkout repository + uses: actions/checkout@v3 + + - name: 🧰 Setup Pages + uses: actions/configure-pages@v2 + + - name: 🧰 Install Aiken + uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + + - name: 📝 Run fmt + run: aiken fmt --check + + - name: 🔬 Run tests + run: aiken check + + - name: 📘 Generate documentation + shell: bash + working-directory: . + run: aiken docs -o docs + + - name: 📦 Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: "docs/" + + deploy: + if: ${{ startsWith(github.ref, 'refs/tags') }} + needs: build + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: 🚀 Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitignore new file mode 100644 index 00000000..3a3d38e6 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/.gitignore @@ -0,0 +1,3 @@ +build/ +docs/ +.DS_Store \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/CHANGELOG.md new file mode 100644 index 00000000..62345b32 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/CHANGELOG.md @@ -0,0 +1,805 @@ +# Changelog + +## v2.2.0 - 2024-12-13 + +### Added + +- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes. +- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting in pairs while specifying how to combine values on key conflict. + +## v2.1.0 - 2024-09-14 + +### Added + +- Various new helper functions: + - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects. + - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionnary in a single op. 
+ - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes. + - [`aiken/primitive/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/primitive/math/rational.html#pow): for computing (int) powers of rational numbers. + - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): efficiently compare two value-like. + - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): extracting value subsets from parent value. + +- Comparison functions for various Cardano types: + - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials. + - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governacen/voter.html#compare): for ordering voters. + - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references. + - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purpose. + +- New BLS12-381 crypto modules: + - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html) + - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html) + - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html) + +### Changed + +- N/A + +### Removed + +- N/A + +## v2.0.0 - 2024-09-01 + +> [!NOTE] +> Significant performance improvements (mostly on CPU) across all boards mostly due to the integration of Plutus V3. +> +>
see benchmarks +> +> test | cpu | mem +> --- | --- | --- +> aiken/cbor.{serialise_1} | -38.20% | ±0.00% +> aiken/cbor.{serialise_2} | -38.20% | ±0.00% +> aiken/cbor.{serialise_3} | -37.25% | ±0.00% +> aiken/cbor.{serialise_4} | -41.95% | ±0.00% +> aiken/cbor.{serialise_5} | -42.77% | ±0.00% +> aiken/cbor.{serialise_6} | -42.63% | ±0.00% +> aiken/cbor.{serialise_7} | -40.51% | ±0.00% +> aiken/cbor.{serialise_8} | -37.25% | ±0.00% +> aiken/cbor.{serialise_9} | -41.95% | ±0.00% +> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% +> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% +> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% +> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% +> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% +> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% +> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% +> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% +> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% +> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% +> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% +> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% +> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% +> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% +> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% +> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% +> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% +> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% +> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% +> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% +> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% +> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% +> aiken/collection/dict.{find_1} | -20.63% | ±0.00% +> aiken/collection/dict.{find_2} | -20.43% | ±0.00% +> aiken/collection/dict.{find_3} | -22.03% | ±0.00% +> aiken/collection/dict.{find_4} | -22.53% | ±0.00% +> aiken/collection/dict.{get_1} | -20.63% | ±0.00% +> aiken/collection/dict.{get_2} | -22.72% | ±0.00% +> aiken/collection/dict.{get_3} | -23.26% | ±0.00% +> aiken/collection/dict.{get_4} | -26.91% | ±0.00% +> aiken/collection/dict.{get_5} | -26.30% | ±0.00% +> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% +> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% +> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% +> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% +> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% +> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% +> aiken/collection/dict.{size_1} | -37.90% | ±0.00% +> aiken/collection/dict.{size_2} | -32.34% | ±0.00% +> aiken/collection/dict.{size_3} | -27.97% | ±0.00% +> aiken/collection/dict.{values_1} | -20.30% | ±0.00% +> aiken/collection/dict.{values_2} | -17.58% | ±0.00% +> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% +> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% +> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% +> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% +> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% +> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% +> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% +> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% +> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% +> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% +> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% +> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% +> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% +> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% +> aiken/collection/dict.{map_1} | -19.56% | ±0.00% +> aiken/collection/dict.{map_2} | -23.66% | ±0.00% +> aiken/collection/dict.{union_1} | -17.91% | ±0.00% +> aiken/collection/dict.{union_2} | -8.67% | ±0.00% +> aiken/collection/dict.{union_3} | -22.82% | ±0.00% +> aiken/collection/dict.{union_4} | -22.77% | ±0.00% +> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% +> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% +> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% +> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% +> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% +> aiken/collection/list.{push_1} | -8.02% | ±0.00% +> aiken/collection/list.{push_2} | 1.25% | ±0.00% +> aiken/collection/list.{range_1} | -27.77% | ±0.00% +> aiken/collection/list.{range_2} | -27.39% | ±0.00% +> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% +> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% +> aiken/collection/list.{all_1} | -28.36% | ±0.00% +> aiken/collection/list.{all_2} | -27.59% | ±0.00% +> aiken/collection/list.{all_3} | -27.94% | ±0.00% +> aiken/collection/list.{any_1} | -28.23% | ±0.00% +> aiken/collection/list.{any_2} | -28.09% | ±0.00% +> aiken/collection/list.{any_3} | -26.95% | ±0.00% +> aiken/collection/list.{at_1} | -27.60% | ±0.00% +> aiken/collection/list.{at_2} | -19.96% | ±0.00% +> aiken/collection/list.{at_3} | -27.60% | ±0.00% +> aiken/collection/list.{at_4} | -20.77% | ±0.00% +> aiken/collection/list.{at_5} | -25.75% | ±0.00% +> aiken/collection/list.{count_empty} | -36.83% | ±0.00% +> aiken/collection/list.{count_all} | -32.37% | ±0.00% +> aiken/collection/list.{count_some} | -31.73% | ±0.00% +> aiken/collection/list.{count_none} | -30.44% | ±0.00% +> aiken/collection/list.{find_1} | -20.59% | ±0.00% +> aiken/collection/list.{find_2} | -25.53% | ±0.00% +> aiken/collection/list.{find_3} | -19.64% | ±0.00% +> aiken/collection/list.{has_1} | -27.88% | ±0.00% +> aiken/collection/list.{has_2} | -27.69% | ±0.00% +> aiken/collection/list.{has_3} | -26.95% | ±0.00% +> aiken/collection/list.{head_1} | -14.03% | ±0.00% +> aiken/collection/list.{head_2} | -16.90% | ±0.00% +> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% +> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% +> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% +> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% +> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% +> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% +> aiken/collection/list.{last_1} | -19.18% | ±0.00% +> aiken/collection/list.{last_2} | -16.26% | ±0.00% +> aiken/collection/list.{last_3} | -17.13% | ±0.00% +> aiken/collection/list.{length_1} | -37.90% | ±0.00% +> aiken/collection/list.{length_2} | -30.89% | ±0.00% +> aiken/collection/list.{delete_1} | -20.20% | ±0.00% +> aiken/collection/list.{delete_2} | -15.02% | ±0.00% +> aiken/collection/list.{delete_3} | -20.55% | ±0.00% +> aiken/collection/list.{delete_4} | -22.46% | ±0.00% +> aiken/collection/list.{drop_1} | -24.62% | ±0.00% +> aiken/collection/list.{drop_2} | -28.08% | ±0.00% +> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% +> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% +> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% +> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% +> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% +> aiken/collection/list.{filter_2} | -32.06% | ±0.00% +> aiken/collection/list.{filter_3} | -31.39% | ±0.00% +> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% +> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% +> aiken/collection/list.{init_1} | -19.64% | ±0.00% +> aiken/collection/list.{init_2} | -20.01% | ±0.00% +> aiken/collection/list.{init_3} | -13.72% | ±0.00% +> aiken/collection/list.{partition_1} | -14.63% | ±0.00% +> aiken/collection/list.{partition_2} | -16.85% | ±0.00% +> aiken/collection/list.{partition_3} | -16.63% | ±0.00% +> aiken/collection/list.{partition_4} | -16.87% | ±0.00% +> aiken/collection/list.{partition_5} | -22.94% | ±0.00% +> aiken/collection/list.{slice_1} | -29.08% | -2.81% +> aiken/collection/list.{slice_2} | -30.11% | -2.25% +> aiken/collection/list.{slice_3} | -30.29% | -1.46% +> aiken/collection/list.{slice_4} | -28.53% | -1.48% +> aiken/collection/list.{slice_5} | -29.73% | -1.64% +> aiken/collection/list.{slice_6} | -32.01% | -1.80% +> aiken/collection/list.{span_1} | -15.05% | ±0.00% +> aiken/collection/list.{span_2} | -18.03% | ±0.00% +> aiken/collection/list.{span_3} | -12.49% | ±0.00% +> aiken/collection/list.{span_4} | -18.13% | ±0.00% +> aiken/collection/list.{tail_1} | -8.88% | ±0.00% +> aiken/collection/list.{tail_2} | -16.90% | ±0.00% +> aiken/collection/list.{take_1} | -24.98% | ±0.00% +> aiken/collection/list.{take_2} | -24.35% | ±0.00% +> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% +> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% +> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% +> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% +> aiken/collection/list.{unique_1} | -20.20% | ±0.00% +> aiken/collection/list.{unique_2} | -24.34% | ±0.00% +> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% +> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% +> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% +> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% +> aiken/collection/list.{map_1} | -19.79% | ±0.00% +> aiken/collection/list.{map_2} | -16.75% | ±0.00% +> aiken/collection/list.{map2_1} | -20.10% | ±0.00% +> aiken/collection/list.{map2_2} | -17.46% | ±0.00% +> aiken/collection/list.{map2_3} | -15.92% | ±0.00% +> aiken/collection/list.{map3_1} | -20.39% | ±0.00% +> aiken/collection/list.{map3_2} | -19.22% | ±0.00% +> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% +> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% +> aiken/collection/list.{sort_1} | -22.31% | ±0.00% +> aiken/collection/list.{sort_2} | -17.93% | ±0.00% +> aiken/collection/list.{sort_3} | -23.09% | ±0.00% +> aiken/collection/list.{sort_4} | -20.20% | ±0.00% +> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% +> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% +> aiken/collection/list.{concat_1} | -6.56% | ±0.00% +> aiken/collection/list.{concat_2} | -11.25% | ±0.00% +> aiken/collection/list.{concat_3} | -9.35% | ±0.00% +> aiken/collection/list.{difference_1} | -24.23% | ±0.00% +> aiken/collection/list.{difference_2} | -22.59% | ±0.00% +> aiken/collection/list.{difference_3} | -10.64% | ±0.00% +> aiken/collection/list.{difference_4} | -21.68% | ±0.00% +> aiken/collection/list.{zip_1} | -20.10% | ±0.00% +> aiken/collection/list.{zip_2} | -19.17% | ±0.00% +> aiken/collection/list.{zip_3} | -10.35% | ±0.00% +> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% +> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% +> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% +> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% +> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% +> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% +> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% +> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% +> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% +> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% +> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% +> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% +> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% +> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% +> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% +> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% +> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% +> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% +> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% +> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% +> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% +> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% +> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% +> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% +> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% +> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% +> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% +> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% +> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% +> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% +> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% +> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% +> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% +> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% +> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% +> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% +> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% +> aiken/interval.{contains_1} | -21.08% | -4.01% +> aiken/interval.{contains_2} | -31.22% | -13.95% +> aiken/interval.{contains_3} | -26.80% | -10.08% +> aiken/interval.{contains_4} | -31.02% | -13.67% +> aiken/interval.{contains_5} | -32.32% | -13.59% +> aiken/interval.{contains_6} | -28.15% | -9.81% +> aiken/interval.{contains_7} | -32.11% | -13.32% +> aiken/interval.{contains_8} | -29.56% | 
-12.59% +> aiken/interval.{contains_9} | -29.68% | -12.78% +> aiken/interval.{contains_10} | -29.68% | -12.78% +> aiken/interval.{contains_11} | -35.17% | -17.77% +> aiken/interval.{contains_12} | -21.09% | -3.86% +> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% +> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% +> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% +> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% +> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% +> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% +> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% +> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% +> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% +> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% +> aiken/interval.{hull_1} | -21.51% | -0.73% +> aiken/interval.{hull_2} | -23.06% | -0.80% +> aiken/interval.{hull_3} | -22.00% | -0.86% +> aiken/interval.{intersection_1} | -21.51% | -0.73% +> aiken/interval.{intersection_2} | -21.51% | -0.73% +> aiken/interval.{intersection_3} | -26.55% | -4.65% +> aiken/interval.{intersection_4} | -26.45% | -4.51% +> aiken/interval.{intersection_5} | -22.87% | -0.76% +> aiken/interval.{intersection_6} | -19.73% | -0.98% +> aiken/math.{abs_1} | -61.39% | -21.07% +> aiken/math.{abs_2} | -70.90% | -34.84% +> aiken/math.{clamp_1} | -60.95% | -23.55% +> aiken/math.{clamp_2} | -60.95% | -23.55% +> aiken/math.{clamp_3} | -59.22% | -18.20% +> aiken/math.{gcd_test1} | -47.20% | ±0.00% +> aiken/math.{gcd_test2} | -47.81% | ±0.00% +> aiken/math.{gcd_test3} | -46.10% | ±0.00% +> aiken/math.{is_sqrt1} | -87.41% | -68.64% +> aiken/math.{is_sqrt2} | -87.41% | -68.64% +> aiken/math.{log_10_2} | -51.35% | -8.40% +> aiken/math.{log_42_2} | -51.46% | -8.24% +> aiken/math.{log_42_3} | -51.05% | -7.81% +> aiken/math.{log_5_0} | -54.05% | -12.92% +> aiken/math.{log_4_4} | -50.59% | -9.31% +> aiken/math.{log_4_43} | -49.14% | -7.28% +> aiken/math.{max_1} | -61.39% | -21.07% +> aiken/math.{max_2} | -61.39% | -21.07% +> aiken/math.{max_3} | -61.39% | -21.07% +> aiken/math.{min_1} | -61.39% | -21.07% +> aiken/math.{min_2} | -61.39% | -21.07% +> aiken/math.{min_3} | -61.39% | -21.07% +> aiken/math.{pow_3_5} | -46.34% | ±0.00% +> aiken/math.{pow_7_2} | -46.38% | ±0.00% +> aiken/math.{pow_3__4} | -43.50% | ±0.00% +> aiken/math.{pow_0_0} | -43.95% | ±0.00% +> aiken/math.{pow_513_3} | -45.80% | ±0.00% +> aiken/math.{pow_2_4} | -46.79% | ±0.00% +> aiken/math.{pow_2_42} | -46.77% | ±0.00% +> aiken/math.{pow2_neg} | -44.71% | ±0.00% +> aiken/math.{pow2_0} | -45.00% | ±0.00% +> aiken/math.{pow2_1} | -45.00% | ±0.00% +> aiken/math.{pow2_4} | -45.00% | ±0.00% +> aiken/math.{pow2_42} | -42.01% | ±0.00% +> aiken/math.{pow2_256} | -41.40% | ±0.00% +> aiken/math.{sqrt1} | -32.56% | -17.18% +> aiken/math.{sqrt2} | -32.56% | -17.18% +> aiken/math.{sqrt3} | -49.99% | -8.90% +> aiken/math.{sqrt4} | -51.76% | -3.90% +> aiken/math.{sqrt5} | -52.63% | -1.33% +> aiken/math.{sqrt6} | -28.16% | -15.41% +> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% +> aiken/math/rational.{new_1} | -22.98% | ±0.00% +> aiken/math/rational.{zero_1} | -8.08% | ±0.00% +> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% +> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% +> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% +> aiken/math/rational.{negate_1} | -15.39% | ±0.00% +> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% +> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% +> aiken/math/rational.{add_1} | -15.11% | ±0.00% +> aiken/math/rational.{add_2} | -15.11% | ±0.00% +> aiken/math/rational.{div_1} | -22.31% | -2.75% +> aiken/math/rational.{div_2} | -22.37% | -2.79% +> aiken/math/rational.{mul_1} | -13.37% | ±0.00% +> aiken/math/rational.{mul_2} | -13.37% | ±0.00% +> aiken/math/rational.{mul_3} | -26.25% | ±0.00% +> aiken/math/rational.{sub_1} | -15.11% | ±0.00% +> aiken/math/rational.{sub_2} | -15.11% | ±0.00% +> aiken/math/rational.{sub_3} | -15.11% | ±0.00% +> aiken/math/rational.{compare_1} | -21.70% | ±0.00% +> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% +> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% +> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% +> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% +> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% +> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% +> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% +> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% +> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% +> aiken/math/rational.{floor_1} | -29.49% | ±0.00% +> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% +> aiken/math/rational.{round_1} | -25.17% | ±0.00% +> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% +> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% +> aiken/option.{is_none_1} | -26.56% | ±0.00% +> aiken/option.{is_none_2} | -27.52% | ±0.00% +> aiken/option.{is_some_1} | -27.52% | ±0.00% +> aiken/option.{is_some_2} | -26.56% | ±0.00% +> aiken/option.{and_then_1} | -20.19% | ±0.00% +> aiken/option.{and_then_2} | -22.15% | ±0.00% +> aiken/option.{and_then_3} | -21.85% | ±0.00% +> aiken/option.{choice_1} | -17.11% | ±0.00% +> aiken/option.{choice_2} | -19.75% | ±0.00% +> aiken/option.{choice_3} | -18.68% | ±0.00% +> aiken/option.{flatten_1} | -12.25% | ±0.00% +> aiken/option.{flatten_2} | -15.41% | ±0.00% +> aiken/option.{flatten_3} | -19.46% | ±0.00% +> aiken/option.{flatten_4} | -14.31% | ±0.00% +> aiken/option.{map_1} | -19.89% | ±0.00% +> aiken/option.{map_2} | -18.18% | ±0.00% +> aiken/option.{map2_1} | -20.47% | ±0.00% +> aiken/option.{map2_2} | -19.93% | ±0.00% +> aiken/option.{map2_3} | -13.64% | ±0.00% +> aiken/option.{map3_1} | -20.74% | ±0.00% +> aiken/option.{map3_2} | -20.00% | ±0.00% +> aiken/option.{map3_3} | -19.90% | ±0.00% +> aiken/option.{or_try_1} | -14.36% | ±0.00% +> aiken/option.{or_try_2} | -14.36% | ±0.00% +> aiken/option.{or_else_1} | -38.16% | ±0.00% +> aiken/option.{or_else_2} | -27.62% | ±0.00% +> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% +> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% +> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% +> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% +> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% +> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% +> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% +> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% +> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% +> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% +> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% +> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% +> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% +> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% +> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% +> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% +> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% +> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% +> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% +> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% +> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% +> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% +> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% +> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% +> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% +> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% +> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% +> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% +> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% +> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% +> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% +> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% +> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% +> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% +> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% +> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% +> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% +> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% +> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% +> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% +> aiken/primitive/string.{concat_1} | -92.30% | -80.10% +> aiken/primitive/string.{concat_2} | -97.34% | -85.87% +> aiken/primitive/string.{concat_3} | -98.67% | -80.35% +> aiken/primitive/string.{join_1} | -42.87% | ±0.00% +> aiken/primitive/string.{join_2} | -37.65% | ±0.00% +> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% +> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% +> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% +> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% +> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% +> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% +> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% +> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% +> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% +> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% +> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% +> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% +> cardano/assets.{add_1} | -27.84% | ±0.00% +> cardano/assets.{add_2} | -27.56% | -0.54% +> cardano/assets.{add_3} | -26.39% | ±0.00% +> cardano/assets.{add_4} | -29.75% | -10.41% +> cardano/assets.{add_5} | -27.80% | ±0.00% +> cardano/assets.{merge_1} | -26.02% | ±0.00% +> cardano/assets.{merge_2} | -19.60% | ±0.00% +> cardano/assets.{merge_3} | -23.80% | ±0.00% +> cardano/assets.{merge_4} | -25.92% | ±0.00% +> cardano/assets.{merge_5} | -27.61% | -1.98% +> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% +> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% +> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% +> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% +> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% +> cardano/assets.{reduce_1} | -24.31% | ±0.00% +> cardano/assets.{reduce_2} | -20.89% | ±0.00% +> cardano/assets.{reduce_3} | -36.21% | ±0.00% +>
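+Several of the `cardano/assets` rows above exercise `add`, `merge`, and `from_asset_list`. As a quick orientation, here is a small, hypothetical test sketching how `zero`, `add`, `from_asset`, and `merge` relate (assuming the documented v2 `cardano/assets` API; the policy and asset names are placeholders):
+
+```aiken
+use cardano/assets
+
+test add_vs_merge_sketch() {
+  // Building a single-asset value directly with `add`...
+  let via_add = assets.zero |> assets.add("pid", "token", 42)
+  // ...should equal merging two smaller quantities of the same asset.
+  let via_merge =
+    assets.merge(
+      assets.from_asset("pid", "token", 40),
+      assets.from_asset("pid", "token", 2),
+    )
+  via_add == via_merge
+}
+```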
+ +### Added + +- New modules covering Conway-related features (i.e. governance) + - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) + - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) + +- New primitives in `aiken/collection/pairs`: + - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) + - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) + +- New primitives in `aiken/crypto`: + - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) + - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) + +- New primitives in `aiken/math`: + - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) + +- New primitives in `aiken/primitive/bytearray`: + - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) + - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) + - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) + - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) + - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) + +- New primitives in `aiken/primitive/int`: + - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) + - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) + +- New primitives in `aiken/crypto`: + - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) + - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) + +### Changed + +- Few modules have been relocated and better organized: + - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) + - **collections** + - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) + - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) + - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) + - **primitive** + - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) + - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) + - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) + - **cardano** + - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) + - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) + - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` + - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) + +- Several zero-argument functions have been turned into top-level constants + - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty) + - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty) + - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything) + - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero) + - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero) + +- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3. + +- The `ScriptContext` type has split from `cardano/transaction` (originally `aiken/transaction`) and moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html) and adjusted to its new form as per Plutus V3. + +- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` into `VerificationKey` and `Script` respectively. + +- The function `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last` respectively. + +- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`. + +### Removed + +- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere. + +- `MintedValue` (from `aiken/transaction/value` originally) and its associated functions are no longer needed and, therefore, gone. + +## v1.9.0 - 2024-05-24 + +### Added + +- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`). + +### Changed + +- **BREAKING-CHANGE**
+ Specialized all `Dict` keys to `ByteArray`, thus removing the need to pass an extra comparison function to many functions. `Dict` is, however, still specialized with a phantom type for keys. + +- **BREAKING-CHANGE**
+ Few functions from `Dict` have been renamed for consistency: + - `from_list` -> `from_pairs` + - `from_ascending_list` -> `from_ascending_pairs` + - `to_list` -> `to_pairs` + +### Removed + +N/A + +## v1.8.0 - 2024-03-28 + +### Added + +- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements. + +- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a Value while enforcing invariants expected of `Value`. + +- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`. + +### Changed + +- Disclaimers in documentation to [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding. + +### Removed + +N/A + +## v1.7.0 - 2023-11-07 + +### Added + +- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a values index in a list. +- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions. +- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null. + +### Changed + +- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contain a null quantity of Ada. + +### Removed + +N/A + +## v1.6.0 - 2023-09-08 + +### Added + +- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentions for powers of two. +- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing if a bit is set in a bytearray (MSB). + +## v1.5.0 - 2023-08-16 + +### Removed + +- retired `list.and` and `list.or` because of the new keywords for logical op chaining. + +## v1.4.0 - 2023-07-21 + +### Changed + +- Fixed missing null-check on `value.add`. Adding a null quantity of token is now correctly a no-op. + +## v1.3.0 - 2023-06-30 + +### Added + +- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method. +- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator): For accessing parts of a rational value. +- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values. +- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values. + +### Changed + +- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before that fix, it could happen that removing all assets from a given policy would lead to an empty dictionnary of assets still be present in the `Value`. 
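+The `math/rational` helpers introduced in this release compose naturally with the primitives listed for v1.2.0 below. Here is a minimal, hypothetical sketch (assuming `new` returns an `Option` and `arithmetic_mean` folds over a list, as the linked documentation suggests); `compare` keeps the check insensitive to whether the result is reduced:
+
+```aiken
+use aiken/math/rational
+
+test arithmetic_mean_sketch() {
+  // The mean of 1/2 and 3/2 is 1, possibly represented as 4/4 before reduction.
+  expect Some(a) = rational.new(1, 2)
+  expect Some(b) = rational.new(3, 2)
+  expect Some(mean) = rational.arithmetic_mean([a, b])
+  expect Some(one) = rational.new(1, 1)
+  rational.compare(mean, one) == Equal
+}
+```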
+ +## v1.2.0 - 2023-06-17 + +### Added + +- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue) +- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value` +- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue` +- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex encoded `String` +- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers. + - [x] `abs` + - [x] `add` + - [x] `ceil` + - [x] `compare` + - [x] `compare_with` + - [x] `div` + - [x] `floor` + - [x] `from_int` + - [x] `mul` + - [x] `negate` + - [x] `new` + - [x] `proper_fraction` + - [x] `reciprocal` + - [x] `reduce` + - [x] `round` + - [x] `round_even` + - [x] `sub` + - [x] `truncate` + - [x] `zero` + +### Removed + +- module `MintedValue` was merged with `Value` + +## v1.1.0 - 2023-06-06 + +### Added + +- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate. + +- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a utf-8 encoded `ByteArray`, when possible. + +- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): for left and right folds over dictionnary elements in ascending key order. + +- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exist, the provided merge function is called. + +- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset. + +- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionnary of dictionnaries. + +- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. This allows to simplify some of the implementation for `Value` which no longer needs to handle the special case where null-quantity tokens would be present. It isn't possible to construct `MintedValue` by hand, they come from the script context entirely and are 'read-only'. + +- More documentation for `dict` and `interval` modules. + +### Changed + +> **Warning** +> +> Most of those changes are breaking-changes. Though, given we're still in an +> alpha state, only the `minor` component is bumped from the version number. +> Please forgive us. + +- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as last argument. For example: + + ``` + fn foldl(self: List
, with: fn(a, b) -> b, zero: b) -> b + + ↓ becomes + + fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b + ``` + +- Fixed implementation of `bytearray.slice`; `slice` would otherwise behave as if the second argument were an offset. + +- Rename `transaction/value.add` into `transaction/value.merge`. + +- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. + +- Fixed various examples from the documentation + +### Removed + +- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. + +## v1.0.0 - 2023-04-13 + +### Added + +N/A + +### Changed + +N/A + +### Removed + +N/A diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/LICENSE new file mode 100644 index 00000000..4a1de273 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Lucas Rosa + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/README.md new file mode 100644 index 00000000..4cd6fef2 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/README.md @@ -0,0 +1,71 @@ +
+# Aiken Standard Library
+
+[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE)
+[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml)
+
+ +## Getting started + +``` +aiken add aiken-lang/stdlib --version v2 +``` + +## Compatibility + +aiken's version | stdlib's version(s) +--- | --- +`v1.1.3`
<br/>`v1.1.4`<br/>`v1.1.5`<br/>`v1.1.6`<br/>`v1.1.7` | `>= 2.1.0`
+`v1.1.1`<br/>`v1.1.2` | `>= 2.0.0` && `< 2.1.0`
+`v1.0.29-alpha`<br/>
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` +`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` + +## Overview + +The official standard library for the [Aiken](https://aiken-lang.org) Cardano +smart-contract language. + +It extends the language builtins with useful data-types, functions, constants +and aliases that make using Aiken a bliss. + +```aiken +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash} +use cardano/transaction.{OutputReference, Transaction} + +pub type Datum { + owner: VerificationKeyHash, +} + +pub type Redeemer { + msg: ByteArray, +} + +/// A simple validator which replicates a basic public/private signature lock. +/// +/// - The key (hash) is set as datum when the funds are sent to the script address. +/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message +/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' +/// +validator hello_world { + spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { + expect Some(Datum { owner }) = datum + + let must_say_hello = redeemer.msg == "Hello, World!" + + let must_be_signed = list.has(self.extra_signatories, owner) + + and { + must_say_hello, + must_be_signed, + } + } +} +``` + +## Stats + +![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.lock new file mode 100644 index 00000000..769ac20f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.lock @@ -0,0 +1,16 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" + +[[packages]] +name = "aiken-lang/fuzz" +version = "v2" +requirements = [] +source = "github" + +[etags] +"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.toml new file mode 100644 index 00000000..cbc76a0b --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/aiken.toml @@ -0,0 +1,15 @@ +name = "aiken-lang/stdlib" +version = "2.2.0" +compiler = "v1.1.9" +plutus = "v3" +description = "The Aiken Standard Library" + +[repository] +user = "aiken-lang" +project = "stdlib" +platform = "github" + +[[dependencies]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak new file mode 100644 index 00000000..f0c66d69 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak @@ -0,0 +1,293 @@ +use aiken +use aiken/builtin.{decode_utf8, serialise_data} +use aiken/primitive/bytearray + +/// Obtain a String representation of _anything_. This is particularly (and only) useful for tracing +/// and debugging. 
This function is expensive and should not be used in any production code as it +/// will very likely explodes the validator's budget. +/// +/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) +/// of the underlying on-chain binary representation of the data. It's not as +/// easy to read as plain Aiken code, but it is handy for troubleshooting values +/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is +/// a good idea in the Cardano world. +/// +/// ```aiken +/// cbor.diagnostic(42) == "42" +/// cbor.diagnostic(#"a1b2") == "h'A1B2'" +/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" +/// cbor.diagnostic([]) == "[]" +/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" +/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" +/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" +/// cbor.diagnostic(Some(42)) == "121([_ 42])" +/// cbor.diagnostic(None) == "122([])" +/// ``` +pub fn diagnostic(self: Data) -> String { + aiken.diagnostic(self, #"") + |> decode_utf8 +} + +/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). +/// In particular, we have the following property: +/// +/// ```aiken +/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) +/// ``` +/// +/// > [!CAUTION] +/// > Unfortunately, this function isn't derived from a builtin primitive. It +/// > is therefore an order of magnitude more expensive than its counterpart +/// > and shall be used with care. +/// > +/// > In general, one might prefer avoiding deserialisation unless truly necessary. +/// > Yet, it may come in handy for testing and in rare scenarios. +pub fn deserialise(bytes: ByteArray) -> Option { + let length = bytearray.length(bytes) + + let peek = + fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(bytearray.at(bytes, length - cursor))(cursor - offset) + } + } + } + + let take = + fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(builtin.slice_bytearray(length - cursor, n, bytes))( + cursor - n, + ) + } + } + } + + if length == 0 { + None + } else { + let Pair(result, consumed) = decode_data(peek, take)(length) + if consumed != 0 { + None + } else { + Some(result) + } + } +} + +/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). +/// +/// This is particularly useful in combination with hashing functions, as a way +/// to obtain a byte representation that matches the serialised representation +/// used by the ledger in the context of on-chain code. +/// +/// Note that the output matches the output of [`diagnostic`](#diagnostic), +/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a +/// textual representation of the CBOR encoding that is human friendly and +/// useful for debugging. +/// +/// ```aiken +/// cbor.serialise(42) == #"182a" +/// cbor.serialise(#"a1b2") == #"42a1b2" +/// cbor.serialise([]) == #"80" +/// cbor.serialise((1, 2)) == #"9f0102ff" +/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" +/// cbor.serialise([(1, #"ff")]) == #"a10141ff" +/// cbor.serialise(Some(42)) == #"d8799f182aff" +/// cbor.serialise(None) == #"d87a80" +/// ``` +pub fn serialise(self: Data) -> ByteArray { + serialise_data(self) +} + +type Byte = + Int + +type Decoder
= + fn(Int) -> Pair + +type Peek = + fn(Int, fn(Byte) -> Decoder) -> Decoder + +type Take = + fn(Int, fn(ByteArray) -> Decoder) -> Decoder + +fn return(data: Data) -> Decoder { + fn(cursor) { Pair(data, cursor) } +} + +const deserialise_failure: Pair = { + let empty: Data = "" + Pair(empty, -1) + } + +const token_begin_bytes = 0x5f + +const token_begin_list = 0x9f + +const token_begin_map = 0xbf + +const token_break = 0xff + +fn decode_data(peek: Peek, take: Take) -> Decoder { + let next <- peek(1) + let major_type = next / 32 + if major_type <= 2 { + if major_type == 0 { + let i <- decode_uint(peek, take, next) + return(builtin.i_data(i)) + } else if major_type == 1 { + let i <- decode_uint(peek, take, next - 32) + return(builtin.i_data(-i - 1)) + } else { + if next == token_begin_bytes { + let b <- decode_chunks(peek, take) + return(builtin.b_data(b)) + } else { + let b <- decode_bytes(peek, take, next - 64) + return(builtin.b_data(b)) + } + } + } else if major_type == 6 { + let tag <- decode_uint(peek, take, next - 192) + let next <- peek(1) + if tag == 102 { + fn(_) { deserialise_failure } + } else { + let ix = + if tag >= 1280 { + tag - 1280 + 7 + } else { + tag - 121 + } + if next == token_begin_list { + let fields <- decode_indefinite(peek, take, decode_data) + return(builtin.constr_data(ix, fields)) + } else { + let size <- decode_uint(peek, take, next - 128) + let fields <- decode_definite(peek, take, decode_data, size) + return(builtin.constr_data(ix, fields)) + } + } + } else if major_type == 4 { + if next == token_begin_list { + let xs <- decode_indefinite(peek, take, decode_data) + return(builtin.list_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 128) + let xs <- decode_definite(peek, take, decode_data, size) + return(builtin.list_data(xs)) + } + } else if major_type == 5 { + if next == token_begin_map { + let xs <- decode_indefinite(peek, take, decode_pair) + return(builtin.map_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 160) + let xs <- decode_definite(peek, take, decode_pair, size) + return(builtin.map_data(xs)) + } + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_pair(peek: Peek, take: Take) -> Decoder> { + fn(cursor) { + let Pair(k, cursor) = decode_data(peek, take)(cursor) + let Pair(v, cursor) = decode_data(peek, take)(cursor) + Pair(Pair(k, v), cursor) + } +} + +fn decode_uint( + peek: Peek, + take: Take, + header: Int, + and_then: fn(Int) -> Decoder, +) -> Decoder { + if header < 24 { + and_then(header) + } else if header == 24 { + let payload <- peek(1) + and_then(payload) + } else if header < 28 { + let width = bytearray.at(#[2, 4, 8], header - 25) + let payload <- take(width) + and_then(bytearray.to_int_big_endian(payload)) + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_bytes( + peek: Peek, + take: Take, + header: Int, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let width <- decode_uint(peek, take, header) + let bytes <- take(width) + and_then(bytes) +} + +fn decode_chunks( + peek: Peek, + take: Take, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then("") + } else { + let chunk <- decode_bytes(peek, take, next - 64) + let chunks <- decode_chunks(peek, take) + and_then(builtin.append_bytearray(chunk, chunks)) + } +} + +fn decode_definite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + size: Int, + and_then: fn(List) -> Decoder, +) -> Decoder { + if size <= 0 { + and_then([]) + } else { + fn(cursor) 
{ + let Pair(elem, cursor) = decode_one(peek, take)(cursor) + { + let elems <- decode_definite(peek, take, decode_one, size - 1) + and_then([elem, ..elems]) + }(cursor) + } + } +} + +fn decode_indefinite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + and_then: fn(List) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then([]) + } else { + fn(cursor) { + let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) + { + let elems <- decode_indefinite(peek, take, decode_one) + and_then([elem, ..elems]) + }(cursor) + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak new file mode 100644 index 00000000..28d9f5bb --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak @@ -0,0 +1,297 @@ +use aiken/cbor.{deserialise, diagnostic, serialise} +use aiken/fuzz + +// ------------------------------------------------------------------ diagnostic + +test diagnostic_1() { + diagnostic(42) == @"42" +} + +test diagnostic_2() { + diagnostic(#"a1b2") == @"h'A1B2'" +} + +test diagnostic_3() { + diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" +} + +test diagnostic_4() { + diagnostic([]) == @"[]" +} + +test diagnostic_5() { + diagnostic((1, 2)) == @"[_ 1, 2]" +} + +test diagnostic_6() { + diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" +} + +test diagnostic_7() { + diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" +} + +test diagnostic_7_alt() { + diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" +} + +test diagnostic_8() { + diagnostic(Some(42)) == @"121([_ 42])" +} + +test diagnostic_9() { + diagnostic(None) == @"122([])" +} + +test diagnostic_10() { + let xs: List<(Int, Int)> = [] + diagnostic(xs) == @"[]" +} + +test diagnostic_10_alt() { + let xs: Pairs = [] + diagnostic(xs) == @"{}" +} + +type Foo { + foo: Bar, +} + +type Bar { + A + B(Int) +} + +test diagnostic_11() { + diagnostic(Foo { foo: A }) == @"121([_ 121([])])" +} + +test diagnostic_12() { + diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" +} + +type Baz { + a0: Int, + b0: ByteArray, +} + +test diagnostic_13() { + diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" +} + +test diagnostic_14() { + diagnostic([0]) == @"[_ 0]" +} + +test diagnostic_15() { + diagnostic(-42) == @"-42" +} + +test diagnostic_16() { + diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" +} + +// ------------------------------------------------------------------ serialise + +test serialise_1() { + serialise(42) == #"182a" +} + +test serialise_2() { + serialise(#"a1b2") == #"42a1b2" +} + +test serialise_3() { + serialise([]) == #"80" +} + +test serialise_4() { + serialise((1, 2)) == #"9f0102ff" +} + +test serialise_5() { + serialise((1, #"ff", 3)) == #"9f0141ff03ff" +} + +test serialise_6() { + serialise([(1, #"ff")]) == #"9f9f0141ffffff" +} + +test serialise_7() { + serialise(Some(42)) == #"d8799f182aff" +} + +test serialise_8() { + serialise(None) == #"d87a80" +} + +test serialise_9() { + serialise([Pair(1, #"ff")]) == #"a10141ff" +} + +// ------------------------------------------------------------------ deserialise + +type AnyData { + AnyInt(Int) + AnyByteArray(ByteArray) + AnyList(List) + AnyPairs(Pairs) + AnyUnaryConstr0(UnaryConstr0) + AnyUnaryConstr1(UnaryConstr1) + AnyUnaryConstr2(UnaryConstr2) + AnyBinaryConstr0(BinaryConstr0) + AnyBinaryConstr1(BinaryConstr1) +} + +type 
UnaryConstr0 { + UnaryConstr0 +} + +type UnaryConstr1 { + field0: String, +} + +type UnaryConstr2 { + field0: Int, + field1: List>, +} + +type BinaryConstr0 = + Bool + +type BinaryConstr1 = + Option + +fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { + let k <- fuzz.and_then(any_key) + let v <- fuzz.map(any_value) + Pair(k, v) +} + +fn any_data() -> Fuzzer { + fuzz.either6( + { + let i <- fuzz.map(fuzz.int()) + AnyInt(i) + }, + { + let bs <- fuzz.map(fuzz.bytearray()) + AnyByteArray(bs) + }, + { + let xs <- fuzz.map(fuzz.list(fuzz.int())) + AnyList(xs) + }, + { + let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) + AnyPairs(ps) + }, + fuzz.either3( + fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), + fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), + { + let i <- fuzz.and_then(fuzz.int()) + let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) + AnyUnaryConstr2(UnaryConstr2(i, xs)) + }, + ), + fuzz.either( + { + let b <- fuzz.map(fuzz.bool()) + AnyBinaryConstr0(b) + }, + { + let o <- fuzz.map(fuzz.option(fuzz.int())) + AnyBinaryConstr1(o) + }, + ), + ) +} + +test unit_deserialise_not_enough_bytes_1() { + expect None = deserialise(#"") +} + +test unit_deserialise_not_enough_bytes_2() { + expect None = deserialise(#"82") +} + +test unit_deserialise_non_empty_leftovers() { + expect None = deserialise(#"811442") +} + +test unit_deserialise_invalid_header() { + expect None = deserialise(#"f1") +} + +test unit_deserialise_invalid_uint() { + expect None = deserialise(#"1d0013bdae") +} + +/// A full script context with a minting policy and various assets. Meant to be +/// non-trivial and cover many things we might encounter in a transaction. +test bench_deserialise_script_context() { + expect Some(_) = + deserialise( + 
#"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", + ) +} + +test prop_deserialise_any_data(any via any_data()) { + when any is { + AnyInt(i) -> { + fuzz.label(@"Int") + expect Some(data) = deserialise(serialise(i)) + expect i_decoded: Int = data + i_decoded == i + } + AnyByteArray(bs) -> { + fuzz.label(@"ByteArray") + expect Some(data) = deserialise(serialise(bs)) + expect bs_decoded: ByteArray = data + bs_decoded == bs + } + AnyList(xs) -> { + fuzz.label(@"List") + expect Some(data) = deserialise(serialise(xs)) + expect xs_decoded: List = data + xs_decoded == xs + } + AnyPairs(ps) -> { + fuzz.label(@"Pairs") + expect Some(data) = deserialise(serialise(ps)) + expect ps_decoded: Pairs = data + ps_decoded == ps + } + AnyUnaryConstr0(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr0 = data + constr_decoded == constr + } + AnyUnaryConstr1(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr1 = data + constr_decoded == constr + } + AnyUnaryConstr2(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr2 = data + constr_decoded == constr + } + AnyBinaryConstr0(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr0 = data + constr_decoded == constr + } + AnyBinaryConstr1(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: 
BinaryConstr1 = data + constr_decoded == constr + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak new file mode 100644 index 00000000..3d4d332e --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak @@ -0,0 +1,4 @@ +/// A non negative integer that materializes the position of an element in a +/// collection. +pub type Index = + Int diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak new file mode 100644 index 00000000..681d0bae --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak @@ -0,0 +1,1174 @@ +//// A module for working with bytearray dictionaries. +//// +//// +//// > [!IMPORTANT] +//// > +//// > Dictionaries are **ordered sets** of key-value pairs, which thus +//// > preserve some invariants. Specifically, each key is only present once in +//// > the dictionary and all keys are stored in ascending lexicographic order. +//// > +//// > These invariants allow for more optimized functions to operate on `Dict`, +//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` +//// > from an unknown `Data`, you must first recover an `Pairs` and use +//// > [`dict.from_ascending_list`](#from_ascending_list). + +use aiken/builtin + +/// An opaque `Dict`. The type is opaque because the module maintains some +/// invariant, namely: there's only one occurrence of a given key in the dictionary. +/// +/// Note that the `key` parameter is a phantom-type, and only present as a +/// means of documentation. Keys can be any type, yet will need to comparable +/// to use functions like `insert`. +/// +/// See for example: +/// +/// ```aiken +/// pub type Value = +/// Dict> +/// ``` +pub opaque type Dict { + inner: Pairs, +} + +// ## Constructing + +/// An empty dictionnary. +/// ```aiken +/// dict.to_pairs(dict.empty) == [] +/// ``` +pub const empty: Dict = Dict { inner: [] } + +const foo = #"666f6f" + +const bar = #"626172" + +const baz = #"62617a" + +const fixture_1 = + empty + |> insert(foo, 42) + |> insert(bar, 14) + +/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending +/// keys. This function fails (i.e. halts the program execution) if the list isn't +/// sorted. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// let result = +/// dict.from_ascending_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +/// +/// This is meant to be used to turn a list constructed off-chain into a `Dict` +/// which has taken care of maintaining interval invariants. This function still +/// performs a sanity check on all keys to avoid silly mistakes. 
It is, however, +/// considerably faster than ['from_pairs'](from_pairs) +pub fn from_ascending_pairs(xs: Pairs) -> Dict { + let Void = check_ascending_list(xs) + Dict { inner: xs } +} + +fn check_ascending_list(xs: Pairs) { + when xs is { + [] -> Void + [_] -> Void + [Pair(x0, _), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + check_ascending_list([e, ..rest]) + } else { + fail @"keys in associative list aren't in ascending order" + } + } +} + +/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** +/// value doesn't satisfy the predicate. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail +/// ``` +pub fn from_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) -> Dict { + let Void = check_ascending_pairs_with(xs, predicate) + Dict { inner: xs } +} + +fn check_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) { + when xs is { + [] -> Void + [Pair(_, v)] -> + if predicate(v) { + Void + } else { + fail @"value doesn't satisfy predicate" + } + [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + if predicate(v0) { + check_ascending_pairs_with([e, ..rest], predicate) + } else { + fail @"value doesn't satisfy predicate" + } + } else { + fail @"keys in pairs aren't in ascending order" + } + } +} + +test bench_from_ascending_pairs() { + let dict = + from_ascending_pairs( + [ + Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), + Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), + Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), + Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), + ], + ) + + size(dict) == 16 +} + +/// Construct a dictionary from a list of key-value pairs. Note that when a key is present +/// multiple times, the first occurrence prevails. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] +/// +/// let result = +/// dict.from_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn from_pairs(self: Pairs) -> Dict { + Dict { inner: do_from_pairs(self) } +} + +fn do_from_pairs(xs: Pairs) -> Pairs { + when xs is { + [] -> [] + [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) + } +} + +test from_list_1() { + from_pairs([]) == empty +} + +test from_list_2() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( + [Pair(bar, 14), Pair(foo, 42)], + ) +} + +test from_list_3() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 +} + +test from_list_4() { + from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 +} + +test bench_from_pairs() { + let dict = + from_pairs( + [ + Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), + Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), + Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), + Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), + ], + ) + + size(dict) == 16 +} + +// ## Inspecting + +/// Finds a value in the dictionary, and returns the first key found to have that value. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 42) +/// |> dict.insert(key: "b", value: 14) +/// |> dict.insert(key: "c", value: 42) +/// |> dict.find(42) +/// +/// result == Some("a") +/// ``` +pub fn find(self: Dict, value v: value) -> Option { + do_find(self.inner, v) +} + +fn do_find(self: Pairs, value v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + do_find(rest, v) + } + } +} + +test find_1() { + find(empty, foo) == None +} + +test find_2() { + find( + empty + |> insert(foo, 14), + 14, + ) == Some(foo) +} + +test find_3() { + find( + empty + |> insert(foo, 14), + 42, + ) == None +} + +test find_4() { + find( + empty + |> insert(foo, 14) + |> insert(bar, 42) + |> insert(baz, 14), + 14, + ) == Some(baz) +} + +/// Get a value in the dict by its key. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.get(key: "a") +/// +/// result == Some("Aiken") +/// ``` +pub fn get(self: Dict, key: ByteArray) -> Option { + do_get(self.inner, key) +} + +fn do_get(self: Pairs, key k: ByteArray) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + Some(v) + } else { + None + } + } else { + do_get(rest, k) + } + } +} + +test get_1() { + get(empty, foo) == None +} + +test get_2() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: foo) == Some("Aiken") +} + +test get_3() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: baz) == None +} + +test get_4() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "bcd") == None +} + +test get_5() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "kkk") == None +} + +/// Check if a key exists in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.has_key("a") +/// +/// result == True +/// ``` +pub fn has_key(self: Dict, key k: ByteArray) -> Bool { + do_has_key(self.inner, k) +} + +fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { + when self is { + [] -> False + [Pair(k2, _), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + k == k2 + } else { + do_has_key(rest, k) + } + } +} + +test has_key_1() { + !has_key(empty, foo) +} + +test has_key_2() { + has_key( + empty + |> insert(foo, 14), + foo, + ) +} + +test has_key_3() { + !has_key( + empty + |> insert(foo, 14), + bar, + ) +} + +test has_key_4() { + has_key( + empty + |> insert(foo, 14) + |> insert(bar, 42), + bar, + ) +} + +/// Efficiently checks whether a dictionary is empty. +/// ```aiken +/// dict.is_empty(dict.empty) == True +/// ``` +pub fn is_empty(self: Dict) -> Bool { + when self.inner is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty(empty) +} + +/// Extract all the keys present in a given `Dict`. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("a", 1337) +/// |> dict.keys() +/// +/// result == ["a", "b"] +/// ``` +pub fn keys(self: Dict) -> List { + do_keys(self.inner) +} + +fn do_keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] + } +} + +test keys_1() { + keys(empty) == [] +} + +test keys_2() { + keys( + empty + |> insert(foo, 0) + |> insert(bar, 0), + ) == [bar, foo] +} + +/// Return the number of key-value pairs in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.size() +/// +/// result == 3 +/// ``` +pub fn size(self: Dict) -> Int { + do_size(self.inner) +} + +fn do_size(self: Pairs) -> Int { + when self is { + [] -> 0 + [_, ..rest] -> 1 + do_size(rest) + } +} + +test size_1() { + size(empty) == 0 +} + +test size_2() { + size( + empty + |> insert(foo, 14), + ) == 1 +} + +test size_3() { + size( + empty + |> insert(foo, 14) + |> insert(bar, 42), + ) == 2 +} + +/// Extract all the values present in a given `Dict`. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("c", 1337) +/// |> dict.values() +/// +/// result == [14, 42, 1337] +/// ``` +pub fn values(self: Dict) -> List { + do_values(self.inner) +} + +fn do_values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..do_values(rest)] + } +} + +test values_1() { + values(empty) == [] +} + +test values_2() { + values( + empty + |> insert(foo, 3) + |> insert(bar, 4), + ) == [4, 3] +} + +// ## Modifying + +/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.delete(key: "a") +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200)] +/// ``` +pub fn delete(self: Dict, key: ByteArray) -> Dict { + Dict { inner: do_delete(self.inner, key) } +} + +fn do_delete( + self: Pairs, + key k: ByteArray, +) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + rest + } else { + self + } + } else { + [Pair(k2, v2), ..do_delete(rest, k)] + } + } +} + +test delete_1() { + delete(empty, foo) == empty +} + +test delete_2() { + let m = + empty + |> insert(foo, 14) + delete(m, foo) == empty +} + +test delete_3() { + let m = + empty + |> insert(foo, 14) + delete(m, bar) == m +} + +test delete_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + !has_key(delete(m, foo), foo) +} + +test delete_5() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + has_key(delete(m, bar), foo) +} + +test delete_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + delete(m, "bcd") == m +} + +/// Keep only the key-value pairs that pass the given predicate. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.filter(fn(k, _v) { k != "a" }) +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn filter( + self: Dict, + with: fn(ByteArray, value) -> Bool, +) -> Dict { + Dict { inner: do_filter(self.inner, with) } +} + +fn do_filter( + self: Pairs, + with: fn(ByteArray, value) -> Bool, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> + if with(k, v) { + [Pair(k, v), ..do_filter(rest, with)] + } else { + do_filter(rest, with) + } + } +} + +test filter_1() { + filter(empty, fn(_, _) { True }) == empty +} + +test filter_2() { + let expected = + empty + |> insert(foo, 42) + filter(fixture_1, fn(_, v) { v > 14 }) == expected +} + +test filter_3() { + let expected = + empty + |> insert(bar, 14) + filter(fixture_1, fn(k, _) { k == bar }) == expected +} + +/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 1) +/// |> dict.insert(key: "b", value: 2) +/// |> dict.insert(key: "a", value: 3) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 3), Pair("b", 2)] +/// ``` +pub fn insert( + self: Dict, + key k: ByteArray, + value v: value, +) -> Dict { + Dict { inner: do_insert(self.inner, k, v) } +} + +fn do_insert( + self: Pairs, + key k: ByteArray, + value v: value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..do_insert(rest, k, v)] + } + } + } +} + +test insert_1() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(foo, 14) + insert(m1, foo, 14) == m2 +} + +test insert_2() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(bar, 14) + insert(m1, bar, 14) == insert(m2, foo, 42) +} + +/// Insert a value in the dictionary at a given key. When the key already exist, the provided +/// merge function is called. The value existing in the dictionary is passed as the second argument +/// to the merge function, and the new value is passed as the third argument. 
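+///
+/// For instance, a merge function that always keeps the value already present
+/// would be `fn(_key, existing, _new) { Some(existing) }`, while returning
+/// `None` drops the key altogether (an illustrative note based on the
+/// behaviour exercised by the tests below).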
+/// +/// ```aiken +/// let sum = +/// fn (_k, a, b) { Some(a + b) } +/// +/// let result = +/// dict.empty +/// |> dict.insert_with(key: "a", value: 1, with: sum) +/// |> dict.insert_with(key: "b", value: 2, with: sum) +/// |> dict.insert_with(key: "a", value: 3, with: sum) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 4), Pair("b", 2)] +/// ``` +pub fn insert_with( + self: Dict, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { + inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), + } +} + +test insert_with_1() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 1)] +} + +test insert_with_2() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> insert_with(key: "foo", value: 3, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 4)] +} + +test insert_with_3() { + let with = + fn(k, a, _b) { + if k == "foo" { + Some(a) + } else { + None + } + } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: with) + |> insert_with(key: "bar", value: 2, with: with) + |> insert_with(key: "foo", value: 3, with: with) + |> insert_with(key: "bar", value: 4, with: with) + |> to_pairs() + + result == [Pair("foo", 1)] +} + +/// Apply a function to all key-value pairs in a Dict. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.map(fn(_k, v) { v * 2 }) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] +/// ``` +pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { + Dict { inner: do_map(self.inner, with) } +} + +fn do_map( + self: Pairs, + with: fn(ByteArray, a) -> b, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] + } +} + +test map_1() { + let result = + fixture_1 + |> map(with: fn(k, _) { k }) + get(result, foo) == Some(foo) +} + +test map_2() { + let result = + fixture_1 + |> map(with: fn(_, v) { v + 1 }) + get(result, foo) == Some(43) && size(result) == size(fixture_1) +} + +/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
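+///
+/// The result is a pair: the removed value (wrapped in `Some`, or `None` when
+/// the key is absent) together with the dictionary without that key.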
+/// +/// ```aiken +/// let (value, _) = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.pop(key: "a") +/// +/// result == 100 +/// ``` +pub fn pop( + self: Dict, + key: ByteArray, +) -> (Option, Dict) { + do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) }) +} + +fn do_pop( + self: Pairs, + key k: ByteArray, + return: fn(Option, Pairs) -> result, +) -> result { + when self is { + [] -> return(None, []) + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + return(Some(v2), rest) + } else { + return(None, self) + } + } else { + do_pop( + rest, + k, + fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) }, + ) + } + } +} + +test pop_1() { + pop(empty, foo) == (None, empty) +} + +test pop_2() { + let m = + empty + |> insert(foo, 14) + pop(m, foo) == (Some(14), empty) +} + +test pop_3() { + let m = + empty + |> insert(foo, 14) + pop(m, bar) == (None, m) +} + +test pop_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + pop(m, foo) == (Some(14), empty |> insert(bar, 14)) +} + +test pop_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + pop(m, "bcd") == (None, m) +} + +// ## Combining + +/// Combine two dictionaries. If the same key exist in both the left and +/// right dictionary, values from the left are preferred (i.e. left-biaised). +/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union(left_dict, right_dict) |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union( + left: Dict, + right: Dict, +) -> Dict { + Dict { inner: do_union(left.inner, right.inner) } +} + +fn do_union( + left: Pairs, + right: Pairs, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) + } +} + +test union_1() { + union(fixture_1, empty) == fixture_1 +} + +test union_2() { + union(empty, fixture_1) == fixture_1 +} + +test union_3() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(baz, 1337) + union(left, right) == from_pairs( + [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)], + ) +} + +test union_4() { + let left = + empty + |> insert(foo, 14) + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)]) +} + +/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present +/// in both dictionaries. The first value received correspond to the value in the left +/// dictionnary, whereas the second argument corresponds to the value in the right dictionnary. +/// +/// When passing `None`, the value is removed and not present in the union. 
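+///
+/// For example (an illustrative sketch), merging with
+/// `fn(_k, _left, _right) { None }` keeps only the keys that appear in exactly
+/// one of the two dictionaries.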
+/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union_with( +/// left_dict, +/// right_dict, +/// fn(_k, v1, v2) { Some(v1 + v2) }, +/// ) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union_with( + left: Dict, + right: Dict, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { inner: do_union_with(left.inner, right.inner, with) } +} + +fn do_union_with( + left: Pairs, + right: Pairs, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> + do_union_with(rest, do_insert_with(right, k, v, with), with) + } +} + +fn do_insert_with( + self: Pairs, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + when with(k, v, v2) is { + Some(combined) -> [Pair(k, combined), ..rest] + None -> rest + } + } else { + [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] + } + } + } +} + +test union_with_1() { + let left = + empty + |> insert(foo, 14) + + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + + let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) + + result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) +} + +// ## Transforming + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldl(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldl( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldl(self.inner, zero, with) +} + +fn do_foldl( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) + } +} + +test fold_1() { + foldl(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test fold_2() { + foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from right-to-left. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldr(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldr( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldr(self.inner, zero, with) +} + +fn do_foldr( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Get the inner list holding the dictionary data. 
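+///
+/// This is a constant-time operation: it simply exposes the underlying
+/// `Pairs`, which are kept in ascending key order (inferred from the
+/// definition below).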
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn to_pairs(self: Dict) -> Pairs { + self.inner +} + +test to_list_1() { + to_pairs(empty) == [] +} + +test to_list_2() { + to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak new file mode 100644 index 00000000..b8e7f675 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak @@ -0,0 +1,1411 @@ +use aiken/builtin +use aiken/primitive/bytearray +use aiken/primitive/int + +// ## Constructing + +/// Add an element in front of the list. Sometimes useful when combined with +/// other functions. +/// +/// ```aiken +/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] +/// ``` +pub fn push(self: List, elem: a) -> List { + [elem, ..self] +} + +test push_1() { + push([], 0) == [0] +} + +test push_2() { + push([2, 3], 1) == [1, 2, 3] +} + +/// Construct a list of a integer from a given range. +/// +/// ```aiken +/// list.range(0, 3) == [0, 1, 2, 3] +/// list.range(-1, 1) == [-1, 0, 1] +/// ``` +pub fn range(from: Int, to: Int) -> List { + if from > to { + [] + } else { + [from, ..range(from + 1, to)] + } +} + +test range_1() { + range(0, 3) == [0, 1, 2, 3] +} + +test range_2() { + range(-1, 1) == [-1, 0, 1] +} + +/// Construct a list filled with n copies of a value. +/// +/// ```aiken +/// list.repeat("na", 3) == ["na", "na", "na"] +/// ``` +pub fn repeat(elem: a, n_times: Int) -> List { + if n_times <= 0 { + [] + } else { + [elem, ..repeat(elem, n_times - 1)] + } +} + +test repeat_1() { + repeat(42, 0) == [] +} + +test repeat_2() { + repeat(14, 3) == [14, 14, 14] +} + +// ## Inspecting + +/// Determine if all elements of the list satisfy the given predicate. +/// +/// Note: an empty list always satisfies the predicate. +/// +/// ```aiken +/// list.all([], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n == 2 }) == False +/// ``` +pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> True + [x, ..xs] -> predicate(x) && all(xs, predicate) + } +} + +test all_1() { + all([1, 2, 3], fn(n) { n > 0 }) == True +} + +test all_2() { + all([1, 2, 3], fn(n) { n > 42 }) == False +} + +test all_3() { + all([], fn(n) { n == 42 }) == True +} + +/// Determine if at least one element of the list satisfies the given predicate. +/// +/// Note: an empty list never satisfies the predicate. +/// +/// ```aiken +/// list.any([], fn(n) { n > 2 }) == False +/// list.any([1, 2, 3], fn(n) { n > 0 }) == True +/// list.any([1, 2, 3], fn(n) { n == 2 }) == True +/// list.any([1, 2, 3], fn(n) { n < 0 }) == False +/// ``` +pub fn any(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> False + [x, ..xs] -> predicate(x) || any(xs, predicate) + } +} + +test any_1() { + any([1, 2, 3], fn(n) { n > 0 }) == True +} + +test any_2() { + any([1, 2, 3], fn(n) { n > 42 }) == False +} + +test any_3() { + any([], fn(n) { n == 42 }) == False +} + +/// Return Some(item) at the index or None if the index is out of range. The index is 0-based. 
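+///
+/// Negative indices are treated as out of range and also yield `None`
+/// (see the `at_1` test below).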
+/// +/// ```aiken +/// list.at([1, 2, 3], 1) == Some(2) +/// list.at([1, 2, 3], 42) == None +/// ``` +pub fn at(self: List, index: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if index == 0 { + Some(x) + } else { + at(xs, index - 1) + } + } +} + +test at_1() { + at([1, 2, 3], -1) == None +} + +test at_2() { + at([], 0) == None +} + +test at_3() { + at([1, 2, 3], 3) == None +} + +test at_4() { + at([1], 0) == Some(1) +} + +test at_5() { + at([1, 2, 3], 2) == Some(3) +} + +/// Count how many items in the list satisfy the given predicate. +/// +/// ```aiken +/// list.count([], fn(a) { a > 2}) == 0 +/// list.count([1, 2, 3], fn(a) { n > 0 }) == 3 +/// list.count([1, 2, 3], fn(a) { n >= 2 }) == 2 +/// list.count([1, 2, 3], fn(a) { n > 5 }) == 0 +/// ``` +pub fn count(self: List, predicate: fn(a) -> Bool) -> Int { + foldr( + self, + 0, + fn(item, total) { + if predicate(item) { + total + 1 + } else { + total + } + }, + ) +} + +test count_empty() { + count([], fn(a) { a > 2 }) == 0 +} + +test count_all() { + count([1, 2, 3], fn(a) { a > 0 }) == 3 +} + +test count_some() { + count([1, 2, 3], fn(a) { a >= 2 }) == 2 +} + +test count_none() { + count([1, 2, 3], fn(a) { a > 5 }) == 0 +} + +/// Find the first element satisfying the given predicate, if any. +/// +/// ```aiken +/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2) +/// list.find([4, 5, 6], fn(x) { x == 2 }) == None +/// ``` +pub fn find(self: List, predicate: fn(a) -> Bool) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if predicate(x) { + Some(x) + } else { + find(xs, predicate) + } + } +} + +test find_1() { + find([1, 2, 3], fn(x) { x == 1 }) == Some(1) +} + +test find_2() { + find([1, 2, 3], fn(x) { x > 42 }) == None +} + +test find_3() { + find([], fn(_) { True }) == None +} + +/// Figures out whether a list contain the given element. +/// +/// ```aiken +/// list.has([1, 2, 3], 2) == True +/// list.has([1, 2, 3], 14) == False +/// list.has([], 14) == False +/// ``` +pub fn has(self: List, elem: a) -> Bool { + when self is { + [] -> False + [x, ..xs] -> + if x == elem { + True + } else { + has(xs, elem) + } + } +} + +test has_1() { + has([1, 2, 3], 1) == True +} + +test has_2() { + has([1, 2, 3], 14) == False +} + +test has_3() { + has([], 14) == False +} + +/// Get the first element of a list +/// +/// ```aiken +/// list.head([1, 2, 3]) == Some(1) +/// list.head([]) == None +/// ``` +pub fn head(self: List) -> Option { + when self is { + [] -> None + _ -> Some(builtin.head_list(self)) + } +} + +test head_1() { + head([1, 2, 3]) == Some(1) +} + +test head_2() { + head([]) == None +} + +/// Checks whether a list is empty. +/// +/// ```aiken +/// list.is_empty([]) == True +/// list.is_empty([1, 2, 3]) == False +/// ``` +pub fn is_empty(self: List) -> Bool { + when self is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty([]) == True +} + +test is_empty_2() { + is_empty([1, 2, 3]) == False +} + +/// Gets the index of an element of a list, if any. Otherwise, returns None. 
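+///
+/// Only the first occurrence is reported, and the returned index is 0-based
+/// (inferred from the definition below).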
+/// +/// ```aiken +/// list.index_of([1, 5, 2], 2) == Some(2) +/// list.index_of([1, 7, 3], 4) == None +/// list.index_of([1, 0, 9, 6], 6) == 3 +/// list.index_of([], 6) == None +/// ``` +pub fn index_of(self: List, elem: a) -> Option { + do_index_of(self, elem, 0) +} + +fn do_index_of(self: List, elem: a, i: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if x == elem { + Some(i) + } else { + do_index_of(xs, elem, i + 1) + } + } +} + +test index_of_1() { + index_of([1, 5, 2], 2) == Some(2) +} + +test index_of_2() { + index_of([1, 7, 3], 4) == None +} + +test index_of_3() { + index_of([1, 0, 9, 6], 6) == Some(3) +} + +test index_of_4() { + index_of([], 6) == None +} + +/// Get the last in the given list, if any. +/// +/// ```aiken +/// list.last([]) == None +/// list.last([1, 2, 3]) == Some(3) +/// ``` +pub fn last(self: List) -> Option { + when self is { + [] -> None + [x] -> Some(x) + [_, ..xs] -> last(xs) + } +} + +test last_1() { + last([]) == None +} + +test last_2() { + last([1]) == Some(1) +} + +test last_3() { + last([1, 2, 3, 4]) == Some(4) +} + +/// Get the number of elements in the given list. +/// +/// ```aiken +/// list.length([]) == 0 +/// list.length([1, 2, 3]) == 3 +/// ``` +pub fn length(self: List) -> Int { + when self is { + [] -> 0 + [_, ..xs] -> 1 + length(xs) + } +} + +test length_1() { + length([]) == 0 +} + +test length_2() { + length([1, 2, 3]) == 3 +} + +// ## Modifying + +// ### Extracting + +/// Remove the first occurrence of the given element from the list. +/// +/// ```aiken +/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] +/// list.delete([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn delete(self: List, elem: a) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if x == elem { + xs + } else { + [x, ..delete(xs, elem)] + } + } +} + +test delete_1() { + delete([], 42) == [] +} + +test delete_2() { + delete([1, 2, 3, 1], 1) == [2, 3, 1] +} + +test delete_3() { + delete([1, 2, 3], 14) == [1, 2, 3] +} + +test delete_4() { + delete([2], 2) == [] +} + +/// Drop the first `n` elements of a list. +/// +/// ```aiken +/// list.drop([1, 2, 3], 2) == [3] +/// list.drop([], 42) == [] +/// list.drop([1, 2, 3], 42) == [] +/// ``` +pub fn drop(self: List, n: Int) -> List { + if n <= 0 { + self + } else { + when self is { + [] -> [] + [_x, ..xs] -> drop(xs, n - 1) + } + } +} + +test drop_1() { + drop([], 42) == [] +} + +test drop_2() { + drop([1, 2, 3], 2) == [3] +} + +/// Returns the suffix of the given list after removing all elements that satisfy the predicate. +/// +/// ```aiken +/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] +/// list.drop_while([], fn(x) { x > 2 }) == [] +/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] +/// ``` +pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + drop_while(xs, predicate) + } else { + self + } + } +} + +test drop_while_1() { + drop_while([], fn(x) { x > 2 }) == [] +} + +test drop_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] +} + +test drop_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x == 42 }) == xs +} + +test drop_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x < 42 }) == [] +} + +/// Produce a list of elements that satisfy a predicate. 
+/// +/// ```aiken +/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] +/// list.filter([], fn(x) { x > 2 }) == [] +/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] +/// ``` +pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..filter(xs, predicate)] + } else { + filter(xs, predicate) + } + } +} + +test filter_1() { + filter([], fn(x) { x > 0 }) == [] +} + +test filter_2() { + let xs = [1, 2, 3, 4, 5, 6] + filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] +} + +test filter_3() { + let filter_foldr = + fn(xs, f) { + foldr( + xs, + [], + fn(x, ys) { + if f(x) { + [x, ..ys] + } else { + ys + } + }, + ) + } + + let is_odd = + fn(n) { builtin.mod_integer(n, 2) != 0 } + + filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) +} + +/// Produce a list of transformed elements that satisfy a predicate. +/// +/// ```aiken +/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } +/// list.filter_map([1, 2, 3], transform) == [3, 9] +/// ``` +pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when predicate(x) is { + None -> filter_map(xs, predicate) + Some(y) -> [y, ..filter_map(xs, predicate)] + } + } +} + +test filter_map_1() { + filter_map([], fn(_) { Some(42) }) == [] +} + +test filter_map_2() { + filter_map( + [1, 2, 3, 4, 5, 6], + fn(x) { + if builtin.mod_integer(x, 2) != 0 { + Some(3 * x) + } else { + None + } + }, + ) == [3, 9, 15] +} + +/// Return all elements except the last one. +/// +/// ```aiken +/// list.init([]) == None +/// list.init([1, 2, 3]) == Some([1, 2]) +/// ``` +pub fn init(self: List) -> Option> { + when self is { + [] -> None + _ -> Some(do_init(self)) + } +} + +fn do_init(self: List) -> List { + when self is { + [] -> fail @"unreachable" + [_] -> [] + [x, ..xs] -> [x, ..do_init(xs)] + } +} + +test init_1() { + init([]) == None +} + +test init_2() { + init([1]) == Some([]) +} + +test init_3() { + init([1, 2, 3, 4]) == Some([1, 2, 3]) +} + +/// Returns a tuple with all elements that satisfy the predicate at first +/// element, and the rest as second element. +/// +/// ```aiken +/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +/// ``` +pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> { + let (left, right) = partition(xs, predicate) + if predicate(x) { + ([x, ..left], right) + } else { + (left, [x, ..right]) + } + } + } +} + +test partition_1() { + partition([], fn(x) { x > 2 }) == ([], []) +} + +test partition_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) +} + +test partition_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x == 42 }) == ([], xs) +} + +test partition_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x < 42 }) == (xs, []) +} + +test partition_5() { + partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +} + +/// Extract a sublist from the given list using 0-based indexes. Negative +/// indexes wrap over, so `-1` refers to the last element of the list. 
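+///
+/// Both the `from` and `to` bounds are inclusive, so
+/// `slice([1, 2, 3], from: 0, to: 0) == [1]` (an illustrative example, not
+/// part of the upstream documentation).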
+/// +/// ```aiken +/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +/// ``` +pub fn slice(self: List, from: Int, to: Int) -> List { + let (i, l) = + if from >= 0 { + (from, None) + } else { + let l = length(self) + (l + from, Some(l)) + } + + let j = + if to >= 0 { + to - i + 1 + } else { + when l is { + Some(l) -> l + to - i + 1 + None -> length(self) + to - i + 1 + } + } + + self + |> drop(i) + |> take(j) +} + +test slice_1() { + slice([1, 2, 3], 0, 2) == [1, 2, 3] +} + +test slice_2() { + slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +} + +test slice_3() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +} + +test slice_4() { + slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +} + +test slice_5() { + slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] +} + +test slice_6() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] +} + +/// Cut a list in two, such that the first list contains the given number of / +/// elements and the second list contains the rest. +/// +/// Fundamentally equivalent to (but more efficient): +/// +/// ```aiken +/// // span(xs, n) == (take(xs, n), drop(xs, n)) +/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) +/// ``` +pub fn span(self: List, n: Int) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> + if n <= 0 { + ([], self) + } else { + let (left, right) = span(xs, n - 1) + ([x, ..left], right) + } + } +} + +test span_1() { + span([], 2) == ([], []) +} + +test span_2() { + span([1, 2, 3], 2) == ([1, 2], [3]) +} + +test span_3() { + span([1, 2, 3], -1) == ([], [1, 2, 3]) +} + +test span_4() { + span([1, 2, 3], 42) == ([1, 2, 3], []) +} + +/// Get elements of a list after the first one, if any. +/// +/// ```aiken +/// list.tail([]) == None +/// list.tail([1, 2, 3]) == Some([2, 3]) +/// ``` +pub fn tail(self: List) -> Option> { + when self is { + [] -> None + [_, ..xs] -> Some(xs) + } +} + +test tail_1() { + tail([1, 2, 3]) == Some([2, 3]) +} + +test tail_2() { + tail([]) == None +} + +/// Get the first `n` elements of a list. +/// +/// ```aiken +/// list.take([1, 2, 3], 2) == [1, 2] +/// list.take([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn take(self: List, n: Int) -> List { + if n <= 0 { + [] + } else { + when self is { + [] -> [] + [x, ..xs] -> [x, ..take(xs, n - 1)] + } + } +} + +test take_1() { + take([], 42) == [] +} + +test take_2() { + take([1, 2, 3], 2) == [1, 2] +} + +/// Returns the longest prefix of the given list where all elements satisfy the predicate. +/// +/// ```aiken +/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] +/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] +/// ``` +pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..take_while(xs, predicate)] + } else { + [] + } + } +} + +test take_while_1() { + take_while([], fn(x) { x > 2 }) == [] +} + +test take_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] +} + +test take_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x == 42 }) == [] +} + +test take_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x < 42 }) == xs +} + +/// Removes duplicate elements from a list. 
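+///
+/// The first occurrence of each element is kept and the relative order of the
+/// remaining elements is preserved (inferred from the definition below).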
+/// +/// ```aiken +/// list.unique([1, 2, 3, 1]) == [1, 2, 3] +/// ``` +pub fn unique(self: List) -> List { + when self is { + [] -> [] + [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] + } +} + +test unique_1() { + unique([]) == [] +} + +test unique_2() { + let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] + unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] +} + +// ### Mapping + +/// Map elements of a list into a new list and flatten the result. +/// +/// ```aiken +/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] +/// ``` +pub fn flat_map(self: List, with: fn(a) -> List) -> List { + foldr(self, [], fn(x, xs) { concat(with(x), xs) }) +} + +test flat_map_1() { + flat_map([], fn(a) { [a] }) == [] +} + +test flat_map_2() { + flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] +} + +/// Perform an action for each element of a list. +/// +/// ```aiken +/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) +/// ``` +pub fn for_each(self: List, do: fn(a) -> Void) -> Void { + foldr(self, Void, fn(x, _) { do(x) }) +} + +test for_each_1() { + for_each( + [@"hello", @"world"], + do: fn(lbl) { + trace lbl + Void + }, + ) +} + +/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. +/// +/// ```aiken +/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] +/// ``` +pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { + do_indexed_map(0, self, with) +} + +fn do_indexed_map( + n: Int, + self: List, + with: fn(Int, a) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] + } +} + +test indexed_map_1() { + indexed_map([], fn(i, _n) { i }) == [] +} + +test indexed_map_2() { + indexed_map( + [4, 8, 13, 2], + fn(i, n) { + if n == 8 { + n + } else { + i + } + }, + ) == [0, 8, 2, 3] +} + +/// Apply a function to each element of a list. +/// +/// ```aiken +/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +/// ``` +pub fn map(self: List, with: fn(a) -> result) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(x), ..map(xs, with)] + } +} + +test map_1() { + map([], fn(n) { n + 1 }) == [] +} + +test map_2() { + map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +} + +/// Apply a function of two arguments, combining elements from two lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +/// ``` +pub fn map2( + self: List, + bs: List, + with: fn(a, b) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] + } + } +} + +test map2_1() { + map2([], [1, 2, 3], fn(a, b) { a + b }) == [] +} + +test map2_2() { + map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +} + +test map2_3() { + map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] +} + +/// Apply a function of three arguments, combining elements from three lists. +/// +/// Note: if one list is longer, the extra elements are dropped. 
+/// +/// ```aiken +/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +/// ``` +pub fn map3( + self: List, + bs: List, + cs: List, + with: fn(a, b, c) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> + when cs is { + [] -> [] + [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] + } + } + } +} + +test map3_1() { + map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] +} + +test map3_2() { + map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +} + +/// Return the list with its elements in the reserve order. +/// +/// ```aiken +/// list.reverse([1, 2, 3]) == [3, 2, 1] +/// ``` +pub fn reverse(self: List) -> List { + foldl(self, [], fn(x, xs) { [x, ..xs] }) +} + +test reverse_1() { + reverse([]) == [] +} + +test reverse_2() { + reverse([1, 2, 3]) == [3, 2, 1] +} + +/// Sort a list in ascending order using the given comparison function. +/// +/// ```aiken +/// use aiken/int +/// +/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] +/// sort([1, 2, 3], int.compare) == [1, 2, 3] +/// ``` +pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [] + [x, ..xs] -> insert(sort(xs, compare), x, compare) + } +} + +fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [e] + [x, ..xs] -> + if compare(e, x) == Less { + [e, ..self] + } else { + [x, ..insert(xs, e, compare)] + } + } +} + +test sort_1() { + let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_2() { + let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_3() { + let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_4() { + sort([], int.compare) == [] +} + +/// Decompose a list of tuples into a tuple of lists. +/// +/// ``` +/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +/// ``` +pub fn unzip(self: List<(a, b)>) -> (List, List) { + when self is { + [] -> ([], []) + [(a, b), ..xs] -> { + let (a_tail, b_tail) = unzip(xs) + ([a, ..a_tail], [b, ..b_tail]) + } + } +} + +test unzip_1() { + unzip([]) == ([], []) +} + +test unzip_2() { + unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +} + +// ## Combining + +/// Merge two lists together. +/// +/// ```aiken +/// list.concat([], []) == [] +/// list.concat([], [1, 2, 3]) == [1, 2, 3] +/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: List, right: List) -> List { + when left is { + [] -> right + [x, ..xs] -> [x, ..concat(xs, right)] + } +} + +test concat_1() { + concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +} + +test concat_2() { + concat([1, 2, 3], []) == [1, 2, 3] +} + +test concat_3() { + concat([], [1, 2, 3]) == [1, 2, 3] +} + +/// Remove the first occurrence of each element of the second list from the first one. 
+/// +/// ``` +/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +/// list.difference([1, 2, 3], []) == [1, 2, 3] +/// ``` +pub fn difference(self: List, with: List) -> List { + when with is { + [] -> self + [x, ..xs] -> difference(delete(self, x), xs) + } +} + +test difference_1() { + difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +} + +test difference_2() { + difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +} + +test difference_3() { + difference([1, 2, 3], []) == [1, 2, 3] +} + +test difference_4() { + difference([], [1, 2, 3]) == [] +} + +/// Combine two lists together. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +/// ``` +pub fn zip(self: List, bs: List) -> List<(a, b)> { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [(x, y), ..zip(xs, ys)] + } + } +} + +test zip_1() { + zip([], [1, 2, 3]) == [] +} + +test zip_2() { + zip([1, 2, 3], []) == [] +} + +test zip_3() { + zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +} + +// ## Transforming + +/// Reduce a list from left to right. +/// +/// ```aiken +/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] +/// ``` +pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> foldl(xs, with(x, zero), with) + } +} + +type Fold2 = + fn(a, b) -> result + +pub fn foldl2( + self: List, + zero_a: a, + zero_b: b, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> result { + do_foldl2(self, with, return)(zero_a, zero_b) +} + +fn do_foldl2( + self: List, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> Fold2 { + when self is { + [] -> return + [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) + } +} + +test foldl2_optimized() { + let + len, + sum, + <- + foldl2( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + 0, + 0, + fn(n, len, sum, return) { return(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_classic() { + let (len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + (0, 0), + fn(n, (len, sum)) { (len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +type Foo { + Foo(Int, Int) +} + +test foldl2_pair() { + let Pair(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Pair(0, 0), + fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_foo() { + let Foo(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Foo(0, 0), + fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl_1() { + foldl([], 0, fn(_, _) { 1 }) == 0 +} + +test foldl_2() { + foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldl_3() { + foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] +} + +/// Reduce a list from right to left. 
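+///
+/// Unlike [`foldl`](#foldl), this traversal is not tail-recursive, so prefer
+/// `foldl` on long lists whenever the combining function does not depend on
+/// the direction of the fold (an editorial note based on the definition below).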
+/// +/// ```aiken +/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] +/// ``` +pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> with(x, foldr(xs, zero, with)) + } +} + +test foldr_1() { + foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldr_2() { + foldr( + [1, 2, 3], + "", + fn(n, _str) { + if builtin.mod_integer(n, 2) == 0 { + "foo" + } else { + "bar" + } + }, + ) == "bar" +} + +test foldr_3() { + foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] +} + +/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. +/// +/// ```aiken +/// let group = fn(i, x, xs) { [(i, x), ..xs] } +/// list.indexed_foldr(["a", "b", "c"], [], group) == [ +/// (0, "a"), +/// (1, "b"), +/// (2, "c") +/// ] +/// ``` +pub fn indexed_foldr( + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + do_indexed_foldr(0, self, zero, with) +} + +fn do_indexed_foldr( + n: Int, + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + when self is { + [] -> zero + [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) + } +} + +test indexed_foldr_1() { + indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 +} + +test indexed_foldr_2() { + let letters = ["a", "b", "c"] + indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ + (0, "a"), (1, "b"), (2, "c"), + ] +} + +/// Reduce a list from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] +/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True +/// ``` +pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce([], 0, fn(n, total) { n + total }) == 0 +} + +test reduce_2() { + reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +} + +test reduce_3() { + reduce([True, False, True], False, fn(left, right) { left || right }) == True +} + +test reduce_4() { + reduce( + [#[1], #[2], #[3]], + #[9], + fn(left, right) { bytearray.concat(left, right) }, + ) == #[9, 1, 2, 3] +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak new file mode 100644 index 00000000..01bfe763 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak @@ -0,0 +1,833 @@ +//// A module for working with associative lists (a.k.a `Pairs`). +//// +//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers +//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is +//// a type-alias to `List>`. +//// +//// > [!CAUTION] +//// > +//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption +//// > about the ordering of elements within the list. As a result, lookup +//// > functions do traverse the entire list when invoked. They are also not _sets_, +//// > and thus allow for duplicate keys. This is reflected in the functions used +//// > to interact with them. 
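+////
+//// For instance (an illustrative sketch, not part of the upstream module
+//// documentation), duplicate keys are perfectly legal and each lookup helper
+//// spells out how it deals with them:
+////
+//// ```aiken
+//// let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)]
+////
+//// pairs.get_all(fixture, "a") == [1, 3]
+//// pairs.get_first(fixture, "a") == Some(1)
+//// pairs.get_last(fixture, "a") == Some(3)
+//// ```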
+ +use aiken/builtin +use aiken/primitive/bytearray + +// ## Inspecting + +/// Get all values in the alist associated with a given key. +/// +/// ```aiken +/// pairs.get_all([], "a") == [] +/// pairs.get_all([Pair("a", 1)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +/// ``` +pub fn get_all(self: Pairs, key k: key) -> List { + when self is { + [] -> [] + [Pair(k2, v), ..rest] -> + if k == k2 { + [v, ..get_all(rest, k)] + } else { + get_all(rest, k) + } + } +} + +test get_all_1() { + get_all([], "a") == [] +} + +test get_all_2() { + get_all([Pair("a", 1)], "a") == [1] +} + +test get_all_3() { + get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +} + +test get_all_4() { + get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +} + +test get_all_5() { + get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the first one is returned. +/// +/// ```aiken +/// pairs.get_first([], "a") == None +/// pairs.get_first([Pair("a", 1)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +/// ``` +pub fn get_first(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + Some(v) + } else { + get_first(rest, k) + } + } +} + +test get_first_1() { + get_first([], "a") == None +} + +test get_first_2() { + get_first([Pair("a", 1)], "a") == Some(1) +} + +test get_first_3() { + get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_first_4() { + get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +} + +test get_first_5() { + get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the last one is returned. +/// +/// ```aiken +/// pairs.get_last([], "a") == None +/// pairs.get_last([Pair("a", 1)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +/// ``` +pub fn get_last(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + when get_last(rest, k) is { + None -> Some(v) + some -> some + } + } else { + get_last(rest, k) + } + } +} + +test get_last_1() { + get_last([], "a") == None +} + +test get_last_2() { + get_last([Pair("a", 1)], "a") == Some(1) +} + +test get_last_3() { + get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_last_4() { + get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +} + +test get_last_5() { + get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Finds all keys in the alist associated with a given value. 
+/// +/// ```aiken +/// pairs.find_all([], 1) == [] +/// pairs.find_all([Pair("a", 1)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] +/// ``` +pub fn find_all(self: Pairs, v: value) -> List { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if v == v2 { + [k2, ..find_all(rest, v)] + } else { + find_all(rest, v) + } + } +} + +test find_all_1() { + find_all([], "a") == [] +} + +test find_all_2() { + find_all([Pair("a", 14)], 14) == ["a"] +} + +test find_all_3() { + find_all([Pair("a", 14)], 42) == [] +} + +test find_all_4() { + find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] +} + +/// Finds the first key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_first([], 1) == None +/// pairs.find_first([Pair("a", 1)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") +/// ``` +pub fn find_first(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + find_first(rest, v) + } + } +} + +test find_first_1() { + find_first([], "a") == None +} + +test find_first_2() { + find_first([Pair("a", 14)], 14) == Some("a") +} + +test find_first_3() { + find_first([Pair("a", 14)], 42) == None +} + +test find_first_4() { + find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") +} + +/// Finds the last key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_last([], 1) == None +/// pairs.find_last([Pair("a", 1)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") +/// ``` +pub fn find_last(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + when find_last(rest, v) is { + None -> Some(k2) + some -> some + } + } else { + find_last(rest, v) + } + } +} + +test find_last_1() { + find_last([], "a") == None +} + +test find_last_2() { + find_last([Pair("a", 14)], 14) == Some("a") +} + +test find_last_3() { + find_last([Pair("a", 14)], 42) == None +} + +test find_last_4() { + find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") +} + +/// Check if a key exists in the pairs. +/// +/// ```aiken +/// pairs.has_key([], "a") == False +/// pairs.has_key([Pair("a", 1)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True +/// ``` +pub fn has_key(self: Pairs, k: key) -> Bool { + when self is { + [] -> False + // || is lazy so this is fine + [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) + } +} + +test has_key_1() { + !has_key([], "a") +} + +test has_key_2() { + has_key([Pair("a", 14)], "a") +} + +test has_key_3() { + !has_key([Pair("a", 14)], "b") +} + +test has_key_4() { + has_key([Pair("a", 14), Pair("b", 42)], "b") +} + +test has_key_5() { + has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") +} + +/// Extract all the keys present in a given `Pairs`. 
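+///
+/// Duplicate keys are preserved and the original order is kept, in contrast
+/// with `dict.keys` (inferred from the examples below).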
+/// +/// ```aiken +/// pairs.keys([]) == [] +/// pairs.keys([Pair("a", 1)]) == ["a"] +/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] +/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] +/// ``` +pub fn keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..keys(rest)] + } +} + +test keys_1() { + keys([]) == [] +} + +test keys_2() { + keys([Pair("a", 0)]) == ["a"] +} + +test keys_3() { + keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] +} + +/// Extract all the values present in a given `Pairs`. +/// +/// ```aiken +/// pairs.values([]) == [] +/// pairs.values([Pair("a", 1)]) == [1] +/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +/// ``` +pub fn values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..values(rest)] + } +} + +test values_1() { + values([]) == [] +} + +test values_2() { + values([Pair("a", 1)]) == [1] +} + +test values_3() { + values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +} + +test values_4() { + values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +} + +// ## Modifying + +/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. +/// +/// ```aiken +/// pairs.delete_all([], "a") == [] +/// pairs.delete_all([Pair("a", 1)], "a") == [] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] +/// ``` +pub fn delete_all(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + delete_all(rest, k) + } else { + [Pair(k2, v2), ..delete_all(rest, k)] + } + } +} + +test delete_all_1() { + delete_all([], "a") == [] +} + +test delete_all_2() { + delete_all([Pair("a", 14)], "a") == [] +} + +test delete_all_3() { + let fixture = [Pair("a", 14)] + delete_all(fixture, "b") == fixture +} + +test delete_all_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_all(fixture, "a") == [Pair("b", 2)] +} + +/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **first** key found is deleted. +/// +/// ```aiken +/// pairs.delete_first([], "a") == [] +/// pairs.delete_first([Pair("a", 1)], "a") == [] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] +/// ``` +pub fn delete_first(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + rest + } else { + [Pair(k2, v2), ..delete_first(rest, k)] + } + } +} + +test delete_first_1() { + delete_first([], "a") == [] +} + +test delete_first_2() { + delete_first([Pair("a", 14)], "a") == [] +} + +test delete_first_3() { + let fixture = [Pair("a", 14)] + delete_first(fixture, "b") == fixture +} + +test delete_first_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] +} + +/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
+/// +/// ```aiken +/// pairs.delete_last([], "a") == [] +/// pairs.delete_last([Pair("a", 1)], "a") == [] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] +/// ``` +pub fn delete_last(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + let tail = delete_last(rest, k) + if tail == rest { + rest + } else { + [Pair(k2, v2), ..tail] + } + } else { + [Pair(k2, v2), ..delete_last(rest, k)] + } + } +} + +test delete_last_1() { + delete_last([], "a") == [] +} + +test delete_last_2() { + delete_last([Pair("a", 14)], "a") == [] +} + +test delete_last_3() { + let fixture = [Pair("a", 14)] + delete_last(fixture, "b") == fixture +} + +test delete_last_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// the value is added in front. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] +/// ``` +pub fn insert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..self] + } else { + [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test insert_by_ascending_key_1() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14), Pair("foo", 42)] +} + +test insert_by_ascending_key_2() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("bar", 14, bytearray.compare) + |> insert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies +/// how to combine two values on a key conflict. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. 
+/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let add_integer = fn(x, y) { x + y } +/// +/// let result = +/// [] +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) +/// +/// result == [Pair("bar", 2), Pair("foo", 4)] +/// ``` +pub fn insert_with_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, + with: fn(value, value) -> value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, with(v, v2)), ..rest] + } else { + [ + Pair(k2, v2), + ..insert_with_by_ascending_key(rest, k, v, compare, with) + ] + } + } + } +} + +test insert_with_by_ascending_key_1() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + + m == [Pair("foo" |> builtin.b_data, 56)] +} + +test insert_with_by_ascending_key_2() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "baz" |> builtin.b_data, + 1337, + compare_un_b_data, + builtin.add_integer, + ) + + m == [ + Pair("bar" |> builtin.b_data, 14), + Pair("baz" |> builtin.b_data, 1337), + Pair("foo" |> builtin.b_data, 42), + ] +} + +test insert_with_by_ascending_key_3() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let result = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 1, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 2, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 3, + compare_un_b_data, + builtin.add_integer, + ) + + result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] +} + +/// Apply a function to all key-value pairs in a alist, replacing the values. +/// +/// ```aiken +/// let fixture = [Pair("a", 100), Pair("b", 200)] +/// +/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] +/// ``` +pub fn map( + self: Pairs, + with: fn(key, value) -> result, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] + } +} + +test map_1() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] +} + +test map_2() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// its value is replaced. 
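+///
+/// In other words, this behaves like
+/// [`insert_by_ascending_key`](#insert_by_ascending_key) except that a key
+/// conflict replaces the existing pair instead of stacking a new one in front
+/// of it.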
+/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3)] +/// ``` +pub fn repsert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test repsert_by_ascending_key_1() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14)] +} + +test repsert_by_ascending_key_2() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("bar", 14, bytearray.compare) + |> repsert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +// ## Transforming + +/// Fold over the key-value pairs in a pairs. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldl( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) + } +} + +test foldl_1() { + foldl([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldl_2() { + foldl( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +/// Fold over the key-value pairs in a Pairs. The fold direction follows the +/// order of elements in the Pairs and is done from right-to-left. 
+/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldr( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +test foldr_3() { + let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] + + foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak new file mode 100644 index 00000000..46a7dda5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak @@ -0,0 +1,147 @@ +use aiken/builtin + +pub type VerificationKey = + ByteArray + +pub type VerificationKeyHash = + Hash + +pub type Script = + ByteArray + +pub type ScriptHash = + Hash + +pub type Signature = + ByteArray + +pub type DataHash = + Hash + +/// A `Hash` is nothing more than a `ByteArray`, but it carries extra +/// information for readability. +/// +/// On-chain, any hash digest value is represented as a plain 'ByteArray'. +/// Though in practice, hashes come from different sources and have +/// different semantics. +/// +/// Hence, while this type-alias doesn't provide any strong type-guarantees, +/// it helps writing functions signatures with more meaningful types than mere +/// 'ByteArray'. +/// +/// Compare for example: +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(ByteArray) +/// Script(ByteArray) +/// } +/// ``` +/// +/// with +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(Hash) +/// Script(Hash) +/// } +/// ``` +/// +/// Both are strictly equivalent, but the second reads much better. +pub type Hash = + ByteArray + +// ## Hashing + +/// A blake2b-224 hash algorithm. +/// +/// Typically used for: +/// +/// - [`Credential`](../cardano/address.html#Credential) +/// - [`PolicyId`](../cardano/assets.html#PolicyId) +/// +/// Note: there's no function to calculate blake2b-224 hash digests on-chain. +pub opaque type Blake2b_224 { + Blake2b_224 +} + +/// Compute the blake2b-224 hash digest (28 bytes) of some data. +pub fn blake2b_224(bytes: ByteArray) -> Hash { + builtin.blake2b_224(bytes) +} + +/// A blake2b-256 hash algorithm. +/// +/// Typically used for: +/// +/// - [`TransactionId`](../cardano/transaction.html#TransactionId) +pub opaque type Blake2b_256 { + Blake2b_256 +} + +/// Compute the blake2b-256 hash digest (32 bytes) of some data. +pub fn blake2b_256(bytes: ByteArray) -> Hash { + builtin.blake2b_256(bytes) +} + +/// A Keccak-256 hash algorithm. +pub opaque type Keccak_256 { + Keccak_256 +} + +/// Compute the keccak-256 hash digest (32 bytes) of some data. +pub fn keccak_256(bytes: ByteArray) -> Hash { + builtin.keccak_256(bytes) +} + +/// A SHA2-256 hash algorithm. +pub opaque type Sha2_256 { + Sha2_256 +} + +/// Compute the sha2-256 hash digest (32 bytes) of some data. +pub fn sha2_256(bytes: ByteArray) -> Hash { + builtin.sha2_256(bytes) +} + +/// A SHA3-256 hash algorithm. 
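+///
+/// For example (illustrative only; SHA3-256 and SHA2-256 both produce 32-byte digests but are distinct algorithms):
+///
+/// ```aiken
+/// crypto.sha3_256("aiken") != crypto.sha2_256("aiken")
+/// ```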
+pub opaque type Sha3_256 { + Sha3_256 +} + +/// Compute the sha3-256 hash digest (32 bytes) of some data. +pub fn sha3_256(bytes: ByteArray) -> Hash { + builtin.sha3_256(bytes) +} + +// ## Verifying signatures + +/// Verify an ECDCA signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ecdsa_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ecdsa_secp256k1_signature(key, msg, sig) +} + +/// Verify an Ed25519 signature using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ed25519_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ed25519_signature(key, msg, sig) +} + +/// Verify a Schnorr signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_schnorr_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_schnorr_secp256k1_signature(key, msg, sig) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak new file mode 100644 index 00000000..d7b4cc19 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak @@ -0,0 +1,115 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G1 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. 
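+///
+/// For example (illustrative sketch using the `compress`/`decompress` helpers defined below, round-tripping the generator through its 48-byte compressed form):
+///
+/// ```aiken
+/// g1.equal(g1.decompress(g1.compress(g1.generator)), g1.generator)
+/// ```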
+pub const generator: G1Element = + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" + +test generator_1() { + builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G1 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G1Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", + ), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", + ), + } +} + +/// Compresses a point in the G1 group into a more compact representation. +/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g1_compress(point) +} + +test compress_1() { + compress( + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", + ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" +} + +/// Decompresses a point in the G1 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g1_uncompress(bytes) +} + +pub fn equal(left, right) { + builtin.bls12_381_g1_equal(left, right) +} + +test equal_1() { + equal(generator, generator) +} + +/// Adds two points in the G1 group. +pub fn add(left, right) { + builtin.bls12_381_g1_add(left, right) +} + +/// Subtracts one point in the G1 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G1 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
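+///
+/// For example (illustrative sketch, mirroring the `scale_1` test below):
+///
+/// ```aiken
+/// expect Some(two) = scalar.new(2)
+/// g1.scale(g1.generator, two) == g1.add(g1.generator, g1.generator)
+/// ```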
+pub fn scale(point, e: Scalar) { + builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G1 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { + builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak new file mode 100644 index 00000000..7a2013db --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak @@ -0,0 +1,124 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G2 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const generator: G2Element = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + +test generator_1() { + builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G2 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. 
+/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G2Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", + ), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", + ), + } +} + +/// Compresses a point in the G2 group into a more compact representation. +/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g2_compress(point) +} + +test compress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" +} + +/// Decompresses a point in the G2 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g2_uncompress(bytes) +} + +test decompress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + generator == g2 +} + +pub fn equal(left, right) { + builtin.bls12_381_g2_equal(left, right) +} + +test equal_1() { + equal( + generator, + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", + ) +} + +/// Adds two points in the G2 group. 
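+///
+/// For example (illustrative property; `zero` is the additive identity defined above):
+///
+/// ```aiken
+/// g2.add(g2.generator, g2.zero) == g2.generator
+/// ```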
+pub fn add(left, right) { + builtin.bls12_381_g2_add(left, right) +} + +/// Subtracts one point in the G2 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G2 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. +pub fn scale(point, e: Scalar) { + builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G2 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes, domain_separation_tag) { + builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak new file mode 100644 index 00000000..cf028ad7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak @@ -0,0 +1,255 @@ +//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. +//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. +//// +//// More explicitly, we have the identity: +//// +//// ```aiken +//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 +//// ``` +//// +//// where, +//// +//// ```aiken +//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 +//// ``` +//// +//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. +//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. + +use aiken/builtin + +/// The prime number defining the scalar field of the BLS12-381 curve. +pub const field_prime = + 52435875175126190479447740508185965837690552500527637822603658699938581184513 + +/// Represents the additive identity (zero) in the `Scalar` field. +pub const zero: Scalar = Scalar(0) + +/// Represents the multiplicative identity (one) in the `Scalar` field. +pub const one: Scalar = Scalar(1) + +/// Opaque type representing an element of the finite field `Scalar`. +pub opaque type Scalar { + integer: Int, +} + +// ## Constructing + +/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. +/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
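+///
+/// For example (mirroring the `new_1` test below):
+///
+/// ```aiken
+/// scalar.new(-1) == None
+/// scalar.new(scalar.field_prime) == None
+/// ```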
+pub fn new(n: Int) -> Option { + if n >= 0 && n < field_prime { + Some(Scalar(n)) + } else { + None + } +} + +test new_1() { + and { + new(-1) == None, + new(field_prime) == None, + new(834884848) == Some(Scalar(834884848)), + } +} + +/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`. +pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(True, bytes)) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960)) +} + +/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`. +pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(False, bytes)) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535)) +} + +// ## Modifying + +/// Exponentiates an `Scalar` element by a non-negative integer exponent, using repeated squaring. +/// Note that this function returns `scalar.zero` for negative exponents. +/// A dedicated builtin function for this is in the making, see CIP 109. +pub fn scale(self: Scalar, e: Int) -> Scalar { + if e < 0 { + zero + } else if e == 0 { + one + } else if e % 2 == 0 { + scale(mul(self, self), e / 2) + } else { + mul(self, scale(mul(self, self), ( e - 1 ) / 2)) + } +} + +test scale_1() { + and { + scale(Scalar(834884848), -1) == zero, + scale(Scalar(834884848), 0) == one, + scale(Scalar(834884848), 1) == Scalar(834884848), + scale(Scalar(834884848), 2) == Scalar(697032709419983104), + scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192), + scale(Scalar(field_prime - 4), 200) == Scalar( + 12843927705572658539565969578937286576443167978938369866871449552629978143484, + ), + } +} + +/// A faster version of `scale` for the case where the exponent is a power of two. +/// That is, the exponent `e = 2^k` for some non-negative integer `k`. Which is used alot in zk-SNARKs. +pub fn scale2(self: Scalar, k: Int) -> Scalar { + if k < 0 { + zero + } else { + do_scale2(self, k) + } +} + +fn do_scale2(self: Scalar, k: Int) -> Scalar { + if k == 0 { + self + } else { + do_scale2(mul(self, self), k - 1) + } +} + +test scale2_1() { + and { + scale2(Scalar(834884848), -1) == zero, + scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1), + scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2), + scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4), + scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8), + scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16), + } +} + +// ## Combining + +/// Adds two `Scalar` elements, ensuring the result stays within the finite field range. +pub fn add(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer + right.integer ) % field_prime) +} + +test add_1() { + and { + (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?, + (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?, + (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?, + } +} + +/// Divides one `Scalar` element by another, returning `None` if the divisor is zero. 
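+///
+/// For example (illustrative sketch, mirroring the `div_1` test below):
+///
+/// ```aiken
+/// expect Some(x) = scalar.new(834884848)
+/// scalar.div(x, x) == Some(scalar.one)
+/// scalar.div(x, scalar.zero) == None
+/// ```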
+pub fn div(left: Scalar, right: Scalar) -> Option { + if right == zero { + None + } else { + Some(mul(left, scale(right, field_prime - 2))) + } +} + +test div_1() { + and { + div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), + div(Scalar(834884848), zero) == None, + div(Scalar(field_prime - 1), Scalar(2)) == Some( + Scalar( + 26217937587563095239723870254092982918845276250263818911301829349969290592256, + ), + ), + } +} + +/// Multiplies two `Scalar` elements, with the result constrained within the finite field. +pub fn mul(left: Scalar, right: Scalar) -> Scalar { + Scalar(left.integer * right.integer % field_prime) +} + +test mul_1() { + and { + mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), + mul(zero, Scalar(834884848)) == zero, + mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699938581184511, + ), + } +} + +/// Calculates the additive inverse of a `Scalar` element. +pub fn neg(self: Scalar) -> Scalar { + // this is basicly sub(zero, self), but more efficient as it saves one modulo operation + if self.integer == 0 { + self + } else { + Scalar(field_prime - self.integer) + } +} + +test neg_1() { + and { + neg(Scalar(834884848)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699937746299665, + ), + neg(zero) == zero, + neg(one) == Scalar(field_prime - 1), + } +} + +/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. +pub fn recip(self: Scalar) -> Option { + div(one, self) +} + +test recip_1() { + and { + recip(Scalar(834884848)) == Some( + Scalar( + 35891248691642227249400403463796410930702563777316955162085759263735363466421, + ), + ), + recip(zero) == None, + } +} + +/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. +pub fn sub(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer - right.integer ) % field_prime) +} + +test sub_1() { + and { + (sub(Scalar(834884848), Scalar(834884848)) == zero)?, + (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, + } +} + +// ## Transforming + +/// Converts a `Scalar` element back to its integer representation. +pub fn to_int(self: Scalar) -> Int { + self.integer +} + +test to_int_1() { + to_int(Scalar(834884848)) == 834884848 +} + +/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. +pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self.integer) +} + +/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. +pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self.integer) +} + +test to_bytearray_1() { + to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak new file mode 100644 index 00000000..96179f9b --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak @@ -0,0 +1,680 @@ +//// In a eUTxO-based blockchain like Cardano, the management of time can be +//// finicky. 
+//// +//// Indeed, in order to maintain a complete determinism in the execution of +//// scripts, it is impossible to introduce a notion of _"current time"_ since +//// the execution would then depend on factor that are external to the +//// transaction itself: the ineluctable stream of time flowing in our universe. +//// +//// Hence, to work around that, we typically define time intervals, which gives +//// window -- a.k.a intervals -- within which the transaction can be executed. +//// From within a script, it isn't possible to know when exactly the script is +//// executed, but we can reason about the interval bounds to validate pieces of +//// logic. + +// TODO: Replace 'Int' with a generic 'a' once we have comparable traits. + +/// A type to represent intervals of values. Interval are inhabited by a type +/// `a` which is useful for non-infinite intervals that have a finite +/// lower-bound and/or upper-bound. +/// +/// This allows to represent all kind of mathematical intervals: +/// +/// ```aiken +/// // [1; 10] +/// let i0: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(1), is_inclusive: True } +/// , upper_bound: +/// IntervalBound { bound_type: Finite(10), is_inclusive: True } +/// } +/// ``` +/// +/// ```aiken +/// // (20; infinity) +/// let i1: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(20), is_inclusive: False } +/// , upper_bound: +/// IntervalBound { bound_type: PositiveInfinity, is_inclusive: False } +/// } +/// ``` +pub type Interval { + lower_bound: IntervalBound, + upper_bound: IntervalBound, +} + +/// An interval bound, either inclusive or exclusive. +pub type IntervalBound { + bound_type: IntervalBoundType, + is_inclusive: Bool, +} + +/// A type of interval bound. Where finite, a value of type `a` must be +/// provided. `a` will typically be an `Int`, representing a number of seconds or +/// milliseconds. +pub type IntervalBoundType { + NegativeInfinity + Finite(a) + PositiveInfinity +} + +// ## Constructing + +/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF) +/// +/// ```aiken +/// interval.after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF) +/// +/// ```aiken +/// interval.entirely_after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn entirely_after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and including) the given bound. 
i.e (-INF, upper_bound] +/// +/// ```aiken +/// interval.before(100) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) +/// +/// ```aiken +/// interval.entirely_before(10) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] +/// +/// ```aiken +/// interval.between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) +/// +/// ```aiken +/// interval.entirely_between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an empty interval that contains no value. +/// +/// ```aiken +/// interval.contains(empty, 0) == False +/// interval.contains(empty, 1000) == False +/// ``` +pub const empty: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + } + +/// Create an interval that contains every possible values. i.e. (-INF, +INF) +/// +/// ```aiken +/// interval.contains(everything, 0) == True +/// interval.contains(everything, 1000) == True +/// ``` +pub const everything: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } + +// ## Inspecting + +/// Checks whether an element is contained within the interval. 
+/// +/// ```aiken +/// let iv = +/// Interval { +/// lower_bound: IntervalBound { +/// bound_type: Finite(14), +/// is_inclusive: True +/// }, +/// upper_bound: IntervalBound { +/// bound_type: Finite(42), +/// is_inclusive: False +/// }, +/// } +/// +/// interval.contains(iv, 25) == True +/// interval.contains(iv, 0) == False +/// interval.contains(iv, 14) == True +/// interval.contains(iv, 42) == False +/// ``` +pub fn contains(self: Interval, elem: Int) -> Bool { + let is_greater_than_lower_bound = + when self.lower_bound.bound_type is { + NegativeInfinity -> True + Finite(lower_bound) -> + if self.lower_bound.is_inclusive { + elem >= lower_bound + } else { + elem > lower_bound + } + PositiveInfinity -> False + } + + let is_smaller_than_upper_bound = + when self.upper_bound.bound_type is { + NegativeInfinity -> False + Finite(upper_bound) -> + if self.upper_bound.is_inclusive { + elem <= upper_bound + } else { + elem < upper_bound + } + PositiveInfinity -> True + } + + is_greater_than_lower_bound && is_smaller_than_upper_bound +} + +test contains_1() { + let iv = everything + contains(iv, 14) +} + +test contains_2() { + let iv = entirely_before(15) + contains(iv, 14) +} + +test contains_3() { + let iv = before(14) + contains(iv, 14) +} + +test contains_4() { + let iv = entirely_before(14) + !contains(iv, 14) +} + +test contains_5() { + let iv = entirely_after(13) + contains(iv, 14) +} + +test contains_6() { + let iv = after(14) + contains(iv, 14) +} + +test contains_7() { + let iv = entirely_after(14) + !contains(iv, 14) +} + +test contains_8() { + let iv = between(42, 1337) + !contains(iv, 14) +} + +test contains_9() { + let iv = between(0, 42) + contains(iv, 14) +} + +test contains_10() { + let iv = between(0, 42) + contains(iv, 42) +} + +test contains_11() { + let iv = entirely_between(0, 42) + !contains(iv, 0) +} + +test contains_12() { + let iv = empty + !contains(iv, 14) +} + +/// Tells whether an interval is empty; i.e. that is contains no value. +/// +/// ```aiken +/// let iv1 = interval.empty +/// +/// let iv2 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// } +/// +/// let iv3 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// +/// interval.is_empty(iv1) == True +/// interval.is_empty(iv2) == True +/// interval.is_empty(iv3) == False +/// +/// // Note: Two empty intervals are not necessarily equal. 
+/// iv1 != iv2 +/// ``` +pub fn is_empty(self: Interval) -> Bool { + let ordering = + compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) + + when ordering is { + Greater -> True + Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) + Less -> { + let is_open_interval = + !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive + if is_open_interval { + when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { + (Finite(lower_bound), Finite(upper_bound)) -> + lower_bound + 1 == upper_bound + _ -> False + } + } else { + False + } + } + } +} + +/// Check whether the interval is entirely after the point "a" +/// +/// ```aiken +/// interval.is_entirely_after(interval.after(10), 5) == True +/// interval.is_entirely_after(interval.after(10), 10) == False +/// interval.is_entirely_after(interval.after(10), 15) == False +/// interval.is_entirely_after(interval.between(10, 20), 30) == False +/// interval.is_entirely_after(interval.between(10, 20), 5) == True +pub fn is_entirely_after(self: Interval, point: Int) -> Bool { + when self.lower_bound.bound_type is { + Finite(low) -> + if self.lower_bound.is_inclusive { + point < low + } else { + point <= low + } + _ -> False + } +} + +test is_entirely_after_1() { + is_entirely_after(after(10), 5) +} + +test is_entirely_after_2() { + !is_entirely_after(after(10), 10) +} + +test is_entirely_after_3() { + !is_entirely_after(after(10), 15) +} + +test is_entirely_after_4() { + !is_entirely_after(between(10, 20), 30) +} + +test is_entirely_after_5() { + is_entirely_after(between(10, 20), 5) +} + +test is_entirely_after_6() { + is_entirely_after(entirely_after(10), 10) +} + +test is_entirely_after_7() { + !is_entirely_after(before(10), 5) +} + +test is_entirely_after_8() { + !is_entirely_after(before(10), 15) +} + +test is_entirely_after_9() { + !is_entirely_after(entirely_before(10), 5) +} + +/// Check whether the interval is entirely before the point "a" +/// +/// ```aiken +/// interval.is_entirely_before(interval.before(10), 15) == True +/// interval.is_entirely_before(interval.before(10), 10) == False +/// interval.is_entirely_before(interval.before(10), 5) == False +/// interval.is_entirely_before(interval.between(10, 20), 30) == True +/// interval.is_entirely_before(interval.between(10, 20), 5) == False +pub fn is_entirely_before(self: Interval, point: Int) -> Bool { + when self.upper_bound.bound_type is { + Finite(hi) -> + if self.upper_bound.is_inclusive { + hi < point + } else { + hi <= point + } + _ -> False + } +} + +test is_entirely_before_1() { + is_entirely_before(before(10), 15) +} + +test is_entirely_before_2() { + !is_entirely_before(before(10), 10) +} + +test is_entirely_before_3() { + !is_entirely_before(before(10), 5) +} + +test is_entirely_before_4() { + is_entirely_before(between(10, 20), 30) +} + +test is_entirely_before_5() { + !is_entirely_before(between(10, 20), 5) +} + +test is_entirely_before_6() { + is_entirely_before(entirely_before(10), 10) +} + +test is_entirely_before_7() { + !is_entirely_before(after(10), 15) +} + +test is_entirely_before_8() { + !is_entirely_before(after(10), 5) +} + +test is_entirely_before_9() { + !is_entirely_before(entirely_after(10), 5) +} + +// ## Combining + +/// Computes the smallest interval containing the two given intervals, if any +/// +/// ```aiken +/// let iv1 = between(0, 10) +/// let iv2 = between(2, 14) +/// hull(iv1, iv2) == between(0, 14) +/// +/// let iv1 = between(5, 10) +/// let iv2 = before(0) +/// hull(iv1, iv2) == 
before(10) +/// +/// let iv1 = entirely_after(0) +/// let iv2 = between(10, 42) +/// hull(iv1, iv2) = entirely_after(0) +/// ``` +pub fn hull(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: min(iv1.lower_bound, iv2.lower_bound), + upper_bound: max(iv1.upper_bound, iv2.upper_bound), + } +} + +test hull_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + hull(iv1, iv2) == between(0, 14) +} + +test hull_2() { + let iv1 = between(5, 10) + let iv2 = before(0) + hull(iv1, iv2) == before(10) +} + +test hull_3() { + let iv1 = entirely_after(0) + let iv2 = between(10, 42) + hull(iv1, iv2) == entirely_after(0) +} + +/// Computes the largest interval contains in the two given intervals, if any. +/// +/// ```aiken +/// let iv1 = interval.between(0, 10) +/// let iv2 = interval.between(2, 14) +/// interval.intersection(iv1, iv2) == interval.between(2, 10) +/// +/// let iv1 = interval.entirely_before(10) +/// let iv2 = interval.entirely_after(0) +/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10) +/// +/// let iv1 = interval.between(0, 1) +/// let iv2 = interval.between(2, 3) +/// interval.intersection(iv1, iv2) |> interval.is_empty +/// ``` +pub fn intersection(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: max(iv1.lower_bound, iv2.lower_bound), + upper_bound: min(iv1.upper_bound, iv2.upper_bound), + } +} + +test intersection_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + intersection(iv1, iv2) == between(2, 10) +} + +test intersection_2() { + let iv1 = between(0, 1) + let iv2 = between(1, 2) + intersection(iv1, iv2) == between(1, 1) +} + +test intersection_3() { + let iv1 = between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_4() { + let iv1 = entirely_between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_5() { + let iv1 = between(0, 10) + let iv2 = before(4) + intersection(iv1, iv2) == between(0, 4) +} + +test intersection_6() { + let iv1 = entirely_before(10) + let iv2 = entirely_after(0) + intersection(iv1, iv2) == entirely_between(0, 10) +} + +/// Return the highest bound of the two. +/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.max(ib1, ib2) == ib2 +/// ``` +pub fn max( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> right + Equal -> left + Greater -> left + } +} + +/// Return the smallest bound of the two. 
+/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.min(ib1, ib2) == ib1 +/// ``` +pub fn min( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> left + Equal -> left + Greater -> right + } +} + +fn compare_bound( + left: IntervalBound, + right: IntervalBound, +) -> Ordering { + when compare_bound_type(left.bound_type, right.bound_type) is { + Less -> Less + Greater -> Greater + Equal -> + if left.is_inclusive == right.is_inclusive { + Equal + } else if left.is_inclusive { + Greater + } else { + Less + } + } +} + +fn compare_bound_type( + left: IntervalBoundType, + right: IntervalBoundType, +) -> Ordering { + when left is { + NegativeInfinity -> + when right is { + NegativeInfinity -> Equal + _ -> Less + } + PositiveInfinity -> + when right is { + PositiveInfinity -> Equal + _ -> Greater + } + Finite(left) -> + when right is { + NegativeInfinity -> Greater + PositiveInfinity -> Less + Finite(right) -> + if left < right { + Less + } else if left == right { + Equal + } else { + Greater + } + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math.ak new file mode 100644 index 00000000..dd575e7a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math.ak @@ -0,0 +1,424 @@ +//// This module contains some basic Math utilities. Standard arithmetic +//// operations on integers are available through native operators: +//// +//// Operator | Description +//// --- | :--- +//// `+` | Arithmetic sum +//// `-` | Arithmetic difference +//// `/` | Whole division +//// `*` | Arithmetic multiplication +//// `%` | Remainder by whole division +//// +//// Here are a few examples: +//// +//// ```aiken +//// 1 + 1 // 2 +//// 10 - 2 // 8 +//// 40 / 14 // 2 +//// 3 * 4 // 12 +//// 10 % 3 // 1 + +use aiken/builtin + +/// Calculate the absolute value of an integer. +/// +/// ```aiken +/// math.abs(-42) == 42 +/// math.abs(14) == 14 +/// ``` +pub fn abs(self: Int) -> Int { + if self < 0 { + 0 - self + } else { + self + } +} + +test abs_1() { + abs(14) == 14 +} + +test abs_2() { + abs(-42) == 42 +} + +/// Restrict the value of an integer between two min and max bounds +/// +/// ```aiken +/// math.clamp(14, min: 0, max: 10) == 10 +/// ``` +pub fn clamp(self: Int, min: Int, max: Int) -> Int { + if self < min { + min + } else { + if self > max { + max + } else { + self + } + } +} + +test clamp_1() { + clamp(14, min: 0, max: 10) == 10 +} + +test clamp_2() { + clamp(7, min: 0, max: 10) == 7 +} + +test clamp_3() { + clamp(7, min: 10, max: 100) == 10 +} + +/// The greatest common divisor of two integers. +/// +/// ```aiken +/// math.gcd(42, 14) == 14 +/// math.gcd(14, 42) == 14 +/// math.gcd(0, 0) == 0 +/// ``` +pub fn gcd(x: Int, y: Int) -> Int { + abs(do_gcd(x, y)) +} + +fn do_gcd(x: Int, y: Int) -> Int { + when y is { + 0 -> x + _ -> do_gcd(y, x % y) + } +} + +test gcd_test1() { + gcd(10, 300) == 10 +} + +test gcd_test2() { + gcd(-10, 300) == 10 +} + +test gcd_test3() { + gcd(42, 14) == 14 +} + +/// Checks if an integer has a given integer square root x. +/// The check has constant time complexity $O(1)$. 
+/// +/// ```aiken +/// math.is_sqrt(0, 0) +/// math.is_sqrt(25, 5) +/// !math.is_sqrt(25, -5) +/// math.is_sqrt(44203, 210) +/// ``` +pub fn is_sqrt(self: Int, x: Int) -> Bool { + x * x <= self && ( x + 1 ) * ( x + 1 ) > self +} + +test is_sqrt1() { + is_sqrt(44203, 210) +} + +test is_sqrt2() { + is_sqrt(975461057789971041, 987654321) +} + +/// The logarithm in base `b` of an element using integer divisions. +/// +/// ```aiken +/// math.log(10, base: 2) == 3 +/// math.log(42, base: 2) == 5 +/// math.log(42, base: 3) == 3 +/// math.log(5, base: 0) == 0 +/// math.log(4, base: 4) == 1 +/// math.log(4, base: 42) == 0 +/// ``` +pub fn log(self: Int, base: Int) -> Int { + if base <= 0 { + 0 + } else if self == base { + 1 + } else if self < base { + 0 + } else { + 1 + log(self / base, base) + } +} + +test log_10_2() { + log(10, base: 2) == 3 +} + +test log_42_2() { + log(42, base: 2) == 5 +} + +test log_42_3() { + log(42, base: 3) == 3 +} + +test log_5_0() { + log(5, base: 0) == 0 +} + +test log_4_4() { + log(4, base: 4) == 1 +} + +test log_4_43() { + log(4, base: 43) == 0 +} + +/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. +/// +/// ```aiken +/// math.log2(1) == 0 +/// math.log2(2) == 1 +/// math.log2(3) == 1 +/// math.log2(4) == 2 +/// math.log2(256) == 8 +/// math.log2(257) == 8 +/// math.log2(511) == 8 +/// math.log2(1025) == 10 +/// ``` +pub fn log2(x: Int) -> Int { + expect x > 0 + let s = builtin.integer_to_bytearray(True, 0, x) + let len = builtin.length_of_bytearray(s) + let b = builtin.index_bytearray(s, 0) + len * 8 - if b < 2 { + 8 + } else if b < 4 { + 7 + } else if b < 8 { + 6 + } else if b < 16 { + 5 + } else if b < 32 { + 4 + } else if b < 64 { + 3 + } else if b < 128 { + 2 + } else { + 1 + } +} + +test log2_matrix() { + and { + log2(1) == 0, + log2(2) == 1, + log2(3) == 1, + log2(4) == 2, + log2(256) == 8, + log2(257) == 8, + log2(511) == 8, + log2(1025) == 10, + } +} + +/// Return the maximum of two integers. +pub fn max(a: Int, b: Int) -> Int { + if a > b { + a + } else { + b + } +} + +test max_1() { + max(0, 0) == 0 +} + +test max_2() { + max(14, 42) == 42 +} + +test max_3() { + max(42, 14) == 42 +} + +/// Return the minimum of two integers. +pub fn min(a: Int, b: Int) -> Int { + if a > b { + b + } else { + a + } +} + +test min_1() { + min(0, 0) == 0 +} + +test min_2() { + min(14, 42) == 14 +} + +test min_3() { + min(42, 14) == 14 +} + +/// Calculates a number to the power of `e` using the exponentiation by +/// squaring method. +/// +/// ```aiken +/// math.pow(3, 5) == 243 +/// math.pow(7, 2) == 49 +/// math.pow(3, -4) == 0 +/// math.pow(0, 0) == 1 +/// math.pow(513, 3) == 135005697 +/// ``` +pub fn pow(self: Int, e: Int) -> Int { + if e < 0 { + 0 + } else if e == 0 { + 1 + } else if e % 2 == 0 { + pow(self * self, e / 2) + } else { + self * pow(self * self, ( e - 1 ) / 2) + } +} + +test pow_3_5() { + pow(3, 5) == 243 +} + +test pow_7_2() { + pow(7, 2) == 49 +} + +test pow_3__4() { + // negative powers round to zero + pow(3, -4) == 0 +} + +test pow_0_0() { + // sorry math + pow(0, 0) == 1 +} + +test pow_513_3() { + pow(513, 3) == 135005697 +} + +test pow_2_4() { + pow(2, 4) == 16 +} + +test pow_2_42() { + pow(2, 42) == 4398046511104 +} + +/// Calculates the power of 2 for a given exponent `e`. Much cheaper than +/// using `pow(2, _)` for small exponents $0 < e < 256$. 
+/// +/// ```aiken +/// math.pow2(-2) == 0 +/// math.pow2(0) == 1 +/// math.pow2(1) == 2 +/// math.pow2(4) == 16 +/// math.pow2(42) == 4398046511104 +/// ``` +pub fn pow2(e: Int) -> Int { + // do_pow2(e, 1) + if e < 8 { + if e < 0 { + 0 + } else { + builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) + } + } else if e < 32 { + 256 * pow2(e - 8) + } else { + 4294967296 * pow2(e - 32) + } +} + +test pow2_neg() { + pow2(-2) == 0 +} + +test pow2_0() { + pow2(0) == 1 +} + +test pow2_1() { + pow2(1) == 2 +} + +test pow2_4() { + pow2(4) == 16 +} + +test pow2_42() { + pow2(42) == 4398046511104 +} + +test pow2_256() { + pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 +} + +/// Calculates the square root of an integer using the [Babylonian +/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer +/// nearest to the square root. +/// +/// Returns `None` for negative values. +/// +/// ```aiken +/// math.sqrt(0) == Some(0) +/// math.sqrt(25) == Some(5) +/// math.sqrt(44203) == Some(210) +/// math.sqrt(-42) == None +/// ``` +/// +/// > [!TIP] +/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. +pub fn sqrt(self: Int) -> Option { + if self < 0 { + None + } else if self <= 1 { + Some(self) + } else { + Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) + } +} + +// The basic idea is that if x is an overestimate to the square root of a +// non-negative real number S then S/x will be an underestimate, or vice versa, +// and so the average of these two numbers may reasonably be expected to provide a +// better approximation (though the formal proof of that assertion depends on the +// inequality of arithmetic and geometric means that shows this average is always +// an overestimate of the square root. +fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { + if y >= x { + x + } else { + sqrt_babylonian(self, y, ( y + self / y ) / 2) + } +} + +test sqrt1() { + sqrt(0) == Some(0) +} + +test sqrt2() { + sqrt(1) == Some(1) +} + +test sqrt3() { + sqrt(25) == Some(5) +} + +test sqrt4() { + sqrt(44203) == Some(210) +} + +test sqrt5() { + sqrt(975461057789971041) == Some(987654321) +} + +test sqrt6() { + sqrt(-42) == None +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak new file mode 100644 index 00000000..88fe7ab7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak @@ -0,0 +1,871 @@ +//// This module implements operations between rational numbers. +//// +//// > [!CAUTION] +//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. +//// > +//// > Thus, for example: +//// > +//// > ```aiken +//// > rational.new(2, 3) != rational.new(4, 6) +//// > ``` +//// > +//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. + +use aiken/builtin +use aiken/collection/list +use aiken/math +use aiken/option + +/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. +pub opaque type Rational { + numerator: Int, + denominator: Int, +} + +// ## Constructing + +/// Create a new `Rational` from an `Int`. 
+/// +/// ```aiken +/// Some(rational.from_int(14)) == rational.new(14, 1) +/// Some(rational.from_int(-5)) == rational.new(-5, 1) +/// Some(rational.from_int(0)) == rational.new(0, 1) +/// ``` +pub fn from_int(numerator: Int) -> Rational { + Rational { numerator, denominator: 1 } +} + +test from_int_1() { + and { + (from_int(14) == ratio(14, 1))?, + (from_int(-5) == ratio(-5, 1))?, + (from_int(0) == ratio(0, 1))?, + } +} + +/// An unsafe constructor for `Rational` values. Assumes that the following invariants are +/// enforced: +/// +/// - the denominator is positive (the sign is managed in the numerator); +/// - the denominator is not null. +/// +/// This function is mainly used as a quick way to construct rationals from literal values. +fn ratio(numerator: Int, denominator: Int) -> Rational { + Rational { numerator, denominator } +} + +/// Make a `Rational` number from the ratio of two integers. +/// +/// Returns `None` when the denominator is null. +/// +/// ```aiken +/// rational.new(14, 42) == Some(r) +/// rational.new(14, 0) == None +/// ``` +pub fn new(numerator: Int, denominator: Int) -> Option { + if denominator == 0 { + None + } else if denominator < 0 { + Some(Rational { numerator: -numerator, denominator: -denominator }) + } else { + Some(Rational { numerator, denominator }) + } +} + +test new_1() { + and { + (new(2, 0) == None)?, + (new(2, 3) == Some(ratio(2, 3)))?, + (new(-2, 3) == Some(ratio(-2, 3)))?, + (new(2, -3) == Some(ratio(-2, 3)))?, + (new(2, 4) == Some(ratio(2, 4)))?, + (new(-2, -3) == Some(ratio(2, 3)))?, + (new(-2, -4) == Some(ratio(2, 4)))?, + } +} + +/// A null `Rational`. +pub const zero: Rational = Rational { numerator: 0, denominator: 1 } + +test zero_1() { + zero == ratio(0, 1) +} + +// ## Inspecting + +/// Get the denominator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.denominator(x) == 3 +/// ``` +pub fn denominator(self: Rational) -> Int { + self.denominator +} + +test denominator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + and { + (denominator(x) == 3)?, + (denominator(y) == 3)?, + (denominator(z) == 3)?, + (denominator(w) == 3)?, + } +} + +/// Get the numerator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.numerator(x) == 2 +/// ``` +pub fn numerator(self: Rational) -> Int { + self.numerator +} + +test numerator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + + and { + (numerator(x) == 2)?, + (numerator(y) == -2)?, + (numerator(z) == -2)?, + (numerator(w) == 2)?, + } +} + +// ## Modifying + +/// Absolute value of a `Rational`. +/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.abs(x) == x +/// rational.abs(y) == x +/// ``` +pub fn abs(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + Rational { numerator: math.abs(a_n), denominator: a_d } +} + +test abs_examples() { + and { + (abs(ratio(5, 2)) == ratio(5, 2))?, + (abs(ratio(-5, 2)) == ratio(5, 2))?, + (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, + } +} + +/// Change the sign of a `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.negate(x) == y +/// rational.negate(y) == x +/// ``` +pub fn negate(a: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = a + Rational { numerator: -a_n, denominator: a_d } +} + +test negate_1() { + and { + (negate(ratio(5, 2)) == ratio(-5, 2))?, + (negate(ratio(-5, 2)) == ratio(5, 2))?, + (negate(negate(ratio(5, 2))) == ratio(5, 2))?, + } +} + +/// Reciprocal of a `Rational` number. That is, a new `Rational` where the +/// numerator and denominator have been swapped. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 5) +/// rational.reciprocal(x) == rational.new(5, 2) +/// +/// let y = rational.zero +/// rational.reciprocal(y) == None +/// ``` +pub fn reciprocal(self: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = self + if a_n < 0 { + Some(Rational { numerator: -a_d, denominator: -a_n }) + } else if a_n > 0 { + Some(Rational { numerator: a_d, denominator: a_n }) + } else { + None + } +} + +test reciprocal_1() { + and { + (reciprocal(ratio(5, 2)) == new(2, 5))?, + (reciprocal(ratio(-5, 2)) == new(-2, 5))?, + (reciprocal(ratio(0, 2)) == None)?, + (reciprocal(ratio(2, 3)) == new(3, 2))?, + (reciprocal(ratio(-2, 3)) == new(-3, 2))?, + } +} + +/// Reduce a rational to its irreducible form. This operation makes the +/// numerator and denominator coprime. +/// +/// ```aiken +/// expect Some(x) = rational.new(80, 200) +/// Some(rational.reduce(x)) == rational.new(2, 5) +/// ``` +pub fn reduce(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + let d = math.gcd(a_n, a_d) + Rational { numerator: a_n / d, denominator: a_d / d } +} + +test reduce_1() { + and { + (reduce(ratio(80, 200)) == ratio(2, 5))?, + (reduce(ratio(-5, 1)) == ratio(-5, 1))?, + (reduce(ratio(0, 3)) == ratio(0, 1))?, + } +} + +// ## Combining + +// ### Arithmetic operations + +/// Addition: sum of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.add(x, y)) == rational.new(17, 12) +/// ``` +pub fn add(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } +} + +test add_1() { + add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) +} + +test add_2() { + add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) +} + +/// Division: quotient of two rational values. Returns `None` when the second +/// value is null. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.div(x, y) == rational.new(8, 9) +/// ``` +pub fn div(left: Rational, right: Rational) -> Option { + reciprocal(right) |> option.map(mul(left, _)) +} + +test div_1() { + div(ratio(2, 3), ratio(3, 4)) == new(8, 9) +} + +test div_2() { + div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) +} + +/// Multiplication: the product of two rational values. 
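+/// As with the other arithmetic operations, the product is not automatically
+/// reduced; use [`reduce`](#reduce) when a canonical form is needed.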
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.mul(x, y)) == rational.new(6, 12) +/// ``` +pub fn mul(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_n, denominator: a_d * b_d } +} + +test mul_1() { + mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) +} + +test mul_2() { + mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) +} + +test mul_3() { + let result = + ratio(2, 5) + |> mul(ratio(1, 8)) + |> mul(ratio(3, 10)) + |> mul(ratio(21, 100)) + |> mul(ratio(3, 5)) + |> mul(ratio(2, 8)) + |> mul(ratio(4, 10)) + |> mul(ratio(22, 100)) + |> reduce + + result == ratio(2079, 50000000) +} + +/// Subtraction: difference of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.sub(x, y)) == rational.new(-1, 12) +/// ``` +pub fn sub(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } +} + +test sub_1() { + sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) +} + +test sub_2() { + sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) +} + +test sub_3() { + sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) +} + +// ### Ordering + +/// Compare two rationals for an ordering. This is safe to use even for +/// non-reduced rationals. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// expect Some(z) = rational.new(4, 6) +/// +/// compare(x, y) == Less +/// compare(y, x) == Greater +/// compare(x, x) == Equal +/// compare(x, z) == Equal +/// ``` +pub fn compare(left: Rational, right: Rational) -> Ordering { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + + let l = a_n * b_d + let r = b_n * a_d + + if l < r { + Less + } else if l > r { + Greater + } else { + Equal + } +} + +test compare_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + expect Some(z) = new(4, 6) + and { + compare(x, y) == Less, + compare(y, x) == Greater, + compare(x, x) == Equal, + compare(x, z) == Equal, + } +} + +/// Comparison of two rational values using a chosen heuristic. For example: +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.compare_with(x, >, y) == False +/// rational.compare_with(y, >, x) == True +/// rational.compare_with(x, >, x) == False +/// rational.compare_with(x, >=, x) == True +/// rational.compare_with(x, ==, x) == True +/// rational.compare_with(x, ==, y) == False +/// ``` +pub fn compare_with( + left: Rational, + with: fn(Int, Int) -> Bool, + right: Rational, +) -> Bool { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + with(a_n * b_d, b_n * a_d) +} + +// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. + +test compare_with_eq() { + let eq = + compare_with(_, fn(l, r) { l == r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
+} + +test compare_with_neq() { + let neq = + compare_with(_, fn(l, r) { l != r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + neq(x, y)? && neq(y, x)? && !neq(x, x)? +} + +test compare_with_gte() { + let gte = + compare_with(_, fn(l, r) { l >= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gte(x, y)? && gte(y, x)? && gte(x, x)? +} + +test compare_with_gt() { + let gt = + compare_with(_, fn(l, r) { l > r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gt(x, y)? && gt(y, x)? && !gt(x, x)? +} + +test compare_with_lte() { + let lte = + compare_with(_, fn(l, r) { l <= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lte(x, y)? && !lte(y, x)? && lte(x, x)? +} + +test compare_with_lt() { + let lt = + compare_with(_, fn(l, r) { l < r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lt(x, y)? && !lt(y, x)? && !lt(x, x)? +} + +// ### Means + +/// Calculate the arithmetic mean between two `Rational` values. +/// +/// ```aiken +/// let x = rational.from_int(0) +/// let y = rational.from_int(1) +/// let z = rational.from_int(2) +/// +/// expect Some(result) = rational.arithmetic_mean([x, y, z]) +/// +/// rational.compare(result, y) == Equal +/// ``` +pub fn arithmetic_mean(self: List) -> Option { + div(list.foldr(self, zero, add), from_int(list.length(self))) +} + +test arithmetic_mean_1() { + let x = ratio(1, 2) + let y = ratio(1, 2) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(1, 2) +} + +test arithmetic_mean_2() { + let x = ratio(1, 1) + let y = ratio(2, 1) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(3, 2) +} + +test arithmetic_mean_3() { + let xs = + [ + ratio(1, 1), + ratio(2, 1), + ratio(3, 1), + ratio(4, 1), + ratio(5, 1), + ratio(6, 1), + ] + expect Some(z) = arithmetic_mean(xs) + reduce(z) == ratio(7, 2) +} + +/// Calculate the geometric mean between two `Rational` values. This returns +/// either the exact result or the smallest integer nearest to the square root +/// for the numerator and denominator. 
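+/// Returns `None` when exactly one of the two operands is negative, since the
+/// square root of a negative product is undefined.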
+/// +/// ```aiken +/// expect Some(x) = rational.new(1, 3) +/// expect Some(y) = rational.new(1, 6) +/// +/// rational.geometric_mean(x, y) == rational.new(1, 4) +/// ``` +pub fn geometric_mean(left: Rational, right: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + when math.sqrt(a_n * b_n) is { + Some(numerator) -> + when math.sqrt(a_d * b_d) is { + Some(denominator) -> Some(Rational { numerator, denominator }) + None -> None + } + None -> None + } +} + +test geometric_mean1() { + expect Some(x) = new(1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == new(1, 2) +} + +test geometric_mean2() { + expect Some(x) = new(-1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean3() { + expect Some(x) = new(1, 2) + expect Some(y) = new(-1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean4() { + expect Some(x) = new(1, 3) + expect Some(y) = new(1, 6) + geometric_mean(x, y) == new(1, 4) +} + +test geometric_mean5() { + expect Some(x) = new(67, 2500) + expect Some(y) = new(35331, 1000) + expect Some(yi) = reciprocal(y) + geometric_mean(x, yi) == new(258, 9398) +} + +// ## Transforming + +/// Returns the smallest `Int` not less than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.ceil(x) == 1 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.ceil(y) == 4 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.ceil(z) == -4 +/// ``` +pub fn ceil(self: Rational) -> Int { + let Rational { numerator, denominator } = self + if builtin.remainder_integer(numerator, denominator) > 0 { + builtin.quotient_integer(numerator, denominator) + 1 + } else { + builtin.quotient_integer(numerator, denominator) + } +} + +test ceil_1() { + and { + (ceil(ratio(13, 5)) == 3)?, + (ceil(ratio(15, 5)) == 3)?, + (ceil(ratio(16, 5)) == 4)?, + (ceil(ratio(-3, 5)) == 0)?, + (ceil(ratio(-5, 5)) == -1)?, + (ceil(ratio(-14, 3)) == -4)?, + (ceil(ratio(-14, 6)) == -2)?, + (ceil(ratio(44, 14)) == 4)?, + } +} + +/// Returns the greatest `Int` no greater than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.floor(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.floor(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.floor(z) == -5 +/// ``` +pub fn floor(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + a_n / a_d +} + +test floor_1() { + and { + (floor(ratio(5, 2)) == 2)?, + (floor(ratio(5, 3)) == 1)?, + (floor(ratio(5, 4)) == 1)?, + (floor(ratio(5, 5)) == 1)?, + (floor(ratio(5, 6)) == 0)?, + (floor(ratio(8, 3)) == 2)?, + (floor(ratio(-14, 3)) == -5)?, + } +} + +/// Computes the rational number x raised to the power y. Returns `None` for +/// invalid exponentiation. 
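+/// An exponentiation is invalid when the numerator is null and the exponent is
+/// not strictly positive (e.g. `pow(zero, 0)` or `pow(zero, -1)`).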
+/// +/// ```aiken +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) +/// +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) +/// ``` +pub fn pow(x: Rational, y: Int) -> Option { + let Rational { numerator: a, denominator: b } = x + + if a == 0 && y <= 0 { + None + } else if y > 0 { + Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) + } else if y < 0 { + Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) + } else { + Some(Rational { numerator: 1, denominator: 1 }) + } +} + +test pow_negative_exponent_non_zero_fraction() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, -3) + expect Some(expected_result) = new(125000, 1) + reduce(calculated_result) == expected_result +} + +test pow_positive_exponent() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, 3) + expect Some(expected_result) = new(1, 125000) + reduce(calculated_result) == expected_result +} + +test pow_exponent_zero() { + expect Some(base) = new(50, 2500) + pow(base, 0) == new(1, 1) +} + +test pow_rational_zero_exponent_zero() { + expect Some(base) = new(0, 1) + pow(base, 0) == None +} + +/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of +/// an `Int` and `Rational` (n, f) such that: +/// +/// - `r = n + f`; +/// - `n` and `f` have the same sign as `r`; +/// - `f` has an absolute value less than 1. +pub fn proper_fraction(self: Rational) -> (Int, Rational) { + let Rational { numerator, denominator } = self + ( + builtin.quotient_integer(numerator, denominator), + Rational { + numerator: builtin.remainder_integer(numerator, denominator), + denominator, + }, + ) +} + +test proper_fraction_1() { + let r = ratio(10, 7) + let (n, f) = proper_fraction(r) + and { + (n == 1)?, + (f == ratio(3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_2() { + let r = ratio(-10, 7) + let (n, f) = proper_fraction(r) + and { + (n == -1)?, + (f == ratio(-3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_3() { + let r = ratio(4, 2) + let (n, f) = proper_fraction(r) + and { + (n == 2)?, + (f == ratio(0, 2))?, + (r == add(from_int(n), f))?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, the greater value is returned (it +/// rounds half towards positive infinity). +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.round(x) == 1 +/// +/// expect Some(y) = rational.new(3, 2) +/// rational.round(y) == 2 +/// +/// expect Some(z) = rational.new(-3, 2) +/// rational.round(z) == -1 +/// ``` +/// +/// > [!CAUTION] +/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. +/// > If you need this behaviour, use [`round_even`](#round_even). 
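+/// >
+/// > For instance, on the equidistant value `5/2`:
+/// >
+/// > ```aiken
+/// > expect Some(x) = rational.new(5, 2)
+/// > rational.round(x) == 3
+/// > rational.round_even(x) == 2
+/// > ```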
+pub fn round(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let is_negative = f.numerator < 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_negative { + n + } else { + n + 1 + } + Greater -> + if is_negative { + n - 1 + } else { + n + 1 + } + } +} + +test round_1() { + and { + (round(ratio(10, 7)) == 1)?, + (round(ratio(11, 7)) == 2)?, + (round(ratio(3, 2)) == 2)?, + (round(ratio(5, 2)) == 3)?, + (round(ratio(-3, 2)) == -1)?, + (round(ratio(-2, 3)) == -1)?, + (round(ratio(-10, 7)) == -1)?, + (round(ratio(4, 2)) == 2)?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, it returns the value that is even (it +/// rounds half to even, also known as 'banker's rounding'). +/// +/// ```aiken +/// expect Some(w) = rational.new(2, 3) +/// rational.round_even(w) == 1 +/// +/// expect Some(x) = rational.new(3, 2) +/// rational.round_even(x) == 2 +/// +/// expect Some(y) = rational.new(5, 2) +/// rational.round_even(y) == 2 +/// +/// expect Some(y) = rational.new(-3, 2) +/// rational.round_even(y) == -2 +/// ``` +pub fn round_even(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let m = + when compare(f, ratio(0, 1)) is { + Less -> -1 + _ -> 1 + } + + let is_even = n % 2 == 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_even { + n + } else { + n + m + } + Greater -> n + m + } +} + +test round_even_1() { + and { + (round_even(ratio(10, 7)) == 1)?, + (round_even(ratio(11, 7)) == 2)?, + (round_even(ratio(3, 2)) == 2)?, + (round_even(ratio(5, 2)) == 2)?, + (round_even(ratio(-3, 2)) == -2)?, + (round_even(ratio(-2, 3)) == -1)?, + (round_even(ratio(-10, 7)) == -1)?, + (round_even(ratio(4, 2)) == 2)?, + } +} + +/// Returns the nearest `Int` between zero and a given `Rational`. 
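+/// Said differently, it rounds towards zero.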
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.truncate(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.truncate(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.truncate(z) == -4 +/// ``` +pub fn truncate(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + builtin.quotient_integer(a_n, a_d) +} + +test truncate_1() { + and { + (truncate(ratio(5, 2)) == 2)?, + (truncate(ratio(5, 3)) == 1)?, + (truncate(ratio(5, 4)) == 1)?, + (truncate(ratio(5, 5)) == 1)?, + (truncate(ratio(5, 6)) == 0)?, + (truncate(ratio(8, 3)) == 2)?, + (truncate(ratio(-14, 3)) == -4)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak new file mode 100644 index 00000000..ab8cbc17 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak @@ -0,0 +1,65 @@ +use aiken/fuzz.{both, either, map} +use aiken/math/rational.{Rational, new, pow} + +const any_positive_rational: Fuzzer = + either( + map( + both(fuzz.int_at_least(1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_negative_rational: Fuzzer = + either( + map( + both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_non_zero_rational: Fuzzer = + either(any_negative_rational, any_positive_rational) + +test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 0) + expect Some(expected_result) = new(1, 1) + calculated_result == expected_result +} + +test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 1) + calculated_result == rational +} + +test prop_power_numerator_zero_exponent_negative_returns_none( + (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), +) { + expect Some(fraction) = new(0, denominator) + expect None = pow(fraction, exponent) +} + +test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { + expect Some(unit) = new(1, 1) + expect Some(calculated_result) = pow(unit, exponent) + calculated_result == unit +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/option.ak new file mode 100644 index 00000000..cf5ef7dc --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/option.ak @@ -0,0 +1,312 @@ +//// A type to capture optional results; useful for handling errors. +//// +//// Note that the `Option` type and its constructors are readily available in Aiken. They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. + +// ## Inspecting + +/// Asserts whether an option is `None`. 
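+///
+/// ```aiken
+/// option.is_none(None) == True
+/// option.is_none(Some(0)) == False
+/// ```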
+pub fn is_none(self: Option) -> Bool { + when self is { + Some(_) -> False + _ -> True + } +} + +test is_none_1() { + is_none(Some(0)) == False +} + +test is_none_2() { + is_none(None) == True +} + +/// Asserts whether an option is `Some`, irrespective of the value it contains. +pub fn is_some(self: Option) -> Bool { + when self is { + Some(_) -> True + _ -> False + } +} + +test is_some_1() { + is_some(Some(0)) == True +} + +test is_some_2() { + is_some(None) == False +} + +// ## Combining + +/// Chain together many computations that may fail. +/// +/// ```aiken +/// self +/// |> dict.get(policy_id) +/// |> option.and_then(dict.get(_, asset_name)) +/// |> option.or_else(0) +/// ``` +pub fn and_then( + self: Option, + then: fn(a) -> Option, +) -> Option { + when self is { + None -> None + Some(a) -> then(a) + } +} + +fn try_decrement(n: Int) -> Option { + if n > 0 { + Some(n - 1) + } else { + None + } +} + +test and_then_1() { + let result = + None + |> and_then(try_decrement) + result == None +} + +test and_then_2() { + let result = + Some(14) + |> and_then(try_decrement) + result == Some(13) +} + +test and_then_3() { + let result = + Some(0) + |> and_then(try_decrement) + result == None +} + +/// Picks the first element which is not None. If there's no such element, return None. +/// +/// ```aiken +/// option.choice([]) == None +/// option.choice([Some(14), Some(42)]) == Some(14) +/// option.choice([None, Some(42)]) == Some(42) +/// option.choice([None, None]) == None +/// ``` +pub fn choice(self: List>) -> Option { + when self is { + [] -> None + [head, ..others] -> + when head is { + None -> choice(others) + _ -> head + } + } +} + +test choice_1() { + Some(1) == choice([Some(1), Some(2)]) +} + +test choice_2() { + None == choice([]) +} + +test choice_3() { + Some(1) == choice([None, Some(1)]) +} + +/// Converts from `Option>` to `Option`. +/// +/// ```aiken +/// option.flatten(Some(Some(42))) == Some(42) +/// option.flatten(Some(None)) == None +/// option.flatten(None) == None +/// ``` +/// +/// Flattening only removes one level of nesting at a time: +/// +/// ```aiken +/// flatten(Some(Some(Some(42)))) == Some(Some(42)) +/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) +/// ``` +pub fn flatten(opt: Option>) -> Option { + when opt is { + Some(inner) -> inner + None -> None + } +} + +test flatten_1() { + let x: Option> = Some(Some(6)) + Some(6) == flatten(x) +} + +test flatten_2() { + let x: Option> = Some(None) + None == flatten(x) +} + +test flatten_3() { + let x: Option> = None + None == flatten(x) +} + +test flatten_4() { + let x: Option>> = Some(Some(Some(6))) + + let result = + x + |> flatten + |> flatten + + Some(6) == result +} + +/// Apply a function to the inner value of an [`Option`](#option) +/// +/// ```aiken +/// option.map(None, fn(n) { n * 2 }) == None +/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) +/// ``` +pub fn map(self: Option, with: fn(a) -> result) -> Option { + when self is { + None -> None + Some(a) -> Some(with(a)) + } +} + +test map_1() { + map(None, fn(_) { Void }) == None +} + +test map_2() { + map(Some(14), fn(n) { n + 1 }) == Some(15) +} + +/// Combine two [`Option`](#option) together. 
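+/// Returns `None` as soon as either of the two options is `None`.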
+/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int) +/// } +/// +/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) +/// option.map2(None, Some(42), Foo) == None +/// option.map2(Some(14), None, Foo) == None +/// ``` +pub fn map2( + opt_a: Option, + opt_b: Option, + with: fn(a, b) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> Some(with(a, b)) + } + } +} + +test map2_1() { + map2(None, Some(42), fn(_, _) { 14 }) == None +} + +test map2_2() { + map2(Some(42), None, fn(_, _) { 14 }) == None +} + +test map2_3() { + map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) +} + +/// Combine three [`Option`](#option) together. +/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int, Int) +/// } +/// +/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) +/// option.map3(None, Some(42), Some(1337), Foo) == None +/// option.map3(Some(14), None, None, Foo) == None +/// ``` +pub fn map3( + opt_a: Option, + opt_b: Option, + opt_c: Option, + with: fn(a, b, c) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> + when opt_c is { + None -> None + Some(c) -> Some(with(a, b, c)) + } + } + } +} + +test map3_1() { + map3(None, Some(42), None, fn(_, _, _) { 14 }) == None +} + +test map3_2() { + map3(Some(42), None, None, fn(_, _, _) { 14 }) == None +} + +test map3_3() { + map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) +} + +/// Like [`or_else`](#or_else) but allows returning an `Option`. +/// This is effectively mapping the error branch. +/// +/// ```aiken +/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") +/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) +/// option.or_try(None, fn (_) { fail }) => 💥 +/// ``` +pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { + when self is { + None -> compute_default() + _ -> self + } +} + +test or_try_1() { + or_try(None, fn() { Some("aiken") }) == Some("aiken") +} + +test or_try_2() { + or_try(Some(42), fn() { fail }) == Some(42) +} + +// ## Transforming + +/// Provide a default value, turning an optional value into a normal value. +/// +/// ```aiken +/// option.or_else(None, "aiken") == "aiken" +/// option.or_else(Some(42), 14) == 42 +/// ``` +pub fn or_else(self: Option, default: a) -> a { + when self is { + None -> default + Some(a) -> a + } +} + +test or_else_1() { + or_else(None, "aiken") == "aiken" +} + +test or_else_2() { + or_else(Some(42), 14) == 42 +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak new file mode 100644 index 00000000..d2f125f5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak @@ -0,0 +1,668 @@ +use aiken/builtin +use aiken/math +use aiken/option + +pub type Byte = + Int + +// ## Constructing + +/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is left-padded with zeroes. 
+/// +/// ```aiken +/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" +/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" +/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self) +} + +test from_int_big_endian_1() { + from_int_big_endian(1_000_000, 3) == #"0f4240" +} + +test from_int_big_endian_2() { + from_int_big_endian(1_000_000, 5) == #"00000f4240" +} + +test from_int_big_endian_3() { + from_int_big_endian(0, 8) == #"0000000000000000" +} + +test from_int_big_endian_4() fail { + from_int_big_endian(1_000_000, 1) == #"40" +} + +/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is right-padded with zeroes. +/// +/// ```aiken +/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" +/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" +/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self) +} + +test from_int_little_endian_1() { + from_int_little_endian(1_000_000, 3) == #"40420f" +} + +test from_int_little_endian_2() { + from_int_little_endian(1_000_000, 5) == #"40420f0000" +} + +test from_int_little_endian_3() { + from_int_little_endian(0, 8) == #"0000000000000000" +} + +test from_int_little_endian_4() fail { + from_int_little_endian(1_000_000, 1) == #"40" +} + +/// Convert a `String` into a `ByteArray`. +/// +/// ```aiken +/// bytearray.from_string(@"ABC") == #"414243" +/// ``` +pub fn from_string(str: String) -> ByteArray { + builtin.encode_utf8(str) +} + +test from_string_1() { + from_string(@"") == "" +} + +test from_string_2() { + from_string(@"ABC") == #"414243" +} + +/// Add a byte element in front of a `ByteArray`. When the given byte is +/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so +/// forth. +/// In PlutusV3 this will error instead of wrapping around. +/// +/// ```aiken +/// bytearray.push(#"", 0) == #"00" +/// bytearray.push(#"0203", 1) == #"010203" +/// bytearray.push(#"0203", 257) == #"010203" +/// ``` +pub fn push(self: ByteArray, byte: Byte) -> ByteArray { + builtin.cons_bytearray(byte, self) +} + +test push_1() { + push(#[], 0) == #[0] +} + +test push_2() { + push(#[2, 3], 1) == #[1, 2, 3] +} + +test push_3() fail { + let x = 257 + push(#[2, 3], x) == #[1, 2, 3] +} + +// ## Inspecting + +/// Get the `Byte` at the given index, or crash. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if there's no byte at the given index. +pub fn at(self: ByteArray, index: Int) -> Byte { + builtin.index_bytearray(self, index) +} + +/// Search the start and end positions of a sub-array in a `ByteArray`. 
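+/// The returned positions are 0-based and inclusive.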
+/// +/// ```aiken +/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) +/// bytearray.index_of("Hello, World!", "foo") == None +/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) +/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) +/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +/// ``` +pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { + let offset = length(bytes) + + do_index_of(self, bytes, 0, offset, length(self)) + |> option.map(fn(ix) { (ix, ix + offset - 1) }) +} + +fn do_index_of( + self: ByteArray, + bytes: ByteArray, + cursor: Int, + offset: Int, + size: Int, +) -> Option { + if cursor + offset > size { + None + } else { + if builtin.slice_bytearray(cursor, offset, self) == bytes { + Some(cursor) + } else { + do_index_of(self, bytes, cursor + 1, offset, size) + } + } +} + +test index_of_1() { + index_of("Hello, World!", "World") == Some((7, 11)) +} + +test index_of_2() { + index_of("Hello, World!", "foo") == None +} + +test index_of_3() { + index_of("Hello, World!", "!") == Some((12, 12)) +} + +test index_of_4() { + index_of("Hello, World!", "o") == Some((4, 4)) +} + +test index_of_5() { + index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +} + +/// Returns `True` when the given `ByteArray` is empty. +/// +/// ```aiken +/// bytearray.is_empty(#"") == True +/// bytearray.is_empty(#"00ff") == False +/// ``` +pub fn is_empty(self: ByteArray) -> Bool { + builtin.length_of_bytearray(self) == 0 +} + +test is_empty_1() { + is_empty(#"") == True +} + +test is_empty_2() { + is_empty(#"01") == False +} + +/// Returns the number of bytes in a `ByteArray`. +/// +/// ```aiken +/// bytearray.length(#[1, 2, 3]) == 3 +/// ``` +pub fn length(self: ByteArray) -> Int { + builtin.length_of_bytearray(self) +} + +test length_1() { + length(#"") == 0 +} + +test length_2() { + length(#"010203") == 3 +} + +/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. +/// +/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the +/// following bits sequence: +/// +/// `8` | `b` | `7` | `6` | `5` | `f` +/// --- | --- | --- | --- | --- | --- +/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` +/// +/// And thus, we have: +/// +/// ```aiken +/// test_bit(#"8b765f", 0) == True +/// test_bit(#"8b765f", 1) == False +/// test_bit(#"8b765f", 2) == False +/// test_bit(#"8b765f", 3) == False +/// test_bit(#"8b765f", 7) == True +/// test_bit(#"8b765f", 8) == False +/// test_bit(#"8b765f", 20) == True +/// test_bit(#"8b765f", 21) == True +/// test_bit(#"8b765f", 22) == True +/// test_bit(#"8b765f", 23) == True +/// ``` +pub fn test_bit(self: ByteArray, ix: Int) -> Bool { + builtin.less_than_equals_bytearray( + #[128], + builtin.cons_bytearray( + builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, + "", + ), + ) +} + +test test_bit_0() { + test_bit(#"8b765f", 0) +} + +test test_bit_1() { + !test_bit(#"8b765f", 1) +} + +test test_bit_2() { + !test_bit(#"8b765f", 2) +} + +test test_bit_3() { + !test_bit(#"8b765f", 3) +} + +test test_bit_7() { + test_bit(#"8b765f", 7) +} + +test test_bit_8() { + !test_bit(#"8b765f", 8) +} + +test test_bit_20_21_22_23() { + and { + test_bit(#"8b765f", 20), + test_bit(#"8b765f", 21), + test_bit(#"8b765f", 22), + test_bit(#"8b765f", 23), + } +} + +// ## Modifying + +/// Returns the suffix of a `ByteArray` after `n` elements. 
+/// +/// ```aiken +/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] +/// ``` +pub fn drop(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) +} + +test drop_1() { + let x = #"01020304050607" + drop(x, 2) == #"0304050607" +} + +test drop_2() { + let x = #"01020304050607" + drop(x, 0) == x +} + +test drop_3() { + let x = #"01" + drop(x, 1) == #"" +} + +test drop_4() { + let x = #"" + drop(x, 2) == #"" +} + +/// Extract a `ByteArray` as a slice of another `ByteArray`. +/// +/// Indexes are 0-based and inclusive. +/// +/// ```aiken +/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] +/// ``` +pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { + builtin.slice_bytearray(start, end - start + 1, self) +} + +test slice_1() { + slice(#"", 1, 2) == #"" +} + +test slice_2() { + slice(#"010203", 1, 2) == #"0203" +} + +test slice_3() { + slice(#"010203", 0, 42) == #"010203" +} + +test slice_4() { + slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] +} + +test slice_5() { + slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] +} + +/// Returns the n-length prefix of a `ByteArray`. +/// +/// ```aiken +/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] +/// ``` +pub fn take(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(0, n, self) +} + +test take_1() { + let x = #"01020304050607" + take(x, 2) == #"0102" +} + +test take_2() { + let x = #"01020304050607" + take(x, 0) == #"" +} + +test take_3() { + let x = #"01" + take(x, 1) == x +} + +test take_4() { + let x = #"010203" + take(x, 0) == #"" +} + +// ## Combining + +/// Combine two `ByteArray` together. +/// +/// ```aiken +/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { + builtin.append_bytearray(left, right) +} + +test concat_1() { + concat(#"", #"") == #"" +} + +test concat_2() { + concat(#"", #"01") == #"01" +} + +test concat_3() { + concat(#"0102", #"") == #"0102" +} + +test concat_4() { + concat(#"0102", #"0304") == #"01020304" +} + +/// Compare two bytearrays lexicographically. +/// +/// ```aiken +/// bytearray.compare(#"00", #"FF") == Less +/// bytearray.compare(#"42", #"42") == Equal +/// bytearray.compare(#"FF", #"00") == Greater +/// ``` +pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { + if builtin.less_than_bytearray(left, right) { + Less + } else if builtin.equals_bytearray(left, right) { + Equal + } else { + Greater + } +} + +// ## Transforming + +/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
+/// +/// ```aiken +/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] +/// ``` +pub fn foldl( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) +} + +fn do_foldl( + self: ByteArray, + zero: result, + len: Int, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor == len { + zero + } else { + do_foldl( + self, + with(builtin.index_bytearray(self, cursor), zero), + len, + cursor + 1, + with, + ) + } +} + +test foldl_1() { + foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldl_2() { + foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +} + +test foldl_3() { + foldl( + #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", + 0, + fn(byte, acc) { acc * 256 + byte }, + ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 +} + +test foldl_4() { + foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] +} + +/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. +/// +/// ```aiken +/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] +/// ``` +pub fn foldr( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) +} + +fn do_foldr( + self: ByteArray, + zero: result, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor < 0 { + zero + } else { + do_foldr( + self, + with(builtin.index_bytearray(self, cursor), zero), + cursor - 1, + with, + ) + } +} + +test foldr_1() { + foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldr_2() { + foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +} + +test foldr_3() { + foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] +} + +/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] +/// ``` +pub fn reduce( + self: ByteArray, + zero: result, + with: fn(result, Int) -> result, +) -> result { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce(#[], #[], push) == #[] +} + +test reduce_2() { + reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] +} + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_big_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(True, self) +} + +test to_int_big_endian_1() { + to_int_big_endian(#"0f4240") == 1_000_000 +} + +test to_int_big_endian_2() { + to_int_big_endian(#"00000f4240") == 1_000_000 +} + +test to_int_big_endian_3() { + to_int_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
+/// +/// ```aiken +/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 +/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_little_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(False, self) +} + +test to_int_little_endian_1() { + to_int_little_endian(#"40420f") == 1_000_000 +} + +test to_int_little_endian_2() { + to_int_little_endian(#"40420f0000") == 1_000_000 +} + +test to_int_little_endian_3() { + to_int_little_endian(#"0000000000000000") == 0 +} + +/// Convert a `ByteArray` into a `String`. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). +/// +/// ```aiken +/// bytearray.to_string(#"414243") == "ABC" +/// bytearray.to_string(some_hash) => 💥 +/// ``` +pub fn to_string(self: ByteArray) -> String { + builtin.decode_utf8(self) +} + +test to_string_1() { + to_string("") == @"" +} + +test to_string_2() { + to_string("ABC") == @"ABC" +} + +/// Encode a `ByteArray` as a hexidecimal `String`. +/// +/// ```aiken +/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" +/// ``` +pub fn to_hex(self: ByteArray) -> String { + self + |> encode_base16(builtin.length_of_bytearray(self) - 1, "") + |> builtin.decode_utf8 +} + +test to_hex_1() { + to_hex("Hello world!") == @"48656C6C6F20776F726C6421" +} + +test to_hex_2() { + to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" +} + +/// Checks whether a `ByteArray` starts with a given prefix. +/// +/// ```aiken +/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True +/// bytearray.starts_with("", prefix: "") == True +/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False +/// ``` +pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { + let prefix_length = length(prefix) + if length(self) < prefix_length { + False + } else { + take(self, prefix_length) == prefix + } +} + +test starts_with_1() { + starts_with("", "") +} + +test starts_with_2() { + starts_with("Hello, World!", "Hello, World!") +} + +test starts_with_3() { + !starts_with("Hello, World!", "hello") +} + +test starts_with_4() { + !starts_with("", "World") +} + +test starts_with_5() { + starts_with("Hello, World", "Hello") +} + +test starts_with_6() { + !starts_with("foo", "foo_") +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak new file mode 100644 index 00000000..217749e9 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak @@ -0,0 +1,156 @@ +use aiken/builtin.{bytearray_to_integer, decode_utf8} +use aiken/math +use aiken/option +use aiken/primitive/bytearray + +// ## Combining + +/// Compare two integers. 
+/// +/// ```aiken +/// int.compare(14, 42) == Less +/// int.compare(14, 14) == Equal +/// int.compare(42, 14) == Greater +/// ``` +pub fn compare(left: Int, right: Int) -> Ordering { + if left < right { + Less + } else if left > right { + Greater + } else { + Equal + } +} + +// ## Transforming + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_big_endian(self: ByteArray) -> Int { + bytearray_to_integer(True, self) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"0f4240") == 1_000_000 +} + +test from_bytearray_big_endian_2() { + from_bytearray_big_endian(#"00000f4240") == 1_000_000 +} + +test from_bytearray_big_endian_3() { + from_bytearray_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 +/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_little_endian(self: ByteArray) -> Int { + bytearray_to_integer(False, self) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"40420f") == 1_000_000 +} + +test from_bytearray_little_endian_2() { + from_bytearray_little_endian(#"40420f0000") == 1_000_000 +} + +test from_bytearray_little_endian_3() { + from_bytearray_little_endian(#"0000000000000000") == 0 +} + +/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. +/// +/// ```aiken +/// int.from_utf8("14") == Some(14) +/// int.from_utf8("-42") == Some(-42) +/// int.from_utf8("007") == Some(7) +/// int.from_utf8("foo") == None +/// int.from_utf8("1.0") == None +/// int.from_utf8("1-2") == None +/// ``` +pub fn from_utf8(bytes: ByteArray) -> Option { + bytes + |> bytearray.foldr( + Some((0, 0)), + fn(byte, st) { + when st is { + None -> None + Some((n, e)) -> + if byte < 48 || byte > 57 { + if byte == 45 { + Some((-n, 0)) + } else { + None + } + } else if n < 0 { + None + } else { + let digit = byte - 48 + Some((n + digit * math.pow(10, e), e + 1)) + } + } + }, + ) + |> option.map(fn(tuple) { tuple.1st }) +} + +test from_utf8_1() { + from_utf8("0017") == Some(17) +} + +test from_utf8_2() { + from_utf8("42") == Some(42) +} + +test from_utf8_3() { + from_utf8("1337") == Some(1337) +} + +test from_utf8_4() { + from_utf8("-14") == Some(-14) +} + +test from_utf8_5() { + from_utf8("foo") == None +} + +test from_utf8_6() { + from_utf8("1-2") == None +} + +/// Convert an `Int` to its `String` representation. 
+/// +/// ```aiken +/// int.to_string(42) == @"42" +/// ``` +pub fn to_string(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test to_string_1() { + to_string(0) == @"0" +} + +test to_string_2() { + to_string(5) == @"5" +} + +test to_string_3() { + to_string(42) == @"42" +} + +test to_string_4() { + to_string(200) == @"200" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak new file mode 100644 index 00000000..35fa5567 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak @@ -0,0 +1,139 @@ +use aiken/builtin.{ + append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, +} + +// ## Constructing + +/// Convert a `ByteArray` into a `String` +/// +/// > [!WARNING] +/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). +/// +/// ```aiken +/// string.from_bytearray("foo") == @"foo" +/// string.from_bytearray(#"666f6f") == @"foo" +/// string.from_bytearray(some_hash) -> fail +/// ``` +pub fn from_bytearray(bytes: ByteArray) -> String { + decode_utf8(bytes) +} + +test from_bytearray_1() { + from_bytearray(#[]) == @"" +} + +test from_bytearray_2() { + from_bytearray(#[65, 66, 67]) == @"ABC" +} + +test from_bytearray_3() { + from_bytearray("ABC") == @"ABC" +} + +/// Convert an `Int` to its `String` representation. +/// +/// ```aiken +/// string.from_int(42) == @"42" +/// ``` +pub fn from_int(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test from_int_1() { + from_int(0) == @"0" +} + +test from_int_2() { + from_int(5) == @"5" +} + +test from_int_3() { + from_int(42) == @"42" +} + +test from_int_4() { + from_int(200) == @"200" +} + +// ## Combining + +/// Combine two `String` together. +/// +/// ```aiken +/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" +/// ``` +pub fn concat(left: String, right: String) -> String { + append_string(left, right) +} + +test concat_1() { + concat(@"", @"") == @"" +} + +test concat_2() { + concat(@"", @"foo") == concat(@"foo", @"") +} + +test concat_3() { + concat(left: @"Hello", right: @", World!") == @"Hello, World!" +} + +/// Join a list of strings, separated by a given _delimiter_. 
+/// +/// ```aiken +/// string.join([], @"+") == @"" +/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" +/// ``` +pub fn join(list: List, delimiter: String) -> String { + do_join(list, encode_utf8(delimiter), #"") + |> decode_utf8 +} + +fn do_join(xs, delimiter, bytes) { + when xs is { + [] -> bytes + [x, ..rest] -> + do_join( + rest, + delimiter, + if length_of_bytearray(bytes) == 0 { + encode_utf8(x) + } else { + append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) + }, + ) + } +} + +test join_1() { + join([], @",") == @"" +} + +test join_2() { + join([@"a", @"b", @"c"], @",") == @"a,b,c" +} + +// ## Transforming + +/// Convert a `String` into a `ByteArray` +/// +/// ```aiken +/// string.to_bytearray(@"foo") == "foo" +/// ``` +pub fn to_bytearray(self: String) -> ByteArray { + encode_utf8(self) +} + +test to_bytearray_1() { + to_bytearray(@"") == "" +} + +test to_bytearray_2() { + to_bytearray(@"ABC") == #[65, 66, 67] +} + +test to_bytearray_3() { + to_bytearray(@"ABC") == "ABC" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address.ak new file mode 100644 index 00000000..0167b90f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address.ak @@ -0,0 +1,86 @@ +use aiken/crypto.{ + Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, +} + +/// A general structure for representing an on-chain `Credential`. +/// +/// Credentials are always one of two kinds: a direct public/private key +/// pair, or a script (native or Plutus). +pub type Credential { + VerificationKey(VerificationKeyHash) + Script(ScriptHash) +} + +// ## Constructing + +/// A Cardano `Address` typically holding one or two credential references. +/// +/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are +/// completely excluded from Plutus contexts. Thus, from an on-chain +/// perspective only exists addresses of type 00, 01, ..., 07 as detailed +/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). +pub type Address { + payment_credential: PaymentCredential, + stake_credential: Option, +} + +/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. +pub fn from_script(script: Hash) -> Address { + Address { payment_credential: Script(script), stake_credential: None } +} + +/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. +pub fn from_verification_key(vk: Hash) -> Address { + Address { payment_credential: VerificationKey(vk), stake_credential: None } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_key( + self: Address, + vk: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(VerificationKey(vk))), + } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). 
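+///
+/// For example, assuming `payment_script` and `stake_script` stand for arbitrary script hashes:
+///
+/// ```aiken
+/// address.from_script(payment_script)
+///   |> address.with_delegation_script(stake_script)
+/// ```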
+pub fn with_delegation_script( + self: Address, + script: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(Script(script))), + } +} + +/// Represent a type of object that can be represented either inline (by hash) +/// or via a reference (i.e. a pointer to an on-chain location). +/// +/// This is mainly use for capturing pointers to a stake credential +/// registration certificate in the case of so-called pointer addresses. +pub type Referenced { + Inline(a) + Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } +} + +/// A `StakeCredential` represents the delegation and rewards withdrawal conditions +/// associated with some stake address / account. +/// +/// A `StakeCredential` is either provided inline, or, by reference using an +/// on-chain pointer. +/// +/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). +pub type StakeCredential = + Referenced + +/// A 'PaymentCredential' represents the spending conditions associated with +/// some output. Hence, +/// +/// - a `VerificationKey` captures an output locked by a public/private key pair; +/// - and a `Script` captures an output locked by a native or Plutus script. +/// +pub type PaymentCredential = + Credential diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak new file mode 100644 index 00000000..2ebeaa91 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak @@ -0,0 +1,30 @@ +use aiken/primitive/bytearray +use cardano/address.{Credential, Script, VerificationKey} + +pub fn compare(left: Credential, right: Credential) -> Ordering { + when left is { + Script(left) -> + when right is { + Script(right) -> bytearray.compare(left, right) + _ -> Less + } + VerificationKey(left) -> + when right is { + Script(_) -> Greater + VerificationKey(right) -> bytearray.compare(left, right) + } + } +} + +test compare_matrix() { + and { + (compare(Script(""), Script("")) == Equal)?, + (compare(VerificationKey(""), VerificationKey("")) == Equal)?, + (compare(Script(""), VerificationKey("")) == Less)?, + (compare(VerificationKey(""), Script("")) == Greater)?, + (compare(Script("01"), Script("02")) == Less)?, + (compare(Script("02"), Script("01")) == Greater)?, + (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, + (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak new file mode 100644 index 00000000..664a3983 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak @@ -0,0 +1,920 @@ +use aiken/builtin +use aiken/collection/dict.{Dict, from_ascending_pairs_with} +use aiken/collection/list +use aiken/crypto.{Blake2b_224, Hash, Script} +use aiken/option + +/// Lovelace is now a type wrapper for Int. +pub type Lovelace = + Int + +/// A type-alias for a `PolicyId`. 
A `PolicyId` is always 28-byte long +pub type PolicyId = + Hash + +/// Ada, the native currency, isn't associated with any `PolicyId` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_policy_id = "" + +/// A type-alias for 'AssetName`, which are free-form byte-arrays between +/// 0 and 32 bytes. +pub type AssetName = + ByteArray + +/// Ada, the native currency, isn't associated with any `AssetName` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_asset_name = "" + +/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). +/// +/// > [!IMPORTANT] +/// > This type maintain some invariants by construction; in particular, a `Value` will never contain a +/// zero quantity of a particular token. +pub opaque type Value { + inner: Dict>, +} + +// ## Constructing + +/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) +/// and a given quantity. +pub fn from_asset( + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + zero + } else { + let asset = + dict.empty + |> dict.insert(asset_name, quantity) + dict.empty + |> dict.insert(policy_id, asset) + |> Value + } +} + +/// Promote an arbitrary list of assets into a `Value`. This function fails +/// (i.e. halts the program execution) if: +/// +/// - there's any duplicate amongst `PolicyId`; +/// - there's any duplicate amongst `AssetName`; +/// - the `AssetName` aren't sorted in ascending lexicographic order; or +/// - any asset quantity is null. +/// +/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, +/// while checking for internal invariants. +pub fn from_asset_list(xs: Pairs>) -> Value { + xs + |> list.foldr( + dict.empty, + fn(inner, acc) { + expect Pair(p, [_, ..] as x) = inner + x + |> from_ascending_pairs_with(fn(v) { v != 0 }) + |> dict.insert_with( + acc, + p, + _, + fn(_, _, _) { + fail @"Duplicate policy in the asset list." 
+ }, + ) + }, + ) + |> Value +} + +test from_asset_list_1() { + let v = from_asset_list([]) + v == zero +} + +test from_asset_list_2() fail { + let v = from_asset_list([Pair(#"33", [])]) + v == zero +} + +test from_asset_list_3() fail { + let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) + v != zero +} + +test from_asset_list_4() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) + flatten(v) == [(#"33", #"", 1)] +} + +test from_asset_list_5() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) + flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] +} + +test from_asset_list_6() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + ], + ) + v != zero +} + +test from_asset_list_7() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), + ], + ) + v != zero +} + +test from_asset_list_8() { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +test from_asset_list_9() { + let v = + from_asset_list( + [ + Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +/// Construct a `Value` from a lovelace quantity. +/// +/// Friendly reminder: 1 Ada = 1.000.000 Lovelace +pub fn from_lovelace(quantity: Int) -> Value { + from_asset(ada_policy_id, ada_asset_name, quantity) +} + +/// Construct an empty `Value` with nothing in it. +pub const zero: Value = Value { inner: dict.empty } + +// ## Inspecting + +/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. +pub fn is_zero(self: Value) -> Bool { + self == zero +} + +/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. +/// The second parameter is provided as `Data`, allowing to conveniently compare serialized +/// datums or similar structurually equivalent types (such as `Pairs>`). +/// +/// The third argument is a callback function to assert the left and right lovelace +/// quantities. Its first argument refers to the quantity of the first argument of +/// `match`, and the second argument of the callback to the quantity of the second +/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. 
+/// +/// ```aiken +/// const value: Value = +/// assets.from_lovelace(30) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// const datum: Data = +/// assets.from_lovelace(20) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// True == assets.match(value, datum, >=) +/// +/// False == assets.match(value, datum, ==) +/// +/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { +/// 2 * datum_lovelace >= value_lovelace +/// }) +/// ``` +pub fn match( + left: Value, + right: Data, + assert_lovelace: fn(Lovelace, Lovelace) -> Bool, +) -> Bool { + builtin.choose_data( + right, + False, + { + let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) + let left_assets: Data = left_assets + let left_lovelace = + when left_lovelace is { + Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd + None -> 0 + } + when builtin.un_map_data(right) is { + [] -> left_assets == right && assert_lovelace(left_lovelace, 0) + [first_asset, ..right_assets] -> + if first_asset.1st == builtin.b_data(ada_policy_id) { + and { + assert_lovelace( + left_lovelace, + builtin.un_i_data( + builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, + ), + ), + left_assets == builtin.map_data(right_assets), + } + } else { + and { + assert_lovelace(left_lovelace, 0), + left_assets == right, + } + } + } + }, + False, + False, + False, + ) +} + +const fixture_match_value: Value = + zero + |> add(ada_policy_id, ada_asset_name, 42) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_foo_02: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("bar", "01", 42) + +const fixture_match_data_altered_foo_01: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 14) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_bar: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + +const fixture_match_data_extra_policy: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + |> add("baz", "01", 1) + +const fixture_match_data_extra_asset: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("foo", "03", 1) + |> add("bar", "01", 42) + +const fixture_match_data_no_assets: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + +test match_1() { + match(fixture_match_value, fixture_match_data, fn(_, _) { True }) +} + +test match_2() { + !match( + fixture_match_value, + fixture_match_data, + fn(source, target) { source == target }, + ) +} + +test match_3() { + !match( + fixture_match_value, + fixture_match_data_missing_foo_02, + fn(_, _) { True }, + ) +} + +test match_4() { + !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) +} + +test match_5() { + !match( + fixture_match_value, + fixture_match_data_altered_foo_01, + fn(_, _) { True }, + ) +} + +test match_6() { + !match( + fixture_match_value, + fixture_match_data_extra_policy, + fn(_, _) { True }, + ) +} + +test match_7() { + !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) +} + +test match_8() { + 
!match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) +} + +test match_9() { + match(zero, zero, ==) +} + +test match_10() { + match( + without_lovelace(fixture_match_value), + without_lovelace(fixture_match_value), + fn(left, right) { left == 0 && right == 0 }, + ) +} + +test match_11() { + match( + without_lovelace(fixture_match_value), + fixture_match_value, + fn(left, right) { left == 0 && right > 0 }, + ) +} + +test match_12() { + match( + fixture_match_value, + without_lovelace(fixture_match_value), + fn(left, right) { left > 0 && right == 0 }, + ) +} + +test match_13() { + match( + zero |> add(ada_policy_id, ada_asset_name, 42), + zero, + fn(left, right) { left == 42 && right == 0 }, + ) +} + +test match_14() { + match( + zero, + zero |> add(ada_policy_id, ada_asset_name, 42), + fn(left, right) { left == 0 && right == 42 }, + ) +} + +const fixture_match_benchmark_left: Value = + zero + |> add(ada_policy_id, ada_asset_name, 1337) + |> add( + #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", + "MATTR", + 200, + ) + |> add( + #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", + "ProphecyPoster076", + 1, + ) + |> add( + #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", + "BISON", + 12_004_999_999, + ) + |> add( + #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", + "SNEK", + 1486, + ) + |> add( + #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", + "MAYZSilverFoundersEdition0035", + 1, + ) + |> add( + #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", + "CardanoSpaceSession", + 20, + ) + |> add( + #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", + "ADAOGovernanceToken", + 1, + ) + |> add( + #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", + "HOSKY", + 400_001_000, + ) + |> add( + #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", + "LQ", + 10_635_899, + ) + |> add( + #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", + "GrandmasterAdventurer659", + 1, + ) + |> add( + #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", + "AdventurerOfThiolden8105", + 1, + ) + |> add( + #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", + "DiscoSolaris3725", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld535", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1213", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1518", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1537", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld4199", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld3767", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae1", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae2", + 1, + ) + +const fixture_match_benchmark_right: Data = fixture_match_benchmark_left + +test match_benchmark() { + match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) +} + +test match_benchmark_vs() { + let data: Data = fixture_match_benchmark_right + expect pairs: Pairs> = data + fixture_match_benchmark_left == from_asset_list(pairs) +} + +/// A specialized version of `quantity_of` for the Ada currency. 
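+///
+/// For example (a small illustrative sketch, reusing only functions defined
+/// in this module):
+///
+/// ```aiken
+/// let v =
+///   assets.from_lovelace(2_000_000)
+///     |> assets.add("foo", "bar", 1)
+///
+/// assets.lovelace_of(v) == 2_000_000
+/// ```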
+pub fn lovelace_of(self: Value) -> Int { + quantity_of(self, ada_policy_id, ada_asset_name) +} + +/// A list of all token policies in that Value with non-zero tokens. +pub fn policies(self: Value) -> List { + dict.keys(self.inner) +} + +/// Extract the quantity of a given asset. +pub fn quantity_of( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, +) -> Int { + self.inner + |> dict.get(policy_id) + |> option.and_then(dict.get(_, asset_name)) + |> option.or_else(0) +} + +/// Get all tokens associated with a given policy. +pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { + self.inner + |> dict.get(policy_id) + |> option.or_else(dict.empty) +} + +// ## Combining + +/// Add a (positive or negative) quantity of a single token to a assets. +/// This is more efficient than [`merge`](#merge) for a single asset. +pub fn add( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + self + } else { + let helper = + fn(_, left, _right) { + let inner_result = + dict.insert_with( + left, + asset_name, + quantity, + fn(_k, ql, qr) { + let q = ql + qr + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(inner_result) { + None + } else { + Some(inner_result) + } + } + + Value( + dict.insert_with( + self.inner, + policy_id, + dict.from_ascending_pairs([Pair(asset_name, quantity)]), + helper, + ), + ) + } +} + +test add_1() { + let v = + zero + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -321) + v == zero +} + +test add_2() { + let v = + from_lovelace(123) + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -1 * 321) + v == from_lovelace(123) +} + +test add_3() { + let v = + from_lovelace(1) + |> add(ada_policy_id, ada_asset_name, 2) + |> add(ada_policy_id, ada_asset_name, 3) + v == from_lovelace(6) +} + +test add_4() { + let v = + zero + |> add(#"acab", #"beef", 0) + v == zero +} + +test add_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + |> add(#"acab", #"beef", 0) + v == zero +} + +/// Combine two `Value` together. +pub fn merge(left v0: Value, right v1: Value) -> Value { + Value( + dict.union_with( + v0.inner, + v1.inner, + fn(_, a0, a1) { + let result = + dict.union_with( + a0, + a1, + fn(_, q0, q1) { + let q = q0 + q1 + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(result) { + None + } else { + Some(result) + } + }, + ), + ) +} + +test merge_1() { + let v1 = from_lovelace(1) + let v2 = from_lovelace(-1) + merge(v1, v2) == zero +} + +test merge_2() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"01", #"", 2) + let v3 = from_asset(#"02", #"", 3) + let v = + from_lovelace(42) + |> merge(v3) + |> merge(v1) + |> merge(v2) + + flatten(v) == [ + (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), + ] +} + +test merge_3() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + let v3 = from_asset(#"01", #"", 1) + + let v = + zero + |> merge(v1) + |> merge(v2) + |> merge(v3) + + flatten(v) == [(#"01", #"", 1)] +} + +test merge_4() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + + merge(v1, v2) == zero +} + +test merge_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + + merge(zero, v) == zero +} + +/// Negates quantities of all tokens (including Ada) in that `Value`. 
+/// +/// ``` +/// v1 +/// |> assets.negate +/// |> assets.merge(v1) +/// |> assets.is_zero +/// // True +/// ``` +pub fn negate(self: Value) -> Value { + dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) + |> Value +} + +/// Get a subset of the assets restricted to the given policies. +pub fn restricted_to(self: Value, mask: List) -> Value { + list.foldr( + policies(self), + zero, + fn(policy_id, value) { + if list.has(mask, policy_id) { + dict.foldr( + tokens(self, policy_id), + value, + fn(asset_name, quantity, value) { + add(value, policy_id, asset_name, quantity) + }, + ) + } else { + value + } + }, + ) +} + +test restricted_to_1() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, []) == zero +} + +test restricted_to_2() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, [ada_policy_id]) == from_lovelace(42) +} + +test restricted_to_3() { + let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) + restricted_to(self, ["foo", "bar"]) == without_lovelace(self) +} + +test restricted_to_4() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, ["foo"]) == without_lovelace(self) +} + +test restricted_to_5() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, [ada_policy_id, "foo"]) == self +} + +/// Get a `Value` excluding Ada. +pub fn without_lovelace(self: Value) -> Value { + dict.delete(self.inner, ada_policy_id) + |> Value +} + +test without_lovelace_1() { + let v = from_lovelace(1000000) + without_lovelace(v) == zero +} + +test without_lovelace_2() { + let v = from_lovelace(1000000) + let v2 = from_lovelace(50000000) + without_lovelace(v) == without_lovelace(v2) +} + +test without_lovelace_3() { + let v = + from_asset(#"010203", #"040506", 100) + |> add(ada_policy_id, ada_asset_name, 100000000) + let v2 = from_asset(#"010203", #"040506", 100) + without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 +} + +// ## Transforming + +/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. +/// +/// Handy to manipulate values as uniform lists. +pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + [(policy_id, asset_name, quantity), ..xs] + }, + ) + }, + ) +} + +/// Flatten a `Value` as a list of results, possibly discarding some along the way. +/// +/// When the transform function returns `None`, the result is discarded altogether. 
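+///
+/// For example (an illustrative sketch mirroring the tests below), keeping only
+/// the assets whose quantity is exactly 42:
+///
+/// ```aiken
+/// let v =
+///   assets.zero
+///     |> assets.add("a", "1", 14)
+///     |> assets.add("b", "", 42)
+///
+/// assets.flatten_with(
+///   v,
+///   fn(p, a, q) { if q == 42 { Some((p, a)) } else { None } },
+/// )
+/// // [("b", "")]
+/// ```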
+pub fn flatten_with(
+  self: Value,
+  with: fn(PolicyId, AssetName, Int) -> Option<result>,
+) -> List<result> {
+  dict.foldr(
+    self.inner,
+    [],
+    fn(policy_id, asset_list, value) {
+      dict.foldr(
+        asset_list,
+        value,
+        fn(asset_name, quantity, xs) {
+          when with(policy_id, asset_name, quantity) is {
+            None -> xs
+            Some(x) -> [x, ..xs]
+          }
+        },
+      )
+    },
+  )
+}
+
+test flatten_with_1() {
+  flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == []
+}
+
+test flatten_with_2() {
+  let v =
+    zero
+      |> add("a", "1", 14)
+      |> add("b", "", 42)
+      |> add("a", "2", 42)
+
+  flatten_with(
+    v,
+    fn(p, a, q) {
+      if q == 42 {
+        Some((p, a))
+      } else {
+        None
+      }
+    },
+  ) == [("a", "2"), ("b", "")]
+}
+
+/// Reduce a value into a single result.
+///
+/// ```aiken
+/// assets.zero
+///   |> assets.add("a", "1", 10)
+///   |> assets.add("b", "2", 20)
+///   |> assets.reduce(0, fn(_, _, quantity, acc) { acc + quantity })
+/// // 30
+/// ```
+pub fn reduce(
+  self: Value,
+  start: result,
+  with: fn(PolicyId, AssetName, Int, result) -> result,
+) -> result {
+  dict.foldr(
+    self.inner,
+    start,
+    fn(policy_id, asset_list, result) {
+      dict.foldr(asset_list, result, with(policy_id, _, _, _))
+    },
+  )
+}
+
+test reduce_1() {
+  let v =
+    zero
+      |> add("a", "1", 10)
+      |> add("b", "2", 20)
+  let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity })
+  result == 30
+}
+
+test reduce_2() {
+  let v =
+    zero
+      |> add("a", "1", 5)
+      |> add("a", "2", 15)
+      |> add("b", "", 10)
+  let result =
+    reduce(
+      v,
+      [],
+      fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] },
+    )
+  result == [("a", "1"), ("a", "2"), ("b", "")]
+}
+
+test reduce_3() {
+  let v = zero
+  let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity })
+  result == 1
+}
+
+/// Convert the value into a dictionary of dictionaries.
+pub fn to_dict(self: Value) -> Dict<PolicyId, Dict<AssetName, Int>> {
+  self.inner
+}
diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak
new file mode 100644
index 00000000..f0b6d258
--- /dev/null
+++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak
@@ -0,0 +1,93 @@
+use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash}
+use cardano/address.{Credential}
+use cardano/assets.{Lovelace}
+
+pub type StakePoolId =
+  Hash<Blake2b_224, VerificationKey>
+
+/// An on-chain certificate attesting to some operation. Publishing
+/// certificates triggers different kinds of rules; most of the time,
+/// they require signatures from specific keys.
+pub type Certificate {
+  /// Register a stake credential with an optional deposit amount.
+  /// The deposit is always present when using the new registration certificate
+  /// format available since the Conway era.
+  RegisterCredential {
+    credential: Credential,
+    /// > [!NOTE]
+    /// > The `deposit` ought to be an `Option<Lovelace>`, but due to unfortunate
+    /// > circumstances it will always be instantiated to `None` even when set in
+    /// > the host transaction. This is what the `Never` type captures here.
+    deposit: Never,
+  }
+  /// Unregister a stake credential with an optional refund amount.
+  /// The refund is always present when using the new de-registration certificate
+  /// format available since the Conway era.
+  UnregisterCredential {
+    credential: Credential,
+    /// > [!NOTE]
+    /// > The `refund` ought to be an `Option<Lovelace>`, but due to unfortunate
+    /// > circumstances it will always be instantiated to `None` even when set in
+    /// > the host transaction. This is what the `Never` type captures here.
+    refund: Never,
+  }
+  /// Delegate stake to a [Delegate](#Delegate).
+  DelegateCredential { credential: Credential, delegate: Delegate }
+  /// Register and delegate a staking credential to a [Delegate](#Delegate) in one certificate.
+  RegisterAndDelegateCredential {
+    credential: Credential,
+    delegate: Delegate,
+    deposit: Lovelace,
+  }
+  /// Register a delegate representative (a.k.a. DRep). The deposit is explicit and
+  /// is refunded when the delegate steps down (unregisters).
+  RegisterDelegateRepresentative {
+    delegate_representative: Credential,
+    deposit: Lovelace,
+  }
+  /// Update a delegate representative (a.k.a. DRep). The certificate also contains
+  /// metadata which aren't visible on-chain.
+  UpdateDelegateRepresentative { delegate_representative: Credential }
+  /// Unregister a delegate representative, and refund its past deposit.
+  UnregisterDelegateRepresentative {
+    delegate_representative: Credential,
+    refund: Lovelace,
+  }
+  /// Register a new stake pool.
+  RegisterStakePool {
+    /// The hash digest of the stake pool's cold (public) key.
+    stake_pool: StakePoolId,
+    /// The hash digest of the stake pool's VRF (public) key.
+    vrf: VerificationKeyHash,
+  }
+  /// Retire a stake pool. `at_epoch` indicates the epoch in which the retirement will take place.
+  RetireStakePool { stake_pool: StakePoolId, at_epoch: Int }
+  /// Authorize a hot credential as proxy for a specific constitutional committee member's cold credential.
+  AuthorizeConstitutionalCommitteeProxy {
+    constitutional_committee_member: Credential,
+    proxy: Credential,
+  }
+  /// Step down from the constitutional committee as a member.
+  RetireFromConstitutionalCommittee {
+    constitutional_committee_member: Credential,
+  }
+}
+
+/// A type of stake delegation that can be either block production, vote, or
+/// both. Note that delegation types don't cancel one another, so it is
+/// possible to delegate block production in one transaction, and delegate vote
+/// in another. This second delegation **does NOT** invalidate the first one.
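+///
+/// For example (illustrative only; the pool id below is a dummy 28-byte hash,
+/// not a real pool), delegating both block production and vote at once:
+///
+/// ```aiken
+/// DelegateBoth {
+///   stake_pool: #"00000000000000000000000000000000000000000000000000000000",
+///   delegate_representative: AlwaysAbstain,
+/// }
+/// ```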
+pub type Delegate { + DelegateBlockProduction { stake_pool: StakePoolId } + DelegateVote { delegate_representative: DelegateRepresentative } + DelegateBoth { + stake_pool: StakePoolId, + delegate_representative: DelegateRepresentative, + } +} + +pub type DelegateRepresentative { + Registered(Credential) + AlwaysAbstain + AlwaysNoConfidence +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak new file mode 100644 index 00000000..3ec96800 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak @@ -0,0 +1,109 @@ +use aiken/collection.{Index} +use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} +use aiken/math/rational.{Rational} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} +use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} + +pub type ProposalProcedure { + deposit: Lovelace, + return_address: Credential, + governance_action: GovernanceAction, +} + +pub type GovernanceAction { + ProtocolParameters { + /// The last governance action of type 'ProtocolParameters'. They must all + /// form a chain. + ancestor: Option, + /// The new proposed protocol parameters. Only values set to `Some` are relevant. + new_parameters: ProtocolParametersUpdate, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the new protocol parameters to be deemed valid. + guardrails: Option, + } + HardFork { + /// The last governance action of type `HardFork`. They must all + /// form a chain. + ancestor: Option, + /// The new proposed version. Few rules apply to proposing new versions: + /// + /// - The `major` component, if incremented, must be exactly one more than the current. + /// - The `minor` component, if incremented, must be exactly one more than the current. + /// - If the `major` component is incremented, `minor` must be set to `0`. + /// - Neither `minor` nor `major` can be decremented. + new_version: ProtocolVersion, + } + TreasuryWithdrawal { + /// A collection of beneficiaries, which can be plain verification key + /// hashes or script hashes (e.g. DAO). + beneficiaries: Pairs, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the withdrawals to be authorized. + guardrails: Option, + } + NoConfidence { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + } + ConstitutionalCommittee { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// Constitutional members to be removed. + evicted_members: List, + /// Constitutional members to be added. + added_members: Pairs, + /// The new quorum value, as a ratio of a numerator and a denominator. The + /// quorum specifies the threshold of 'Yes' votes necessary for the + /// constitutional committee to accept a proposal procedure. + quorum: Rational, + } + NewConstitution { + /// The last governance action of type `Constitution` or + /// `ConstitutionalCommittee`. They must all / form a chain. 
+ ancestor: Option, + /// The new proposed constitution. + constitution: Constitution, + } + NicePoll +} + +pub type Vote { + No + Yes + Abstain +} + +pub type TransactionId = + Hash + +pub type GovernanceActionId { + transaction: TransactionId, + proposal_procedure: Index, +} + +pub type ProtocolVersion { + major: Int, + minor: Int, +} + +pub type Constitution { + guardrails: Option, +} + +/// An epoch number after which constitutional committee member +/// mandate expires. +pub type Mandate = + Int + +pub type Voter { + ConstitutionalCommitteeMember(Credential) + DelegateRepresentative(Credential) + StakePool(VerificationKeyHash) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak new file mode 100644 index 00000000..d9e7be95 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak @@ -0,0 +1,360 @@ +use aiken/math/rational.{Rational} +use cardano/assets.{Lovelace} + +pub opaque type ProtocolParametersUpdate { + inner: Pairs, +} + +pub type ScriptExecutionPrices { + memory: Rational, + cpu: Rational, +} + +pub type ExecutionUnits { + memory: Int, + cpu: Int, +} + +pub type StakePoolOperatorVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Rational, + Void, + Void, + Void, + Void, + >, +} + +pub type DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + constitution: Rational, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Void, + Rational, + Rational, + Rational, + Rational, + >, + treasury_withdrawal: Rational, +} + +pub type ProtocolParametersThresholds< + security, + network, + economic, + technical, + governance, +> { + security_group: security, + network_group: network, + economic_group: economic, + technical_group: technical, + governance_group: governance, +} + +pub type ConstitutionalCommitteeThresholds { + default: Rational, + under_no_confidence: Rational, +} + +/// The linear coefficient that intervenes in the transaction fee calculation. +/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. +pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 0, into_int) +} + +/// The constant factor that intervenes in the transaction fee calculation. It is +/// a flat cost of lovelace that is added to every fee calculation. +pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 1, into_int) +} + +/// The maximum size of a serialized block body, expressed in bytes. +pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 2, into_int) +} + +/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. +pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 3, into_int) +} + +/// The maximum size of a serialized block header, expressed in bytes. 
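+///
+/// For example (an illustrative sketch; `update` is assumed to be a
+/// `ProtocolParametersUpdate` taken from a `ProtocolParameters` governance
+/// action), a guardrails-style check could read:
+///
+/// ```aiken
+/// when protocol_parameters.max_block_header_size(update) is {
+///   // The proposal does not change this parameter.
+///   None -> True
+///   // Reject unreasonably large block headers.
+///   Some(new_size) -> new_size <= 5_000
+/// }
+/// ```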
+pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 4, into_int)
+}
+
+/// The required deposit amount when registering stake credentials, expressed in Lovelace.
+pub fn stake_credential_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 5, into_int)
+}
+
+/// The required deposit amount when registering a stake pool, expressed in Lovelace.
+pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 6, into_int)
+}
+
+/// The maximum number of epochs in the future allowed for a stake pool retirement to be scheduled.
+pub fn stake_pool_retirement_horizon(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 7, into_int)
+}
+
+/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_.
+pub fn desired_number_of_stake_pools(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 8, into_int)
+}
+
+/// A parameter controlling the influence of a pool owner's pledge on the rewards. Also known as _'a0'_.
+pub fn stake_pool_pledge_influence(
+  self: ProtocolParametersUpdate,
+) -> Option<Rational> {
+  get_protocol_param(self.inner, 9, into_rational)
+}
+
+/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentives model. Also known as _'ρ'_.
+pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option<Rational> {
+  get_protocol_param(self.inner, 10, into_rational)
+}
+
+/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_.
+pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option<Rational> {
+  get_protocol_param(self.inner, 11, into_rational)
+}
+
+/// The minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace.
+pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 16, into_int)
+}
+
+/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per byte, and is also known as the 'coins per utxo byte' parameter.
+pub fn min_utxo_deposit_coefficient(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 17, into_int)
+}
+
+/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version.
+pub fn cost_models(self: ProtocolParametersUpdate) -> Option<Data> {
+  get_protocol_param(self.inner, 18, identity)
+}
+
+/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts.
+pub fn script_execution_prices(
+  self: ProtocolParametersUpdate,
+) -> Option<ScriptExecutionPrices> {
+  get_protocol_param(self.inner, 19, into_script_execution_prices)
+}
+
+/// The maximum execution units allowed for a single transaction.
+pub fn max_transaction_execution_units(
+  self: ProtocolParametersUpdate,
+) -> Option<ExecutionUnits> {
+  get_protocol_param(self.inner, 20, into_execution_units)
+}
+
+/// The maximum execution units allowed for a single block.
+pub fn max_block_execution_units(
+  self: ProtocolParametersUpdate,
+) -> Option<ExecutionUnits> {
+  get_protocol_param(self.inner, 21, into_execution_units)
+}
+
+/// The maximum size of a serialized value in a transaction output. This effectively limits
+/// the maximum kinds of assets that can be sent in a single output.
+/// It is expressed in bytes.
+pub fn max_value_size(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 22, into_int)
+}
+
+/// The scaling factor applied to the transaction cost for defining the minimum collateral
+/// amount. It is expressed in percent points (so 100 = 100%).
+pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 23, into_int)
+}
+
+/// The maximum number of collateral inputs allowed in the transaction.
+pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 24, into_int)
+}
+
+/// The various governance voting thresholds pertaining to stake pool operators.
+pub fn stake_pool_operator_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<StakePoolOperatorVotingThresholds> {
+  get_protocol_param(self.inner, 25, into_spo_voting_thresholds)
+}
+
+/// The various governance voting thresholds pertaining to delegate representatives
+/// (a.k.a. DReps).
+pub fn delegate_representative_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<DelegateRepresentativeVotingThresholds> {
+  get_protocol_param(self.inner, 26, into_drep_voting_thresholds)
+}
+
+/// The minimum number of members in the constitutional committee. Any update of the committee
+/// must leave at least this number of members.
+pub fn min_constitutional_committee_size(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 27, into_int)
+}
+
+/// The maximum length of a constitutional committee member's mandate, expressed in number of epochs.
+pub fn max_constitutional_committee_mandate(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 28, into_int)
+}
+
+/// The lifetime of any governance proposal. An action that hasn't been approved by the end of that
+/// period is considered inactive and discarded. It is expressed in number of epochs.
+pub fn governance_proposal_lifetime(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 29, into_int)
+}
+
+/// The required deposit amount for governance proposal procedures, expressed in Lovelace.
+pub fn governance_proposal_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 30, into_int)
+}
+
+/// The required deposit amount when registering as a delegate representative, expressed in
+/// Lovelace.
+pub fn delegate_representative_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 31, into_int)
+}
+
+/// The maximum number of epochs that a delegate representative can go without voting
+/// before being considered _inactive_ and removed from threshold calculations.
+pub fn delegate_representative_max_idle_time(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 32, into_int)
+}
+
+/// The base tier fee coefficient for reference scripts. Reference scripts get increasingly
+/// more expensive every ~24KB; the base coefficient is a multiplying factor which grows
+/// exponentially with each tier.
+pub fn reference_scripts_tier_fee_initial_factor( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 33, into_rational) +} + +// Internals ------------------------------------------------------------------- + +type ProtocolParametersIndex = + Int + +fn get_protocol_param( + self: Pairs, + ix: ProtocolParametersIndex, + into: fn(Data) -> a, +) -> Option { + when self is { + [] -> None + [Pair(jx, param), ..tail] -> + if ix == jx { + Some(into(param)) + } else { + get_protocol_param(tail, ix, into) + } + } +} + +fn into_int(param: Data) -> Int { + expect param: Int = param + param +} + +fn into_rational(param: Data) -> Rational { + expect [numerator, denominator]: List = param + expect Some(r) = rational.new(numerator, denominator) + r +} + +fn into_execution_units(param: Data) -> ExecutionUnits { + expect [memory, cpu]: List = param + ExecutionUnits { memory, cpu } +} + +fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { + expect [memory, cpu]: List = param + let memory = into_rational(memory) + let cpu = into_rational(cpu) + ScriptExecutionPrices { memory, cpu } +} + +fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, hard_fork, + protocol_parameters_security_group, + ]: List = param + + StakePoolOperatorVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: into_rational(protocol_parameters_security_group), + network_group: Void, + economic_group: Void, + technical_group: Void, + governance_group: Void, + }, + } +} + +fn into_drep_voting_thresholds( + param: Data, +) -> DelegateRepresentativeVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, constitution, hard_fork, + protocol_parameters_network_group, protocol_parameters_economic_group, + protocol_parameters_technical_group, protocol_parameters_governance_group, + treasury_withdrawal, + ]: List = param + + DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + constitution: into_rational(constitution), + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: Void, + network_group: into_rational(protocol_parameters_network_group), + economic_group: into_rational(protocol_parameters_economic_group), + technical_group: into_rational(protocol_parameters_technical_group), + governance_group: into_rational(protocol_parameters_governance_group), + }, + treasury_withdrawal: into_rational(treasury_withdrawal), + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak new file mode 100644 index 00000000..e723e2d5 --- /dev/null +++ 
b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak @@ -0,0 +1,62 @@ +use aiken/primitive/bytearray +use cardano/address.{Script} +use cardano/address/credential +use cardano/governance.{ + ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, +} + +pub fn compare(left: Voter, right: Voter) -> Ordering { + when left is { + ConstitutionalCommitteeMember(left) -> + when right is { + ConstitutionalCommitteeMember(right) -> credential.compare(left, right) + _ -> Less + } + DelegateRepresentative(left) -> + when right is { + DelegateRepresentative(right) -> credential.compare(left, right) + ConstitutionalCommitteeMember(_) -> Greater + _ -> Less + } + StakePool(left) -> + when right is { + StakePool(right) -> bytearray.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let cc0 = ConstitutionalCommitteeMember(Script("0")) + let cc1 = ConstitutionalCommitteeMember(Script("1")) + + let drep0 = DelegateRepresentative(Script("0")) + let drep1 = DelegateRepresentative(Script("1")) + + let spo0 = StakePool("0") + let spo1 = StakePool("1") + + and { + (compare(cc0, cc0) == Equal)?, + (compare(cc0, cc1) == Less)?, + (compare(cc1, cc0) == Greater)?, + (compare(drep0, drep0) == Equal)?, + (compare(drep0, drep1) == Less)?, + (compare(drep1, drep0) == Greater)?, + (compare(spo0, spo0) == Equal)?, + (compare(spo0, spo1) == Less)?, + (compare(spo1, spo0) == Greater)?, + (compare(cc0, drep0) == Less)?, + (compare(cc0, drep1) == Less)?, + (compare(cc0, spo0) == Less)?, + (compare(cc0, spo1) == Less)?, + (compare(drep0, cc0) == Greater)?, + (compare(drep0, cc1) == Greater)?, + (compare(drep0, spo0) == Less)?, + (compare(drep0, spo1) == Less)?, + (compare(spo0, cc0) == Greater)?, + (compare(spo0, cc1) == Greater)?, + (compare(spo0, drep0) == Greater)?, + (compare(spo0, drep1) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak new file mode 100644 index 00000000..ff73836a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak @@ -0,0 +1,62 @@ +//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. +//// +//// > [!NOTE] +//// > Unless you know what you're doing, you should prefer using named handlers: +//// > +//// > - `mint` +//// > - `spend` +//// > - `withdraw` +//// > - `publish` +//// > - `vote` +//// > - `propose` + +use aiken/collection.{Index} +use cardano/address.{Credential} +use cardano/assets.{PolicyId} +use cardano/certificate.{Certificate} +use cardano/governance.{ProposalProcedure, Voter} +use cardano/transaction.{OutputReference, Redeemer, Transaction} + +/// A context given to a script by the Cardano ledger when being executed. +/// +/// The context contains information about the entire transaction that contains +/// the script. The transaction may also contain other scripts; to distinguish +/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a +/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what +/// purpose) the transaction is being executed. 
+pub type ScriptContext { + transaction: Transaction, + redeemer: Redeemer, + info: ScriptInfo, +} + +/// Characterizes the script information. The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option`. +pub type ScriptInfo { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Minting(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spending { output: OutputReference, datum: Option } + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdrawing(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publishing { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Voting(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Proposing { at: Index, proposal_procedure: ProposalProcedure } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak new file mode 100644 index 00000000..6511a596 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak @@ -0,0 +1,225 @@ +use aiken/builtin +use aiken/collection.{Index} +use aiken/collection/dict.{Dict} +use aiken/collection/list +use aiken/crypto.{ + Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, +} +use aiken/interval.{Interval} +use aiken/option +use cardano/address.{Address, Credential, Script, VerificationKey} +use cardano/assets.{Lovelace, PolicyId, Value} +use cardano/certificate.{Certificate} +use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} + +pub type TransactionId = + Hash + +/// Characterizes the script purpose. +pub type ScriptPurpose { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Mint(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spend(OutputReference) + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdraw(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. 
+ Publish { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Vote(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Propose { at: Index, proposal_procedure: ProposalProcedure } +} + +/// A Cardano `Transaction`, as seen by on-chain scripts. +/// +/// Note that this is a representation of a transaction, and not the 1:1 +/// translation of the transaction as seen by the ledger. In particular, +/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs +/// to bootstrap addresses or just transaction metadata. +pub type Transaction { + inputs: List, + reference_inputs: List, + outputs: List, + fee: Lovelace, + mint: Value, + certificates: List, + /// > [!IMPORTANT] + /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + withdrawals: Pairs, + validity_range: ValidityRange, + extra_signatories: List, + /// > [!IMPORTANT] + /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). + redeemers: Pairs, + datums: Dict, + id: TransactionId, + /// > [!IMPORTANT] + /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + votes: Pairs>, + proposal_procedures: List, + current_treasury_amount: Option, + treasury_donation: Option, +} + +/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. +pub type ValidityRange = + Interval + +/// An `Input` made of an output reference and, the resolved value associated with that output. +pub type Input { + output_reference: OutputReference, + output: Output, +} + +/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` +/// corresponds to the position in the output list of the transaction (identified by its id) +/// that produced that output +pub type OutputReference { + transaction_id: Hash, + output_index: Int, +} + +/// A transaction `Output`, with an address, a value and optional datums and script references. +pub type Output { + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +} + +/// An output `Datum`. +pub type Datum { + NoDatum + /// A datum referenced by its hash digest. + DatumHash(DataHash) + /// A datum completely inlined in the output. + InlineDatum(Data) +} + +/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is +/// opaque because it is user-defined and it is the script's responsibility to +/// parse it into its expected form. +pub type Redeemer = + Data + +// ## Querying + +/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in +/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own +/// input. +/// +/// ```aiken +/// validator { +/// spend(datum, redeemer, my_output_reference, self) { +/// expect Some(input) = +/// self.inputs +/// |> transaction.find_input(my_output_reference) +/// } +/// } +/// ``` +pub fn find_input( + inputs: List, + output_reference: OutputReference, +) -> Option { + inputs + |> list.find(fn(input) { input.output_reference == output_reference }) +} + +/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for +/// datums in the witness set, and then for inline datums if it doesn't find any in +/// witnesses. +pub fn find_datum( + outputs: List, + datums: Dict, + datum_hash: DataHash, +) -> Option { + datums + |> dict.get(datum_hash) + |> option.or_try( + fn() { + outputs + |> list.filter_map( + fn(output) { + when output.datum is { + InlineDatum(data) -> + if blake2b_256(builtin.serialise_data(data)) == datum_hash { + Some(data) + } else { + None + } + _ -> None + } + }, + ) + |> list.head + }, + ) +} + +/// Find all outputs that are paying into the given script hash, if any. This is useful for +/// contracts running over multiple transactions. +pub fn find_script_outputs( + outputs: List, + script_hash: ScriptHash, +) -> List { + outputs + |> list.filter( + fn(output) { + when output.address.payment_credential is { + Script(addr_script_hash) -> script_hash == addr_script_hash + VerificationKey(_) -> False + } + }, + ) +} + +// ## Testing + +/// A placeholder / empty `Transaction` to serve as a base in a transaction +/// builder. This is particularly useful for constructing test transactions. 
+/// +/// Every field is empty or null, and we have in particular: +/// +/// ```aiken +/// use aiken/interval +/// +/// transaction.placeholder.id == +/// #"0000000000000000000000000000000000000000000000000000000000000000" +/// +/// transaction.placeholder.validity_range == interval.everything +/// ``` +pub const placeholder: Transaction = + Transaction { + inputs: [], + reference_inputs: [], + outputs: [], + fee: 0, + mint: assets.zero, + certificates: [], + withdrawals: [], + validity_range: interval.everything, + extra_signatories: [], + redeemers: [], + datums: dict.empty, + id: #"0000000000000000000000000000000000000000000000000000000000000000", + votes: [], + proposal_procedures: [], + current_treasury_amount: None, + treasury_donation: None, + } diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak new file mode 100644 index 00000000..70b7550d --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak @@ -0,0 +1,23 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/transaction.{OutputReference} + +pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { + when bytearray.compare(left.transaction_id, right.transaction_id) is { + Equal -> int.compare(left.output_index, right.output_index) + ordering -> ordering + } +} + +test compare_matrix() { + and { + (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, + (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, + (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, + (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, + (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, + (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, + (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, + (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak new file mode 100644 index 00000000..4fef2cbe --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak @@ -0,0 +1,126 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/address.{Script, VerificationKey} +use cardano/address/credential +use cardano/certificate.{RegisterCredential} +use cardano/governance.{NicePoll, ProposalProcedure, StakePool} +use cardano/governance/voter +use cardano/transaction.{ + Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, +} +use cardano/transaction/output_reference + +pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { + when left is { + Mint(left) -> + when right is { + Mint(right) -> bytearray.compare(left, right) + _ -> Less + } + + Spend(left) -> + when right is { + Spend(right) -> output_reference.compare(left, right) + Mint(_) -> Greater + _ -> Less + } + + Withdraw(left) -> + when right is { + Withdraw(right) -> credential.compare(left, right) + Spend(_) | Mint(_) -> Greater + 
_ -> Less + } + + Publish(left, _) -> + when right is { + Publish(right, _) -> int.compare(left, right) + Spend(_) | Mint(_) | Withdraw(_) -> Greater + _ -> Less + } + + Vote(left) -> + when right is { + Vote(right) -> voter.compare(left, right) + Propose(..) -> Less + _ -> Greater + } + + Propose(left, _) -> + when right is { + Propose(right, _) -> int.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let mint0 = Mint("0") + let mint1 = Mint("1") + + let spend0 = Spend(OutputReference("", 0)) + let spend1 = Spend(OutputReference("", 1)) + + let withdraw0 = Withdraw(VerificationKey("0")) + let withdraw1 = Withdraw(VerificationKey("1")) + + let publish0 = Publish(0, RegisterCredential(Script(""), Never)) + let publish1 = Publish(1, RegisterCredential(Script(""), Never)) + + let vote0 = Vote(StakePool("0")) + let vote1 = Vote(StakePool("1")) + + let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) + let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) + + and { + (compare(mint0, mint0) == Equal)?, + (compare(mint0, mint1) == Less)?, + (compare(mint1, mint0) == Greater)?, + (compare(mint0, spend0) == Less)?, + (compare(mint0, withdraw0) == Less)?, + (compare(mint0, publish0) == Less)?, + (compare(mint0, vote0) == Less)?, + (compare(mint0, propose0) == Less)?, + (compare(spend0, spend0) == Equal)?, + (compare(spend0, spend1) == Less)?, + (compare(spend1, spend0) == Greater)?, + (compare(spend0, mint0) == Greater)?, + (compare(spend0, withdraw0) == Less)?, + (compare(spend0, publish0) == Less)?, + (compare(spend0, vote0) == Less)?, + (compare(spend0, propose0) == Less)?, + (compare(withdraw0, withdraw0) == Equal)?, + (compare(withdraw0, withdraw1) == Less)?, + (compare(withdraw1, withdraw0) == Greater)?, + (compare(withdraw0, mint0) == Greater)?, + (compare(withdraw0, spend0) == Greater)?, + (compare(withdraw0, publish0) == Less)?, + (compare(withdraw0, vote0) == Less)?, + (compare(withdraw0, propose0) == Less)?, + (compare(publish0, publish0) == Equal)?, + (compare(publish0, publish1) == Less)?, + (compare(publish1, publish0) == Greater)?, + (compare(publish0, mint0) == Greater)?, + (compare(publish0, spend0) == Greater)?, + (compare(publish0, withdraw0) == Greater)?, + (compare(publish0, vote0) == Less)?, + (compare(publish0, propose0) == Less)?, + (compare(vote0, vote0) == Equal)?, + (compare(vote0, vote1) == Less)?, + (compare(vote1, vote0) == Greater)?, + (compare(vote0, mint0) == Greater)?, + (compare(vote0, spend0) == Greater)?, + (compare(vote0, withdraw0) == Greater)?, + (compare(vote0, publish0) == Greater)?, + (compare(vote0, propose0) == Less)?, + (compare(propose0, propose0) == Equal)?, + (compare(propose0, propose1) == Less)?, + (compare(propose1, propose0) == Greater)?, + (compare(propose0, mint0) == Greater)?, + (compare(propose0, spend0) == Greater)?, + (compare(propose0, withdraw0) == Greater)?, + (compare(propose0, publish0) == Greater)?, + (compare(propose0, vote0) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/packages.toml b/src/components/multisig/proxy/aiken-workspace/build/packages/packages.toml new file mode 100644 index 00000000..a0f1cf2a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/packages.toml @@ -0,0 +1,9 @@ +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[packages]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" diff --git 
a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml new file mode 100644 index 00000000..89cc4e58 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml @@ -0,0 +1,50 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Build CI + +on: + # Runs on pushes targeting the default branch + push: + branches: ["main"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Pages + uses: actions/configure-pages@v5 + + - uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + - run: aiken fmt --check + - run: aiken check -D + - run: aiken docs + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + # Upload entire repository + path: "./docs" + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/release.yml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/release.yml new file mode 100644 index 00000000..db43ff65 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.github/workflows/release.yml @@ -0,0 +1,80 @@ +name: Auto Release + +on: + pull_request: + types: + - closed + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + - run: aiken fmt --check + - run: aiken check -D + - run: aiken docs + + check-version: + runs-on: ubuntu-latest + if: github.event.pull_request.merged == true + outputs: + version-updated: ${{ steps.compare-versions.outputs.version-updated }} + version: ${{ steps.compare-versions.outputs.version }} + steps: + - name: Checkout main branch at commit before merge + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.base.sha }} + + - name: Get package version from main branch before merge + id: pre-merge-version + run: | + PRE_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') + echo "pre_merge_version=$PRE_MERGE_VERSION" >> "$GITHUB_OUTPUT" + + - name: Checkout main branch at commit after merge + uses: actions/checkout@v4 + with: + ref: "main" + + - name: Get package version from main branch after merge + id: post-merge-version + run: | + POST_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') + echo "post_merge_version=$POST_MERGE_VERSION" >> "$GITHUB_OUTPUT" + + - name: Compare versions + id: compare-versions + run: | + if [[ "${{ 
steps.pre-merge-version.outputs.pre_merge_version }}" != "${{ steps.post-merge-version.outputs.post_merge_version }}" ]]; then + echo "version-updated=true" >> "$GITHUB_OUTPUT" + echo "version=${{ steps.post-merge-version.outputs.post_merge_version }}" >> "$GITHUB_OUTPUT" + else + echo "version-updated=false" >> "$GITHUB_OUTPUT" + fi + + release: + needs: [build, check-version] + if: needs.check-version.outputs.version-updated == 'true' + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Create a Release in a GitHub Action + uses: comnoco/create-release-action@v2.0.5 + with: + tag_name: ${{ needs.check-version.outputs.version }} + release_name: ${{ needs.check-version.outputs.version }} + draft: false + prerelease: false + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.gitignore b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.gitignore new file mode 100644 index 00000000..7b31be95 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/.gitignore @@ -0,0 +1,16 @@ +# Generated by Cargo +# will have compiled files and executables +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock + +# These are backup files generated by rustfmt +**/*.rs.bk + +# MSVC Windows builds of rustc generate these, which store debugging information +*.pdb + +docs \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/LICENSE b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/README.md b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/README.md new file mode 100644 index 00000000..7abc3ead --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/README.md @@ -0,0 +1,136 @@ +
+# Aiken Vodka - Library for Aiken Development
+
+ +[![Licence](https://img.shields.io/github/license/sidan-lab/vodka)](https://github.com/sidan-lab/vodka/blob/main/LICENSE) +[![Continuous Integration](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml/badge.svg?branch=main)](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml) +[![Twitter/X](https://img.shields.io/badge/Follow%20us-@sidan__lab-blue?logo=x)](https://x.com/sidan_lab) + +
+
+ +Vodka is a library built for [Aiken](https://aiken-lang.org/) development. It offers: + +1. [Cocktail](https://sidan-lab.github.io/vodka/cocktail.html) - Validation utils for writing on-chain code in Aiken +2. [Mocktail](https://sidan-lab.github.io/vodka/mocktail.html) - Unit-test utils for easily building mock values in unit tests + +## Start mixing + +Simply run + +```sh +aiken add sidan-lab/vodka --version 0.1.13 +``` + +or put the following in your `aiken.toml`: + +```toml +[[dependencies]] +name = "sidan-lab/vodka" +version = "0.1.13" +source = "github" +``` + +## Version + +The latest version of Vodka supports PlutusV3. If you need an older version compatible with a legacy Aiken compiler, please refer to the table below: + +| Vodka | Aiken Compiler | `aiken-lang/stdlib` | +| ---------- | -------------- | ------------------- | +| 0.1.13 | ^v1.1.9 | v2.2.0 | +| 0.1.6 | ^v1.1.5 | v2.1.0 | +| 0.0.1-beta | v1.0.29-alpha | v1.9.0 | + +## Vodka is pure and simple + +For your transaction: + +```rs +let Transaction { inputs, outputs, extra_signatories, .. } = context.transaction +``` + +Locating inputs & outputs: + +```rs +when (inputs_at(inputs, target_address), outputs_at(outputs, target_address)) is { + ([only_input], [only_output]) -> ... + _ -> False +} +``` + +Checking a signature: + +```rs +key_signed(extra_signatories, key_hash_required) +``` + +## Imports and function groups + +All on-chain utility functions can be imported from `cocktail` and are grouped into modules following the `vodka_` naming convention. + +```ak +use cocktail.{} +``` + +| Type | Naming Convention | +| ------------------------------------ | ----------------------------------------- | +| Address | `vodka_address.{}` | +| Value | `vodka_value.{}` | +| transaction.extra_signatories | `vodka_extra_signatories.{}` | +| transaction.inputs | `vodka_inputs.{}` | +| transaction.mints | `vodka_mints.{}` | +| transaction.outputs | `vodka_outputs.{}` | +| transaction.redeemers | `vodka_redeemers.{}` | +| transaction.validity_range | `vodka_validity_range.{}` | +| ByteArray and Int conversion & utils | `vodka_converter.{}` | + +## Taste it before the vodka cocktail: mocktail can be mixed, blended, and Meshed + +Unit tests built with vodka's mocktail directly indicate how you should build the same transaction in [whisky](https://whisky.sidan.io/) and [Mesh](https://meshjs.dev/). + +You can first taste whether your transaction passes your Aiken contract validation: + +```rs +// Mock transaction +let mock_tx: Transaction = mocktail_tx() + ... + |> required_signer_hash(is_key_provided, mock_pub_key_hex(1)) + |> complete() +``` + +Then move on and blend it in whisky: + +```rs +let mut tx = MeshTxBuilder::new_core() +tx.spending_plutus_script_v2() + ... + .required_signer_hash(key_hash) + .complete(None) +``` + +Or in Mesh: + +```ts +const txBuilder = new MeshTxBuilder(); +await txBuilder + ... + .requiredSignerHash(keyHash) + .complete(); +``` + +## CIP Support + +All CIP-supporting utilities can be imported from `cip`: + +```rs +use cip.{cip68_100} + +let reference_token_name = cip68_100(asset_name) +``` + +## Documentation + +Please refer to the [hosted documentation](https://sidan-lab.github.io/vodka/).
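+## Putting it together
+
+As a rough end-to-end sketch (not an official vodka example), the spend handler below passes only when a parameterised owner key has signed the transaction. The validator name `owner_only` and its `owner` parameter are illustrative placeholders; `key_signed` is the cocktail helper shown above, assumed here to be importable from `cocktail` as described in the imports section.
+
+```ak
+use aiken/crypto.{VerificationKeyHash}
+use cardano/transaction.{OutputReference, Transaction}
+use cocktail.{key_signed}
+
+// Illustrative validator: succeed only if `owner` signed the transaction.
+validator owner_only(owner: VerificationKeyHash) {
+  spend(_datum: Option<Data>, _redeemer: Data, _utxo: OutputReference, self: Transaction) {
+    // key_signed checks that `owner` appears in the transaction's extra_signatories.
+    key_signed(self.extra_signatories, owner)
+  }
+}
+```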
+ +![Alt](https://repobeats.axiom.co/api/embed/54410212b620c3299be792bde8965a3371348895.svg "Repobeats analytics image") diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.lock b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.lock new file mode 100644 index 00000000..31951300 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.lock @@ -0,0 +1,15 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +requirements = [] +source = "github" + +[etags] diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.toml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.toml new file mode 100644 index 00000000..2f35f2fb --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/aiken.toml @@ -0,0 +1,18 @@ +name = "sidan-lab/vodka" +version = "0.1.13" +compiler = "v1.1.9" +plutus = "v3" +license = "Apache-2.0" +description = "Aiken utils for project 'sidan-lab/vodka" + +[repository] +user = "sidan-lab" +project = "vodka" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" + +[config] diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/assets/logo.png b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/assets/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..2c456908159575c5cef268b16780bba695dff570 GIT binary patch literal 74727 zcmYhicRbZ?{6BsMPGuztnW0p6Wn>+SkU}z&ot4ckdmM6y5<)4vZXvQ|k8?^H38jo< zos*e)PT?FJ=e&Q{>GS!1zrR0vxc|AYbB)({j@SF4nTY}Cq2q^OFc_zip`JMm#sdD6 z1$KZ9{K1IG#DhNv0}O3~U@*}S%ztpduff_dxMa4Go~}h$-r96%o@~E8fo^IUI<#?6 z^(?#50V63^V+8AA36FG^7q09+ygo=aA&y44i;pO7JT?U$f4wFg$Fk}4_&{UQsU+Da zIB}gzyDN*4!`<1-8xf3O*+DAT)~YtDa&|i%t2?M`exa7v$Cozbva>@+*Jn#|MUm9|$_`xw+i3y&oA_C`eR6@yGV# zxB}8wXd*;;qMvXm{CVu8v}4otPC~v71qd>yl%H%N}uxcZmbX3 z%*EzGI-@Io-Q6h!-|m|l9+P*+iC#nA(~VhCM=HK{j$!{aPB@JE@{9N;Ju4_TEEs~w z4255j2adc5mqtiPNX+(#;BLnDAVbaVYY4i*zZq&o`!e|rg4-Wx`4cT%S>kxgeenUw z)2x;{PTbAU&l-|Vhy{H^Rd5>aS>Kp#Y~qr#JItjVN<6-El!xbFHUb6~-Cj4d|AEd8b08X7br0j4Js}AL%%MynIJU ziUQ5CG}pfTQBtT@TfUN_uwb{#W=+FJ`p17xUZ2*;_;<{9&cR>K-&B@cXihZb#j4tA zzN28xi~4XF_<@y647>$?Puk3FtB}22)e_G*V_%6qoYN^*hkTS_u=F7OUa+(O3}ND| zniJ>iGMUVOy+>QIS7kAI#T(lX7f!f@^}b)V@6&quGBzp6M}ZZlr{|i>@#gBbBa3F{ z%6+H|=U8-G$N#!WC8A*&5Ao4**z;3Q#3n!fs^gWT;id$p)h` zV{x`e`a8OtRWEulUjB;?PDNtUuIlMY*&Tufd11NDsNsKUj{`~f zUJ%Y0#}F>gaJA=q!7qcWNO=Wgp5Z>VtZ*Bm$8i)3B z9tUBOe*>nYr*|jlVAw(O3EcNU&E%C?=;@m}qNx6MvqsFNQr$RDJEyu5*lz7l^0=By zo-waamiq9&J+9fb7rHCjr)r%ZcU|0cU)ph53S%cn;;P}?A+*zadU`82nJZt)LXdPD z_8126p`y=!2-AcoI(UfOTK2ufxvoG5LF90Hgy?R8KR!RY;EC{@{W6Fvw)yW@Fd!}I$ZGe!;$B!cL0ghfe>w9^~jg~SP7J{aqf zn?ZMMKx>p84p5sDwkhbk>vK^(q`0$1CpWTj=Z!tIiz@b-;rr*v zUgb`>>Y`Y1%Pg0eD=%ll3%H{+NAw}~vUB@ym;XqdyWaxcsN5F|%T=_o`#NQJ>#Mql zz``lP)FX;VZB`QEzhvvKj0>t|o^o@SYtr~q`m~t#v~QiQ8i9qk^X#ye$KslcPb4QL z$-ZX3<-I<$XPa1ou-$orY1PPZ-rLR>@`}-qRsvBubjQXPr6&Ta_Il!H4rRno{R4B$ zX_hcv3-cg;&X23NS-FT0qBU0nWLbhT-3bFrPyi>3|IoFh>fIaG8s%S(X zgVwU+f7yb6G!CV@!Zjpdcm%~YC)^j{no;C@zrjdb;HjT<(A;!>hVG(IPV;M7B{$E2 z0Ddw)Qz3TDY}%RlJPzXI@a8ho)0-D&ChEt;QnP2<73Ky_cn*i8Z4T|T(mBg8?G%F zc8092Lw!3EY7_*;ub=v{1{rzug#C98r)Cfl=JP#Xa<%S9N@yrp8@Iq8@>%M-{jBgQ 
zg=-_dl_xYO-`7hMIuGtBmwO`6?2o;;Ex_6df7k9kSuP97#B#G0jIQ3jK!u*F_|Zz& zFsgJxq~|Q`+Ooi!h?KCfxoftJ!rIfKWQ}4$*TGYx#o?Z(hnk?RzOkhn(>BD=I59+w zP}oa+JP4BO#mqR&b1U=vMs>Cb+7mk;+F#kr2>oGq!SqW+qXZ4@qOHqp*?*3p`ZU;Y6x?(QoDBJ#d%PxH49e!DGL_KY>cBLN8=z?&Gv3H5GY-L z1LdX|d(kdAI{J>tLoH?fms5Rqoo%+M@*7#;&VLAG z{HIoHh4a+Fd0g+Lyzx`oIa~gKC9N$&SVH2+DQ0jqP9~T6p((X_XGm!pL6cvPxkE1D z9mue96Ym%F!ljQ#;TrM+!s{p3L?)tCgFY3ryTv#+y^H@+tRHvgbIbcjlg%KakCiJU z8SG>sTzaDOqa;IDW;erYE5Njm?@Z#(?}>bV^puV+PFchHZ%m=pj|5UMqTbFb#D4Hm6oYVHep7~`REjwdCV<*`#7qAgi9p-u+;q5b=R9t^K< zWR=B96{5AAR%&tRf~bxJuSYm4(%(C92iydVc@rwuY*A`tRttW5Q>3TzlgCwckUeS2 zA0wkYvi%tnd7;|fcN_lnWW0CeL0X~_xCL2x$J3k1ZTcCnUx#aNZorAS>|D|x{%f6` zY&&Qi6^@13Z9{kU^j`ZjTbfS+6|OD1(}ruSXxp^HT5t|;+ab9xKn(+) z`}13|^vnJ4)8#@!eF7&R{1di($rF{`|L22QmT{z_MJQuOk;q1Nj4RZ?$pSX~hK*8nbpb(Y}T`LNKFsM|qFE-^1sn-q8sj$`cRkEZds$A5pTX-Py+_Cc(hk~Q* z^{mzkSyNeou+~Gwipe;P+U}k&v9#(&PazGzk6)&6lK{p%I*e z;F?Od^axcRLkgzgLf{i81DEq*JGXDos0DmlNdEMzF}Ya8RsPJOwfh>VyzPv>u`vY1 zz9UZ@Sxqqnjf>fK4f7amBkMHnl6ui+bdn`U5Y+Goi`FjqAQdR(wE1D&psIMIJFefq z?N+LPo5hR#FK!^r@@%%#32af_^Q#RwDgq6+`>kjJno81oB(qS|mBI)d?eHC*JVG*5 zImjkM6RRM03x-iJSs&HwzGT-)Mb1am1(7aihwt20|IB_~`1PPoTZr@!suLB)J2H)Q z_!0F~LSmSe83P}vCvY-xsz~T)74l$e3%I5l zi8}?5y0W9enzC2Vi@FY;^e*62Z_e~OG)#hHjgd9M+};|_T!pKl4my^|21)Oc4ksqhT^<8yz~@_uR(HRw5M z0fn7Oc#fdL2nY8MkWJ8OI(m9hH(0@a-V$Yx;X*)c6Ao3)=zeQ zY>j%-QhXxgBK##gZV8D&>}!(Ky}9i5^lraoJ~|fEfiuKl9Dz+wGyjbGNSr2iIl%@G^@7#B&M!+H4hZu>QMI2IgMWMmmozl4@*~k4`)9}tI_5!1pmAj~^JjWbwmf58tmyVM#!dc-Py3R6)3%z;u0|lKPaKaVkh2^*3{`>L2*zdJaA#3}w zof`<7qcRCTHC7C&bb?Zq9v+gpc4DCH2Ezh9G-ya~-Qr8EyxZV3YxZUQyjXw@Kw^Z?utE3q=q_TJdrAWnE#tqONA1KviUd#(xj~tQl%R*3tvPbq-xdsS3J10o1#V(dUF=h`Q)pF zx`YIe89?%`T4J30$I10L_7b$u!hg!-akr$^7Ut$$ zM-w%v2f_E~=hxH_)F7VE@%}|in6%cx9l!ENu|ri#*Lcz^nHZ$kYX-GrM6|@AsG3=~ z_XfEr`azIn(=r}odP&G7LyfJhJ~A<6$(QP}`19@Z*W1rM%Er>Qkvk`dO>w4M_?o2N z5_9I}YNe5A*!~G}nSF3;B_30`?-;+xs22bF3_>zAsLJp1mJBx&eT(JPS+ zj&fTz`Mo^bSBHqNsVIkofChZHf;`594`Al&mk?EQb(|`^u?2%Qpg)z4hVG(5qnH%OFW60o0gWG+C)>-ikBGct5#e+&`BtvVLup z?ZwDa4!AVcT_Ic{g1~0yr|1LNU=j1y4*1~S-= zKt;X3?Xvmo`1s0L+e22&Qpj!20N2o&2!D66&f9Y)@VBgRSZCf!D%`Qo>pN)3CQQx~ zrcDmQnIeZAf=jJT+L74a@)qNv5BW4gQXM37zb9Aj*8SX7;79>KI!M4LorrmGjF;$Glp*x zO!wUmExURd`)+WsGVcp2kI$zjxio*3!skapCoGxEKuzLA>}KcvqZ-IW86#4-BB=OI zSsET!K-WC>u9o8D-znz6wa-evj@xLhOiKF5JUL!PjAQ>8xiGFrcB-z6cROW>D@2Qm zVfO?_razGAbsiIb8uUaeRd*%hZDa_wlIkujjQ=@^d-k0bAtZZ1ZO9t#|MIq?nVzdR z6NF68nh@LK9FY#guTOx?$g6`KK8oZWl%ZhO*cb8FXv z5|%Gvj+)skS#jv0EkOQainIn%%cyFLr_5*N4{1Za%z3w|7HWWnWED=lccXV%Pf38byRo^A1F^Z zWJnD}pBs1xr~ibWraQ%7Dbq<2nW9v;b?0vLPpqyt2e;umkQ!nA9oa2`PKg@Gr3zp5 znTw>f@^95_j6AP=P?%GgEY_zSg9HipDw_MXw~}3EW90jvOS9->pBafV@7PyI->JRH zk1INTokBs?ZN_LGj>ZYz9Y12{E!k7(SJAi zhZX0DC{(14O=i=%{TlY<^~rWBM}AL1#M3lKkM9nXb~9ny*;tK$`3HjSyED3bz1yzM znXUieTVb4UVI>9Z`)uWGhtrdo8K?AOa2s-Pccqu!X-<0R{5a(_4fAM`L0eTwz~)y9 zx*QI@(Yzf>nsT*=8SkZ{>fZynP?WohsJJlNU3daIy5mB8a9!vKk7q9EyhmEG@O{Ch z#l6CYd_1JGm;6(LZG0cx-sPm?0sI9j!2=WgfNW>AieR=TZhPa9nGnLu)5hm#Y-;3V zOF_(F*+A`Auo9|GGDI`4&&-}BMqr1BSmh>!3h-^T05!TPy>=#P4I@6}=6!*Rt)0=1 zkBF)M_n1yj1-p~q+GM^eHi(!Yd46izrdZ#fREl8*^@+og^HI_cGx7fYRTMjhtiI*D zx&DRGb9n3Sz%lLTuk7#Ye_t-~fZBZOSBIgKK@dM)P%SmloxB*y7D5WBQzOb!{I1KPBh`dQM>|yTjYZN<~9oJUX+STaLzFPunjgl)g-GTvng`^wQH8M2kKyM%j zsy!NZ^^aK+@AyYjaD&6nbKt+gAx>*+=pNO7}wh4hGm+wAlIoFVPTexN+k#Yv- z2rnGRZYpL-knK6eM2C`@<`UJ~nf) z_lQAO>-Qd(J+(5WL!f&50hIKDf_9F0`7o(|fH2N`qmcJPe&OXS=dqwT9{S z-COiy^aSOmsN8jgT~7MWyuhP|X0?9jxoVDuobwQE&*7K}!DLBiMLn5nz3ybd{hI{G zAT;0RZs_R^GMn7qA2{gFg%(Y5;-*iB%s@kTc8fKAbb>~>g89twH||*vCVaY_GjoFY zImMOSNMmb>|1BO=T7taRm^U4pHl)!L^yci0wnzRnlOiz}Bro9J#+8nsS8r0#^D4Hl 
zfK=eiHdY2u^u*)43(&KiSBDUG)AeosEum&v#Q^!0i|Zu%f9Kf2#F5skjCs^cbVcS1JMB9VE(DEEYy7kpn3UpoC~Cg5C5)(?B1;u#|>-KE#TLp z;Ulw6IY-3*M;UGf$Aeh4Tf%RG!-%*+ZkEhv{FB0?vv4JMV9oucL61ARB$VSzfNOV~ zh6g{JkJ7JuZ;O`9MT8)e$oz*%Q_S{r?`Qv;tBk^bR!WskDR-I`%_)S!!Fni8^LeN5 zDj=zutd7-lw?wIbR3>R{p}37ymkTQRT#_IVvJKke!$VPO^>Zs|2RC819R*zIn9n6W zS2yNc=9&|GVlkl`syQ@*M^NS6^;w-ExAu0s{ZPCPn$Ef#fjUpc>gLM${1RT{_*;@Y zJ)Ed%Yj-^Y>YkaD!%8|2??e-q*MrYWOfs>6XMcT}U+7jgBjWJGDIxJ! zD)kLxCHLSWhxUEPilNU810y_i>p11RqYxfS0edbstcR|vp)zd|r~Jd0#6iOa$NlHe zq{&+tOdbaD?pUt}%SudM1zY>QPC%g7BzB_kNwPNLf~z57p|=Eyng4xepk@|*oI5C= z8KGsgF~+sSI0v+0LYsykqszbgkkg7I42`ra$G=YMz4*THWwPOg4Ok3(=ko*GkwFHn zkVbgKq|L8D6^EIIz%G+7Gl}1Cnc<-?tqagnjMhN}Y&n;3(XfhivS>OkNQ)UEh>?4( zOHHbv9iC!>Wo{CCyXem816!{fk?eo;NQ9PFozA=!oO8YL3Tk(ykK)}L|lMQelBR*Do^Gu!Zb=eS$15%SGD{t$EcIIQ( z1e)P^U$o>_`c zbv|5C!N@&wU4RbAU%JzKs}R(;=YEZ{G(v((LI1WorPZ_ASANW&0yLw+_u}~`sNUgK zR9{#qei}YO9Rb?PYf{|*f=_kXkwc#xga0J@x_(E|J?2@1qYhh+{%rBU@EB9Dd~qHXlZTBdXf~nEC&*q#)72n|9?UbJ zSu=j&fSG!RDxu8<5SL#cQf-$58!T910Yc=rID%~{I}hHExw&#}{+(drpkHgU6iw{C zz9Glvh$*$W1#)P`*R?>E?(YGuvZfHm68cqxxkAnv+{ox$E{M}QKJnXc^wLS(-a1{H zNQe-9kmPe3yrwH3fn^`p?&udxA=C>bi1oAfG-^LzS6M5?{;GotST6dwL1X7Q5%Uj> zpp}9+!PJ4}Wmv)3H5(UfHA_sLyQ)N;KJ$!jF)z+1(;H0j5MCst?tifV#uOw_(BF(a zb9)yv&6thpFg=8(VZ3Zn8pxj0s>``EEpy!u1WE619IWXyWo5vc+n6uf%v>%!B-Rn` zP4bor3yX%B_d4TBKX` z*>yV3@+(FzHq!wH1e&$&5Z-iOreOHxk45<>hIR$5>KVOo+F817OLl4vBl#I{(TWym z`oc^Fw%L)Cxi`cZQ?6tgN56}2nrgc#aJisZR0h7%w8>KLvSSc={9%&MVQ_Er6;vcc zAS}&I=q!jfjxPW8`;rML624n=9;891@8E;&BcB<_n1qJ5x#rI295XQlaZ}#U7vR== z>($*cY+-!lCa;h)5<<)ha&b8`jsW)~mC|x-aI79zK;$+BckV5Bei&f%knI|k96@tc z5%M2(yr=u9C8Z}9@KVBtiKl35*2c`HlZvQo)w|K}YAdF!FZOiR9eWJ)E|q{2U!!Xg zORc8W-mGB~nLs;-hWWo&*z{EgZO^i+own7&F}?9}zSE#?tuyia?+vGzH_eGli@C9+ zU!Ar*2h%~vLRf|n(c2pTs?|ZS99>7Iip6+bA)#+m5Z`GU)(`WoV+QU4b!cwzrU)Qy zZcl(NUzb=dJ;#n*iq^%o3P&9z6LOoRBx;#WxikN)MopdX0Hf)|o1STq1{Jp0{2y1A z|1W={N1N*J+3hXTW-)udIXW3mgb0FoOAIK#NNf|zXQyRr8CQ*XSokf;@G*!KydN6A z-_?nF^$c2KRyTT*lgI;skI~=CnRxmWJ)-ajhC*oa92?ZlQsAk=fsmqrI&O>Qsn~ND9;xeVEEPagBn%rxid1-#W#q1 zEmHj1dLDot-K{AZ+g5stR01_fu%Ffaz?gLDy)C;vKh5#o6_Ix>?Y}wu7z%y88mPAH zIr2CcGpM_6xGwLC>zG=?U};DP^**6w* ziC;{FEztc-|Mjz@oxvijQzB2N)C}4D+c0KQ2i=a~)@Z!CN%rNoP)wMAJWu){^YB_T z`DZ7f9|b*uA41%rNw5U3zxbo8hX{B2rr1=ej(A8tmol;Vr@Kj#h8Z0+>608A1~D`I z+r%o%qwqo1TbTQf8}q3783Cl-D1))v=Rnsa6a}Yux2r-|zL1a+^u{B=QtE<&zDuQ*-&}L)plRrG!K= zg!Yhsro@rs%ylMF2&{2eB;za%`w9p=C(Fk)$X@aIT<-v;y~)%Bjn zoI5N2m+K(T^OV8&gSfzZK6Opgs;4VYqFYWoDb&W|H6RaSf6fINiJw3rb3Gq@fy}Ob z+?;~BtYHD@n=g=du3%E?A(5{hPtZz_>#JL!^SOaFppb+}GS&AAfiJg?*7PgM-JK=C z9r5ibo`WeMO>>tdasPysiWZIV?eHD2F=icIY?*qphFz*c#Sc$W z19I$J3KrWmno1BXpv#c`;}rad-~Hv%=>vasaSD50J~HxZ?!Wc}Q5oMXDcIF~+PZ&n z$y54sR>1x*1KH#0Ekjw#I+c6v=huSUM*}K1CMprI9Tj474)uTNu4_=#Ve)D;_nlg> zA%AaiRx=8ks1nb6t-H0@y%l7GZm!>kDv<8cO(LlHFG_<(gb4q|JjtQq1mDJ{O$-Y| zH*-;6N|+hy3Dlm=PuN9<|7kbK^x+8_X^`IKlAEG>yD=P)Tn-EY_;Et`=@5uc@9%}i-27wxYg|`1agsI^pfa~>1T>I1#>fApVuhw9%%mc~u0F1_a>ewA z76Al)Hxb;(5g;3;FrL!6o91rSLAIOWGk=izD*l|WAJ+S0F;9Rz_1n*?5_Q;#4`$JH zV1FvdKFJUUV(Q^o&@c5(N1Cf49(pqA^PmoT-?-fNz1w99$OF&S1)c(>`Flu~`lI_> zKyS5Jg*Q8hZai8*pYS*lY8 zprr$(At9Bv)(MSim#jepB>zg3VpvanyoB+Y%H_^OPMm1AsH?Q;q$H-v_;G1EmVlJM zH{rcLjB$gS1_4!^Jg(+_4fw>qY?*ZbuO-hp>8o|C`MP~b>*2MZ-hE-zkCkxiMdupt ztzC5Nb7j&Ey;i`A4edlx`|<%TDd#gPn~5)U!2*D+bBAt-iB9^of2$klidc8)WuOGj z-V}qab%Rp-@WDrQ8qfpg@80d!KpF^ACN$98?eB7{^gg0M3eN^glTkRAS96&fZV=5! 
z{`kfsMzaZjH=e?(_^%oh{k{Hf{*+$$t&mxuQ?Bm;Za1h?AVq)d!ihcDUX!2xzh^vA z{COWOII68!E!CSj!3F9Q+W?(9JdouEsY`nULFne?J~ECzVM(!~$EE@m_lR2B7& z3QD%w#(zf6%7YORjn-9jy-^_Q8;%3PTk7i}3Io*nmo=7B?YotxG zTOs{Az$}I@IuSbtzM*yG7oA}E>n{OHS;KHJ{hB9q|B+K36{?`7&1H&SqCisK?{8-S zD`V~Mm$1q**mO0k;&vR)^a9CVFA6juDa}GuR6kxUMD%y{$<*d>7JO#q07HSVH)uKc z{}^AjW7i{ngIcy7Xe#my)r&N=GC1+K+P;nG0JD-+6V?}v-|rr7pzK{BYUJFLkzjy6 zc_b^1wG)~$ZIh=Hw$`v7z<(H^rxiP+>H5#%T2^g?O`4nP$FMa&OCH53)Ol^pdM>qs;Hk_NsDpe# z(i-31#d>F!!6*f&^BTu<1FbfzU=1UtT>IXG6`oQObRbPgaiqZ=rY${cfr@{)i~NlK z*C~{WUBgsokAgIl9bUsc8Cnp0~n1*69H=5yc>N3;XQhVPvli`)T8zHW32fOJ_$c zti7}_J;~4>RE{l@b*#sieege^d~JKiulzrzV`>}!UwdqM3;_yps9(3z{~f~z!$~6u6H!tUb@HIHwNf>SJ^X$^So(TLAm$#w zaDm=-GqTgEvNoKZbL+eQ6Vj$(p)ui3SVvnyO2hd8yUF@^;BM^>zPz{U@PfME3b_#3 z6%Q{wH+&7c%JQf^u3B#LbOLWJ=tdwp69VcXpuuAA*8sQT|3`Qtd3xZ-ZbiSbdDKEm zzi?4_BC2M^0;alI=>6LaK!XmPu@{%Fo)^%EU)xr*ZQN=G4N5oJd8g7o;3Ju+b(4b4 zWnAK!uTiFoB}qOR{xk*I)csHr&fc0ytUG6P+V?r$6P-3^9VZ>(X{470FwbiZG`nUH z{z_ayWd4oKwL9ds=OtC>f~WC|5+eULZkhlcH1R);-wtDW z@88jyyYjaLet6ekBl2r0X+9T%s9JzJ>j+ne5{8P(vfnwBv4F7sf+hnW;L=J%k*Yf@&u7IL3FfS3;b+KVUMkWOGR#Y`C$aB;y=n^xfGppwRSGsTe zYR#TYl}J#Ec(sN;+(QU89CFFJvw^M^CtlrmB}X^>F2?bP-My~2$b1ql{y0a6;JGIz z;)}^H3y{D$ljvrNlh=*kvVt3e+RIXLEW)j>BO z#??u6kkY>pPg}LhIi(GK&tv!@9Y&UcjrlKty^LrT@<|#04(66WMZao541fW#Z*yGp z9I8(l_ah32c)A2Sz$^>yyMS%X%U9ZIsb0Fcw~;E*J6FIk;V4$lX}vQ#&t+EPSOA_kMRMj<7WXy zHZb4)V}5t9iIwA*95Gc61jSS+hb#bD=VMTxo?}L+2Wy_{{clxQ&Bl`6U?f- zWE`7(y@r0Nq@pY`X|mWH_(Y1^-ZVMir=bnOb_*olc^vO*s)3#fXl5S^#n=jJ2f0oZ z>P#Impb+|JFH28)x5qsPHW%O@Nx)SBlWY0RZWcZ#ltJfSNGnbRP|9;?TkK==tPGTMTmUtqPKe!2a|6nXbw z5J@{h=V~vhuiiySDz(c6jO)nB)!+MM7XHE#HpCKBvlyT^&jLaxC-Ri?v9g1^EVEev zvM9M$%!PG4oNd{Xi#<`H7X)eel`O4c-Xdb;I|9x__>O{l=sTtUsAzM}sl=W>HRGZx z#ExyI0*|LSa~Mm=iCzcpy<<7*ah&TtFyIxj6hqVaW^r&0Owz*xID`=fYFx2zaoc@k z0d?y7eUFQ0N3Fw!yqQVrI4}f7EJoHHNy*66pN>FLim{IUpwUjsMI0%i+WQ+h0ud8#@H zeT>0SMm)|R=3k~H#lx&HB@AihV9=u3%VNe~8Q(D4@j!dj zdPnOZ1NEcCU+HlzAbh3|SE9ACKZV1jfgpiq+dzpTL|?Y{Vw);YG~jMniV2&ih!HVE zc*+f2BqFW!&|%GEVO(S%qNl|O+tP88{*ajLjgBELa|$-w&BgeLnr7oN()#vV*m=f_9*U(R91xnxUdP{W#z_ZXY!093 zy+!4H4Z__454l^3*xqrn$j}T>mh)>n8V$O18koehv^@E@aJxoB&%8`lCoXBnHG?t* z2-_#%P6=`RTs!zi*@*-5;HNcbd*i+KPZwjjn-`;>oGUr>dY{0HuD9Srkgh976T74# z$FLK;uHEWSUf%H@0KCIB(@M?h$@^zQM*je$;g7ww@vj4GjjOIstYgWZ=HYZcra)6L z1Y?Bci>%J@hC1gqV3wnsOg;fnEa}qmcgU>YCO1EdtwhG!whPHdb?2x)Z@DnE%DK;1 zGEsJq?qS+=?co04y!VQc#p;2m3uI?p8$8Xn%?)5E(9Q=cG#WpuU%8w)1sP@$VmxmN zT&~;Nf8BF7mamw4hO{Hp3H0(UGJ#y%8Q5})=m$Aox&?m0G| znwCo&Faqc#cqtllulYy_TdH@YN?Ysm`A~wDR8JA#3`(?$Xxl0$j(ZM=lFN_l=w*TG za~;wU65OlozTdA5H1Bn%K&@ zZ|3fJmbayFM2aRFl0lr?42+rQYbI2OFjY(YE&fL;b>ncY=O;(jDI5tObRPbkowqNz z9s@~RoTQ^QcBzbn&L8G-hbg?7)zvRs=|mK=@JhoQqHlgG2wB^rHOLG3X5#>{xzQuj zJJKj{%$LtC?MZ}l6 zYZci~2jV{H+-Lc2nt7$**`iwKUd3`%aCTk7) zqe6q)tU+d%xYQrZlLYgu=2L+!r{%^u-yn9vh$IAIk_s$dz{q<4f!av$ifciDX;(F< zL{G-9LH8Ij&gH`lPb+lxK5bqfGQH`xi(w7R9lOzcs`=19ZhrL~`5gFi!vh`~L6ZA_ zF&>gH7#jp;C9EkK^GYZc?40?r?3QZG!lFT$z-@kft>-3?lPvlz@BCrNy=GE0DISn%(Ehs4ZS*B#)RtUInm?8{_~ zjbCB8`h0S1DME~W|8#)D$4&gI295A{OlM%{c&?IJ7PHmnD?N=)DQetB?%=r$P8<=j zge85UzQQey&1l?bAN6W71P+Mrwt(cD*DpW+N;AE8!%U?99Pz_eqV!A$R!6xMN(|II za>ut^Z$W*8wrpuZYB4Q3tH3x~?(}_6edlXn5Ek$AnK34&ll5Ul$S7h?KG(6+jO&WG z75s~?1ttC;k!E_CjTF9IG0c{rW*i8Q)T0{t`EgY8f*5uE<-wO8O(&WN7elk0D zV4%$3vDNxD9`enjVXY$5Js^=x4TNjz+sKF08t=jzS2*F*Uod;eCst^3ey=I(x%w*1 z@86a>gm+f2A5WT_`APSg zY*Fgux5|lfv;BoIC}~A&x5eT+s}&I6Ft$sK#EQzqf>Wt;=>t`Erbd~mLrzg!D%h0L zSDxUv0FeJ@LY_Pq%{^NYQU@smouUqyXgek?H)v#cPl#ld$vX#O^$0oL>oi704<90615!U0@=WJvC z^kiee+SC5?^qbWP@D&b**!E_FO}y@m!@;N{WOEdO5KWb{r2E{W5Ii<(-JnvoY!|3^ 
z0`NMCeszaKFX>nBSF;o>yy8#)1v;Sl?g^rK(XrcVR^8{QSiT$@sd^wpGz$JWZ|;q( zARlXqEIzR6cJENE%_~CTe>sy-?Hj$DDg=MHqmFXJf<`R8gtIZ7p7wKAd8-A|9$Bdx zy{*NDR5SQz;fk02wjeeH6IXb4-$JA4@WW*AeB@UaZy%{1>0))iAOoRd*lq;zn`(@W z>^G>g*#-A^N-hcTvAWruQ@b&X@mu_rm$>IxcC`ro%qtj^?)%3Yhkkwg)xIOrdUCZK z0`~03z}S?6qSlkdl=zQRYBisn^3TG!t8YxZK*!^1sIblw2L*}T%WxPz$CdG^**VZM`YHcy@|3`|nw9OPH-K?F|C+?6cPrDF!X_;cKFM@~)T z&TZm^x&{~u!R~sESqNC&dM9slo2O)) z*xD&C_gIb29bg+5vB^ND{97_2J>h+K?VSE(h@3jf@aF9L?fB-Lf+NR1ej=U+?_>Q& z{%QeRl*p;Xl#$FEpYlQW7mrN#JP>s!vE6<5X1 z#|VV^6TuL7RZDemW6Pl^ElY7?ZrohB5ZiXk=&L6RHSuPv@O<niV6>*}+q|ni_G6pP|33_L=?4obaE#{Ncd&IuU=S z>u2YnGemmq5S;M=O43)EP$s^8bBukT5^?=BpwIKb0jmgH{G9UR=*eQ6rS+pg(KDM) zpKFZJdiW8jL3k@ic)Q2kU)O~yXn0wP13T0LWzsa$9%al04yp|qJ-To2G$yb%kl8tc z+Vvt{eshdU>j>_al~|fWMhTA8uJn|mA3JYgI7aN^w1ah;WT2tU;?#J_Tswm4BiTx- z&$hO`U$_u@o|zD!mQDWphHKindzy*3XL}jm8(0JOMTTh|ol9VmwI|#peaY4BVoNmP zkpFW*?uDT^Yna%9{fKGj&uu&AEEH${x%&5j222Ql@M5Pc&KyWzcxb zTS>>UwG8pK*0?bh#HFwHv7G~AoS(pSM*nBPB(rA2Ixd2*W$bpA+$>xlqmb9%h#l<_Lc z1W}C$(fzjq!%Fu>{SKtTeHt;jerEjEQkC4vpfK0OXs@qI2KKQlkI~C%|V@HaPb-$g6nfS(q0m zCu+(EyXyy>(DcG@9~Jkly6@t2f`1m~Mm^0udXhio#M{~&$Oa8-=RZs3KR7ak1d>)` zHQZ<2qgwhz#XdU78>#OJM!i|Q_rec#$R25LPvJ&t|F~=_@g7JJji0z@&zuzfbMT{F z;1@cz;}0~vs1mS%(a4^2Ji2)ILgU=eaVh=xRn8GVv3JUr*2J)2^oY0_huVL9OYim; zCob~l^^t5D@X=1S#1Mlfn%gi4!aDQY_|b>!U|yruYpawM?k2+wYhqWKm5CqoAb(x$ zcbZg8YqO(6&fL%5R0EexzLWeWZu|be_8u?-izfTdN$xJb^o@Pi5sUnx(j1AetpcO} z9Nc}uJ23^-*Zl}oyRf*YYwqre-u$(4ieqZZ-#r3BR^E;{@-Pux8fVN`>&yQutDqAs zbdsu@l$<#AZ`kiFb)_4DI+!E18LEl0Fiw#@ab3RmhzoNga{Hs9o4NkZsdH7zL7!+#_i_|{Ou~HwJ`lM$ZeEAk*kqkfesabLNKiHOCpe6jS zt`7bdXbU9~N|yK{e&$=$<%7?3ZFjLJE(iZ433vVM6&6dW5LsBfuf#E`_4D>0HOAwW z574SNm!;n9mL#qXS)I2#DO!5%49yV(>-3{Fz3Gq*o>IG3*YzqG?|NM4oh7UvMJ~Xh zeK6t@(;%2tMljo>!+^GH90l5!=L>oho8sduEF7td&oth~qleg;Ke7Q)a?jl=>DT8?c@MJW`+u)}B9^kRv@Ty@?d-@b&{GG_s?UF#0nO zA;E`>ks~6&;yIP|4D#qRR}-EH3Ps-X4%04&MXfd6mc)pKc@1pn-Dmz-4hBB;jVj*C zoW6cyx?$xkcWrSuEj8g(wk-L%d@I>M;KOEhY}(7#lP{8b!&4R5w0YH5I#)$~eZYsL zQ&46_Fo(hGF-qp^(REn#pKo+8->B-u0zE)lV|_B(Vlf+)%0TeL?PJbO0RpOXUzMn{XK0XxiPRa;x$*!V%Qy9j-KAo1^YN z?lX4o4Iy6Zs%;M&sWe041LZc}gUsq^)XC@AL@%0~^IcB{mUi2(p+ut=Og8`GuV3Sf zJ(ug}gf$$jD24Pe3}@Iea{ubKcE#>9bUh_zYAF`zjv3SCT z9ZSiiPn?g%H*NN~l&g;ONZ`9fy4&3jixmwAoXKDP5I(^JgB22Ry|91%e&QO!Te-cO z!h2d(_Gnh0{ZQUyF=?X5yU7&FbithvG|0n37#Z(@E>2U^dC4B!y|RcvuU)J^oClN54h~N08UEGJkqOa52@sQiaDXU8%db>f>Z1aOSu2j4qh$P z&%&(yV%0Q!B#bm=^7rm7sHKZG@#RP zKGrr!V+^zKh4 z#cGL{sV+MtLn4kcNYnIhT4R{P_+clpR4zwSTBJKz;pC`@3hB46Xp6MgjGP?nlz1Iy z%OoW1 z{j1yoC6`FnhCRAPcYzFY!<$Q`${>adpg@8I%;@t#nIG3f(5fUXPJMW1sjO4xC}6XE zpRk?=3F6(!t~!i=Pz3eisWg8qbQnRm_e*=`Y-XxwgXBxHjaZ3h-~dL8&!?1aw{bb5^fGM=!Bfww{tE9++)&5 z@|R=kuR=bK1x5F_O(aonRf+@kj<20O$jp}XJ3!Ikm6HyJSd`2w=lJymeoUUz`>B38 zKACc6{E>U1UZ)vI(0>1=kYct!mo@0V9*uu?|99g0dwaw+q?A+M zHVJ|K*l?~-mfF+5sJPsHdpjQuS`HkQq##LPiTt4cL{&%P*e#+EJXfz=m31L>cc#gd zUL1vzb>{?6x}+TFS>I-#JYi<3`-50wwB?T_e-$Ewtz! 
z#_@H;F}cCTHaK`qCTmQlwQAk%#uTO4BdHb_FmI7B3QN}>;oZ-c54VTwp|&~D$07Xp z4TChc-7dG=9<}*N?u-IapKj+}$M2oA@@gQ%w*=TF>x2|y&~!WJPmcts`REb1$~p+Y zUmT_e(OSevRS>HH%B9~*MD1WrMiwsB5@H$mDQdbE)787cdVVP{`p7Mb$5{C7V~{`g2wHA03=f*0j8 zt|K^&UDypuHVsQQ^_k9astJoaRVecp*Q%mg*Iy`-uD*tCwN5j187NF7R>tl(FA(d8?nvKrStv=XDmOhZX(w<9;GCDrQ(kSvP5r< zH1-!15o6RMVpFY7@2q=#tDHP_eP&He@)$#j)(D*fB_!j+X!;V)8{iDy_-N{6%J|^HUX;PYH{PmnBu{| z{+I4X;t9~5yrk*yf52aa57G=bi|w`yj-t03vn^S(}X-XGl#xR<-`{;CRjvsnJS&Y`bK=vey8YpC-nV_&dl z)tOxAnbJDfyD8bIuLE7QTm!+s1ql=$xA}@Q$2D&2#C(R6O z@S|BH5fKIUq(%JoxT_H>r!`N>cC=mz?~m>2m91dj2I~CU?Qq-Vo{zz=|3VVs&*2Jn zsY7JMY8~!x)I{pzJi}%XoSc$el*B3GW6H~XtRfn?)1)(~OJTK@Haw~l#ISI%CqqAudnw*Y{wy3!>NsQMIQdxpDBY-I`!r@bCgcq^lp<7FUFV)`;!!Cj z+FDg~QdnyJnAHm%Mrg_54fu3USK$ri0J(EY={99gyK7ov~AZ<*k@pT+zDNlFqWlmGs?{ z*JWFO^i&t3il#)(x{U3&$<)0j&Ud`r6q%n$Uv7)BPSMia()Vljv^cGCs*}-uxcy_f z1-Cv4A7TbA7J=-9q+Ub5@MY*I!|&s z7lJa2VFn5)B#df;E9{}TtXC{EfV)06k30y57^;DK=*8F6``)4#V&A-!kWvl6VU}eO z;lKS;g`12R&X*(!OvO|6ez1)X=1X%K|7y-rJ)6}L)A`Vg0Dg-Zfl+Yd|jP*$WG})P0@US@yLYU8um3WDu>A6$^+h zJcYl>8t(ACCLyn?BvxtOT91&#r5U*0Y{4swE65s|?ahvz@u8IG-lExzHspPWD2W*; zyk++YFnLvq%&DmqT&YuYtIcW6*T-9WF%u5;?_p4M@gvk|)u4ZSHqrU}>eJ8Bk{Sj5 zyDgT+kGBUa>~c#&WI2k-+!7t`D#ciUOgkO-dB*4*=(AMsI~D%6Vea_!m0;*~k85f+ z@n=5zWIM0W<`S;AG&{p7iH(-spF%+G$aCZ>sx3m~J?Y#^#w*6b;h3AcHzQ!srDu0h z0s7y<2#BfSQx-${9vy4HK zaR`3$H5{L-ptJv>;$GZWsZd43#DG9ItI&W@DUyn8{`MMNU@+AVfAQhvZ&oko)0p02 zRTBzB6Ef-dak?wQlXH69kPG>iA0Ox3EU$Wn!sX@MGfk_x+p&BKFwC}8HpC7_zn{m$ ztM#i@5Z2v+wLPh(8D(LfV&K{Dd0mP2$XkGyPW06g)sr1R;?q(LqQOlI_H^hhJJ!Jn=-d+64SH-pGEgYsiB2sG|3Z%gr=vmCXlEK?rQshWM6 zKft#}z61xIk}o8s->~qHXhG!MSscpIKY5=h?>^t`V`6&D6I6F16_MpGM`M7PscEguoE5Q zo**sbcbO7bO}M!&GhtBvQ~9|T$IWqNKp*{_-QVs&^}(YDgfN>$UXW@Kh+5`hD(NTy zNt7-_9|EMK)W(V5PJ*W}sH&@>EmppZBAuw8^(+^qkCG>v=#SqeZx$afv&UP@@e+q9 z&jx=I+<#RmckhnP)bDHupm?spe;CYhN0jnd$YPRO@VaxZ_r}`q1KwKK7T5T9&sq%U zU1^rHFttwZ)#KaG zP%_DQiJk1isIT5TXjk2A21T*GJ0)u6OoCOUeZj8L{^L-`D$*SkT06!M&b;-N&l zd27VlY_J0d0vNd7UFp?dg#|K+Cr3L6#m`2b{#*maf^rs;e1w$o^tvP$llRoREQIr` zoNx`9$yD3TIS=ds&mawOYZB&*h3OVF1(eRKQ^f4v;sJ9b!9$-wXW=`lb#))dhP2if za*PULzT!LA3OB1>OhU<^Ih1D{(ICvGKa8Tz9$E++jAbTn%a{-A^Q`996W-kXr9~qp6{IH|2E^T zT${vC>h{CG>rGXDoi4JC%O9hQl<=msuV}N@1dDClhW;67^?+(ghR78BJNUO`%|LZ= z8SruAKDpHYvC;Jt7ox)a`%#;dIgmoM24WlT2#wNypF?C*t)e!y_SLLVT0AiSSmA$z?}0>=r@|5PcLcGu{v~lN%7y(_qNq@^-<1h1 z1~flv)HjI)aa8E}ShHZ_^B5l%DRJjWk$s}G-_n;n!b!-npj78MoY^R9pAYs&SG%Oe zUM&Flb~7*Oqh{hKVo*a-_}lrhrTd+=1=@z&?!}Vt;02HwTHk*E#;{`mm4CCa{|c~j z-Y-^IApjLPxstKS>vv?!k}vU(6{gl|ByO4p_y~&Qm=RV77bA*4nEV%2_089XsYhw( zOSlZ*KcPGef78Z_M!TEok}hTEyEKk3%-Yjd#>`v{T*!wCT-mEbZATEbR{)RZMpR~c zxvDaDFzbitKSI4QC#vR=e#|Wp)o2HXf7Sz7s$1VhC)UmOGO@VMow>2C9E#3C`l+n~ zh*@|$nSj1-eF4>2O_qPjIM>bd_<8lbyq}nUiW7JQXZ|_gF=v86R&sp8_wQArM-KMQlgqyGSlHvHGm}f zpUSXgQG^pmbOf z^bCk6W2~S3tq$fXuc)p!P-JiU=^2B5XB(Kia#qv$r!zzNI2nyimQNb-@{!vhwp72J zd&3f`0ZqBcR#nUW4S?DXk8tk+FUJ?eQvWu^Mt<2xdif6+{ip;KrqNHgqB5*Jtrgw< zes2Bpd&9rC_s2zYPt3?<7pW^90+AH+h(qv+bP4Ly(jA6+SNzF1dj9@lXwQv(5N{%_ zm{$&5S;j`ERT+;juS_{e7CkR<@Brt(@yiGfuXag!N_u(L!1RYMWEz1iNUsTHMaJ4X3fX4$SIHD#2X@gy*piI_jD={*w^2?`A{AjaDvQE1or#XEt2?U zAQ-^Mdoo0ZKa3I>r4{g#={&8GeR5?jRuUDGDasx>YR>kp`$tsK{I$8o$*tC-DdNu8 zHsc9q;};w)C$Gnn1F7E>1(OPoa9`Ux5?P0T~hdpeuoW*Qf3us#& z_dN)J*AhW9tuf(e-?5mIp_0zCkQ64;yp_|IS`m{;Ex0zu+$uA*=mI>Yh`Q{NB>LZi z;-7DG!frOZBQwHfjvS@Z;yBuo>->+L%q)s!CcUI7Vcyg403hs6+NBK)_HLF;>XusC z3@Ae{>H%;g4t|@I?~Q0qxFuYyr4WayXAmO%D1v?Qg&b#j8DV2Zb#E$tojoPoKp6HC zE$l%4EP^pX#uPu-&=vKOc(+(ycT!nNHR265Khp}L-%)p#VE?sS#_uezXZh{+<3uK# zhL&vjCEjVc<&#V6Sct4j{_WhATa32r0eQ`NbI>_|C%joW49{E(cKDj#2cSB 
zh>p%MIukZ%d#q=!Ag(M`%=m0+BHh~6PUJRLxWYMS1d<7(dH1fN!3*1;pYdT~0lB`2 za%?m&^YLwnx`v~TgD9!JloNX^I#TM7OvTJCSEKePcf`o_vS?CI)y%gpD3se@FYf`g zlGSLOO6Y}$^j$6KGEMbqH$>`=&E2&(RgP`Ho(5mp;e|fK3FBX{-w008=f&k0uMo}T zzkhhC?0^-#a0}==YQo;CUT@hQo1&t+TJqlQ5YJNGHahlG^)%3>9>IA?yf*dIQZiK8 zQC*tfjq}h5A{fB9lK~n)lWJfWx^p2+r<|=u4qSKR@#d{SOtve;QoOll=ChF)r2;u9G^BFJVp>93dnv>`^b3=O-wNeC8xc>&qt2ay>w? zD$PppLz|8E)wajE(3BaWjVg!{00KAo7e`2B4k@j5Gf6vhCxG2weG+6%ETr?)=VYIp zk8HW2{z1LJm)^S4nDez7V@pcjC*D`dp{mItWnaCZe$j8usEO>j zDkpMScB$3vTUvU=^GOF*6uK*JsWTuoeZY zuA{pM{gz*B2%9^nuUHHgpNNYi=4xh>I2wvDX7ZI6_9|rawH|-x1u+BEk=y4Zg5KlX z4*jbZ&441WhMA}h#b=-F{GPvWu(Il%DxP4~gX=;nZ=!ie9Zj~e$i5YiCVOi;Tk7!d5~WG~j?Bxt+&~siH^E_Dd+8~(iS>Bj%R6jg_*YyB zS0M?aOGBy<%h@(K)JHjajN>$7G)&Z_&USP!_GSGOV^3c}w116|lgeZZ?L-?fnw{0i zL@`L8QOy~(q`?4m#&6T>_x%amgpHTu&Q&iq5Ixjaxq3lrU5qqom^1#~9(2g0^5 zL3Su2g6lL^p+WL`j?d%S#5tcDAOf!DB3X}r6sJ}}6u2JBe;23#QQ6?9b{LmQz;=l+ z!O<3|tpaiZtR;rSY`i91RH(A;>FA~|^vq{Y7fZuSYE!X}ocHWjyt3PIUy)%hcuu{s z3Xo1GOdq?D*}bcij9|<$ov>Nbg{U9?APv;+&AE>OoLtZBfnwr9*q?THhhadgk$9!! zQV@Y;mJn2P4H}EOKIbv9l|zkr#`|{>77L$w$3sV9>7qdvcD%TZc%>+ZYk~I(p8Y*2 zXT8l~X!fy$zF(A2g2cm7ZAVb$O^R{?YoZazk$@A3~;s> z#h|1<(dhB^9PBsv)`w)KaI4Qb5RP*l0@~I5(LX}2o5m{Xh}FN8PZtbT%MGdNv>{x%u74~BX@%w1(o^KJ&?XG3)kasmBDwH?-Egy;! zJzIp}jL`*&zp35#<22X*%6o4zUy|5{x(dSE|8_-Ybx?Z8Sses5QhjT(Qzv?mFWzG! z@(c=*T)nx}*BZEYEc}YxVRQWXVRJR4k|=%TF_s}=S#U4bCg?a1wo_)Vt^itE9V-#X z;wC&#eq`li14c&a+dt_~_Ge34P@hz>sVT4~Pka9SjieW3pCihQ@aX7>K8v=GXJapK zpuB&>#>QVCl0oP>Vbe)5@}=vt1j8C+;+N}PQ;Q2>#}DVC>KCsVz6JYKZ3`97tFon3 zEe62LboMeP8JP!`XruRzIPX9S38ul;#6&hByOwSxj1TQjq{6#_sIRqZitFeCREP&v?tg}`l8k0cv6{vJopf*dvF*e7Z3FuM0| z+bfPN%)x%*lkK}rbjQxkBx`-OBIRkQh=;9vwM?W9mjajn<9lUOk^uR%tJ&q4+E1(- zvZ#Ro^b!W5KwmpHt7J6nOSiq$_D{z+YR zmneeW4wE2}Xyh2p(_U*1`)66tZ85)YwT0U7%SLV84V0l3< z^NKP5(hrQT8Ujrv`=Q*?3Bc+6rV*NUOQdIWcSPPX(%n8TLPz)}9xJ)mpD(5~;KoZ( zl4pDixVc*&^l)41Yc10$;4>s`{u(R0aUdP5+e zV&9#a_mofyIFpYG-Ug!1z+LD8cj2qzeFwShv~A?o%|gwu1(Ub$2UZC3u(f{ae2vNu z_mhw9Pd<4HWDz{wGhPkG#NS*Q78O9B*Sqw|faK0DrxCpjV3*i;&t|mY_@ih~l5%C7 zZJYb6$A6^+IHFnK0}2^e)VNyQ@5$1m2%uOM%ysO?HSO7}ZB67j|0Ta-+ue7*uBM`W zO6tqEL)UQ&iesNMF1=v7-+6cOoy~X;AL{d{lcv=*5C{lzl^=qhJ1A@!TRx`KH&ZMF z&CqGJIJXkzQQrpSkE*-h?UJm!t9H`DNNtoENzZK2^S#>+94d=JJNw5y1d@0fL|5-t z-BIU+F65Qss6wA`p{v6CIj*AChu}A~Waf+d+<8Zd1vjpV%}Xgk*2m2%`iRCkVQeI4 zhfI%bhX_S(Wu7Q}@SyLU1p?!2^u*VwP_(|+w7tj(si`~g6$uUxp0jP1?k{Gu`IwT4 zw8Xul-5X7L2z2J{TS7??j+;OWQ`$L2JBxxnx@TuSIMxt5c)3!JUv5j-1$mXg^3-~? z?Y{%I&J!YB2gwD^+$*6~!l;xb_fInV6$$MG_V}m_%}`NYsBfL1#Q{*XSV#p{^p4it zCzM&J$kOm+)+f_m%y1VeTu$@(rlSHi=s%p1&t2o{J-l0kr;qug5%Q_zFlZ)ZCwo8m zO3!cP8_ns?ThdIARAu+=_sh90xOE|%BlVdH9p~C9t_bw3iDR@zKf%ZM-_G(a4e!adxT&Lf*AqN}^BXvIxW?ym*_6n8rw&lGbJehb8JTh^@YqgoKx9b!+)>V67O z!iJ)fVeAQ;TLk$7TFdibffM~hiYu~1@cggzLxoBzl_3Ko3{JCCNRqKuhS9^#V_zrl z!Pb*B+EEP#bTjOoLK3zmPdivmWx?Xh6ywJ{#hRwS8?>+M&|7nF@_XAI3bs{CF7@

E@E@a&0UPEjT$6Iy< zp;n(TgbIMvX1FQZpU)5@xQGg@@_M0sz>jv+pOjc{ACJ0E?n2VXvY4{#YcTnm4{}_z za{WpjINaS$`q6PXFdgR}f-MDmWY5k-Q!gDBlX}P%TYCK$;DX#Oju9f;ALThCwU4Y_xs|L-wxgheU%inI9^ycMi5*UU*J5BY3hiGE zIw;@iyBLsU?UxX)=fS|>4;<@{qmugUgmh2Yg@}hKmuz1U&Q1mABrpGwL4SBf%Jdje z2>tXZ;LWZy(Y$<=6L}nRcEJQcisP)L>4+4NxgY<#gz?oP^UyZ6`%yt`Ly0Z{&dJM^ z%D4#wv)q05KvT%4Ge;yeerTil$QDRY2MzI|Ugs&cH(3#OOBXkgMdS;WRJ9HSWbj^l zF|f#)(wLRhrkoLeI^`oO}jkq=eHS(<#(U*$Wt zjdzr~_7Z@V7St~V?i8^3_&+xVm4_~ZZT{li7*#3Pm%B-|V~T7|P2136QF)mz$s=ygFmaJ$>&z+= zDq~AWu7-$yAzld4)Mh~W0`%7LoPep zfM;c{;Fnzt1P9p6fm7@%{4YKTaWZ?{)IC~kcsZMtpbuuhyOG81+{^{&MNE9|PK|Sqr*VGXADR1KdnnE!*D}S-`Q>0Qs53 zBgJ}myCI1>-CUM-ti;d}Bor*Vq+ug~l!qtSxx)xT45Y6lHnL-FVpGkN+16y~_^)nt4 z&B5rz+XPxGii)_-5MxPfxJy?PgPJbqHtvOl;wvI6 zR+MPWQXNlgD_hHb>_P+DrMl^C{lg4umM2d+l}xjffe1_hHxCcT*hgcSw|`Ay8BNVG zAJP0|*|qo*x-tqE!0KjPW7>5Kc*-PS*v^s=Vz`TVa04US)qA{tHuEr?!!>GpMf!~})m6G2d*5?!|_~G6;C4aCLcPNiah7}xK z+C-(^>YIi?en76frZ^{xm_;(6eTM&L_ze9fdSVr?j6a}xVUAl0MGcH|xd8Ma)X`1t zNG$^dtA9oQ?wA2!JJ zl=oX{?Xm9w&y|>{t3kBi5<^k?RpWO%LPLQRx)s`D`oRlB%5UM4{i)`crl$KI{ugI^ z$;arrK>w=!e{_r_PJBcRaWWE;C7>Q>NISas3T#}U;>w>|wL`^?V=zwvK{c{!@tDb0 zJg~ph71Y5Z>rT=X-k6e==-_9G4&dL01~(o5H~qV6zk!+OsBQz9Q6$kn&&^(18cXQdlYFCV2i4_Q6Ubd zaS?f%*&yg6AbgD#!F>9nV18Ow2*FHkXG{Kw5&Z9q&nttOfe7FGI--|4@&8={YqKwR z>z7I!3i%-YcTvdEb@@}HQ?U}G6Gb1RGcA?sWay&|@TXj6sow>%E?YV=rpS!z3F|@x zn-=Bd)Eoh9jjC*@L3LaTxEhYnDBA6S%Ll6jpGou4!QpbB_z#EC1-e9I3eBRk7Cm-Y z^O&LQSNmPXD>247WI8dy(~EOHj`BN#Jp=?q`Nd;O{W(YSb4Qcu6wLAYRc`fcq!Una z%Zkc{jgND+RqQpprkYxma+iN(vhj4DP4ap~#DZj`_iyPcF3b(Ow)RUk`}(@)=ddv0 zA>)h$popHheK4Zku_|ZQrl%W_rSyK^gsiMOG9UIOKvgKl54Z-|uO3^xrsij(eFWFT zE@HUX@<@>JhfCrQbCA=fQH{w*Kcj)}EL(P=9&z-bIqczY4Fdh{>Ow!PrRs1D1Vr(o zSKE|(+WViOMx-y+UYfJs^tqM{Kw$Zci+u#{rMv?{_Y||?{4BRAlLO>;pLnXl)?&Pc zatRDx-fym5GbILDJ{67#>u8H;fXzKGK)@vE_Wa^Y%Vp54FC7aJ^Isi)eCD-iwA?dN z-YB^=7nFQ-WAeYx`{g!8+`^d;z2;DsvEUskUfy#g4^IpXFal*3-iC@1jIrLEy?0>r z(VO)8KjCMKyX|^2FC!|6aCGg}sSPYG*Hfrb5iGB8`SY1+_|2A^KSBEs^9iCQa}oll znINbWjT3R^l>-v)CW!AR107U@Sq@o^8-b;|0Qzh>Is+z;5j*sM0E2#e{j|TGNW(rn zEwi+LJIylryS0Xw(D)-Rtnqd=%F$i?ywlX}(?IE)mekvQt_OoTxHos0JbR930F&n6 zoZ|xkRxcocKRUf3Afw^9N`cHQY&tdBt(*D$3RX-I<`L_^5GW^0y9Oc2g&}Mnz>pEcL?WllCQKN#)ayRh_2x74|H9U6@<-Cif`pi+Do4BlJ77 zr|(gc#{X9SFPV4_J?T)jE||~G_XueB_H=V1HLq^~zj1HFYml zf?n|R4DzYs?cH7-YcNh{=#3*GMe{tMyt?YsyaKN;f&Klx@Kd2UQ>WwI`Tl$DE8mKu z?d$C&@G`#twKPgg=UA+susu+c07ND(Huy!(lV%@o^4?de?(m#=mKF7i%g{FTBUj+FPPMINc|CP7XcbNH?U_ned-ud zibPj^j*}IX*j|81c-VJmS!(W=iqiYJU4#%4%X0gA`11i*2Bo>5rzg>?u$_gi3;(Hl zVs?27XcNC>Vrp8;_mHU%o>I13iZ{7`5imP;HDeO|)K&$aDSm9!P11LN#f`vpo0mez z-DUTKS>SR9^js`UQB{&sBnB-mxzW?@Cz|ZK5G}(q#~5R98NC|#%TB;Z1Qs;2z6s)C zSH=x>J-Me2u6NNZpkcodiQYbSt!C?E=_N81Zav6;RuHW+#ioe+Xq~Z}R zuunFdpB_gp95l*2!08n&PID*Fj`gPC6b+A~@#!4fT$2*&rdNm9SwY2}3rVtOwscg@ zcx6qGotGs-K;Gu<*=1ukV@WdZ@`RaD0V4s>qhLH%B5HJ<&o{yB#zukxD+))7fK$FS zN<&AIg*7wq)+N(!e;pj0}Lkh!$q+Cs_Uje>C0ms~1clJ1G-!0@qt$X~E>h-v6wU7;Di0H6O zwVAmzD$P|ap^2F!Tf982<9(Q?!)XkQ`|@xLn(ejbe*eJMN9s0|WMBQVI+NrA@5Qpy z3xd6C=A&eDYQf3z=Tqj2*45y|8fFSU+-!RNI8o>MgHhUIwW3O|NeVwf6br5)=YjZh zw*lL76%~ISy7uzGSNh%qYI$lXLygZjt-{%IC>C{p6dRMxsm6m7vzw`KA|Zn;$w}{H zDcspVNK50=Zj^wETenWriZjuOSv@oeZ$O#(_l>0(QE@>cA?)EO)zqbLZMcKDO3vi| z7H7Ao6bZLEtisjfTC+__+iNl|&7JiL+YP2T(Sq-$$h23k)(mJhn)}ZdKlN(+V${An zwjv$XH+$&XQV8K`!l#8V;BWKL6@)G3A}@TC(-VGfmuJvC?WHSyi6>@wmbT;Un6fM$dH}Ao>U- z%psL+H~Wgyio^3SSrlKZPb8XjC*nDuRwuyR-<hz#G3|R8} zBWNrpARoWhlsJxb{3g)p%5Gp1(ccXEyzg;6BY(ed*1YnOu>Yo&%?P=A#omhbapYO$ zKc65eM#n=Sa89pJ^+Z1*JCt%-l5MAwQE4nak3Kwf^+5VkI4CHCukVg%vymq=YtlGa z6Y1N3miN+zO7(6y2>SX78TsEEm4gu75t(*r$!EM9Psy&t?9?4Mo2}y2?0+d&`@PNS 
zzms}R-Bh%L-I`@|zO%%JMYQY88z)&ndt=br3V`f8+Z=)m9uNF)v1m1Zozm4B zzdEe&hp0Erp?q0c`;p3x3o*#AglDrP7w;PW;`RW7pUYNsN&>nN(Zpv^yK744g-XNU znSl}+b;Lq-`gQ;zWsh=7DEWeS9W+No7L9G3a7CfgoNY-r2hWgT*zUZuuhGZi-Slk0 zvioxf*O?_%Bb$ASvQ2Dj%l2%)#}Ic*-m%$GPe& zJV()M9D6RqDBy9$*L&!}3q{`%UE3|>Cw$HFK!?@Z_ySQVt%CqyCzG1}o5_7RFvD1D zfvAnIYa;)%;!*o#h$i;U;2T*yvX~FZS=zBLv4KuRMFo*%=|#)Rt*9~u(VR#Wv7!G-iL;#+u4n* zMcK?&*7JP{E{ED=bE~_drer8aSp3cZK0NpxN+eH)KPtkim=3kpHCh_8M?X%w`L(6f z>)+5QR(4~A?fjL9VVME&s+{_-I9hGl4my1kW_S)I?sgE;d5rC0%^!z2IlK4Z@ zC-p3e`x?fS=yrpongO^UZ!(~V8?Fp!cw@T2@fTSl!K)k78EefC^cWe?+Y9ho!+n-X z5T+h85m&K9U~)lLM5qZx_;1#hhWa+Mb4Sw_ylq;E7^ee%My%g1dt`!%m@IQN%X~Yv z^C*g{t>;qAsDl(bC;_};rL3mpKXpFII-DQ6`888G=AR>W>++tZnXOab-4~{b2CFNY zaZef0zoZ-_z?*nJ{8nJUs2))*3UU@NPM>p%v4)>S#abQE)vn@l2LE%keBgx|7)uOY z59UDGFnyef2kP?rHyA6OQd3IWS=1SnsI~55Ass+Lzk>&X4)ArA4AVN!Zg$9SHvI4^ z6v?XC!3@$;6w%OuV6pIpGCq2fH zzH~l_@J!`4^kFS{vAFLB?A0R#&O|_6V3lPm(o%ixVSTNyr*G{;)ASfwJiNAaB@7ID zzIRk@&F1vvkn7v+=RUat-cLS9?KAXWYDmfRt-zf0rHr6fzL#TKpPE8VGsZD5L1Gv-G+5- zugU56%?oY89R=6wt+($!gBA(qd0=u6K@xLcUdz zK>?zqsAiU<>~uQY>WAGGB}1!a2ny-WF0dZxW(>4AuHxD;H!z8n&ch~3!h1nI*BzzU z7^k61Dp_9FifQ$S*e18QhAePHR-@|> zq*p73tddlGIIdT%zXm#1LX|BmZvsH#1Ey6$#YE2J=7rLH7Y*9Y%h>2QoIhyU`JT>K*^>+i1{d_h;cFAYH@Mg`~TKSz}UVp%WuN z9W)g|wKlAydPiYd@EfHjq>E0-$|dDipGfs49&f4sV%liaQg_s78`Pt|W7vNy1ZRnW65`Xl7+cwDIg zB1i*u_mg{2K@(d~c?RxdKGbgx(=EHJbcx@A`zVto{0Xz%LQX#zYIR@iNNBf=vko_2 zRVZ;coOkaXTN0jNjXEDO+z|qCJs3Bz>%YIoUDZb=_W3IxQPE?k)Qk1%VBL_>D&k#Y z>2QkFx)Ou+kF1+inoOaoQ$L&=Gnby1Yx(u!Kd~GQebnVHnzmLR(A<05O~<+n>vqU zntWLXPb202W==Md9*coWTx}8=MDybA@zbkrel-H#;q2@wc z-5r`xDUBio(c;I4;5Xx+NnE~5Uk3A!znPG4IZr(N*hx>0c|!A~eQJ{E{G0(jr~#Vh zJu|}TmHU3-;oyVaC>Ac}yMT!-_d;TgPlFw&{FKVWGFBYvEN?ZTJY^fr#|9qX3c(rB zDZXMwjQCZCxZMs$Qdiv8)7lwcSIpo;4FL@m_^uX6me>K6)yD6tn?7#Z>2L~(8jRth z5fn%Ysakni8p-%`^Cj8z$Z385&*!+1^O588>|eC?LP~A%5Hv-)P47NztD*PikHpHB z-PcUFvqsKFB|QEfY@xmnUf=|c^yE#UacOQ=#T zl2XLE3U0YsJvRIDGvo2w!U3(!_K9U@=;xymUrwvE^%Cl9JUqt5hr>16w1~dDNgsij z+RCaPF|AG+t8A1S?IFSiYBQCyF1^#%A;69*r@Z%)^%zGS(=}$3J`6B-P(Ed+a<(Tq z;l~2eU=1Deb1&gw5VU@-U6mmLr2akJYkJz2->Y$QQNm6EGvU6iBNRBEK8mG568Ik6gOxE5AFky1TBD?rS z2VvHsumS(V@CM@|rn4)c0G7v}Qw71WE)+*fk}36ph{DFc9A>d!Nv!50_D4UEQtMf# z_A<+uT{eR3uhE>3vv$TP9Fl$EsR$i+YegqXnO)CiWY%yf-Msz!xarBRsO9XB_VOv7_4Fg3PaG+wqF-lU{4g&2z>l# zQe>422x^@Fq>_ri54UXyLlJ;X@Zsm?RxLC|E3mbKQHj1c1R|&hMQ>uAg(d-bU8Wu6 zF~?OLY!j02CUT_&9zKbX;xoJ-$J^su$#ZST*se1Kh>r#gBgN&45dvCL%=j zSvY=cP}4pl@GxI1R~K@PVzC-kPyOw@7u_QkKn(%3o7?3gJ&_1@mgF(x$oo2Ee`Xb` zw%!VWnepXU_}PhpU$MuaTsC30X5lmUP#`WmI#_(%RQTdwFF{mQ)f2LBlaMYM@R$lP zfwWkebhMqB9^ARfmnL6^Pfw|40maT&og28Ybp-M(V*bq<>8i!Y*xBH8fe1Pe$Fo(f z*8r~Hngzm>WZnO4`rLwKxFFhxNI&|%cwBqie4pZ9h}h<}tvIk|aQL}z+nc#dL;9_V z`-8yO^!_&eynH`kpJODmzonOB>q`T;ByX<)t=-}6Yj^;~aG+>6#^sQNY6a}Td z9KV#y1TAXZnLV^Re*y4g^*SztMV*_(KDY0+LQXBgavO1r%%$ThZdA`x3qc@qbwQSp z+DSlgi^lty@llJ@zpspr;!i`QjF(|fK9bn3r>c5+5hRmG#VJj(oiYWGkA_CI)D~al zgx){pj4pC7$!^XETn>ML)efY=%R z(|Ne}%MJJYC72?zWUaoqw>#xD0WwZ<=*sMb@wpnCn0m|0YxMu_W8+IfgA+V_-tnc? zr^wBS6>1|&i?+Hq;Db^!K2#spzWrt2+H*-XP!%u7Lk0Wg z!@>wt&(4OD@7dU2Es1=7U}WxWM3Ommm0a18zS6(&nm%+m+*|rD@76FdkB9QPeLpSX zK1r;r?BDs+t zIdk3s@%}{XOhrVXnI~%qf_dEC?8~$l2}FL{-?cGlLOwQ2#CGc&*d(4^)sL>6rk4J< z@7s@Y)GkCt+&tl1CQA2Tht=AbL{Gv>{LHc_UsA;6Ksx-O)E3&Ahz{27)zy_mKPZ#Wqnfx+Yn^+xZ-;s0p* z3Wq44@9SrQ6$Jre ze1GpBu(LDI+&TB$d(YgNrM7n`x(50mhmXDWar=9H#^tsYRc_!b+4z1AOR0mkI5Pk&f4~3Q_gd0HlKcbp@}#2X7Hb)F0hsWfxP|@cLKOf@y)#E zEYs2FBPJ0;VU)XIY1ZvvgC5Jq=O5RbdXEf!8`qt4? 
z#m0SgzoVxFUFmlV%^a540l8ld<#!%LE)mvaz=nHtD^a6xQHe7^A^t;I;#&dnlbsV) z=QnlVysRL$S!iz+pMmJoUmFGCue-z~xLbqKOgRyVY}uofFcS*f$Wv9cVpE2bX?EN@ zIwc>PxF7s@agPs=2^jCFnr(hC#t$E7D1t-F4z4#r2SrPqYqEEr?Y_(q6Q#M;?J-#%tVjO+#xuwy=gPd>>A_0xRq9>BabN~lCsp3oj zVJ)EfTr6yIp$Gkvto!`_^-vQ zYFqS>6rys=b_e!Pf@nr-G?u8U{U-seJAWxTZ9mj20c(5o-E;eHGxJVZ#x zS{S{cJOj{y#BewD(xNj+J&tYagq#4ngiYNXk`j zH_qo$k;_J?K&rQ>ZVd3ZXNv{wYs??WZ0%Sr6~jTMeE@;~ofkhq9E8DR2a(}%PXSjd zQsv;}-kj%KG1F^`-bzQEblT-A3Lx;aGeK(`dMW!VohU%5@ulg$-#N?1ZRSyo1|^ZO zvaThVB%85&eiqgD``x)xBj?3k{*ATZ1!AacHaxG-f=iz?ncH7B z)piv;Eg^f!m9(Jp{As`URqww&ElX}4aw}UM7f{>9-AarW_yf*@o!8}HyFgBn;TULb z5K+ttqBS|fQGchI+AWvC&MhD3`;jy1r`}y077KTQf|$S5{W5Y>d+f_RaWgcg)7(K4 zXwY9|{k9@X+<&*W%lzdzzNqaZ*9Ey(Uyd#iWN}9KZAHM zel(y1v}MdFtRa!HCR3{K*wT+-3+I!iPVPh8NK!ihHx`#qv$`^rONE~}g$`fOx9~;6 zIUD*p3-M)Y&~TD7Qso`;DL@&hZvog_@pJVcC(}$|B1jr#$g#W7;K4N|iy8_WrUw!i zWMlO6bCwWl;L9%2i+a_C_Sa6z5R)q@uG!2r8|`Q6R|cAe;UZD(lY`C>R>{^H3CC^6d&x*;C2$qfMY)q z9|7A_4=oGO4CWEcC7(AG!XJi^$t%MCp}u-W^pBl$nfxG)4X5zEtzCv|J8_1MIVJNz z-qqciC#KH3`aO0L+f>trAE-k~MdsER#)8N}sBX{k%kMe*NFmqFAtPfMzdVVSDm5$iymBpW}0h66S*_ZB3 zG&ozV|EP|?epPLk>&=^T0oLMqwI`;}KZdopoM$YSUc11#SK<8|`L^8BPiR$Ck<57x zC?zm>#J-dI*4@`};IZP{$NY*JvN`ONl1ZZN3k#3Mx%%4c`hq#w@u{QB%4a;{RjPba z41$fn<+Wy5onC#D-9=_^eM$ZbPwm~hHhT+GvWhCScf&wE=NjwH z3^|0~Z9`;VpYmvT!iT&Q#(+Nv%t!&?6HAZm2Ygx(I|Y5l5spBg|Gx}#(rN>wS}8sHDWmbV zvAz3dx}wvKmCdzZRLL!O9c~aT3f5s-7Wtk+ijv*))DC0oTB@s;U6+|DMeO6?3e_th zE2y54YP?Wd^P1dehmg$=4C=}>x!P2m!r71R*#&M>5rmFYqPsNh$WaZA%%C(Be6b8D6cZ9d}7w#9WFKs~&SHy84FzsSjP|{cZ zaZx{JNK|J@hyOtY-&BKf=)DY1?{7rSsq9UUQ*FzOHC9(F^mugpqc2-t`76v?-VMHr zh{l-asX(#LHP-c)T&l~G)r+pxAMdG2BuM4!vAud-VYKbDHrbKG@VfebQT&@G{y#>O zsNgg2;DIb*{&h2X-H+VS+ulX)qVi!w_PtemKVLq!Ua`4Ryy@W?yY%Cm)qaz;Q?gsJ zC*iO{z>DYA$OB8>+wbTPY>w1#-=Qv7f06e{dl`J9gQZt@0SXt|`$QY+&__^#Ual*>jS>_QP-7>~G}SRse9 z%s10_ifyJ3ouuFB88*fgxZETBkg&DTBDYRPvo87UYKM>h3?aOEb^CsYba?jR+042!8ir0E=B4+<0vKO;&s@T-+_X!a<{K z{I)tCTH0O_&ff#@3CVNl-68!{-t1q@KX0$uuO#06e*q{1*Zi*s09e6`0svr@(PH(G zRevB~MvH&LBQ3V`001C~0ssKuJgdb?;*+*71VAGP;ue7bfD0%9001t!Bk5yWOdxoj z0PXg1g1}IVWWcf@u#7D) z+t`&4pl{5Ek7coJInke;{(AX;000`J0002+X;~0hwJZQkv?vOU^`IcInijAdK7gDp zSMrx1(t}m~xz;EAg*apr01yxb002N}%hk)(y~@j&^D-eY)PsV+^7pHe=xv1oCQ4|7 zx$x`NkLBOV01zDo002N6mXo84X#r_5`CoplCIwat1*`j{FD3|d z;r~psP{-=usZIZ1{+%jU{=b~$FIV+f|BU_m0Dxpr0001_X1Qv*$?AR2SBnFy1p{;8 z|I0MNYN24&#bf~mgKk-!ys!Q}xcK?izXyfZC;F=s`ad^XBQ{(`@TQRzcLk9<`F;3&?*IUuLID5(&_$~gJ85z2v|TQy1uh=mwpuh;{aF6lvX4#w-&IgpUhA%} zW1rZQ)J+rgI*+;dxow4%)!&yF9#?981^(jQB`!|xS*ZsA^d1EO0062d3pTk}B+!;$ zUrZkC`dICI->*0Qe%p2K;!vHAy7bokRZVh>H1)psi001=f{{yGKK*)4z R+-?8>002ovPDHLkV1hagpV|Nb literal 0 HcmV?d00001 diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/aiken-compile.lock b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/aiken-compile.lock new file mode 100644 index 00000000..e69de29b diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig new file mode 100644 index 00000000..0759674c --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig @@ -0,0 +1,9 @@ +root = true + +[*.ak] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true diff --git 
a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes new file mode 100644 index 00000000..99fefcf4 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes @@ -0,0 +1,2 @@ +# Temp hack to get some syntax highlighting on github +*.ak linguist-language=Gleam diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml new file mode 100644 index 00000000..b0081ac7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml @@ -0,0 +1,64 @@ +name: Continuous Integration + +on: + workflow_dispatch: + push: + branches: ["main"] + tags: ["*.*.*"] + pull_request: + branches: ["main"] + +env: + CARGO_TERM_COLOR: always + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: 📥 Checkout repository + uses: actions/checkout@v3 + + - name: 🧰 Setup Pages + uses: actions/configure-pages@v2 + + - name: 🧰 Install Aiken + uses: aiken-lang/setup-aiken@v1 + with: + version: v1.1.9 + + - name: 📝 Run fmt + run: aiken fmt --check + + - name: 🔬 Run tests + run: aiken check + + - name: 📘 Generate documentation + shell: bash + working-directory: . + run: aiken docs -o docs + + - name: 📦 Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: "docs/" + + deploy: + if: ${{ startsWith(github.ref, 'refs/tags') }} + needs: build + runs-on: ubuntu-latest + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + steps: + - name: 🚀 Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v1 diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore new file mode 100644 index 00000000..3a3d38e6 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore @@ -0,0 +1,3 @@ +build/ +docs/ +.DS_Store \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md new file mode 100644 index 00000000..62345b32 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md @@ -0,0 +1,805 @@ +# Changelog + +## v2.2.0 - 2024-12-13 + +### Added + +- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes. 
+- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting in pairs while specifying how to combine values on key conflict. + +## v2.1.0 - 2024-09-14 + +### Added + +- Various new helper functions: + - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects. + - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionnary in a single op. + - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes. + - [`aiken/primitive/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/primitive/math/rational.html#pow): for computing (int) powers of rational numbers. + - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): efficiently compare two value-like. + - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): extracting value subsets from parent value. + +- Comparison functions for various Cardano types: + - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials. + - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governacen/voter.html#compare): for ordering voters. + - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references. + - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purpose. + +- New BLS12-381 crypto modules: + - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html) + - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html) + - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html) + +### Changed + +- N/A + +### Removed + +- N/A + +## v2.0.0 - 2024-09-01 + +> [!NOTE] +> Significant performance improvements (mostly on CPU) across all boards mostly due to the integration of Plutus V3. +> +>

see benchmarks +> +> test | cpu | mem +> --- | --- | --- +> aiken/cbor.{serialise_1} | -38.20% | ±0.00% +> aiken/cbor.{serialise_2} | -38.20% | ±0.00% +> aiken/cbor.{serialise_3} | -37.25% | ±0.00% +> aiken/cbor.{serialise_4} | -41.95% | ±0.00% +> aiken/cbor.{serialise_5} | -42.77% | ±0.00% +> aiken/cbor.{serialise_6} | -42.63% | ±0.00% +> aiken/cbor.{serialise_7} | -40.51% | ±0.00% +> aiken/cbor.{serialise_8} | -37.25% | ±0.00% +> aiken/cbor.{serialise_9} | -41.95% | ±0.00% +> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% +> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% +> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% +> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% +> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% +> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% +> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% +> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% +> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% +> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% +> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% +> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% +> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% +> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% +> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% +> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% +> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% +> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% +> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% +> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% +> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% +> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% +> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% +> aiken/collection/dict.{find_1} | -20.63% | ±0.00% +> aiken/collection/dict.{find_2} | -20.43% | ±0.00% +> aiken/collection/dict.{find_3} | -22.03% | ±0.00% +> aiken/collection/dict.{find_4} | -22.53% | ±0.00% +> aiken/collection/dict.{get_1} | -20.63% | ±0.00% +> aiken/collection/dict.{get_2} | -22.72% | ±0.00% +> aiken/collection/dict.{get_3} | -23.26% | ±0.00% +> aiken/collection/dict.{get_4} | -26.91% | ±0.00% +> aiken/collection/dict.{get_5} | -26.30% | ±0.00% +> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% +> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% +> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% +> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% +> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% +> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% +> aiken/collection/dict.{size_1} | -37.90% | ±0.00% +> aiken/collection/dict.{size_2} | -32.34% | ±0.00% +> aiken/collection/dict.{size_3} | -27.97% | ±0.00% +> aiken/collection/dict.{values_1} | -20.30% | ±0.00% +> aiken/collection/dict.{values_2} | -17.58% | ±0.00% +> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% +> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% +> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% +> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% +> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% +> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% +> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% +> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% +> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% +> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% +> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% +> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% +> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% +> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% +> aiken/collection/dict.{map_1} | -19.56% | ±0.00% +> aiken/collection/dict.{map_2} | -23.66% | ±0.00% +> aiken/collection/dict.{union_1} | -17.91% | ±0.00% +> aiken/collection/dict.{union_2} | -8.67% | ±0.00% +> aiken/collection/dict.{union_3} | -22.82% | ±0.00% +> aiken/collection/dict.{union_4} | -22.77% | ±0.00% +> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% +> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% +> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% +> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% +> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% +> aiken/collection/list.{push_1} | -8.02% | ±0.00% +> aiken/collection/list.{push_2} | 1.25% | ±0.00% +> aiken/collection/list.{range_1} | -27.77% | ±0.00% +> aiken/collection/list.{range_2} | -27.39% | ±0.00% +> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% +> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% +> aiken/collection/list.{all_1} | -28.36% | ±0.00% +> aiken/collection/list.{all_2} | -27.59% | ±0.00% +> aiken/collection/list.{all_3} | -27.94% | ±0.00% +> aiken/collection/list.{any_1} | -28.23% | ±0.00% +> aiken/collection/list.{any_2} | -28.09% | ±0.00% +> aiken/collection/list.{any_3} | -26.95% | ±0.00% +> aiken/collection/list.{at_1} | -27.60% | ±0.00% +> aiken/collection/list.{at_2} | -19.96% | ±0.00% +> aiken/collection/list.{at_3} | -27.60% | ±0.00% +> aiken/collection/list.{at_4} | -20.77% | ±0.00% +> aiken/collection/list.{at_5} | -25.75% | ±0.00% +> aiken/collection/list.{count_empty} | -36.83% | ±0.00% +> aiken/collection/list.{count_all} | -32.37% | ±0.00% +> aiken/collection/list.{count_some} | -31.73% | ±0.00% +> aiken/collection/list.{count_none} | -30.44% | ±0.00% +> aiken/collection/list.{find_1} | -20.59% | ±0.00% +> aiken/collection/list.{find_2} | -25.53% | ±0.00% +> aiken/collection/list.{find_3} | -19.64% | ±0.00% +> aiken/collection/list.{has_1} | -27.88% | ±0.00% +> aiken/collection/list.{has_2} | -27.69% | ±0.00% +> aiken/collection/list.{has_3} | -26.95% | ±0.00% +> aiken/collection/list.{head_1} | -14.03% | ±0.00% +> aiken/collection/list.{head_2} | -16.90% | ±0.00% +> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% +> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% +> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% +> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% +> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% +> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% +> aiken/collection/list.{last_1} | -19.18% | ±0.00% +> aiken/collection/list.{last_2} | -16.26% | ±0.00% +> aiken/collection/list.{last_3} | -17.13% | ±0.00% +> aiken/collection/list.{length_1} | -37.90% | ±0.00% +> aiken/collection/list.{length_2} | -30.89% | ±0.00% +> aiken/collection/list.{delete_1} | -20.20% | ±0.00% +> aiken/collection/list.{delete_2} | -15.02% | ±0.00% +> aiken/collection/list.{delete_3} | -20.55% | ±0.00% +> aiken/collection/list.{delete_4} | -22.46% | ±0.00% +> aiken/collection/list.{drop_1} | -24.62% | ±0.00% +> aiken/collection/list.{drop_2} | -28.08% | ±0.00% +> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% +> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% +> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% +> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% +> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% +> aiken/collection/list.{filter_2} | -32.06% | ±0.00% +> aiken/collection/list.{filter_3} | -31.39% | ±0.00% +> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% +> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% +> aiken/collection/list.{init_1} | -19.64% | ±0.00% +> aiken/collection/list.{init_2} | -20.01% | ±0.00% +> aiken/collection/list.{init_3} | -13.72% | ±0.00% +> aiken/collection/list.{partition_1} | -14.63% | ±0.00% +> aiken/collection/list.{partition_2} | -16.85% | ±0.00% +> aiken/collection/list.{partition_3} | -16.63% | ±0.00% +> aiken/collection/list.{partition_4} | -16.87% | ±0.00% +> aiken/collection/list.{partition_5} | -22.94% | ±0.00% +> aiken/collection/list.{slice_1} | -29.08% | -2.81% +> aiken/collection/list.{slice_2} | -30.11% | -2.25% +> aiken/collection/list.{slice_3} | -30.29% | -1.46% +> aiken/collection/list.{slice_4} | -28.53% | -1.48% +> aiken/collection/list.{slice_5} | -29.73% | -1.64% +> aiken/collection/list.{slice_6} | -32.01% | -1.80% +> aiken/collection/list.{span_1} | -15.05% | ±0.00% +> aiken/collection/list.{span_2} | -18.03% | ±0.00% +> aiken/collection/list.{span_3} | -12.49% | ±0.00% +> aiken/collection/list.{span_4} | -18.13% | ±0.00% +> aiken/collection/list.{tail_1} | -8.88% | ±0.00% +> aiken/collection/list.{tail_2} | -16.90% | ±0.00% +> aiken/collection/list.{take_1} | -24.98% | ±0.00% +> aiken/collection/list.{take_2} | -24.35% | ±0.00% +> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% +> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% +> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% +> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% +> aiken/collection/list.{unique_1} | -20.20% | ±0.00% +> aiken/collection/list.{unique_2} | -24.34% | ±0.00% +> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% +> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% +> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% +> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% +> aiken/collection/list.{map_1} | -19.79% | ±0.00% +> aiken/collection/list.{map_2} | -16.75% | ±0.00% +> aiken/collection/list.{map2_1} | -20.10% | ±0.00% +> aiken/collection/list.{map2_2} | -17.46% | ±0.00% +> aiken/collection/list.{map2_3} | -15.92% | ±0.00% +> aiken/collection/list.{map3_1} | -20.39% | ±0.00% +> aiken/collection/list.{map3_2} | -19.22% | ±0.00% +> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% +> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% +> aiken/collection/list.{sort_1} | -22.31% | ±0.00% +> aiken/collection/list.{sort_2} | -17.93% | ±0.00% +> aiken/collection/list.{sort_3} | -23.09% | ±0.00% +> aiken/collection/list.{sort_4} | -20.20% | ±0.00% +> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% +> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% +> aiken/collection/list.{concat_1} | -6.56% | ±0.00% +> aiken/collection/list.{concat_2} | -11.25% | ±0.00% +> aiken/collection/list.{concat_3} | -9.35% | ±0.00% +> aiken/collection/list.{difference_1} | -24.23% | ±0.00% +> aiken/collection/list.{difference_2} | -22.59% | ±0.00% +> aiken/collection/list.{difference_3} | -10.64% | ±0.00% +> aiken/collection/list.{difference_4} | -21.68% | ±0.00% +> aiken/collection/list.{zip_1} | -20.10% | ±0.00% +> aiken/collection/list.{zip_2} | -19.17% | ±0.00% +> aiken/collection/list.{zip_3} | -10.35% | ±0.00% +> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% +> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% +> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% +> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% +> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% +> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% +> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% +> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% +> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% +> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% +> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% +> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% +> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% +> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% +> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% +> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% +> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% +> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% +> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% +> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% +> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% +> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% +> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% +> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% +> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% +> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% +> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% +> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% +> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% +> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% +> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% +> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% +> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% +> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% +> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% +> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% +> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% +> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% +> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% +> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% +> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% +> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% +> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% +> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% +> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% +> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% +> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% +> aiken/interval.{contains_1} | -21.08% | -4.01% +> aiken/interval.{contains_2} | -31.22% | -13.95% +> aiken/interval.{contains_3} | -26.80% | -10.08% +> aiken/interval.{contains_4} | -31.02% | -13.67% +> aiken/interval.{contains_5} | -32.32% | -13.59% +> aiken/interval.{contains_6} | -28.15% | -9.81% +> aiken/interval.{contains_7} | -32.11% | -13.32% +> aiken/interval.{contains_8} | -29.56% | 
-12.59% +> aiken/interval.{contains_9} | -29.68% | -12.78% +> aiken/interval.{contains_10} | -29.68% | -12.78% +> aiken/interval.{contains_11} | -35.17% | -17.77% +> aiken/interval.{contains_12} | -21.09% | -3.86% +> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% +> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% +> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% +> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% +> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% +> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% +> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% +> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% +> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% +> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% +> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% +> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% +> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% +> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% +> aiken/interval.{hull_1} | -21.51% | -0.73% +> aiken/interval.{hull_2} | -23.06% | -0.80% +> aiken/interval.{hull_3} | -22.00% | -0.86% +> aiken/interval.{intersection_1} | -21.51% | -0.73% +> aiken/interval.{intersection_2} | -21.51% | -0.73% +> aiken/interval.{intersection_3} | -26.55% | -4.65% +> aiken/interval.{intersection_4} | -26.45% | -4.51% +> aiken/interval.{intersection_5} | -22.87% | -0.76% +> aiken/interval.{intersection_6} | -19.73% | -0.98% +> aiken/math.{abs_1} | -61.39% | -21.07% +> aiken/math.{abs_2} | -70.90% | -34.84% +> aiken/math.{clamp_1} | -60.95% | -23.55% +> aiken/math.{clamp_2} | -60.95% | -23.55% +> aiken/math.{clamp_3} | -59.22% | -18.20% +> aiken/math.{gcd_test1} | -47.20% | ±0.00% +> aiken/math.{gcd_test2} | -47.81% | ±0.00% +> aiken/math.{gcd_test3} | -46.10% | ±0.00% +> aiken/math.{is_sqrt1} | -87.41% | -68.64% +> aiken/math.{is_sqrt2} | -87.41% | -68.64% +> aiken/math.{log_10_2} | -51.35% | -8.40% +> aiken/math.{log_42_2} | -51.46% | -8.24% +> aiken/math.{log_42_3} | -51.05% | -7.81% +> aiken/math.{log_5_0} | -54.05% | -12.92% +> aiken/math.{log_4_4} | -50.59% | -9.31% +> aiken/math.{log_4_43} | -49.14% | -7.28% +> aiken/math.{max_1} | -61.39% | -21.07% +> aiken/math.{max_2} | -61.39% | -21.07% +> aiken/math.{max_3} | -61.39% | -21.07% +> aiken/math.{min_1} | -61.39% | -21.07% +> aiken/math.{min_2} | -61.39% | -21.07% +> aiken/math.{min_3} | -61.39% | -21.07% +> aiken/math.{pow_3_5} | -46.34% | ±0.00% +> aiken/math.{pow_7_2} | -46.38% | ±0.00% +> aiken/math.{pow_3__4} | -43.50% | ±0.00% +> aiken/math.{pow_0_0} | -43.95% | ±0.00% +> aiken/math.{pow_513_3} | -45.80% | ±0.00% +> aiken/math.{pow_2_4} | -46.79% | ±0.00% +> aiken/math.{pow_2_42} | -46.77% | ±0.00% +> aiken/math.{pow2_neg} | -44.71% | ±0.00% +> aiken/math.{pow2_0} | -45.00% | ±0.00% +> aiken/math.{pow2_1} | -45.00% | ±0.00% +> aiken/math.{pow2_4} | -45.00% | ±0.00% +> aiken/math.{pow2_42} | -42.01% | ±0.00% +> aiken/math.{pow2_256} | -41.40% | ±0.00% +> aiken/math.{sqrt1} | -32.56% | -17.18% +> aiken/math.{sqrt2} | -32.56% | -17.18% +> aiken/math.{sqrt3} | -49.99% | -8.90% +> aiken/math.{sqrt4} | -51.76% | -3.90% +> aiken/math.{sqrt5} | -52.63% | -1.33% +> aiken/math.{sqrt6} | -28.16% | -15.41% +> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% +> aiken/math/rational.{new_1} | -22.98% | ±0.00% +> aiken/math/rational.{zero_1} | -8.08% | ±0.00% +> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% +> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% +> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% +> aiken/math/rational.{negate_1} | -15.39% | ±0.00% +> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% +> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% +> aiken/math/rational.{add_1} | -15.11% | ±0.00% +> aiken/math/rational.{add_2} | -15.11% | ±0.00% +> aiken/math/rational.{div_1} | -22.31% | -2.75% +> aiken/math/rational.{div_2} | -22.37% | -2.79% +> aiken/math/rational.{mul_1} | -13.37% | ±0.00% +> aiken/math/rational.{mul_2} | -13.37% | ±0.00% +> aiken/math/rational.{mul_3} | -26.25% | ±0.00% +> aiken/math/rational.{sub_1} | -15.11% | ±0.00% +> aiken/math/rational.{sub_2} | -15.11% | ±0.00% +> aiken/math/rational.{sub_3} | -15.11% | ±0.00% +> aiken/math/rational.{compare_1} | -21.70% | ±0.00% +> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% +> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% +> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% +> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% +> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% +> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% +> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% +> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% +> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% +> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% +> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% +> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% +> aiken/math/rational.{floor_1} | -29.49% | ±0.00% +> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% +> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% +> aiken/math/rational.{round_1} | -25.17% | ±0.00% +> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% +> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% +> aiken/option.{is_none_1} | -26.56% | ±0.00% +> aiken/option.{is_none_2} | -27.52% | ±0.00% +> aiken/option.{is_some_1} | -27.52% | ±0.00% +> aiken/option.{is_some_2} | -26.56% | ±0.00% +> aiken/option.{and_then_1} | -20.19% | ±0.00% +> aiken/option.{and_then_2} | -22.15% | ±0.00% +> aiken/option.{and_then_3} | -21.85% | ±0.00% +> aiken/option.{choice_1} | -17.11% | ±0.00% +> aiken/option.{choice_2} | -19.75% | ±0.00% +> aiken/option.{choice_3} | -18.68% | ±0.00% +> aiken/option.{flatten_1} | -12.25% | ±0.00% +> aiken/option.{flatten_2} | -15.41% | ±0.00% +> aiken/option.{flatten_3} | -19.46% | ±0.00% +> aiken/option.{flatten_4} | -14.31% | ±0.00% +> aiken/option.{map_1} | -19.89% | ±0.00% +> aiken/option.{map_2} | -18.18% | ±0.00% +> aiken/option.{map2_1} | -20.47% | ±0.00% +> aiken/option.{map2_2} | -19.93% | ±0.00% +> aiken/option.{map2_3} | -13.64% | ±0.00% +> aiken/option.{map3_1} | -20.74% | ±0.00% +> aiken/option.{map3_2} | -20.00% | ±0.00% +> aiken/option.{map3_3} | -19.90% | ±0.00% +> aiken/option.{or_try_1} | -14.36% | ±0.00% +> aiken/option.{or_try_2} | -14.36% | ±0.00% +> aiken/option.{or_else_1} | -38.16% | ±0.00% +> aiken/option.{or_else_2} | -27.62% | ±0.00% +> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% +> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% +> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% +> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% +> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% +> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% +> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% +> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% +> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% +> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% +> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% +> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% +> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% +> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% +> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% +> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% +> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% +> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% +> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% +> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% +> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% +> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% +> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% +> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% +> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% +> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% +> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% +> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% +> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% +> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% +> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% +> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% +> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% +> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% +> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% +> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% +> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% +> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% +> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% +> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% +> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% +> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% +> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% +> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% +> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% +> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% +> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% +> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% +> aiken/primitive/string.{concat_1} | -92.30% | -80.10% +> aiken/primitive/string.{concat_2} | -97.34% | -85.87% +> aiken/primitive/string.{concat_3} | -98.67% | -80.35% +> aiken/primitive/string.{join_1} | -42.87% | ±0.00% +> aiken/primitive/string.{join_2} | -37.65% | ±0.00% +> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% +> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% +> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% +> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% +> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% +> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% +> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% +> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% +> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% +> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% +> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% +> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% +> cardano/assets.{add_1} | -27.84% | ±0.00% +> cardano/assets.{add_2} | -27.56% | -0.54% +> cardano/assets.{add_3} | -26.39% | ±0.00% +> cardano/assets.{add_4} | -29.75% | -10.41% +> cardano/assets.{add_5} | -27.80% | ±0.00% +> cardano/assets.{merge_1} | -26.02% | ±0.00% +> cardano/assets.{merge_2} | -19.60% | ±0.00% +> cardano/assets.{merge_3} | -23.80% | ±0.00% +> cardano/assets.{merge_4} | -25.92% | ±0.00% +> cardano/assets.{merge_5} | -27.61% | -1.98% +> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% +> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% +> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% +> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% +> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% +> cardano/assets.{reduce_1} | -24.31% | ±0.00% +> cardano/assets.{reduce_2} | -20.89% | ±0.00% +> cardano/assets.{reduce_3} | -36.21% | ±0.00% +>
+ +### Added + +- New modules covering Conway-related features (i.e. governance) + - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) + - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) + +- New primitives in `aiken/collection/pairs`: + - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) + - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) + +- New primitives in `aiken/crypto`: + - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) + - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) + +- New primitives in `aiken/math`: + - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) + +- New primitives in `aiken/primitive/bytearray`: + - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) + - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) + - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) + - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) + - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) + +- New primitives in `aiken/primitive/int`: + - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) + - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) + +- New primitives in `aiken/crypto`: + - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) + - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) + +### Changed + +- Few modules have been relocated and better organized: + - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) + - **collections** + - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) + - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) + - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) + - **primitive** + - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) + - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) + - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) + - **cardano** + - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) + - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) + - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` + - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) + +- Several zero-argument functions have been turned into top-level constants + - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty) + - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty) + - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything) + - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero) + - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero) + +- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3. + +- The `ScriptContext` type has split from `cardano/transaction` (originally `aiken/transaction`) and moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html) and adjusted to its new form as per Plutus V3. + +- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` into `VerificationKey` and `Script` respectively. + +- The function `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last` respectively. + +- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`. + +### Removed + +- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere. + +- `MintedValue` (from `aiken/transaction/value` originally) and its associated functions are no longer needed and, therefore, gone. + +## v1.9.0 - 2024-05-24 + +### Added + +- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`). + +### Changed + +- **BREAKING-CHANGE**
+  Specialized all `Dict` keys to `ByteArray`, thus removing the need to pass an extra comparison function to many functions. `Dict` is, however, still parameterized by a phantom type for keys.
+
+- **BREAKING-CHANGE**
+  A few functions from `Dict` have been renamed for consistency:
+  - `from_list` -> `from_pairs`
+  - `from_ascending_list` -> `from_ascending_pairs`
+  - `to_list` -> `to_pairs`
+
+### Removed
+
+N/A
+
+## v1.8.0 - 2024-03-28
+
+### Added
+
+- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements.
+
+- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a `Value` while enforcing the invariants expected of a `Value`.
+
+- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`.
+
+### Changed
+
+- Added disclaimers to the documentation of [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding.
+
+### Removed
+
+N/A
+
+## v1.7.0 - 2023-11-07
+
+### Added
+
+- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a value's index in a list.
+- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions.
+- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null.
+
+### Changed
+
+- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contains a null quantity of Ada.
+
+### Removed
+
+N/A
+
+## v1.6.0 - 2023-09-08
+
+### Added
+
+- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentiation for powers of two.
+- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing if a bit is set in a bytearray (MSB).
+
+## v1.5.0 - 2023-08-16
+
+### Removed
+
+- Retired `list.and` and `list.or` because of the new keywords for logical operator chaining.
+
+## v1.4.0 - 2023-07-21
+
+### Changed
+
+- Fixed a missing null-check on `value.add`. Adding a null quantity of a token is now correctly a no-op.
+
+## v1.3.0 - 2023-06-30
+
+### Added
+
+- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method.
+- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#denominator): For accessing the parts of a rational value.
+- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing the [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values (see the sketch at the end of this section).
+- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing the [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values.
+
+### Changed
+
+- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before that fix, removing all assets under a given policy could leave an empty dictionary of assets still present in the `Value`.
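+
+A minimal usage sketch of the additions above (illustrative only: it assumes the `Option`-returning signatures documented for `math.sqrt`, `rational.new` and `rational.arithmetic_mean`, and the test name is made up):
+
+```aiken
+use aiken/math
+use aiken/math/rational
+
+test example_sqrt_and_rational_mean() {
+  // Build two rationals; `new` is expected to return `None` for a zero denominator.
+  expect Some(a) = rational.new(1, 2)
+  expect Some(b) = rational.new(1, 4)
+
+  // The arithmetic mean of 1/2 and 1/4 is 3/8. Comparing with `compare`
+  // avoids assuming that intermediate results are automatically reduced.
+  expect Some(mean) = rational.arithmetic_mean([a, b])
+  expect Some(three_eighths) = rational.new(3, 8)
+
+  and {
+    // Integer square root, assumed to return `Option<Int>`.
+    math.sqrt(25) == Some(5),
+    rational.compare(mean, three_eighths) == Equal,
+  }
+}
+```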
+ +## v1.2.0 - 2023-06-17 + +### Added + +- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue) +- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value` +- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue` +- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex encoded `String` +- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers. + - [x] `abs` + - [x] `add` + - [x] `ceil` + - [x] `compare` + - [x] `compare_with` + - [x] `div` + - [x] `floor` + - [x] `from_int` + - [x] `mul` + - [x] `negate` + - [x] `new` + - [x] `proper_fraction` + - [x] `reciprocal` + - [x] `reduce` + - [x] `round` + - [x] `round_even` + - [x] `sub` + - [x] `truncate` + - [x] `zero` + +### Removed + +- module `MintedValue` was merged with `Value` + +## v1.1.0 - 2023-06-06 + +### Added + +- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate. + +- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a utf-8 encoded `ByteArray`, when possible. + +- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): for left and right folds over dictionnary elements in ascending key order. + +- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exist, the provided merge function is called. + +- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset. + +- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionnary of dictionnaries. + +- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. This allows to simplify some of the implementation for `Value` which no longer needs to handle the special case where null-quantity tokens would be present. It isn't possible to construct `MintedValue` by hand, they come from the script context entirely and are 'read-only'. + +- More documentation for `dict` and `interval` modules. + +### Changed + +> **Warning** +> +> Most of those changes are breaking-changes. Though, given we're still in an +> alpha state, only the `minor` component is bumped from the version number. +> Please forgive us. + +- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as last argument. For example: + + ``` + fn foldl(self: List
, with: fn(a, b) -> b, zero: b) -> b + + ↓ becomes + + fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b + ``` + +- Fixed implementation of `bytearray.slice`; `slice` would otherwise behave as if the second argument were an offset. + +- Rename `transaction/value.add` into `transaction/value.merge`. + +- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. + +- Fixed various examples from the documentation + +### Removed + +- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. + +## v1.0.0 - 2023-04-13 + +### Added + +N/A + +### Changed + +N/A + +### Removed + +N/A diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE new file mode 100644 index 00000000..4a1de273 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022 Lucas Rosa + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md new file mode 100644 index 00000000..4cd6fef2 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md @@ -0,0 +1,71 @@ +
+
+

Aiken Standard Library

+ +[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE) +[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml) + +
+
+
+
+## Getting started
+
+```
+aiken add aiken-lang/stdlib --version v2
+```
+
+## Compatibility
+
+aiken's version | stdlib's version(s)
+--- | ---
+`v1.1.3`<br/>`v1.1.4`<br/>`v1.1.5`<br/>`v1.1.6`<br/>`v1.1.7` | `>= 2.1.0`
+`v1.1.1`<br/>`v1.1.2` | `>= 2.0.0` && `< 2.1.0`
+`v1.0.29-alpha`<br/>
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` +`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` + +## Overview + +The official standard library for the [Aiken](https://aiken-lang.org) Cardano +smart-contract language. + +It extends the language builtins with useful data-types, functions, constants +and aliases that make using Aiken a bliss. + +```aiken +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash} +use cardano/transaction.{OutputReference, Transaction} + +pub type Datum { + owner: VerificationKeyHash, +} + +pub type Redeemer { + msg: ByteArray, +} + +/// A simple validator which replicates a basic public/private signature lock. +/// +/// - The key (hash) is set as datum when the funds are sent to the script address. +/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message +/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' +/// +validator hello_world { + spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { + expect Some(Datum { owner }) = datum + + let must_say_hello = redeemer.msg == "Hello, World!" + + let must_be_signed = list.has(self.extra_signatories, owner) + + and { + must_say_hello, + must_be_signed, + } + } +} +``` + +## Stats + +![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock new file mode 100644 index 00000000..769ac20f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock @@ -0,0 +1,16 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" + +[[packages]] +name = "aiken-lang/fuzz" +version = "v2" +requirements = [] +source = "github" + +[etags] +"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml new file mode 100644 index 00000000..cbc76a0b --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml @@ -0,0 +1,15 @@ +name = "aiken-lang/stdlib" +version = "2.2.0" +compiler = "v1.1.9" +plutus = "v3" +description = "The Aiken Standard Library" + +[repository] +user = "aiken-lang" +project = "stdlib" +platform = "github" + +[[dependencies]] +name = "aiken-lang/fuzz" +version = "v2" +source = "github" diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak new file mode 100644 index 00000000..f0c66d69 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak @@ -0,0 +1,293 @@ +use aiken +use aiken/builtin.{decode_utf8, 
serialise_data} +use aiken/primitive/bytearray + +/// Obtain a String representation of _anything_. This is particularly (and only) useful for tracing +/// and debugging. This function is expensive and should not be used in any production code as it +/// will very likely explodes the validator's budget. +/// +/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) +/// of the underlying on-chain binary representation of the data. It's not as +/// easy to read as plain Aiken code, but it is handy for troubleshooting values +/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is +/// a good idea in the Cardano world. +/// +/// ```aiken +/// cbor.diagnostic(42) == "42" +/// cbor.diagnostic(#"a1b2") == "h'A1B2'" +/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" +/// cbor.diagnostic([]) == "[]" +/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" +/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" +/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" +/// cbor.diagnostic(Some(42)) == "121([_ 42])" +/// cbor.diagnostic(None) == "122([])" +/// ``` +pub fn diagnostic(self: Data) -> String { + aiken.diagnostic(self, #"") + |> decode_utf8 +} + +/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). +/// In particular, we have the following property: +/// +/// ```aiken +/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) +/// ``` +/// +/// > [!CAUTION] +/// > Unfortunately, this function isn't derived from a builtin primitive. It +/// > is therefore an order of magnitude more expensive than its counterpart +/// > and shall be used with care. +/// > +/// > In general, one might prefer avoiding deserialisation unless truly necessary. +/// > Yet, it may come in handy for testing and in rare scenarios. +pub fn deserialise(bytes: ByteArray) -> Option { + let length = bytearray.length(bytes) + + let peek = + fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(bytearray.at(bytes, length - cursor))(cursor - offset) + } + } + } + + let take = + fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { + fn(cursor) { + if 0 >= cursor { + deserialise_failure + } else { + callback(builtin.slice_bytearray(length - cursor, n, bytes))( + cursor - n, + ) + } + } + } + + if length == 0 { + None + } else { + let Pair(result, consumed) = decode_data(peek, take)(length) + if consumed != 0 { + None + } else { + Some(result) + } + } +} + +/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). +/// +/// This is particularly useful in combination with hashing functions, as a way +/// to obtain a byte representation that matches the serialised representation +/// used by the ledger in the context of on-chain code. +/// +/// Note that the output matches the output of [`diagnostic`](#diagnostic), +/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a +/// textual representation of the CBOR encoding that is human friendly and +/// useful for debugging. 
+/// +/// ```aiken +/// cbor.serialise(42) == #"182a" +/// cbor.serialise(#"a1b2") == #"42a1b2" +/// cbor.serialise([]) == #"80" +/// cbor.serialise((1, 2)) == #"9f0102ff" +/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" +/// cbor.serialise([(1, #"ff")]) == #"a10141ff" +/// cbor.serialise(Some(42)) == #"d8799f182aff" +/// cbor.serialise(None) == #"d87a80" +/// ``` +pub fn serialise(self: Data) -> ByteArray { + serialise_data(self) +} + +type Byte = + Int + +type Decoder
= + fn(Int) -> Pair + +type Peek = + fn(Int, fn(Byte) -> Decoder) -> Decoder + +type Take = + fn(Int, fn(ByteArray) -> Decoder) -> Decoder + +fn return(data: Data) -> Decoder { + fn(cursor) { Pair(data, cursor) } +} + +const deserialise_failure: Pair = { + let empty: Data = "" + Pair(empty, -1) + } + +const token_begin_bytes = 0x5f + +const token_begin_list = 0x9f + +const token_begin_map = 0xbf + +const token_break = 0xff + +fn decode_data(peek: Peek, take: Take) -> Decoder { + let next <- peek(1) + let major_type = next / 32 + if major_type <= 2 { + if major_type == 0 { + let i <- decode_uint(peek, take, next) + return(builtin.i_data(i)) + } else if major_type == 1 { + let i <- decode_uint(peek, take, next - 32) + return(builtin.i_data(-i - 1)) + } else { + if next == token_begin_bytes { + let b <- decode_chunks(peek, take) + return(builtin.b_data(b)) + } else { + let b <- decode_bytes(peek, take, next - 64) + return(builtin.b_data(b)) + } + } + } else if major_type == 6 { + let tag <- decode_uint(peek, take, next - 192) + let next <- peek(1) + if tag == 102 { + fn(_) { deserialise_failure } + } else { + let ix = + if tag >= 1280 { + tag - 1280 + 7 + } else { + tag - 121 + } + if next == token_begin_list { + let fields <- decode_indefinite(peek, take, decode_data) + return(builtin.constr_data(ix, fields)) + } else { + let size <- decode_uint(peek, take, next - 128) + let fields <- decode_definite(peek, take, decode_data, size) + return(builtin.constr_data(ix, fields)) + } + } + } else if major_type == 4 { + if next == token_begin_list { + let xs <- decode_indefinite(peek, take, decode_data) + return(builtin.list_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 128) + let xs <- decode_definite(peek, take, decode_data, size) + return(builtin.list_data(xs)) + } + } else if major_type == 5 { + if next == token_begin_map { + let xs <- decode_indefinite(peek, take, decode_pair) + return(builtin.map_data(xs)) + } else { + let size <- decode_uint(peek, take, next - 160) + let xs <- decode_definite(peek, take, decode_pair, size) + return(builtin.map_data(xs)) + } + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_pair(peek: Peek, take: Take) -> Decoder> { + fn(cursor) { + let Pair(k, cursor) = decode_data(peek, take)(cursor) + let Pair(v, cursor) = decode_data(peek, take)(cursor) + Pair(Pair(k, v), cursor) + } +} + +fn decode_uint( + peek: Peek, + take: Take, + header: Int, + and_then: fn(Int) -> Decoder, +) -> Decoder { + if header < 24 { + and_then(header) + } else if header == 24 { + let payload <- peek(1) + and_then(payload) + } else if header < 28 { + let width = bytearray.at(#[2, 4, 8], header - 25) + let payload <- take(width) + and_then(bytearray.to_int_big_endian(payload)) + } else { + fn(_) { deserialise_failure } + } +} + +fn decode_bytes( + peek: Peek, + take: Take, + header: Int, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let width <- decode_uint(peek, take, header) + let bytes <- take(width) + and_then(bytes) +} + +fn decode_chunks( + peek: Peek, + take: Take, + and_then: fn(ByteArray) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then("") + } else { + let chunk <- decode_bytes(peek, take, next - 64) + let chunks <- decode_chunks(peek, take) + and_then(builtin.append_bytearray(chunk, chunks)) + } +} + +fn decode_definite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + size: Int, + and_then: fn(List) -> Decoder, +) -> Decoder { + if size <= 0 { + and_then([]) + } else { + fn(cursor) 
{ + let Pair(elem, cursor) = decode_one(peek, take)(cursor) + { + let elems <- decode_definite(peek, take, decode_one, size - 1) + and_then([elem, ..elems]) + }(cursor) + } + } +} + +fn decode_indefinite( + peek: Peek, + take: Take, + decode_one: fn(Peek, Take) -> Decoder, + and_then: fn(List) -> Decoder, +) -> Decoder { + let next <- peek(1) + if next == token_break { + and_then([]) + } else { + fn(cursor) { + let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) + { + let elems <- decode_indefinite(peek, take, decode_one) + and_then([elem, ..elems]) + }(cursor) + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak new file mode 100644 index 00000000..28d9f5bb --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak @@ -0,0 +1,297 @@ +use aiken/cbor.{deserialise, diagnostic, serialise} +use aiken/fuzz + +// ------------------------------------------------------------------ diagnostic + +test diagnostic_1() { + diagnostic(42) == @"42" +} + +test diagnostic_2() { + diagnostic(#"a1b2") == @"h'A1B2'" +} + +test diagnostic_3() { + diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" +} + +test diagnostic_4() { + diagnostic([]) == @"[]" +} + +test diagnostic_5() { + diagnostic((1, 2)) == @"[_ 1, 2]" +} + +test diagnostic_6() { + diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" +} + +test diagnostic_7() { + diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" +} + +test diagnostic_7_alt() { + diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" +} + +test diagnostic_8() { + diagnostic(Some(42)) == @"121([_ 42])" +} + +test diagnostic_9() { + diagnostic(None) == @"122([])" +} + +test diagnostic_10() { + let xs: List<(Int, Int)> = [] + diagnostic(xs) == @"[]" +} + +test diagnostic_10_alt() { + let xs: Pairs = [] + diagnostic(xs) == @"{}" +} + +type Foo { + foo: Bar, +} + +type Bar { + A + B(Int) +} + +test diagnostic_11() { + diagnostic(Foo { foo: A }) == @"121([_ 121([])])" +} + +test diagnostic_12() { + diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" +} + +type Baz { + a0: Int, + b0: ByteArray, +} + +test diagnostic_13() { + diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" +} + +test diagnostic_14() { + diagnostic([0]) == @"[_ 0]" +} + +test diagnostic_15() { + diagnostic(-42) == @"-42" +} + +test diagnostic_16() { + diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" +} + +// ------------------------------------------------------------------ serialise + +test serialise_1() { + serialise(42) == #"182a" +} + +test serialise_2() { + serialise(#"a1b2") == #"42a1b2" +} + +test serialise_3() { + serialise([]) == #"80" +} + +test serialise_4() { + serialise((1, 2)) == #"9f0102ff" +} + +test serialise_5() { + serialise((1, #"ff", 3)) == #"9f0141ff03ff" +} + +test serialise_6() { + serialise([(1, #"ff")]) == #"9f9f0141ffffff" +} + +test serialise_7() { + serialise(Some(42)) == #"d8799f182aff" +} + +test serialise_8() { + serialise(None) == #"d87a80" +} + +test serialise_9() { + serialise([Pair(1, #"ff")]) == #"a10141ff" +} + +// ------------------------------------------------------------------ deserialise + +type AnyData { + AnyInt(Int) + AnyByteArray(ByteArray) + AnyList(List) + AnyPairs(Pairs) + AnyUnaryConstr0(UnaryConstr0) + AnyUnaryConstr1(UnaryConstr1) + 
AnyUnaryConstr2(UnaryConstr2) + AnyBinaryConstr0(BinaryConstr0) + AnyBinaryConstr1(BinaryConstr1) +} + +type UnaryConstr0 { + UnaryConstr0 +} + +type UnaryConstr1 { + field0: String, +} + +type UnaryConstr2 { + field0: Int, + field1: List>, +} + +type BinaryConstr0 = + Bool + +type BinaryConstr1 = + Option + +fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { + let k <- fuzz.and_then(any_key) + let v <- fuzz.map(any_value) + Pair(k, v) +} + +fn any_data() -> Fuzzer { + fuzz.either6( + { + let i <- fuzz.map(fuzz.int()) + AnyInt(i) + }, + { + let bs <- fuzz.map(fuzz.bytearray()) + AnyByteArray(bs) + }, + { + let xs <- fuzz.map(fuzz.list(fuzz.int())) + AnyList(xs) + }, + { + let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) + AnyPairs(ps) + }, + fuzz.either3( + fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), + fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), + { + let i <- fuzz.and_then(fuzz.int()) + let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) + AnyUnaryConstr2(UnaryConstr2(i, xs)) + }, + ), + fuzz.either( + { + let b <- fuzz.map(fuzz.bool()) + AnyBinaryConstr0(b) + }, + { + let o <- fuzz.map(fuzz.option(fuzz.int())) + AnyBinaryConstr1(o) + }, + ), + ) +} + +test unit_deserialise_not_enough_bytes_1() { + expect None = deserialise(#"") +} + +test unit_deserialise_not_enough_bytes_2() { + expect None = deserialise(#"82") +} + +test unit_deserialise_non_empty_leftovers() { + expect None = deserialise(#"811442") +} + +test unit_deserialise_invalid_header() { + expect None = deserialise(#"f1") +} + +test unit_deserialise_invalid_uint() { + expect None = deserialise(#"1d0013bdae") +} + +/// A full script context with a minting policy and various assets. Meant to be +/// non-trivial and cover many things we might encounter in a transaction. 
+test bench_deserialise_script_context() { + expect Some(_) = + deserialise( + #"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", + ) +} + +test prop_deserialise_any_data(any via any_data()) { + when any is { + AnyInt(i) -> { + fuzz.label(@"Int") + expect Some(data) = deserialise(serialise(i)) + expect i_decoded: Int = data + i_decoded == i + } + AnyByteArray(bs) -> { + fuzz.label(@"ByteArray") + expect Some(data) = deserialise(serialise(bs)) + expect bs_decoded: ByteArray = data + bs_decoded == bs + } + AnyList(xs) -> { + fuzz.label(@"List") + expect Some(data) = deserialise(serialise(xs)) + expect xs_decoded: List = data + xs_decoded == xs + } + AnyPairs(ps) -> { + fuzz.label(@"Pairs") + expect Some(data) = deserialise(serialise(ps)) + expect ps_decoded: Pairs = data + ps_decoded == ps + } + AnyUnaryConstr0(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr0 = data + constr_decoded == constr + } + AnyUnaryConstr1(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr1 = data + constr_decoded == constr + } + AnyUnaryConstr2(constr) -> { + fuzz.label(@"(unary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: UnaryConstr2 = data + constr_decoded == constr + } + AnyBinaryConstr0(constr) -> { + fuzz.label(@"(binary) Constr") + expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr0 = data + constr_decoded == constr + } + AnyBinaryConstr1(constr) -> { + fuzz.label(@"(binary) Constr") + 
expect Some(data) = deserialise(serialise(constr)) + expect constr_decoded: BinaryConstr1 = data + constr_decoded == constr + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak new file mode 100644 index 00000000..3d4d332e --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak @@ -0,0 +1,4 @@ +/// A non negative integer that materializes the position of an element in a +/// collection. +pub type Index = + Int diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak new file mode 100644 index 00000000..681d0bae --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak @@ -0,0 +1,1174 @@ +//// A module for working with bytearray dictionaries. +//// +//// +//// > [!IMPORTANT] +//// > +//// > Dictionaries are **ordered sets** of key-value pairs, which thus +//// > preserve some invariants. Specifically, each key is only present once in +//// > the dictionary and all keys are stored in ascending lexicographic order. +//// > +//// > These invariants allow for more optimized functions to operate on `Dict`, +//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` +//// > from an unknown `Data`, you must first recover an `Pairs` and use +//// > [`dict.from_ascending_list`](#from_ascending_list). + +use aiken/builtin + +/// An opaque `Dict`. The type is opaque because the module maintains some +/// invariant, namely: there's only one occurrence of a given key in the dictionary. +/// +/// Note that the `key` parameter is a phantom-type, and only present as a +/// means of documentation. Keys can be any type, yet will need to comparable +/// to use functions like `insert`. +/// +/// See for example: +/// +/// ```aiken +/// pub type Value = +/// Dict> +/// ``` +pub opaque type Dict { + inner: Pairs, +} + +// ## Constructing + +/// An empty dictionnary. +/// ```aiken +/// dict.to_pairs(dict.empty) == [] +/// ``` +pub const empty: Dict = Dict { inner: [] } + +const foo = #"666f6f" + +const bar = #"626172" + +const baz = #"62617a" + +const fixture_1 = + empty + |> insert(foo, 42) + |> insert(bar, 14) + +/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending +/// keys. This function fails (i.e. halts the program execution) if the list isn't +/// sorted. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// let result = +/// dict.from_ascending_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +/// +/// This is meant to be used to turn a list constructed off-chain into a `Dict` +/// which has taken care of maintaining interval invariants. This function still +/// performs a sanity check on all keys to avoid silly mistakes. 
It is, however, +/// considerably faster than ['from_pairs'](from_pairs) +pub fn from_ascending_pairs(xs: Pairs) -> Dict { + let Void = check_ascending_list(xs) + Dict { inner: xs } +} + +fn check_ascending_list(xs: Pairs) { + when xs is { + [] -> Void + [_] -> Void + [Pair(x0, _), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + check_ascending_list([e, ..rest]) + } else { + fail @"keys in associative list aren't in ascending order" + } + } +} + +/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** +/// value doesn't satisfy the predicate. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// +/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail +/// ``` +pub fn from_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) -> Dict { + let Void = check_ascending_pairs_with(xs, predicate) + Dict { inner: xs } +} + +fn check_ascending_pairs_with( + xs: Pairs, + predicate: fn(value) -> Bool, +) { + when xs is { + [] -> Void + [Pair(_, v)] -> + if predicate(v) { + Void + } else { + fail @"value doesn't satisfy predicate" + } + [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> + if builtin.less_than_bytearray(x0, x1) { + if predicate(v0) { + check_ascending_pairs_with([e, ..rest], predicate) + } else { + fail @"value doesn't satisfy predicate" + } + } else { + fail @"keys in pairs aren't in ascending order" + } + } +} + +test bench_from_ascending_pairs() { + let dict = + from_ascending_pairs( + [ + Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), + Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), + Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), + Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), + ], + ) + + size(dict) == 16 +} + +/// Construct a dictionary from a list of key-value pairs. Note that when a key is present +/// multiple times, the first occurrence prevails. +/// +/// ```aiken +/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] +/// +/// let result = +/// dict.from_pairs(pairs) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn from_pairs(self: Pairs) -> Dict { + Dict { inner: do_from_pairs(self) } +} + +fn do_from_pairs(xs: Pairs) -> Pairs { + when xs is { + [] -> [] + [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) + } +} + +test from_list_1() { + from_pairs([]) == empty +} + +test from_list_2() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( + [Pair(bar, 14), Pair(foo, 42)], + ) +} + +test from_list_3() { + from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 +} + +test from_list_4() { + from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 +} + +test bench_from_pairs() { + let dict = + from_pairs( + [ + Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), + Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), + Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), + Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), + ], + ) + + size(dict) == 16 +} + +// ## Inspecting + +/// Finds a value in the dictionary, and returns the first key found to have that value. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 42) +/// |> dict.insert(key: "b", value: 14) +/// |> dict.insert(key: "c", value: 42) +/// |> dict.find(42) +/// +/// result == Some("a") +/// ``` +pub fn find(self: Dict, value v: value) -> Option { + do_find(self.inner, v) +} + +fn do_find(self: Pairs, value v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + do_find(rest, v) + } + } +} + +test find_1() { + find(empty, foo) == None +} + +test find_2() { + find( + empty + |> insert(foo, 14), + 14, + ) == Some(foo) +} + +test find_3() { + find( + empty + |> insert(foo, 14), + 42, + ) == None +} + +test find_4() { + find( + empty + |> insert(foo, 14) + |> insert(bar, 42) + |> insert(baz, 14), + 14, + ) == Some(baz) +} + +/// Get a value in the dict by its key. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.get(key: "a") +/// +/// result == Some("Aiken") +/// ``` +pub fn get(self: Dict, key: ByteArray) -> Option { + do_get(self.inner, key) +} + +fn do_get(self: Pairs, key k: ByteArray) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + Some(v) + } else { + None + } + } else { + do_get(rest, k) + } + } +} + +test get_1() { + get(empty, foo) == None +} + +test get_2() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: foo) == Some("Aiken") +} + +test get_3() { + let m = + empty + |> insert(foo, "Aiken") + |> insert(bar, "awesome") + get(m, key: baz) == None +} + +test get_4() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "bcd") == None +} + +test get_5() { + let m = + empty + |> insert("aaa", "1") + |> insert("bbb", "2") + |> insert("ccc", "3") + |> insert("ddd", "4") + |> insert("eee", "5") + |> insert("fff", "6") + |> insert("ggg", "7") + |> insert("hhh", "8") + |> insert("iii", "9") + |> insert("jjj", "10") + + get(m, "kkk") == None +} + +/// Check if a key exists in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: "Aiken") +/// |> dict.has_key("a") +/// +/// result == True +/// ``` +pub fn has_key(self: Dict, key k: ByteArray) -> Bool { + do_has_key(self.inner, k) +} + +fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { + when self is { + [] -> False + [Pair(k2, _), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + k == k2 + } else { + do_has_key(rest, k) + } + } +} + +test has_key_1() { + !has_key(empty, foo) +} + +test has_key_2() { + has_key( + empty + |> insert(foo, 14), + foo, + ) +} + +test has_key_3() { + !has_key( + empty + |> insert(foo, 14), + bar, + ) +} + +test has_key_4() { + has_key( + empty + |> insert(foo, 14) + |> insert(bar, 42), + bar, + ) +} + +/// Efficiently checks whether a dictionary is empty. +/// ```aiken +/// dict.is_empty(dict.empty) == True +/// ``` +pub fn is_empty(self: Dict) -> Bool { + when self.inner is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty(empty) +} + +/// Extract all the keys present in a given `Dict`. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("a", 1337) +/// |> dict.keys() +/// +/// result == ["a", "b"] +/// ``` +pub fn keys(self: Dict) -> List { + do_keys(self.inner) +} + +fn do_keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] + } +} + +test keys_1() { + keys(empty) == [] +} + +test keys_2() { + keys( + empty + |> insert(foo, 0) + |> insert(bar, 0), + ) == [bar, foo] +} + +/// Return the number of key-value pairs in the dictionary. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.size() +/// +/// result == 3 +/// ``` +pub fn size(self: Dict) -> Int { + do_size(self.inner) +} + +fn do_size(self: Pairs) -> Int { + when self is { + [] -> 0 + [_, ..rest] -> 1 + do_size(rest) + } +} + +test size_1() { + size(empty) == 0 +} + +test size_2() { + size( + empty + |> insert(foo, 14), + ) == 1 +} + +test size_3() { + size( + empty + |> insert(foo, 14) + |> insert(bar, 42), + ) == 2 +} + +/// Extract all the values present in a given `Dict`. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 14) +/// |> dict.insert("b", 42) +/// |> dict.insert("c", 1337) +/// |> dict.values() +/// +/// result == [14, 42, 1337] +/// ``` +pub fn values(self: Dict) -> List { + do_values(self.inner) +} + +fn do_values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..do_values(rest)] + } +} + +test values_1() { + values(empty) == [] +} + +test values_2() { + values( + empty + |> insert(foo, 3) + |> insert(bar, 4), + ) == [4, 3] +} + +// ## Modifying + +/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.delete(key: "a") +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200)] +/// ``` +pub fn delete(self: Dict, key: ByteArray) -> Dict { + Dict { inner: do_delete(self.inner, key) } +} + +fn do_delete( + self: Pairs, + key k: ByteArray, +) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_equals_bytearray(k, k2) { + if k == k2 { + rest + } else { + self + } + } else { + [Pair(k2, v2), ..do_delete(rest, k)] + } + } +} + +test delete_1() { + delete(empty, foo) == empty +} + +test delete_2() { + let m = + empty + |> insert(foo, 14) + delete(m, foo) == empty +} + +test delete_3() { + let m = + empty + |> insert(foo, 14) + delete(m, bar) == m +} + +test delete_4() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + !has_key(delete(m, foo), foo) +} + +test delete_5() { + let m = + empty + |> insert(foo, 14) + |> insert(bar, 14) + has_key(delete(m, bar), foo) +} + +test delete_6() { + let m = + empty + |> insert("aaa", 1) + |> insert("bbb", 2) + |> insert("ccc", 3) + |> insert("ddd", 4) + |> insert("eee", 5) + |> insert("fff", 6) + |> insert("ggg", 7) + |> insert("hhh", 8) + |> insert("iii", 9) + |> insert("jjj", 10) + + delete(m, "bcd") == m +} + +/// Keep only the key-value pairs that pass the given predicate. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.filter(fn(k, _v) { k != "a" }) +/// |> dict.to_pairs() +/// +/// result == [Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn filter( + self: Dict, + with: fn(ByteArray, value) -> Bool, +) -> Dict { + Dict { inner: do_filter(self.inner, with) } +} + +fn do_filter( + self: Pairs, + with: fn(ByteArray, value) -> Bool, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> + if with(k, v) { + [Pair(k, v), ..do_filter(rest, with)] + } else { + do_filter(rest, with) + } + } +} + +test filter_1() { + filter(empty, fn(_, _) { True }) == empty +} + +test filter_2() { + let expected = + empty + |> insert(foo, 42) + filter(fixture_1, fn(_, v) { v > 14 }) == expected +} + +test filter_3() { + let expected = + empty + |> insert(bar, 14) + filter(fixture_1, fn(k, _) { k == bar }) == expected +} + +/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 1) +/// |> dict.insert(key: "b", value: 2) +/// |> dict.insert(key: "a", value: 3) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 3), Pair("b", 2)] +/// ``` +pub fn insert( + self: Dict, + key k: ByteArray, + value v: value, +) -> Dict { + Dict { inner: do_insert(self.inner, k, v) } +} + +fn do_insert( + self: Pairs, + key k: ByteArray, + value v: value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..do_insert(rest, k, v)] + } + } + } +} + +test insert_1() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(foo, 14) + insert(m1, foo, 14) == m2 +} + +test insert_2() { + let m1 = + empty + |> insert(foo, 42) + let m2 = + empty + |> insert(bar, 14) + insert(m1, bar, 14) == insert(m2, foo, 42) +} + +/// Insert a value in the dictionary at a given key. When the key already exist, the provided +/// merge function is called. The value existing in the dictionary is passed as the second argument +/// to the merge function, and the new value is passed as the third argument. 
+/// +/// ```aiken +/// let sum = +/// fn (_k, a, b) { Some(a + b) } +/// +/// let result = +/// dict.empty +/// |> dict.insert_with(key: "a", value: 1, with: sum) +/// |> dict.insert_with(key: "b", value: 2, with: sum) +/// |> dict.insert_with(key: "a", value: 3, with: sum) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 4), Pair("b", 2)] +/// ``` +pub fn insert_with( + self: Dict, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { + inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), + } +} + +test insert_with_1() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 1)] +} + +test insert_with_2() { + let sum = + fn(_k, a, b) { Some(a + b) } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: sum) + |> insert_with(key: "bar", value: 2, with: sum) + |> insert_with(key: "foo", value: 3, with: sum) + |> to_pairs() + + result == [Pair("bar", 2), Pair("foo", 4)] +} + +test insert_with_3() { + let with = + fn(k, a, _b) { + if k == "foo" { + Some(a) + } else { + None + } + } + + let result = + empty + |> insert_with(key: "foo", value: 1, with: with) + |> insert_with(key: "bar", value: 2, with: with) + |> insert_with(key: "foo", value: 3, with: with) + |> insert_with(key: "bar", value: 4, with: with) + |> to_pairs() + + result == [Pair("foo", 1)] +} + +/// Apply a function to all key-value pairs in a Dict. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.map(fn(_k, v) { v * 2 }) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] +/// ``` +pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { + Dict { inner: do_map(self.inner, with) } +} + +fn do_map( + self: Pairs, + with: fn(ByteArray, a) -> b, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] + } +} + +test map_1() { + let result = + fixture_1 + |> map(with: fn(k, _) { k }) + get(result, foo) == Some(foo) +} + +test map_2() { + let result = + fixture_1 + |> map(with: fn(_, v) { v + 1 }) + get(result, foo) == Some(43) && size(result) == size(fixture_1) +} + +/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
+///
+/// ```aiken
+/// let (value, _) =
+///   dict.empty
+///     |> dict.insert(key: "a", value: 100)
+///     |> dict.insert(key: "b", value: 200)
+///     |> dict.pop(key: "a")
+///
+/// value == Some(100)
+/// ```
+pub fn pop(
+  self: Dict<key, value>,
+  key: ByteArray,
+) -> (Option<value>, Dict<key, value>) {
+  do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) })
+}
+
+fn do_pop(
+  self: Pairs<ByteArray, value>,
+  key k: ByteArray,
+  return: fn(Option<value>, Pairs<ByteArray, value>) -> result,
+) -> result {
+  when self is {
+    [] -> return(None, [])
+    [Pair(k2, v2), ..rest] ->
+      if builtin.less_than_equals_bytearray(k, k2) {
+        if k == k2 {
+          return(Some(v2), rest)
+        } else {
+          return(None, self)
+        }
+      } else {
+        do_pop(
+          rest,
+          k,
+          fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) },
+        )
+      }
+  }
+}
+
+test pop_1() {
+  pop(empty, foo) == (None, empty)
+}
+
+test pop_2() {
+  let m =
+    empty
+      |> insert(foo, 14)
+  pop(m, foo) == (Some(14), empty)
+}
+
+test pop_3() {
+  let m =
+    empty
+      |> insert(foo, 14)
+  pop(m, bar) == (None, m)
+}
+
+test pop_4() {
+  let m =
+    empty
+      |> insert(foo, 14)
+      |> insert(bar, 14)
+  pop(m, foo) == (Some(14), empty |> insert(bar, 14))
+}
+
+test pop_6() {
+  let m =
+    empty
+      |> insert("aaa", 1)
+      |> insert("bbb", 2)
+      |> insert("ccc", 3)
+      |> insert("ddd", 4)
+      |> insert("eee", 5)
+      |> insert("fff", 6)
+      |> insert("ggg", 7)
+      |> insert("hhh", 8)
+      |> insert("iii", 9)
+      |> insert("jjj", 10)
+
+  pop(m, "bcd") == (None, m)
+}
+
+// ## Combining
+
+/// Combine two dictionaries. If the same key exists in both the left and
+/// right dictionary, values from the left are preferred (i.e. left-biased).
+///
+/// ```aiken
+/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)])
+/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)])
+///
+/// let result =
+///   dict.union(left_dict, right_dict) |> dict.to_pairs()
+///
+/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)]
+/// ```
+pub fn union(
+  left: Dict<key, value>,
+  right: Dict<key, value>,
+) -> Dict<key, value> {
+  Dict { inner: do_union(left.inner, right.inner) }
+}
+
+fn do_union(
+  left: Pairs<ByteArray, value>,
+  right: Pairs<ByteArray, value>,
+) -> Pairs<ByteArray, value> {
+  when left is {
+    [] -> right
+    [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v))
+  }
+}
+
+test union_1() {
+  union(fixture_1, empty) == fixture_1
+}
+
+test union_2() {
+  union(empty, fixture_1) == fixture_1
+}
+
+test union_3() {
+  let left =
+    empty
+      |> insert(foo, 14)
+  let right =
+    empty
+      |> insert(bar, 42)
+      |> insert(baz, 1337)
+  union(left, right) == from_pairs(
+    [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)],
+  )
+}
+
+test union_4() {
+  let left =
+    empty
+      |> insert(foo, 14)
+  let right =
+    empty
+      |> insert(bar, 42)
+      |> insert(foo, 1337)
+  union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)])
+}
+
+/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present
+/// in both dictionaries. The first value received corresponds to the value in the left
+/// dictionary, whereas the second argument corresponds to the value in the right dictionary.
+///
+/// When the merge function returns `None`, the key is removed and not present in the union.
+/// +/// ```aiken +/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) +/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) +/// +/// let result = +/// dict.union_with( +/// left_dict, +/// right_dict, +/// fn(_k, v1, v2) { Some(v1 + v2) }, +/// ) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn union_with( + left: Dict, + right: Dict, + with: fn(ByteArray, value, value) -> Option, +) -> Dict { + Dict { inner: do_union_with(left.inner, right.inner, with) } +} + +fn do_union_with( + left: Pairs, + right: Pairs, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when left is { + [] -> right + [Pair(k, v), ..rest] -> + do_union_with(rest, do_insert_with(right, k, v, with), with) + } +} + +fn do_insert_with( + self: Pairs, + key k: ByteArray, + value v: value, + with: fn(ByteArray, value, value) -> Option, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if builtin.less_than_bytearray(k, k2) { + [Pair(k, v), ..self] + } else { + if k == k2 { + when with(k, v, v2) is { + Some(combined) -> [Pair(k, combined), ..rest] + None -> rest + } + } else { + [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] + } + } + } +} + +test union_with_1() { + let left = + empty + |> insert(foo, 14) + + let right = + empty + |> insert(bar, 42) + |> insert(foo, 1337) + + let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) + + result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) +} + +// ## Transforming + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldl(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldl( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldl(self.inner, zero, with) +} + +fn do_foldl( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) + } +} + +test fold_1() { + foldl(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test fold_2() { + foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Fold over the key-value pairs in a dictionary. The fold direction follows keys +/// in ascending order and is done from right-to-left. +/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert(key: "a", value: 100) +/// |> dict.insert(key: "b", value: 200) +/// |> dict.insert(key: "c", value: 300) +/// |> dict.foldr(0, fn(_k, v, r) { v + r }) +/// +/// result == 600 +/// ``` +pub fn foldr( + self: Dict, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + do_foldr(self.inner, zero, with) +} + +fn do_foldr( + self: Pairs, + zero: result, + with: fn(ByteArray, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr(empty, 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 +} + +/// Get the inner list holding the dictionary data. 
+/// +/// ```aiken +/// let result = +/// dict.empty +/// |> dict.insert("a", 100) +/// |> dict.insert("b", 200) +/// |> dict.insert("c", 300) +/// |> dict.to_pairs() +/// +/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] +/// ``` +pub fn to_pairs(self: Dict) -> Pairs { + self.inner +} + +test to_list_1() { + to_pairs(empty) == [] +} + +test to_list_2() { + to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak new file mode 100644 index 00000000..b8e7f675 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak @@ -0,0 +1,1411 @@ +use aiken/builtin +use aiken/primitive/bytearray +use aiken/primitive/int + +// ## Constructing + +/// Add an element in front of the list. Sometimes useful when combined with +/// other functions. +/// +/// ```aiken +/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] +/// ``` +pub fn push(self: List, elem: a) -> List { + [elem, ..self] +} + +test push_1() { + push([], 0) == [0] +} + +test push_2() { + push([2, 3], 1) == [1, 2, 3] +} + +/// Construct a list of a integer from a given range. +/// +/// ```aiken +/// list.range(0, 3) == [0, 1, 2, 3] +/// list.range(-1, 1) == [-1, 0, 1] +/// ``` +pub fn range(from: Int, to: Int) -> List { + if from > to { + [] + } else { + [from, ..range(from + 1, to)] + } +} + +test range_1() { + range(0, 3) == [0, 1, 2, 3] +} + +test range_2() { + range(-1, 1) == [-1, 0, 1] +} + +/// Construct a list filled with n copies of a value. +/// +/// ```aiken +/// list.repeat("na", 3) == ["na", "na", "na"] +/// ``` +pub fn repeat(elem: a, n_times: Int) -> List { + if n_times <= 0 { + [] + } else { + [elem, ..repeat(elem, n_times - 1)] + } +} + +test repeat_1() { + repeat(42, 0) == [] +} + +test repeat_2() { + repeat(14, 3) == [14, 14, 14] +} + +// ## Inspecting + +/// Determine if all elements of the list satisfy the given predicate. +/// +/// Note: an empty list always satisfies the predicate. +/// +/// ```aiken +/// list.all([], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n > 0 }) == True +/// list.all([1, 2, 3], fn(n) { n == 2 }) == False +/// ``` +pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { + when self is { + [] -> True + [x, ..xs] -> predicate(x) && all(xs, predicate) + } +} + +test all_1() { + all([1, 2, 3], fn(n) { n > 0 }) == True +} + +test all_2() { + all([1, 2, 3], fn(n) { n > 42 }) == False +} + +test all_3() { + all([], fn(n) { n == 42 }) == True +} + +/// Determine if at least one element of the list satisfies the given predicate. +/// +/// Note: an empty list never satisfies the predicate. 
+///
+/// ```aiken
+/// list.any([], fn(n) { n > 2 }) == False
+/// list.any([1, 2, 3], fn(n) { n > 0 }) == True
+/// list.any([1, 2, 3], fn(n) { n == 2 }) == True
+/// list.any([1, 2, 3], fn(n) { n < 0 }) == False
+/// ```
+pub fn any(self: List<a>, predicate: fn(a) -> Bool) -> Bool {
+  when self is {
+    [] -> False
+    [x, ..xs] -> predicate(x) || any(xs, predicate)
+  }
+}
+
+test any_1() {
+  any([1, 2, 3], fn(n) { n > 0 }) == True
+}
+
+test any_2() {
+  any([1, 2, 3], fn(n) { n > 42 }) == False
+}
+
+test any_3() {
+  any([], fn(n) { n == 42 }) == False
+}
+
+/// Return `Some(item)` at the given index, or `None` if the index is out of range. The index is 0-based.
+///
+/// ```aiken
+/// list.at([1, 2, 3], 1) == Some(2)
+/// list.at([1, 2, 3], 42) == None
+/// ```
+pub fn at(self: List<a>, index: Int) -> Option<a> {
+  when self is {
+    [] -> None
+    [x, ..xs] ->
+      if index == 0 {
+        Some(x)
+      } else {
+        at(xs, index - 1)
+      }
+  }
+}
+
+test at_1() {
+  at([1, 2, 3], -1) == None
+}
+
+test at_2() {
+  at([], 0) == None
+}
+
+test at_3() {
+  at([1, 2, 3], 3) == None
+}
+
+test at_4() {
+  at([1], 0) == Some(1)
+}
+
+test at_5() {
+  at([1, 2, 3], 2) == Some(3)
+}
+
+/// Count how many items in the list satisfy the given predicate.
+///
+/// ```aiken
+/// list.count([], fn(a) { a > 2 }) == 0
+/// list.count([1, 2, 3], fn(a) { a > 0 }) == 3
+/// list.count([1, 2, 3], fn(a) { a >= 2 }) == 2
+/// list.count([1, 2, 3], fn(a) { a > 5 }) == 0
+/// ```
+pub fn count(self: List<a>, predicate: fn(a) -> Bool) -> Int {
+  foldr(
+    self,
+    0,
+    fn(item, total) {
+      if predicate(item) {
+        total + 1
+      } else {
+        total
+      }
+    },
+  )
+}
+
+test count_empty() {
+  count([], fn(a) { a > 2 }) == 0
+}
+
+test count_all() {
+  count([1, 2, 3], fn(a) { a > 0 }) == 3
+}
+
+test count_some() {
+  count([1, 2, 3], fn(a) { a >= 2 }) == 2
+}
+
+test count_none() {
+  count([1, 2, 3], fn(a) { a > 5 }) == 0
+}
+
+/// Find the first element satisfying the given predicate, if any.
+///
+/// ```aiken
+/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2)
+/// list.find([4, 5, 6], fn(x) { x == 2 }) == None
+/// ```
+pub fn find(self: List<a>, predicate: fn(a) -> Bool) -> Option<a> {
+  when self is {
+    [] -> None
+    [x, ..xs] ->
+      if predicate(x) {
+        Some(x)
+      } else {
+        find(xs, predicate)
+      }
+  }
+}
+
+test find_1() {
+  find([1, 2, 3], fn(x) { x == 1 }) == Some(1)
+}
+
+test find_2() {
+  find([1, 2, 3], fn(x) { x > 42 }) == None
+}
+
+test find_3() {
+  find([], fn(_) { True }) == None
+}
+
+/// Figure out whether a list contains the given element.
+///
+/// ```aiken
+/// list.has([1, 2, 3], 2) == True
+/// list.has([1, 2, 3], 14) == False
+/// list.has([], 14) == False
+/// ```
+pub fn has(self: List<a>, elem: a) -> Bool {
+  when self is {
+    [] -> False
+    [x, ..xs] ->
+      if x == elem {
+        True
+      } else {
+        has(xs, elem)
+      }
+  }
+}
+
+test has_1() {
+  has([1, 2, 3], 1) == True
+}
+
+test has_2() {
+  has([1, 2, 3], 14) == False
+}
+
+test has_3() {
+  has([], 14) == False
+}
+
+/// Get the first element of a list, if any.
+///
+/// ```aiken
+/// list.head([1, 2, 3]) == Some(1)
+/// list.head([]) == None
+/// ```
+pub fn head(self: List<a>) -> Option<a> {
+  when self is {
+    [] -> None
+    _ -> Some(builtin.head_list(self))
+  }
+}
+
+test head_1() {
+  head([1, 2, 3]) == Some(1)
+}
+
+test head_2() {
+  head([]) == None
+}
+
+/// Checks whether a list is empty.
+/// +/// ```aiken +/// list.is_empty([]) == True +/// list.is_empty([1, 2, 3]) == False +/// ``` +pub fn is_empty(self: List) -> Bool { + when self is { + [] -> True + _ -> False + } +} + +test is_empty_1() { + is_empty([]) == True +} + +test is_empty_2() { + is_empty([1, 2, 3]) == False +} + +/// Gets the index of an element of a list, if any. Otherwise, returns None. +/// +/// ```aiken +/// list.index_of([1, 5, 2], 2) == Some(2) +/// list.index_of([1, 7, 3], 4) == None +/// list.index_of([1, 0, 9, 6], 6) == 3 +/// list.index_of([], 6) == None +/// ``` +pub fn index_of(self: List, elem: a) -> Option { + do_index_of(self, elem, 0) +} + +fn do_index_of(self: List, elem: a, i: Int) -> Option { + when self is { + [] -> None + [x, ..xs] -> + if x == elem { + Some(i) + } else { + do_index_of(xs, elem, i + 1) + } + } +} + +test index_of_1() { + index_of([1, 5, 2], 2) == Some(2) +} + +test index_of_2() { + index_of([1, 7, 3], 4) == None +} + +test index_of_3() { + index_of([1, 0, 9, 6], 6) == Some(3) +} + +test index_of_4() { + index_of([], 6) == None +} + +/// Get the last in the given list, if any. +/// +/// ```aiken +/// list.last([]) == None +/// list.last([1, 2, 3]) == Some(3) +/// ``` +pub fn last(self: List) -> Option { + when self is { + [] -> None + [x] -> Some(x) + [_, ..xs] -> last(xs) + } +} + +test last_1() { + last([]) == None +} + +test last_2() { + last([1]) == Some(1) +} + +test last_3() { + last([1, 2, 3, 4]) == Some(4) +} + +/// Get the number of elements in the given list. +/// +/// ```aiken +/// list.length([]) == 0 +/// list.length([1, 2, 3]) == 3 +/// ``` +pub fn length(self: List) -> Int { + when self is { + [] -> 0 + [_, ..xs] -> 1 + length(xs) + } +} + +test length_1() { + length([]) == 0 +} + +test length_2() { + length([1, 2, 3]) == 3 +} + +// ## Modifying + +// ### Extracting + +/// Remove the first occurrence of the given element from the list. +/// +/// ```aiken +/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] +/// list.delete([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn delete(self: List, elem: a) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if x == elem { + xs + } else { + [x, ..delete(xs, elem)] + } + } +} + +test delete_1() { + delete([], 42) == [] +} + +test delete_2() { + delete([1, 2, 3, 1], 1) == [2, 3, 1] +} + +test delete_3() { + delete([1, 2, 3], 14) == [1, 2, 3] +} + +test delete_4() { + delete([2], 2) == [] +} + +/// Drop the first `n` elements of a list. +/// +/// ```aiken +/// list.drop([1, 2, 3], 2) == [3] +/// list.drop([], 42) == [] +/// list.drop([1, 2, 3], 42) == [] +/// ``` +pub fn drop(self: List, n: Int) -> List { + if n <= 0 { + self + } else { + when self is { + [] -> [] + [_x, ..xs] -> drop(xs, n - 1) + } + } +} + +test drop_1() { + drop([], 42) == [] +} + +test drop_2() { + drop([1, 2, 3], 2) == [3] +} + +/// Returns the suffix of the given list after removing all elements that satisfy the predicate. 
+/// +/// ```aiken +/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] +/// list.drop_while([], fn(x) { x > 2 }) == [] +/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] +/// ``` +pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + drop_while(xs, predicate) + } else { + self + } + } +} + +test drop_while_1() { + drop_while([], fn(x) { x > 2 }) == [] +} + +test drop_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] +} + +test drop_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x == 42 }) == xs +} + +test drop_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + drop_while(xs, fn(x) { x < 42 }) == [] +} + +/// Produce a list of elements that satisfy a predicate. +/// +/// ```aiken +/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] +/// list.filter([], fn(x) { x > 2 }) == [] +/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] +/// ``` +pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..filter(xs, predicate)] + } else { + filter(xs, predicate) + } + } +} + +test filter_1() { + filter([], fn(x) { x > 0 }) == [] +} + +test filter_2() { + let xs = [1, 2, 3, 4, 5, 6] + filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] +} + +test filter_3() { + let filter_foldr = + fn(xs, f) { + foldr( + xs, + [], + fn(x, ys) { + if f(x) { + [x, ..ys] + } else { + ys + } + }, + ) + } + + let is_odd = + fn(n) { builtin.mod_integer(n, 2) != 0 } + + filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) +} + +/// Produce a list of transformed elements that satisfy a predicate. +/// +/// ```aiken +/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } +/// list.filter_map([1, 2, 3], transform) == [3, 9] +/// ``` +pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when predicate(x) is { + None -> filter_map(xs, predicate) + Some(y) -> [y, ..filter_map(xs, predicate)] + } + } +} + +test filter_map_1() { + filter_map([], fn(_) { Some(42) }) == [] +} + +test filter_map_2() { + filter_map( + [1, 2, 3, 4, 5, 6], + fn(x) { + if builtin.mod_integer(x, 2) != 0 { + Some(3 * x) + } else { + None + } + }, + ) == [3, 9, 15] +} + +/// Return all elements except the last one. +/// +/// ```aiken +/// list.init([]) == None +/// list.init([1, 2, 3]) == Some([1, 2]) +/// ``` +pub fn init(self: List) -> Option> { + when self is { + [] -> None + _ -> Some(do_init(self)) + } +} + +fn do_init(self: List) -> List { + when self is { + [] -> fail @"unreachable" + [_] -> [] + [x, ..xs] -> [x, ..do_init(xs)] + } +} + +test init_1() { + init([]) == None +} + +test init_2() { + init([1]) == Some([]) +} + +test init_3() { + init([1, 2, 3, 4]) == Some([1, 2, 3]) +} + +/// Returns a tuple with all elements that satisfy the predicate at first +/// element, and the rest as second element. 
+/// +/// ```aiken +/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +/// ``` +pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> { + let (left, right) = partition(xs, predicate) + if predicate(x) { + ([x, ..left], right) + } else { + (left, [x, ..right]) + } + } + } +} + +test partition_1() { + partition([], fn(x) { x > 2 }) == ([], []) +} + +test partition_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) +} + +test partition_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x == 42 }) == ([], xs) +} + +test partition_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + partition(xs, fn(x) { x < 42 }) == (xs, []) +} + +test partition_5() { + partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) +} + +/// Extract a sublist from the given list using 0-based indexes. Negative +/// indexes wrap over, so `-1` refers to the last element of the list. +/// +/// ```aiken +/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +/// ``` +pub fn slice(self: List, from: Int, to: Int) -> List { + let (i, l) = + if from >= 0 { + (from, None) + } else { + let l = length(self) + (l + from, Some(l)) + } + + let j = + if to >= 0 { + to - i + 1 + } else { + when l is { + Some(l) -> l + to - i + 1 + None -> length(self) + to - i + 1 + } + } + + self + |> drop(i) + |> take(j) +} + +test slice_1() { + slice([1, 2, 3], 0, 2) == [1, 2, 3] +} + +test slice_2() { + slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] +} + +test slice_3() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] +} + +test slice_4() { + slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] +} + +test slice_5() { + slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] +} + +test slice_6() { + slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] +} + +/// Cut a list in two, such that the first list contains the given number of / +/// elements and the second list contains the rest. +/// +/// Fundamentally equivalent to (but more efficient): +/// +/// ```aiken +/// // span(xs, n) == (take(xs, n), drop(xs, n)) +/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) +/// ``` +pub fn span(self: List, n: Int) -> (List, List) { + when self is { + [] -> ([], []) + [x, ..xs] -> + if n <= 0 { + ([], self) + } else { + let (left, right) = span(xs, n - 1) + ([x, ..left], right) + } + } +} + +test span_1() { + span([], 2) == ([], []) +} + +test span_2() { + span([1, 2, 3], 2) == ([1, 2], [3]) +} + +test span_3() { + span([1, 2, 3], -1) == ([], [1, 2, 3]) +} + +test span_4() { + span([1, 2, 3], 42) == ([1, 2, 3], []) +} + +/// Get elements of a list after the first one, if any. +/// +/// ```aiken +/// list.tail([]) == None +/// list.tail([1, 2, 3]) == Some([2, 3]) +/// ``` +pub fn tail(self: List) -> Option> { + when self is { + [] -> None + [_, ..xs] -> Some(xs) + } +} + +test tail_1() { + tail([1, 2, 3]) == Some([2, 3]) +} + +test tail_2() { + tail([]) == None +} + +/// Get the first `n` elements of a list. 
+/// +/// ```aiken +/// list.take([1, 2, 3], 2) == [1, 2] +/// list.take([1, 2, 3], 14) == [1, 2, 3] +/// ``` +pub fn take(self: List, n: Int) -> List { + if n <= 0 { + [] + } else { + when self is { + [] -> [] + [x, ..xs] -> [x, ..take(xs, n - 1)] + } + } +} + +test take_1() { + take([], 42) == [] +} + +test take_2() { + take([1, 2, 3], 2) == [1, 2] +} + +/// Returns the longest prefix of the given list where all elements satisfy the predicate. +/// +/// ```aiken +/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] +/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] +/// ``` +pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { + when self is { + [] -> [] + [x, ..xs] -> + if predicate(x) { + [x, ..take_while(xs, predicate)] + } else { + [] + } + } +} + +test take_while_1() { + take_while([], fn(x) { x > 2 }) == [] +} + +test take_while_2() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] +} + +test take_while_3() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x == 42 }) == [] +} + +test take_while_4() { + let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] + take_while(xs, fn(x) { x < 42 }) == xs +} + +/// Removes duplicate elements from a list. +/// +/// ```aiken +/// list.unique([1, 2, 3, 1]) == [1, 2, 3] +/// ``` +pub fn unique(self: List) -> List { + when self is { + [] -> [] + [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] + } +} + +test unique_1() { + unique([]) == [] +} + +test unique_2() { + let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] + unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] +} + +// ### Mapping + +/// Map elements of a list into a new list and flatten the result. +/// +/// ```aiken +/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] +/// ``` +pub fn flat_map(self: List, with: fn(a) -> List) -> List { + foldr(self, [], fn(x, xs) { concat(with(x), xs) }) +} + +test flat_map_1() { + flat_map([], fn(a) { [a] }) == [] +} + +test flat_map_2() { + flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] +} + +/// Perform an action for each element of a list. +/// +/// ```aiken +/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) +/// ``` +pub fn for_each(self: List, do: fn(a) -> Void) -> Void { + foldr(self, Void, fn(x, _) { do(x) }) +} + +test for_each_1() { + for_each( + [@"hello", @"world"], + do: fn(lbl) { + trace lbl + Void + }, + ) +} + +/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. +/// +/// ```aiken +/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] +/// ``` +pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { + do_indexed_map(0, self, with) +} + +fn do_indexed_map( + n: Int, + self: List, + with: fn(Int, a) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] + } +} + +test indexed_map_1() { + indexed_map([], fn(i, _n) { i }) == [] +} + +test indexed_map_2() { + indexed_map( + [4, 8, 13, 2], + fn(i, n) { + if n == 8 { + n + } else { + i + } + }, + ) == [0, 8, 2, 3] +} + +/// Apply a function to each element of a list. 
+/// +/// ```aiken +/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +/// ``` +pub fn map(self: List, with: fn(a) -> result) -> List { + when self is { + [] -> [] + [x, ..xs] -> [with(x), ..map(xs, with)] + } +} + +test map_1() { + map([], fn(n) { n + 1 }) == [] +} + +test map_2() { + map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] +} + +/// Apply a function of two arguments, combining elements from two lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +/// ``` +pub fn map2( + self: List, + bs: List, + with: fn(a, b) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] + } + } +} + +test map2_1() { + map2([], [1, 2, 3], fn(a, b) { a + b }) == [] +} + +test map2_2() { + map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] +} + +test map2_3() { + map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] +} + +/// Apply a function of three arguments, combining elements from three lists. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +/// ``` +pub fn map3( + self: List, + bs: List, + cs: List, + with: fn(a, b, c) -> result, +) -> List { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> + when cs is { + [] -> [] + [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] + } + } + } +} + +test map3_1() { + map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] +} + +test map3_2() { + map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] +} + +/// Return the list with its elements in the reserve order. +/// +/// ```aiken +/// list.reverse([1, 2, 3]) == [3, 2, 1] +/// ``` +pub fn reverse(self: List) -> List { + foldl(self, [], fn(x, xs) { [x, ..xs] }) +} + +test reverse_1() { + reverse([]) == [] +} + +test reverse_2() { + reverse([1, 2, 3]) == [3, 2, 1] +} + +/// Sort a list in ascending order using the given comparison function. +/// +/// ```aiken +/// use aiken/int +/// +/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] +/// sort([1, 2, 3], int.compare) == [1, 2, 3] +/// ``` +pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [] + [x, ..xs] -> insert(sort(xs, compare), x, compare) + } +} + +fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { + when self is { + [] -> [e] + [x, ..xs] -> + if compare(e, x) == Less { + [e, ..self] + } else { + [x, ..insert(xs, e, compare)] + } + } +} + +test sort_1() { + let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_2() { + let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_3() { + let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] +} + +test sort_4() { + sort([], int.compare) == [] +} + +/// Decompose a list of tuples into a tuple of lists. 
+/// +/// ``` +/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +/// ``` +pub fn unzip(self: List<(a, b)>) -> (List, List) { + when self is { + [] -> ([], []) + [(a, b), ..xs] -> { + let (a_tail, b_tail) = unzip(xs) + ([a, ..a_tail], [b, ..b_tail]) + } + } +} + +test unzip_1() { + unzip([]) == ([], []) +} + +test unzip_2() { + unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) +} + +// ## Combining + +/// Merge two lists together. +/// +/// ```aiken +/// list.concat([], []) == [] +/// list.concat([], [1, 2, 3]) == [1, 2, 3] +/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: List, right: List) -> List { + when left is { + [] -> right + [x, ..xs] -> [x, ..concat(xs, right)] + } +} + +test concat_1() { + concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] +} + +test concat_2() { + concat([1, 2, 3], []) == [1, 2, 3] +} + +test concat_3() { + concat([], [1, 2, 3]) == [1, 2, 3] +} + +/// Remove the first occurrence of each element of the second list from the first one. +/// +/// ``` +/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +/// list.difference([1, 2, 3], []) == [1, 2, 3] +/// ``` +pub fn difference(self: List, with: List) -> List { + when with is { + [] -> self + [x, ..xs] -> difference(delete(self, x), xs) + } +} + +test difference_1() { + difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] +} + +test difference_2() { + difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] +} + +test difference_3() { + difference([1, 2, 3], []) == [1, 2, 3] +} + +test difference_4() { + difference([], [1, 2, 3]) == [] +} + +/// Combine two lists together. +/// +/// Note: if one list is longer, the extra elements are dropped. +/// +/// ```aiken +/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +/// ``` +pub fn zip(self: List, bs: List) -> List<(a, b)> { + when self is { + [] -> [] + [x, ..xs] -> + when bs is { + [] -> [] + [y, ..ys] -> [(x, y), ..zip(xs, ys)] + } + } +} + +test zip_1() { + zip([], [1, 2, 3]) == [] +} + +test zip_2() { + zip([1, 2, 3], []) == [] +} + +test zip_3() { + zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] +} + +// ## Transforming + +/// Reduce a list from left to right. 
+/// +/// ```aiken +/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] +/// ``` +pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> foldl(xs, with(x, zero), with) + } +} + +type Fold2 = + fn(a, b) -> result + +pub fn foldl2( + self: List, + zero_a: a, + zero_b: b, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> result { + do_foldl2(self, with, return)(zero_a, zero_b) +} + +fn do_foldl2( + self: List, + with: fn(elem, a, b, Fold2) -> result, + return: Fold2, +) -> Fold2 { + when self is { + [] -> return + [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) + } +} + +test foldl2_optimized() { + let + len, + sum, + <- + foldl2( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + 0, + 0, + fn(n, len, sum, return) { return(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_classic() { + let (len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + (0, 0), + fn(n, (len, sum)) { (len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +type Foo { + Foo(Int, Int) +} + +test foldl2_pair() { + let Pair(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Pair(0, 0), + fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl2_foo() { + let Foo(len, sum) = + foldl( + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + Foo(0, 0), + fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, + ) + + and { + len == 10, + sum == 55, + } +} + +test foldl_1() { + foldl([], 0, fn(_, _) { 1 }) == 0 +} + +test foldl_2() { + foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldl_3() { + foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] +} + +/// Reduce a list from right to left. +/// +/// ```aiken +/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] +/// ``` +pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { + when self is { + [] -> zero + [x, ..xs] -> with(x, foldr(xs, zero, with)) + } +} + +test foldr_1() { + foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 +} + +test foldr_2() { + foldr( + [1, 2, 3], + "", + fn(n, _str) { + if builtin.mod_integer(n, 2) == 0 { + "foo" + } else { + "bar" + } + }, + ) == "bar" +} + +test foldr_3() { + foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] +} + +/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. +/// +/// ```aiken +/// let group = fn(i, x, xs) { [(i, x), ..xs] } +/// list.indexed_foldr(["a", "b", "c"], [], group) == [ +/// (0, "a"), +/// (1, "b"), +/// (2, "c") +/// ] +/// ``` +pub fn indexed_foldr( + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + do_indexed_foldr(0, self, zero, with) +} + +fn do_indexed_foldr( + n: Int, + self: List, + zero: result, + with: fn(Int, a, result) -> result, +) -> result { + when self is { + [] -> zero + [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) + } +} + +test indexed_foldr_1() { + indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 +} + +test indexed_foldr_2() { + let letters = ["a", "b", "c"] + indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ + (0, "a"), (1, "b"), (2, "c"), + ] +} + +/// Reduce a list from left to right using the accumulator as left operand. 
+/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] +/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True +/// ``` +pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce([], 0, fn(n, total) { n + total }) == 0 +} + +test reduce_2() { + reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 +} + +test reduce_3() { + reduce([True, False, True], False, fn(left, right) { left || right }) == True +} + +test reduce_4() { + reduce( + [#[1], #[2], #[3]], + #[9], + fn(left, right) { bytearray.concat(left, right) }, + ) == #[9, 1, 2, 3] +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak new file mode 100644 index 00000000..01bfe763 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak @@ -0,0 +1,833 @@ +//// A module for working with associative lists (a.k.a `Pairs`). +//// +//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers +//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is +//// a type-alias to `List>`. +//// +//// > [!CAUTION] +//// > +//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption +//// > about the ordering of elements within the list. As a result, lookup +//// > functions do traverse the entire list when invoked. They are also not _sets_, +//// > and thus allow for duplicate keys. This is reflected in the functions used +//// > to interact with them. + +use aiken/builtin +use aiken/primitive/bytearray + +// ## Inspecting + +/// Get all values in the alist associated with a given key. +/// +/// ```aiken +/// pairs.get_all([], "a") == [] +/// pairs.get_all([Pair("a", 1)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +/// ``` +pub fn get_all(self: Pairs, key k: key) -> List { + when self is { + [] -> [] + [Pair(k2, v), ..rest] -> + if k == k2 { + [v, ..get_all(rest, k)] + } else { + get_all(rest, k) + } + } +} + +test get_all_1() { + get_all([], "a") == [] +} + +test get_all_2() { + get_all([Pair("a", 1)], "a") == [1] +} + +test get_all_3() { + get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] +} + +test get_all_4() { + get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] +} + +test get_all_5() { + get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the first one is returned. 
+/// +/// ```aiken +/// pairs.get_first([], "a") == None +/// pairs.get_first([Pair("a", 1)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +/// ``` +pub fn get_first(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + Some(v) + } else { + get_first(rest, k) + } + } +} + +test get_first_1() { + get_first([], "a") == None +} + +test get_first_2() { + get_first([Pair("a", 1)], "a") == Some(1) +} + +test get_first_3() { + get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_first_4() { + get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) +} + +test get_first_5() { + get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Get the value in the alist by its key. +/// If multiple values with the same key exist, only the last one is returned. +/// +/// ```aiken +/// pairs.get_last([], "a") == None +/// pairs.get_last([Pair("a", 1)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +/// ``` +pub fn get_last(self: Pairs, key k: key) -> Option { + when self is { + [] -> None + [Pair(k2, v), ..rest] -> + if k == k2 { + when get_last(rest, k) is { + None -> Some(v) + some -> some + } + } else { + get_last(rest, k) + } + } +} + +test get_last_1() { + get_last([], "a") == None +} + +test get_last_2() { + get_last([Pair("a", 1)], "a") == Some(1) +} + +test get_last_3() { + get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) +} + +test get_last_4() { + get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) +} + +test get_last_5() { + get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None +} + +/// Finds all keys in the alist associated with a given value. +/// +/// ```aiken +/// pairs.find_all([], 1) == [] +/// pairs.find_all([Pair("a", 1)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] +/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] +/// ``` +pub fn find_all(self: Pairs, v: value) -> List { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if v == v2 { + [k2, ..find_all(rest, v)] + } else { + find_all(rest, v) + } + } +} + +test find_all_1() { + find_all([], "a") == [] +} + +test find_all_2() { + find_all([Pair("a", 14)], 14) == ["a"] +} + +test find_all_3() { + find_all([Pair("a", 14)], 42) == [] +} + +test find_all_4() { + find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] +} + +/// Finds the first key in the alist associated with a given value, if any. 
+/// +/// ```aiken +/// pairs.find_first([], 1) == None +/// pairs.find_first([Pair("a", 1)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") +/// ``` +pub fn find_first(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + Some(k2) + } else { + find_first(rest, v) + } + } +} + +test find_first_1() { + find_first([], "a") == None +} + +test find_first_2() { + find_first([Pair("a", 14)], 14) == Some("a") +} + +test find_first_3() { + find_first([Pair("a", 14)], 42) == None +} + +test find_first_4() { + find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") +} + +/// Finds the last key in the alist associated with a given value, if any. +/// +/// ```aiken +/// pairs.find_last([], 1) == None +/// pairs.find_last([Pair("a", 1)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") +/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") +/// ``` +pub fn find_last(self: Pairs, v: value) -> Option { + when self is { + [] -> None + [Pair(k2, v2), ..rest] -> + if v == v2 { + when find_last(rest, v) is { + None -> Some(k2) + some -> some + } + } else { + find_last(rest, v) + } + } +} + +test find_last_1() { + find_last([], "a") == None +} + +test find_last_2() { + find_last([Pair("a", 14)], 14) == Some("a") +} + +test find_last_3() { + find_last([Pair("a", 14)], 42) == None +} + +test find_last_4() { + find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") +} + +/// Check if a key exists in the pairs. +/// +/// ```aiken +/// pairs.has_key([], "a") == False +/// pairs.has_key([Pair("a", 1)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True +/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True +/// ``` +pub fn has_key(self: Pairs, k: key) -> Bool { + when self is { + [] -> False + // || is lazy so this is fine + [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) + } +} + +test has_key_1() { + !has_key([], "a") +} + +test has_key_2() { + has_key([Pair("a", 14)], "a") +} + +test has_key_3() { + !has_key([Pair("a", 14)], "b") +} + +test has_key_4() { + has_key([Pair("a", 14), Pair("b", 42)], "b") +} + +test has_key_5() { + has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") +} + +/// Extract all the keys present in a given `Pairs`. +/// +/// ```aiken +/// pairs.keys([]) == [] +/// pairs.keys([Pair("a", 1)]) == ["a"] +/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] +/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] +/// ``` +pub fn keys(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(k, _), ..rest] -> [k, ..keys(rest)] + } +} + +test keys_1() { + keys([]) == [] +} + +test keys_2() { + keys([Pair("a", 0)]) == ["a"] +} + +test keys_3() { + keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] +} + +/// Extract all the values present in a given `Pairs`. 
+/// +/// ```aiken +/// pairs.values([]) == [] +/// pairs.values([Pair("a", 1)]) == [1] +/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +/// ``` +pub fn values(self: Pairs) -> List { + when self is { + [] -> [] + [Pair(_, v), ..rest] -> [v, ..values(rest)] + } +} + +test values_1() { + values([]) == [] +} + +test values_2() { + values([Pair("a", 1)]) == [1] +} + +test values_3() { + values([Pair("a", 1), Pair("b", 2)]) == [1, 2] +} + +test values_4() { + values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] +} + +// ## Modifying + +/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. +/// +/// ```aiken +/// pairs.delete_all([], "a") == [] +/// pairs.delete_all([Pair("a", 1)], "a") == [] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] +/// ``` +pub fn delete_all(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + delete_all(rest, k) + } else { + [Pair(k2, v2), ..delete_all(rest, k)] + } + } +} + +test delete_all_1() { + delete_all([], "a") == [] +} + +test delete_all_2() { + delete_all([Pair("a", 14)], "a") == [] +} + +test delete_all_3() { + let fixture = [Pair("a", 14)] + delete_all(fixture, "b") == fixture +} + +test delete_all_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_all(fixture, "a") == [Pair("b", 2)] +} + +/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **first** key found is deleted. +/// +/// ```aiken +/// pairs.delete_first([], "a") == [] +/// pairs.delete_first([Pair("a", 1)], "a") == [] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] +/// ``` +pub fn delete_first(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + rest + } else { + [Pair(k2, v2), ..delete_first(rest, k)] + } + } +} + +test delete_first_1() { + delete_first([], "a") == [] +} + +test delete_first_2() { + delete_first([Pair("a", 14)], "a") == [] +} + +test delete_first_3() { + let fixture = [Pair("a", 14)] + delete_first(fixture, "b") == fixture +} + +test delete_first_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] +} + +/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. +/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
+/// +/// ```aiken +/// pairs.delete_last([], "a") == [] +/// pairs.delete_last([Pair("a", 1)], "a") == [] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] +/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] +/// ``` +pub fn delete_last(self: Pairs, key k: key) -> Pairs { + when self is { + [] -> [] + [Pair(k2, v2), ..rest] -> + if k == k2 { + let tail = delete_last(rest, k) + if tail == rest { + rest + } else { + [Pair(k2, v2), ..tail] + } + } else { + [Pair(k2, v2), ..delete_last(rest, k)] + } + } +} + +test delete_last_1() { + delete_last([], "a") == [] +} + +test delete_last_2() { + delete_last([Pair("a", 14)], "a") == [] +} + +test delete_last_3() { + let fixture = [Pair("a", 14)] + delete_last(fixture, "b") == fixture +} + +test delete_last_4() { + let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] + delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// the value is added in front. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] +/// ``` +pub fn insert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..self] + } else { + [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test insert_by_ascending_key_1() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14), Pair("foo", 42)] +} + +test insert_by_ascending_key_2() { + let m = + [] + |> insert_by_ascending_key("foo", 42, bytearray.compare) + |> insert_by_ascending_key("bar", 14, bytearray.compare) + |> insert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies +/// how to combine two values on a key conflict. +/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. 
+/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let add_integer = fn(x, y) { x + y } +/// +/// let result = +/// [] +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) +/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) +/// +/// result == [Pair("bar", 2), Pair("foo", 4)] +/// ``` +pub fn insert_with_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, + with: fn(value, value) -> value, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, with(v, v2)), ..rest] + } else { + [ + Pair(k2, v2), + ..insert_with_by_ascending_key(rest, k, v, compare, with) + ] + } + } + } +} + +test insert_with_by_ascending_key_1() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + + m == [Pair("foo" |> builtin.b_data, 56)] +} + +test insert_with_by_ascending_key_2() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let m = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 42, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 14, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "baz" |> builtin.b_data, + 1337, + compare_un_b_data, + builtin.add_integer, + ) + + m == [ + Pair("bar" |> builtin.b_data, 14), + Pair("baz" |> builtin.b_data, 1337), + Pair("foo" |> builtin.b_data, 42), + ] +} + +test insert_with_by_ascending_key_3() { + let compare_un_b_data = + fn(l, r) { + bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) + } + + let result = + [] + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 1, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "bar" |> builtin.b_data, + 2, + compare_un_b_data, + builtin.add_integer, + ) + |> insert_with_by_ascending_key( + "foo" |> builtin.b_data, + 3, + compare_un_b_data, + builtin.add_integer, + ) + + result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] +} + +/// Apply a function to all key-value pairs in a alist, replacing the values. +/// +/// ```aiken +/// let fixture = [Pair("a", 100), Pair("b", 200)] +/// +/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] +/// ``` +pub fn map( + self: Pairs, + with: fn(key, value) -> result, +) -> Pairs { + when self is { + [] -> [] + [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] + } +} + +test map_1() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] +} + +test map_2() { + let fixture = [Pair("a", 1), Pair("b", 2)] + + map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] +} + +/// Insert a value in the `Pairs` at a given key. If the key already exists, +/// its value is replaced. 
+/// +/// > [!CAUTION] +/// > The list is only traversed up to the given key and the traversal +/// > stops as soon as a higher key is encountered. Said differently, the list +/// > is assumed to **be ordered by ascending keys**! If it is not, expect the +/// > unexpected. +/// +/// ```aiken +/// use aiken/primitive/bytearray +/// +/// let result = +/// [] +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) +/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) +/// +/// result == [Pair("bar", 2), Pair("foo", 3)] +/// ``` +pub fn repsert_by_ascending_key( + self: Pairs, + key k: key, + value v: value, + compare: fn(key, key) -> Ordering, +) -> Pairs { + when self is { + [] -> [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if compare(k, k2) == Less { + [Pair(k, v), ..self] + } else { + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] + } + } + } +} + +test repsert_by_ascending_key_1() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("foo", 14, bytearray.compare) + + m == [Pair("foo", 14)] +} + +test repsert_by_ascending_key_2() { + let m = + [] + |> repsert_by_ascending_key("foo", 42, bytearray.compare) + |> repsert_by_ascending_key("bar", 14, bytearray.compare) + |> repsert_by_ascending_key("baz", 1337, bytearray.compare) + + m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] +} + +// ## Transforming + +/// Fold over the key-value pairs in a pairs. The fold direction follows keys +/// in ascending order and is done from left-to-right. +/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldl( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) + } +} + +test foldl_1() { + foldl([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldl_2() { + foldl( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +/// Fold over the key-value pairs in a Pairs. The fold direction follows the +/// order of elements in the Pairs and is done from right-to-left. 
+/// +/// ```aiken +/// let fixture = [ +/// Pair(1, 100), +/// Pair(2, 200), +/// Pair(3, 300), +/// ] +/// +/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +/// ``` +pub fn foldr( + self: Pairs, + zero: result, + with: fn(key, value, result) -> result, +) -> result { + when self is { + [] -> zero + [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) + } +} + +test foldr_1() { + foldr([], 14, fn(_, _, _) { 42 }) == 14 +} + +test foldr_2() { + foldr( + [Pair("a", 42), Pair("b", 14)], + zero: 0, + with: fn(_, v, total) { v + total }, + ) == 56 +} + +test foldr_3() { + let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] + + foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak new file mode 100644 index 00000000..46a7dda5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak @@ -0,0 +1,147 @@ +use aiken/builtin + +pub type VerificationKey = + ByteArray + +pub type VerificationKeyHash = + Hash + +pub type Script = + ByteArray + +pub type ScriptHash = + Hash + +pub type Signature = + ByteArray + +pub type DataHash = + Hash + +/// A `Hash` is nothing more than a `ByteArray`, but it carries extra +/// information for readability. +/// +/// On-chain, any hash digest value is represented as a plain 'ByteArray'. +/// Though in practice, hashes come from different sources and have +/// different semantics. +/// +/// Hence, while this type-alias doesn't provide any strong type-guarantees, +/// it helps writing functions signatures with more meaningful types than mere +/// 'ByteArray'. +/// +/// Compare for example: +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(ByteArray) +/// Script(ByteArray) +/// } +/// ``` +/// +/// with +/// +/// ```aiken +/// pub type Credential { +/// VerificationKey(Hash) +/// Script(Hash) +/// } +/// ``` +/// +/// Both are strictly equivalent, but the second reads much better. +pub type Hash = + ByteArray + +// ## Hashing + +/// A blake2b-224 hash algorithm. +/// +/// Typically used for: +/// +/// - [`Credential`](../cardano/address.html#Credential) +/// - [`PolicyId`](../cardano/assets.html#PolicyId) +/// +/// Note: there's no function to calculate blake2b-224 hash digests on-chain. +pub opaque type Blake2b_224 { + Blake2b_224 +} + +/// Compute the blake2b-224 hash digest (28 bytes) of some data. +pub fn blake2b_224(bytes: ByteArray) -> Hash { + builtin.blake2b_224(bytes) +} + +/// A blake2b-256 hash algorithm. +/// +/// Typically used for: +/// +/// - [`TransactionId`](../cardano/transaction.html#TransactionId) +pub opaque type Blake2b_256 { + Blake2b_256 +} + +/// Compute the blake2b-256 hash digest (32 bytes) of some data. +pub fn blake2b_256(bytes: ByteArray) -> Hash { + builtin.blake2b_256(bytes) +} + +/// A Keccak-256 hash algorithm. +pub opaque type Keccak_256 { + Keccak_256 +} + +/// Compute the keccak-256 hash digest (32 bytes) of some data. +pub fn keccak_256(bytes: ByteArray) -> Hash { + builtin.keccak_256(bytes) +} + +/// A SHA2-256 hash algorithm. +pub opaque type Sha2_256 { + Sha2_256 +} + +/// Compute the sha2-256 hash digest (32 bytes) of some data. 
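+///
+/// For instance, as an illustrative check (this relies only on the well-known
+/// SHA-256 digest of the empty bytearray and is not part of the upstream docs):
+///
+/// ```aiken
+/// sha2_256("") == #"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+/// ```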
+pub fn sha2_256(bytes: ByteArray) -> Hash { + builtin.sha2_256(bytes) +} + +/// A SHA3-256 hash algorithm. +pub opaque type Sha3_256 { + Sha3_256 +} + +/// Compute the sha3-256 hash digest (32 bytes) of some data. +pub fn sha3_256(bytes: ByteArray) -> Hash { + builtin.sha3_256(bytes) +} + +// ## Verifying signatures + +/// Verify an ECDCA signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ecdsa_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ecdsa_secp256k1_signature(key, msg, sig) +} + +/// Verify an Ed25519 signature using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_ed25519_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_ed25519_signature(key, msg, sig) +} + +/// Verify a Schnorr signature (over secp256k1) using the given verification key. +/// Returns `True` when the signature is valid. +pub fn verify_schnorr_signature( + key: VerificationKey, + msg: ByteArray, + sig: Signature, +) -> Bool { + builtin.verify_schnorr_secp256k1_signature(key, msg, sig) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak new file mode 100644 index 00000000..d7b4cc19 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak @@ -0,0 +1,115 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G1 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. 
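+///
+/// For illustration, compressing and then decompressing the generator
+/// round-trips to the same point (a minimal sketch using `compress`,
+/// `decompress` and `equal` defined below):
+///
+/// ```aiken
+/// g1.equal(g1.decompress(g1.compress(g1.generator)), g1.generator)
+/// ```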
+pub const generator: G1Element = + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" + +test generator_1() { + builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G1 group. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G1Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", + ), + zero == builtin.bls12_381_g1_scalar_mul( + scalar.field_prime, + #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", + ), + } +} + +/// Compresses a point in the G1 group into a more compact representation. +/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g1_compress(point) +} + +test compress_1() { + compress( + #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", + ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" +} + +/// Decompresses a point in the G1 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g1_uncompress(bytes) +} + +pub fn equal(left, right) { + builtin.bls12_381_g1_equal(left, right) +} + +test equal_1() { + equal(generator, generator) +} + +/// Adds two points in the G1 group. +pub fn add(left, right) { + builtin.bls12_381_g1_add(left, right) +} + +/// Subtracts one point in the G1 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G1 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
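+///
+/// For illustration, a minimal sketch mirroring the test below (`scalar.new`
+/// comes from `aiken/crypto/bls12_381/scalar`):
+///
+/// ```aiken
+/// expect Some(two) = scalar.new(2)
+/// g1.scale(g1.generator, two) == g1.add(g1.generator, g1.generator)
+/// ```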
+pub fn scale(point, e: Scalar) { + builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G1 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { + builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak new file mode 100644 index 00000000..7a2013db --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak @@ -0,0 +1,124 @@ +//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. +//// +//// The key functionalities provided by this module include: +//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. +//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. +//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. +//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. +//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. +//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. +//// +//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. + +use aiken/builtin +use aiken/crypto/bls12_381/scalar.{Scalar} + +/// The compressed generator of the G2 group of the BLS12-381 curve. +/// This constant represents a fixed base point on the elliptic curve. +/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const generator: G2Element = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + +test generator_1() { + builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" +} + +/// Represents the additive identity (zero) in the G2 group. 
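+///
+/// For illustration, adding `zero` to any point leaves it unchanged (a
+/// minimal sketch using `add` defined below):
+///
+/// ```aiken
+/// g2.add(g2.generator, g2.zero) == g2.generator
+/// ```
+///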
+/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. +/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. +pub const zero: G2Element = + #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" + +test zero_1() { + and { + zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", + ), + zero == builtin.bls12_381_g2_scalar_mul( + scalar.field_prime, + #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", + ), + } +} + +/// Compresses a point in the G2 group into a more compact representation. +/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. +/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. +/// +/// > [!NOTE] +/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): +/// > +/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: +/// > +/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. +/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. +/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. +pub fn compress(point) { + builtin.bls12_381_g2_compress(point) +} + +test compress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" +} + +/// Decompresses a point in the G2 group from its compressed form. +pub fn decompress(bytes) { + builtin.bls12_381_g2_uncompress(bytes) +} + +test decompress_1() { + let g2 = + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" + generator == g2 +} + +pub fn equal(left, right) { + builtin.bls12_381_g2_equal(left, right) +} + +test equal_1() { + equal( + generator, + #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", + ) +} + +/// Adds two points in the G2 group. 
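+///
+/// For illustration, the operation is commutative (a minimal sketch):
+///
+/// ```aiken
+/// let double = g2.add(g2.generator, g2.generator)
+/// g2.add(g2.generator, double) == g2.add(double, g2.generator)
+/// ```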
+pub fn add(left, right) { + builtin.bls12_381_g2_add(left, right) +} + +/// Subtracts one point in the G2 group from another. +pub fn sub(left, right) { + builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) +} + +test sub_1() { + generator == sub(add(generator, generator), generator) +} + +/// Exponentiates a point in the G2 group with a `scalar`. +/// This operation is equivalent to the repeated addition of the point with itself `e` times. +pub fn scale(point, e: Scalar) { + builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) +} + +test scale_1() { + expect Some(x) = scalar.new(2) + builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) +} + +/// Hashes arbitrary data to a point in the G2 group. +/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. +pub fn hash_to_group(bytes, domain_separation_tag) { + builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) +} + +test hash_to_group_1() { + hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak new file mode 100644 index 00000000..cf028ad7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak @@ -0,0 +1,255 @@ +//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. +//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. +//// +//// More explicitly, we have the identity: +//// +//// ```aiken +//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 +//// ``` +//// +//// where, +//// +//// ```aiken +//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 +//// ``` +//// +//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. +//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. + +use aiken/builtin + +/// The prime number defining the scalar field of the BLS12-381 curve. +pub const field_prime = + 52435875175126190479447740508185965837690552500527637822603658699938581184513 + +/// Represents the additive identity (zero) in the `Scalar` field. +pub const zero: Scalar = Scalar(0) + +/// Represents the multiplicative identity (one) in the `Scalar` field. +pub const one: Scalar = Scalar(1) + +/// Opaque type representing an element of the finite field `Scalar`. +pub opaque type Scalar { + integer: Int, +} + +// ## Constructing + +/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. +/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
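+///
+/// For illustration, a minimal sketch (values in `[0, field_prime)` are
+/// accepted, everything else is rejected):
+///
+/// ```aiken
+/// scalar.new(0) == Some(scalar.zero)
+/// scalar.new(scalar.field_prime) == None
+/// ```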
+pub fn new(n: Int) -> Option { + if n >= 0 && n < field_prime { + Some(Scalar(n)) + } else { + None + } +} + +test new_1() { + and { + new(-1) == None, + new(field_prime) == None, + new(834884848) == Some(Scalar(834884848)), + } +} + +/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`. +pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(True, bytes)) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960)) +} + +/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`. +pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option { + new(builtin.bytearray_to_integer(False, bytes)) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535)) +} + +// ## Modifying + +/// Exponentiates an `Scalar` element by a non-negative integer exponent, using repeated squaring. +/// Note that this function returns `scalar.zero` for negative exponents. +/// A dedicated builtin function for this is in the making, see CIP 109. +pub fn scale(self: Scalar, e: Int) -> Scalar { + if e < 0 { + zero + } else if e == 0 { + one + } else if e % 2 == 0 { + scale(mul(self, self), e / 2) + } else { + mul(self, scale(mul(self, self), ( e - 1 ) / 2)) + } +} + +test scale_1() { + and { + scale(Scalar(834884848), -1) == zero, + scale(Scalar(834884848), 0) == one, + scale(Scalar(834884848), 1) == Scalar(834884848), + scale(Scalar(834884848), 2) == Scalar(697032709419983104), + scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192), + scale(Scalar(field_prime - 4), 200) == Scalar( + 12843927705572658539565969578937286576443167978938369866871449552629978143484, + ), + } +} + +/// A faster version of `scale` for the case where the exponent is a power of two. +/// That is, the exponent `e = 2^k` for some non-negative integer `k`. Which is used alot in zk-SNARKs. +pub fn scale2(self: Scalar, k: Int) -> Scalar { + if k < 0 { + zero + } else { + do_scale2(self, k) + } +} + +fn do_scale2(self: Scalar, k: Int) -> Scalar { + if k == 0 { + self + } else { + do_scale2(mul(self, self), k - 1) + } +} + +test scale2_1() { + and { + scale2(Scalar(834884848), -1) == zero, + scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1), + scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2), + scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4), + scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8), + scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16), + } +} + +// ## Combining + +/// Adds two `Scalar` elements, ensuring the result stays within the finite field range. +pub fn add(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer + right.integer ) % field_prime) +} + +test add_1() { + and { + (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?, + (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?, + (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?, + } +} + +/// Divides one `Scalar` element by another, returning `None` if the divisor is zero. 
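+///
+/// For illustration, a minimal sketch (dividing a non-zero element by itself
+/// yields `one`):
+///
+/// ```aiken
+/// expect Some(x) = scalar.new(834884848)
+/// scalar.div(x, x) == Some(scalar.one)
+/// scalar.div(x, scalar.zero) == None
+/// ```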
+pub fn div(left: Scalar, right: Scalar) -> Option { + if right == zero { + None + } else { + Some(mul(left, scale(right, field_prime - 2))) + } +} + +test div_1() { + and { + div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), + div(Scalar(834884848), zero) == None, + div(Scalar(field_prime - 1), Scalar(2)) == Some( + Scalar( + 26217937587563095239723870254092982918845276250263818911301829349969290592256, + ), + ), + } +} + +/// Multiplies two `Scalar` elements, with the result constrained within the finite field. +pub fn mul(left: Scalar, right: Scalar) -> Scalar { + Scalar(left.integer * right.integer % field_prime) +} + +test mul_1() { + and { + mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), + mul(zero, Scalar(834884848)) == zero, + mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699938581184511, + ), + } +} + +/// Calculates the additive inverse of a `Scalar` element. +pub fn neg(self: Scalar) -> Scalar { + // this is basicly sub(zero, self), but more efficient as it saves one modulo operation + if self.integer == 0 { + self + } else { + Scalar(field_prime - self.integer) + } +} + +test neg_1() { + and { + neg(Scalar(834884848)) == Scalar( + 52435875175126190479447740508185965837690552500527637822603658699937746299665, + ), + neg(zero) == zero, + neg(one) == Scalar(field_prime - 1), + } +} + +/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. +pub fn recip(self: Scalar) -> Option { + div(one, self) +} + +test recip_1() { + and { + recip(Scalar(834884848)) == Some( + Scalar( + 35891248691642227249400403463796410930702563777316955162085759263735363466421, + ), + ), + recip(zero) == None, + } +} + +/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. +pub fn sub(left: Scalar, right: Scalar) -> Scalar { + Scalar(( left.integer - right.integer ) % field_prime) +} + +test sub_1() { + and { + (sub(Scalar(834884848), Scalar(834884848)) == zero)?, + (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, + } +} + +// ## Transforming + +/// Converts a `Scalar` element back to its integer representation. +pub fn to_int(self: Scalar) -> Int { + self.integer +} + +test to_int_1() { + to_int(Scalar(834884848)) == 834884848 +} + +/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. +pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self.integer) +} + +/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. +pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self.integer) +} + +test to_bytearray_1() { + to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak new file mode 100644 index 00000000..96179f9b --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak @@ -0,0 +1,680 @@ +//// In a eUTxO-based blockchain like Cardano, the management of time can be +//// finicky. 
+//// +//// Indeed, in order to maintain a complete determinism in the execution of +//// scripts, it is impossible to introduce a notion of _"current time"_ since +//// the execution would then depend on factor that are external to the +//// transaction itself: the ineluctable stream of time flowing in our universe. +//// +//// Hence, to work around that, we typically define time intervals, which gives +//// window -- a.k.a intervals -- within which the transaction can be executed. +//// From within a script, it isn't possible to know when exactly the script is +//// executed, but we can reason about the interval bounds to validate pieces of +//// logic. + +// TODO: Replace 'Int' with a generic 'a' once we have comparable traits. + +/// A type to represent intervals of values. Interval are inhabited by a type +/// `a` which is useful for non-infinite intervals that have a finite +/// lower-bound and/or upper-bound. +/// +/// This allows to represent all kind of mathematical intervals: +/// +/// ```aiken +/// // [1; 10] +/// let i0: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(1), is_inclusive: True } +/// , upper_bound: +/// IntervalBound { bound_type: Finite(10), is_inclusive: True } +/// } +/// ``` +/// +/// ```aiken +/// // (20; infinity) +/// let i1: Interval = Interval +/// { lower_bound: +/// IntervalBound { bound_type: Finite(20), is_inclusive: False } +/// , upper_bound: +/// IntervalBound { bound_type: PositiveInfinity, is_inclusive: False } +/// } +/// ``` +pub type Interval { + lower_bound: IntervalBound, + upper_bound: IntervalBound, +} + +/// An interval bound, either inclusive or exclusive. +pub type IntervalBound { + bound_type: IntervalBoundType, + is_inclusive: Bool, +} + +/// A type of interval bound. Where finite, a value of type `a` must be +/// provided. `a` will typically be an `Int`, representing a number of seconds or +/// milliseconds. +pub type IntervalBoundType { + NegativeInfinity + Finite(a) + PositiveInfinity +} + +// ## Constructing + +/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF) +/// +/// ```aiken +/// interval.after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF) +/// +/// ```aiken +/// interval.entirely_after(10) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, +/// } +/// ``` +pub fn entirely_after(lower_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and including) the given bound. 
i.e (-INF, upper_bound] +/// +/// ```aiken +/// interval.before(100) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) +/// +/// ```aiken +/// interval.entirely_before(10) == Interval { +/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_before(upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] +/// +/// ```aiken +/// interval.between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, +/// } +/// ``` +pub fn between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: True, + }, + } +} + +/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) +/// +/// ```aiken +/// interval.entirely_between(10, 100) == Interval { +/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// ``` +pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { + Interval { + lower_bound: IntervalBound { + bound_type: Finite(lower_bound), + is_inclusive: False, + }, + upper_bound: IntervalBound { + bound_type: Finite(upper_bound), + is_inclusive: False, + }, + } +} + +/// Create an empty interval that contains no value. +/// +/// ```aiken +/// interval.contains(empty, 0) == False +/// interval.contains(empty, 1000) == False +/// ``` +pub const empty: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + } + +/// Create an interval that contains every possible values. i.e. (-INF, +INF) +/// +/// ```aiken +/// interval.contains(everything, 0) == True +/// interval.contains(everything, 1000) == True +/// ``` +pub const everything: Interval = + Interval { + lower_bound: IntervalBound { + bound_type: NegativeInfinity, + is_inclusive: True, + }, + upper_bound: IntervalBound { + bound_type: PositiveInfinity, + is_inclusive: True, + }, + } + +// ## Inspecting + +/// Checks whether an element is contained within the interval. 
+/// +/// ```aiken +/// let iv = +/// Interval { +/// lower_bound: IntervalBound { +/// bound_type: Finite(14), +/// is_inclusive: True +/// }, +/// upper_bound: IntervalBound { +/// bound_type: Finite(42), +/// is_inclusive: False +/// }, +/// } +/// +/// interval.contains(iv, 25) == True +/// interval.contains(iv, 0) == False +/// interval.contains(iv, 14) == True +/// interval.contains(iv, 42) == False +/// ``` +pub fn contains(self: Interval, elem: Int) -> Bool { + let is_greater_than_lower_bound = + when self.lower_bound.bound_type is { + NegativeInfinity -> True + Finite(lower_bound) -> + if self.lower_bound.is_inclusive { + elem >= lower_bound + } else { + elem > lower_bound + } + PositiveInfinity -> False + } + + let is_smaller_than_upper_bound = + when self.upper_bound.bound_type is { + NegativeInfinity -> False + Finite(upper_bound) -> + if self.upper_bound.is_inclusive { + elem <= upper_bound + } else { + elem < upper_bound + } + PositiveInfinity -> True + } + + is_greater_than_lower_bound && is_smaller_than_upper_bound +} + +test contains_1() { + let iv = everything + contains(iv, 14) +} + +test contains_2() { + let iv = entirely_before(15) + contains(iv, 14) +} + +test contains_3() { + let iv = before(14) + contains(iv, 14) +} + +test contains_4() { + let iv = entirely_before(14) + !contains(iv, 14) +} + +test contains_5() { + let iv = entirely_after(13) + contains(iv, 14) +} + +test contains_6() { + let iv = after(14) + contains(iv, 14) +} + +test contains_7() { + let iv = entirely_after(14) + !contains(iv, 14) +} + +test contains_8() { + let iv = between(42, 1337) + !contains(iv, 14) +} + +test contains_9() { + let iv = between(0, 42) + contains(iv, 14) +} + +test contains_10() { + let iv = between(0, 42) + contains(iv, 42) +} + +test contains_11() { + let iv = entirely_between(0, 42) + !contains(iv, 0) +} + +test contains_12() { + let iv = empty + !contains(iv, 14) +} + +/// Tells whether an interval is empty; i.e. that is contains no value. +/// +/// ```aiken +/// let iv1 = interval.empty +/// +/// let iv2 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// } +/// +/// let iv3 = Interval { +/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, +/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, +/// } +/// +/// interval.is_empty(iv1) == True +/// interval.is_empty(iv2) == True +/// interval.is_empty(iv3) == False +/// +/// // Note: Two empty intervals are not necessarily equal. 
+/// iv1 != iv2 +/// ``` +pub fn is_empty(self: Interval) -> Bool { + let ordering = + compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) + + when ordering is { + Greater -> True + Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) + Less -> { + let is_open_interval = + !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive + if is_open_interval { + when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { + (Finite(lower_bound), Finite(upper_bound)) -> + lower_bound + 1 == upper_bound + _ -> False + } + } else { + False + } + } + } +} + +/// Check whether the interval is entirely after the point "a" +/// +/// ```aiken +/// interval.is_entirely_after(interval.after(10), 5) == True +/// interval.is_entirely_after(interval.after(10), 10) == False +/// interval.is_entirely_after(interval.after(10), 15) == False +/// interval.is_entirely_after(interval.between(10, 20), 30) == False +/// interval.is_entirely_after(interval.between(10, 20), 5) == True +pub fn is_entirely_after(self: Interval, point: Int) -> Bool { + when self.lower_bound.bound_type is { + Finite(low) -> + if self.lower_bound.is_inclusive { + point < low + } else { + point <= low + } + _ -> False + } +} + +test is_entirely_after_1() { + is_entirely_after(after(10), 5) +} + +test is_entirely_after_2() { + !is_entirely_after(after(10), 10) +} + +test is_entirely_after_3() { + !is_entirely_after(after(10), 15) +} + +test is_entirely_after_4() { + !is_entirely_after(between(10, 20), 30) +} + +test is_entirely_after_5() { + is_entirely_after(between(10, 20), 5) +} + +test is_entirely_after_6() { + is_entirely_after(entirely_after(10), 10) +} + +test is_entirely_after_7() { + !is_entirely_after(before(10), 5) +} + +test is_entirely_after_8() { + !is_entirely_after(before(10), 15) +} + +test is_entirely_after_9() { + !is_entirely_after(entirely_before(10), 5) +} + +/// Check whether the interval is entirely before the point "a" +/// +/// ```aiken +/// interval.is_entirely_before(interval.before(10), 15) == True +/// interval.is_entirely_before(interval.before(10), 10) == False +/// interval.is_entirely_before(interval.before(10), 5) == False +/// interval.is_entirely_before(interval.between(10, 20), 30) == True +/// interval.is_entirely_before(interval.between(10, 20), 5) == False +pub fn is_entirely_before(self: Interval, point: Int) -> Bool { + when self.upper_bound.bound_type is { + Finite(hi) -> + if self.upper_bound.is_inclusive { + hi < point + } else { + hi <= point + } + _ -> False + } +} + +test is_entirely_before_1() { + is_entirely_before(before(10), 15) +} + +test is_entirely_before_2() { + !is_entirely_before(before(10), 10) +} + +test is_entirely_before_3() { + !is_entirely_before(before(10), 5) +} + +test is_entirely_before_4() { + is_entirely_before(between(10, 20), 30) +} + +test is_entirely_before_5() { + !is_entirely_before(between(10, 20), 5) +} + +test is_entirely_before_6() { + is_entirely_before(entirely_before(10), 10) +} + +test is_entirely_before_7() { + !is_entirely_before(after(10), 15) +} + +test is_entirely_before_8() { + !is_entirely_before(after(10), 5) +} + +test is_entirely_before_9() { + !is_entirely_before(entirely_after(10), 5) +} + +// ## Combining + +/// Computes the smallest interval containing the two given intervals, if any +/// +/// ```aiken +/// let iv1 = between(0, 10) +/// let iv2 = between(2, 14) +/// hull(iv1, iv2) == between(0, 14) +/// +/// let iv1 = between(5, 10) +/// let iv2 = before(0) +/// hull(iv1, iv2) == 
before(10) +/// +/// let iv1 = entirely_after(0) +/// let iv2 = between(10, 42) +/// hull(iv1, iv2) = entirely_after(0) +/// ``` +pub fn hull(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: min(iv1.lower_bound, iv2.lower_bound), + upper_bound: max(iv1.upper_bound, iv2.upper_bound), + } +} + +test hull_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + hull(iv1, iv2) == between(0, 14) +} + +test hull_2() { + let iv1 = between(5, 10) + let iv2 = before(0) + hull(iv1, iv2) == before(10) +} + +test hull_3() { + let iv1 = entirely_after(0) + let iv2 = between(10, 42) + hull(iv1, iv2) == entirely_after(0) +} + +/// Computes the largest interval contains in the two given intervals, if any. +/// +/// ```aiken +/// let iv1 = interval.between(0, 10) +/// let iv2 = interval.between(2, 14) +/// interval.intersection(iv1, iv2) == interval.between(2, 10) +/// +/// let iv1 = interval.entirely_before(10) +/// let iv2 = interval.entirely_after(0) +/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10) +/// +/// let iv1 = interval.between(0, 1) +/// let iv2 = interval.between(2, 3) +/// interval.intersection(iv1, iv2) |> interval.is_empty +/// ``` +pub fn intersection(iv1: Interval, iv2: Interval) -> Interval { + Interval { + lower_bound: max(iv1.lower_bound, iv2.lower_bound), + upper_bound: min(iv1.upper_bound, iv2.upper_bound), + } +} + +test intersection_1() { + let iv1 = between(0, 10) + let iv2 = between(2, 14) + intersection(iv1, iv2) == between(2, 10) +} + +test intersection_2() { + let iv1 = between(0, 1) + let iv2 = between(1, 2) + intersection(iv1, iv2) == between(1, 1) +} + +test intersection_3() { + let iv1 = between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_4() { + let iv1 = entirely_between(0, 1) + let iv2 = entirely_between(1, 2) + intersection(iv1, iv2) + |> is_empty +} + +test intersection_5() { + let iv1 = between(0, 10) + let iv2 = before(4) + intersection(iv1, iv2) == between(0, 4) +} + +test intersection_6() { + let iv1 = entirely_before(10) + let iv2 = entirely_after(0) + intersection(iv1, iv2) == entirely_between(0, 10) +} + +/// Return the highest bound of the two. +/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.max(ib1, ib2) == ib2 +/// ``` +pub fn max( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> right + Equal -> left + Greater -> left + } +} + +/// Return the smallest bound of the two. 
+/// +/// ```aiken +/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } +/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } +/// +/// interval.min(ib1, ib2) == ib1 +/// ``` +pub fn min( + left: IntervalBound, + right: IntervalBound, +) -> IntervalBound { + when compare_bound(left, right) is { + Less -> left + Equal -> left + Greater -> right + } +} + +fn compare_bound( + left: IntervalBound, + right: IntervalBound, +) -> Ordering { + when compare_bound_type(left.bound_type, right.bound_type) is { + Less -> Less + Greater -> Greater + Equal -> + if left.is_inclusive == right.is_inclusive { + Equal + } else if left.is_inclusive { + Greater + } else { + Less + } + } +} + +fn compare_bound_type( + left: IntervalBoundType, + right: IntervalBoundType, +) -> Ordering { + when left is { + NegativeInfinity -> + when right is { + NegativeInfinity -> Equal + _ -> Less + } + PositiveInfinity -> + when right is { + PositiveInfinity -> Equal + _ -> Greater + } + Finite(left) -> + when right is { + NegativeInfinity -> Greater + PositiveInfinity -> Less + Finite(right) -> + if left < right { + Less + } else if left == right { + Equal + } else { + Greater + } + } + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak new file mode 100644 index 00000000..dd575e7a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak @@ -0,0 +1,424 @@ +//// This module contains some basic Math utilities. Standard arithmetic +//// operations on integers are available through native operators: +//// +//// Operator | Description +//// --- | :--- +//// `+` | Arithmetic sum +//// `-` | Arithmetic difference +//// `/` | Whole division +//// `*` | Arithmetic multiplication +//// `%` | Remainder by whole division +//// +//// Here are a few examples: +//// +//// ```aiken +//// 1 + 1 // 2 +//// 10 - 2 // 8 +//// 40 / 14 // 2 +//// 3 * 4 // 12 +//// 10 % 3 // 1 + +use aiken/builtin + +/// Calculate the absolute value of an integer. +/// +/// ```aiken +/// math.abs(-42) == 42 +/// math.abs(14) == 14 +/// ``` +pub fn abs(self: Int) -> Int { + if self < 0 { + 0 - self + } else { + self + } +} + +test abs_1() { + abs(14) == 14 +} + +test abs_2() { + abs(-42) == 42 +} + +/// Restrict the value of an integer between two min and max bounds +/// +/// ```aiken +/// math.clamp(14, min: 0, max: 10) == 10 +/// ``` +pub fn clamp(self: Int, min: Int, max: Int) -> Int { + if self < min { + min + } else { + if self > max { + max + } else { + self + } + } +} + +test clamp_1() { + clamp(14, min: 0, max: 10) == 10 +} + +test clamp_2() { + clamp(7, min: 0, max: 10) == 7 +} + +test clamp_3() { + clamp(7, min: 10, max: 100) == 10 +} + +/// The greatest common divisor of two integers. +/// +/// ```aiken +/// math.gcd(42, 14) == 14 +/// math.gcd(14, 42) == 14 +/// math.gcd(0, 0) == 0 +/// ``` +pub fn gcd(x: Int, y: Int) -> Int { + abs(do_gcd(x, y)) +} + +fn do_gcd(x: Int, y: Int) -> Int { + when y is { + 0 -> x + _ -> do_gcd(y, x % y) + } +} + +test gcd_test1() { + gcd(10, 300) == 10 +} + +test gcd_test2() { + gcd(-10, 300) == 10 +} + +test gcd_test3() { + gcd(42, 14) == 14 +} + +/// Checks if an integer has a given integer square root x. +/// The check has constant time complexity $O(1)$. 
+/// +/// ```aiken +/// math.is_sqrt(0, 0) +/// math.is_sqrt(25, 5) +/// !math.is_sqrt(25, -5) +/// math.is_sqrt(44203, 210) +/// ``` +pub fn is_sqrt(self: Int, x: Int) -> Bool { + x * x <= self && ( x + 1 ) * ( x + 1 ) > self +} + +test is_sqrt1() { + is_sqrt(44203, 210) +} + +test is_sqrt2() { + is_sqrt(975461057789971041, 987654321) +} + +/// The logarithm in base `b` of an element using integer divisions. +/// +/// ```aiken +/// math.log(10, base: 2) == 3 +/// math.log(42, base: 2) == 5 +/// math.log(42, base: 3) == 3 +/// math.log(5, base: 0) == 0 +/// math.log(4, base: 4) == 1 +/// math.log(4, base: 42) == 0 +/// ``` +pub fn log(self: Int, base: Int) -> Int { + if base <= 0 { + 0 + } else if self == base { + 1 + } else if self < base { + 0 + } else { + 1 + log(self / base, base) + } +} + +test log_10_2() { + log(10, base: 2) == 3 +} + +test log_42_2() { + log(42, base: 2) == 5 +} + +test log_42_3() { + log(42, base: 3) == 3 +} + +test log_5_0() { + log(5, base: 0) == 0 +} + +test log_4_4() { + log(4, base: 4) == 1 +} + +test log_4_43() { + log(4, base: 43) == 0 +} + +/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. +/// +/// ```aiken +/// math.log2(1) == 0 +/// math.log2(2) == 1 +/// math.log2(3) == 1 +/// math.log2(4) == 2 +/// math.log2(256) == 8 +/// math.log2(257) == 8 +/// math.log2(511) == 8 +/// math.log2(1025) == 10 +/// ``` +pub fn log2(x: Int) -> Int { + expect x > 0 + let s = builtin.integer_to_bytearray(True, 0, x) + let len = builtin.length_of_bytearray(s) + let b = builtin.index_bytearray(s, 0) + len * 8 - if b < 2 { + 8 + } else if b < 4 { + 7 + } else if b < 8 { + 6 + } else if b < 16 { + 5 + } else if b < 32 { + 4 + } else if b < 64 { + 3 + } else if b < 128 { + 2 + } else { + 1 + } +} + +test log2_matrix() { + and { + log2(1) == 0, + log2(2) == 1, + log2(3) == 1, + log2(4) == 2, + log2(256) == 8, + log2(257) == 8, + log2(511) == 8, + log2(1025) == 10, + } +} + +/// Return the maximum of two integers. +pub fn max(a: Int, b: Int) -> Int { + if a > b { + a + } else { + b + } +} + +test max_1() { + max(0, 0) == 0 +} + +test max_2() { + max(14, 42) == 42 +} + +test max_3() { + max(42, 14) == 42 +} + +/// Return the minimum of two integers. +pub fn min(a: Int, b: Int) -> Int { + if a > b { + b + } else { + a + } +} + +test min_1() { + min(0, 0) == 0 +} + +test min_2() { + min(14, 42) == 14 +} + +test min_3() { + min(42, 14) == 14 +} + +/// Calculates a number to the power of `e` using the exponentiation by +/// squaring method. +/// +/// ```aiken +/// math.pow(3, 5) == 243 +/// math.pow(7, 2) == 49 +/// math.pow(3, -4) == 0 +/// math.pow(0, 0) == 1 +/// math.pow(513, 3) == 135005697 +/// ``` +pub fn pow(self: Int, e: Int) -> Int { + if e < 0 { + 0 + } else if e == 0 { + 1 + } else if e % 2 == 0 { + pow(self * self, e / 2) + } else { + self * pow(self * self, ( e - 1 ) / 2) + } +} + +test pow_3_5() { + pow(3, 5) == 243 +} + +test pow_7_2() { + pow(7, 2) == 49 +} + +test pow_3__4() { + // negative powers round to zero + pow(3, -4) == 0 +} + +test pow_0_0() { + // sorry math + pow(0, 0) == 1 +} + +test pow_513_3() { + pow(513, 3) == 135005697 +} + +test pow_2_4() { + pow(2, 4) == 16 +} + +test pow_2_42() { + pow(2, 42) == 4398046511104 +} + +/// Calculates the power of 2 for a given exponent `e`. Much cheaper than +/// using `pow(2, _)` for small exponents $0 < e < 256$. 
+/// +/// ```aiken +/// math.pow2(-2) == 0 +/// math.pow2(0) == 1 +/// math.pow2(1) == 2 +/// math.pow2(4) == 16 +/// math.pow2(42) == 4398046511104 +/// ``` +pub fn pow2(e: Int) -> Int { + // do_pow2(e, 1) + if e < 8 { + if e < 0 { + 0 + } else { + builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) + } + } else if e < 32 { + 256 * pow2(e - 8) + } else { + 4294967296 * pow2(e - 32) + } +} + +test pow2_neg() { + pow2(-2) == 0 +} + +test pow2_0() { + pow2(0) == 1 +} + +test pow2_1() { + pow2(1) == 2 +} + +test pow2_4() { + pow2(4) == 16 +} + +test pow2_42() { + pow2(42) == 4398046511104 +} + +test pow2_256() { + pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 +} + +/// Calculates the square root of an integer using the [Babylonian +/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer +/// nearest to the square root. +/// +/// Returns `None` for negative values. +/// +/// ```aiken +/// math.sqrt(0) == Some(0) +/// math.sqrt(25) == Some(5) +/// math.sqrt(44203) == Some(210) +/// math.sqrt(-42) == None +/// ``` +/// +/// > [!TIP] +/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. +pub fn sqrt(self: Int) -> Option { + if self < 0 { + None + } else if self <= 1 { + Some(self) + } else { + Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) + } +} + +// The basic idea is that if x is an overestimate to the square root of a +// non-negative real number S then S/x will be an underestimate, or vice versa, +// and so the average of these two numbers may reasonably be expected to provide a +// better approximation (though the formal proof of that assertion depends on the +// inequality of arithmetic and geometric means that shows this average is always +// an overestimate of the square root. +fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { + if y >= x { + x + } else { + sqrt_babylonian(self, y, ( y + self / y ) / 2) + } +} + +test sqrt1() { + sqrt(0) == Some(0) +} + +test sqrt2() { + sqrt(1) == Some(1) +} + +test sqrt3() { + sqrt(25) == Some(5) +} + +test sqrt4() { + sqrt(44203) == Some(210) +} + +test sqrt5() { + sqrt(975461057789971041) == Some(987654321) +} + +test sqrt6() { + sqrt(-42) == None +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak new file mode 100644 index 00000000..88fe7ab7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak @@ -0,0 +1,871 @@ +//// This module implements operations between rational numbers. +//// +//// > [!CAUTION] +//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. +//// > +//// > Thus, for example: +//// > +//// > ```aiken +//// > rational.new(2, 3) != rational.new(4, 6) +//// > ``` +//// > +//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. + +use aiken/builtin +use aiken/collection/list +use aiken/math +use aiken/option + +/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. 
+pub opaque type Rational { + numerator: Int, + denominator: Int, +} + +// ## Constructing + +/// Create a new `Rational` from an `Int`. +/// +/// ```aiken +/// Some(rational.from_int(14)) == rational.new(14, 1) +/// Some(rational.from_int(-5)) == rational.new(-5, 1) +/// Some(rational.from_int(0)) == rational.new(0, 1) +/// ``` +pub fn from_int(numerator: Int) -> Rational { + Rational { numerator, denominator: 1 } +} + +test from_int_1() { + and { + (from_int(14) == ratio(14, 1))?, + (from_int(-5) == ratio(-5, 1))?, + (from_int(0) == ratio(0, 1))?, + } +} + +/// An unsafe constructor for `Rational` values. Assumes that the following invariants are +/// enforced: +/// +/// - the denominator is positive (the sign is managed in the numerator); +/// - the denominator is not null. +/// +/// This function is mainly used as a quick way to construct rationals from literal values. +fn ratio(numerator: Int, denominator: Int) -> Rational { + Rational { numerator, denominator } +} + +/// Make a `Rational` number from the ratio of two integers. +/// +/// Returns `None` when the denominator is null. +/// +/// ```aiken +/// rational.new(14, 42) == Some(r) +/// rational.new(14, 0) == None +/// ``` +pub fn new(numerator: Int, denominator: Int) -> Option { + if denominator == 0 { + None + } else if denominator < 0 { + Some(Rational { numerator: -numerator, denominator: -denominator }) + } else { + Some(Rational { numerator, denominator }) + } +} + +test new_1() { + and { + (new(2, 0) == None)?, + (new(2, 3) == Some(ratio(2, 3)))?, + (new(-2, 3) == Some(ratio(-2, 3)))?, + (new(2, -3) == Some(ratio(-2, 3)))?, + (new(2, 4) == Some(ratio(2, 4)))?, + (new(-2, -3) == Some(ratio(2, 3)))?, + (new(-2, -4) == Some(ratio(2, 4)))?, + } +} + +/// A null `Rational`. +pub const zero: Rational = Rational { numerator: 0, denominator: 1 } + +test zero_1() { + zero == ratio(0, 1) +} + +// ## Inspecting + +/// Get the denominator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.denominator(x) == 3 +/// ``` +pub fn denominator(self: Rational) -> Int { + self.denominator +} + +test denominator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + and { + (denominator(x) == 3)?, + (denominator(y) == 3)?, + (denominator(z) == 3)?, + (denominator(w) == 3)?, + } +} + +/// Get the numerator of a rational value. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.numerator(x) == 2 +/// ``` +pub fn numerator(self: Rational) -> Int { + self.numerator +} + +test numerator_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(-2, 3) + expect Some(z) = new(2, -3) + expect Some(w) = new(-2, -3) + + and { + (numerator(x) == 2)?, + (numerator(y) == -2)?, + (numerator(z) == -2)?, + (numerator(w) == 2)?, + } +} + +// ## Modifying + +/// Absolute value of a `Rational`. +/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.abs(x) == x +/// rational.abs(y) == x +/// ``` +pub fn abs(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + Rational { numerator: math.abs(a_n), denominator: a_d } +} + +test abs_examples() { + and { + (abs(ratio(5, 2)) == ratio(5, 2))?, + (abs(ratio(-5, 2)) == ratio(5, 2))?, + (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, + } +} + +/// Change the sign of a `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(3, 2) +/// expect Some(y) = rational.new(-3, 2) +/// +/// rational.negate(x) == y +/// rational.negate(y) == x +/// ``` +pub fn negate(a: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = a + Rational { numerator: -a_n, denominator: a_d } +} + +test negate_1() { + and { + (negate(ratio(5, 2)) == ratio(-5, 2))?, + (negate(ratio(-5, 2)) == ratio(5, 2))?, + (negate(negate(ratio(5, 2))) == ratio(5, 2))?, + } +} + +/// Reciprocal of a `Rational` number. That is, a new `Rational` where the +/// numerator and denominator have been swapped. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 5) +/// rational.reciprocal(x) == rational.new(5, 2) +/// +/// let y = rational.zero +/// rational.reciprocal(y) == None +/// ``` +pub fn reciprocal(self: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = self + if a_n < 0 { + Some(Rational { numerator: -a_d, denominator: -a_n }) + } else if a_n > 0 { + Some(Rational { numerator: a_d, denominator: a_n }) + } else { + None + } +} + +test reciprocal_1() { + and { + (reciprocal(ratio(5, 2)) == new(2, 5))?, + (reciprocal(ratio(-5, 2)) == new(-2, 5))?, + (reciprocal(ratio(0, 2)) == None)?, + (reciprocal(ratio(2, 3)) == new(3, 2))?, + (reciprocal(ratio(-2, 3)) == new(-3, 2))?, + } +} + +/// Reduce a rational to its irreducible form. This operation makes the +/// numerator and denominator coprime. +/// +/// ```aiken +/// expect Some(x) = rational.new(80, 200) +/// Some(rational.reduce(x)) == rational.new(2, 5) +/// ``` +pub fn reduce(self: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = self + let d = math.gcd(a_n, a_d) + Rational { numerator: a_n / d, denominator: a_d / d } +} + +test reduce_1() { + and { + (reduce(ratio(80, 200)) == ratio(2, 5))?, + (reduce(ratio(-5, 1)) == ratio(-5, 1))?, + (reduce(ratio(0, 3)) == ratio(0, 1))?, + } +} + +// ## Combining + +// ### Arithmetic operations + +/// Addition: sum of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.add(x, y)) == rational.new(17, 12) +/// ``` +pub fn add(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } +} + +test add_1() { + add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) +} + +test add_2() { + add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) +} + +/// Division: quotient of two rational values. Returns `None` when the second +/// value is null. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.div(x, y) == rational.new(8, 9) +/// ``` +pub fn div(left: Rational, right: Rational) -> Option { + reciprocal(right) |> option.map(mul(left, _)) +} + +test div_1() { + div(ratio(2, 3), ratio(3, 4)) == new(8, 9) +} + +test div_2() { + div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) +} + +/// Multiplication: the product of two rational values. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.mul(x, y)) == rational.new(6, 12) +/// ``` +pub fn mul(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_n, denominator: a_d * b_d } +} + +test mul_1() { + mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) +} + +test mul_2() { + mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) +} + +test mul_3() { + let result = + ratio(2, 5) + |> mul(ratio(1, 8)) + |> mul(ratio(3, 10)) + |> mul(ratio(21, 100)) + |> mul(ratio(3, 5)) + |> mul(ratio(2, 8)) + |> mul(ratio(4, 10)) + |> mul(ratio(22, 100)) + |> reduce + + result == ratio(2079, 50000000) +} + +/// Subtraction: difference of two rational values +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// Some(rational.sub(x, y)) == rational.new(-1, 12) +/// ``` +pub fn sub(left: Rational, right: Rational) -> Rational { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } +} + +test sub_1() { + sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) +} + +test sub_2() { + sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) +} + +test sub_3() { + sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) +} + +// ### Ordering + +/// Compare two rationals for an ordering. This is safe to use even for +/// non-reduced rationals. +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// expect Some(z) = rational.new(4, 6) +/// +/// compare(x, y) == Less +/// compare(y, x) == Greater +/// compare(x, x) == Equal +/// compare(x, z) == Equal +/// ``` +pub fn compare(left: Rational, right: Rational) -> Ordering { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + + let l = a_n * b_d + let r = b_n * a_d + + if l < r { + Less + } else if l > r { + Greater + } else { + Equal + } +} + +test compare_1() { + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + expect Some(z) = new(4, 6) + and { + compare(x, y) == Less, + compare(y, x) == Greater, + compare(x, x) == Equal, + compare(x, z) == Equal, + } +} + +/// Comparison of two rational values using a chosen heuristic. For example: +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// expect Some(y) = rational.new(3, 4) +/// +/// rational.compare_with(x, >, y) == False +/// rational.compare_with(y, >, x) == True +/// rational.compare_with(x, >, x) == False +/// rational.compare_with(x, >=, x) == True +/// rational.compare_with(x, ==, x) == True +/// rational.compare_with(x, ==, y) == False +/// ``` +pub fn compare_with( + left: Rational, + with: fn(Int, Int) -> Bool, + right: Rational, +) -> Bool { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + with(a_n * b_d, b_n * a_d) +} + +// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. + +test compare_with_eq() { + let eq = + compare_with(_, fn(l, r) { l == r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
+} + +test compare_with_neq() { + let neq = + compare_with(_, fn(l, r) { l != r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + neq(x, y)? && neq(y, x)? && !neq(x, x)? +} + +test compare_with_gte() { + let gte = + compare_with(_, fn(l, r) { l >= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gte(x, y)? && gte(y, x)? && gte(x, x)? +} + +test compare_with_gt() { + let gt = + compare_with(_, fn(l, r) { l > r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + !gt(x, y)? && gt(y, x)? && !gt(x, x)? +} + +test compare_with_lte() { + let lte = + compare_with(_, fn(l, r) { l <= r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lte(x, y)? && !lte(y, x)? && lte(x, x)? +} + +test compare_with_lt() { + let lt = + compare_with(_, fn(l, r) { l < r }, _) + + expect Some(x) = new(2, 3) + expect Some(y) = new(3, 4) + + lt(x, y)? && !lt(y, x)? && !lt(x, x)? +} + +// ### Means + +/// Calculate the arithmetic mean between two `Rational` values. +/// +/// ```aiken +/// let x = rational.from_int(0) +/// let y = rational.from_int(1) +/// let z = rational.from_int(2) +/// +/// expect Some(result) = rational.arithmetic_mean([x, y, z]) +/// +/// rational.compare(result, y) == Equal +/// ``` +pub fn arithmetic_mean(self: List) -> Option { + div(list.foldr(self, zero, add), from_int(list.length(self))) +} + +test arithmetic_mean_1() { + let x = ratio(1, 2) + let y = ratio(1, 2) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(1, 2) +} + +test arithmetic_mean_2() { + let x = ratio(1, 1) + let y = ratio(2, 1) + expect Some(z) = arithmetic_mean([x, y]) + reduce(z) == ratio(3, 2) +} + +test arithmetic_mean_3() { + let xs = + [ + ratio(1, 1), + ratio(2, 1), + ratio(3, 1), + ratio(4, 1), + ratio(5, 1), + ratio(6, 1), + ] + expect Some(z) = arithmetic_mean(xs) + reduce(z) == ratio(7, 2) +} + +/// Calculate the geometric mean between two `Rational` values. This returns +/// either the exact result or the smallest integer nearest to the square root +/// for the numerator and denominator. 
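Editor's note: the rounding behaviour of `geometric_mean` follows from the integer square roots it takes; a small illustrative check (not part of the upstream module, assuming the usual `aiken/math` and `aiken/math/rational` imports):

```aiken
use aiken/math
use aiken/math/rational

test geometric_mean_truncates_square_roots() {
  // 1/2 * 1/3 == 1/6. Since 6 is not a perfect square, the denominator
  // becomes the integer square root of 6 (i.e. 2), so the mean is reported
  // as 1/2 rather than the exact, irrational value.
  expect Some(half) = rational.new(1, 2)
  expect Some(third) = rational.new(1, 3)
  and {
    math.sqrt(6) == Some(2),
    rational.geometric_mean(half, third) == rational.new(1, 2),
  }
}
```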
+/// +/// ```aiken +/// expect Some(x) = rational.new(1, 3) +/// expect Some(y) = rational.new(1, 6) +/// +/// rational.geometric_mean(x, y) == rational.new(1, 4) +/// ``` +pub fn geometric_mean(left: Rational, right: Rational) -> Option { + let Rational { numerator: a_n, denominator: a_d } = left + let Rational { numerator: b_n, denominator: b_d } = right + when math.sqrt(a_n * b_n) is { + Some(numerator) -> + when math.sqrt(a_d * b_d) is { + Some(denominator) -> Some(Rational { numerator, denominator }) + None -> None + } + None -> None + } +} + +test geometric_mean1() { + expect Some(x) = new(1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == new(1, 2) +} + +test geometric_mean2() { + expect Some(x) = new(-1, 2) + expect Some(y) = new(1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean3() { + expect Some(x) = new(1, 2) + expect Some(y) = new(-1, 2) + geometric_mean(x, y) == None +} + +test geometric_mean4() { + expect Some(x) = new(1, 3) + expect Some(y) = new(1, 6) + geometric_mean(x, y) == new(1, 4) +} + +test geometric_mean5() { + expect Some(x) = new(67, 2500) + expect Some(y) = new(35331, 1000) + expect Some(yi) = reciprocal(y) + geometric_mean(x, yi) == new(258, 9398) +} + +// ## Transforming + +/// Returns the smallest `Int` not less than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.ceil(x) == 1 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.ceil(y) == 4 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.ceil(z) == -4 +/// ``` +pub fn ceil(self: Rational) -> Int { + let Rational { numerator, denominator } = self + if builtin.remainder_integer(numerator, denominator) > 0 { + builtin.quotient_integer(numerator, denominator) + 1 + } else { + builtin.quotient_integer(numerator, denominator) + } +} + +test ceil_1() { + and { + (ceil(ratio(13, 5)) == 3)?, + (ceil(ratio(15, 5)) == 3)?, + (ceil(ratio(16, 5)) == 4)?, + (ceil(ratio(-3, 5)) == 0)?, + (ceil(ratio(-5, 5)) == -1)?, + (ceil(ratio(-14, 3)) == -4)?, + (ceil(ratio(-14, 6)) == -2)?, + (ceil(ratio(44, 14)) == 4)?, + } +} + +/// Returns the greatest `Int` no greater than a given `Rational` +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.floor(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.floor(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.floor(z) == -5 +/// ``` +pub fn floor(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + a_n / a_d +} + +test floor_1() { + and { + (floor(ratio(5, 2)) == 2)?, + (floor(ratio(5, 3)) == 1)?, + (floor(ratio(5, 4)) == 1)?, + (floor(ratio(5, 5)) == 1)?, + (floor(ratio(5, 6)) == 0)?, + (floor(ratio(8, 3)) == 2)?, + (floor(ratio(-14, 3)) == -5)?, + } +} + +/// Computes the rational number x raised to the power y. Returns `None` for +/// invalid exponentiation. 
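Editor's note: "invalid exponentiation" here means a zero numerator combined with a non-positive exponent. A rough sketch of the edge cases (not upstream code, assuming the usual `aiken/math/rational` import):

```aiken
use aiken/math/rational

test pow_undefined_cases() {
  expect Some(zero_r) = rational.new(0, 7)
  expect Some(two) = rational.new(2, 1)
  and {
    // 0 raised to 0 or to a negative power is undefined, hence None.
    rational.pow(zero_r, 0) == None,
    rational.pow(zero_r, -2) == None,
    // A negative exponent on a non-zero value is the reciprocal's power.
    rational.pow(two, -2) == rational.new(1, 4),
  }
}
```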
+/// +/// ```aiken +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) +/// +/// expect Some(x) = rational.new(50, 2500) +/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) +/// ``` +pub fn pow(x: Rational, y: Int) -> Option { + let Rational { numerator: a, denominator: b } = x + + if a == 0 && y <= 0 { + None + } else if y > 0 { + Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) + } else if y < 0 { + Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) + } else { + Some(Rational { numerator: 1, denominator: 1 }) + } +} + +test pow_negative_exponent_non_zero_fraction() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, -3) + expect Some(expected_result) = new(125000, 1) + reduce(calculated_result) == expected_result +} + +test pow_positive_exponent() { + expect Some(base) = new(50, 2500) + expect Some(calculated_result) = pow(base, 3) + expect Some(expected_result) = new(1, 125000) + reduce(calculated_result) == expected_result +} + +test pow_exponent_zero() { + expect Some(base) = new(50, 2500) + pow(base, 0) == new(1, 1) +} + +test pow_rational_zero_exponent_zero() { + expect Some(base) = new(0, 1) + pow(base, 0) == None +} + +/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of +/// an `Int` and `Rational` (n, f) such that: +/// +/// - `r = n + f`; +/// - `n` and `f` have the same sign as `r`; +/// - `f` has an absolute value less than 1. +pub fn proper_fraction(self: Rational) -> (Int, Rational) { + let Rational { numerator, denominator } = self + ( + builtin.quotient_integer(numerator, denominator), + Rational { + numerator: builtin.remainder_integer(numerator, denominator), + denominator, + }, + ) +} + +test proper_fraction_1() { + let r = ratio(10, 7) + let (n, f) = proper_fraction(r) + and { + (n == 1)?, + (f == ratio(3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_2() { + let r = ratio(-10, 7) + let (n, f) = proper_fraction(r) + and { + (n == -1)?, + (f == ratio(-3, 7))?, + (r == add(from_int(n), f))?, + } +} + +test proper_fraction_3() { + let r = ratio(4, 2) + let (n, f) = proper_fraction(r) + and { + (n == 2)?, + (f == ratio(0, 2))?, + (r == add(from_int(n), f))?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, the greater value is returned (it +/// rounds half towards positive infinity). +/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.round(x) == 1 +/// +/// expect Some(y) = rational.new(3, 2) +/// rational.round(y) == 2 +/// +/// expect Some(z) = rational.new(-3, 2) +/// rational.round(z) == -1 +/// ``` +/// +/// > [!CAUTION] +/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. +/// > If you need this behaviour, use [`round_even`](#round_even). 
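Editor's note: since the module offers several Rational-to-Int conversions, the sketch below (illustrative only, not upstream code) contrasts `round`, `round_even`, `floor`, `ceil` and `truncate` on the same inputs:

```aiken
use aiken/math/rational

test rounding_family() {
  expect Some(pos) = rational.new(5, 2)
  expect Some(neg) = rational.new(-14, 3)
  and {
    // 5/2 = 2.5: half-up rounding vs banker's rounding vs floor/ceil/truncate.
    rational.round(pos) == 3,
    rational.round_even(pos) == 2,
    rational.floor(pos) == 2,
    rational.ceil(pos) == 3,
    rational.truncate(pos) == 2,
    // -14/3 is about -4.67: truncation moves towards zero, floor does not.
    rational.round(neg) == -5,
    rational.floor(neg) == -5,
    rational.ceil(neg) == -4,
    rational.truncate(neg) == -4,
  }
}
```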
+pub fn round(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let is_negative = f.numerator < 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_negative { + n + } else { + n + 1 + } + Greater -> + if is_negative { + n - 1 + } else { + n + 1 + } + } +} + +test round_1() { + and { + (round(ratio(10, 7)) == 1)?, + (round(ratio(11, 7)) == 2)?, + (round(ratio(3, 2)) == 2)?, + (round(ratio(5, 2)) == 3)?, + (round(ratio(-3, 2)) == -1)?, + (round(ratio(-2, 3)) == -1)?, + (round(ratio(-10, 7)) == -1)?, + (round(ratio(4, 2)) == 2)?, + } +} + +/// Round the argument to the nearest whole number. If the argument is +/// equidistant between two values, it returns the value that is even (it +/// rounds half to even, also known as 'banker's rounding'). +/// +/// ```aiken +/// expect Some(w) = rational.new(2, 3) +/// rational.round_even(w) == 1 +/// +/// expect Some(x) = rational.new(3, 2) +/// rational.round_even(x) == 2 +/// +/// expect Some(y) = rational.new(5, 2) +/// rational.round_even(y) == 2 +/// +/// expect Some(y) = rational.new(-3, 2) +/// rational.round_even(y) == -2 +/// ``` +pub fn round_even(self: Rational) -> Int { + let (n, f) = proper_fraction(self) + + let m = + when compare(f, ratio(0, 1)) is { + Less -> -1 + _ -> 1 + } + + let is_even = n % 2 == 0 + + when compare(abs(f), ratio(1, 2)) is { + Less -> n + Equal -> + if is_even { + n + } else { + n + m + } + Greater -> n + m + } +} + +test round_even_1() { + and { + (round_even(ratio(10, 7)) == 1)?, + (round_even(ratio(11, 7)) == 2)?, + (round_even(ratio(3, 2)) == 2)?, + (round_even(ratio(5, 2)) == 2)?, + (round_even(ratio(-3, 2)) == -2)?, + (round_even(ratio(-2, 3)) == -1)?, + (round_even(ratio(-10, 7)) == -1)?, + (round_even(ratio(4, 2)) == 2)?, + } +} + +/// Returns the nearest `Int` between zero and a given `Rational`. 
+/// +/// ```aiken +/// expect Some(x) = rational.new(2, 3) +/// rational.truncate(x) == 0 +/// +/// expect Some(y) = rational.new(44, 14) +/// rational.truncate(y) == 3 +/// +/// expect Some(z) = rational.new(-14, 3) +/// rational.truncate(z) == -4 +/// ``` +pub fn truncate(self: Rational) -> Int { + let Rational { numerator: a_n, denominator: a_d } = self + builtin.quotient_integer(a_n, a_d) +} + +test truncate_1() { + and { + (truncate(ratio(5, 2)) == 2)?, + (truncate(ratio(5, 3)) == 1)?, + (truncate(ratio(5, 4)) == 1)?, + (truncate(ratio(5, 5)) == 1)?, + (truncate(ratio(5, 6)) == 0)?, + (truncate(ratio(8, 3)) == 2)?, + (truncate(ratio(-14, 3)) == -4)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak new file mode 100644 index 00000000..ab8cbc17 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak @@ -0,0 +1,65 @@ +use aiken/fuzz.{both, either, map} +use aiken/math/rational.{Rational, new, pow} + +const any_positive_rational: Fuzzer = + either( + map( + both(fuzz.int_at_least(1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_negative_rational: Fuzzer = + either( + map( + both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + map( + both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), + fn((num, den)) { + expect Some(new_fraction) = new(num, den) + new_fraction + }, + ), + ) + +const any_non_zero_rational: Fuzzer = + either(any_negative_rational, any_positive_rational) + +test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 0) + expect Some(expected_result) = new(1, 1) + calculated_result == expected_result +} + +test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { + expect Some(calculated_result) = pow(rational, 1) + calculated_result == rational +} + +test prop_power_numerator_zero_exponent_negative_returns_none( + (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), +) { + expect Some(fraction) = new(0, denominator) + expect None = pow(fraction, exponent) +} + +test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { + expect Some(unit) = new(1, 1) + expect Some(calculated_result) = pow(unit, exponent) + calculated_result == unit +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak new file mode 100644 index 00000000..cf5ef7dc --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak @@ -0,0 +1,312 @@ +//// A type to capture optional results; useful for handling errors. +//// +//// Note that the `Option` type and its constructors are readily available in Aiken. 
They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. + +// ## Inspecting + +/// Asserts whether an option is `None`. +pub fn is_none(self: Option) -> Bool { + when self is { + Some(_) -> False + _ -> True + } +} + +test is_none_1() { + is_none(Some(0)) == False +} + +test is_none_2() { + is_none(None) == True +} + +/// Asserts whether an option is `Some`, irrespective of the value it contains. +pub fn is_some(self: Option) -> Bool { + when self is { + Some(_) -> True + _ -> False + } +} + +test is_some_1() { + is_some(Some(0)) == True +} + +test is_some_2() { + is_some(None) == False +} + +// ## Combining + +/// Chain together many computations that may fail. +/// +/// ```aiken +/// self +/// |> dict.get(policy_id) +/// |> option.and_then(dict.get(_, asset_name)) +/// |> option.or_else(0) +/// ``` +pub fn and_then( + self: Option, + then: fn(a) -> Option, +) -> Option { + when self is { + None -> None + Some(a) -> then(a) + } +} + +fn try_decrement(n: Int) -> Option { + if n > 0 { + Some(n - 1) + } else { + None + } +} + +test and_then_1() { + let result = + None + |> and_then(try_decrement) + result == None +} + +test and_then_2() { + let result = + Some(14) + |> and_then(try_decrement) + result == Some(13) +} + +test and_then_3() { + let result = + Some(0) + |> and_then(try_decrement) + result == None +} + +/// Picks the first element which is not None. If there's no such element, return None. +/// +/// ```aiken +/// option.choice([]) == None +/// option.choice([Some(14), Some(42)]) == Some(14) +/// option.choice([None, Some(42)]) == Some(42) +/// option.choice([None, None]) == None +/// ``` +pub fn choice(self: List>) -> Option { + when self is { + [] -> None + [head, ..others] -> + when head is { + None -> choice(others) + _ -> head + } + } +} + +test choice_1() { + Some(1) == choice([Some(1), Some(2)]) +} + +test choice_2() { + None == choice([]) +} + +test choice_3() { + Some(1) == choice([None, Some(1)]) +} + +/// Converts from `Option>` to `Option`. +/// +/// ```aiken +/// option.flatten(Some(Some(42))) == Some(42) +/// option.flatten(Some(None)) == None +/// option.flatten(None) == None +/// ``` +/// +/// Flattening only removes one level of nesting at a time: +/// +/// ```aiken +/// flatten(Some(Some(Some(42)))) == Some(Some(42)) +/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) +/// ``` +pub fn flatten(opt: Option>) -> Option { + when opt is { + Some(inner) -> inner + None -> None + } +} + +test flatten_1() { + let x: Option> = Some(Some(6)) + Some(6) == flatten(x) +} + +test flatten_2() { + let x: Option> = Some(None) + None == flatten(x) +} + +test flatten_3() { + let x: Option> = None + None == flatten(x) +} + +test flatten_4() { + let x: Option>> = Some(Some(Some(6))) + + let result = + x + |> flatten + |> flatten + + Some(6) == result +} + +/// Apply a function to the inner value of an [`Option`](#option) +/// +/// ```aiken +/// option.map(None, fn(n) { n * 2 }) == None +/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) +/// ``` +pub fn map(self: Option, with: fn(a) -> result) -> Option { + when self is { + None -> None + Some(a) -> Some(with(a)) + } +} + +test map_1() { + map(None, fn(_) { Void }) == None +} + +test map_2() { + map(Some(14), fn(n) { n + 1 }) == Some(15) +} + +/// Combine two [`Option`](#option) together. 
+/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int) +/// } +/// +/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) +/// option.map2(None, Some(42), Foo) == None +/// option.map2(Some(14), None, Foo) == None +/// ``` +pub fn map2( + opt_a: Option, + opt_b: Option, + with: fn(a, b) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> Some(with(a, b)) + } + } +} + +test map2_1() { + map2(None, Some(42), fn(_, _) { 14 }) == None +} + +test map2_2() { + map2(Some(42), None, fn(_, _) { 14 }) == None +} + +test map2_3() { + map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) +} + +/// Combine three [`Option`](#option) together. +/// +/// ```aiken +/// type Foo { +/// Foo(Int, Int, Int) +/// } +/// +/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) +/// option.map3(None, Some(42), Some(1337), Foo) == None +/// option.map3(Some(14), None, None, Foo) == None +/// ``` +pub fn map3( + opt_a: Option, + opt_b: Option, + opt_c: Option, + with: fn(a, b, c) -> result, +) -> Option { + when opt_a is { + None -> None + Some(a) -> + when opt_b is { + None -> None + Some(b) -> + when opt_c is { + None -> None + Some(c) -> Some(with(a, b, c)) + } + } + } +} + +test map3_1() { + map3(None, Some(42), None, fn(_, _, _) { 14 }) == None +} + +test map3_2() { + map3(Some(42), None, None, fn(_, _, _) { 14 }) == None +} + +test map3_3() { + map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) +} + +/// Like [`or_else`](#or_else) but allows returning an `Option`. +/// This is effectively mapping the error branch. +/// +/// ```aiken +/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") +/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) +/// option.or_try(None, fn (_) { fail }) => 💥 +/// ``` +pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { + when self is { + None -> compute_default() + _ -> self + } +} + +test or_try_1() { + or_try(None, fn() { Some("aiken") }) == Some("aiken") +} + +test or_try_2() { + or_try(Some(42), fn() { fail }) == Some(42) +} + +// ## Transforming + +/// Provide a default value, turning an optional value into a normal value. +/// +/// ```aiken +/// option.or_else(None, "aiken") == "aiken" +/// option.or_else(Some(42), 14) == 42 +/// ``` +pub fn or_else(self: Option, default: a) -> a { + when self is { + None -> default + Some(a) -> a + } +} + +test or_else_1() { + or_else(None, "aiken") == "aiken" +} + +test or_else_2() { + or_else(Some(42), 14) == 42 +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak new file mode 100644 index 00000000..d2f125f5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak @@ -0,0 +1,668 @@ +use aiken/builtin +use aiken/math +use aiken/option + +pub type Byte = + Int + +// ## Constructing + +/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is left-padded with zeroes. 
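Editor's note: a small sanity sketch (not upstream code) for the `size` argument; it assumes a qualified `aiken/primitive/bytearray` import and uses `to_int_big_endian`, defined further down in this module:

```aiken
use aiken/primitive/bytearray

test from_int_big_endian_size_behaviour() {
  and {
    // 255 is the largest value that fits into a single byte.
    bytearray.from_int_big_endian(255, 1) == #"ff",
    // A larger size only adds leading zeroes, which decode to the same value.
    bytearray.to_int_big_endian(bytearray.from_int_big_endian(255, 4)) == 255,
  }
}
```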
+/// +/// ```aiken +/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" +/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" +/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(True, size, self) +} + +test from_int_big_endian_1() { + from_int_big_endian(1_000_000, 3) == #"0f4240" +} + +test from_int_big_endian_2() { + from_int_big_endian(1_000_000, 5) == #"00000f4240" +} + +test from_int_big_endian_3() { + from_int_big_endian(0, 8) == #"0000000000000000" +} + +test from_int_big_endian_4() fail { + from_int_big_endian(1_000_000, 1) == #"40" +} + +/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. +/// The size is the expected size in number of bytes. +/// +/// > [!IMPORTANT] +/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the +/// > size is _too large_, the array is right-padded with zeroes. +/// +/// ```aiken +/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" +/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" +/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" +/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 +/// ``` +pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { + builtin.integer_to_bytearray(False, size, self) +} + +test from_int_little_endian_1() { + from_int_little_endian(1_000_000, 3) == #"40420f" +} + +test from_int_little_endian_2() { + from_int_little_endian(1_000_000, 5) == #"40420f0000" +} + +test from_int_little_endian_3() { + from_int_little_endian(0, 8) == #"0000000000000000" +} + +test from_int_little_endian_4() fail { + from_int_little_endian(1_000_000, 1) == #"40" +} + +/// Convert a `String` into a `ByteArray`. +/// +/// ```aiken +/// bytearray.from_string(@"ABC") == #"414243" +/// ``` +pub fn from_string(str: String) -> ByteArray { + builtin.encode_utf8(str) +} + +test from_string_1() { + from_string(@"") == "" +} + +test from_string_2() { + from_string(@"ABC") == #"414243" +} + +/// Add a byte element in front of a `ByteArray`. When the given byte is +/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so +/// forth. +/// In PlutusV3 this will error instead of wrapping around. +/// +/// ```aiken +/// bytearray.push(#"", 0) == #"00" +/// bytearray.push(#"0203", 1) == #"010203" +/// bytearray.push(#"0203", 257) == #"010203" +/// ``` +pub fn push(self: ByteArray, byte: Byte) -> ByteArray { + builtin.cons_bytearray(byte, self) +} + +test push_1() { + push(#[], 0) == #[0] +} + +test push_2() { + push(#[2, 3], 1) == #[1, 2, 3] +} + +test push_3() fail { + let x = 257 + push(#[2, 3], x) == #[1, 2, 3] +} + +// ## Inspecting + +/// Get the `Byte` at the given index, or crash. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if there's no byte at the given index. +pub fn at(self: ByteArray, index: Int) -> Byte { + builtin.index_bytearray(self, index) +} + +/// Search the start and end positions of a sub-array in a `ByteArray`. 
+/// +/// ```aiken +/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) +/// bytearray.index_of("Hello, World!", "foo") == None +/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) +/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) +/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +/// ``` +pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { + let offset = length(bytes) + + do_index_of(self, bytes, 0, offset, length(self)) + |> option.map(fn(ix) { (ix, ix + offset - 1) }) +} + +fn do_index_of( + self: ByteArray, + bytes: ByteArray, + cursor: Int, + offset: Int, + size: Int, +) -> Option { + if cursor + offset > size { + None + } else { + if builtin.slice_bytearray(cursor, offset, self) == bytes { + Some(cursor) + } else { + do_index_of(self, bytes, cursor + 1, offset, size) + } + } +} + +test index_of_1() { + index_of("Hello, World!", "World") == Some((7, 11)) +} + +test index_of_2() { + index_of("Hello, World!", "foo") == None +} + +test index_of_3() { + index_of("Hello, World!", "!") == Some((12, 12)) +} + +test index_of_4() { + index_of("Hello, World!", "o") == Some((4, 4)) +} + +test index_of_5() { + index_of("Hello, World!", "Hello, World!") == Some((0, 12)) +} + +/// Returns `True` when the given `ByteArray` is empty. +/// +/// ```aiken +/// bytearray.is_empty(#"") == True +/// bytearray.is_empty(#"00ff") == False +/// ``` +pub fn is_empty(self: ByteArray) -> Bool { + builtin.length_of_bytearray(self) == 0 +} + +test is_empty_1() { + is_empty(#"") == True +} + +test is_empty_2() { + is_empty(#"01") == False +} + +/// Returns the number of bytes in a `ByteArray`. +/// +/// ```aiken +/// bytearray.length(#[1, 2, 3]) == 3 +/// ``` +pub fn length(self: ByteArray) -> Int { + builtin.length_of_bytearray(self) +} + +test length_1() { + length(#"") == 0 +} + +test length_2() { + length(#"010203") == 3 +} + +/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. +/// +/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the +/// following bits sequence: +/// +/// `8` | `b` | `7` | `6` | `5` | `f` +/// --- | --- | --- | --- | --- | --- +/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` +/// +/// And thus, we have: +/// +/// ```aiken +/// test_bit(#"8b765f", 0) == True +/// test_bit(#"8b765f", 1) == False +/// test_bit(#"8b765f", 2) == False +/// test_bit(#"8b765f", 3) == False +/// test_bit(#"8b765f", 7) == True +/// test_bit(#"8b765f", 8) == False +/// test_bit(#"8b765f", 20) == True +/// test_bit(#"8b765f", 21) == True +/// test_bit(#"8b765f", 22) == True +/// test_bit(#"8b765f", 23) == True +/// ``` +pub fn test_bit(self: ByteArray, ix: Int) -> Bool { + builtin.less_than_equals_bytearray( + #[128], + builtin.cons_bytearray( + builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, + "", + ), + ) +} + +test test_bit_0() { + test_bit(#"8b765f", 0) +} + +test test_bit_1() { + !test_bit(#"8b765f", 1) +} + +test test_bit_2() { + !test_bit(#"8b765f", 2) +} + +test test_bit_3() { + !test_bit(#"8b765f", 3) +} + +test test_bit_7() { + test_bit(#"8b765f", 7) +} + +test test_bit_8() { + !test_bit(#"8b765f", 8) +} + +test test_bit_20_21_22_23() { + and { + test_bit(#"8b765f", 20), + test_bit(#"8b765f", 21), + test_bit(#"8b765f", 22), + test_bit(#"8b765f", 23), + } +} + +// ## Modifying + +/// Returns the suffix of a `ByteArray` after `n` elements. 
+/// +/// ```aiken +/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] +/// ``` +pub fn drop(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) +} + +test drop_1() { + let x = #"01020304050607" + drop(x, 2) == #"0304050607" +} + +test drop_2() { + let x = #"01020304050607" + drop(x, 0) == x +} + +test drop_3() { + let x = #"01" + drop(x, 1) == #"" +} + +test drop_4() { + let x = #"" + drop(x, 2) == #"" +} + +/// Extract a `ByteArray` as a slice of another `ByteArray`. +/// +/// Indexes are 0-based and inclusive. +/// +/// ```aiken +/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] +/// ``` +pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { + builtin.slice_bytearray(start, end - start + 1, self) +} + +test slice_1() { + slice(#"", 1, 2) == #"" +} + +test slice_2() { + slice(#"010203", 1, 2) == #"0203" +} + +test slice_3() { + slice(#"010203", 0, 42) == #"010203" +} + +test slice_4() { + slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] +} + +test slice_5() { + slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] +} + +/// Returns the n-length prefix of a `ByteArray`. +/// +/// ```aiken +/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] +/// ``` +pub fn take(self: ByteArray, n: Int) -> ByteArray { + builtin.slice_bytearray(0, n, self) +} + +test take_1() { + let x = #"01020304050607" + take(x, 2) == #"0102" +} + +test take_2() { + let x = #"01020304050607" + take(x, 0) == #"" +} + +test take_3() { + let x = #"01" + take(x, 1) == x +} + +test take_4() { + let x = #"010203" + take(x, 0) == #"" +} + +// ## Combining + +/// Combine two `ByteArray` together. +/// +/// ```aiken +/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] +/// ``` +pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { + builtin.append_bytearray(left, right) +} + +test concat_1() { + concat(#"", #"") == #"" +} + +test concat_2() { + concat(#"", #"01") == #"01" +} + +test concat_3() { + concat(#"0102", #"") == #"0102" +} + +test concat_4() { + concat(#"0102", #"0304") == #"01020304" +} + +/// Compare two bytearrays lexicographically. +/// +/// ```aiken +/// bytearray.compare(#"00", #"FF") == Less +/// bytearray.compare(#"42", #"42") == Equal +/// bytearray.compare(#"FF", #"00") == Greater +/// ``` +pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { + if builtin.less_than_bytearray(left, right) { + Less + } else if builtin.equals_bytearray(left, right) { + Equal + } else { + Greater + } +} + +// ## Transforming + +/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
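Editor's note: the left fold visits the most significant byte first, which is why the accumulator `acc * 256 + byte` reproduces big-endian decoding. An illustrative check (not upstream code):

```aiken
use aiken/primitive/bytearray

test foldl_matches_big_endian_decoding() {
  let bytes = #"0f4240"
  let decoded = bytearray.foldl(bytes, 0, fn(byte, acc) { acc * 256 + byte })
  decoded == bytearray.to_int_big_endian(bytes)
}
```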
+/// +/// ```aiken +/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] +/// ``` +pub fn foldl( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) +} + +fn do_foldl( + self: ByteArray, + zero: result, + len: Int, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor == len { + zero + } else { + do_foldl( + self, + with(builtin.index_bytearray(self, cursor), zero), + len, + cursor + 1, + with, + ) + } +} + +test foldl_1() { + foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldl_2() { + foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 +} + +test foldl_3() { + foldl( + #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", + 0, + fn(byte, acc) { acc * 256 + byte }, + ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 +} + +test foldl_4() { + foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] +} + +/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. +/// +/// ```aiken +/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] +/// ``` +pub fn foldr( + self: ByteArray, + zero: result, + with: fn(Int, result) -> result, +) -> result { + do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) +} + +fn do_foldr( + self: ByteArray, + zero: result, + cursor: Int, + with: fn(Int, result) -> result, +) -> result { + if cursor < 0 { + zero + } else { + do_foldr( + self, + with(builtin.index_bytearray(self, cursor), zero), + cursor - 1, + with, + ) + } +} + +test foldr_1() { + foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 +} + +test foldr_2() { + foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 +} + +test foldr_3() { + foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] +} + +/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. +/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. +/// +/// ```aiken +/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] +/// ``` +pub fn reduce( + self: ByteArray, + zero: result, + with: fn(result, Int) -> result, +) -> result { + foldl(self, zero, flip(with)) +} + +test reduce_1() { + reduce(#[], #[], push) == #[] +} + +test reduce_2() { + reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] +} + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_big_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(True, self) +} + +test to_int_big_endian_1() { + to_int_big_endian(#"0f4240") == 1_000_000 +} + +test to_int_big_endian_2() { + to_int_big_endian(#"00000f4240") == 1_000_000 +} + +test to_int_big_endian_3() { + to_int_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
+/// +/// ```aiken +/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 +/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 +/// bytearray.to_int_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn to_int_little_endian(self: ByteArray) -> Int { + builtin.bytearray_to_integer(False, self) +} + +test to_int_little_endian_1() { + to_int_little_endian(#"40420f") == 1_000_000 +} + +test to_int_little_endian_2() { + to_int_little_endian(#"40420f0000") == 1_000_000 +} + +test to_int_little_endian_3() { + to_int_little_endian(#"0000000000000000") == 0 +} + +/// Convert a `ByteArray` into a `String`. +/// +/// > [!WARNING] +/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). +/// +/// ```aiken +/// bytearray.to_string(#"414243") == "ABC" +/// bytearray.to_string(some_hash) => 💥 +/// ``` +pub fn to_string(self: ByteArray) -> String { + builtin.decode_utf8(self) +} + +test to_string_1() { + to_string("") == @"" +} + +test to_string_2() { + to_string("ABC") == @"ABC" +} + +/// Encode a `ByteArray` as a hexidecimal `String`. +/// +/// ```aiken +/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" +/// ``` +pub fn to_hex(self: ByteArray) -> String { + self + |> encode_base16(builtin.length_of_bytearray(self) - 1, "") + |> builtin.decode_utf8 +} + +test to_hex_1() { + to_hex("Hello world!") == @"48656C6C6F20776F726C6421" +} + +test to_hex_2() { + to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" +} + +/// Checks whether a `ByteArray` starts with a given prefix. +/// +/// ```aiken +/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True +/// bytearray.starts_with("", prefix: "") == True +/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False +/// ``` +pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { + let prefix_length = length(prefix) + if length(self) < prefix_length { + False + } else { + take(self, prefix_length) == prefix + } +} + +test starts_with_1() { + starts_with("", "") +} + +test starts_with_2() { + starts_with("Hello, World!", "Hello, World!") +} + +test starts_with_3() { + !starts_with("Hello, World!", "hello") +} + +test starts_with_4() { + !starts_with("", "World") +} + +test starts_with_5() { + starts_with("Hello, World", "Hello") +} + +test starts_with_6() { + !starts_with("foo", "foo_") +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak new file mode 100644 index 00000000..217749e9 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak @@ -0,0 +1,156 @@ +use aiken/builtin.{bytearray_to_integer, decode_utf8} +use aiken/math +use aiken/option +use aiken/primitive/bytearray + +// ## Combining + +/// Compare two integers. 
+/// +/// ```aiken +/// int.compare(14, 42) == Less +/// int.compare(14, 14) == Equal +/// int.compare(42, 14) == Greater +/// ``` +pub fn compare(left: Int, right: Int) -> Ordering { + if left < right { + Less + } else if left > right { + Greater + } else { + Equal + } +} + +// ## Transforming + +/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_big_endian(self: ByteArray) -> Int { + bytearray_to_integer(True, self) +} + +test from_bytearray_big_endian_1() { + from_bytearray_big_endian(#"0f4240") == 1_000_000 +} + +test from_bytearray_big_endian_2() { + from_bytearray_big_endian(#"00000f4240") == 1_000_000 +} + +test from_bytearray_big_endian_3() { + from_bytearray_big_endian(#"0000000000000000") == 0 +} + +/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. +/// +/// ```aiken +/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 +/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 +/// int.from_bytearray_big_endian(#"0000000000000000") == 0 +/// ``` +pub fn from_bytearray_little_endian(self: ByteArray) -> Int { + bytearray_to_integer(False, self) +} + +test from_bytearray_little_endian_1() { + from_bytearray_little_endian(#"40420f") == 1_000_000 +} + +test from_bytearray_little_endian_2() { + from_bytearray_little_endian(#"40420f0000") == 1_000_000 +} + +test from_bytearray_little_endian_3() { + from_bytearray_little_endian(#"0000000000000000") == 0 +} + +/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. +/// +/// ```aiken +/// int.from_utf8("14") == Some(14) +/// int.from_utf8("-42") == Some(-42) +/// int.from_utf8("007") == Some(7) +/// int.from_utf8("foo") == None +/// int.from_utf8("1.0") == None +/// int.from_utf8("1-2") == None +/// ``` +pub fn from_utf8(bytes: ByteArray) -> Option { + bytes + |> bytearray.foldr( + Some((0, 0)), + fn(byte, st) { + when st is { + None -> None + Some((n, e)) -> + if byte < 48 || byte > 57 { + if byte == 45 { + Some((-n, 0)) + } else { + None + } + } else if n < 0 { + None + } else { + let digit = byte - 48 + Some((n + digit * math.pow(10, e), e + 1)) + } + } + }, + ) + |> option.map(fn(tuple) { tuple.1st }) +} + +test from_utf8_1() { + from_utf8("0017") == Some(17) +} + +test from_utf8_2() { + from_utf8("42") == Some(42) +} + +test from_utf8_3() { + from_utf8("1337") == Some(1337) +} + +test from_utf8_4() { + from_utf8("-14") == Some(-14) +} + +test from_utf8_5() { + from_utf8("foo") == None +} + +test from_utf8_6() { + from_utf8("1-2") == None +} + +/// Convert an `Int` to its `String` representation. 
+/// +/// ```aiken +/// int.to_string(42) == @"42" +/// ``` +pub fn to_string(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test to_string_1() { + to_string(0) == @"0" +} + +test to_string_2() { + to_string(5) == @"5" +} + +test to_string_3() { + to_string(42) == @"42" +} + +test to_string_4() { + to_string(200) == @"200" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak new file mode 100644 index 00000000..35fa5567 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak @@ -0,0 +1,139 @@ +use aiken/builtin.{ + append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, +} + +// ## Constructing + +/// Convert a `ByteArray` into a `String` +/// +/// > [!WARNING] +/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. +/// > +/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). +/// +/// ```aiken +/// string.from_bytearray("foo") == @"foo" +/// string.from_bytearray(#"666f6f") == @"foo" +/// string.from_bytearray(some_hash) -> fail +/// ``` +pub fn from_bytearray(bytes: ByteArray) -> String { + decode_utf8(bytes) +} + +test from_bytearray_1() { + from_bytearray(#[]) == @"" +} + +test from_bytearray_2() { + from_bytearray(#[65, 66, 67]) == @"ABC" +} + +test from_bytearray_3() { + from_bytearray("ABC") == @"ABC" +} + +/// Convert an `Int` to its `String` representation. +/// +/// ```aiken +/// string.from_int(42) == @"42" +/// ``` +pub fn from_int(n: Int) -> String { + diagnostic(n, "") |> decode_utf8 +} + +test from_int_1() { + from_int(0) == @"0" +} + +test from_int_2() { + from_int(5) == @"5" +} + +test from_int_3() { + from_int(42) == @"42" +} + +test from_int_4() { + from_int(200) == @"200" +} + +// ## Combining + +/// Combine two `String` together. +/// +/// ```aiken +/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" +/// ``` +pub fn concat(left: String, right: String) -> String { + append_string(left, right) +} + +test concat_1() { + concat(@"", @"") == @"" +} + +test concat_2() { + concat(@"", @"foo") == concat(@"foo", @"") +} + +test concat_3() { + concat(left: @"Hello", right: @", World!") == @"Hello, World!" +} + +/// Join a list of strings, separated by a given _delimiter_. 
+/// +/// ```aiken +/// string.join([], @"+") == @"" +/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" +/// ``` +pub fn join(list: List, delimiter: String) -> String { + do_join(list, encode_utf8(delimiter), #"") + |> decode_utf8 +} + +fn do_join(xs, delimiter, bytes) { + when xs is { + [] -> bytes + [x, ..rest] -> + do_join( + rest, + delimiter, + if length_of_bytearray(bytes) == 0 { + encode_utf8(x) + } else { + append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) + }, + ) + } +} + +test join_1() { + join([], @",") == @"" +} + +test join_2() { + join([@"a", @"b", @"c"], @",") == @"a,b,c" +} + +// ## Transforming + +/// Convert a `String` into a `ByteArray` +/// +/// ```aiken +/// string.to_bytearray(@"foo") == "foo" +/// ``` +pub fn to_bytearray(self: String) -> ByteArray { + encode_utf8(self) +} + +test to_bytearray_1() { + to_bytearray(@"") == "" +} + +test to_bytearray_2() { + to_bytearray(@"ABC") == #[65, 66, 67] +} + +test to_bytearray_3() { + to_bytearray(@"ABC") == "ABC" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak new file mode 100644 index 00000000..0167b90f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak @@ -0,0 +1,86 @@ +use aiken/crypto.{ + Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, +} + +/// A general structure for representing an on-chain `Credential`. +/// +/// Credentials are always one of two kinds: a direct public/private key +/// pair, or a script (native or Plutus). +pub type Credential { + VerificationKey(VerificationKeyHash) + Script(ScriptHash) +} + +// ## Constructing + +/// A Cardano `Address` typically holding one or two credential references. +/// +/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are +/// completely excluded from Plutus contexts. Thus, from an on-chain +/// perspective only exists addresses of type 00, 01, ..., 07 as detailed +/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). +pub type Address { + payment_credential: PaymentCredential, + stake_credential: Option, +} + +/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. +pub fn from_script(script: Hash) -> Address { + Address { payment_credential: Script(script), stake_credential: None } +} + +/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. +pub fn from_verification_key(vk: Hash) -> Address { + Address { payment_credential: VerificationKey(vk), stake_credential: None } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_key( + self: Address, + vk: Hash, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(VerificationKey(vk))), + } +} + +/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. 
This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). +pub fn with_delegation_script( + self: Address, + script: Hash<Blake2b_224, Script>, +) -> Address { + Address { + payment_credential: self.payment_credential, + stake_credential: Some(Inline(Script(script))), + } +} + +/// Represent a type of object that can be represented either inline (by hash) +/// or via a reference (i.e. a pointer to an on-chain location). +/// +/// This is mainly used for capturing pointers to a stake credential +/// registration certificate in the case of so-called pointer addresses. +pub type Referenced<a> { + Inline(a) + Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } +} + +/// A `StakeCredential` represents the delegation and rewards withdrawal conditions +/// associated with some stake address / account. +/// +/// A `StakeCredential` is either provided inline, or, by reference using an +/// on-chain pointer. +/// +/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). +pub type StakeCredential = + Referenced<Credential> + +/// A 'PaymentCredential' represents the spending conditions associated with +/// some output. Hence, +/// +/// - a `VerificationKey` captures an output locked by a public/private key pair; +/// - and a `Script` captures an output locked by a native or Plutus script. +/// +pub type PaymentCredential = + Credential diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak new file mode 100644 index 00000000..2ebeaa91 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak @@ -0,0 +1,30 @@ +use aiken/primitive/bytearray +use cardano/address.{Credential, Script, VerificationKey} + +pub fn compare(left: Credential, right: Credential) -> Ordering { + when left is { + Script(left) -> + when right is { + Script(right) -> bytearray.compare(left, right) + _ -> Less + } + VerificationKey(left) -> + when right is { + Script(_) -> Greater + VerificationKey(right) -> bytearray.compare(left, right) + } + } +} + +test compare_matrix() { + and { + (compare(Script(""), Script("")) == Equal)?, + (compare(VerificationKey(""), VerificationKey("")) == Equal)?, + (compare(Script(""), VerificationKey("")) == Less)?, + (compare(VerificationKey(""), Script("")) == Greater)?, + (compare(Script("01"), Script("02")) == Less)?, + (compare(Script("02"), Script("01")) == Greater)?, + (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, + (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak new file mode 100644 index 00000000..664a3983 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak @@ -0,0 +1,920 @@ +use aiken/builtin +use aiken/collection/dict.{Dict, from_ascending_pairs_with} +use aiken/collection/list +use
aiken/crypto.{Blake2b_224, Hash, Script} +use aiken/option + +/// Lovelace is now a type wrapper for Int. +pub type Lovelace = + Int + +/// A type-alias for a `PolicyId`. A `PolicyId` is always 28-byte long +pub type PolicyId = + Hash + +/// Ada, the native currency, isn't associated with any `PolicyId` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_policy_id = "" + +/// A type-alias for 'AssetName`, which are free-form byte-arrays between +/// 0 and 32 bytes. +pub type AssetName = + ByteArray + +/// Ada, the native currency, isn't associated with any `AssetName` (it's not +/// possible to mint Ada!). +/// +/// By convention, it is an empty `ByteArray`. +pub const ada_asset_name = "" + +/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). +/// +/// > [!IMPORTANT] +/// > This type maintain some invariants by construction; in particular, a `Value` will never contain a +/// zero quantity of a particular token. +pub opaque type Value { + inner: Dict>, +} + +// ## Constructing + +/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) +/// and a given quantity. +pub fn from_asset( + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + zero + } else { + let asset = + dict.empty + |> dict.insert(asset_name, quantity) + dict.empty + |> dict.insert(policy_id, asset) + |> Value + } +} + +/// Promote an arbitrary list of assets into a `Value`. This function fails +/// (i.e. halts the program execution) if: +/// +/// - there's any duplicate amongst `PolicyId`; +/// - there's any duplicate amongst `AssetName`; +/// - the `AssetName` aren't sorted in ascending lexicographic order; or +/// - any asset quantity is null. +/// +/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, +/// while checking for internal invariants. +pub fn from_asset_list(xs: Pairs>) -> Value { + xs + |> list.foldr( + dict.empty, + fn(inner, acc) { + expect Pair(p, [_, ..] as x) = inner + x + |> from_ascending_pairs_with(fn(v) { v != 0 }) + |> dict.insert_with( + acc, + p, + _, + fn(_, _, _) { + fail @"Duplicate policy in the asset list." 
+ }, + ) + }, + ) + |> Value +} + +test from_asset_list_1() { + let v = from_asset_list([]) + v == zero +} + +test from_asset_list_2() fail { + let v = from_asset_list([Pair(#"33", [])]) + v == zero +} + +test from_asset_list_3() fail { + let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) + v != zero +} + +test from_asset_list_4() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) + flatten(v) == [(#"33", #"", 1)] +} + +test from_asset_list_5() { + let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) + flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] +} + +test from_asset_list_6() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + ], + ) + v != zero +} + +test from_asset_list_7() fail { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), + ], + ) + v != zero +} + +test from_asset_list_8() { + let v = + from_asset_list( + [ + Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +test from_asset_list_9() { + let v = + from_asset_list( + [ + Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), + Pair(#"34", [Pair(#"31", 1)]), + ], + ) + flatten(v) == [ + (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), + ] +} + +/// Construct a `Value` from a lovelace quantity. +/// +/// Friendly reminder: 1 Ada = 1.000.000 Lovelace +pub fn from_lovelace(quantity: Int) -> Value { + from_asset(ada_policy_id, ada_asset_name, quantity) +} + +/// Construct an empty `Value` with nothing in it. +pub const zero: Value = Value { inner: dict.empty } + +// ## Inspecting + +/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. +pub fn is_zero(self: Value) -> Bool { + self == zero +} + +/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. +/// The second parameter is provided as `Data`, allowing to conveniently compare serialized +/// datums or similar structurually equivalent types (such as `Pairs>`). +/// +/// The third argument is a callback function to assert the left and right lovelace +/// quantities. Its first argument refers to the quantity of the first argument of +/// `match`, and the second argument of the callback to the quantity of the second +/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. 
+/// +/// ```aiken +/// const value: Value = +/// assets.from_lovelace(30) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// const datum: Data = +/// assets.from_lovelace(20) +/// |> assets.add("foo", "bar", 1) +/// |> assets.add("foo", "baz", 42) +/// +/// True == assets.match(value, datum, >=) +/// +/// False == assets.match(value, datum, ==) +/// +/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { +/// 2 * datum_lovelace >= value_lovelace +/// }) +/// ``` +pub fn match( + left: Value, + right: Data, + assert_lovelace: fn(Lovelace, Lovelace) -> Bool, +) -> Bool { + builtin.choose_data( + right, + False, + { + let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) + let left_assets: Data = left_assets + let left_lovelace = + when left_lovelace is { + Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd + None -> 0 + } + when builtin.un_map_data(right) is { + [] -> left_assets == right && assert_lovelace(left_lovelace, 0) + [first_asset, ..right_assets] -> + if first_asset.1st == builtin.b_data(ada_policy_id) { + and { + assert_lovelace( + left_lovelace, + builtin.un_i_data( + builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, + ), + ), + left_assets == builtin.map_data(right_assets), + } + } else { + and { + assert_lovelace(left_lovelace, 0), + left_assets == right, + } + } + } + }, + False, + False, + False, + ) +} + +const fixture_match_value: Value = + zero + |> add(ada_policy_id, ada_asset_name, 42) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_foo_02: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("bar", "01", 42) + +const fixture_match_data_altered_foo_01: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 14) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + +const fixture_match_data_missing_bar: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + +const fixture_match_data_extra_policy: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("bar", "01", 42) + |> add("baz", "01", 1) + +const fixture_match_data_extra_asset: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + |> add("foo", "01", 1) + |> add("foo", "02", 1) + |> add("foo", "03", 1) + |> add("bar", "01", 42) + +const fixture_match_data_no_assets: Data = + zero + |> add(ada_policy_id, ada_asset_name, 14) + +test match_1() { + match(fixture_match_value, fixture_match_data, fn(_, _) { True }) +} + +test match_2() { + !match( + fixture_match_value, + fixture_match_data, + fn(source, target) { source == target }, + ) +} + +test match_3() { + !match( + fixture_match_value, + fixture_match_data_missing_foo_02, + fn(_, _) { True }, + ) +} + +test match_4() { + !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) +} + +test match_5() { + !match( + fixture_match_value, + fixture_match_data_altered_foo_01, + fn(_, _) { True }, + ) +} + +test match_6() { + !match( + fixture_match_value, + fixture_match_data_extra_policy, + fn(_, _) { True }, + ) +} + +test match_7() { + !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) +} + +test match_8() { + 
!match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) +} + +test match_9() { + match(zero, zero, ==) +} + +test match_10() { + match( + without_lovelace(fixture_match_value), + without_lovelace(fixture_match_value), + fn(left, right) { left == 0 && right == 0 }, + ) +} + +test match_11() { + match( + without_lovelace(fixture_match_value), + fixture_match_value, + fn(left, right) { left == 0 && right > 0 }, + ) +} + +test match_12() { + match( + fixture_match_value, + without_lovelace(fixture_match_value), + fn(left, right) { left > 0 && right == 0 }, + ) +} + +test match_13() { + match( + zero |> add(ada_policy_id, ada_asset_name, 42), + zero, + fn(left, right) { left == 42 && right == 0 }, + ) +} + +test match_14() { + match( + zero, + zero |> add(ada_policy_id, ada_asset_name, 42), + fn(left, right) { left == 0 && right == 42 }, + ) +} + +const fixture_match_benchmark_left: Value = + zero + |> add(ada_policy_id, ada_asset_name, 1337) + |> add( + #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", + "MATTR", + 200, + ) + |> add( + #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", + "ProphecyPoster076", + 1, + ) + |> add( + #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", + "BISON", + 12_004_999_999, + ) + |> add( + #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", + "SNEK", + 1486, + ) + |> add( + #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", + "MAYZSilverFoundersEdition0035", + 1, + ) + |> add( + #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", + "CardanoSpaceSession", + 20, + ) + |> add( + #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", + "ADAOGovernanceToken", + 1, + ) + |> add( + #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", + "HOSKY", + 400_001_000, + ) + |> add( + #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", + "LQ", + 10_635_899, + ) + |> add( + #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", + "GrandmasterAdventurer659", + 1, + ) + |> add( + #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", + "AdventurerOfThiolden8105", + 1, + ) + |> add( + #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", + "DiscoSolaris3725", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld535", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1213", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1518", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld1537", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld4199", + 1, + ) + |> add( + #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", + "CardaWorld3767", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae1", + 1, + ) + |> add( + #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", + "Algae2", + 1, + ) + +const fixture_match_benchmark_right: Data = fixture_match_benchmark_left + +test match_benchmark() { + match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) +} + +test match_benchmark_vs() { + let data: Data = fixture_match_benchmark_right + expect pairs: Pairs> = data + fixture_match_benchmark_left == from_asset_list(pairs) +} + +/// A specialized version of `quantity_of` for the Ada currency. 
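+///
+/// For example (a small illustrative sketch, not part of the upstream docs):
+///
+/// ```aiken
+/// lovelace_of(from_lovelace(42)) == 42
+///
+/// // A value holding no Ada at all yields 0.
+/// lovelace_of(from_asset("foo", "bar", 1)) == 0
+/// ```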
+pub fn lovelace_of(self: Value) -> Int { + quantity_of(self, ada_policy_id, ada_asset_name) +} + +/// A list of all token policies in that Value with non-zero tokens. +pub fn policies(self: Value) -> List { + dict.keys(self.inner) +} + +/// Extract the quantity of a given asset. +pub fn quantity_of( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, +) -> Int { + self.inner + |> dict.get(policy_id) + |> option.and_then(dict.get(_, asset_name)) + |> option.or_else(0) +} + +/// Get all tokens associated with a given policy. +pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { + self.inner + |> dict.get(policy_id) + |> option.or_else(dict.empty) +} + +// ## Combining + +/// Add a (positive or negative) quantity of a single token to a assets. +/// This is more efficient than [`merge`](#merge) for a single asset. +pub fn add( + self: Value, + policy_id: PolicyId, + asset_name: AssetName, + quantity: Int, +) -> Value { + if quantity == 0 { + self + } else { + let helper = + fn(_, left, _right) { + let inner_result = + dict.insert_with( + left, + asset_name, + quantity, + fn(_k, ql, qr) { + let q = ql + qr + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(inner_result) { + None + } else { + Some(inner_result) + } + } + + Value( + dict.insert_with( + self.inner, + policy_id, + dict.from_ascending_pairs([Pair(asset_name, quantity)]), + helper, + ), + ) + } +} + +test add_1() { + let v = + zero + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -321) + v == zero +} + +test add_2() { + let v = + from_lovelace(123) + |> add(#"acab", #"beef", 321) + |> add(#"acab", #"beef", -1 * 321) + v == from_lovelace(123) +} + +test add_3() { + let v = + from_lovelace(1) + |> add(ada_policy_id, ada_asset_name, 2) + |> add(ada_policy_id, ada_asset_name, 3) + v == from_lovelace(6) +} + +test add_4() { + let v = + zero + |> add(#"acab", #"beef", 0) + v == zero +} + +test add_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + |> add(#"acab", #"beef", 0) + v == zero +} + +/// Combine two `Value` together. +pub fn merge(left v0: Value, right v1: Value) -> Value { + Value( + dict.union_with( + v0.inner, + v1.inner, + fn(_, a0, a1) { + let result = + dict.union_with( + a0, + a1, + fn(_, q0, q1) { + let q = q0 + q1 + if q == 0 { + None + } else { + Some(q) + } + }, + ) + if dict.is_empty(result) { + None + } else { + Some(result) + } + }, + ), + ) +} + +test merge_1() { + let v1 = from_lovelace(1) + let v2 = from_lovelace(-1) + merge(v1, v2) == zero +} + +test merge_2() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"01", #"", 2) + let v3 = from_asset(#"02", #"", 3) + let v = + from_lovelace(42) + |> merge(v3) + |> merge(v1) + |> merge(v2) + + flatten(v) == [ + (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), + ] +} + +test merge_3() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + let v3 = from_asset(#"01", #"", 1) + + let v = + zero + |> merge(v1) + |> merge(v2) + |> merge(v3) + + flatten(v) == [(#"01", #"", 1)] +} + +test merge_4() { + let v1 = from_asset(#"00", #"", 1) + let v2 = from_asset(#"00", #"", -1) + + merge(v1, v2) == zero +} + +test merge_5() { + let v = + zero + |> add(#"acab", #"beef", 0) + + merge(zero, v) == zero +} + +/// Negates quantities of all tokens (including Ada) in that `Value`. 
+/// +/// ``` +/// v1 +/// |> assets.negate +/// |> assets.merge(v1) +/// |> assets.is_zero +/// // True +/// ``` +pub fn negate(self: Value) -> Value { + dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) + |> Value +} + +/// Get a subset of the assets restricted to the given policies. +pub fn restricted_to(self: Value, mask: List) -> Value { + list.foldr( + policies(self), + zero, + fn(policy_id, value) { + if list.has(mask, policy_id) { + dict.foldr( + tokens(self, policy_id), + value, + fn(asset_name, quantity, value) { + add(value, policy_id, asset_name, quantity) + }, + ) + } else { + value + } + }, + ) +} + +test restricted_to_1() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, []) == zero +} + +test restricted_to_2() { + let self = from_lovelace(42) |> add("foo", "", 1) + restricted_to(self, [ada_policy_id]) == from_lovelace(42) +} + +test restricted_to_3() { + let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) + restricted_to(self, ["foo", "bar"]) == without_lovelace(self) +} + +test restricted_to_4() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, ["foo"]) == without_lovelace(self) +} + +test restricted_to_5() { + let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) + restricted_to(self, [ada_policy_id, "foo"]) == self +} + +/// Get a `Value` excluding Ada. +pub fn without_lovelace(self: Value) -> Value { + dict.delete(self.inner, ada_policy_id) + |> Value +} + +test without_lovelace_1() { + let v = from_lovelace(1000000) + without_lovelace(v) == zero +} + +test without_lovelace_2() { + let v = from_lovelace(1000000) + let v2 = from_lovelace(50000000) + without_lovelace(v) == without_lovelace(v2) +} + +test without_lovelace_3() { + let v = + from_asset(#"010203", #"040506", 100) + |> add(ada_policy_id, ada_asset_name, 100000000) + let v2 = from_asset(#"010203", #"040506", 100) + without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 +} + +// ## Transforming + +/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. +/// +/// Handy to manipulate values as uniform lists. +pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { + dict.foldr( + self.inner, + [], + fn(policy_id, asset_list, value) { + dict.foldr( + asset_list, + value, + fn(asset_name, quantity, xs) { + [(policy_id, asset_name, quantity), ..xs] + }, + ) + }, + ) +} + +/// Flatten a `Value` as a list of results, possibly discarding some along the way. +/// +/// When the transform function returns `None`, the result is discarded altogether. 
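+///
+/// For example (an illustrative sketch mirroring the tests below):
+///
+/// ```aiken
+/// let v =
+///   zero
+///     |> add("a", "1", 14)
+///     |> add("b", "", 42)
+///
+/// // Keep only assets with a quantity of exactly 42, discarding the rest.
+/// let only_42s =
+///   flatten_with(v, fn(p, a, q) { if q == 42 { Some((p, a)) } else { None } })
+///
+/// only_42s == [("b", "")]
+/// ```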
+pub fn flatten_with(
+  self: Value,
+  with: fn(PolicyId, AssetName, Int) -> Option<result>,
+) -> List<result> {
+  dict.foldr(
+    self.inner,
+    [],
+    fn(policy_id, asset_list, value) {
+      dict.foldr(
+        asset_list,
+        value,
+        fn(asset_name, quantity, xs) {
+          when with(policy_id, asset_name, quantity) is {
+            None -> xs
+            Some(x) -> [x, ..xs]
+          }
+        },
+      )
+    },
+  )
+}
+
+test flatten_with_1() {
+  flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == []
+}
+
+test flatten_with_2() {
+  let v =
+    zero
+      |> add("a", "1", 14)
+      |> add("b", "", 42)
+      |> add("a", "2", 42)
+
+  flatten_with(
+    v,
+    fn(p, a, q) {
+      if q == 42 {
+        Some((p, a))
+      } else {
+        None
+      }
+    },
+  ) == [("a", "2"), ("b", "")]
+}
+
+/// Reduce a value into a single result.
+///
+/// ```aiken
+/// assets.zero
+///   |> assets.add("a", "1", 10)
+///   |> assets.add("b", "2", 20)
+///   |> assets.reduce(0, fn(_, _, quantity, acc) { acc + quantity })
+/// // 30
+/// ```
+pub fn reduce(
+  self: Value,
+  start: result,
+  with: fn(PolicyId, AssetName, Int, result) -> result,
+) -> result {
+  dict.foldr(
+    self.inner,
+    start,
+    fn(policy_id, asset_list, result) {
+      dict.foldr(asset_list, result, with(policy_id, _, _, _))
+    },
+  )
+}
+
+test reduce_1() {
+  let v =
+    zero
+      |> add("a", "1", 10)
+      |> add("b", "2", 20)
+  let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity })
+  result == 30
+}
+
+test reduce_2() {
+  let v =
+    zero
+      |> add("a", "1", 5)
+      |> add("a", "2", 15)
+      |> add("b", "", 10)
+  let result =
+    reduce(
+      v,
+      [],
+      fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] },
+    )
+  result == [("a", "1"), ("a", "2"), ("b", "")]
+}
+
+test reduce_3() {
+  let v = zero
+  let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity })
+  result == 1
+}
+
+/// Convert the value into a dictionary of dictionaries.
+pub fn to_dict(self: Value) -> Dict<PolicyId, Dict<AssetName, Int>> {
+  self.inner
+}
diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak
new file mode 100644
index 00000000..f0b6d258
--- /dev/null
+++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak
@@ -0,0 +1,93 @@
+use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash}
+use cardano/address.{Credential}
+use cardano/assets.{Lovelace}
+
+pub type StakePoolId =
+  Hash<Blake2b_224, VerificationKey>
+
+/// An on-chain certificate attesting to some operation. Publishing
+/// certificates triggers different kinds of rules; most of the time,
+/// they require signatures from specific keys.
+pub type Certificate {
+  /// Register a stake credential with an optional deposit amount.
+  /// The deposit is always present when using the new registration certificate
+  /// format available since the Conway era.
+  RegisterCredential {
+    credential: Credential,
+    /// > [!NOTE]
+    /// > The `deposit` ought to be an `Option<Lovelace>`, but due to unfortunate
+    /// > circumstances it will always be instantiated to `None` even when set in
+    /// > the host transaction. This is what the `Never` type captures here.
+    deposit: Never,
+  }
+  /// Unregister a stake credential with an optional refund amount.
+  /// The refund is always present when using the new de-registration certificate
+  /// format available since the Conway era.
+  UnregisterCredential {
+    credential: Credential,
+    /// > [!NOTE]
+    /// > The `refund` ought to be an `Option<Lovelace>`, but due to unfortunate
+    /// > circumstances it will always be instantiated to `None` even when set in
+    /// > the host transaction. This is what the `Never` type captures here.
+    refund: Never,
+  }
+  /// Delegate stake to a [Delegate](#Delegate).
+  DelegateCredential { credential: Credential, delegate: Delegate }
+  /// Register and delegate a staking credential to a [Delegate](#Delegate) in one certificate.
+  RegisterAndDelegateCredential {
+    credential: Credential,
+    delegate: Delegate,
+    deposit: Lovelace,
+  }
+  /// Register a delegate representative (a.k.a. DRep). The deposit is explicit and
+  /// is refunded when the delegate steps down (unregisters).
+  RegisterDelegateRepresentative {
+    delegate_representative: Credential,
+    deposit: Lovelace,
+  }
+  /// Update a delegate representative (a.k.a. DRep). The certificate also contains
+  /// metadata that isn't visible on-chain.
+  UpdateDelegateRepresentative { delegate_representative: Credential }
+  /// Unregister a delegate representative and refund its past deposit.
+  UnregisterDelegateRepresentative {
+    delegate_representative: Credential,
+    refund: Lovelace,
+  }
+  /// Register a new stake pool.
+  RegisterStakePool {
+    /// The hash digest of the stake pool's cold (public) key.
+    stake_pool: StakePoolId,
+    /// The hash digest of the stake pool's VRF (public) key.
+    vrf: VerificationKeyHash,
+  }
+  /// Retire a stake pool. `at_epoch` indicates the epoch in which the retirement will take place.
+  RetireStakePool { stake_pool: StakePoolId, at_epoch: Int }
+  /// Authorize a hot credential for a specific constitutional committee member's cold credential.
+  AuthorizeConstitutionalCommitteeProxy {
+    constitutional_committee_member: Credential,
+    proxy: Credential,
+  }
+  /// Step down from the constitutional committee as a member.
+  RetireFromConstitutionalCommittee {
+    constitutional_committee_member: Credential,
+  }
+}
+
+/// A type of stake delegation that can be block production, vote, or
+/// both. Note that delegation types don't cancel one another, so it is
+/// possible to delegate block production in one transaction, and voting
+/// in another. This second delegation **does NOT** invalidate the first one.
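+///
+/// For example (an illustrative sketch; `my_pool` is a hypothetical pool id):
+///
+/// ```aiken
+/// // Delegate block production to a pool and abstain from governance votes,
+/// // all within a single certificate.
+/// let delegate =
+///   DelegateBoth {
+///     stake_pool: my_pool,
+///     delegate_representative: AlwaysAbstain,
+///   }
+/// ```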
+pub type Delegate { + DelegateBlockProduction { stake_pool: StakePoolId } + DelegateVote { delegate_representative: DelegateRepresentative } + DelegateBoth { + stake_pool: StakePoolId, + delegate_representative: DelegateRepresentative, + } +} + +pub type DelegateRepresentative { + Registered(Credential) + AlwaysAbstain + AlwaysNoConfidence +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak new file mode 100644 index 00000000..3ec96800 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak @@ -0,0 +1,109 @@ +use aiken/collection.{Index} +use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} +use aiken/math/rational.{Rational} +use cardano/address.{Credential} +use cardano/assets.{Lovelace} +use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} + +pub type ProposalProcedure { + deposit: Lovelace, + return_address: Credential, + governance_action: GovernanceAction, +} + +pub type GovernanceAction { + ProtocolParameters { + /// The last governance action of type 'ProtocolParameters'. They must all + /// form a chain. + ancestor: Option, + /// The new proposed protocol parameters. Only values set to `Some` are relevant. + new_parameters: ProtocolParametersUpdate, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the new protocol parameters to be deemed valid. + guardrails: Option, + } + HardFork { + /// The last governance action of type `HardFork`. They must all + /// form a chain. + ancestor: Option, + /// The new proposed version. Few rules apply to proposing new versions: + /// + /// - The `major` component, if incremented, must be exactly one more than the current. + /// - The `minor` component, if incremented, must be exactly one more than the current. + /// - If the `major` component is incremented, `minor` must be set to `0`. + /// - Neither `minor` nor `major` can be decremented. + new_version: ProtocolVersion, + } + TreasuryWithdrawal { + /// A collection of beneficiaries, which can be plain verification key + /// hashes or script hashes (e.g. DAO). + beneficiaries: Pairs, + /// The optional guardrails script defined in the constitution. The script + /// is executed by the ledger in addition to the hard-coded ledger rules. + /// + /// It must pass for the withdrawals to be authorized. + guardrails: Option, + } + NoConfidence { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + } + ConstitutionalCommittee { + /// The last governance action of type `NoConfidence` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// Constitutional members to be removed. + evicted_members: List, + /// Constitutional members to be added. + added_members: Pairs, + /// The new quorum value, as a ratio of a numerator and a denominator. The + /// quorum specifies the threshold of 'Yes' votes necessary for the + /// constitutional committee to accept a proposal procedure. 
+ quorum: Rational, + } + NewConstitution { + /// The last governance action of type `Constitution` or + /// `ConstitutionalCommittee`. They must all / form a chain. + ancestor: Option, + /// The new proposed constitution. + constitution: Constitution, + } + NicePoll +} + +pub type Vote { + No + Yes + Abstain +} + +pub type TransactionId = + Hash + +pub type GovernanceActionId { + transaction: TransactionId, + proposal_procedure: Index, +} + +pub type ProtocolVersion { + major: Int, + minor: Int, +} + +pub type Constitution { + guardrails: Option, +} + +/// An epoch number after which constitutional committee member +/// mandate expires. +pub type Mandate = + Int + +pub type Voter { + ConstitutionalCommitteeMember(Credential) + DelegateRepresentative(Credential) + StakePool(VerificationKeyHash) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak new file mode 100644 index 00000000..d9e7be95 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak @@ -0,0 +1,360 @@ +use aiken/math/rational.{Rational} +use cardano/assets.{Lovelace} + +pub opaque type ProtocolParametersUpdate { + inner: Pairs, +} + +pub type ScriptExecutionPrices { + memory: Rational, + cpu: Rational, +} + +pub type ExecutionUnits { + memory: Int, + cpu: Int, +} + +pub type StakePoolOperatorVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Rational, + Void, + Void, + Void, + Void, + >, +} + +pub type DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: Rational, + constitutional_committee: ConstitutionalCommitteeThresholds, + constitution: Rational, + hard_fork: Rational, + protocol_parameters: ProtocolParametersThresholds< + Void, + Rational, + Rational, + Rational, + Rational, + >, + treasury_withdrawal: Rational, +} + +pub type ProtocolParametersThresholds< + security, + network, + economic, + technical, + governance, +> { + security_group: security, + network_group: network, + economic_group: economic, + technical_group: technical, + governance_group: governance, +} + +pub type ConstitutionalCommitteeThresholds { + default: Rational, + under_no_confidence: Rational, +} + +/// The linear coefficient that intervenes in the transaction fee calculation. +/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. +pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 0, into_int) +} + +/// The constant factor that intervenes in the transaction fee calculation. It is +/// a flat cost of lovelace that is added to every fee calculation. +pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 1, into_int) +} + +/// The maximum size of a serialized block body, expressed in bytes. +pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 2, into_int) +} + +/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. 
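+///
+/// A minimal usage sketch (illustrative only; `update` stands for some
+/// `ProtocolParametersUpdate` taken from a governance action):
+///
+/// ```aiken
+/// fn proposes_smaller_transactions(update: ProtocolParametersUpdate) -> Bool {
+///   when max_transaction_size(update) is {
+///     // The parameter is part of this update: inspect the proposed value.
+///     Some(new_max) -> new_max <= 16_384
+///     // The parameter is left untouched by this update.
+///     None -> True
+///   }
+/// }
+/// ```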
+pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 3, into_int) +} + +/// The maximum size of a serialized block header, expressed in bytes. +pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 4, into_int) +} + +/// The required deposit amount when registering stake credentials, expressed in Lovelace. +pub fn stake_credential_deposit( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 5, into_int) +} + +/// The required deposit amount when registering a stake pool, expressed in Lovelace. +pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 6, into_int) +} + +/// The maximum number of epoch in the future allowed for a stake pool retirement to be scheduled. +pub fn stake_pool_retirement_horizon( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 7, into_int) +} + +/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_. +pub fn desired_number_of_stake_pools( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 8, into_int) +} + +/// A parameter controlling the influence of an pool owner's pledge on the rewards. Also known as _'a0'_. +pub fn stake_pool_pledge_influence( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 9, into_rational) +} + +/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentivies model. Also known as _'ρ'_. +pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 10, into_rational) +} + +/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_. +pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 11, into_rational) +} + +/// Minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace. +pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 16, into_int) +} + +/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per Byte, and is also known as the 'coins per utxo byte' parameter. +pub fn min_utxo_deposit_coefficient( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 17, into_int) +} + +/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version. +pub fn cost_models(self: ProtocolParametersUpdate) -> Option { + get_protocol_param(self.inner, 18, identity) +} + +/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts. +pub fn script_execution_prices( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 19, into_script_execution_prices) +} + +/// The maximum execution units allowed for a single transaction. +pub fn max_transaction_execution_units( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 20, into_execution_units) +} + +/// The maximum execution units allowed for a single block. 
+pub fn max_block_execution_units(
+  self: ProtocolParametersUpdate,
+) -> Option<ExecutionUnits> {
+  get_protocol_param(self.inner, 21, into_execution_units)
+}
+
+/// The maximum size of a serialized value in a transaction output. This effectively limits
+/// the number of different assets that can be sent in a single output. It is expressed in bytes.
+pub fn max_value_size(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 22, into_int)
+}
+
+/// The scaling factor applied to the transaction cost for defining the minimum collateral
+/// amount. It is expressed in percent points (so 100 = 100%).
+pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 23, into_int)
+}
+
+/// The maximum number of collateral inputs allowed in the transaction.
+pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option<Int> {
+  get_protocol_param(self.inner, 24, into_int)
+}
+
+/// The various governance voting thresholds pertaining to stake pool operators.
+pub fn stake_pool_operator_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<StakePoolOperatorVotingThresholds> {
+  get_protocol_param(self.inner, 25, into_spo_voting_thresholds)
+}
+
+/// The various governance voting thresholds pertaining to delegate representatives
+/// (a.k.a. DReps).
+pub fn delegate_representative_voting_thresholds(
+  self: ProtocolParametersUpdate,
+) -> Option<DelegateRepresentativeVotingThresholds> {
+  get_protocol_param(self.inner, 26, into_drep_voting_thresholds)
+}
+
+/// The minimum number of members in the constitutional committee. Any update of the committee
+/// must leave at least this number of members.
+pub fn min_constitutional_committee_size(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 27, into_int)
+}
+
+/// The maximum length of a constitutional committee member's mandate, expressed in number of epochs.
+pub fn max_constitutional_committee_mandate(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 28, into_int)
+}
+
+/// The lifetime of any governance proposal. An action that hasn't been approved beyond that
+/// period is considered inactive and discarded. It is expressed in number of epochs.
+pub fn governance_proposal_lifetime(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 29, into_int)
+}
+
+/// The required deposit amount for governance proposal procedures, expressed in Lovelace.
+pub fn governance_proposal_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 30, into_int)
+}
+
+/// The required deposit amount when registering as a delegate representative, expressed in
+/// Lovelace.
+pub fn delegate_representative_deposit(
+  self: ProtocolParametersUpdate,
+) -> Option<Lovelace> {
+  get_protocol_param(self.inner, 31, into_int)
+}
+
+/// The maximum number of epochs that a delegate representative can go without voting
+/// before becoming _inactive_ and being removed from threshold calculations.
+pub fn delegate_representative_max_idle_time(
+  self: ProtocolParametersUpdate,
+) -> Option<Int> {
+  get_protocol_param(self.inner, 32, into_int)
+}
+
+/// The base tier fee coefficient for reference scripts. Reference scripts get increasingly
+/// more expensive every ~24KB; the base coefficient is a multiplying factor which grows
+/// exponentially with each tier.
+pub fn reference_scripts_tier_fee_initial_factor( + self: ProtocolParametersUpdate, +) -> Option { + get_protocol_param(self.inner, 33, into_rational) +} + +// Internals ------------------------------------------------------------------- + +type ProtocolParametersIndex = + Int + +fn get_protocol_param( + self: Pairs, + ix: ProtocolParametersIndex, + into: fn(Data) -> a, +) -> Option { + when self is { + [] -> None + [Pair(jx, param), ..tail] -> + if ix == jx { + Some(into(param)) + } else { + get_protocol_param(tail, ix, into) + } + } +} + +fn into_int(param: Data) -> Int { + expect param: Int = param + param +} + +fn into_rational(param: Data) -> Rational { + expect [numerator, denominator]: List = param + expect Some(r) = rational.new(numerator, denominator) + r +} + +fn into_execution_units(param: Data) -> ExecutionUnits { + expect [memory, cpu]: List = param + ExecutionUnits { memory, cpu } +} + +fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { + expect [memory, cpu]: List = param + let memory = into_rational(memory) + let cpu = into_rational(cpu) + ScriptExecutionPrices { memory, cpu } +} + +fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, hard_fork, + protocol_parameters_security_group, + ]: List = param + + StakePoolOperatorVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: into_rational(protocol_parameters_security_group), + network_group: Void, + economic_group: Void, + technical_group: Void, + governance_group: Void, + }, + } +} + +fn into_drep_voting_thresholds( + param: Data, +) -> DelegateRepresentativeVotingThresholds { + expect [ + motion_of_no_confidence, constitutional_committee, + constitutional_committee_under_no_confidence, constitution, hard_fork, + protocol_parameters_network_group, protocol_parameters_economic_group, + protocol_parameters_technical_group, protocol_parameters_governance_group, + treasury_withdrawal, + ]: List = param + + DelegateRepresentativeVotingThresholds { + motion_of_no_confidence: into_rational(motion_of_no_confidence), + constitutional_committee: ConstitutionalCommitteeThresholds { + default: into_rational(constitutional_committee), + under_no_confidence: into_rational( + constitutional_committee_under_no_confidence, + ), + }, + constitution: into_rational(constitution), + hard_fork: into_rational(hard_fork), + protocol_parameters: ProtocolParametersThresholds { + security_group: Void, + network_group: into_rational(protocol_parameters_network_group), + economic_group: into_rational(protocol_parameters_economic_group), + technical_group: into_rational(protocol_parameters_technical_group), + governance_group: into_rational(protocol_parameters_governance_group), + }, + treasury_withdrawal: into_rational(treasury_withdrawal), + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak new 
file mode 100644 index 00000000..e723e2d5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak @@ -0,0 +1,62 @@ +use aiken/primitive/bytearray +use cardano/address.{Script} +use cardano/address/credential +use cardano/governance.{ + ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, +} + +pub fn compare(left: Voter, right: Voter) -> Ordering { + when left is { + ConstitutionalCommitteeMember(left) -> + when right is { + ConstitutionalCommitteeMember(right) -> credential.compare(left, right) + _ -> Less + } + DelegateRepresentative(left) -> + when right is { + DelegateRepresentative(right) -> credential.compare(left, right) + ConstitutionalCommitteeMember(_) -> Greater + _ -> Less + } + StakePool(left) -> + when right is { + StakePool(right) -> bytearray.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let cc0 = ConstitutionalCommitteeMember(Script("0")) + let cc1 = ConstitutionalCommitteeMember(Script("1")) + + let drep0 = DelegateRepresentative(Script("0")) + let drep1 = DelegateRepresentative(Script("1")) + + let spo0 = StakePool("0") + let spo1 = StakePool("1") + + and { + (compare(cc0, cc0) == Equal)?, + (compare(cc0, cc1) == Less)?, + (compare(cc1, cc0) == Greater)?, + (compare(drep0, drep0) == Equal)?, + (compare(drep0, drep1) == Less)?, + (compare(drep1, drep0) == Greater)?, + (compare(spo0, spo0) == Equal)?, + (compare(spo0, spo1) == Less)?, + (compare(spo1, spo0) == Greater)?, + (compare(cc0, drep0) == Less)?, + (compare(cc0, drep1) == Less)?, + (compare(cc0, spo0) == Less)?, + (compare(cc0, spo1) == Less)?, + (compare(drep0, cc0) == Greater)?, + (compare(drep0, cc1) == Greater)?, + (compare(drep0, spo0) == Less)?, + (compare(drep0, spo1) == Less)?, + (compare(spo0, cc0) == Greater)?, + (compare(spo0, cc1) == Greater)?, + (compare(spo0, drep0) == Greater)?, + (compare(spo0, drep1) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak new file mode 100644 index 00000000..ff73836a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak @@ -0,0 +1,62 @@ +//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. +//// +//// > [!NOTE] +//// > Unless you know what you're doing, you should prefer using named handlers: +//// > +//// > - `mint` +//// > - `spend` +//// > - `withdraw` +//// > - `publish` +//// > - `vote` +//// > - `propose` + +use aiken/collection.{Index} +use cardano/address.{Credential} +use cardano/assets.{PolicyId} +use cardano/certificate.{Certificate} +use cardano/governance.{ProposalProcedure, Voter} +use cardano/transaction.{OutputReference, Redeemer, Transaction} + +/// A context given to a script by the Cardano ledger when being executed. +/// +/// The context contains information about the entire transaction that contains +/// the script. 
The transaction may also contain other scripts; to distinguish +/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a +/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what +/// purpose) the transaction is being executed. +pub type ScriptContext { + transaction: Transaction, + redeemer: Redeemer, + info: ScriptInfo, +} + +/// Characterizes the script information. The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option`. +pub type ScriptInfo { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Minting(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spending { output: OutputReference, datum: Option } + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. + Withdrawing(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publishing { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Voting(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Proposing { at: Index, proposal_procedure: ProposalProcedure } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak new file mode 100644 index 00000000..6511a596 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak @@ -0,0 +1,225 @@ +use aiken/builtin +use aiken/collection.{Index} +use aiken/collection/dict.{Dict} +use aiken/collection/list +use aiken/crypto.{ + Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, +} +use aiken/interval.{Interval} +use aiken/option +use cardano/address.{Address, Credential, Script, VerificationKey} +use cardano/assets.{Lovelace, PolicyId, Value} +use cardano/certificate.{Certificate} +use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} + +pub type TransactionId = + Hash + +/// Characterizes the script purpose. +pub type ScriptPurpose { + /// For scripts executed as minting/burning policies, to insert + /// or remove assets from circulation. It's parameterized by the identifier + /// of the associated policy. + Mint(PolicyId) + /// For scripts that are used as payment credentials for addresses in + /// transaction outputs. They govern the rule by which the output they + /// reference can be spent. + Spend(OutputReference) + /// For scripts that validate reward withdrawals from a reward account. + /// + /// The argument identifies the target reward account. 
+ Withdraw(Credential) + /// Needed when delegating to a pool using stake credentials defined as a + /// custom script. This purpose is also triggered when de-registering such + /// stake credentials. + /// + /// The Int is a 0-based index of the given `Certificate` in `certificates`. + Publish { at: Index, certificate: Certificate } + /// Voting for a type of voter using a governance action id to vote + /// yes / no / abstain inside a transaction. + /// + /// The voter is who is doing the governance action. + Vote(Voter) + /// Used to propose a governance action. + /// + /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. + Propose { at: Index, proposal_procedure: ProposalProcedure } +} + +/// A Cardano `Transaction`, as seen by on-chain scripts. +/// +/// Note that this is a representation of a transaction, and not the 1:1 +/// translation of the transaction as seen by the ledger. In particular, +/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs +/// to bootstrap addresses or just transaction metadata. +pub type Transaction { + inputs: List, + reference_inputs: List, + outputs: List, + fee: Lovelace, + mint: Value, + certificates: List, + /// > [!IMPORTANT] + /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + withdrawals: Pairs, + validity_range: ValidityRange, + extra_signatories: List, + /// > [!IMPORTANT] + /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). + redeemers: Pairs, + datums: Dict, + id: TransactionId, + /// > [!IMPORTANT] + /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. + votes: Pairs>, + proposal_procedures: List, + current_treasury_amount: Option, + treasury_donation: Option, +} + +/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. +pub type ValidityRange = + Interval + +/// An `Input` made of an output reference and, the resolved value associated with that output. +pub type Input { + output_reference: OutputReference, + output: Output, +} + +/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` +/// corresponds to the position in the output list of the transaction (identified by its id) +/// that produced that output +pub type OutputReference { + transaction_id: Hash, + output_index: Int, +} + +/// A transaction `Output`, with an address, a value and optional datums and script references. +pub type Output { + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +} + +/// An output `Datum`. +pub type Datum { + NoDatum + /// A datum referenced by its hash digest. + DatumHash(DataHash) + /// A datum completely inlined in the output. + InlineDatum(Data) +} + +/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is +/// opaque because it is user-defined and it is the script's responsibility to +/// parse it into its expected form. +pub type Redeemer = + Data + +// ## Querying + +/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in +/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own +/// input. +/// +/// ```aiken +/// validator { +/// spend(datum, redeemer, my_output_reference, self) { +/// expect Some(input) = +/// self.inputs +/// |> transaction.find_input(my_output_reference) +/// } +/// } +/// ``` +pub fn find_input( + inputs: List, + output_reference: OutputReference, +) -> Option { + inputs + |> list.find(fn(input) { input.output_reference == output_reference }) +} + +/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for +/// datums in the witness set, and then for inline datums if it doesn't find any in +/// witnesses. +pub fn find_datum( + outputs: List, + datums: Dict, + datum_hash: DataHash, +) -> Option { + datums + |> dict.get(datum_hash) + |> option.or_try( + fn() { + outputs + |> list.filter_map( + fn(output) { + when output.datum is { + InlineDatum(data) -> + if blake2b_256(builtin.serialise_data(data)) == datum_hash { + Some(data) + } else { + None + } + _ -> None + } + }, + ) + |> list.head + }, + ) +} + +/// Find all outputs that are paying into the given script hash, if any. This is useful for +/// contracts running over multiple transactions. +pub fn find_script_outputs( + outputs: List, + script_hash: ScriptHash, +) -> List { + outputs + |> list.filter( + fn(output) { + when output.address.payment_credential is { + Script(addr_script_hash) -> script_hash == addr_script_hash + VerificationKey(_) -> False + } + }, + ) +} + +// ## Testing + +/// A placeholder / empty `Transaction` to serve as a base in a transaction +/// builder. This is particularly useful for constructing test transactions. 
+/// +/// Every field is empty or null, and we have in particular: +/// +/// ```aiken +/// use aiken/interval +/// +/// transaction.placeholder.id == +/// #"0000000000000000000000000000000000000000000000000000000000000000" +/// +/// transaction.placeholder.validity_range == interval.everything +/// ``` +pub const placeholder: Transaction = + Transaction { + inputs: [], + reference_inputs: [], + outputs: [], + fee: 0, + mint: assets.zero, + certificates: [], + withdrawals: [], + validity_range: interval.everything, + extra_signatories: [], + redeemers: [], + datums: dict.empty, + id: #"0000000000000000000000000000000000000000000000000000000000000000", + votes: [], + proposal_procedures: [], + current_treasury_amount: None, + treasury_donation: None, + } diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak new file mode 100644 index 00000000..70b7550d --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak @@ -0,0 +1,23 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/transaction.{OutputReference} + +pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { + when bytearray.compare(left.transaction_id, right.transaction_id) is { + Equal -> int.compare(left.output_index, right.output_index) + ordering -> ordering + } +} + +test compare_matrix() { + and { + (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, + (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, + (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, + (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, + (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, + (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, + (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, + (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak new file mode 100644 index 00000000..4fef2cbe --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak @@ -0,0 +1,126 @@ +use aiken/primitive/bytearray +use aiken/primitive/int +use cardano/address.{Script, VerificationKey} +use cardano/address/credential +use cardano/certificate.{RegisterCredential} +use cardano/governance.{NicePoll, ProposalProcedure, StakePool} +use cardano/governance/voter +use cardano/transaction.{ + Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, +} +use cardano/transaction/output_reference + +pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { + when left is { + Mint(left) -> + when right is { + Mint(right) -> bytearray.compare(left, right) + _ -> Less + } + + Spend(left) -> + when right is { + Spend(right) -> 
output_reference.compare(left, right) + Mint(_) -> Greater + _ -> Less + } + + Withdraw(left) -> + when right is { + Withdraw(right) -> credential.compare(left, right) + Spend(_) | Mint(_) -> Greater + _ -> Less + } + + Publish(left, _) -> + when right is { + Publish(right, _) -> int.compare(left, right) + Spend(_) | Mint(_) | Withdraw(_) -> Greater + _ -> Less + } + + Vote(left) -> + when right is { + Vote(right) -> voter.compare(left, right) + Propose(..) -> Less + _ -> Greater + } + + Propose(left, _) -> + when right is { + Propose(right, _) -> int.compare(left, right) + _ -> Greater + } + } +} + +test compare_matrix() { + let mint0 = Mint("0") + let mint1 = Mint("1") + + let spend0 = Spend(OutputReference("", 0)) + let spend1 = Spend(OutputReference("", 1)) + + let withdraw0 = Withdraw(VerificationKey("0")) + let withdraw1 = Withdraw(VerificationKey("1")) + + let publish0 = Publish(0, RegisterCredential(Script(""), Never)) + let publish1 = Publish(1, RegisterCredential(Script(""), Never)) + + let vote0 = Vote(StakePool("0")) + let vote1 = Vote(StakePool("1")) + + let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) + let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) + + and { + (compare(mint0, mint0) == Equal)?, + (compare(mint0, mint1) == Less)?, + (compare(mint1, mint0) == Greater)?, + (compare(mint0, spend0) == Less)?, + (compare(mint0, withdraw0) == Less)?, + (compare(mint0, publish0) == Less)?, + (compare(mint0, vote0) == Less)?, + (compare(mint0, propose0) == Less)?, + (compare(spend0, spend0) == Equal)?, + (compare(spend0, spend1) == Less)?, + (compare(spend1, spend0) == Greater)?, + (compare(spend0, mint0) == Greater)?, + (compare(spend0, withdraw0) == Less)?, + (compare(spend0, publish0) == Less)?, + (compare(spend0, vote0) == Less)?, + (compare(spend0, propose0) == Less)?, + (compare(withdraw0, withdraw0) == Equal)?, + (compare(withdraw0, withdraw1) == Less)?, + (compare(withdraw1, withdraw0) == Greater)?, + (compare(withdraw0, mint0) == Greater)?, + (compare(withdraw0, spend0) == Greater)?, + (compare(withdraw0, publish0) == Less)?, + (compare(withdraw0, vote0) == Less)?, + (compare(withdraw0, propose0) == Less)?, + (compare(publish0, publish0) == Equal)?, + (compare(publish0, publish1) == Less)?, + (compare(publish1, publish0) == Greater)?, + (compare(publish0, mint0) == Greater)?, + (compare(publish0, spend0) == Greater)?, + (compare(publish0, withdraw0) == Greater)?, + (compare(publish0, vote0) == Less)?, + (compare(publish0, propose0) == Less)?, + (compare(vote0, vote0) == Equal)?, + (compare(vote0, vote1) == Less)?, + (compare(vote1, vote0) == Greater)?, + (compare(vote0, mint0) == Greater)?, + (compare(vote0, spend0) == Greater)?, + (compare(vote0, withdraw0) == Greater)?, + (compare(vote0, publish0) == Greater)?, + (compare(vote0, propose0) == Less)?, + (compare(propose0, propose0) == Equal)?, + (compare(propose0, propose1) == Less)?, + (compare(propose1, propose0) == Greater)?, + (compare(propose0, mint0) == Greater)?, + (compare(propose0, spend0) == Greater)?, + (compare(propose0, withdraw0) == Greater)?, + (compare(propose0, publish0) == Greater)?, + (compare(propose0, vote0) == Greater)?, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/packages.toml b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/packages.toml new file mode 100644 index 00000000..f0c64bc6 --- /dev/null +++ 
b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/build/packages/packages.toml @@ -0,0 +1,4 @@ +[[packages]] +name = "aiken-lang/stdlib" +version = "v2.2.0" +source = "github" diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cip.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cip.ak new file mode 100644 index 00000000..a4ca9d6f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cip.ak @@ -0,0 +1,73 @@ +//// Vodka provides utility functions to build CIP onchain code +//// +//// ### Example usage +//// ```aiken +//// use cip +//// use cardano/assets.{AssetName} +//// +//// let cip68_100_asset_name = cip.cip68_100(asset_name) +//// let cip68_222_asset_name = cip.cip68_222(asset_name) +//// let cip68_333_asset_name = cip.cip68_333(asset_name) +//// let cip68_444_asset_name = cip.cip68_444(asset_name) + +use aiken/primitive/bytearray.{concat} +use cardano/assets.{AssetName} + +/// The byte prefix for CIP-68 asset - Reference Token +pub const cip68_100_prefix: ByteArray = #"000643b0" + +/// The byte prefix for CIP-68 asset - Non-Fungible Token +pub const cip68_222_prefix: ByteArray = #"000de140" + +/// The byte prefix for CIP-68 asset - Fungible Token +pub const cip68_333_prefix: ByteArray = #"0014df10" + +/// The byte prefix for CIP-68 asset - Rich-Fungible Token +pub const cip68_444_prefix: ByteArray = #"001bc280" + +pub fn drop_cip68_prefix(cip_68_asset_name: AssetName) -> AssetName { + cip_68_asset_name |> bytearray.drop(4) +} + +test test_drop_cip68_prefix() { + let cip68_100_asset_name = #"000643b012345678" + drop_cip68_prefix(cip68_100_asset_name) == #"12345678" +} + +/// Obtain the asset name for CIP-68 asset - Reference Token +/// ```aiken +/// let cip68_100_asset_name = cip68_100(asset_name) +/// ``` +pub fn cip68_100(asset_name: AssetName) -> AssetName { + concat(cip68_100_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Non-Fungible Token +/// ```aiken +/// let cip68_222_asset_name = cip68_222(asset_name) +/// ``` +pub fn cip68_222(asset_name: AssetName) -> AssetName { + concat(cip68_222_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Fungible Token +/// ```aiken +/// let cip68_333_asset_name = cip68_333(asset_name) +/// ``` +pub fn cip68_333(asset_name: AssetName) -> AssetName { + concat(cip68_333_prefix, asset_name) +} + +/// Obtain the asset name for CIP-68 asset - Rich-Fungible Token +/// ```aiken +/// let cip68_444_asset_name = cip68_444(asset_name) +/// ``` +pub fn cip68_444(asset_name: AssetName) -> AssetName { + concat(cip68_444_prefix, asset_name) +} + +/// The metadata attached with CIP-68 reference token (100) +pub type CIP68Metadata { + metadata: Pairs, + version: Int, +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail.ak new file mode 100644 index 00000000..f67cb2cc --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail.ak @@ -0,0 +1,161 @@ +//// Vodka cocktail provides utility functions to build Aiken onchain code +//// +//// All onchain utility functions are grouped with a naming convention of `vodka_`, +//// and all can be imported directly with `use cocktail` +//// +//// ### Cardano data types +//// - `Address` - [`use 
vodka_address`](./cocktail/vodka_address.html) +//// - `Value` - [`use vodka_value`](./cocktail/vodka_value.html) +//// +//// ### Transaction types +//// - `extra_signatories` - [Documentation](./cocktail/vodka_extra_signatories.html) +//// - `inputs` - [Documentation](./cocktail/vodka_inputs.html) +//// - `outputs` - [Documentation](./cocktail/vodka_outputs.html) +//// - `mints` - [Documentation](./cocktail/vodka_mints.html) +//// - `validity_range` - [Documentation](./cocktail/vodka_validity_range.html) +//// - `Redeemers` - [Documentation](./cocktail/vodka_redeemers.html) + +use cocktail/vodka_address +use cocktail/vodka_converter +use cocktail/vodka_extra_signatories +use cocktail/vodka_inputs +use cocktail/vodka_mints +use cocktail/vodka_outputs +use cocktail/vodka_redeemers +use cocktail/vodka_validity_range +use cocktail/vodka_value + +// Address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const compare_script_address = vodka_address.compare_script_address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const compare_address = vodka_address.compare_address + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_payment_key = vodka_address.address_payment_key + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_pub_key = vodka_address.address_pub_key + +/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) +pub const address_script_hash = vodka_address.address_script_hash + +// Converter + +/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) +pub const convert_int_to_bytes = vodka_converter.convert_int_to_bytes + +/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) +pub const get_number_digit = vodka_converter.get_number_digit + +// Extra Signatories + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const key_signed = vodka_extra_signatories.key_signed + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const one_of_keys_signed = vodka_extra_signatories.one_of_keys_signed + +/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) +pub const all_key_signed = vodka_extra_signatories.all_key_signed + +// Inputs + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const input_inline_datum = vodka_inputs.input_inline_datum + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const only_input_datum_with = vodka_inputs.only_input_datum_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at = vodka_inputs.inputs_at + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_with = vodka_inputs.inputs_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_with_policy = vodka_inputs.inputs_with_policy + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at_with = vodka_inputs.inputs_at_with + +/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_at_with_policy = vodka_inputs.inputs_at_with_policy + +/// Documentation please refer to 
[`vodka_inputs`](./cocktail/vodka_inputs.html) +pub const inputs_token_quantity = vodka_inputs.inputs_token_quantity + +// Mints + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const check_policy_only_burn = vodka_mints.check_policy_only_burn + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const only_minted_token = vodka_mints.only_minted_token + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const policy_only_minted_token = vodka_mints.policy_only_minted_token + +/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) +pub const token_minted = vodka_mints.token_minted + +// Outputs + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const output_inline_datum = vodka_outputs.output_inline_datum + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at = vodka_outputs.outputs_at + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_with = vodka_outputs.outputs_with + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_with_policy = vodka_outputs.outputs_with_policy + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at_with = vodka_outputs.outputs_at_with + +/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) +pub const outputs_at_with_policy = vodka_outputs.outputs_at_with_policy + +// Redeemers + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const redeemer_from = vodka_redeemers.redeemer_from + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const withdrawal_redeemer = vodka_redeemers.withdrawal_redeemer + +/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) +pub const compare_output_reference = vodka_redeemers.compare_output_reference + +// Validity Range + +/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) +pub const valid_after = vodka_validity_range.valid_after + +/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) +pub const valid_before = vodka_validity_range.valid_before + +// Value + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_length = vodka_value.value_length + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const get_all_value_to = vodka_value.get_all_value_to + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const get_all_value_from = vodka_value.get_all_value_from + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_geq = vodka_value.value_geq + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_policy_info = vodka_value.value_policy_info + +/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) +pub const value_tokens = vodka_value.value_tokens diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak new file mode 100644 index 00000000..5bbf8a3d 
--- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak @@ -0,0 +1,56 @@ +use aiken/crypto.{Blake2b_224, Hash, ScriptHash, VerificationKeyHash} +use aiken/primitive/bytearray +use cardano/address.{Address, Script, VerificationKey} + +/// Compare two script addresses +pub fn compare_script_address(x: Address, y: Address) -> Ordering { + expect Script(x_hash) = x.payment_credential + expect Script(y_hash) = y.payment_credential + bytearray.compare(x_hash, y_hash) +} + +/// Compare two addresses +pub fn compare_address(x: Address, y: Address) -> Ordering { + let x_hash = x.payment_credential + let y_hash = y.payment_credential + when (x_hash, y_hash) is { + (Script(x_script_hash), Script(y_script_hash)) -> + bytearray.compare(x_script_hash, y_script_hash) + (VerificationKey(x_key_hash), VerificationKey(y_key_hash)) -> + bytearray.compare(x_key_hash, y_key_hash) + _ -> Equal + } +} + +/// Obtain the payment key of an address, it can be either a script hash or a verification key +/// ```aiken +/// let payment_key_hash = address_payment_key(address) +/// ``` +pub fn address_payment_key(address: Address) -> Hash { + when address.payment_credential is { + Script(hash) -> hash + VerificationKey(key_hash) -> key_hash + } +} + +/// Obtain the verification key of an address, None if it is a script address +/// ```aiken +/// expect Some(pub_key_hash) = address_pub_key(address) +/// ``` +pub fn address_pub_key(address: Address) -> Option { + when address.payment_credential is { + VerificationKey(key_hash) -> Some(key_hash) + _ -> None + } +} + +/// Obtain the script hash of an address, None if it is a verification key address +/// ```aiken +/// expect Some(script_hash) = address_pub_key(address) +/// ``` +pub fn address_script_hash(address: Address) -> Option { + when address.payment_credential is { + Script(script_hash) -> Some(script_hash) + _ -> None + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak new file mode 100644 index 00000000..f13c84aa --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak @@ -0,0 +1,40 @@ +use aiken/primitive/bytearray + +/// Convert an integer to a "stringify" ByteArray value +/// ```aiken +/// let int_bytes = convert_int_to_bytes(123) +/// let this_is_true = int_bytes == "123" +/// ``` +pub fn convert_int_to_bytes(i: Int) -> ByteArray { + convert_int_to_bytes_go(i, get_number_digit(i)) +} + +fn convert_int_to_bytes_go(newInt: Int, digit: Int) -> ByteArray { + if digit == 1 { + bytearray.push("", newInt + 48) + } else { + bytearray.push( + convert_int_to_bytes_go(newInt % digit, digit / 10), + newInt / digit + 48, + ) + } +} + +/// Get the number of digits in an integer +pub fn get_number_digit(i: Int) -> Int { + go_get_number_digit(i, 1) +} + +fn go_get_number_digit(newInt: Int, digit: Int) -> Int { + if newInt < 10 { + digit + } else { + go_get_number_digit(newInt / 10, digit * 10) + } +} + +test byte_conversion() { + convert_int_to_bytes(1) == "1" && convert_int_to_bytes(123) == "123" && convert_int_to_bytes( + 672912, + ) == "672912" +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak 
b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak new file mode 100644 index 00000000..17eb3b20 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak @@ -0,0 +1,46 @@ +use aiken/collection/list + +/// Check if a key is signed by any of the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let key_to_test_1 = "key2" +/// let this_is_true = key_signed(extra_signatories, key_to_test_1) +/// +/// let key_to_test_2 = "key4" +/// let this_is_false = key_signed(extra_signatories, key_to_test_2) +/// ``` +pub fn key_signed(extra_signatories: List, key: ByteArray) { + list.has(extra_signatories, key) +} + +/// Check if any of the keys are signed by the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let keys_to_test_1 = ["key4", "key2", "key5"] +/// let this_is_true = one_of_keys_signed(extra_signatories, keys_to_test_1) +/// +/// let keys_to_test_2 = ["key4", "key5"] +/// let this_is_false = one_of_keys_signed(extra_signatories, keys_to_test_2) +/// ``` +pub fn one_of_keys_signed( + extra_signatories: List, + keys: List, +) { + list.any(keys, fn(key) { key_signed(extra_signatories, key) }) +} + +/// Check if all of the keys are signed by the extra_signatories +/// ```aiken +/// let extra_signatories = ["key1", "key2", "key3"] +/// +/// let keys_to_test_1 = ["key2", "key3"] +/// let this_is_true = all_keys_signed(extra_signatories, keys_to_test_1) +/// +/// let keys_to_test_2 = ["key2", "key4"] +/// let this_is_false = all_keys_signed(extra_signatories, keys_to_test_2) +/// ``` +pub fn all_key_signed(extra_signatories: List, keys: List) { + list.all(keys, fn(key) { key_signed(extra_signatories, key) }) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak new file mode 100644 index 00000000..5ddaaa7f --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak @@ -0,0 +1,123 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} +use cardano/transaction.{InlineDatum, Input} + +/// Extracts the inline datum from an input. +/// ```aiken +/// expect inline_datum: MyDatumType = input_inline_datum(input) +/// ``` +pub fn input_inline_datum(input: Input) { + expect InlineDatum(raw_datum) = input.output.datum + raw_datum +} + +/// Extract the inline datum by locating the first input in a list of inputs by Address and PolicyId. +/// ```aiken +/// expect inline_datum: MyDatumType = only_input_datum_with(inputs, policy, name) +/// ``` +pub fn only_input_datum_with( + inputs: List, + policy: PolicyId, + name: AssetName, +) { + expect Some(input) = + list.find( + inputs, + fn(input) { quantity_of(input.output.value, policy, name) == 1 }, + ) + input_inline_datum(input) +} + +/// Filters inputs by Address. +/// ```aiken +/// let filtered_inputs = inputs_at(inputs, address) +/// ``` +pub fn inputs_at(inputs: List, address: Address) -> List { + list.filter(inputs, fn(input) { input.output.address == address }) +} + +/// Filters inputs by PolicyId and AssetName. 
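+/// Note that only inputs holding exactly one unit of the given asset are kept, which
+/// makes this handy for locating a UTxO identified by a one-shot auth token. A minimal
+/// sketch (the names `inputs`, `auth_policy` and `auth_name` are assumed to be in scope):
+/// ```aiken
+/// expect [auth_input] = inputs_with(inputs, auth_policy, auth_name)
+/// ```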
+/// ```aiken +/// let filtered_inputs = inputs_with(inputs, policy, name) +/// ``` +pub fn inputs_with( + inputs: List, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + inputs, + fn(input) { quantity_of(input.output.value, policy, name) == 1 }, + ) +} + +/// Filters inputs by token policy. +/// ```aiken +/// let filtered_inputs = inputs_with_policy(inputs, policy) +/// ``` +pub fn inputs_with_policy(inputs: List, policy: PolicyId) -> List { + list.filter( + inputs, + fn(input) { + list.any(flatten(input.output.value), fn(token) { token.1st == policy }) + }, + ) +} + +/// Filters inputs by Address, PolicyId, and AssetName. +/// ```aiken +/// let filtered_inputs = inputs_at_with(inputs, address, policy, name) +/// ``` +pub fn inputs_at_with( + inputs: List, + address: Address, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + inputs, + fn(input) { + input.output.address == address && quantity_of( + input.output.value, + policy, + name, + ) == 1 + }, + ) +} + +/// Filters inputs by Address and PolicyId. +/// ```aiken +/// let filtered_inputs = inputs_at_with_policy(inputs, address, policy) +/// ``` +pub fn inputs_at_with_policy( + inputs: List, + address: Address, + policy: PolicyId, +) -> List { + list.filter( + inputs, + fn(input) { + input.output.address == address && list.any( + flatten(input.output.value), + fn(token) { token.1st == policy }, + ) + }, + ) +} + +/// Calculate the total quantity of a token in a list of inputs. +/// ```aiken +/// let total_quantity = inputs_token_quantity(inputs, token) +/// ``` +pub fn inputs_token_quantity( + inputs: List, + token: (PolicyId, AssetName), +) -> Int { + list.map( + inputs, + fn(input) { quantity_of(input.output.value, token.1st, token.2nd) }, + ) + |> list.foldr(0, fn(n, total) { n + total }) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak new file mode 100644 index 00000000..aa7dfe9a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak @@ -0,0 +1,68 @@ +use aiken/collection/list +use cardano/assets.{AssetName, PolicyId, Value, flatten} + +/// Check if a certain PolicyId is burning only if exists in the minted value. +/// ```aiken +/// let is_policy_only_burn = check_policy_only_burn(minted_value, policy) +/// ``` +pub fn check_policy_only_burn(mint: Value, policy: PolicyId) -> Bool { + let mint_value = flatten(mint) + list.all( + mint_value, + fn(x) { + if x.1st == policy { + x.3rd < 0 + } else { + True + } + }, + ) +} + +/// Check if a certain policy has only minted this token. +/// ```aiken +/// let is_policy_only_minted = check_policy_only_minted(minted_value, policy, name, quantity) +/// ``` +pub fn policy_only_minted_token( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + when flatten(mint) |> list.filter(fn(asset) { asset.1st == policy }) is { + [(_, minted_asset_name, minted_quantity)] -> + minted_asset_name == name && minted_quantity == quantity + _ -> False + } +} + +/// Check if the minted value contains only one distinct asset with particular PolicyId. 
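+/// The check passes only when the transaction mints nothing besides the single given
+/// asset at the given quantity. A minimal sketch (assuming an `auth_policy` in scope
+/// and an empty token name):
+/// ```aiken
+/// let exactly_one_minted = only_minted_token(tx.mint, auth_policy, "", 1)
+/// ```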
+/// ```aiken +/// let is_only_minted_token = only_minted_token(minted_value, policy, name, quantity) +/// ``` +pub fn only_minted_token( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + when flatten(mint) is { + [(minted_policy, minted_asset_name, minted_quantity)] -> + minted_policy == policy && minted_asset_name == name && minted_quantity == quantity + _ -> False + } +} + +/// Check if the minted value contains a token with particular PolicyId, AssetName, and quantity. +/// ```aiken +/// let is_token_minted = token_minted(minted_value, policy, name, quantity) +/// ``` +pub fn token_minted( + mint: Value, + policy: PolicyId, + name: AssetName, + quantity: Int, +) { + flatten(mint) + |> list.any(fn(x) { x.1st == policy && x.2nd == name && x.3rd == quantity }) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak new file mode 100644 index 00000000..d83ffaf5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak @@ -0,0 +1,90 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} +use cardano/transaction.{InlineDatum, Output} + +/// Extracts the inline datum from an output. +/// ```aiken +/// expect inline_datum: MyDatumType = output_inline_datum(output) +/// ``` +pub fn output_inline_datum(output: Output) { + expect InlineDatum(raw_datum) = output.datum + raw_datum +} + +/// Filters outputs by Address. +/// ```aiken +/// let filtered_outputs = outputs_at(outputs, address) +/// ``` +pub fn outputs_at(outputs: List, address: Address) -> List { + list.filter(outputs, fn(output) { output.address == address }) +} + +/// Filters outputs by PolicyId and AssetName. +/// ```aiken +/// let filtered_outputs = outputs_with(outputs, policy, name) +/// ``` +pub fn outputs_with( + outputs: List, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + outputs, + fn(output) { quantity_of(output.value, policy, name) == 1 }, + ) +} + +/// Filters outputs by token policy. +/// ```aiken +/// let filtered_outputs = outputs_with_policy(outputs, policy) +/// ``` +pub fn outputs_with_policy( + outputs: List, + policy: PolicyId, +) -> List { + list.filter( + outputs, + fn(output) { + list.any(flatten(output.value), fn(token) { token.1st == policy }) + }, + ) +} + +/// Filters outputs by Address, PolicyId, and AssetName. +/// ```aiken +/// let filtered_outputs = outputs_at_with(outputs, address, policy, name) +/// ``` +pub fn outputs_at_with( + outputs: List, + address: Address, + policy: PolicyId, + name: AssetName, +) -> List { + list.filter( + outputs, + fn(output) { + output.address == address && quantity_of(output.value, policy, name) == 1 + }, + ) +} + +/// Filters outputs by Address and PolicyId. 
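+/// Both conditions must hold: the output sits at `address` and carries at least one
+/// token under `policy`. A minimal sketch (names assumed):
+/// ```aiken
+/// expect [script_output] =
+///   outputs_at_with_policy(tx.outputs, script_address, auth_policy)
+/// ```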
+/// ```aiken +/// let filtered_outputs = outputs_at_with_policy(outputs, address, policy) +/// ``` +pub fn outputs_at_with_policy( + outputs: List, + address: Address, + policy: PolicyId, +) -> List { + list.filter( + outputs, + fn(output) { + output.address == address && list.any( + flatten(output.value), + fn(token) { token.1st == policy }, + ) + }, + ) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak new file mode 100644 index 00000000..df3d69e9 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak @@ -0,0 +1,45 @@ +use aiken/collection/pairs +use aiken/primitive/bytearray +use cardano/address.{Address, Script} +use cardano/transaction.{ + Input, OutputReference, Redeemer, ScriptPurpose, Spend, Withdraw, find_input, +} + +/// Obtain the redeemer for a given output reference and address +/// ```aiken +/// expect Some(redeemer) = redeemer_from(redeemers, inputs, output_reference, input_address) +/// ``` +pub fn redeemer_from( + redeemers: Pairs, + inputs: List, + output_reference: OutputReference, + input_address: Address, +) -> Option { + expect Some(redeemer) = redeemers |> pairs.get_first(Spend(output_reference)) + expect Some(input) = find_input(inputs, output_reference) + if input.output.address == input_address { + Some(redeemer) + } else { + None + } +} + +/// Obtain the first redeemer for a given withdrawal script hash +/// ```aiken +/// expect Some(raw_redeemer) = withdrawal_redeemer(redeemers, withdrawal_script_hash) +/// expect my_redeemer: MyRedeemerType = raw_redeemer; +/// ``` +pub fn withdrawal_redeemer( + redeemers: Pairs, + withdrawal_script_hash: ByteArray, +) -> Option { + redeemers + |> pairs.get_first(Withdraw(Script(withdrawal_script_hash))) +} + +/// Compare the output reference of two spend transactions +pub fn compare_output_reference(x, y) { + expect Spend(out_ref_x) = x + expect Spend(out_ref_y) = y + bytearray.compare(out_ref_x.transaction_id, out_ref_y.transaction_id) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak new file mode 100644 index 00000000..498bc600 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak @@ -0,0 +1,34 @@ +use aiken/interval.{Finite, IntervalBound} +use cardano/transaction.{ValidityRange} + +/// Check if the validity range is after the required timestamp. +/// ```aiken +/// let is_valid_after = valid_after(transaction.validity_range, required_timestamp) +/// ``` +pub fn valid_after( + validity_range: ValidityRange, + required_timestamp: Int, +) -> Bool { + let IntervalBound { bound_type, is_inclusive } = validity_range.lower_bound + when (bound_type, is_inclusive) is { + (Finite(lower_bound), True) -> lower_bound > required_timestamp + (Finite(lower_bound), False) -> lower_bound >= required_timestamp + _ -> False + } +} + +/// Check if the validity range is before the required timestamp. 
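+/// A typical use is enforcing a deadline carried in a datum. A minimal sketch (the
+/// `datum.deadline` field is an assumption):
+/// ```aiken
+/// let before_deadline = valid_before(tx.validity_range, datum.deadline)
+/// ```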
+/// ```aiken +/// let is_valid_before = valid_before(transaction.validity_range, required_timestamp) +/// ``` +pub fn valid_before( + validity_range: ValidityRange, + required_timestamp: Int, +) -> Bool { + let IntervalBound { bound_type, is_inclusive } = validity_range.upper_bound + when (bound_type, is_inclusive) is { + (Finite(upper_bound), True) -> upper_bound < required_timestamp + (Finite(upper_bound), False) -> upper_bound <= required_timestamp + _ -> False + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak new file mode 100644 index 00000000..770ddbcc --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak @@ -0,0 +1,80 @@ +use aiken/collection/list +use cardano/address.{Address} +use cardano/assets.{ + AssetName, PolicyId, Value, flatten, merge, quantity_of, zero, +} +use cardano/transaction.{Input, Output} + +/// Calulate the length of a value +/// ```aiken +/// let value_length = value_length(value) +/// ``` +pub fn value_length(value: Value) -> Int { + list.length(flatten(value)) +} + +/// Get the value send to a particular address in a list of outputs +/// ```aiken +/// let value_to = get_all_value_to(outputs, address) +/// ``` +pub fn get_all_value_to(outputs: List, address: Address) -> Value { + list.foldr( + outputs, + zero, + fn(output, acc_value) { + if output.address == address { + merge(acc_value, output.value) + } else { + acc_value + } + }, + ) +} + +/// Get the value coming from a particular address in a list of inputs +/// ```aiken +/// let value_from = get_all_value_from(inputs, address) +/// ``` +pub fn get_all_value_from(inputs: List, address: Address) -> Value { + list.foldr( + inputs, + zero, + fn(input, acc_value) { + if input.output.address == address { + merge(acc_value, input.output.value) + } else { + acc_value + } + }, + ) +} + +/// Check if the first value provided is greater than or equal to the second value +/// ```aiken +/// let is_geq = value_geq(supposed_greater, supposed_smaller) +/// ``` +pub fn value_geq(greater: Value, smaller: Value) -> Bool { + list.all( + flatten(smaller), + fn(token) { quantity_of(greater, token.1st, token.2nd) >= token.3rd }, + ) +} + +/// Obtain the information (i.e. flattened value) of a policy in a value +/// ```aiken +/// expect Some((policyId, assetName, quantity)) = value_policy_info(value, policy) +/// ``` +pub fn value_policy_info( + value: Value, + policy: ByteArray, +) -> Option<(ByteArray, ByteArray, Int)> { + list.find(flatten(value), fn(t) { t.1st == policy }) +} + +/// Obtain the non-lovelace information (i.e. flattened value) of a policy in a value +/// ```aiken +/// let tokens = value_tokens(value) +/// ``` +pub fn value_tokens(value: Value) -> List<(PolicyId, AssetName, Int)> { + list.filter(flatten(value), fn(t) { t.1st != "" }) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail.ak new file mode 100644 index 00000000..79a8e7c7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail.ak @@ -0,0 +1,776 @@ +//// Mocktail contains a set of functions to build transactions for testing purposes. +//// +//// To use Mocktail Tx, there are 4 steps +//// 1. 
Starts with [`mocktail_tx()`](./mocktail.html#mocktail_tx) to create a new transaction builder. +//// 2. Use tx building methods similar to MeshJS lower level APIs to build the transaction. +//// 3. Call [`complete`](./mocktail.html#complete) to complete building transaction. +//// 4. Finally, if there is any whole items to be added to the transaction, use the `add` functions. +//// +//// Mocktail is built with devex and multiple test cases compatibility in mind. +//// 1. It is pipable. +//// 2. For every tx building and adding methods, it takes first param as condition. that function will only run when this condition is `True`. +//// +//// ## Example +//// ```aiken +//// let tx: Transaction = +//// mocktail_tx() +//// |> required_signer_hash(is_signature_provided, mock_pub_key_hash(0)) +//// |> script_withdrawal(True, mock_script_hash(0), 0) +//// |> script_withdrawal(True, mock_script_hash(1), 0) +//// |> required_signer_hash(True, mock_pub_key_hash(1)) +//// |> complete() +//// |> add_reference_input(True, mock_oracle_ref_input_1()) +//// |> add_reference_input(True, mock_oracle_ref_input_2()) +//// |> add_output(True, mock_pub_key_output(mock_fee_collection_address, mock_fee)) +//// ``` + +use aiken/cbor +use aiken/collection/dict +use aiken/collection/list +use aiken/crypto.{VerificationKeyHash, blake2b_256} +use aiken/interval.{Finite, Interval} +use cardano/address.{Address, Credential, Script} +use cardano/assets.{Value, from_asset, merge, zero} +use cardano/certificate.{Certificate} +use cardano/transaction.{ + InlineDatum, Input, Output, OutputReference, Redeemer, ScriptPurpose, + Transaction, TransactionId, placeholder, +} +use mocktail/virgin_address +use mocktail/virgin_key_hash +use mocktail/virgin_output_reference +use mocktail/virgin_outputs +use mocktail/virgin_validity_range + +/// A mock transaction builder. It can be initialized with `mocktail_tx()`. +pub type MocktailTx { + tx: Transaction, + queue_input: Option, + queue_output: Option, + queue_ref_input: Option, +} + +/// Initialize a new mock transaction builder, and output a built transaction wiht [`.complete().`](./mocktail.html#complete) +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn mocktail_tx() -> MocktailTx { + MocktailTx { + tx: placeholder, + queue_input: None, + queue_output: None, + queue_ref_input: None, + } +} + +/// Tx building method - Add an input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_in( + mocktail_tx: MocktailTx, + condition: Bool, + tx_hash: ByteArray, + tx_index: Int, + amount: Value, + address: Address, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_input = + Some( + Input { + output_reference: OutputReference { + transaction_id: tx_hash, + output_index: tx_index, + }, + output: mock_pub_key_output(address, amount), + }, + ) + when mocktail_tx.queue_input is { + Some(input) -> + MocktailTx { + ..mocktail_tx, + queue_input: queue_input, + tx: mocktail_tx.tx |> add_input(True, input), + } + None -> MocktailTx { ..mocktail_tx, queue_input: queue_input } + } + } +} + +/// Tx building method - Add an input with inline datum to the transaction. +/// This can only be used right after [`tx_in`](./mocktail.html#tx_in). +/// This function will only run when the condition is `True`. 
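+/// If no `tx_in` is queued yet, a mock script input at `mock_script_address(0, None)`
+/// carrying the inline datum is queued instead (see the `None` branch below).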
+/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_in(condition, tx_hash, tx_index, amount, address) +/// |> tx_in_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_in_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_input = + when mocktail_tx.queue_input is { + Some(input) -> { + let Input { output_reference, output } = input + Some( + Input { + output_reference, + output: Output { ..output, datum: InlineDatum(datum) }, + }, + ) + } + None -> + Some( + Input { + output_reference: mock_utxo_ref(0, 0), + output: mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + }, + ) + } + MocktailTx { ..mocktail_tx, queue_input: queue_input } + } +} + +/// Tx building method - Add an output to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_out(condition, address, amount) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_out( + mocktail_tx: MocktailTx, + condition: Bool, + address: Address, + amount: Value, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_output = Some(mock_pub_key_output(address, amount)) + when mocktail_tx.queue_output is { + Some(output) -> + MocktailTx { + ..mocktail_tx, + queue_output: queue_output, + tx: mocktail_tx.tx |> add_output(True, output), + } + None -> MocktailTx { ..mocktail_tx, queue_output: queue_output } + } + } +} + +/// Tx building method - Add an output with inline datum to the transaction. +/// This can only be used right after [`tx_out`](./mocktail.html#tx_out). +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> tx_out(condition, address, amount) +/// |> tx_out_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn tx_out_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_output = + when mocktail_tx.queue_output is { + Some(output) -> Some(Output { ..output, datum: InlineDatum(datum) }) + None -> + Some( + mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + ) + } + MocktailTx { ..mocktail_tx, queue_output: queue_output } + } +} + +/// Tx building method - Add a mint to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> mint(condition, quantity, policy_id, token_name) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn mint( + mocktail_tx: MocktailTx, + condition: Bool, + quantity: Int, + policy_id: ByteArray, + token_name: ByteArray, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx + |> add_mint(True, from_asset(policy_id, token_name, quantity)), + } + } +} + +/// Tx building method - Add a reference input to the transaction. +/// This function will only run when the condition is `True`. 
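+/// Behaves like [`tx_in`](./mocktail.html#tx_in), except that the queued input ends up
+/// in `reference_inputs` rather than `inputs` once flushed (by a later `ref_tx_in` or
+/// by `complete`).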
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn ref_tx_in( + mocktail_tx: MocktailTx, + condition: Bool, + tx_hash: ByteArray, + tx_index: Int, + amount: Value, + address: Address, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_ref_input = + Some( + Input { + output_reference: OutputReference { + transaction_id: tx_hash, + output_index: tx_index, + }, + output: mock_pub_key_output(address, amount), + }, + ) + when mocktail_tx.queue_ref_input is { + Some(input) -> + MocktailTx { + ..mocktail_tx, + queue_ref_input: queue_ref_input, + tx: mocktail_tx.tx |> add_reference_input(True, input), + } + None -> MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } + } + } +} + +/// Tx building method - Add an inline datum to last reference input in the transaction. +/// This can only be used right after [`ref_tx_in`](./mocktail.html#ref_tx_in). +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) +/// |> ref_tx_in_inline_datum(condition, datum) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn ref_tx_in_inline_datum( + mocktail_tx: MocktailTx, + condition: Bool, + datum: Data, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let queue_ref_input = + when mocktail_tx.queue_ref_input is { + Some(input) -> { + let Input { output_reference, output } = input + Some( + Input { + output_reference, + output: Output { ..output, datum: InlineDatum(datum) }, + }, + ) + } + None -> + Some( + Input { + output_reference: mock_utxo_ref(0, 0), + output: mock_script_output( + mock_script_address(0, None), + zero, + InlineDatum(datum), + ), + }, + ) + } + MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } + } +} + +/// Tx building method - Add a a lower bound validity range to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> valid_hereafter(condition, time) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn invalid_before( + mocktail_tx: MocktailTx, + condition: Bool, + time: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let tx = mocktail_tx.tx + let upper_bound = + when tx.validity_range.upper_bound.bound_type is { + Finite(x) -> Some(x) + _ -> None + } + MocktailTx { + ..mocktail_tx, + tx: Transaction { + ..tx, + validity_range: mock_interval(Some(time), upper_bound), + }, + } + } +} + +/// Tx building method - Add a a upper bound validity range to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> valid_hereafter(condition, time) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn invalid_hereafter( + mocktail_tx: MocktailTx, + condition: Bool, + time: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + let tx = mocktail_tx.tx + let lower_bound = + when tx.validity_range.lower_bound.bound_type is { + Finite(x) -> Some(x) + _ -> None + } + MocktailTx { + ..mocktail_tx, + tx: Transaction { + ..tx, + validity_range: mock_interval(lower_bound, Some(time)), + }, + } + } +} + +/// Tx building method - Add a required signer hash to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> required_signer_hash(condition, key) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn required_signer_hash( + mocktail_tx: MocktailTx, + condition: Bool, + key: ByteArray, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx |> add_extra_signatory(True, key), + } + } +} + +/// Tx building method - Add a script withdrawal to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> script_withdrawal(condition, script_hash, withdrawal_amount) +/// |> ...other tx building methods +/// |> complete() +/// ``` +pub fn script_withdrawal( + mocktail_tx: MocktailTx, + condition: Bool, + script_hash: ByteArray, + withdrawal_amount: Int, +) -> MocktailTx { + if !condition { + mocktail_tx + } else { + MocktailTx { + ..mocktail_tx, + tx: mocktail_tx.tx + |> add_withdrawal(True, Pair(Script(script_hash), withdrawal_amount)), + } + } +} + +/// Tx building method - Conclude the transaction building process, and return the built transaction. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +pub fn complete(mocktail_tx: MocktailTx) -> Transaction { + let tx = mocktail_tx.tx + let tx = + when mocktail_tx.queue_input is { + Some(input) -> tx |> add_input(True, input) + None -> tx + } + let tx = + when mocktail_tx.queue_output is { + Some(output) -> tx |> add_output(True, output) + None -> tx + } + let tx = + when mocktail_tx.queue_ref_input is { + Some(input) -> tx |> add_reference_input(True, input) + None -> tx + } + tx +} + +/// Tx maniputlator - Add an input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_input(condition, input) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_input(tx: Transaction, condition: Bool, input: Input) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, inputs: tx.inputs |> list.concat([input]) } + } +} + +/// Tx maniputlator - Add a reference input to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_reference_input(condition, input) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_reference_input( + tx: Transaction, + condition: Bool, + input: Input, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + reference_inputs: tx.reference_inputs |> list.concat([input]), + } + } +} + +/// Tx maniputlator - Add an output to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let t = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_output(condition, output) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_output( + tx: Transaction, + condition: Bool, + output: Output, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, outputs: tx.outputs |> list.concat([output]) } + } +} + +/// Tx maniputlator - Set a fee to the transaction. +/// This function will only run when the condition is `True`. 
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> set_fee(condition, lovelace_fee) +/// |> ...other tx maniputlator methods +/// ``` +pub fn set_fee( + tx: Transaction, + condition: Bool, + lovelace_fee: Int, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, fee: lovelace_fee } + } +} + +/// Tx maniputlator - Add a mint to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_mint(condition, mint) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_mint(tx: Transaction, condition: Bool, mint: Value) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + mint: tx.mint + |> merge(mint), + } + } +} + +/// Tx maniputlator - Add a certificate to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_certificate(condition, certificate) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_certificate( + tx: Transaction, + condition: Bool, + certificate: Certificate, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + certificates: tx.certificates |> list.concat([certificate]), + } + } +} + +/// Tx maniputlator - Add a withdrawal to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_withdrawal(condition, stake_credential, amount) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_withdrawal( + tx: Transaction, + condition: Bool, + withdrawal: Pair, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + withdrawals: tx.withdrawals |> list.concat([withdrawal]), + } + } +} + +/// Tx maniputlator - Add an extra signatory to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_extra_signatory(condition, signatory) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_extra_signatory( + tx: Transaction, + condition: Bool, + signatory: VerificationKeyHash, +) -> Transaction { + if !condition { + tx + } else { + Transaction { + ..tx, + extra_signatories: tx.extra_signatories |> list.concat([signatory]), + } + } +} + +/// Tx maniputlator - Add a redeemer to the transaction. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_redeemer(condition, redeemer) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_redeemer( + tx: Transaction, + condition: Bool, + redeemer: Pair, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, redeemers: tx.redeemers |> list.concat([redeemer]) } + } +} + +/// Tx maniputlator - Add a datum to the transaction. +/// This function will only run when the condition is `True`. 
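+/// The datum is inserted into the transaction's `datums` dict keyed by the blake2b-256
+/// hash of its CBOR serialisation, as computed via `cbor.serialise` below.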
+/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> add_datum(condition, datum) +/// |> ...other tx maniputlator methods +/// ``` +pub fn add_datum(tx: Transaction, condition: Bool, datum: Data) -> Transaction { + if !condition { + tx + } else { + let datum_hash = blake2b_256(cbor.serialise(datum)) + Transaction { ..tx, datums: tx.datums |> dict.insert(datum_hash, datum) } + } +} + +/// Tx maniputlator - Set the transaction id. +/// This function will only run when the condition is `True`. +/// ```aiken +/// let tx = mocktail_tx() +/// |> ...tx building methods +/// |> complete() +/// |> set_transaction_id(condition, transaction_id) +/// |> ...other tx maniputlator methods +/// ``` +pub fn set_transaction_id( + tx: Transaction, + condition: Bool, + transaction_id: TransactionId, +) -> Transaction { + if !condition { + tx + } else { + Transaction { ..tx, id: transaction_id } + } +} + +// Address + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_verfication_key_credential = + virgin_address.mock_verfication_key_credential + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_pub_key_address = virgin_address.mock_pub_key_address + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_script_credential = virgin_address.mock_script_credential + +/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) +pub const mock_script_address = virgin_address.mock_script_address + +// Key hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_key_hash = virgin_key_hash.mock_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_policy_id = virgin_key_hash.mock_policy_id + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_pub_key_hash = virgin_key_hash.mock_pub_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_script_hash = virgin_key_hash.mock_script_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_stake_key_hash = virgin_key_hash.mock_stake_key_hash + +/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) +pub const mock_script_stake_key_hash = + virgin_key_hash.mock_script_stake_key_hash + +// Output reference + +/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) +pub const mock_tx_hash = virgin_output_reference.mock_tx_hash + +/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) +pub const mock_utxo_ref = virgin_output_reference.mock_utxo_ref + +// Outputs + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_output = virgin_outputs.mock_output + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_pub_key_output = virgin_outputs.mock_pub_key_output + +/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) +pub const mock_script_output = virgin_outputs.mock_script_output + +// Validity range + +/// Documentation please refer to [`virgin_validity_range`](./mocktail/virgin_validity_range.html) +pub const mock_interval = 
virgin_validity_range.mock_interval + +test preserve_tx_in_order() { + let tx: Transaction = + mocktail_tx() + |> tx_in(True, mock_tx_hash(0), 0, zero, mock_pub_key_address(0, None)) + |> tx_in(True, mock_tx_hash(1), 1, zero, mock_pub_key_address(1, None)) + |> complete() + expect [input1, input2] = tx.inputs + and { + input1.output_reference.transaction_id == mock_tx_hash(0), + input1.output_reference.output_index == 0, + input2.output_reference.transaction_id == mock_tx_hash(1), + input2.output_reference.output_index == 1, + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak new file mode 100644 index 00000000..30a6e40b --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak @@ -0,0 +1,57 @@ +use cardano/address.{ + Address, Credential, Inline, Script, StakeCredential, VerificationKey, +} +use mocktail/virgin_key_hash.{ + mock_pub_key_hash, mock_script_hash, mock_script_stake_key_hash, + mock_stake_key_hash, +} + +/// Mock a pub key credential +/// `variation` same the same index as `mock_pub_key_hash` +pub fn mock_verfication_key_credential(variation: Int) -> Credential { + VerificationKey(mock_pub_key_hash(variation)) +} + +/// Mock a pub key address +/// `variation` same the same index as `mock_pub_key_hash` +/// `stake_credential` is optional +pub fn mock_pub_key_address( + variation: Int, + stake_credential: Option, +) -> Address { + Address { + payment_credential: mock_verfication_key_credential(variation), + stake_credential, + } +} + +/// Mock a script credential +/// `variation` same the same index as `mock_script_hash` +pub fn mock_script_credential(variation: Int) -> Credential { + Script(mock_script_hash(variation)) +} + +/// Mock a script address +/// `variation` same the same index as `mock_script_hash` +/// `stake_credential` is optional +pub fn mock_script_address( + variation: Int, + stake_credential: Option, +) -> Address { + Address { + payment_credential: mock_script_credential(variation), + stake_credential, + } +} + +/// Mock a pub key stake credential +/// `variation` same the same index as `mock_stake_key_hash` +pub fn mock_pub_key_stake_cred(variation: Int) -> StakeCredential { + Inline(VerificationKey(mock_stake_key_hash(variation))) +} + +/// Mock a script stake credential +/// `variation` same the same index as `mock_script_stake_key_hash` +pub fn mock_script_stake_cred(variation: Int) -> StakeCredential { + Inline(Script(mock_script_stake_key_hash(variation))) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak new file mode 100644 index 00000000..9a32dd06 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak @@ -0,0 +1,47 @@ +use aiken/cbor.{serialise} +use aiken/crypto.{ScriptHash, VerificationKeyHash, blake2b_224} +use aiken/primitive/bytearray.{concat} +use cardano/assets.{PolicyId} + +pub const root_hash = + #"a2c20c77887ace1cd986193e4e75babd8993cfd56995cd5cfce609c2" + +/// Mock a key in hexadecimal format +pub fn mock_key_hash(variation: Int) -> ByteArray { + serialise(variation) |> concat(root_hash) |> blake2b_224() +} + +/// Mock a PolicyID +/// 
The variation is used to distinguish between different PolicyIDs +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_policy_id(variation: Int) -> PolicyId { + mock_key_hash(variation) +} + +/// Mock a public key hash +/// The variation is used to distinguish between different public keys +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_pub_key_hash(variation: Int) -> VerificationKeyHash { + mock_key_hash(variation + 1000) +} + +/// Mock a script hash +/// The variation is used to distinguish between different scripts +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_script_hash(variation: Int) -> ScriptHash { + mock_key_hash(variation + 2000) +} + +/// Mock a stake key hash +/// The variation is used to distinguish between different stake keys +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_stake_key_hash(variation: Int) -> VerificationKeyHash { + mock_key_hash(variation + 3000) +} + +/// Mock a script stake key hash +/// The variation is used to distinguish between different scripts +/// Use this but not other `mock_key_hash` functions to avoid hash collision +pub fn mock_script_stake_key_hash(variation: Int) -> ScriptHash { + mock_key_hash(variation + 4000) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak new file mode 100644 index 00000000..03ec9c02 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak @@ -0,0 +1,16 @@ +use aiken/cbor.{serialise} +use aiken/crypto.{Blake2b_256, Hash, blake2b_256} +use aiken/primitive/bytearray.{concat} +use cardano/transaction.{OutputReference, Transaction} + +const root_hash = + #"5a077cbcdffb88b104f292aacb9687ce93e2191e103a30a0cc5505c18b719f98" + +pub fn mock_tx_hash(variation: Int) -> Hash { + serialise(variation) |> concat(root_hash) |> blake2b_256() +} + +/// Mock an output reference +pub fn mock_utxo_ref(variation: Int, output_index: Int) -> OutputReference { + OutputReference { transaction_id: mock_tx_hash(variation), output_index } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak new file mode 100644 index 00000000..b75181ba --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak @@ -0,0 +1,30 @@ +use aiken/crypto.{ScriptHash} +use cardano/address.{Address} +use cardano/assets.{Value} +use cardano/transaction.{Datum, NoDatum, Output} + +/// Mock an output +pub fn mock_output( + address: Address, + value: Value, + datum: Datum, + reference_script: Option, +) -> Output { + Output { address, value, datum, reference_script } +} + +/// Mock an output with a public key address +/// `datum` and `reference_script` is omitted as it is seldom used in practice +pub fn mock_pub_key_output(address: Address, value: Value) -> Output { + mock_output(address, value, NoDatum, reference_script: None) +} + +/// Mock an output with a script address +/// `reference_script` is omitted as it is seldom used in practice +pub fn mock_script_output( + address: Address, + value: 
Value, + datum: Datum, +) -> Output { + mock_output(address, value, datum, reference_script: None) +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak new file mode 100644 index 00000000..c11a249d --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak @@ -0,0 +1,28 @@ +use aiken/interval.{ + Finite, Interval, IntervalBound, NegativeInfinity, PositiveInfinity, +} +use cardano/transaction.{ValidityRange} + +/// Mock a validity range with the given lower and upper bounds. +pub fn mock_interval(lower: Option, upper: Option) -> ValidityRange { + let lower_bound = + when lower is { + Some(lower_bound_number) -> + IntervalBound { + bound_type: Finite(lower_bound_number), + is_inclusive: True, + } + None -> IntervalBound { bound_type: NegativeInfinity, is_inclusive: True } + } + let upper_bound = + when upper is { + Some(upper_bound_number) -> + IntervalBound { + bound_type: Finite(upper_bound_number), + is_inclusive: True, + } + None -> IntervalBound { bound_type: PositiveInfinity, is_inclusive: True } + } + + Interval { lower_bound, upper_bound } +} diff --git a/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/plutus.json b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/plutus.json new file mode 100644 index 00000000..ebc0bcfa --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/build/packages/sidan-lab-vodka/plutus.json @@ -0,0 +1,14 @@ +{ + "preamble": { + "title": "sidan-lab/vodka", + "description": "Aiken utils for project 'sidan-lab/vodka", + "version": "0.1.13", + "plutusVersion": "v3", + "compiler": { + "name": "Aiken", + "version": "v1.1.9+2217206" + }, + "license": "Apache-2.0" + }, + "validators": [] +} \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/lib/types.ak b/src/components/multisig/proxy/aiken-workspace/lib/types.ak new file mode 100644 index 00000000..3084fe32 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/lib/types.ak @@ -0,0 +1,9 @@ +pub type MintPolarity { + RMint + RBurn +} + +pub type ProxyRedeemer { + SpendFunds + RemoveEmptyInstance +} diff --git a/src/components/multisig/proxy/aiken-workspace/plutus.json b/src/components/multisig/proxy/aiken-workspace/plutus.json new file mode 100644 index 00000000..ee53388c --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/plutus.json @@ -0,0 +1,163 @@ +{ + "preamble": { + "title": "mesh/proxy", + "description": "Aiken contracts for project 'aiken-proxy'", + "version": "0.0.0", + "plutusVersion": "v3", + "compiler": { + "name": "Aiken", + "version": "v1.1.17+c3a7fba" + }, + "license": "Apache-2.0" + }, + "validators": [ + { + "title": "auth_token/mint.auth_token.mint", + "redeemer": { + "title": "redeemer", + "schema": { + "$ref": "#/definitions/types~1MintPolarity" + } + }, + "parameters": [ + { + "title": "utxo_ref", + "schema": { + "$ref": "#/definitions/cardano~1transaction~1OutputReference" + } + } + ], + "compiledCode": 
"59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", + "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" + }, + { + "title": "auth_token/mint.auth_token.else", + "redeemer": { + "schema": {} + }, + "parameters": [ + { + "title": "utxo_ref", + "schema": { + "$ref": "#/definitions/cardano~1transaction~1OutputReference" + } + } + ], + "compiledCode": "59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", + "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" + }, + { + "title": "proxy/spend.proxy.spend", + "datum": { + "title": "_datum", + "schema": { + "$ref": "#/definitions/Data" + } + }, + "redeemer": { + "title": "redeemer", + "schema": { + "$ref": "#/definitions/types~1ProxyRedeemer" + } + }, + "parameters": [ + { + "title": "auth_token", + "schema": { + "$ref": "#/definitions/cardano~1assets~1PolicyId" + } + } + ], + "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", + "hash": 
"0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" + }, + { + "title": "proxy/spend.proxy.else", + "redeemer": { + "schema": {} + }, + "parameters": [ + { + "title": "auth_token", + "schema": { + "$ref": "#/definitions/cardano~1assets~1PolicyId" + } + } + ], + "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", + "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" + } + ], + "definitions": { + "ByteArray": { + "title": "ByteArray", + "dataType": "bytes" + }, + "Data": { + "title": "Data", + "description": "Any Plutus data." + }, + "Int": { + "dataType": "integer" + }, + "cardano/assets/PolicyId": { + "title": "PolicyId", + "dataType": "bytes" + }, + "cardano/transaction/OutputReference": { + "title": "OutputReference", + "description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", + "anyOf": [ + { + "title": "OutputReference", + "dataType": "constructor", + "index": 0, + "fields": [ + { + "title": "transaction_id", + "$ref": "#/definitions/ByteArray" + }, + { + "title": "output_index", + "$ref": "#/definitions/Int" + } + ] + } + ] + }, + "types/MintPolarity": { + "title": "MintPolarity", + "anyOf": [ + { + "title": "RMint", + "dataType": "constructor", + "index": 0, + "fields": [] + }, + { + "title": "RBurn", + "dataType": "constructor", + "index": 1, + "fields": [] + } + ] + }, + "types/ProxyRedeemer": { + "title": "ProxyRedeemer", + "anyOf": [ + { + "title": "SpendFunds", + "dataType": "constructor", + "index": 0, + "fields": [] + }, + { + "title": "RemoveEmptyInstance", + "dataType": "constructor", + "index": 1, + "fields": [] + } + ] + } + } +} \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/specs/1_auth_tokens.md b/src/components/multisig/proxy/aiken-workspace/specs/1_auth_tokens.md new file mode 100644 index 00000000..78afb6a1 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/specs/1_auth_tokens.md @@ -0,0 +1,15 @@ +# Auth Tokens - One Shot + +## Parameter + +- `utxo_ref`: UTxO to be spent at minting + +## User Action + +1. Mint - Redeemer `RMint` + + - Transaction hash as parameterized is included in input + +2. 
Burn - Redeemer `RBurn` + + - The current policy id only has a negative minting value in the transaction body. diff --git a/src/components/multisig/proxy/aiken-workspace/specs/2_proxy.md b/src/components/multisig/proxy/aiken-workspace/specs/2_proxy.md new file mode 100644 index 00000000..1f9abb9d --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/specs/2_proxy.md @@ -0,0 +1,19 @@ +# Specification - Proxy + +## Parameter + +- `auth_token`: The policy id of `AuthToken` + +## User Action + +1. SpendFunds + +2. RegisterDrep + +3. DeregisterDrep + +4. VoteAsDrep + +5. RemoveEmptyInstance + + - `auth_token` from the current input is burnt diff --git a/src/components/multisig/proxy/aiken-workspace/specs/_scripts.md b/src/components/multisig/proxy/aiken-workspace/specs/_scripts.md new file mode 100644 index 00000000..c44c42f3 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/specs/_scripts.md @@ -0,0 +1,20 @@ +# Aiken Proxy + +## 1. Auth Tokens + +The tokens are held in a native script multisig wallet and have to be included in every transaction. + + +## 2. Proxy + +The validator that represents the actual treasury / DRep. + +## Param dependency tree + +1. First layer + + - `auth_tokens` - `utxo_ref` + +2. Second layer + + - `proxy` - param `auth_tokens` diff --git a/src/components/multisig/proxy/aiken-workspace/specs/application_setup_doc.md b/src/components/multisig/proxy/aiken-workspace/specs/application_setup_doc.md new file mode 100644 index 00000000..743db0a5 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/specs/application_setup_doc.md @@ -0,0 +1,13 @@ +# Application Setup Documentation + +## Setup + +There are 2 steps to setting up the application: + +1. Minting `auth_token`, a one-time minting policy with an empty token name and a quantity decided by the user. + + - Validation: 1.1 + +2. Sending the `auth_token` to the owner multisig + + - Validation: N/A \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/specs/user_action_doc.md b/src/components/multisig/proxy/aiken-workspace/specs/user_action_doc.md new file mode 100644 index 00000000..dcb542e7 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/specs/user_action_doc.md @@ -0,0 +1,6 @@ +# User Actions Documentation + +## Multisig Users + + + + diff --git a/src/components/multisig/proxy/aiken-workspace/validators/auth_token/mint.ak b/src/components/multisig/proxy/aiken-workspace/validators/auth_token/mint.ak new file mode 100644 index 00000000..11e14cf2 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/validators/auth_token/mint.ak @@ -0,0 +1,27 @@ +use aiken/collection/dict +use aiken/collection/list +use cardano/assets.{PolicyId} +use cardano/transaction.{OutputReference, Transaction} +use types.{MintPolarity, RBurn, RMint} + +validator auth_token(utxo_ref: OutputReference) { + mint(redeemer: MintPolarity, policy_id: PolicyId, self: Transaction) { + expect [Pair(_asset_name, quantity)] = + self.mint + |> assets.tokens(policy_id) + |> dict.to_pairs() + let Transaction { inputs, .. } = self + when redeemer is { + RMint -> { + let is_output_consumed = + list.any(inputs, fn(input) { input.output_reference == utxo_ref }) + is_output_consumed?
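+ // and exactly 10 tokens under this policy (a single asset name, per the expect above) must be minted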
&& quantity == 10 + } + RBurn -> quantity == -10 + } + } + + else(_) { + fail + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak new file mode 100644 index 00000000..8cc68e1a --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak @@ -0,0 +1,34 @@ +use aiken/collection/list +use cardano/assets.{PolicyId} +use cardano/transaction.{OutputReference, Transaction} +use cocktail.{only_minted_token, outputs_with_policy} +use types.{ProxyRedeemer, RemoveEmptyInstance, SpendFunds} + +validator proxy(auth_token: PolicyId) { + spend( + _datum: Option, + redeemer: ProxyRedeemer, + _input: OutputReference, + self: Transaction, + ) { + let Transaction { mint, outputs, .. } = self + + when redeemer is { + // check if one auth token is moved in the transaction + SpendFunds -> { + // Check if any token from the auth_token policy is present in the outputs + // This means the token is being moved/transferred + let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) + // The token is considered "moved" if any auth token appears in outputs + // (meaning it's being transferred somewhere) + list.length(auth_tokens_in_outputs) > 0 + } + + RemoveEmptyInstance -> only_minted_token(mint, auth_token, "", -10)? + } + } + + else(_) { + fail + } +} diff --git a/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak new file mode 100644 index 00000000..b113dd1c --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak @@ -0,0 +1,63 @@ +use auth_token/mint as auth_token_mint +use cardano/assets.{add, from_asset, zero} +use cardano/transaction.{Transaction, placeholder} +use mocktail.{ + complete, mint, mock_policy_id, mock_pub_key_address, mock_tx_hash, + mock_utxo_ref, mocktail_tx, tx_in, +} +use tests/utils.{mock_auth_token} +use types.{RBurn, RMint} + +test s1_mint_success_mint() { + let redeemer = RMint + let input_utxo = mock_utxo_ref(0, 1) + let policy_id = mock_auth_token + + let tx = + mocktail_tx() + |> mint(True, 1, policy_id, "") + |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) + |> complete() + + auth_token_mint.auth_token.mint(input_utxo, redeemer, policy_id, tx) +} + +test s1_mint_fail_mint_no_utxo_ref_supply() { + let redeemer = RMint + let policy_id = mock_auth_token + + let tx = + mocktail_tx() + |> mint(True, 1, policy_id, "") + |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) + |> complete() + !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_success_burn() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", -1) } + auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_success_burn_with_other_minting() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = + Transaction { + ..placeholder, + mint: from_asset(policy_id, "", -1) |> add(mock_policy_id(999), "", 1), + } + auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} + +test s1_mint_fail_burn_with_mint() { + let redeemer = RBurn + let policy_id = mock_auth_token + + let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", 1) } + 
!auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) +} diff --git a/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak new file mode 100644 index 00000000..05e836fc --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak @@ -0,0 +1,65 @@ +use cardano/assets.{add, from_lovelace} +use mocktail.{ + complete, mint, mock_policy_id, mock_pub_key_address, mock_utxo_ref, + mocktail_tx, tx_out, +} +use proxy/spend as proxy_spend +use types.{RemoveEmptyInstance, SpendFunds} + +const auth_token = mock_policy_id(0) + +// SpendFunds: succeeds when any token of auth_token policy appears in outputs +test p_proxy_spend_success_spend_funds_when_auth_token_in_outputs() { + let value = from_lovelace(2_000_000) |> add(auth_token, "", 1) + let tx = + mocktail_tx() + |> tx_out(True, mock_pub_key_address(0, None), value) + |> complete() + + proxy_spend.proxy.spend(auth_token, None, SpendFunds, mock_utxo_ref(0, 0), tx) +} + +// SpendFunds: fails when no auth_token policy appears in outputs +test p_proxy_spend_fail_spend_funds_when_no_auth_token_in_outputs() { + let value = from_lovelace(2_000_000) + let tx = + mocktail_tx() + |> tx_out(True, mock_pub_key_address(1, None), value) + |> complete() + + !proxy_spend.proxy.spend( + auth_token, + None, + SpendFunds, + mock_utxo_ref(0, 0), + tx, + ) +} + +// RemoveEmptyInstance: succeeds when exact burn of -10 under auth_token policy +test p_proxy_spend_success_remove_empty_instance_with_auth_burnt() { + let tx = + mocktail_tx() + |> mint(True, -10, auth_token, "") + |> complete() + + proxy_spend.proxy.spend( + auth_token, + None, + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} + +// RemoveEmptyInstance: fails when no burn occurs +test p_proxy_spend_fail_remove_empty_instance_without_burn() { + let tx = mocktail_tx() |> complete() + !proxy_spend.proxy.spend( + auth_token, + None, + RemoveEmptyInstance, + mock_utxo_ref(0, 0), + tx, + ) +} diff --git a/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak new file mode 100644 index 00000000..c2ee6a99 --- /dev/null +++ b/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak @@ -0,0 +1,64 @@ +use cardano/address.{from_script} +use mocktail.{ + mock_policy_id, mock_pub_key_hash, mock_script_hash, + mock_script_stake_key_hash, +} +use types.{CrowdfundDatum} + +pub const mock_auth_token = mock_policy_id(0) + +pub const mock_share_token = mock_policy_id(1) + +pub const mock_completion_script = mock_script_hash(0) + +pub const mock_crowdfund_spend_script_hash = mock_script_hash(1) + +pub const mock_crowdfund_stake_script_hash = mock_script_stake_key_hash(0) + +pub const mock_crowdfund_address = from_script(mock_crowdfund_spend_script_hash) + +pub const mock_fee_address = from_script("fee_address") + +pub const mock_fundraise_target = 100000000000 + +pub const mock_deadline = 1750735607 + +pub const mock_expiry_buffer = 3600 * 24 + +pub const mock_min_charge = 10 + +pub fn mock_crowdfund_datum( + current_fundraised_amount: Int, + allow_over_subscription: Bool, +) { + CrowdfundDatum { + completion_script: mock_completion_script, + share_token: mock_share_token, + crowdfund_address: mock_crowdfund_address, + fundraise_target: mock_fundraise_target, + current_fundraised_amount, + allow_over_subscription, + deadline: mock_deadline, + 
expiry_buffer: mock_expiry_buffer, + fee_address: mock_fee_address, + min_charge: mock_min_charge, + } +} + +pub const mock_current_fundraised_amount = 2000000 + +pub const mock_extra_fundraised_amount = 4000000 + +pub const mock_contribute_less_than_min_fundraised_amount = 1999999 + +pub const mock_contribute_min_fundraised_amount = 2000000 + +pub const mock_contribute_over_fundraised_amount = 100000000001 + +pub const auth_allow_over_subscription = + mock_crowdfund_datum(mock_current_fundraised_amount, True) + +pub const auth_not_allow_over_subscription = + mock_crowdfund_datum(mock_current_fundraised_amount, False) + +pub const mock_proposer_key_hash = mock_pub_key_hash(0) diff --git a/src/components/multisig/common.ts b/src/components/multisig/proxy/common.ts similarity index 100% rename from src/components/multisig/common.ts rename to src/components/multisig/proxy/common.ts diff --git a/src/components/multisig/proxy/index.ts b/src/components/multisig/proxy/index.ts new file mode 100644 index 00000000..90e9fd4e --- /dev/null +++ b/src/components/multisig/proxy/index.ts @@ -0,0 +1 @@ +export * from "./offchain"; diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts new file mode 100644 index 00000000..67d81ea1 --- /dev/null +++ b/src/components/multisig/proxy/offchain.ts @@ -0,0 +1,234 @@ +import { + AssetMetadata, + conStr0, + Data, + integer, + mConStr0, + mOutputReference, + mPubKeyAddress, + stringToHex, + } from "@meshsdk/common"; + import { + deserializeAddress, + resolveScriptHash, + serializeAddressObj, + serializePlutusScript, + UTxO, + applyCborEncoding, + applyParamsToScript, + } from "@meshsdk/core"; + import { + parseDatumCbor + } from "@meshsdk/core-cst"; + + import { MeshTxInitiator, MeshTxInitiatorInput } from "./common"; + import blueprint from "./aiken-workspace/plutus.json"; + + + +/** + * Mesh Proxy contract class + * + * This proxy script locks funds at a script address whose UTxOs can only be spent when an auth token of the associated policy is moved (appears in the outputs) in the same transaction. + * + * To facilitate this, the setup step uses a one-shot minting policy, parameterized by a chosen UTxO, to mint 10 auth tokens with an empty token name, which are then sent to the owner multisig. The proxy spending validator is in turn parameterized by that auth token policy id. + * + * When an instance is no longer needed, burning the 10 auth tokens satisfies the RemoveEmptyInstance redeemer and lets the remaining proxy UTxO be removed.
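+ *
+ * A minimal usage sketch (`initiatorInput` is a placeholder for an existing `MeshTxInitiatorInput`,
+ * and the `paramUtxo` values stand in for the UTxO recorded by `setupProxy`):
+ *
+ * ```typescript
+ * const contract = new MeshProxyContract(initiatorInput, {
+ *   paramUtxo: { txHash: "<tx hash of the setup transaction>", outputIndex: 0 },
+ * });
+ * // With paramUtxo supplied, the constructor derives and caches the proxy address.
+ * const proxyAddress = contract.proxyAddress;
+ * const authTokenPolicyId = contract.getAuthTokenPolicyId();
+ * ```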
+ */ +export class MeshProxyContract { + paramUtxo: UTxO["input"] = { outputIndex: 0, txHash: "" }; + proxyAddress?: string; + stakeCredential?: string | undefined; + networkId: number; + + getAuthTokenCbor = () => { + return applyParamsToScript(blueprint.validators[0]!.compiledCode, [ + mOutputReference(this.paramUtxo.txHash, this.paramUtxo.outputIndex), + ]); + }; + getAuthTokenPolicyId = () => { + return resolveScriptHash(this.getAuthTokenCbor(), "V3"); + }; + + getProxyCbor = () => { + const authTokenPolicyId = this.getAuthTokenPolicyId(); + return applyParamsToScript(blueprint.validators[2]!.compiledCode, [ + authTokenPolicyId, + ]); + }; + + setProxyAddress = () => { + const proxyAddress = serializePlutusScript( + { + code: this.getProxyCbor(), + version: "V3", + }, + this.stakeCredential, + this.networkId, + ).address; + this.proxyAddress = proxyAddress; + return proxyAddress; + }; + + constructor( + inputs: MeshTxInitiatorInput, + contract: { + paramUtxo?: UTxO["input"]; + }, + ) { + this.stakeCredential = inputs.stakeCredential; + this.networkId = inputs.networkId ? inputs.networkId : 0; + // Set the proxyAddress if paramUtxo is provided + if (contract.paramUtxo) { + this.paramUtxo = contract.paramUtxo; + this.setProxyAddress(); + } + } + +/** + * Set up a proxy address with fixed amount of 10 auth tokens, that will be sent to the owner multisig + * Moving an auth token unlocks the proxy address. + * + * @returns - Transaction hex to be signed by the owner multisig + * + * @example + * ```typescript + * const { tx, paramUtxo } = await contract.setupProxy(); + * ``` + */ + setupProxy = async () => { + const { utxos, collateral, walletAddress } = + await this.getWalletInfoForTx(); + + //look for, get and set a paramUtxo for minting the AuthToken + if (utxos?.length <= 0) { + throw new Error("No UTxOs found"); + } + const paramUtxo = utxos[0]!; + this.paramUtxo = paramUtxo.input; + + //Set proxyAddress depending on the paramUtxo + const proxyAddress = this.setProxyAddress(); + if (!proxyAddress) { + throw new Error("Proxy address not set"); + } + + //prepare AuthToken mint + const policyId = this.getAuthTokenPolicyId(); + const tokenName = ""; + + // Try completing the transaction step by step + const tx = this.mesh + .txIn( + paramUtxo.input.txHash, + paramUtxo.input.outputIndex, + paramUtxo.output.amount, + paramUtxo.output.address, + ) + .mintPlutusScriptV3() + .mint("10", policyId, tokenName) + .mintingScript(this.getAuthTokenCbor()) + .mintRedeemerValue(mConStr0([])) + .txOut(walletAddress, [{ unit: policyId, quantity: "10" }]) + .txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ) + .changeAddress(walletAddress) + .selectUtxosFrom(utxos); + + const txHex = await tx.complete(); + + return { + tx: txHex, + paramUtxo: paramUtxo.input, + authTokenId: policyId, + proxyAddress: proxyAddress, + }; + + }; + + + /** + * Mint NFT token with an automatically incremented index, which increases by one for each newly minted NFT. 
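+ *
+ * Note: this method relies on oracle helpers (`getOracleData`, `getOracleCbor`, `getNFTCbor`,
+ * `oracleAddress`, `collectionName`, and the `OracleDatum` type) that are expected to be
+ * provided alongside this class.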
+ * @param assetMetadata - Asset metadata + * @returns - Transaction hex + * + * @example + * ```typescript + * const assetMetadata = { + * ...demoAssetMetadata, + * name: `Mesh Token ${oracleData.nftIndex}`, + * }; + * const tx = await contract.mintPlutusNFT(assetMetadata); + * ``` + */ + mintPlutusNFT = async (assetMetadata?: AssetMetadata) => { + const { utxos, collateral, walletAddress } = + await this.getWalletInfoForTx(); + if (utxos?.length <= 0) { + throw new Error("No UTxOs found"); + } + + const { + nftIndex, + policyId, + lovelacePrice, + oracleUtxo, + oracleNftPolicyId, + feeCollectorAddress, + feeCollectorAddressObj, + } = await this.getOracleData(); + + const tokenName = `${this.collectionName} (${nftIndex})`; + const tokenNameHex = stringToHex(tokenName); + + const updatedOracleDatum: OracleDatum = conStr0([ + integer((nftIndex as number) + 1), + integer(lovelacePrice), + feeCollectorAddressObj, + ]); + + const tx = this.mesh + .spendingPlutusScriptV3() + .txIn( + oracleUtxo.input.txHash, + oracleUtxo.input.outputIndex, + oracleUtxo.output.amount, + oracleUtxo.output.address, + ) + .txInRedeemerValue(mConStr0([])) + .txInScript(this.getOracleCbor()) + .txInInlineDatumPresent() + .txOut(this.oracleAddress, [{ unit: oracleNftPolicyId, quantity: "1" }]) + .txOutInlineDatumValue(updatedOracleDatum, "JSON") + .mintPlutusScriptV3() + .mint("1", policyId, tokenNameHex) + .mintingScript(this.getNFTCbor()); + + if (assetMetadata) { + const metadata = { [policyId]: { [tokenName]: { ...assetMetadata } } }; + tx.metadataValue(721, metadata); + } + + tx.mintRedeemerValue(mConStr0([])) + .txOut(feeCollectorAddress, [ + { unit: "lovelace", quantity: lovelacePrice.toString() }, + ]) + .txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ) + .changeAddress(walletAddress) + .selectUtxosFrom(utxos); + + const txHex = await tx.complete(); + return txHex; + }; + + +} \ No newline at end of file From 5329bb0f9c6ead3899d88c6b3de0f1fd9d671a54 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Fri, 10 Oct 2025 12:50:41 +0200 Subject: [PATCH 08/15] s07 --- prisma/schema.prisma | 12 + src/components/multisig/aiken-proxy/README.md | 1 - .../multisig/aiken-proxy/aiken.lock | 26 - .../multisig/aiken-proxy/aiken.toml | 23 - .../aiken-proxy/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 - .../packages/aiken-lang-stdlib/.gitattributes | 2 - .../workflows/continuous-integration.yml | 64 - .../packages/aiken-lang-stdlib/.gitignore | 3 - .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ---------- .../build/packages/aiken-lang-stdlib/LICENSE | 201 --- .../packages/aiken-lang-stdlib/README.md | 71 - .../packages/aiken-lang-stdlib/aiken.lock | 16 - .../packages/aiken-lang-stdlib/aiken.toml | 15 - .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ---- .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ---- .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 - .../lib/aiken/collection/dict.ak | 1174 -------------- .../lib/aiken/collection/list.ak | 1411 ----------------- .../lib/aiken/collection/pairs.ak | 833 ---------- .../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 -- .../lib/aiken/crypto/bls12_381/g1.ak | 115 -- .../lib/aiken/crypto/bls12_381/g2.ak | 124 -- .../lib/aiken/crypto/bls12_381/scalar.ak | 255 --- .../aiken-lang-stdlib/lib/aiken/interval.ak | 680 -------- .../aiken-lang-stdlib/lib/aiken/math.ak | 424 ----- .../lib/aiken/math/rational.ak | 871 ---------- .../lib/aiken/math/rational.tests.ak | 65 - 
.../aiken-lang-stdlib/lib/aiken/option.ak | 312 ---- .../lib/aiken/primitive/bytearray.ak | 668 -------- .../lib/aiken/primitive/int.ak | 156 -- .../lib/aiken/primitive/string.ak | 139 -- .../aiken-lang-stdlib/lib/cardano/address.ak | 86 - .../lib/cardano/address/credential.ak | 30 - .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 ----------- .../lib/cardano/certificate.ak | 93 -- .../lib/cardano/governance.ak | 109 -- .../cardano/governance/protocol_parameters.ak | 360 ----- .../lib/cardano/governance/voter.ak | 62 - .../lib/cardano/script_context.ak | 62 - .../lib/cardano/transaction.ak | 225 --- .../cardano/transaction/output_reference.ak | 23 - .../lib/cardano/transaction/script_purpose.ak | 126 -- .../aiken-proxy/build/packages/packages.toml | 9 - .../.github/workflows/build_docs.yml | 50 - .../.github/workflows/release.yml | 80 - .../build/packages/sidan-lab-vodka/.gitignore | 16 - .../build/packages/sidan-lab-vodka/LICENSE | 201 --- .../build/packages/sidan-lab-vodka/README.md | 136 -- .../build/packages/sidan-lab-vodka/aiken.lock | 15 - .../build/packages/sidan-lab-vodka/aiken.toml | 18 - .../packages/sidan-lab-vodka/assets/logo.png | Bin 74727 -> 0 bytes .../sidan-lab-vodka/build/aiken-compile.lock | 0 .../packages/aiken-lang-stdlib/.editorconfig | 9 - .../packages/aiken-lang-stdlib/.gitattributes | 2 - .../workflows/continuous-integration.yml | 64 - .../packages/aiken-lang-stdlib/.gitignore | 3 - .../packages/aiken-lang-stdlib/CHANGELOG.md | 805 ---------- .../build/packages/aiken-lang-stdlib/LICENSE | 201 --- .../packages/aiken-lang-stdlib/README.md | 71 - .../packages/aiken-lang-stdlib/aiken.lock | 16 - .../packages/aiken-lang-stdlib/aiken.toml | 15 - .../aiken-lang-stdlib/lib/aiken/cbor.ak | 293 ---- .../aiken-lang-stdlib/lib/aiken/cbor.test.ak | 297 ---- .../aiken-lang-stdlib/lib/aiken/collection.ak | 4 - .../lib/aiken/collection/dict.ak | 1174 -------------- .../lib/aiken/collection/list.ak | 1411 ----------------- .../lib/aiken/collection/pairs.ak | 833 ---------- .../aiken-lang-stdlib/lib/aiken/crypto.ak | 147 -- .../lib/aiken/crypto/bls12_381/g1.ak | 115 -- .../lib/aiken/crypto/bls12_381/g2.ak | 124 -- .../lib/aiken/crypto/bls12_381/scalar.ak | 255 --- .../aiken-lang-stdlib/lib/aiken/interval.ak | 680 -------- .../aiken-lang-stdlib/lib/aiken/math.ak | 424 ----- .../lib/aiken/math/rational.ak | 871 ---------- .../lib/aiken/math/rational.tests.ak | 65 - .../aiken-lang-stdlib/lib/aiken/option.ak | 312 ---- .../lib/aiken/primitive/bytearray.ak | 668 -------- .../lib/aiken/primitive/int.ak | 156 -- .../lib/aiken/primitive/string.ak | 139 -- .../aiken-lang-stdlib/lib/cardano/address.ak | 86 - .../lib/cardano/address/credential.ak | 30 - .../aiken-lang-stdlib/lib/cardano/assets.ak | 920 ----------- .../lib/cardano/certificate.ak | 93 -- .../lib/cardano/governance.ak | 109 -- .../cardano/governance/protocol_parameters.ak | 360 ----- .../lib/cardano/governance/voter.ak | 62 - .../lib/cardano/script_context.ak | 62 - .../lib/cardano/transaction.ak | 225 --- .../cardano/transaction/output_reference.ak | 23 - .../lib/cardano/transaction/script_purpose.ak | 126 -- .../build/packages/packages.toml | 4 - .../build/packages/sidan-lab-vodka/lib/cip.ak | 73 - .../packages/sidan-lab-vodka/lib/cocktail.ak | 161 -- .../lib/cocktail/vodka_address.ak | 56 - .../lib/cocktail/vodka_converter.ak | 40 - .../lib/cocktail/vodka_extra_signatories.ak | 46 - .../lib/cocktail/vodka_inputs.ak | 123 -- .../lib/cocktail/vodka_mints.ak | 68 - .../lib/cocktail/vodka_outputs.ak | 90 -- 
.../lib/cocktail/vodka_redeemers.ak | 45 - .../lib/cocktail/vodka_validity_range.ak | 34 - .../lib/cocktail/vodka_value.ak | 80 - .../packages/sidan-lab-vodka/lib/mocktail.ak | 776 --------- .../lib/mocktail/virgin_address.ak | 57 - .../lib/mocktail/virgin_key_hash.ak | 47 - .../lib/mocktail/virgin_output_reference.ak | 16 - .../lib/mocktail/virgin_outputs.ak | 30 - .../lib/mocktail/virgin_validity_range.ak | 28 - .../packages/sidan-lab-vodka/plutus.json | 14 - .../multisig/aiken-proxy/lib/types.ak | 9 - .../multisig/aiken-proxy/plutus.json | 163 -- .../aiken-proxy/specs/1_auth_tokens.md | 15 - .../multisig/aiken-proxy/specs/2_proxy.md | 19 - .../multisig/aiken-proxy/specs/_scripts.md | 20 - .../specs/application_setup_doc.md | 13 - .../aiken-proxy/specs/user_action_doc.md | 6 - .../aiken-proxy/validators/auth_token/mint.ak | 27 - .../aiken-proxy/validators/proxy/spend.ak | 34 - .../validators/tests/auth_token/mint.ak | 63 - .../validators/tests/proxy/spend.ak | 65 - .../aiken-proxy/validators/tests/utils.ak | 64 - .../proxy/aiken-workspace/plutus.json | 40 +- .../aiken-workspace/validators/proxy/spend.ak | 8 +- .../validators/tests/auth_token/mint.ak | 14 +- .../aiken-workspace/validators/tests/utils.ak | 64 - src/components/multisig/proxy/offchain.ts | 259 +-- src/components/pages/wallet/info/index.tsx | 2 + src/server/api/root.ts | 2 + src/server/api/routers/proxy.ts | 69 + 130 files changed, 292 insertions(+), 25539 deletions(-) delete mode 100644 src/components/multisig/aiken-proxy/README.md delete mode 100644 src/components/multisig/aiken-proxy/aiken.lock delete mode 100644 src/components/multisig/aiken-proxy/aiken.toml delete mode 100644 src/components/multisig/aiken-proxy/build/aiken-compile.lock delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak delete mode 100644 
src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/packages.toml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/assets/logo.png delete mode 100644 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/aiken-compile.lock delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak delete mode 100644 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak delete mode 100644 
src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak delete mode 100644 src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json delete mode 100644 src/components/multisig/aiken-proxy/lib/types.ak delete mode 100644 src/components/multisig/aiken-proxy/plutus.json delete mode 100644 src/components/multisig/aiken-proxy/specs/1_auth_tokens.md delete mode 100644 src/components/multisig/aiken-proxy/specs/2_proxy.md delete mode 100644 src/components/multisig/aiken-proxy/specs/_scripts.md delete mode 100644 src/components/multisig/aiken-proxy/specs/application_setup_doc.md delete mode 100644 src/components/multisig/aiken-proxy/specs/user_action_doc.md delete mode 100644 src/components/multisig/aiken-proxy/validators/auth_token/mint.ak delete mode 100644 src/components/multisig/aiken-proxy/validators/proxy/spend.ak delete mode 100644 src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak delete mode 100644 src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak delete mode 100644 src/components/multisig/aiken-proxy/validators/tests/utils.ak delete mode 100644 src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak create mode 100644 src/server/api/routers/proxy.ts diff --git a/prisma/schema.prisma b/prisma/schema.prisma index c9f17c75..cf6e4de0 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -94,3 +94,15 @@ model Ballot { type Int createdAt DateTime @default(now()) } + +model Proxy { + id String @id @default(cuid()) + walletId String @unique + proxyAddress String + authTokenId String + paramUtxo String + description String? 
+ isActive Boolean @default(true) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt +} diff --git a/src/components/multisig/aiken-proxy/README.md b/src/components/multisig/aiken-proxy/README.md deleted file mode 100644 index e76d2249..00000000 --- a/src/components/multisig/aiken-proxy/README.md +++ /dev/null @@ -1 +0,0 @@ -Implement in core mesh repo `mesh-contract` diff --git a/src/components/multisig/aiken-proxy/aiken.lock b/src/components/multisig/aiken-proxy/aiken.lock deleted file mode 100644 index 2e65bd16..00000000 --- a/src/components/multisig/aiken-proxy/aiken.lock +++ /dev/null @@ -1,26 +0,0 @@ -# This file was generated by Aiken -# You typically do not need to edit this file - -[[requirements]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" - -[[requirements]] -name = "sidan-lab/vodka" -version = "0.1.13" -source = "github" - -[[packages]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -requirements = [] -source = "github" - -[[packages]] -name = "sidan-lab/vodka" -version = "0.1.13" -requirements = [] -source = "github" - -[etags] diff --git a/src/components/multisig/aiken-proxy/aiken.toml b/src/components/multisig/aiken-proxy/aiken.toml deleted file mode 100644 index c8b0955c..00000000 --- a/src/components/multisig/aiken-proxy/aiken.toml +++ /dev/null @@ -1,23 +0,0 @@ -name = "mesh/proxy" -version = "0.0.0" -compiler = "v1.1.17" -plutus = "v3" -license = "Apache-2.0" -description = "Aiken contracts for project 'aiken-proxy'" - -[repository] -user = "QS" -project = "multisig" -platform = "github" - -[[dependencies]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" - -[[dependencies]] -name = "sidan-lab/vodka" -version = "0.1.13" -source = "github" - -[config] diff --git a/src/components/multisig/aiken-proxy/build/aiken-compile.lock b/src/components/multisig/aiken-proxy/build/aiken-compile.lock deleted file mode 100644 index e69de29b..00000000 diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig deleted file mode 100644 index 0759674c..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.editorconfig +++ /dev/null @@ -1,9 +0,0 @@ -root = true - -[*.ak] -indent_style = space -indent_size = 2 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes deleted file mode 100644 index 99fefcf4..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# Temp hack to get some syntax highlighting on github -*.ak linguist-language=Gleam diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml deleted file mode 100644 index b0081ac7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Continuous Integration - -on: - workflow_dispatch: - push: - branches: ["main"] - tags: ["*.*.*"] - pull_request: - branches: ["main"] - -env: - CARGO_TERM_COLOR: always - -permissions: - contents: read - pages: 
write - id-token: write - -concurrency: - group: "pages" - cancel-in-progress: true - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: 📥 Checkout repository - uses: actions/checkout@v3 - - - name: 🧰 Setup Pages - uses: actions/configure-pages@v2 - - - name: 🧰 Install Aiken - uses: aiken-lang/setup-aiken@v1 - with: - version: v1.1.9 - - - name: 📝 Run fmt - run: aiken fmt --check - - - name: 🔬 Run tests - run: aiken check - - - name: 📘 Generate documentation - shell: bash - working-directory: . - run: aiken docs -o docs - - - name: 📦 Upload artifact - uses: actions/upload-pages-artifact@v2 - with: - path: "docs/" - - deploy: - if: ${{ startsWith(github.ref, 'refs/tags') }} - needs: build - runs-on: ubuntu-latest - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - steps: - - name: 🚀 Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v1 diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore deleted file mode 100644 index 3a3d38e6..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -build/ -docs/ -.DS_Store \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md deleted file mode 100644 index 62345b32..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/CHANGELOG.md +++ /dev/null @@ -1,805 +0,0 @@ -# Changelog - -## v2.2.0 - 2024-12-13 - -### Added - -- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes. -- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting in pairs while specifying how to combine values on key conflict. - -## v2.1.0 - 2024-09-14 - -### Added - -- Various new helper functions: - - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects. - - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionnary in a single op. - - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes. - - [`aiken/primitive/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/primitive/math/rational.html#pow): for computing (int) powers of rational numbers. - - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): efficiently compare two value-like. - - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): extracting value subsets from parent value. - -- Comparison functions for various Cardano types: - - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials. - - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governacen/voter.html#compare): for ordering voters. 
- - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references. - - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purpose. - -- New BLS12-381 crypto modules: - - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html) - - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html) - - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html) - -### Changed - -- N/A - -### Removed - -- N/A - -## v2.0.0 - 2024-09-01 - -> [!NOTE] -> Significant performance improvements (mostly on CPU) across all boards mostly due to the integration of Plutus V3. -> ->
see benchmarks -> -> test | cpu | mem -> --- | --- | --- -> aiken/cbor.{serialise_1} | -38.20% | ±0.00% -> aiken/cbor.{serialise_2} | -38.20% | ±0.00% -> aiken/cbor.{serialise_3} | -37.25% | ±0.00% -> aiken/cbor.{serialise_4} | -41.95% | ±0.00% -> aiken/cbor.{serialise_5} | -42.77% | ±0.00% -> aiken/cbor.{serialise_6} | -42.63% | ±0.00% -> aiken/cbor.{serialise_7} | -40.51% | ±0.00% -> aiken/cbor.{serialise_8} | -37.25% | ±0.00% -> aiken/cbor.{serialise_9} | -41.95% | ±0.00% -> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% -> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% -> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% -> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% -> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% -> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% -> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% -> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% -> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% -> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% -> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% -> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% -> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% -> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% -> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% -> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% -> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% -> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% -> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% -> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% -> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% -> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% -> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% -> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% -> aiken/collection/dict.{find_1} | -20.63% | ±0.00% -> aiken/collection/dict.{find_2} | -20.43% | ±0.00% -> aiken/collection/dict.{find_3} | -22.03% | ±0.00% -> aiken/collection/dict.{find_4} | -22.53% | ±0.00% -> aiken/collection/dict.{get_1} | -20.63% | ±0.00% -> aiken/collection/dict.{get_2} | -22.72% | ±0.00% -> aiken/collection/dict.{get_3} | -23.26% | ±0.00% -> aiken/collection/dict.{get_4} | -26.91% | ±0.00% -> aiken/collection/dict.{get_5} | -26.30% | ±0.00% -> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% -> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% -> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% -> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% -> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% -> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% -> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% -> aiken/collection/dict.{size_1} | -37.90% | ±0.00% -> aiken/collection/dict.{size_2} | -32.34% | ±0.00% -> aiken/collection/dict.{size_3} | -27.97% | ±0.00% -> aiken/collection/dict.{values_1} | -20.30% | ±0.00% -> aiken/collection/dict.{values_2} | -17.58% | ±0.00% -> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% -> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% -> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% -> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% -> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% -> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% -> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% -> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% -> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% -> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% -> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% -> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% -> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% -> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% -> aiken/collection/dict.{map_1} | -19.56% | ±0.00% -> aiken/collection/dict.{map_2} | -23.66% | ±0.00% -> aiken/collection/dict.{union_1} | -17.91% | ±0.00% -> aiken/collection/dict.{union_2} | -8.67% | ±0.00% -> aiken/collection/dict.{union_3} | -22.82% | ±0.00% -> aiken/collection/dict.{union_4} | -22.77% | ±0.00% -> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% -> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% -> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% -> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% -> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% -> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% -> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% -> aiken/collection/list.{push_1} | -8.02% | ±0.00% -> aiken/collection/list.{push_2} | 1.25% | ±0.00% -> aiken/collection/list.{range_1} | -27.77% | ±0.00% -> aiken/collection/list.{range_2} | -27.39% | ±0.00% -> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% -> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% -> aiken/collection/list.{all_1} | -28.36% | ±0.00% -> aiken/collection/list.{all_2} | -27.59% | ±0.00% -> aiken/collection/list.{all_3} | -27.94% | ±0.00% -> aiken/collection/list.{any_1} | -28.23% | ±0.00% -> aiken/collection/list.{any_2} | -28.09% | ±0.00% -> aiken/collection/list.{any_3} | -26.95% | ±0.00% -> aiken/collection/list.{at_1} | -27.60% | ±0.00% -> aiken/collection/list.{at_2} | -19.96% | ±0.00% -> aiken/collection/list.{at_3} | -27.60% | ±0.00% -> aiken/collection/list.{at_4} | -20.77% | ±0.00% -> aiken/collection/list.{at_5} | -25.75% | ±0.00% -> aiken/collection/list.{count_empty} | -36.83% | ±0.00% -> aiken/collection/list.{count_all} | -32.37% | ±0.00% -> aiken/collection/list.{count_some} | -31.73% | ±0.00% -> aiken/collection/list.{count_none} | -30.44% | ±0.00% -> aiken/collection/list.{find_1} | -20.59% | ±0.00% -> aiken/collection/list.{find_2} | -25.53% | ±0.00% -> aiken/collection/list.{find_3} | -19.64% | ±0.00% -> aiken/collection/list.{has_1} | -27.88% | ±0.00% -> aiken/collection/list.{has_2} | -27.69% | ±0.00% -> aiken/collection/list.{has_3} | -26.95% | ±0.00% -> aiken/collection/list.{head_1} | -14.03% | ±0.00% -> aiken/collection/list.{head_2} | -16.90% | ±0.00% -> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% -> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% -> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% -> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% -> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% -> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% -> aiken/collection/list.{last_1} | -19.18% | ±0.00% -> aiken/collection/list.{last_2} | -16.26% | ±0.00% -> aiken/collection/list.{last_3} | -17.13% | ±0.00% -> aiken/collection/list.{length_1} | -37.90% | ±0.00% -> aiken/collection/list.{length_2} | -30.89% | ±0.00% -> aiken/collection/list.{delete_1} | -20.20% | ±0.00% -> aiken/collection/list.{delete_2} | -15.02% | ±0.00% -> aiken/collection/list.{delete_3} | -20.55% | ±0.00% -> aiken/collection/list.{delete_4} | -22.46% | ±0.00% -> aiken/collection/list.{drop_1} | -24.62% | ±0.00% -> aiken/collection/list.{drop_2} | -28.08% | ±0.00% -> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% -> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% -> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% -> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% -> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% -> aiken/collection/list.{filter_2} | -32.06% | ±0.00% -> aiken/collection/list.{filter_3} | -31.39% | ±0.00% -> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% -> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% -> aiken/collection/list.{init_1} | -19.64% | ±0.00% -> aiken/collection/list.{init_2} | -20.01% | ±0.00% -> aiken/collection/list.{init_3} | -13.72% | ±0.00% -> aiken/collection/list.{partition_1} | -14.63% | ±0.00% -> aiken/collection/list.{partition_2} | -16.85% | ±0.00% -> aiken/collection/list.{partition_3} | -16.63% | ±0.00% -> aiken/collection/list.{partition_4} | -16.87% | ±0.00% -> aiken/collection/list.{partition_5} | -22.94% | ±0.00% -> aiken/collection/list.{slice_1} | -29.08% | -2.81% -> aiken/collection/list.{slice_2} | -30.11% | -2.25% -> aiken/collection/list.{slice_3} | -30.29% | -1.46% -> aiken/collection/list.{slice_4} | -28.53% | -1.48% -> aiken/collection/list.{slice_5} | -29.73% | -1.64% -> aiken/collection/list.{slice_6} | -32.01% | -1.80% -> aiken/collection/list.{span_1} | -15.05% | ±0.00% -> aiken/collection/list.{span_2} | -18.03% | ±0.00% -> aiken/collection/list.{span_3} | -12.49% | ±0.00% -> aiken/collection/list.{span_4} | -18.13% | ±0.00% -> aiken/collection/list.{tail_1} | -8.88% | ±0.00% -> aiken/collection/list.{tail_2} | -16.90% | ±0.00% -> aiken/collection/list.{take_1} | -24.98% | ±0.00% -> aiken/collection/list.{take_2} | -24.35% | ±0.00% -> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% -> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% -> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% -> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% -> aiken/collection/list.{unique_1} | -20.20% | ±0.00% -> aiken/collection/list.{unique_2} | -24.34% | ±0.00% -> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% -> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% -> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% -> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% -> aiken/collection/list.{map_1} | -19.79% | ±0.00% -> aiken/collection/list.{map_2} | -16.75% | ±0.00% -> aiken/collection/list.{map2_1} | -20.10% | ±0.00% -> aiken/collection/list.{map2_2} | -17.46% | ±0.00% -> aiken/collection/list.{map2_3} | -15.92% | ±0.00% -> aiken/collection/list.{map3_1} | -20.39% | ±0.00% -> aiken/collection/list.{map3_2} | -19.22% | ±0.00% -> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% -> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% -> aiken/collection/list.{sort_1} | -22.31% | ±0.00% -> aiken/collection/list.{sort_2} | -17.93% | ±0.00% -> aiken/collection/list.{sort_3} | -23.09% | ±0.00% -> aiken/collection/list.{sort_4} | -20.20% | ±0.00% -> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% -> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% -> aiken/collection/list.{concat_1} | -6.56% | ±0.00% -> aiken/collection/list.{concat_2} | -11.25% | ±0.00% -> aiken/collection/list.{concat_3} | -9.35% | ±0.00% -> aiken/collection/list.{difference_1} | -24.23% | ±0.00% -> aiken/collection/list.{difference_2} | -22.59% | ±0.00% -> aiken/collection/list.{difference_3} | -10.64% | ±0.00% -> aiken/collection/list.{difference_4} | -21.68% | ±0.00% -> aiken/collection/list.{zip_1} | -20.10% | ±0.00% -> aiken/collection/list.{zip_2} | -19.17% | ±0.00% -> aiken/collection/list.{zip_3} | -10.35% | ±0.00% -> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% -> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% -> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% -> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% -> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% -> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% -> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% -> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% -> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% -> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% -> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% -> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% -> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% -> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% -> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% -> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% -> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% -> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% -> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% -> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% -> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% -> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% -> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% -> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% -> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% -> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% -> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% -> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% -> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% -> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% -> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% -> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% -> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% -> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% -> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% -> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% -> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% -> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% -> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% -> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% -> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% -> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% -> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% -> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% -> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% -> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% -> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% -> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% -> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% -> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% -> aiken/interval.{contains_1} | -21.08% | -4.01% -> aiken/interval.{contains_2} | -31.22% | -13.95% -> aiken/interval.{contains_3} | -26.80% | -10.08% -> aiken/interval.{contains_4} | -31.02% | -13.67% -> aiken/interval.{contains_5} | -32.32% | -13.59% -> aiken/interval.{contains_6} | -28.15% | -9.81% -> aiken/interval.{contains_7} | -32.11% | -13.32% -> aiken/interval.{contains_8} | -29.56% | 
-12.59% -> aiken/interval.{contains_9} | -29.68% | -12.78% -> aiken/interval.{contains_10} | -29.68% | -12.78% -> aiken/interval.{contains_11} | -35.17% | -17.77% -> aiken/interval.{contains_12} | -21.09% | -3.86% -> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% -> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% -> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% -> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% -> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% -> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% -> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% -> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% -> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% -> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% -> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% -> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% -> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% -> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% -> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% -> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% -> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% -> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% -> aiken/interval.{hull_1} | -21.51% | -0.73% -> aiken/interval.{hull_2} | -23.06% | -0.80% -> aiken/interval.{hull_3} | -22.00% | -0.86% -> aiken/interval.{intersection_1} | -21.51% | -0.73% -> aiken/interval.{intersection_2} | -21.51% | -0.73% -> aiken/interval.{intersection_3} | -26.55% | -4.65% -> aiken/interval.{intersection_4} | -26.45% | -4.51% -> aiken/interval.{intersection_5} | -22.87% | -0.76% -> aiken/interval.{intersection_6} | -19.73% | -0.98% -> aiken/math.{abs_1} | -61.39% | -21.07% -> aiken/math.{abs_2} | -70.90% | -34.84% -> aiken/math.{clamp_1} | -60.95% | -23.55% -> aiken/math.{clamp_2} | -60.95% | -23.55% -> aiken/math.{clamp_3} | -59.22% | -18.20% -> aiken/math.{gcd_test1} | -47.20% | ±0.00% -> aiken/math.{gcd_test2} | -47.81% | ±0.00% -> aiken/math.{gcd_test3} | -46.10% | ±0.00% -> aiken/math.{is_sqrt1} | -87.41% | -68.64% -> aiken/math.{is_sqrt2} | -87.41% | -68.64% -> aiken/math.{log_10_2} | -51.35% | -8.40% -> aiken/math.{log_42_2} | -51.46% | -8.24% -> aiken/math.{log_42_3} | -51.05% | -7.81% -> aiken/math.{log_5_0} | -54.05% | -12.92% -> aiken/math.{log_4_4} | -50.59% | -9.31% -> aiken/math.{log_4_43} | -49.14% | -7.28% -> aiken/math.{max_1} | -61.39% | -21.07% -> aiken/math.{max_2} | -61.39% | -21.07% -> aiken/math.{max_3} | -61.39% | -21.07% -> aiken/math.{min_1} | -61.39% | -21.07% -> aiken/math.{min_2} | -61.39% | -21.07% -> aiken/math.{min_3} | -61.39% | -21.07% -> aiken/math.{pow_3_5} | -46.34% | ±0.00% -> aiken/math.{pow_7_2} | -46.38% | ±0.00% -> aiken/math.{pow_3__4} | -43.50% | ±0.00% -> aiken/math.{pow_0_0} | -43.95% | ±0.00% -> aiken/math.{pow_513_3} | -45.80% | ±0.00% -> aiken/math.{pow_2_4} | -46.79% | ±0.00% -> aiken/math.{pow_2_42} | -46.77% | ±0.00% -> aiken/math.{pow2_neg} | -44.71% | ±0.00% -> aiken/math.{pow2_0} | -45.00% | ±0.00% -> aiken/math.{pow2_1} | -45.00% | ±0.00% -> aiken/math.{pow2_4} | -45.00% | ±0.00% -> aiken/math.{pow2_42} | -42.01% | ±0.00% -> aiken/math.{pow2_256} | -41.40% | ±0.00% -> aiken/math.{sqrt1} | -32.56% | -17.18% -> aiken/math.{sqrt2} | -32.56% | -17.18% -> aiken/math.{sqrt3} | -49.99% | -8.90% -> aiken/math.{sqrt4} | -51.76% | -3.90% -> aiken/math.{sqrt5} | -52.63% | -1.33% -> aiken/math.{sqrt6} | -28.16% | -15.41% -> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% -> aiken/math/rational.{new_1} | -22.98% | ±0.00% -> aiken/math/rational.{zero_1} | -8.08% | ±0.00% -> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% -> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% -> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% -> aiken/math/rational.{negate_1} | -15.39% | ±0.00% -> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% -> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% -> aiken/math/rational.{add_1} | -15.11% | ±0.00% -> aiken/math/rational.{add_2} | -15.11% | ±0.00% -> aiken/math/rational.{div_1} | -22.31% | -2.75% -> aiken/math/rational.{div_2} | -22.37% | -2.79% -> aiken/math/rational.{mul_1} | -13.37% | ±0.00% -> aiken/math/rational.{mul_2} | -13.37% | ±0.00% -> aiken/math/rational.{mul_3} | -26.25% | ±0.00% -> aiken/math/rational.{sub_1} | -15.11% | ±0.00% -> aiken/math/rational.{sub_2} | -15.11% | ±0.00% -> aiken/math/rational.{sub_3} | -15.11% | ±0.00% -> aiken/math/rational.{compare_1} | -21.70% | ±0.00% -> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% -> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% -> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% -> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% -> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% -> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% -> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% -> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% -> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% -> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% -> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% -> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% -> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% -> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% -> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% -> aiken/math/rational.{floor_1} | -29.49% | ±0.00% -> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% -> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% -> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% -> aiken/math/rational.{round_1} | -25.17% | ±0.00% -> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% -> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% -> aiken/option.{is_none_1} | -26.56% | ±0.00% -> aiken/option.{is_none_2} | -27.52% | ±0.00% -> aiken/option.{is_some_1} | -27.52% | ±0.00% -> aiken/option.{is_some_2} | -26.56% | ±0.00% -> aiken/option.{and_then_1} | -20.19% | ±0.00% -> aiken/option.{and_then_2} | -22.15% | ±0.00% -> aiken/option.{and_then_3} | -21.85% | ±0.00% -> aiken/option.{choice_1} | -17.11% | ±0.00% -> aiken/option.{choice_2} | -19.75% | ±0.00% -> aiken/option.{choice_3} | -18.68% | ±0.00% -> aiken/option.{flatten_1} | -12.25% | ±0.00% -> aiken/option.{flatten_2} | -15.41% | ±0.00% -> aiken/option.{flatten_3} | -19.46% | ±0.00% -> aiken/option.{flatten_4} | -14.31% | ±0.00% -> aiken/option.{map_1} | -19.89% | ±0.00% -> aiken/option.{map_2} | -18.18% | ±0.00% -> aiken/option.{map2_1} | -20.47% | ±0.00% -> aiken/option.{map2_2} | -19.93% | ±0.00% -> aiken/option.{map2_3} | -13.64% | ±0.00% -> aiken/option.{map3_1} | -20.74% | ±0.00% -> aiken/option.{map3_2} | -20.00% | ±0.00% -> aiken/option.{map3_3} | -19.90% | ±0.00% -> aiken/option.{or_try_1} | -14.36% | ±0.00% -> aiken/option.{or_try_2} | -14.36% | ±0.00% -> aiken/option.{or_else_1} | -38.16% | ±0.00% -> aiken/option.{or_else_2} | -27.62% | ±0.00% -> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% -> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% -> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% -> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% -> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% -> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% -> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% -> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% -> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% -> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% -> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% -> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% -> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% -> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% -> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% -> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% -> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% -> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% -> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% -> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% -> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% -> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% -> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% -> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% -> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% -> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% -> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% -> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% -> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% -> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% -> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% -> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% -> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% -> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% -> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% -> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% -> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% -> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% -> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% -> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% -> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% -> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% -> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% -> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% -> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% -> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% -> aiken/primitive/string.{concat_1} | -92.30% | -80.10% -> aiken/primitive/string.{concat_2} | -97.34% | -85.87% -> aiken/primitive/string.{concat_3} | -98.67% | -80.35% -> aiken/primitive/string.{join_1} | -42.87% | ±0.00% -> aiken/primitive/string.{join_2} | -37.65% | ±0.00% -> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% -> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% -> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% -> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% -> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% -> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% -> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% -> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% -> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% -> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% -> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% -> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% -> cardano/assets.{add_1} | -27.84% | ±0.00% -> cardano/assets.{add_2} | -27.56% | -0.54% -> cardano/assets.{add_3} | -26.39% | ±0.00% -> cardano/assets.{add_4} | -29.75% | -10.41% -> cardano/assets.{add_5} | -27.80% | ±0.00% -> cardano/assets.{merge_1} | -26.02% | ±0.00% -> cardano/assets.{merge_2} | -19.60% | ±0.00% -> cardano/assets.{merge_3} | -23.80% | ±0.00% -> cardano/assets.{merge_4} | -25.92% | ±0.00% -> cardano/assets.{merge_5} | -27.61% | -1.98% -> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% -> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% -> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% -> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% -> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% -> cardano/assets.{reduce_1} | -24.31% | ±0.00% -> cardano/assets.{reduce_2} | -20.89% | ±0.00% -> cardano/assets.{reduce_3} | -36.21% | ±0.00% ->
- -### Added - -- New modules covering Conway-related features (i.e. governance) - - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) - - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) - -- New primitives in `aiken/collection/pairs`: - - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) - - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) - -- New primitives in `aiken/crypto`: - - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) - - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) - -- New primitives in `aiken/math`: - - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) - -- New primitives in `aiken/primitive/bytearray`: - - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) - - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) - - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) - - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) - - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) - -- New primitives in `aiken/primitive/int`: - - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) - - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) - -- New primitives in `aiken/crypto`: - - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) - - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) - -### Changed - -- Few modules have been relocated and better organized: - - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) - - **collections** - - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) - - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) - - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) - - **primitive** - - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) - - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) - - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) - - **cardano** - - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) - - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) - - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` - - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) - -- Several zero-argument functions have been turned into top-level constants - - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty) - - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty) - - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything) - - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero) - - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero) - -- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3. - -- The `ScriptContext` type has been split out of `cardano/transaction` (originally `aiken/transaction`), moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html), and adjusted to its new form as per Plutus V3. - -- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` into `VerificationKey` and `Script` respectively (see the sketch below). - -- The functions `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last` respectively. - -- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`. - -### Removed - -- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere. - -- `MintedValue` (originally from `aiken/transaction/value`) and its associated functions are no longer needed and are, therefore, gone.
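A brief illustrative sketch (not part of the upstream changelog) of the renamed `Credential` constructors in use. The `payment_credential` field name on `Address` and the unqualified constructor imports are assumptions based on the `cardano/address` module referenced above, not verbatim upstream code:

```aiken
use cardano/address.{Address, Script, VerificationKey}

// Hypothetical helper: decide whether an address is locked by a script.
// Assumes `Address` exposes a `payment_credential` field of type `Credential`.
fn is_script_locked(address: Address) -> Bool {
  when address.payment_credential is {
    Script(_) -> True
    VerificationKey(_) -> False
  }
}
```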
- -## v1.9.0 - 2024-05-24 - -### Added - -- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`). - -### Changed - -- **BREAKING-CHANGE** - Specialized all `Dict` keys to `ByteArray`, thus removing the need to pass an extra comparison function to many functions. `Dict` is, however, still specialized with a phantom type for its keys.
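As a small, hypothetical sketch of what this looks like in practice (shown with the current `aiken/collection/dict` module path; the exact `insert`/`get` signatures are assumed rather than quoted from upstream):

```aiken
use aiken/collection/dict

// Keys are plain ByteArrays, so no comparison function is passed anywhere.
test dict_bytearray_keys_sketch() {
  let balances =
    dict.empty
      |> dict.insert("alice", 100)
      |> dict.insert("bob", 42)
  dict.get(balances, "bob") == Some(42)
}
```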
- **BREAKING-CHANGE** - A few functions from `Dict` have been renamed for consistency: - - `from_list` -> `from_pairs` - - `from_ascending_list` -> `from_ascending_pairs` - - `to_list` -> `to_pairs` - -### Removed - -N/A - -## v1.8.0 - 2024-03-28 - -### Added - -- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements. - -- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a `Value` while enforcing the invariants expected of `Value`. - -- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`. - -### Changed - -- Disclaimers in the documentation of [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding. - -### Removed - -N/A - -## v1.7.0 - 2023-11-07 - -### Added - -- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a value's index in a list. -- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions. -- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null. - -### Changed - -- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contains a null quantity of Ada. - -### Removed - -N/A - -## v1.6.0 - 2023-09-08 - -### Added - -- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentiation with powers of two. -- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing if a bit is set in a bytearray (MSB). - -## v1.5.0 - 2023-08-16 - -### Removed - -- Retired `list.and` and `list.or` because of the new keywords for logical operator chaining. - -## v1.4.0 - 2023-07-21 - -### Changed - -- Fixed a missing null-check on `value.add`. Adding a null quantity of a token is now correctly a no-op. - -## v1.3.0 - 2023-06-30 - -### Added - -- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method. -- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#denominator): For accessing the parts of a rational value (see the sketch below). -- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing the [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values. -- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing the [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values. - -### Changed - -- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before that fix, removing all assets from a given policy could leave an empty dictionary of assets present in the `Value`.
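A small sketch of the rational API mentioned above (purely illustrative; `new` is assumed to return `None` for a zero denominator, and `numerator`/`denominator` are assumed to be plain accessors):

```aiken
use aiken/math/rational

// Hypothetical test: build 2/3 and read back its parts.
test rational_accessors_sketch() {
  expect Some(r) = rational.new(2, 3)
  rational.numerator(r) == 2 && rational.denominator(r) == 3
}
```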
- -## v1.2.0 - 2023-06-17 - -### Added - -- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue) -- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value` -- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue` -- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex-encoded `String` -- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers. - - [x] `abs` - - [x] `add` - - [x] `ceil` - - [x] `compare` - - [x] `compare_with` - - [x] `div` - - [x] `floor` - - [x] `from_int` - - [x] `mul` - - [x] `negate` - - [x] `new` - - [x] `proper_fraction` - - [x] `reciprocal` - - [x] `reduce` - - [x] `round` - - [x] `round_even` - - [x] `sub` - - [x] `truncate` - - [x] `zero` - -### Removed - -- The module `MintedValue` was merged into `Value`. - -## v1.1.0 - 2023-06-06 - -### Added - -- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate. - -- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a UTF-8 encoded `ByteArray`, when possible. - -- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): For left and right folds over dictionary elements in ascending key order. - -- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exists, the provided merge function is called. - -- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset. - -- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionary of dictionaries. - -- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. It simplifies some of the implementation of `Value`, which no longer needs to handle the special case where null-quantity tokens would be present. It isn't possible to construct a `MintedValue` by hand; values of this type come entirely from the script context and are 'read-only'. - -- More documentation for the `dict` and `interval` modules. - -### Changed - -> **Warning** -> -> Most of those changes are breaking changes. Though, given we're still in an -> alpha state, only the `minor` component of the version number is bumped. -> Please forgive us. - -- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as the last argument. For example: - - ``` - fn foldl(self: List
, with: fn(a, b) -> b, zero: b) -> b - - ↓ becomes - - fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b - ``` - -- Fixed implementation of `bytearray.slice`; `slice` would otherwise behave as if the second argument were an offset. - -- Rename `transaction/value.add` into `transaction/value.merge`. - -- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. - -- Fixed various examples from the documentation - -### Removed - -- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. - -## v1.0.0 - 2023-04-13 - -### Added - -N/A - -### Changed - -N/A - -### Removed - -N/A diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE deleted file mode 100644 index 4a1de273..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2022 Lucas Rosa - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md deleted file mode 100644 index 4cd6fef2..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/README.md +++ /dev/null @@ -1,71 +0,0 @@ -
-
-

Aiken Standard Library

- -[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE) -[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml) - -
-
- -## Getting started - -``` -aiken add aiken-lang/stdlib --version v2 -``` - -## Compatibility - -aiken's version | stdlib's version(s) ---- | --- -`v1.1.3`
`v1.1.4`
`v1.1.5`
`v1.1.6`
`v1.1.7` | `>= 2.1.0` -`v1.1.1`
`v1.1.2` | `>= 2.0.0` && `< 2.1.0` -`v1.0.29-alpha`
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` -`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` - -## Overview - -The official standard library for the [Aiken](https://aiken-lang.org) Cardano -smart-contract language. - -It extends the language builtins with useful data-types, functions, constants -and aliases that make using Aiken a bliss. - -```aiken -use aiken/collection/list -use aiken/crypto.{VerificationKeyHash} -use cardano/transaction.{OutputReference, Transaction} - -pub type Datum { - owner: VerificationKeyHash, -} - -pub type Redeemer { - msg: ByteArray, -} - -/// A simple validator which replicates a basic public/private signature lock. -/// -/// - The key (hash) is set as datum when the funds are sent to the script address. -/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message -/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' -/// -validator hello_world { - spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { - expect Some(Datum { owner }) = datum - - let must_say_hello = redeemer.msg == "Hello, World!" - - let must_be_signed = list.has(self.extra_signatories, owner) - - and { - must_say_hello, - must_be_signed, - } - } -} -``` - -## Stats - -![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock deleted file mode 100644 index 769ac20f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file was generated by Aiken -# You typically do not need to edit this file - -[[requirements]] -name = "aiken-lang/fuzz" -version = "v2" -source = "github" - -[[packages]] -name = "aiken-lang/fuzz" -version = "v2" -requirements = [] -source = "github" - -[etags] -"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml deleted file mode 100644 index cbc76a0b..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/aiken.toml +++ /dev/null @@ -1,15 +0,0 @@ -name = "aiken-lang/stdlib" -version = "2.2.0" -compiler = "v1.1.9" -plutus = "v3" -description = "The Aiken Standard Library" - -[repository] -user = "aiken-lang" -project = "stdlib" -platform = "github" - -[[dependencies]] -name = "aiken-lang/fuzz" -version = "v2" -source = "github" diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak deleted file mode 100644 index f0c66d69..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak +++ /dev/null @@ -1,293 +0,0 @@ -use aiken -use aiken/builtin.{decode_utf8, serialise_data} -use aiken/primitive/bytearray - -/// Obtain a String representation of _anything_. This is particularly (and only) useful for tracing -/// and debugging. This function is expensive and should not be used in any production code as it -/// will very likely explodes the validator's budget. 
-/// -/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) -/// of the underlying on-chain binary representation of the data. It's not as -/// easy to read as plain Aiken code, but it is handy for troubleshooting values -/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is -/// a good idea in the Cardano world. -/// -/// ```aiken -/// cbor.diagnostic(42) == "42" -/// cbor.diagnostic(#"a1b2") == "h'A1B2'" -/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" -/// cbor.diagnostic([]) == "[]" -/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" -/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" -/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" -/// cbor.diagnostic(Some(42)) == "121([_ 42])" -/// cbor.diagnostic(None) == "122([])" -/// ``` -pub fn diagnostic(self: Data) -> String { - aiken.diagnostic(self, #"") - |> decode_utf8 -} - -/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). -/// In particular, we have the following property: -/// -/// ```aiken -/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) -/// ``` -/// -/// > [!CAUTION] -/// > Unfortunately, this function isn't derived from a builtin primitive. It -/// > is therefore an order of magnitude more expensive than its counterpart -/// > and shall be used with care. -/// > -/// > In general, one might prefer avoiding deserialisation unless truly necessary. -/// > Yet, it may come in handy for testing and in rare scenarios. -pub fn deserialise(bytes: ByteArray) -> Option { - let length = bytearray.length(bytes) - - let peek = - fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { - fn(cursor) { - if 0 >= cursor { - deserialise_failure - } else { - callback(bytearray.at(bytes, length - cursor))(cursor - offset) - } - } - } - - let take = - fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { - fn(cursor) { - if 0 >= cursor { - deserialise_failure - } else { - callback(builtin.slice_bytearray(length - cursor, n, bytes))( - cursor - n, - ) - } - } - } - - if length == 0 { - None - } else { - let Pair(result, consumed) = decode_data(peek, take)(length) - if consumed != 0 { - None - } else { - Some(result) - } - } -} - -/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). -/// -/// This is particularly useful in combination with hashing functions, as a way -/// to obtain a byte representation that matches the serialised representation -/// used by the ledger in the context of on-chain code. -/// -/// Note that the output matches the output of [`diagnostic`](#diagnostic), -/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a -/// textual representation of the CBOR encoding that is human friendly and -/// useful for debugging. -/// -/// ```aiken -/// cbor.serialise(42) == #"182a" -/// cbor.serialise(#"a1b2") == #"42a1b2" -/// cbor.serialise([]) == #"80" -/// cbor.serialise((1, 2)) == #"9f0102ff" -/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" -/// cbor.serialise([(1, #"ff")]) == #"a10141ff" -/// cbor.serialise(Some(42)) == #"d8799f182aff" -/// cbor.serialise(None) == #"d87a80" -/// ``` -pub fn serialise(self: Data) -> ByteArray { - serialise_data(self) -} - -type Byte = - Int - -type Decoder
= - fn(Int) -> Pair - -type Peek = - fn(Int, fn(Byte) -> Decoder) -> Decoder - -type Take = - fn(Int, fn(ByteArray) -> Decoder) -> Decoder - -fn return(data: Data) -> Decoder { - fn(cursor) { Pair(data, cursor) } -} - -const deserialise_failure: Pair = { - let empty: Data = "" - Pair(empty, -1) - } - -const token_begin_bytes = 0x5f - -const token_begin_list = 0x9f - -const token_begin_map = 0xbf - -const token_break = 0xff - -fn decode_data(peek: Peek, take: Take) -> Decoder { - let next <- peek(1) - let major_type = next / 32 - if major_type <= 2 { - if major_type == 0 { - let i <- decode_uint(peek, take, next) - return(builtin.i_data(i)) - } else if major_type == 1 { - let i <- decode_uint(peek, take, next - 32) - return(builtin.i_data(-i - 1)) - } else { - if next == token_begin_bytes { - let b <- decode_chunks(peek, take) - return(builtin.b_data(b)) - } else { - let b <- decode_bytes(peek, take, next - 64) - return(builtin.b_data(b)) - } - } - } else if major_type == 6 { - let tag <- decode_uint(peek, take, next - 192) - let next <- peek(1) - if tag == 102 { - fn(_) { deserialise_failure } - } else { - let ix = - if tag >= 1280 { - tag - 1280 + 7 - } else { - tag - 121 - } - if next == token_begin_list { - let fields <- decode_indefinite(peek, take, decode_data) - return(builtin.constr_data(ix, fields)) - } else { - let size <- decode_uint(peek, take, next - 128) - let fields <- decode_definite(peek, take, decode_data, size) - return(builtin.constr_data(ix, fields)) - } - } - } else if major_type == 4 { - if next == token_begin_list { - let xs <- decode_indefinite(peek, take, decode_data) - return(builtin.list_data(xs)) - } else { - let size <- decode_uint(peek, take, next - 128) - let xs <- decode_definite(peek, take, decode_data, size) - return(builtin.list_data(xs)) - } - } else if major_type == 5 { - if next == token_begin_map { - let xs <- decode_indefinite(peek, take, decode_pair) - return(builtin.map_data(xs)) - } else { - let size <- decode_uint(peek, take, next - 160) - let xs <- decode_definite(peek, take, decode_pair, size) - return(builtin.map_data(xs)) - } - } else { - fn(_) { deserialise_failure } - } -} - -fn decode_pair(peek: Peek, take: Take) -> Decoder> { - fn(cursor) { - let Pair(k, cursor) = decode_data(peek, take)(cursor) - let Pair(v, cursor) = decode_data(peek, take)(cursor) - Pair(Pair(k, v), cursor) - } -} - -fn decode_uint( - peek: Peek, - take: Take, - header: Int, - and_then: fn(Int) -> Decoder, -) -> Decoder { - if header < 24 { - and_then(header) - } else if header == 24 { - let payload <- peek(1) - and_then(payload) - } else if header < 28 { - let width = bytearray.at(#[2, 4, 8], header - 25) - let payload <- take(width) - and_then(bytearray.to_int_big_endian(payload)) - } else { - fn(_) { deserialise_failure } - } -} - -fn decode_bytes( - peek: Peek, - take: Take, - header: Int, - and_then: fn(ByteArray) -> Decoder, -) -> Decoder { - let width <- decode_uint(peek, take, header) - let bytes <- take(width) - and_then(bytes) -} - -fn decode_chunks( - peek: Peek, - take: Take, - and_then: fn(ByteArray) -> Decoder, -) -> Decoder { - let next <- peek(1) - if next == token_break { - and_then("") - } else { - let chunk <- decode_bytes(peek, take, next - 64) - let chunks <- decode_chunks(peek, take) - and_then(builtin.append_bytearray(chunk, chunks)) - } -} - -fn decode_definite( - peek: Peek, - take: Take, - decode_one: fn(Peek, Take) -> Decoder, - size: Int, - and_then: fn(List) -> Decoder, -) -> Decoder { - if size <= 0 { - and_then([]) - } else { - fn(cursor) 
{ - let Pair(elem, cursor) = decode_one(peek, take)(cursor) - { - let elems <- decode_definite(peek, take, decode_one, size - 1) - and_then([elem, ..elems]) - }(cursor) - } - } -} - -fn decode_indefinite( - peek: Peek, - take: Take, - decode_one: fn(Peek, Take) -> Decoder, - and_then: fn(List) -> Decoder, -) -> Decoder { - let next <- peek(1) - if next == token_break { - and_then([]) - } else { - fn(cursor) { - let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) - { - let elems <- decode_indefinite(peek, take, decode_one) - and_then([elem, ..elems]) - }(cursor) - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak deleted file mode 100644 index 28d9f5bb..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak +++ /dev/null @@ -1,297 +0,0 @@ -use aiken/cbor.{deserialise, diagnostic, serialise} -use aiken/fuzz - -// ------------------------------------------------------------------ diagnostic - -test diagnostic_1() { - diagnostic(42) == @"42" -} - -test diagnostic_2() { - diagnostic(#"a1b2") == @"h'A1B2'" -} - -test diagnostic_3() { - diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" -} - -test diagnostic_4() { - diagnostic([]) == @"[]" -} - -test diagnostic_5() { - diagnostic((1, 2)) == @"[_ 1, 2]" -} - -test diagnostic_6() { - diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" -} - -test diagnostic_7() { - diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" -} - -test diagnostic_7_alt() { - diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" -} - -test diagnostic_8() { - diagnostic(Some(42)) == @"121([_ 42])" -} - -test diagnostic_9() { - diagnostic(None) == @"122([])" -} - -test diagnostic_10() { - let xs: List<(Int, Int)> = [] - diagnostic(xs) == @"[]" -} - -test diagnostic_10_alt() { - let xs: Pairs = [] - diagnostic(xs) == @"{}" -} - -type Foo { - foo: Bar, -} - -type Bar { - A - B(Int) -} - -test diagnostic_11() { - diagnostic(Foo { foo: A }) == @"121([_ 121([])])" -} - -test diagnostic_12() { - diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" -} - -type Baz { - a0: Int, - b0: ByteArray, -} - -test diagnostic_13() { - diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" -} - -test diagnostic_14() { - diagnostic([0]) == @"[_ 0]" -} - -test diagnostic_15() { - diagnostic(-42) == @"-42" -} - -test diagnostic_16() { - diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" -} - -// ------------------------------------------------------------------ serialise - -test serialise_1() { - serialise(42) == #"182a" -} - -test serialise_2() { - serialise(#"a1b2") == #"42a1b2" -} - -test serialise_3() { - serialise([]) == #"80" -} - -test serialise_4() { - serialise((1, 2)) == #"9f0102ff" -} - -test serialise_5() { - serialise((1, #"ff", 3)) == #"9f0141ff03ff" -} - -test serialise_6() { - serialise([(1, #"ff")]) == #"9f9f0141ffffff" -} - -test serialise_7() { - serialise(Some(42)) == #"d8799f182aff" -} - -test serialise_8() { - serialise(None) == #"d87a80" -} - -test serialise_9() { - serialise([Pair(1, #"ff")]) == #"a10141ff" -} - -// ------------------------------------------------------------------ deserialise - -type AnyData { - AnyInt(Int) - AnyByteArray(ByteArray) - AnyList(List) - AnyPairs(Pairs) - AnyUnaryConstr0(UnaryConstr0) - AnyUnaryConstr1(UnaryConstr1) - AnyUnaryConstr2(UnaryConstr2) - AnyBinaryConstr0(BinaryConstr0) - AnyBinaryConstr1(BinaryConstr1) -} - -type UnaryConstr0 { - 
UnaryConstr0 -} - -type UnaryConstr1 { - field0: String, -} - -type UnaryConstr2 { - field0: Int, - field1: List>, -} - -type BinaryConstr0 = - Bool - -type BinaryConstr1 = - Option - -fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { - let k <- fuzz.and_then(any_key) - let v <- fuzz.map(any_value) - Pair(k, v) -} - -fn any_data() -> Fuzzer { - fuzz.either6( - { - let i <- fuzz.map(fuzz.int()) - AnyInt(i) - }, - { - let bs <- fuzz.map(fuzz.bytearray()) - AnyByteArray(bs) - }, - { - let xs <- fuzz.map(fuzz.list(fuzz.int())) - AnyList(xs) - }, - { - let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) - AnyPairs(ps) - }, - fuzz.either3( - fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), - fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), - { - let i <- fuzz.and_then(fuzz.int()) - let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) - AnyUnaryConstr2(UnaryConstr2(i, xs)) - }, - ), - fuzz.either( - { - let b <- fuzz.map(fuzz.bool()) - AnyBinaryConstr0(b) - }, - { - let o <- fuzz.map(fuzz.option(fuzz.int())) - AnyBinaryConstr1(o) - }, - ), - ) -} - -test unit_deserialise_not_enough_bytes_1() { - expect None = deserialise(#"") -} - -test unit_deserialise_not_enough_bytes_2() { - expect None = deserialise(#"82") -} - -test unit_deserialise_non_empty_leftovers() { - expect None = deserialise(#"811442") -} - -test unit_deserialise_invalid_header() { - expect None = deserialise(#"f1") -} - -test unit_deserialise_invalid_uint() { - expect None = deserialise(#"1d0013bdae") -} - -/// A full script context with a minting policy and various assets. Meant to be -/// non-trivial and cover many things we might encounter in a transaction. -test bench_deserialise_script_context() { - expect Some(_) = - deserialise( - #"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad
086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", - ) -} - -test prop_deserialise_any_data(any via any_data()) { - when any is { - AnyInt(i) -> { - fuzz.label(@"Int") - expect Some(data) = deserialise(serialise(i)) - expect i_decoded: Int = data - i_decoded == i - } - AnyByteArray(bs) -> { - fuzz.label(@"ByteArray") - expect Some(data) = deserialise(serialise(bs)) - expect bs_decoded: ByteArray = data - bs_decoded == bs - } - AnyList(xs) -> { - fuzz.label(@"List") - expect Some(data) = deserialise(serialise(xs)) - expect xs_decoded: List = data - xs_decoded == xs - } - AnyPairs(ps) -> { - fuzz.label(@"Pairs") - expect Some(data) = deserialise(serialise(ps)) - expect ps_decoded: Pairs = data - ps_decoded == ps - } - AnyUnaryConstr0(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr0 = data - constr_decoded == constr - } - AnyUnaryConstr1(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr1 = data - constr_decoded == constr - } - AnyUnaryConstr2(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr2 = data - constr_decoded == constr - } - AnyBinaryConstr0(constr) -> { - fuzz.label(@"(binary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: BinaryConstr0 = data - constr_decoded == constr - } - AnyBinaryConstr1(constr) -> { - fuzz.label(@"(binary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: BinaryConstr1 = data - constr_decoded == constr - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak deleted file mode 100644 index 3d4d332e..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak +++ /dev/null @@ -1,4 +0,0 @@ -/// A non negative integer that materializes the position of an element in a -/// collection. -pub type Index = - Int diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak deleted file mode 100644 index 681d0bae..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak +++ /dev/null @@ -1,1174 +0,0 @@ -//// A module for working with bytearray dictionaries. -//// -//// -//// > [!IMPORTANT] -//// > -//// > Dictionaries are **ordered sets** of key-value pairs, which thus -//// > preserve some invariants. Specifically, each key is only present once in -//// > the dictionary and all keys are stored in ascending lexicographic order. -//// > -//// > These invariants allow for more optimized functions to operate on `Dict`, -//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` -//// > from an unknown `Data`, you must first recover an `Pairs` and use -//// > [`dict.from_ascending_list`](#from_ascending_list). - -use aiken/builtin - -/// An opaque `Dict`. The type is opaque because the module maintains some -/// invariant, namely: there's only one occurrence of a given key in the dictionary. 
-/// -/// Note that the `key` parameter is a phantom-type, and only present as a -/// means of documentation. Keys can be any type, yet will need to comparable -/// to use functions like `insert`. -/// -/// See for example: -/// -/// ```aiken -/// pub type Value = -/// Dict> -/// ``` -pub opaque type Dict { - inner: Pairs, -} - -// ## Constructing - -/// An empty dictionnary. -/// ```aiken -/// dict.to_pairs(dict.empty) == [] -/// ``` -pub const empty: Dict = Dict { inner: [] } - -const foo = #"666f6f" - -const bar = #"626172" - -const baz = #"62617a" - -const fixture_1 = - empty - |> insert(foo, 42) - |> insert(bar, 14) - -/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending -/// keys. This function fails (i.e. halts the program execution) if the list isn't -/// sorted. -/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// -/// let result = -/// dict.from_ascending_pairs(pairs) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -/// -/// This is meant to be used to turn a list constructed off-chain into a `Dict` -/// which has taken care of maintaining interval invariants. This function still -/// performs a sanity check on all keys to avoid silly mistakes. It is, however, -/// considerably faster than ['from_pairs'](from_pairs) -pub fn from_ascending_pairs(xs: Pairs) -> Dict { - let Void = check_ascending_list(xs) - Dict { inner: xs } -} - -fn check_ascending_list(xs: Pairs) { - when xs is { - [] -> Void - [_] -> Void - [Pair(x0, _), Pair(x1, _) as e, ..rest] -> - if builtin.less_than_bytearray(x0, x1) { - check_ascending_list([e, ..rest]) - } else { - fail @"keys in associative list aren't in ascending order" - } - } -} - -/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** -/// value doesn't satisfy the predicate. -/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// -/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail -/// ``` -pub fn from_ascending_pairs_with( - xs: Pairs, - predicate: fn(value) -> Bool, -) -> Dict { - let Void = check_ascending_pairs_with(xs, predicate) - Dict { inner: xs } -} - -fn check_ascending_pairs_with( - xs: Pairs, - predicate: fn(value) -> Bool, -) { - when xs is { - [] -> Void - [Pair(_, v)] -> - if predicate(v) { - Void - } else { - fail @"value doesn't satisfy predicate" - } - [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> - if builtin.less_than_bytearray(x0, x1) { - if predicate(v0) { - check_ascending_pairs_with([e, ..rest], predicate) - } else { - fail @"value doesn't satisfy predicate" - } - } else { - fail @"keys in pairs aren't in ascending order" - } - } -} - -test bench_from_ascending_pairs() { - let dict = - from_ascending_pairs( - [ - Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), - Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), - Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), - Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), - ], - ) - - size(dict) == 16 -} - -/// Construct a dictionary from a list of key-value pairs. Note that when a key is present -/// multiple times, the first occurrence prevails. 
-/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] -/// -/// let result = -/// dict.from_pairs(pairs) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn from_pairs(self: Pairs) -> Dict { - Dict { inner: do_from_pairs(self) } -} - -fn do_from_pairs(xs: Pairs) -> Pairs { - when xs is { - [] -> [] - [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) - } -} - -test from_list_1() { - from_pairs([]) == empty -} - -test from_list_2() { - from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( - [Pair(bar, 14), Pair(foo, 42)], - ) -} - -test from_list_3() { - from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 -} - -test from_list_4() { - from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 -} - -test bench_from_pairs() { - let dict = - from_pairs( - [ - Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), - Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), - Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), - Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), - ], - ) - - size(dict) == 16 -} - -// ## Inspecting - -/// Finds a value in the dictionary, and returns the first key found to have that value. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 42) -/// |> dict.insert(key: "b", value: 14) -/// |> dict.insert(key: "c", value: 42) -/// |> dict.find(42) -/// -/// result == Some("a") -/// ``` -pub fn find(self: Dict, value v: value) -> Option { - do_find(self.inner, v) -} - -fn do_find(self: Pairs, value v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - Some(k2) - } else { - do_find(rest, v) - } - } -} - -test find_1() { - find(empty, foo) == None -} - -test find_2() { - find( - empty - |> insert(foo, 14), - 14, - ) == Some(foo) -} - -test find_3() { - find( - empty - |> insert(foo, 14), - 42, - ) == None -} - -test find_4() { - find( - empty - |> insert(foo, 14) - |> insert(bar, 42) - |> insert(baz, 14), - 14, - ) == Some(baz) -} - -/// Get a value in the dict by its key. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: "Aiken") -/// |> dict.get(key: "a") -/// -/// result == Some("Aiken") -/// ``` -pub fn get(self: Dict, key: ByteArray) -> Option { - do_get(self.inner, key) -} - -fn do_get(self: Pairs, key k: ByteArray) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - Some(v) - } else { - None - } - } else { - do_get(rest, k) - } - } -} - -test get_1() { - get(empty, foo) == None -} - -test get_2() { - let m = - empty - |> insert(foo, "Aiken") - |> insert(bar, "awesome") - get(m, key: foo) == Some("Aiken") -} - -test get_3() { - let m = - empty - |> insert(foo, "Aiken") - |> insert(bar, "awesome") - get(m, key: baz) == None -} - -test get_4() { - let m = - empty - |> insert("aaa", "1") - |> insert("bbb", "2") - |> insert("ccc", "3") - |> insert("ddd", "4") - |> insert("eee", "5") - |> insert("fff", "6") - |> insert("ggg", "7") - |> insert("hhh", "8") - |> insert("iii", "9") - |> insert("jjj", "10") - - get(m, "bcd") == None -} - -test get_5() { - let m = - empty - |> insert("aaa", "1") - |> insert("bbb", "2") - |> insert("ccc", "3") - |> insert("ddd", "4") - |> insert("eee", "5") - |> insert("fff", "6") - |> insert("ggg", "7") - |> insert("hhh", "8") - |> insert("iii", "9") - |> insert("jjj", "10") - - get(m, "kkk") == None -} - -/// Check if a key exists in the dictionary. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: "Aiken") -/// |> dict.has_key("a") -/// -/// result == True -/// ``` -pub fn has_key(self: Dict, key k: ByteArray) -> Bool { - do_has_key(self.inner, k) -} - -fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { - when self is { - [] -> False - [Pair(k2, _), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - k == k2 - } else { - do_has_key(rest, k) - } - } -} - -test has_key_1() { - !has_key(empty, foo) -} - -test has_key_2() { - has_key( - empty - |> insert(foo, 14), - foo, - ) -} - -test has_key_3() { - !has_key( - empty - |> insert(foo, 14), - bar, - ) -} - -test has_key_4() { - has_key( - empty - |> insert(foo, 14) - |> insert(bar, 42), - bar, - ) -} - -/// Efficiently checks whether a dictionary is empty. -/// ```aiken -/// dict.is_empty(dict.empty) == True -/// ``` -pub fn is_empty(self: Dict) -> Bool { - when self.inner is { - [] -> True - _ -> False - } -} - -test is_empty_1() { - is_empty(empty) -} - -/// Extract all the keys present in a given `Dict`. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 14) -/// |> dict.insert("b", 42) -/// |> dict.insert("a", 1337) -/// |> dict.keys() -/// -/// result == ["a", "b"] -/// ``` -pub fn keys(self: Dict) -> List { - do_keys(self.inner) -} - -fn do_keys(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] - } -} - -test keys_1() { - keys(empty) == [] -} - -test keys_2() { - keys( - empty - |> insert(foo, 0) - |> insert(bar, 0), - ) == [bar, foo] -} - -/// Return the number of key-value pairs in the dictionary. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.size() -/// -/// result == 3 -/// ``` -pub fn size(self: Dict) -> Int { - do_size(self.inner) -} - -fn do_size(self: Pairs) -> Int { - when self is { - [] -> 0 - [_, ..rest] -> 1 + do_size(rest) - } -} - -test size_1() { - size(empty) == 0 -} - -test size_2() { - size( - empty - |> insert(foo, 14), - ) == 1 -} - -test size_3() { - size( - empty - |> insert(foo, 14) - |> insert(bar, 42), - ) == 2 -} - -/// Extract all the values present in a given `Dict`. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 14) -/// |> dict.insert("b", 42) -/// |> dict.insert("c", 1337) -/// |> dict.values() -/// -/// result == [14, 42, 1337] -/// ``` -pub fn values(self: Dict) -> List { - do_values(self.inner) -} - -fn do_values(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(_, v), ..rest] -> [v, ..do_values(rest)] - } -} - -test values_1() { - values(empty) == [] -} - -test values_2() { - values( - empty - |> insert(foo, 3) - |> insert(bar, 4), - ) == [4, 3] -} - -// ## Modifying - -/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.delete(key: "a") -/// |> dict.to_pairs() -/// -/// result == [Pair("b", 200)] -/// ``` -pub fn delete(self: Dict, key: ByteArray) -> Dict { - Dict { inner: do_delete(self.inner, key) } -} - -fn do_delete( - self: Pairs, - key k: ByteArray, -) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - rest - } else { - self - } - } else { - [Pair(k2, v2), ..do_delete(rest, k)] - } - } -} - -test delete_1() { - delete(empty, foo) == empty -} - -test delete_2() { - let m = - empty - |> insert(foo, 14) - delete(m, foo) == empty -} - -test delete_3() { - let m = - empty - |> insert(foo, 14) - delete(m, bar) == m -} - -test delete_4() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - !has_key(delete(m, foo), foo) -} - -test delete_5() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - has_key(delete(m, bar), foo) -} - -test delete_6() { - let m = - empty - |> insert("aaa", 1) - |> insert("bbb", 2) - |> insert("ccc", 3) - |> insert("ddd", 4) - |> insert("eee", 5) - |> insert("fff", 6) - |> insert("ggg", 7) - |> insert("hhh", 8) - |> insert("iii", 9) - |> insert("jjj", 10) - - delete(m, "bcd") == m -} - -/// Keep only the key-value pairs that pass the given predicate. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.filter(fn(k, _v) { k != "a" }) -/// |> dict.to_pairs() -/// -/// result == [Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn filter( - self: Dict, - with: fn(ByteArray, value) -> Bool, -) -> Dict { - Dict { inner: do_filter(self.inner, with) } -} - -fn do_filter( - self: Pairs, - with: fn(ByteArray, value) -> Bool, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> - if with(k, v) { - [Pair(k, v), ..do_filter(rest, with)] - } else { - do_filter(rest, with) - } - } -} - -test filter_1() { - filter(empty, fn(_, _) { True }) == empty -} - -test filter_2() { - let expected = - empty - |> insert(foo, 42) - filter(fixture_1, fn(_, v) { v > 14 }) == expected -} - -test filter_3() { - let expected = - empty - |> insert(bar, 14) - filter(fixture_1, fn(k, _) { k == bar }) == expected -} - -/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 1) -/// |> dict.insert(key: "b", value: 2) -/// |> dict.insert(key: "a", value: 3) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 3), Pair("b", 2)] -/// ``` -pub fn insert( - self: Dict, - key k: ByteArray, - value v: value, -) -> Dict { - Dict { inner: do_insert(self.inner, k, v) } -} - -fn do_insert( - self: Pairs, - key k: ByteArray, - value v: value, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_bytearray(k, k2) { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..rest] - } else { - [Pair(k2, v2), ..do_insert(rest, k, v)] - } - } - } -} - -test insert_1() { - let m1 = - empty - |> insert(foo, 42) - let m2 = - empty - |> insert(foo, 14) - insert(m1, foo, 14) == m2 -} - -test insert_2() { - let m1 = - empty - |> insert(foo, 42) - let m2 = - empty - |> insert(bar, 14) - insert(m1, bar, 14) == insert(m2, foo, 42) -} - -/// Insert a value in the dictionary at a given key. When the key already exist, the provided -/// merge function is called. The value existing in the dictionary is passed as the second argument -/// to the merge function, and the new value is passed as the third argument. 
-/// -/// ```aiken -/// let sum = -/// fn (_k, a, b) { Some(a + b) } -/// -/// let result = -/// dict.empty -/// |> dict.insert_with(key: "a", value: 1, with: sum) -/// |> dict.insert_with(key: "b", value: 2, with: sum) -/// |> dict.insert_with(key: "a", value: 3, with: sum) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 4), Pair("b", 2)] -/// ``` -pub fn insert_with( - self: Dict, - key k: ByteArray, - value v: value, - with: fn(ByteArray, value, value) -> Option, -) -> Dict { - Dict { - inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), - } -} - -test insert_with_1() { - let sum = - fn(_k, a, b) { Some(a + b) } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: sum) - |> insert_with(key: "bar", value: 2, with: sum) - |> to_pairs() - - result == [Pair("bar", 2), Pair("foo", 1)] -} - -test insert_with_2() { - let sum = - fn(_k, a, b) { Some(a + b) } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: sum) - |> insert_with(key: "bar", value: 2, with: sum) - |> insert_with(key: "foo", value: 3, with: sum) - |> to_pairs() - - result == [Pair("bar", 2), Pair("foo", 4)] -} - -test insert_with_3() { - let with = - fn(k, a, _b) { - if k == "foo" { - Some(a) - } else { - None - } - } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: with) - |> insert_with(key: "bar", value: 2, with: with) - |> insert_with(key: "foo", value: 3, with: with) - |> insert_with(key: "bar", value: 4, with: with) - |> to_pairs() - - result == [Pair("foo", 1)] -} - -/// Apply a function to all key-value pairs in a Dict. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.map(fn(_k, v) { v * 2 }) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] -/// ``` -pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { - Dict { inner: do_map(self.inner, with) } -} - -fn do_map( - self: Pairs, - with: fn(ByteArray, a) -> b, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] - } -} - -test map_1() { - let result = - fixture_1 - |> map(with: fn(k, _) { k }) - get(result, foo) == Some(foo) -} - -test map_2() { - let result = - fixture_1 - |> map(with: fn(_, v) { v + 1 }) - get(result, foo) == Some(43) && size(result) == size(fixture_1) -} - -/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
-/// -/// ```aiken -/// let (value, _) = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.pop(key: "a") -/// -/// result == 100 -/// ``` -pub fn pop( - self: Dict, - key: ByteArray, -) -> (Option, Dict) { - do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) }) -} - -fn do_pop( - self: Pairs, - key k: ByteArray, - return: fn(Option, Pairs) -> result, -) -> result { - when self is { - [] -> return(None, []) - [Pair(k2, v2), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - return(Some(v2), rest) - } else { - return(None, self) - } - } else { - do_pop( - rest, - k, - fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) }, - ) - } - } -} - -test pop_1() { - pop(empty, foo) == (None, empty) -} - -test pop_2() { - let m = - empty - |> insert(foo, 14) - pop(m, foo) == (Some(14), empty) -} - -test pop_3() { - let m = - empty - |> insert(foo, 14) - pop(m, bar) == (None, m) -} - -test pop_4() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - pop(m, foo) == (Some(14), empty |> insert(bar, 14)) -} - -test pop_6() { - let m = - empty - |> insert("aaa", 1) - |> insert("bbb", 2) - |> insert("ccc", 3) - |> insert("ddd", 4) - |> insert("eee", 5) - |> insert("fff", 6) - |> insert("ggg", 7) - |> insert("hhh", 8) - |> insert("iii", 9) - |> insert("jjj", 10) - - pop(m, "bcd") == (None, m) -} - -// ## Combining - -/// Combine two dictionaries. If the same key exist in both the left and -/// right dictionary, values from the left are preferred (i.e. left-biaised). -/// -/// ```aiken -/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) -/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) -/// -/// let result = -/// dict.union(left_dict, right_dict) |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn union( - left: Dict, - right: Dict, -) -> Dict { - Dict { inner: do_union(left.inner, right.inner) } -} - -fn do_union( - left: Pairs, - right: Pairs, -) -> Pairs { - when left is { - [] -> right - [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) - } -} - -test union_1() { - union(fixture_1, empty) == fixture_1 -} - -test union_2() { - union(empty, fixture_1) == fixture_1 -} - -test union_3() { - let left = - empty - |> insert(foo, 14) - let right = - empty - |> insert(bar, 42) - |> insert(baz, 1337) - union(left, right) == from_pairs( - [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)], - ) -} - -test union_4() { - let left = - empty - |> insert(foo, 14) - let right = - empty - |> insert(bar, 42) - |> insert(foo, 1337) - union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)]) -} - -/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present -/// in both dictionaries. The first value received correspond to the value in the left -/// dictionnary, whereas the second argument corresponds to the value in the right dictionnary. -/// -/// When passing `None`, the value is removed and not present in the union. 
-/// -/// ```aiken -/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) -/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) -/// -/// let result = -/// dict.union_with( -/// left_dict, -/// right_dict, -/// fn(_k, v1, v2) { Some(v1 + v2) }, -/// ) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn union_with( - left: Dict, - right: Dict, - with: fn(ByteArray, value, value) -> Option, -) -> Dict { - Dict { inner: do_union_with(left.inner, right.inner, with) } -} - -fn do_union_with( - left: Pairs, - right: Pairs, - with: fn(ByteArray, value, value) -> Option, -) -> Pairs { - when left is { - [] -> right - [Pair(k, v), ..rest] -> - do_union_with(rest, do_insert_with(right, k, v, with), with) - } -} - -fn do_insert_with( - self: Pairs, - key k: ByteArray, - value v: value, - with: fn(ByteArray, value, value) -> Option, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_bytearray(k, k2) { - [Pair(k, v), ..self] - } else { - if k == k2 { - when with(k, v, v2) is { - Some(combined) -> [Pair(k, combined), ..rest] - None -> rest - } - } else { - [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] - } - } - } -} - -test union_with_1() { - let left = - empty - |> insert(foo, 14) - - let right = - empty - |> insert(bar, 42) - |> insert(foo, 1337) - - let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) - - result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) -} - -// ## Transforming - -/// Fold over the key-value pairs in a dictionary. The fold direction follows keys -/// in ascending order and is done from left-to-right. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.foldl(0, fn(_k, v, r) { v + r }) -/// -/// result == 600 -/// ``` -pub fn foldl( - self: Dict, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - do_foldl(self.inner, zero, with) -} - -fn do_foldl( - self: Pairs, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) - } -} - -test fold_1() { - foldl(empty, 14, fn(_, _, _) { 42 }) == 14 -} - -test fold_2() { - foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 -} - -/// Fold over the key-value pairs in a dictionary. The fold direction follows keys -/// in ascending order and is done from right-to-left. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.foldr(0, fn(_k, v, r) { v + r }) -/// -/// result == 600 -/// ``` -pub fn foldr( - self: Dict, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - do_foldr(self.inner, zero, with) -} - -fn do_foldr( - self: Pairs, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) - } -} - -test foldr_1() { - foldr(empty, 14, fn(_, _, _) { 42 }) == 14 -} - -test foldr_2() { - foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 -} - -/// Get the inner list holding the dictionary data. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn to_pairs(self: Dict) -> Pairs { - self.inner -} - -test to_list_1() { - to_pairs(empty) == [] -} - -test to_list_2() { - to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak deleted file mode 100644 index b8e7f675..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak +++ /dev/null @@ -1,1411 +0,0 @@ -use aiken/builtin -use aiken/primitive/bytearray -use aiken/primitive/int - -// ## Constructing - -/// Add an element in front of the list. Sometimes useful when combined with -/// other functions. -/// -/// ```aiken -/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] -/// ``` -pub fn push(self: List, elem: a) -> List { - [elem, ..self] -} - -test push_1() { - push([], 0) == [0] -} - -test push_2() { - push([2, 3], 1) == [1, 2, 3] -} - -/// Construct a list of a integer from a given range. -/// -/// ```aiken -/// list.range(0, 3) == [0, 1, 2, 3] -/// list.range(-1, 1) == [-1, 0, 1] -/// ``` -pub fn range(from: Int, to: Int) -> List { - if from > to { - [] - } else { - [from, ..range(from + 1, to)] - } -} - -test range_1() { - range(0, 3) == [0, 1, 2, 3] -} - -test range_2() { - range(-1, 1) == [-1, 0, 1] -} - -/// Construct a list filled with n copies of a value. -/// -/// ```aiken -/// list.repeat("na", 3) == ["na", "na", "na"] -/// ``` -pub fn repeat(elem: a, n_times: Int) -> List { - if n_times <= 0 { - [] - } else { - [elem, ..repeat(elem, n_times - 1)] - } -} - -test repeat_1() { - repeat(42, 0) == [] -} - -test repeat_2() { - repeat(14, 3) == [14, 14, 14] -} - -// ## Inspecting - -/// Determine if all elements of the list satisfy the given predicate. -/// -/// Note: an empty list always satisfies the predicate. -/// -/// ```aiken -/// list.all([], fn(n) { n > 0 }) == True -/// list.all([1, 2, 3], fn(n) { n > 0 }) == True -/// list.all([1, 2, 3], fn(n) { n == 2 }) == False -/// ``` -pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { - when self is { - [] -> True - [x, ..xs] -> predicate(x) && all(xs, predicate) - } -} - -test all_1() { - all([1, 2, 3], fn(n) { n > 0 }) == True -} - -test all_2() { - all([1, 2, 3], fn(n) { n > 42 }) == False -} - -test all_3() { - all([], fn(n) { n == 42 }) == True -} - -/// Determine if at least one element of the list satisfies the given predicate. -/// -/// Note: an empty list never satisfies the predicate. -/// -/// ```aiken -/// list.any([], fn(n) { n > 2 }) == False -/// list.any([1, 2, 3], fn(n) { n > 0 }) == True -/// list.any([1, 2, 3], fn(n) { n == 2 }) == True -/// list.any([1, 2, 3], fn(n) { n < 0 }) == False -/// ``` -pub fn any(self: List, predicate: fn(a) -> Bool) -> Bool { - when self is { - [] -> False - [x, ..xs] -> predicate(x) || any(xs, predicate) - } -} - -test any_1() { - any([1, 2, 3], fn(n) { n > 0 }) == True -} - -test any_2() { - any([1, 2, 3], fn(n) { n > 42 }) == False -} - -test any_3() { - any([], fn(n) { n == 42 }) == False -} - -/// Return Some(item) at the index or None if the index is out of range. The index is 0-based. 
-/// -/// ```aiken -/// list.at([1, 2, 3], 1) == Some(2) -/// list.at([1, 2, 3], 42) == None -/// ``` -pub fn at(self: List, index: Int) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if index == 0 { - Some(x) - } else { - at(xs, index - 1) - } - } -} - -test at_1() { - at([1, 2, 3], -1) == None -} - -test at_2() { - at([], 0) == None -} - -test at_3() { - at([1, 2, 3], 3) == None -} - -test at_4() { - at([1], 0) == Some(1) -} - -test at_5() { - at([1, 2, 3], 2) == Some(3) -} - -/// Count how many items in the list satisfy the given predicate. -/// -/// ```aiken -/// list.count([], fn(a) { a > 2}) == 0 -/// list.count([1, 2, 3], fn(a) { n > 0 }) == 3 -/// list.count([1, 2, 3], fn(a) { n >= 2 }) == 2 -/// list.count([1, 2, 3], fn(a) { n > 5 }) == 0 -/// ``` -pub fn count(self: List, predicate: fn(a) -> Bool) -> Int { - foldr( - self, - 0, - fn(item, total) { - if predicate(item) { - total + 1 - } else { - total - } - }, - ) -} - -test count_empty() { - count([], fn(a) { a > 2 }) == 0 -} - -test count_all() { - count([1, 2, 3], fn(a) { a > 0 }) == 3 -} - -test count_some() { - count([1, 2, 3], fn(a) { a >= 2 }) == 2 -} - -test count_none() { - count([1, 2, 3], fn(a) { a > 5 }) == 0 -} - -/// Find the first element satisfying the given predicate, if any. -/// -/// ```aiken -/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2) -/// list.find([4, 5, 6], fn(x) { x == 2 }) == None -/// ``` -pub fn find(self: List, predicate: fn(a) -> Bool) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if predicate(x) { - Some(x) - } else { - find(xs, predicate) - } - } -} - -test find_1() { - find([1, 2, 3], fn(x) { x == 1 }) == Some(1) -} - -test find_2() { - find([1, 2, 3], fn(x) { x > 42 }) == None -} - -test find_3() { - find([], fn(_) { True }) == None -} - -/// Figures out whether a list contain the given element. -/// -/// ```aiken -/// list.has([1, 2, 3], 2) == True -/// list.has([1, 2, 3], 14) == False -/// list.has([], 14) == False -/// ``` -pub fn has(self: List, elem: a) -> Bool { - when self is { - [] -> False - [x, ..xs] -> - if x == elem { - True - } else { - has(xs, elem) - } - } -} - -test has_1() { - has([1, 2, 3], 1) == True -} - -test has_2() { - has([1, 2, 3], 14) == False -} - -test has_3() { - has([], 14) == False -} - -/// Get the first element of a list -/// -/// ```aiken -/// list.head([1, 2, 3]) == Some(1) -/// list.head([]) == None -/// ``` -pub fn head(self: List) -> Option { - when self is { - [] -> None - _ -> Some(builtin.head_list(self)) - } -} - -test head_1() { - head([1, 2, 3]) == Some(1) -} - -test head_2() { - head([]) == None -} - -/// Checks whether a list is empty. -/// -/// ```aiken -/// list.is_empty([]) == True -/// list.is_empty([1, 2, 3]) == False -/// ``` -pub fn is_empty(self: List) -> Bool { - when self is { - [] -> True - _ -> False - } -} - -test is_empty_1() { - is_empty([]) == True -} - -test is_empty_2() { - is_empty([1, 2, 3]) == False -} - -/// Gets the index of an element of a list, if any. Otherwise, returns None. 
-/// -/// ```aiken -/// list.index_of([1, 5, 2], 2) == Some(2) -/// list.index_of([1, 7, 3], 4) == None -/// list.index_of([1, 0, 9, 6], 6) == 3 -/// list.index_of([], 6) == None -/// ``` -pub fn index_of(self: List, elem: a) -> Option { - do_index_of(self, elem, 0) -} - -fn do_index_of(self: List, elem: a, i: Int) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if x == elem { - Some(i) - } else { - do_index_of(xs, elem, i + 1) - } - } -} - -test index_of_1() { - index_of([1, 5, 2], 2) == Some(2) -} - -test index_of_2() { - index_of([1, 7, 3], 4) == None -} - -test index_of_3() { - index_of([1, 0, 9, 6], 6) == Some(3) -} - -test index_of_4() { - index_of([], 6) == None -} - -/// Get the last in the given list, if any. -/// -/// ```aiken -/// list.last([]) == None -/// list.last([1, 2, 3]) == Some(3) -/// ``` -pub fn last(self: List) -> Option { - when self is { - [] -> None - [x] -> Some(x) - [_, ..xs] -> last(xs) - } -} - -test last_1() { - last([]) == None -} - -test last_2() { - last([1]) == Some(1) -} - -test last_3() { - last([1, 2, 3, 4]) == Some(4) -} - -/// Get the number of elements in the given list. -/// -/// ```aiken -/// list.length([]) == 0 -/// list.length([1, 2, 3]) == 3 -/// ``` -pub fn length(self: List) -> Int { - when self is { - [] -> 0 - [_, ..xs] -> 1 + length(xs) - } -} - -test length_1() { - length([]) == 0 -} - -test length_2() { - length([1, 2, 3]) == 3 -} - -// ## Modifying - -// ### Extracting - -/// Remove the first occurrence of the given element from the list. -/// -/// ```aiken -/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] -/// list.delete([1, 2, 3], 14) == [1, 2, 3] -/// ``` -pub fn delete(self: List, elem: a) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if x == elem { - xs - } else { - [x, ..delete(xs, elem)] - } - } -} - -test delete_1() { - delete([], 42) == [] -} - -test delete_2() { - delete([1, 2, 3, 1], 1) == [2, 3, 1] -} - -test delete_3() { - delete([1, 2, 3], 14) == [1, 2, 3] -} - -test delete_4() { - delete([2], 2) == [] -} - -/// Drop the first `n` elements of a list. -/// -/// ```aiken -/// list.drop([1, 2, 3], 2) == [3] -/// list.drop([], 42) == [] -/// list.drop([1, 2, 3], 42) == [] -/// ``` -pub fn drop(self: List, n: Int) -> List { - if n <= 0 { - self - } else { - when self is { - [] -> [] - [_x, ..xs] -> drop(xs, n - 1) - } - } -} - -test drop_1() { - drop([], 42) == [] -} - -test drop_2() { - drop([1, 2, 3], 2) == [3] -} - -/// Returns the suffix of the given list after removing all elements that satisfy the predicate. -/// -/// ```aiken -/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] -/// list.drop_while([], fn(x) { x > 2 }) == [] -/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] -/// ``` -pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - drop_while(xs, predicate) - } else { - self - } - } -} - -test drop_while_1() { - drop_while([], fn(x) { x > 2 }) == [] -} - -test drop_while_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] -} - -test drop_while_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x == 42 }) == xs -} - -test drop_while_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x < 42 }) == [] -} - -/// Produce a list of elements that satisfy a predicate. 
-/// -/// ```aiken -/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] -/// list.filter([], fn(x) { x > 2 }) == [] -/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] -/// ``` -pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - [x, ..filter(xs, predicate)] - } else { - filter(xs, predicate) - } - } -} - -test filter_1() { - filter([], fn(x) { x > 0 }) == [] -} - -test filter_2() { - let xs = [1, 2, 3, 4, 5, 6] - filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] -} - -test filter_3() { - let filter_foldr = - fn(xs, f) { - foldr( - xs, - [], - fn(x, ys) { - if f(x) { - [x, ..ys] - } else { - ys - } - }, - ) - } - - let is_odd = - fn(n) { builtin.mod_integer(n, 2) != 0 } - - filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) -} - -/// Produce a list of transformed elements that satisfy a predicate. -/// -/// ```aiken -/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } -/// list.filter_map([1, 2, 3], transform) == [3, 9] -/// ``` -pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when predicate(x) is { - None -> filter_map(xs, predicate) - Some(y) -> [y, ..filter_map(xs, predicate)] - } - } -} - -test filter_map_1() { - filter_map([], fn(_) { Some(42) }) == [] -} - -test filter_map_2() { - filter_map( - [1, 2, 3, 4, 5, 6], - fn(x) { - if builtin.mod_integer(x, 2) != 0 { - Some(3 * x) - } else { - None - } - }, - ) == [3, 9, 15] -} - -/// Return all elements except the last one. -/// -/// ```aiken -/// list.init([]) == None -/// list.init([1, 2, 3]) == Some([1, 2]) -/// ``` -pub fn init(self: List) -> Option> { - when self is { - [] -> None - _ -> Some(do_init(self)) - } -} - -fn do_init(self: List) -> List { - when self is { - [] -> fail @"unreachable" - [_] -> [] - [x, ..xs] -> [x, ..do_init(xs)] - } -} - -test init_1() { - init([]) == None -} - -test init_2() { - init([1]) == Some([]) -} - -test init_3() { - init([1, 2, 3, 4]) == Some([1, 2, 3]) -} - -/// Returns a tuple with all elements that satisfy the predicate at first -/// element, and the rest as second element. -/// -/// ```aiken -/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) -/// ``` -pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { - when self is { - [] -> ([], []) - [x, ..xs] -> { - let (left, right) = partition(xs, predicate) - if predicate(x) { - ([x, ..left], right) - } else { - (left, [x, ..right]) - } - } - } -} - -test partition_1() { - partition([], fn(x) { x > 2 }) == ([], []) -} - -test partition_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) -} - -test partition_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x == 42 }) == ([], xs) -} - -test partition_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x < 42 }) == (xs, []) -} - -test partition_5() { - partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) -} - -/// Extract a sublist from the given list using 0-based indexes. Negative -/// indexes wrap over, so `-1` refers to the last element of the list. 
-/// -/// ```aiken -/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] -/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] -/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] -/// ``` -pub fn slice(self: List, from: Int, to: Int) -> List { - let (i, l) = - if from >= 0 { - (from, None) - } else { - let l = length(self) - (l + from, Some(l)) - } - - let j = - if to >= 0 { - to - i + 1 - } else { - when l is { - Some(l) -> l + to - i + 1 - None -> length(self) + to - i + 1 - } - } - - self - |> drop(i) - |> take(j) -} - -test slice_1() { - slice([1, 2, 3], 0, 2) == [1, 2, 3] -} - -test slice_2() { - slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] -} - -test slice_3() { - slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] -} - -test slice_4() { - slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] -} - -test slice_5() { - slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] -} - -test slice_6() { - slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] -} - -/// Cut a list in two, such that the first list contains the given number of / -/// elements and the second list contains the rest. -/// -/// Fundamentally equivalent to (but more efficient): -/// -/// ```aiken -/// // span(xs, n) == (take(xs, n), drop(xs, n)) -/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) -/// ``` -pub fn span(self: List, n: Int) -> (List, List) { - when self is { - [] -> ([], []) - [x, ..xs] -> - if n <= 0 { - ([], self) - } else { - let (left, right) = span(xs, n - 1) - ([x, ..left], right) - } - } -} - -test span_1() { - span([], 2) == ([], []) -} - -test span_2() { - span([1, 2, 3], 2) == ([1, 2], [3]) -} - -test span_3() { - span([1, 2, 3], -1) == ([], [1, 2, 3]) -} - -test span_4() { - span([1, 2, 3], 42) == ([1, 2, 3], []) -} - -/// Get elements of a list after the first one, if any. -/// -/// ```aiken -/// list.tail([]) == None -/// list.tail([1, 2, 3]) == Some([2, 3]) -/// ``` -pub fn tail(self: List) -> Option> { - when self is { - [] -> None - [_, ..xs] -> Some(xs) - } -} - -test tail_1() { - tail([1, 2, 3]) == Some([2, 3]) -} - -test tail_2() { - tail([]) == None -} - -/// Get the first `n` elements of a list. -/// -/// ```aiken -/// list.take([1, 2, 3], 2) == [1, 2] -/// list.take([1, 2, 3], 14) == [1, 2, 3] -/// ``` -pub fn take(self: List, n: Int) -> List { - if n <= 0 { - [] - } else { - when self is { - [] -> [] - [x, ..xs] -> [x, ..take(xs, n - 1)] - } - } -} - -test take_1() { - take([], 42) == [] -} - -test take_2() { - take([1, 2, 3], 2) == [1, 2] -} - -/// Returns the longest prefix of the given list where all elements satisfy the predicate. -/// -/// ```aiken -/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] -/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] -/// ``` -pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - [x, ..take_while(xs, predicate)] - } else { - [] - } - } -} - -test take_while_1() { - take_while([], fn(x) { x > 2 }) == [] -} - -test take_while_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] -} - -test take_while_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x == 42 }) == [] -} - -test take_while_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x < 42 }) == xs -} - -/// Removes duplicate elements from a list. 
-/// -/// ```aiken -/// list.unique([1, 2, 3, 1]) == [1, 2, 3] -/// ``` -pub fn unique(self: List) -> List { - when self is { - [] -> [] - [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] - } -} - -test unique_1() { - unique([]) == [] -} - -test unique_2() { - let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] - unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -} - -// ### Mapping - -/// Map elements of a list into a new list and flatten the result. -/// -/// ```aiken -/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] -/// ``` -pub fn flat_map(self: List, with: fn(a) -> List) -> List { - foldr(self, [], fn(x, xs) { concat(with(x), xs) }) -} - -test flat_map_1() { - flat_map([], fn(a) { [a] }) == [] -} - -test flat_map_2() { - flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] -} - -/// Perform an action for each element of a list. -/// -/// ```aiken -/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) -/// ``` -pub fn for_each(self: List, do: fn(a) -> Void) -> Void { - foldr(self, Void, fn(x, _) { do(x) }) -} - -test for_each_1() { - for_each( - [@"hello", @"world"], - do: fn(lbl) { - trace lbl - Void - }, - ) -} - -/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. -/// -/// ```aiken -/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] -/// ``` -pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { - do_indexed_map(0, self, with) -} - -fn do_indexed_map( - n: Int, - self: List, - with: fn(Int, a) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] - } -} - -test indexed_map_1() { - indexed_map([], fn(i, _n) { i }) == [] -} - -test indexed_map_2() { - indexed_map( - [4, 8, 13, 2], - fn(i, n) { - if n == 8 { - n - } else { - i - } - }, - ) == [0, 8, 2, 3] -} - -/// Apply a function to each element of a list. -/// -/// ```aiken -/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] -/// ``` -pub fn map(self: List, with: fn(a) -> result) -> List { - when self is { - [] -> [] - [x, ..xs] -> [with(x), ..map(xs, with)] - } -} - -test map_1() { - map([], fn(n) { n + 1 }) == [] -} - -test map_2() { - map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] -} - -/// Apply a function of two arguments, combining elements from two lists. -/// -/// Note: if one list is longer, the extra elements are dropped. -/// -/// ```aiken -/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] -/// ``` -pub fn map2( - self: List, - bs: List, - with: fn(a, b) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] - } - } -} - -test map2_1() { - map2([], [1, 2, 3], fn(a, b) { a + b }) == [] -} - -test map2_2() { - map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] -} - -test map2_3() { - map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] -} - -/// Apply a function of three arguments, combining elements from three lists. -/// -/// Note: if one list is longer, the extra elements are dropped. 
-/// -/// ```aiken -/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] -/// ``` -pub fn map3( - self: List, - bs: List, - cs: List, - with: fn(a, b, c) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> - when cs is { - [] -> [] - [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] - } - } - } -} - -test map3_1() { - map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] -} - -test map3_2() { - map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] -} - -/// Return the list with its elements in the reserve order. -/// -/// ```aiken -/// list.reverse([1, 2, 3]) == [3, 2, 1] -/// ``` -pub fn reverse(self: List) -> List { - foldl(self, [], fn(x, xs) { [x, ..xs] }) -} - -test reverse_1() { - reverse([]) == [] -} - -test reverse_2() { - reverse([1, 2, 3]) == [3, 2, 1] -} - -/// Sort a list in ascending order using the given comparison function. -/// -/// ```aiken -/// use aiken/int -/// -/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] -/// sort([1, 2, 3], int.compare) == [1, 2, 3] -/// ``` -pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { - when self is { - [] -> [] - [x, ..xs] -> insert(sort(xs, compare), x, compare) - } -} - -fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { - when self is { - [] -> [e] - [x, ..xs] -> - if compare(e, x) == Less { - [e, ..self] - } else { - [x, ..insert(xs, e, compare)] - } - } -} - -test sort_1() { - let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_2() { - let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_3() { - let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_4() { - sort([], int.compare) == [] -} - -/// Decompose a list of tuples into a tuple of lists. -/// -/// ``` -/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) -/// ``` -pub fn unzip(self: List<(a, b)>) -> (List, List) { - when self is { - [] -> ([], []) - [(a, b), ..xs] -> { - let (a_tail, b_tail) = unzip(xs) - ([a, ..a_tail], [b, ..b_tail]) - } - } -} - -test unzip_1() { - unzip([]) == ([], []) -} - -test unzip_2() { - unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) -} - -// ## Combining - -/// Merge two lists together. -/// -/// ```aiken -/// list.concat([], []) == [] -/// list.concat([], [1, 2, 3]) == [1, 2, 3] -/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] -/// ``` -pub fn concat(left: List, right: List) -> List { - when left is { - [] -> right - [x, ..xs] -> [x, ..concat(xs, right)] - } -} - -test concat_1() { - concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] -} - -test concat_2() { - concat([1, 2, 3], []) == [1, 2, 3] -} - -test concat_3() { - concat([], [1, 2, 3]) == [1, 2, 3] -} - -/// Remove the first occurrence of each element of the second list from the first one. 
-/// -/// ``` -/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] -/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] -/// list.difference([1, 2, 3], []) == [1, 2, 3] -/// ``` -pub fn difference(self: List, with: List) -> List { - when with is { - [] -> self - [x, ..xs] -> difference(delete(self, x), xs) - } -} - -test difference_1() { - difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] -} - -test difference_2() { - difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] -} - -test difference_3() { - difference([1, 2, 3], []) == [1, 2, 3] -} - -test difference_4() { - difference([], [1, 2, 3]) == [] -} - -/// Combine two lists together. -/// -/// Note: if one list is longer, the extra elements are dropped. -/// -/// ```aiken -/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] -/// ``` -pub fn zip(self: List, bs: List) -> List<(a, b)> { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> [(x, y), ..zip(xs, ys)] - } - } -} - -test zip_1() { - zip([], [1, 2, 3]) == [] -} - -test zip_2() { - zip([1, 2, 3], []) == [] -} - -test zip_3() { - zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] -} - -// ## Transforming - -/// Reduce a list from left to right. -/// -/// ```aiken -/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] -/// ``` -pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { - when self is { - [] -> zero - [x, ..xs] -> foldl(xs, with(x, zero), with) - } -} - -type Fold2 = - fn(a, b) -> result - -pub fn foldl2( - self: List, - zero_a: a, - zero_b: b, - with: fn(elem, a, b, Fold2) -> result, - return: Fold2, -) -> result { - do_foldl2(self, with, return)(zero_a, zero_b) -} - -fn do_foldl2( - self: List, - with: fn(elem, a, b, Fold2) -> result, - return: Fold2, -) -> Fold2 { - when self is { - [] -> return - [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) - } -} - -test foldl2_optimized() { - let - len, - sum, - <- - foldl2( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - 0, - 0, - fn(n, len, sum, return) { return(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl2_classic() { - let (len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - (0, 0), - fn(n, (len, sum)) { (len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -type Foo { - Foo(Int, Int) -} - -test foldl2_pair() { - let Pair(len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - Pair(0, 0), - fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl2_foo() { - let Foo(len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - Foo(0, 0), - fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl_1() { - foldl([], 0, fn(_, _) { 1 }) == 0 -} - -test foldl_2() { - foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 -} - -test foldl_3() { - foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] -} - -/// Reduce a list from right to left. 
-/// -/// ```aiken -/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] -/// ``` -pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { - when self is { - [] -> zero - [x, ..xs] -> with(x, foldr(xs, zero, with)) - } -} - -test foldr_1() { - foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 -} - -test foldr_2() { - foldr( - [1, 2, 3], - "", - fn(n, _str) { - if builtin.mod_integer(n, 2) == 0 { - "foo" - } else { - "bar" - } - }, - ) == "bar" -} - -test foldr_3() { - foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] -} - -/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. -/// -/// ```aiken -/// let group = fn(i, x, xs) { [(i, x), ..xs] } -/// list.indexed_foldr(["a", "b", "c"], [], group) == [ -/// (0, "a"), -/// (1, "b"), -/// (2, "c") -/// ] -/// ``` -pub fn indexed_foldr( - self: List, - zero: result, - with: fn(Int, a, result) -> result, -) -> result { - do_indexed_foldr(0, self, zero, with) -} - -fn do_indexed_foldr( - n: Int, - self: List, - zero: result, - with: fn(Int, a, result) -> result, -) -> result { - when self is { - [] -> zero - [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) - } -} - -test indexed_foldr_1() { - indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 -} - -test indexed_foldr_2() { - let letters = ["a", "b", "c"] - indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ - (0, "a"), (1, "b"), (2, "c"), - ] -} - -/// Reduce a list from left to right using the accumulator as left operand. -/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. -/// -/// ```aiken -/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] -/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True -/// ``` -pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { - foldl(self, zero, flip(with)) -} - -test reduce_1() { - reduce([], 0, fn(n, total) { n + total }) == 0 -} - -test reduce_2() { - reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -} - -test reduce_3() { - reduce([True, False, True], False, fn(left, right) { left || right }) == True -} - -test reduce_4() { - reduce( - [#[1], #[2], #[3]], - #[9], - fn(left, right) { bytearray.concat(left, right) }, - ) == #[9, 1, 2, 3] -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak deleted file mode 100644 index 01bfe763..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak +++ /dev/null @@ -1,833 +0,0 @@ -//// A module for working with associative lists (a.k.a `Pairs`). -//// -//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers -//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is -//// a type-alias to `List>`. -//// -//// > [!CAUTION] -//// > -//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption -//// > about the ordering of elements within the list. As a result, lookup -//// > functions do traverse the entire list when invoked. They are also not _sets_, -//// > and thus allow for duplicate keys. This is reflected in the functions used -//// > to interact with them. 
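To make the caution above concrete, here is a minimal sketch (not part of the deleted upstream file) that uses only the `get_all`, `get_first`, and `get_last` lookups documented in this module to show how a repeated key behaves:

```aiken
use aiken/collection/pairs

test duplicate_key_lookups() {
  // The key "a" appears twice; an associative list keeps both entries.
  let assoc = [Pair("a", 1), Pair("b", 2), Pair("a", 3)]
  and {
    // get_all traverses the whole list and returns every match, in order.
    pairs.get_all(assoc, "a") == [1, 3],
    // get_first returns the first match only...
    pairs.get_first(assoc, "a") == Some(1),
    // ...while get_last returns the final one.
    pairs.get_last(assoc, "a") == Some(3),
  }
}
```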
- -use aiken/builtin -use aiken/primitive/bytearray - -// ## Inspecting - -/// Get all values in the alist associated with a given key. -/// -/// ```aiken -/// pairs.get_all([], "a") == [] -/// pairs.get_all([Pair("a", 1)], "a") == [1] -/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] -/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] -/// ``` -pub fn get_all(self: Pairs, key k: key) -> List { - when self is { - [] -> [] - [Pair(k2, v), ..rest] -> - if k == k2 { - [v, ..get_all(rest, k)] - } else { - get_all(rest, k) - } - } -} - -test get_all_1() { - get_all([], "a") == [] -} - -test get_all_2() { - get_all([Pair("a", 1)], "a") == [1] -} - -test get_all_3() { - get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] -} - -test get_all_4() { - get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] -} - -test get_all_5() { - get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] -} - -/// Get the value in the alist by its key. -/// If multiple values with the same key exist, only the first one is returned. -/// -/// ```aiken -/// pairs.get_first([], "a") == None -/// pairs.get_first([Pair("a", 1)], "a") == Some(1) -/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) -/// ``` -pub fn get_first(self: Pairs, key k: key) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if k == k2 { - Some(v) - } else { - get_first(rest, k) - } - } -} - -test get_first_1() { - get_first([], "a") == None -} - -test get_first_2() { - get_first([Pair("a", 1)], "a") == Some(1) -} - -test get_first_3() { - get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -} - -test get_first_4() { - get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) -} - -test get_first_5() { - get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None -} - -/// Get the value in the alist by its key. -/// If multiple values with the same key exist, only the last one is returned. -/// -/// ```aiken -/// pairs.get_last([], "a") == None -/// pairs.get_last([Pair("a", 1)], "a") == Some(1) -/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) -/// ``` -pub fn get_last(self: Pairs, key k: key) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if k == k2 { - when get_last(rest, k) is { - None -> Some(v) - some -> some - } - } else { - get_last(rest, k) - } - } -} - -test get_last_1() { - get_last([], "a") == None -} - -test get_last_2() { - get_last([Pair("a", 1)], "a") == Some(1) -} - -test get_last_3() { - get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -} - -test get_last_4() { - get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) -} - -test get_last_5() { - get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None -} - -/// Finds all keys in the alist associated with a given value. 
-/// -/// ```aiken -/// pairs.find_all([], 1) == [] -/// pairs.find_all([Pair("a", 1)], 1) == ["a"] -/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] -/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] -/// ``` -pub fn find_all(self: Pairs, v: value) -> List { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if v == v2 { - [k2, ..find_all(rest, v)] - } else { - find_all(rest, v) - } - } -} - -test find_all_1() { - find_all([], "a") == [] -} - -test find_all_2() { - find_all([Pair("a", 14)], 14) == ["a"] -} - -test find_all_3() { - find_all([Pair("a", 14)], 42) == [] -} - -test find_all_4() { - find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] -} - -/// Finds the first key in the alist associated with a given value, if any. -/// -/// ```aiken -/// pairs.find_first([], 1) == None -/// pairs.find_first([Pair("a", 1)], 1) == Some("a") -/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") -/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") -/// ``` -pub fn find_first(self: Pairs, v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - Some(k2) - } else { - find_first(rest, v) - } - } -} - -test find_first_1() { - find_first([], "a") == None -} - -test find_first_2() { - find_first([Pair("a", 14)], 14) == Some("a") -} - -test find_first_3() { - find_first([Pair("a", 14)], 42) == None -} - -test find_first_4() { - find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") -} - -/// Finds the last key in the alist associated with a given value, if any. -/// -/// ```aiken -/// pairs.find_last([], 1) == None -/// pairs.find_last([Pair("a", 1)], 1) == Some("a") -/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") -/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") -/// ``` -pub fn find_last(self: Pairs, v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - when find_last(rest, v) is { - None -> Some(k2) - some -> some - } - } else { - find_last(rest, v) - } - } -} - -test find_last_1() { - find_last([], "a") == None -} - -test find_last_2() { - find_last([Pair("a", 14)], 14) == Some("a") -} - -test find_last_3() { - find_last([Pair("a", 14)], 42) == None -} - -test find_last_4() { - find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") -} - -/// Check if a key exists in the pairs. -/// -/// ```aiken -/// pairs.has_key([], "a") == False -/// pairs.has_key([Pair("a", 1)], "a") == True -/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True -/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True -/// ``` -pub fn has_key(self: Pairs, k: key) -> Bool { - when self is { - [] -> False - // || is lazy so this is fine - [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) - } -} - -test has_key_1() { - !has_key([], "a") -} - -test has_key_2() { - has_key([Pair("a", 14)], "a") -} - -test has_key_3() { - !has_key([Pair("a", 14)], "b") -} - -test has_key_4() { - has_key([Pair("a", 14), Pair("b", 42)], "b") -} - -test has_key_5() { - has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") -} - -/// Extract all the keys present in a given `Pairs`. 
-/// -/// ```aiken -/// pairs.keys([]) == [] -/// pairs.keys([Pair("a", 1)]) == ["a"] -/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] -/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] -/// ``` -pub fn keys(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(k, _), ..rest] -> [k, ..keys(rest)] - } -} - -test keys_1() { - keys([]) == [] -} - -test keys_2() { - keys([Pair("a", 0)]) == ["a"] -} - -test keys_3() { - keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] -} - -/// Extract all the values present in a given `Pairs`. -/// -/// ```aiken -/// pairs.values([]) == [] -/// pairs.values([Pair("a", 1)]) == [1] -/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] -/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] -/// ``` -pub fn values(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(_, v), ..rest] -> [v, ..values(rest)] - } -} - -test values_1() { - values([]) == [] -} - -test values_2() { - values([Pair("a", 1)]) == [1] -} - -test values_3() { - values([Pair("a", 1), Pair("b", 2)]) == [1, 2] -} - -test values_4() { - values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] -} - -// ## Modifying - -/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. -/// -/// ```aiken -/// pairs.delete_all([], "a") == [] -/// pairs.delete_all([Pair("a", 1)], "a") == [] -/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] -/// ``` -pub fn delete_all(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - delete_all(rest, k) - } else { - [Pair(k2, v2), ..delete_all(rest, k)] - } - } -} - -test delete_all_1() { - delete_all([], "a") == [] -} - -test delete_all_2() { - delete_all([Pair("a", 14)], "a") == [] -} - -test delete_all_3() { - let fixture = [Pair("a", 14)] - delete_all(fixture, "b") == fixture -} - -test delete_all_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_all(fixture, "a") == [Pair("b", 2)] -} - -/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. -/// Duplicate keys are not deleted. Only the **first** key found is deleted. -/// -/// ```aiken -/// pairs.delete_first([], "a") == [] -/// pairs.delete_first([Pair("a", 1)], "a") == [] -/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] -/// ``` -pub fn delete_first(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - rest - } else { - [Pair(k2, v2), ..delete_first(rest, k)] - } - } -} - -test delete_first_1() { - delete_first([], "a") == [] -} - -test delete_first_2() { - delete_first([Pair("a", 14)], "a") == [] -} - -test delete_first_3() { - let fixture = [Pair("a", 14)] - delete_first(fixture, "b") == fixture -} - -test delete_first_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] -} - -/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. -/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
-/// -/// ```aiken -/// pairs.delete_last([], "a") == [] -/// pairs.delete_last([Pair("a", 1)], "a") == [] -/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] -/// ``` -pub fn delete_last(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - let tail = delete_last(rest, k) - if tail == rest { - rest - } else { - [Pair(k2, v2), ..tail] - } - } else { - [Pair(k2, v2), ..delete_last(rest, k)] - } - } -} - -test delete_last_1() { - delete_last([], "a") == [] -} - -test delete_last_2() { - delete_last([Pair("a", 14)], "a") == [] -} - -test delete_last_3() { - let fixture = [Pair("a", 14)] - delete_last(fixture, "b") == fixture -} - -test delete_last_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] -} - -/// Insert a value in the `Pairs` at a given key. If the key already exists, -/// the value is added in front. -/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. -/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let result = -/// [] -/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) -/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) -/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) -/// -/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] -/// ``` -pub fn insert_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..self] - } else { - [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] - } - } - } -} - -test insert_by_ascending_key_1() { - let m = - [] - |> insert_by_ascending_key("foo", 42, bytearray.compare) - |> insert_by_ascending_key("foo", 14, bytearray.compare) - - m == [Pair("foo", 14), Pair("foo", 42)] -} - -test insert_by_ascending_key_2() { - let m = - [] - |> insert_by_ascending_key("foo", 42, bytearray.compare) - |> insert_by_ascending_key("bar", 14, bytearray.compare) - |> insert_by_ascending_key("baz", 1337, bytearray.compare) - - m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] -} - -/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies -/// how to combine two values on a key conflict. -/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. 
-/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let add_integer = fn(x, y) { x + y } -/// -/// let result = -/// [] -/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) -/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) -/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) -/// -/// result == [Pair("bar", 2), Pair("foo", 4)] -/// ``` -pub fn insert_with_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, - with: fn(value, value) -> value, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, with(v, v2)), ..rest] - } else { - [ - Pair(k2, v2), - ..insert_with_by_ascending_key(rest, k, v, compare, with) - ] - } - } - } -} - -test insert_with_by_ascending_key_1() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let m = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 42, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 14, - compare_un_b_data, - builtin.add_integer, - ) - - m == [Pair("foo" |> builtin.b_data, 56)] -} - -test insert_with_by_ascending_key_2() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let m = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 42, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "bar" |> builtin.b_data, - 14, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "baz" |> builtin.b_data, - 1337, - compare_un_b_data, - builtin.add_integer, - ) - - m == [ - Pair("bar" |> builtin.b_data, 14), - Pair("baz" |> builtin.b_data, 1337), - Pair("foo" |> builtin.b_data, 42), - ] -} - -test insert_with_by_ascending_key_3() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let result = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 1, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "bar" |> builtin.b_data, - 2, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 3, - compare_un_b_data, - builtin.add_integer, - ) - - result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] -} - -/// Apply a function to all key-value pairs in a alist, replacing the values. -/// -/// ```aiken -/// let fixture = [Pair("a", 100), Pair("b", 200)] -/// -/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] -/// ``` -pub fn map( - self: Pairs, - with: fn(key, value) -> result, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] - } -} - -test map_1() { - let fixture = [Pair("a", 1), Pair("b", 2)] - - map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] -} - -test map_2() { - let fixture = [Pair("a", 1), Pair("b", 2)] - - map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] -} - -/// Insert a value in the `Pairs` at a given key. If the key already exists, -/// its value is replaced. 
-/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. -/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let result = -/// [] -/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) -/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) -/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) -/// -/// result == [Pair("bar", 2), Pair("foo", 3)] -/// ``` -pub fn repsert_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..rest] - } else { - [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] - } - } - } -} - -test repsert_by_ascending_key_1() { - let m = - [] - |> repsert_by_ascending_key("foo", 42, bytearray.compare) - |> repsert_by_ascending_key("foo", 14, bytearray.compare) - - m == [Pair("foo", 14)] -} - -test repsert_by_ascending_key_2() { - let m = - [] - |> repsert_by_ascending_key("foo", 42, bytearray.compare) - |> repsert_by_ascending_key("bar", 14, bytearray.compare) - |> repsert_by_ascending_key("baz", 1337, bytearray.compare) - - m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] -} - -// ## Transforming - -/// Fold over the key-value pairs in a pairs. The fold direction follows keys -/// in ascending order and is done from left-to-right. -/// -/// ```aiken -/// let fixture = [ -/// Pair(1, 100), -/// Pair(2, 200), -/// Pair(3, 300), -/// ] -/// -/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -/// ``` -pub fn foldl( - self: Pairs, - zero: result, - with: fn(key, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) - } -} - -test foldl_1() { - foldl([], 14, fn(_, _, _) { 42 }) == 14 -} - -test foldl_2() { - foldl( - [Pair("a", 42), Pair("b", 14)], - zero: 0, - with: fn(_, v, total) { v + total }, - ) == 56 -} - -/// Fold over the key-value pairs in a Pairs. The fold direction follows the -/// order of elements in the Pairs and is done from right-to-left. 
-/// -/// ```aiken -/// let fixture = [ -/// Pair(1, 100), -/// Pair(2, 200), -/// Pair(3, 300), -/// ] -/// -/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -/// ``` -pub fn foldr( - self: Pairs, - zero: result, - with: fn(key, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) - } -} - -test foldr_1() { - foldr([], 14, fn(_, _, _) { 42 }) == 14 -} - -test foldr_2() { - foldr( - [Pair("a", 42), Pair("b", 14)], - zero: 0, - with: fn(_, v, total) { v + total }, - ) == 56 -} - -test foldr_3() { - let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] - - foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak deleted file mode 100644 index 46a7dda5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak +++ /dev/null @@ -1,147 +0,0 @@ -use aiken/builtin - -pub type VerificationKey = - ByteArray - -pub type VerificationKeyHash = - Hash - -pub type Script = - ByteArray - -pub type ScriptHash = - Hash - -pub type Signature = - ByteArray - -pub type DataHash = - Hash - -/// A `Hash` is nothing more than a `ByteArray`, but it carries extra -/// information for readability. -/// -/// On-chain, any hash digest value is represented as a plain 'ByteArray'. -/// Though in practice, hashes come from different sources and have -/// different semantics. -/// -/// Hence, while this type-alias doesn't provide any strong type-guarantees, -/// it helps writing functions signatures with more meaningful types than mere -/// 'ByteArray'. -/// -/// Compare for example: -/// -/// ```aiken -/// pub type Credential { -/// VerificationKey(ByteArray) -/// Script(ByteArray) -/// } -/// ``` -/// -/// with -/// -/// ```aiken -/// pub type Credential { -/// VerificationKey(Hash) -/// Script(Hash) -/// } -/// ``` -/// -/// Both are strictly equivalent, but the second reads much better. -pub type Hash = - ByteArray - -// ## Hashing - -/// A blake2b-224 hash algorithm. -/// -/// Typically used for: -/// -/// - [`Credential`](../cardano/address.html#Credential) -/// - [`PolicyId`](../cardano/assets.html#PolicyId) -/// -/// Note: there's no function to calculate blake2b-224 hash digests on-chain. -pub opaque type Blake2b_224 { - Blake2b_224 -} - -/// Compute the blake2b-224 hash digest (28 bytes) of some data. -pub fn blake2b_224(bytes: ByteArray) -> Hash { - builtin.blake2b_224(bytes) -} - -/// A blake2b-256 hash algorithm. -/// -/// Typically used for: -/// -/// - [`TransactionId`](../cardano/transaction.html#TransactionId) -pub opaque type Blake2b_256 { - Blake2b_256 -} - -/// Compute the blake2b-256 hash digest (32 bytes) of some data. -pub fn blake2b_256(bytes: ByteArray) -> Hash { - builtin.blake2b_256(bytes) -} - -/// A Keccak-256 hash algorithm. -pub opaque type Keccak_256 { - Keccak_256 -} - -/// Compute the keccak-256 hash digest (32 bytes) of some data. -pub fn keccak_256(bytes: ByteArray) -> Hash { - builtin.keccak_256(bytes) -} - -/// A SHA2-256 hash algorithm. -pub opaque type Sha2_256 { - Sha2_256 -} - -/// Compute the sha2-256 hash digest (32 bytes) of some data. -pub fn sha2_256(bytes: ByteArray) -> Hash { - builtin.sha2_256(bytes) -} - -/// A SHA3-256 hash algorithm. 
-pub opaque type Sha3_256 { - Sha3_256 -} - -/// Compute the sha3-256 hash digest (32 bytes) of some data. -pub fn sha3_256(bytes: ByteArray) -> Hash { - builtin.sha3_256(bytes) -} - -// ## Verifying signatures - -/// Verify an ECDCA signature (over secp256k1) using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_ecdsa_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_ecdsa_secp256k1_signature(key, msg, sig) -} - -/// Verify an Ed25519 signature using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_ed25519_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_ed25519_signature(key, msg, sig) -} - -/// Verify a Schnorr signature (over secp256k1) using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_schnorr_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_schnorr_secp256k1_signature(key, msg, sig) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak deleted file mode 100644 index d7b4cc19..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak +++ /dev/null @@ -1,115 +0,0 @@ -//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve. -//// -//// The key functionalities provided by this module include: -//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. -//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic. -//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. -//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction. -//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography. -//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols. -//// -//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group. - -use aiken/builtin -use aiken/crypto/bls12_381/scalar.{Scalar} - -/// The compressed generator of the G1 group of the BLS12-381 curve. -/// This constant represents a fixed base point on the elliptic curve. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. 
-pub const generator: G1Element = - #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" - -test generator_1() { - builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" -} - -/// Represents the additive identity (zero) in the G1 group. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const zero: G1Element = - #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - -test zero_1() { - and { - zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), - zero == builtin.bls12_381_g1_scalar_mul( - scalar.field_prime, - #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", - ), - zero == builtin.bls12_381_g1_scalar_mul( - scalar.field_prime, - #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", - ), - } -} - -/// Compresses a point in the G1 group into a more compact representation. -/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. -/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. -/// -/// > [!NOTE] -/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): -/// > -/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: -/// > -/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. -/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. -/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. -pub fn compress(point) { - builtin.bls12_381_g1_compress(point) -} - -test compress_1() { - compress( - #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", - ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" -} - -/// Decompresses a point in the G1 group from its compressed form. -pub fn decompress(bytes) { - builtin.bls12_381_g1_uncompress(bytes) -} - -pub fn equal(left, right) { - builtin.bls12_381_g1_equal(left, right) -} - -test equal_1() { - equal(generator, generator) -} - -/// Adds two points in the G1 group. -pub fn add(left, right) { - builtin.bls12_381_g1_add(left, right) -} - -/// Subtracts one point in the G1 group from another. -pub fn sub(left, right) { - builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) -} - -test sub_1() { - generator == sub(add(generator, generator), generator) -} - -/// Exponentiates a point in the G1 group with a `scalar`. -/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
-pub fn scale(point, e: Scalar) { - builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) -} - -test scale_1() { - expect Some(x) = scalar.new(2) - builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) -} - -/// Hashes arbitrary data to a point in the G1 group. -/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. -pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { - builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) -} - -test hash_to_group_1() { - hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak deleted file mode 100644 index 7a2013db..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak +++ /dev/null @@ -1,124 +0,0 @@ -//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. -//// -//// The key functionalities provided by this module include: -//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. -//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. -//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. -//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. -//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. -//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. -//// -//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. - -use aiken/builtin -use aiken/crypto/bls12_381/scalar.{Scalar} - -/// The compressed generator of the G2 group of the BLS12-381 curve. -/// This constant represents a fixed base point on the elliptic curve. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const generator: G2Element = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - -test generator_1() { - builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" -} - -/// Represents the additive identity (zero) in the G2 group. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. 
-/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const zero: G2Element = - #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - -test zero_1() { - and { - zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), - zero == builtin.bls12_381_g2_scalar_mul( - scalar.field_prime, - #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", - ), - zero == builtin.bls12_381_g2_scalar_mul( - scalar.field_prime, - #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", - ), - } -} - -/// Compresses a point in the G2 group into a more compact representation. -/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. -/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. -/// -/// > [!NOTE] -/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): -/// > -/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: -/// > -/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. -/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. -/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. -pub fn compress(point) { - builtin.bls12_381_g2_compress(point) -} - -test compress_1() { - let g2 = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" -} - -/// Decompresses a point in the G2 group from its compressed form. -pub fn decompress(bytes) { - builtin.bls12_381_g2_uncompress(bytes) -} - -test decompress_1() { - let g2 = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - generator == g2 -} - -pub fn equal(left, right) { - builtin.bls12_381_g2_equal(left, right) -} - -test equal_1() { - equal( - generator, - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", - ) -} - -/// Adds two points in the G2 group. 
-pub fn add(left, right) { - builtin.bls12_381_g2_add(left, right) -} - -/// Subtracts one point in the G2 group from another. -pub fn sub(left, right) { - builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) -} - -test sub_1() { - generator == sub(add(generator, generator), generator) -} - -/// Exponentiates a point in the G2 group with a `scalar`. -/// This operation is equivalent to the repeated addition of the point with itself `e` times. -pub fn scale(point, e: Scalar) { - builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) -} - -test scale_1() { - expect Some(x) = scalar.new(2) - builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) -} - -/// Hashes arbitrary data to a point in the G2 group. -/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. -pub fn hash_to_group(bytes, domain_separation_tag) { - builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) -} - -test hash_to_group_1() { - hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak deleted file mode 100644 index cf028ad7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak +++ /dev/null @@ -1,255 +0,0 @@ -//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. -//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. -//// -//// More explicitly, we have the identity: -//// -//// ```aiken -//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 -//// ``` -//// -//// where, -//// -//// ```aiken -//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 -//// ``` -//// -//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. -//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. - -use aiken/builtin - -/// The prime number defining the scalar field of the BLS12-381 curve. -pub const field_prime = - 52435875175126190479447740508185965837690552500527637822603658699938581184513 - -/// Represents the additive identity (zero) in the `Scalar` field. -pub const zero: Scalar = Scalar(0) - -/// Represents the multiplicative identity (one) in the `Scalar` field. -pub const one: Scalar = Scalar(1) - -/// Opaque type representing an element of the finite field `Scalar`. -pub opaque type Scalar { - integer: Int, -} - -// ## Constructing - -/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. -/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
-pub fn new(n: Int) -> Option { - if n >= 0 && n < field_prime { - Some(Scalar(n)) - } else { - None - } -} - -test new_1() { - and { - new(-1) == None, - new(field_prime) == None, - new(834884848) == Some(Scalar(834884848)), - } -} - -/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`. -pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option { - new(builtin.bytearray_to_integer(True, bytes)) -} - -test from_bytearray_big_endian_1() { - from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960)) -} - -/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`. -pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option { - new(builtin.bytearray_to_integer(False, bytes)) -} - -test from_bytearray_little_endian_1() { - from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535)) -} - -// ## Modifying - -/// Exponentiates an `Scalar` element by a non-negative integer exponent, using repeated squaring. -/// Note that this function returns `scalar.zero` for negative exponents. -/// A dedicated builtin function for this is in the making, see CIP 109. -pub fn scale(self: Scalar, e: Int) -> Scalar { - if e < 0 { - zero - } else if e == 0 { - one - } else if e % 2 == 0 { - scale(mul(self, self), e / 2) - } else { - mul(self, scale(mul(self, self), ( e - 1 ) / 2)) - } -} - -test scale_1() { - and { - scale(Scalar(834884848), -1) == zero, - scale(Scalar(834884848), 0) == one, - scale(Scalar(834884848), 1) == Scalar(834884848), - scale(Scalar(834884848), 2) == Scalar(697032709419983104), - scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192), - scale(Scalar(field_prime - 4), 200) == Scalar( - 12843927705572658539565969578937286576443167978938369866871449552629978143484, - ), - } -} - -/// A faster version of `scale` for the case where the exponent is a power of two. -/// That is, the exponent `e = 2^k` for some non-negative integer `k`. Which is used alot in zk-SNARKs. -pub fn scale2(self: Scalar, k: Int) -> Scalar { - if k < 0 { - zero - } else { - do_scale2(self, k) - } -} - -fn do_scale2(self: Scalar, k: Int) -> Scalar { - if k == 0 { - self - } else { - do_scale2(mul(self, self), k - 1) - } -} - -test scale2_1() { - and { - scale2(Scalar(834884848), -1) == zero, - scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1), - scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2), - scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4), - scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8), - scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16), - } -} - -// ## Combining - -/// Adds two `Scalar` elements, ensuring the result stays within the finite field range. -pub fn add(left: Scalar, right: Scalar) -> Scalar { - Scalar(( left.integer + right.integer ) % field_prime) -} - -test add_1() { - and { - (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?, - (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?, - (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?, - } -} - -/// Divides one `Scalar` element by another, returning `None` if the divisor is zero. 
-pub fn div(left: Scalar, right: Scalar) -> Option { - if right == zero { - None - } else { - Some(mul(left, scale(right, field_prime - 2))) - } -} - -test div_1() { - and { - div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), - div(Scalar(834884848), zero) == None, - div(Scalar(field_prime - 1), Scalar(2)) == Some( - Scalar( - 26217937587563095239723870254092982918845276250263818911301829349969290592256, - ), - ), - } -} - -/// Multiplies two `Scalar` elements, with the result constrained within the finite field. -pub fn mul(left: Scalar, right: Scalar) -> Scalar { - Scalar(left.integer * right.integer % field_prime) -} - -test mul_1() { - and { - mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), - mul(zero, Scalar(834884848)) == zero, - mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( - 52435875175126190479447740508185965837690552500527637822603658699938581184511, - ), - } -} - -/// Calculates the additive inverse of a `Scalar` element. -pub fn neg(self: Scalar) -> Scalar { - // this is basicly sub(zero, self), but more efficient as it saves one modulo operation - if self.integer == 0 { - self - } else { - Scalar(field_prime - self.integer) - } -} - -test neg_1() { - and { - neg(Scalar(834884848)) == Scalar( - 52435875175126190479447740508185965837690552500527637822603658699937746299665, - ), - neg(zero) == zero, - neg(one) == Scalar(field_prime - 1), - } -} - -/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. -pub fn recip(self: Scalar) -> Option { - div(one, self) -} - -test recip_1() { - and { - recip(Scalar(834884848)) == Some( - Scalar( - 35891248691642227249400403463796410930702563777316955162085759263735363466421, - ), - ), - recip(zero) == None, - } -} - -/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. -pub fn sub(left: Scalar, right: Scalar) -> Scalar { - Scalar(( left.integer - right.integer ) % field_prime) -} - -test sub_1() { - and { - (sub(Scalar(834884848), Scalar(834884848)) == zero)?, - (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, - } -} - -// ## Transforming - -/// Converts a `Scalar` element back to its integer representation. -pub fn to_int(self: Scalar) -> Int { - self.integer -} - -test to_int_1() { - to_int(Scalar(834884848)) == 834884848 -} - -/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. -pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { - builtin.integer_to_bytearray(True, size, self.integer) -} - -/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. -pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { - builtin.integer_to_bytearray(False, size, self.integer) -} - -test to_bytearray_1() { - to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak deleted file mode 100644 index 96179f9b..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak +++ /dev/null @@ -1,680 +0,0 @@ -//// In a eUTxO-based blockchain like Cardano, the management of time can be -//// finicky. 
-//// -//// Indeed, in order to maintain a complete determinism in the execution of -//// scripts, it is impossible to introduce a notion of _"current time"_ since -//// the execution would then depend on factor that are external to the -//// transaction itself: the ineluctable stream of time flowing in our universe. -//// -//// Hence, to work around that, we typically define time intervals, which gives -//// window -- a.k.a intervals -- within which the transaction can be executed. -//// From within a script, it isn't possible to know when exactly the script is -//// executed, but we can reason about the interval bounds to validate pieces of -//// logic. - -// TODO: Replace 'Int' with a generic 'a' once we have comparable traits. - -/// A type to represent intervals of values. Interval are inhabited by a type -/// `a` which is useful for non-infinite intervals that have a finite -/// lower-bound and/or upper-bound. -/// -/// This allows to represent all kind of mathematical intervals: -/// -/// ```aiken -/// // [1; 10] -/// let i0: Interval = Interval -/// { lower_bound: -/// IntervalBound { bound_type: Finite(1), is_inclusive: True } -/// , upper_bound: -/// IntervalBound { bound_type: Finite(10), is_inclusive: True } -/// } -/// ``` -/// -/// ```aiken -/// // (20; infinity) -/// let i1: Interval = Interval -/// { lower_bound: -/// IntervalBound { bound_type: Finite(20), is_inclusive: False } -/// , upper_bound: -/// IntervalBound { bound_type: PositiveInfinity, is_inclusive: False } -/// } -/// ``` -pub type Interval { - lower_bound: IntervalBound, - upper_bound: IntervalBound, -} - -/// An interval bound, either inclusive or exclusive. -pub type IntervalBound { - bound_type: IntervalBoundType, - is_inclusive: Bool, -} - -/// A type of interval bound. Where finite, a value of type `a` must be -/// provided. `a` will typically be an `Int`, representing a number of seconds or -/// milliseconds. -pub type IntervalBoundType { - NegativeInfinity - Finite(a) - PositiveInfinity -} - -// ## Constructing - -/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF) -/// -/// ```aiken -/// interval.after(10) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, -/// } -/// ``` -pub fn after(lower_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF) -/// -/// ```aiken -/// interval.entirely_after(10) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, -/// } -/// ``` -pub fn entirely_after(lower_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: False, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values before (and including) the given bound. 
i.e (-INF, upper_bound] -/// -/// ```aiken -/// interval.before(100) == Interval { -/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, -/// } -/// ``` -pub fn before(upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) -/// -/// ```aiken -/// interval.entirely_before(10) == Interval { -/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// } -/// ``` -pub fn entirely_before(upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: False, - }, - } -} - -/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] -/// -/// ```aiken -/// interval.between(10, 100) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, -/// } -/// ``` -pub fn between(lower_bound: a, upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) -/// -/// ```aiken -/// interval.entirely_between(10, 100) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, -/// } -/// ``` -pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: False, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: False, - }, - } -} - -/// Create an empty interval that contains no value. -/// -/// ```aiken -/// interval.contains(empty, 0) == False -/// interval.contains(empty, 1000) == False -/// ``` -pub const empty: Interval = - Interval { - lower_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - } - -/// Create an interval that contains every possible values. i.e. (-INF, +INF) -/// -/// ```aiken -/// interval.contains(everything, 0) == True -/// interval.contains(everything, 1000) == True -/// ``` -pub const everything: Interval = - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } - -// ## Inspecting - -/// Checks whether an element is contained within the interval. 
-/// -/// ```aiken -/// let iv = -/// Interval { -/// lower_bound: IntervalBound { -/// bound_type: Finite(14), -/// is_inclusive: True -/// }, -/// upper_bound: IntervalBound { -/// bound_type: Finite(42), -/// is_inclusive: False -/// }, -/// } -/// -/// interval.contains(iv, 25) == True -/// interval.contains(iv, 0) == False -/// interval.contains(iv, 14) == True -/// interval.contains(iv, 42) == False -/// ``` -pub fn contains(self: Interval, elem: Int) -> Bool { - let is_greater_than_lower_bound = - when self.lower_bound.bound_type is { - NegativeInfinity -> True - Finite(lower_bound) -> - if self.lower_bound.is_inclusive { - elem >= lower_bound - } else { - elem > lower_bound - } - PositiveInfinity -> False - } - - let is_smaller_than_upper_bound = - when self.upper_bound.bound_type is { - NegativeInfinity -> False - Finite(upper_bound) -> - if self.upper_bound.is_inclusive { - elem <= upper_bound - } else { - elem < upper_bound - } - PositiveInfinity -> True - } - - is_greater_than_lower_bound && is_smaller_than_upper_bound -} - -test contains_1() { - let iv = everything - contains(iv, 14) -} - -test contains_2() { - let iv = entirely_before(15) - contains(iv, 14) -} - -test contains_3() { - let iv = before(14) - contains(iv, 14) -} - -test contains_4() { - let iv = entirely_before(14) - !contains(iv, 14) -} - -test contains_5() { - let iv = entirely_after(13) - contains(iv, 14) -} - -test contains_6() { - let iv = after(14) - contains(iv, 14) -} - -test contains_7() { - let iv = entirely_after(14) - !contains(iv, 14) -} - -test contains_8() { - let iv = between(42, 1337) - !contains(iv, 14) -} - -test contains_9() { - let iv = between(0, 42) - contains(iv, 14) -} - -test contains_10() { - let iv = between(0, 42) - contains(iv, 42) -} - -test contains_11() { - let iv = entirely_between(0, 42) - !contains(iv, 0) -} - -test contains_12() { - let iv = empty - !contains(iv, 14) -} - -/// Tells whether an interval is empty; i.e. that is contains no value. -/// -/// ```aiken -/// let iv1 = interval.empty -/// -/// let iv2 = Interval { -/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// } -/// -/// let iv3 = Interval { -/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, -/// } -/// -/// interval.is_empty(iv1) == True -/// interval.is_empty(iv2) == True -/// interval.is_empty(iv3) == False -/// -/// // Note: Two empty intervals are not necessarily equal. 
-/// iv1 != iv2 -/// ``` -pub fn is_empty(self: Interval) -> Bool { - let ordering = - compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) - - when ordering is { - Greater -> True - Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) - Less -> { - let is_open_interval = - !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive - if is_open_interval { - when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { - (Finite(lower_bound), Finite(upper_bound)) -> - lower_bound + 1 == upper_bound - _ -> False - } - } else { - False - } - } - } -} - -/// Check whether the interval is entirely after the point "a" -/// -/// ```aiken -/// interval.is_entirely_after(interval.after(10), 5) == True -/// interval.is_entirely_after(interval.after(10), 10) == False -/// interval.is_entirely_after(interval.after(10), 15) == False -/// interval.is_entirely_after(interval.between(10, 20), 30) == False -/// interval.is_entirely_after(interval.between(10, 20), 5) == True -pub fn is_entirely_after(self: Interval, point: Int) -> Bool { - when self.lower_bound.bound_type is { - Finite(low) -> - if self.lower_bound.is_inclusive { - point < low - } else { - point <= low - } - _ -> False - } -} - -test is_entirely_after_1() { - is_entirely_after(after(10), 5) -} - -test is_entirely_after_2() { - !is_entirely_after(after(10), 10) -} - -test is_entirely_after_3() { - !is_entirely_after(after(10), 15) -} - -test is_entirely_after_4() { - !is_entirely_after(between(10, 20), 30) -} - -test is_entirely_after_5() { - is_entirely_after(between(10, 20), 5) -} - -test is_entirely_after_6() { - is_entirely_after(entirely_after(10), 10) -} - -test is_entirely_after_7() { - !is_entirely_after(before(10), 5) -} - -test is_entirely_after_8() { - !is_entirely_after(before(10), 15) -} - -test is_entirely_after_9() { - !is_entirely_after(entirely_before(10), 5) -} - -/// Check whether the interval is entirely before the point "a" -/// -/// ```aiken -/// interval.is_entirely_before(interval.before(10), 15) == True -/// interval.is_entirely_before(interval.before(10), 10) == False -/// interval.is_entirely_before(interval.before(10), 5) == False -/// interval.is_entirely_before(interval.between(10, 20), 30) == True -/// interval.is_entirely_before(interval.between(10, 20), 5) == False -pub fn is_entirely_before(self: Interval, point: Int) -> Bool { - when self.upper_bound.bound_type is { - Finite(hi) -> - if self.upper_bound.is_inclusive { - hi < point - } else { - hi <= point - } - _ -> False - } -} - -test is_entirely_before_1() { - is_entirely_before(before(10), 15) -} - -test is_entirely_before_2() { - !is_entirely_before(before(10), 10) -} - -test is_entirely_before_3() { - !is_entirely_before(before(10), 5) -} - -test is_entirely_before_4() { - is_entirely_before(between(10, 20), 30) -} - -test is_entirely_before_5() { - !is_entirely_before(between(10, 20), 5) -} - -test is_entirely_before_6() { - is_entirely_before(entirely_before(10), 10) -} - -test is_entirely_before_7() { - !is_entirely_before(after(10), 15) -} - -test is_entirely_before_8() { - !is_entirely_before(after(10), 5) -} - -test is_entirely_before_9() { - !is_entirely_before(entirely_after(10), 5) -} - -// ## Combining - -/// Computes the smallest interval containing the two given intervals, if any -/// -/// ```aiken -/// let iv1 = between(0, 10) -/// let iv2 = between(2, 14) -/// hull(iv1, iv2) == between(0, 14) -/// -/// let iv1 = between(5, 10) -/// let iv2 = before(0) -/// hull(iv1, iv2) == 
before(10) -/// -/// let iv1 = entirely_after(0) -/// let iv2 = between(10, 42) -/// hull(iv1, iv2) = entirely_after(0) -/// ``` -pub fn hull(iv1: Interval, iv2: Interval) -> Interval { - Interval { - lower_bound: min(iv1.lower_bound, iv2.lower_bound), - upper_bound: max(iv1.upper_bound, iv2.upper_bound), - } -} - -test hull_1() { - let iv1 = between(0, 10) - let iv2 = between(2, 14) - hull(iv1, iv2) == between(0, 14) -} - -test hull_2() { - let iv1 = between(5, 10) - let iv2 = before(0) - hull(iv1, iv2) == before(10) -} - -test hull_3() { - let iv1 = entirely_after(0) - let iv2 = between(10, 42) - hull(iv1, iv2) == entirely_after(0) -} - -/// Computes the largest interval contains in the two given intervals, if any. -/// -/// ```aiken -/// let iv1 = interval.between(0, 10) -/// let iv2 = interval.between(2, 14) -/// interval.intersection(iv1, iv2) == interval.between(2, 10) -/// -/// let iv1 = interval.entirely_before(10) -/// let iv2 = interval.entirely_after(0) -/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10) -/// -/// let iv1 = interval.between(0, 1) -/// let iv2 = interval.between(2, 3) -/// interval.intersection(iv1, iv2) |> interval.is_empty -/// ``` -pub fn intersection(iv1: Interval, iv2: Interval) -> Interval { - Interval { - lower_bound: max(iv1.lower_bound, iv2.lower_bound), - upper_bound: min(iv1.upper_bound, iv2.upper_bound), - } -} - -test intersection_1() { - let iv1 = between(0, 10) - let iv2 = between(2, 14) - intersection(iv1, iv2) == between(2, 10) -} - -test intersection_2() { - let iv1 = between(0, 1) - let iv2 = between(1, 2) - intersection(iv1, iv2) == between(1, 1) -} - -test intersection_3() { - let iv1 = between(0, 1) - let iv2 = entirely_between(1, 2) - intersection(iv1, iv2) - |> is_empty -} - -test intersection_4() { - let iv1 = entirely_between(0, 1) - let iv2 = entirely_between(1, 2) - intersection(iv1, iv2) - |> is_empty -} - -test intersection_5() { - let iv1 = between(0, 10) - let iv2 = before(4) - intersection(iv1, iv2) == between(0, 4) -} - -test intersection_6() { - let iv1 = entirely_before(10) - let iv2 = entirely_after(0) - intersection(iv1, iv2) == entirely_between(0, 10) -} - -/// Return the highest bound of the two. -/// -/// ```aiken -/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } -/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } -/// -/// interval.max(ib1, ib2) == ib2 -/// ``` -pub fn max( - left: IntervalBound, - right: IntervalBound, -) -> IntervalBound { - when compare_bound(left, right) is { - Less -> right - Equal -> left - Greater -> left - } -} - -/// Return the smallest bound of the two. 
-/// -/// ```aiken -/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } -/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } -/// -/// interval.min(ib1, ib2) == ib1 -/// ``` -pub fn min( - left: IntervalBound, - right: IntervalBound, -) -> IntervalBound { - when compare_bound(left, right) is { - Less -> left - Equal -> left - Greater -> right - } -} - -fn compare_bound( - left: IntervalBound, - right: IntervalBound, -) -> Ordering { - when compare_bound_type(left.bound_type, right.bound_type) is { - Less -> Less - Greater -> Greater - Equal -> - if left.is_inclusive == right.is_inclusive { - Equal - } else if left.is_inclusive { - Greater - } else { - Less - } - } -} - -fn compare_bound_type( - left: IntervalBoundType, - right: IntervalBoundType, -) -> Ordering { - when left is { - NegativeInfinity -> - when right is { - NegativeInfinity -> Equal - _ -> Less - } - PositiveInfinity -> - when right is { - PositiveInfinity -> Equal - _ -> Greater - } - Finite(left) -> - when right is { - NegativeInfinity -> Greater - PositiveInfinity -> Less - Finite(right) -> - if left < right { - Less - } else if left == right { - Equal - } else { - Greater - } - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak deleted file mode 100644 index dd575e7a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math.ak +++ /dev/null @@ -1,424 +0,0 @@ -//// This module contains some basic Math utilities. Standard arithmetic -//// operations on integers are available through native operators: -//// -//// Operator | Description -//// --- | :--- -//// `+` | Arithmetic sum -//// `-` | Arithmetic difference -//// `/` | Whole division -//// `*` | Arithmetic multiplication -//// `%` | Remainder by whole division -//// -//// Here are a few examples: -//// -//// ```aiken -//// 1 + 1 // 2 -//// 10 - 2 // 8 -//// 40 / 14 // 2 -//// 3 * 4 // 12 -//// 10 % 3 // 1 - -use aiken/builtin - -/// Calculate the absolute value of an integer. -/// -/// ```aiken -/// math.abs(-42) == 42 -/// math.abs(14) == 14 -/// ``` -pub fn abs(self: Int) -> Int { - if self < 0 { - 0 - self - } else { - self - } -} - -test abs_1() { - abs(14) == 14 -} - -test abs_2() { - abs(-42) == 42 -} - -/// Restrict the value of an integer between two min and max bounds -/// -/// ```aiken -/// math.clamp(14, min: 0, max: 10) == 10 -/// ``` -pub fn clamp(self: Int, min: Int, max: Int) -> Int { - if self < min { - min - } else { - if self > max { - max - } else { - self - } - } -} - -test clamp_1() { - clamp(14, min: 0, max: 10) == 10 -} - -test clamp_2() { - clamp(7, min: 0, max: 10) == 7 -} - -test clamp_3() { - clamp(7, min: 10, max: 100) == 10 -} - -/// The greatest common divisor of two integers. -/// -/// ```aiken -/// math.gcd(42, 14) == 14 -/// math.gcd(14, 42) == 14 -/// math.gcd(0, 0) == 0 -/// ``` -pub fn gcd(x: Int, y: Int) -> Int { - abs(do_gcd(x, y)) -} - -fn do_gcd(x: Int, y: Int) -> Int { - when y is { - 0 -> x - _ -> do_gcd(y, x % y) - } -} - -test gcd_test1() { - gcd(10, 300) == 10 -} - -test gcd_test2() { - gcd(-10, 300) == 10 -} - -test gcd_test3() { - gcd(42, 14) == 14 -} - -/// Checks if an integer has a given integer square root x. -/// The check has constant time complexity $O(1)$. 
-/// -/// ```aiken -/// math.is_sqrt(0, 0) -/// math.is_sqrt(25, 5) -/// !math.is_sqrt(25, -5) -/// math.is_sqrt(44203, 210) -/// ``` -pub fn is_sqrt(self: Int, x: Int) -> Bool { - x * x <= self && ( x + 1 ) * ( x + 1 ) > self -} - -test is_sqrt1() { - is_sqrt(44203, 210) -} - -test is_sqrt2() { - is_sqrt(975461057789971041, 987654321) -} - -/// The logarithm in base `b` of an element using integer divisions. -/// -/// ```aiken -/// math.log(10, base: 2) == 3 -/// math.log(42, base: 2) == 5 -/// math.log(42, base: 3) == 3 -/// math.log(5, base: 0) == 0 -/// math.log(4, base: 4) == 1 -/// math.log(4, base: 42) == 0 -/// ``` -pub fn log(self: Int, base: Int) -> Int { - if base <= 0 { - 0 - } else if self == base { - 1 - } else if self < base { - 0 - } else { - 1 + log(self / base, base) - } -} - -test log_10_2() { - log(10, base: 2) == 3 -} - -test log_42_2() { - log(42, base: 2) == 5 -} - -test log_42_3() { - log(42, base: 3) == 3 -} - -test log_5_0() { - log(5, base: 0) == 0 -} - -test log_4_4() { - log(4, base: 4) == 1 -} - -test log_4_43() { - log(4, base: 43) == 0 -} - -/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. -/// -/// ```aiken -/// math.log2(1) == 0 -/// math.log2(2) == 1 -/// math.log2(3) == 1 -/// math.log2(4) == 2 -/// math.log2(256) == 8 -/// math.log2(257) == 8 -/// math.log2(511) == 8 -/// math.log2(1025) == 10 -/// ``` -pub fn log2(x: Int) -> Int { - expect x > 0 - let s = builtin.integer_to_bytearray(True, 0, x) - let len = builtin.length_of_bytearray(s) - let b = builtin.index_bytearray(s, 0) - len * 8 - if b < 2 { - 8 - } else if b < 4 { - 7 - } else if b < 8 { - 6 - } else if b < 16 { - 5 - } else if b < 32 { - 4 - } else if b < 64 { - 3 - } else if b < 128 { - 2 - } else { - 1 - } -} - -test log2_matrix() { - and { - log2(1) == 0, - log2(2) == 1, - log2(3) == 1, - log2(4) == 2, - log2(256) == 8, - log2(257) == 8, - log2(511) == 8, - log2(1025) == 10, - } -} - -/// Return the maximum of two integers. -pub fn max(a: Int, b: Int) -> Int { - if a > b { - a - } else { - b - } -} - -test max_1() { - max(0, 0) == 0 -} - -test max_2() { - max(14, 42) == 42 -} - -test max_3() { - max(42, 14) == 42 -} - -/// Return the minimum of two integers. -pub fn min(a: Int, b: Int) -> Int { - if a > b { - b - } else { - a - } -} - -test min_1() { - min(0, 0) == 0 -} - -test min_2() { - min(14, 42) == 14 -} - -test min_3() { - min(42, 14) == 14 -} - -/// Calculates a number to the power of `e` using the exponentiation by -/// squaring method. -/// -/// ```aiken -/// math.pow(3, 5) == 243 -/// math.pow(7, 2) == 49 -/// math.pow(3, -4) == 0 -/// math.pow(0, 0) == 1 -/// math.pow(513, 3) == 135005697 -/// ``` -pub fn pow(self: Int, e: Int) -> Int { - if e < 0 { - 0 - } else if e == 0 { - 1 - } else if e % 2 == 0 { - pow(self * self, e / 2) - } else { - self * pow(self * self, ( e - 1 ) / 2) - } -} - -test pow_3_5() { - pow(3, 5) == 243 -} - -test pow_7_2() { - pow(7, 2) == 49 -} - -test pow_3__4() { - // negative powers round to zero - pow(3, -4) == 0 -} - -test pow_0_0() { - // sorry math - pow(0, 0) == 1 -} - -test pow_513_3() { - pow(513, 3) == 135005697 -} - -test pow_2_4() { - pow(2, 4) == 16 -} - -test pow_2_42() { - pow(2, 42) == 4398046511104 -} - -/// Calculates the power of 2 for a given exponent `e`. Much cheaper than -/// using `pow(2, _)` for small exponents $0 < e < 256$. 
-/// -/// ```aiken -/// math.pow2(-2) == 0 -/// math.pow2(0) == 1 -/// math.pow2(1) == 2 -/// math.pow2(4) == 16 -/// math.pow2(42) == 4398046511104 -/// ``` -pub fn pow2(e: Int) -> Int { - // do_pow2(e, 1) - if e < 8 { - if e < 0 { - 0 - } else { - builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) - } - } else if e < 32 { - 256 * pow2(e - 8) - } else { - 4294967296 * pow2(e - 32) - } -} - -test pow2_neg() { - pow2(-2) == 0 -} - -test pow2_0() { - pow2(0) == 1 -} - -test pow2_1() { - pow2(1) == 2 -} - -test pow2_4() { - pow2(4) == 16 -} - -test pow2_42() { - pow2(42) == 4398046511104 -} - -test pow2_256() { - pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 -} - -/// Calculates the square root of an integer using the [Babylonian -/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer -/// nearest to the square root. -/// -/// Returns `None` for negative values. -/// -/// ```aiken -/// math.sqrt(0) == Some(0) -/// math.sqrt(25) == Some(5) -/// math.sqrt(44203) == Some(210) -/// math.sqrt(-42) == None -/// ``` -/// -/// > [!TIP] -/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. -pub fn sqrt(self: Int) -> Option { - if self < 0 { - None - } else if self <= 1 { - Some(self) - } else { - Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) - } -} - -// The basic idea is that if x is an overestimate to the square root of a -// non-negative real number S then S/x will be an underestimate, or vice versa, -// and so the average of these two numbers may reasonably be expected to provide a -// better approximation (though the formal proof of that assertion depends on the -// inequality of arithmetic and geometric means that shows this average is always -// an overestimate of the square root. -fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { - if y >= x { - x - } else { - sqrt_babylonian(self, y, ( y + self / y ) / 2) - } -} - -test sqrt1() { - sqrt(0) == Some(0) -} - -test sqrt2() { - sqrt(1) == Some(1) -} - -test sqrt3() { - sqrt(25) == Some(5) -} - -test sqrt4() { - sqrt(44203) == Some(210) -} - -test sqrt5() { - sqrt(975461057789971041) == Some(987654321) -} - -test sqrt6() { - sqrt(-42) == None -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak deleted file mode 100644 index 88fe7ab7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak +++ /dev/null @@ -1,871 +0,0 @@ -//// This module implements operations between rational numbers. -//// -//// > [!CAUTION] -//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. -//// > -//// > Thus, for example: -//// > -//// > ```aiken -//// > rational.new(2, 3) != rational.new(4, 6) -//// > ``` -//// > -//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. - -use aiken/builtin -use aiken/collection/list -use aiken/math -use aiken/option - -/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. -pub opaque type Rational { - numerator: Int, - denominator: Int, -} - -// ## Constructing - -/// Create a new `Rational` from an `Int`. 
-/// -/// ```aiken -/// Some(rational.from_int(14)) == rational.new(14, 1) -/// Some(rational.from_int(-5)) == rational.new(-5, 1) -/// Some(rational.from_int(0)) == rational.new(0, 1) -/// ``` -pub fn from_int(numerator: Int) -> Rational { - Rational { numerator, denominator: 1 } -} - -test from_int_1() { - and { - (from_int(14) == ratio(14, 1))?, - (from_int(-5) == ratio(-5, 1))?, - (from_int(0) == ratio(0, 1))?, - } -} - -/// An unsafe constructor for `Rational` values. Assumes that the following invariants are -/// enforced: -/// -/// - the denominator is positive (the sign is managed in the numerator); -/// - the denominator is not null. -/// -/// This function is mainly used as a quick way to construct rationals from literal values. -fn ratio(numerator: Int, denominator: Int) -> Rational { - Rational { numerator, denominator } -} - -/// Make a `Rational` number from the ratio of two integers. -/// -/// Returns `None` when the denominator is null. -/// -/// ```aiken -/// rational.new(14, 42) == Some(r) -/// rational.new(14, 0) == None -/// ``` -pub fn new(numerator: Int, denominator: Int) -> Option { - if denominator == 0 { - None - } else if denominator < 0 { - Some(Rational { numerator: -numerator, denominator: -denominator }) - } else { - Some(Rational { numerator, denominator }) - } -} - -test new_1() { - and { - (new(2, 0) == None)?, - (new(2, 3) == Some(ratio(2, 3)))?, - (new(-2, 3) == Some(ratio(-2, 3)))?, - (new(2, -3) == Some(ratio(-2, 3)))?, - (new(2, 4) == Some(ratio(2, 4)))?, - (new(-2, -3) == Some(ratio(2, 3)))?, - (new(-2, -4) == Some(ratio(2, 4)))?, - } -} - -/// A null `Rational`. -pub const zero: Rational = Rational { numerator: 0, denominator: 1 } - -test zero_1() { - zero == ratio(0, 1) -} - -// ## Inspecting - -/// Get the denominator of a rational value. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.denominator(x) == 3 -/// ``` -pub fn denominator(self: Rational) -> Int { - self.denominator -} - -test denominator_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(-2, 3) - expect Some(z) = new(2, -3) - expect Some(w) = new(-2, -3) - and { - (denominator(x) == 3)?, - (denominator(y) == 3)?, - (denominator(z) == 3)?, - (denominator(w) == 3)?, - } -} - -/// Get the numerator of a rational value. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.numerator(x) == 2 -/// ``` -pub fn numerator(self: Rational) -> Int { - self.numerator -} - -test numerator_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(-2, 3) - expect Some(z) = new(2, -3) - expect Some(w) = new(-2, -3) - - and { - (numerator(x) == 2)?, - (numerator(y) == -2)?, - (numerator(z) == -2)?, - (numerator(w) == 2)?, - } -} - -// ## Modifying - -/// Absolute value of a `Rational`. -/// -/// ```aiken -/// expect Some(x) = rational.new(3, 2) -/// expect Some(y) = rational.new(-3, 2) -/// -/// rational.abs(x) == x -/// rational.abs(y) == x -/// ``` -pub fn abs(self: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = self - Rational { numerator: math.abs(a_n), denominator: a_d } -} - -test abs_examples() { - and { - (abs(ratio(5, 2)) == ratio(5, 2))?, - (abs(ratio(-5, 2)) == ratio(5, 2))?, - (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, - } -} - -/// Change the sign of a `Rational`. 
-/// -/// ```aiken -/// expect Some(x) = rational.new(3, 2) -/// expect Some(y) = rational.new(-3, 2) -/// -/// rational.negate(x) == y -/// rational.negate(y) == x -/// ``` -pub fn negate(a: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = a - Rational { numerator: -a_n, denominator: a_d } -} - -test negate_1() { - and { - (negate(ratio(5, 2)) == ratio(-5, 2))?, - (negate(ratio(-5, 2)) == ratio(5, 2))?, - (negate(negate(ratio(5, 2))) == ratio(5, 2))?, - } -} - -/// Reciprocal of a `Rational` number. That is, a new `Rational` where the -/// numerator and denominator have been swapped. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 5) -/// rational.reciprocal(x) == rational.new(5, 2) -/// -/// let y = rational.zero -/// rational.reciprocal(y) == None -/// ``` -pub fn reciprocal(self: Rational) -> Option { - let Rational { numerator: a_n, denominator: a_d } = self - if a_n < 0 { - Some(Rational { numerator: -a_d, denominator: -a_n }) - } else if a_n > 0 { - Some(Rational { numerator: a_d, denominator: a_n }) - } else { - None - } -} - -test reciprocal_1() { - and { - (reciprocal(ratio(5, 2)) == new(2, 5))?, - (reciprocal(ratio(-5, 2)) == new(-2, 5))?, - (reciprocal(ratio(0, 2)) == None)?, - (reciprocal(ratio(2, 3)) == new(3, 2))?, - (reciprocal(ratio(-2, 3)) == new(-3, 2))?, - } -} - -/// Reduce a rational to its irreducible form. This operation makes the -/// numerator and denominator coprime. -/// -/// ```aiken -/// expect Some(x) = rational.new(80, 200) -/// Some(rational.reduce(x)) == rational.new(2, 5) -/// ``` -pub fn reduce(self: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = self - let d = math.gcd(a_n, a_d) - Rational { numerator: a_n / d, denominator: a_d / d } -} - -test reduce_1() { - and { - (reduce(ratio(80, 200)) == ratio(2, 5))?, - (reduce(ratio(-5, 1)) == ratio(-5, 1))?, - (reduce(ratio(0, 3)) == ratio(0, 1))?, - } -} - -// ## Combining - -// ### Arithmetic operations - -/// Addition: sum of two rational values -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.add(x, y)) == rational.new(17, 12) -/// ``` -pub fn add(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } -} - -test add_1() { - add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) -} - -test add_2() { - add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) -} - -/// Division: quotient of two rational values. Returns `None` when the second -/// value is null. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// rational.div(x, y) == rational.new(8, 9) -/// ``` -pub fn div(left: Rational, right: Rational) -> Option { - reciprocal(right) |> option.map(mul(left, _)) -} - -test div_1() { - div(ratio(2, 3), ratio(3, 4)) == new(8, 9) -} - -test div_2() { - div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) -} - -/// Multiplication: the product of two rational values. 
-/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.mul(x, y)) == rational.new(6, 12) -/// ``` -pub fn mul(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_n, denominator: a_d * b_d } -} - -test mul_1() { - mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) -} - -test mul_2() { - mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) -} - -test mul_3() { - let result = - ratio(2, 5) - |> mul(ratio(1, 8)) - |> mul(ratio(3, 10)) - |> mul(ratio(21, 100)) - |> mul(ratio(3, 5)) - |> mul(ratio(2, 8)) - |> mul(ratio(4, 10)) - |> mul(ratio(22, 100)) - |> reduce - - result == ratio(2079, 50000000) -} - -/// Subtraction: difference of two rational values -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.sub(x, y)) == rational.new(-1, 12) -/// ``` -pub fn sub(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } -} - -test sub_1() { - sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) -} - -test sub_2() { - sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) -} - -test sub_3() { - sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) -} - -// ### Ordering - -/// Compare two rationals for an ordering. This is safe to use even for -/// non-reduced rationals. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// expect Some(z) = rational.new(4, 6) -/// -/// compare(x, y) == Less -/// compare(y, x) == Greater -/// compare(x, x) == Equal -/// compare(x, z) == Equal -/// ``` -pub fn compare(left: Rational, right: Rational) -> Ordering { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - - let l = a_n * b_d - let r = b_n * a_d - - if l < r { - Less - } else if l > r { - Greater - } else { - Equal - } -} - -test compare_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - expect Some(z) = new(4, 6) - and { - compare(x, y) == Less, - compare(y, x) == Greater, - compare(x, x) == Equal, - compare(x, z) == Equal, - } -} - -/// Comparison of two rational values using a chosen heuristic. For example: -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// rational.compare_with(x, >, y) == False -/// rational.compare_with(y, >, x) == True -/// rational.compare_with(x, >, x) == False -/// rational.compare_with(x, >=, x) == True -/// rational.compare_with(x, ==, x) == True -/// rational.compare_with(x, ==, y) == False -/// ``` -pub fn compare_with( - left: Rational, - with: fn(Int, Int) -> Bool, - right: Rational, -) -> Bool { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - with(a_n * b_d, b_n * a_d) -} - -// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. - -test compare_with_eq() { - let eq = - compare_with(_, fn(l, r) { l == r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
-} - -test compare_with_neq() { - let neq = - compare_with(_, fn(l, r) { l != r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - neq(x, y)? && neq(y, x)? && !neq(x, x)? -} - -test compare_with_gte() { - let gte = - compare_with(_, fn(l, r) { l >= r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !gte(x, y)? && gte(y, x)? && gte(x, x)? -} - -test compare_with_gt() { - let gt = - compare_with(_, fn(l, r) { l > r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !gt(x, y)? && gt(y, x)? && !gt(x, x)? -} - -test compare_with_lte() { - let lte = - compare_with(_, fn(l, r) { l <= r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - lte(x, y)? && !lte(y, x)? && lte(x, x)? -} - -test compare_with_lt() { - let lt = - compare_with(_, fn(l, r) { l < r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - lt(x, y)? && !lt(y, x)? && !lt(x, x)? -} - -// ### Means - -/// Calculate the arithmetic mean between two `Rational` values. -/// -/// ```aiken -/// let x = rational.from_int(0) -/// let y = rational.from_int(1) -/// let z = rational.from_int(2) -/// -/// expect Some(result) = rational.arithmetic_mean([x, y, z]) -/// -/// rational.compare(result, y) == Equal -/// ``` -pub fn arithmetic_mean(self: List) -> Option { - div(list.foldr(self, zero, add), from_int(list.length(self))) -} - -test arithmetic_mean_1() { - let x = ratio(1, 2) - let y = ratio(1, 2) - expect Some(z) = arithmetic_mean([x, y]) - reduce(z) == ratio(1, 2) -} - -test arithmetic_mean_2() { - let x = ratio(1, 1) - let y = ratio(2, 1) - expect Some(z) = arithmetic_mean([x, y]) - reduce(z) == ratio(3, 2) -} - -test arithmetic_mean_3() { - let xs = - [ - ratio(1, 1), - ratio(2, 1), - ratio(3, 1), - ratio(4, 1), - ratio(5, 1), - ratio(6, 1), - ] - expect Some(z) = arithmetic_mean(xs) - reduce(z) == ratio(7, 2) -} - -/// Calculate the geometric mean between two `Rational` values. This returns -/// either the exact result or the smallest integer nearest to the square root -/// for the numerator and denominator. 
-/// -/// ```aiken -/// expect Some(x) = rational.new(1, 3) -/// expect Some(y) = rational.new(1, 6) -/// -/// rational.geometric_mean(x, y) == rational.new(1, 4) -/// ``` -pub fn geometric_mean(left: Rational, right: Rational) -> Option { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - when math.sqrt(a_n * b_n) is { - Some(numerator) -> - when math.sqrt(a_d * b_d) is { - Some(denominator) -> Some(Rational { numerator, denominator }) - None -> None - } - None -> None - } -} - -test geometric_mean1() { - expect Some(x) = new(1, 2) - expect Some(y) = new(1, 2) - geometric_mean(x, y) == new(1, 2) -} - -test geometric_mean2() { - expect Some(x) = new(-1, 2) - expect Some(y) = new(1, 2) - geometric_mean(x, y) == None -} - -test geometric_mean3() { - expect Some(x) = new(1, 2) - expect Some(y) = new(-1, 2) - geometric_mean(x, y) == None -} - -test geometric_mean4() { - expect Some(x) = new(1, 3) - expect Some(y) = new(1, 6) - geometric_mean(x, y) == new(1, 4) -} - -test geometric_mean5() { - expect Some(x) = new(67, 2500) - expect Some(y) = new(35331, 1000) - expect Some(yi) = reciprocal(y) - geometric_mean(x, yi) == new(258, 9398) -} - -// ## Transforming - -/// Returns the smallest `Int` not less than a given `Rational` -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.ceil(x) == 1 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.ceil(y) == 4 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.ceil(z) == -4 -/// ``` -pub fn ceil(self: Rational) -> Int { - let Rational { numerator, denominator } = self - if builtin.remainder_integer(numerator, denominator) > 0 { - builtin.quotient_integer(numerator, denominator) + 1 - } else { - builtin.quotient_integer(numerator, denominator) - } -} - -test ceil_1() { - and { - (ceil(ratio(13, 5)) == 3)?, - (ceil(ratio(15, 5)) == 3)?, - (ceil(ratio(16, 5)) == 4)?, - (ceil(ratio(-3, 5)) == 0)?, - (ceil(ratio(-5, 5)) == -1)?, - (ceil(ratio(-14, 3)) == -4)?, - (ceil(ratio(-14, 6)) == -2)?, - (ceil(ratio(44, 14)) == 4)?, - } -} - -/// Returns the greatest `Int` no greater than a given `Rational` -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.floor(x) == 0 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.floor(y) == 3 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.floor(z) == -5 -/// ``` -pub fn floor(self: Rational) -> Int { - let Rational { numerator: a_n, denominator: a_d } = self - a_n / a_d -} - -test floor_1() { - and { - (floor(ratio(5, 2)) == 2)?, - (floor(ratio(5, 3)) == 1)?, - (floor(ratio(5, 4)) == 1)?, - (floor(ratio(5, 5)) == 1)?, - (floor(ratio(5, 6)) == 0)?, - (floor(ratio(8, 3)) == 2)?, - (floor(ratio(-14, 3)) == -5)?, - } -} - -/// Computes the rational number x raised to the power y. Returns `None` for -/// invalid exponentiation. 
-/// -/// ```aiken -/// expect Some(x) = rational.new(50, 2500) -/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) -/// -/// expect Some(x) = rational.new(50, 2500) -/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) -/// ``` -pub fn pow(x: Rational, y: Int) -> Option { - let Rational { numerator: a, denominator: b } = x - - if a == 0 && y <= 0 { - None - } else if y > 0 { - Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) - } else if y < 0 { - Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) - } else { - Some(Rational { numerator: 1, denominator: 1 }) - } -} - -test pow_negative_exponent_non_zero_fraction() { - expect Some(base) = new(50, 2500) - expect Some(calculated_result) = pow(base, -3) - expect Some(expected_result) = new(125000, 1) - reduce(calculated_result) == expected_result -} - -test pow_positive_exponent() { - expect Some(base) = new(50, 2500) - expect Some(calculated_result) = pow(base, 3) - expect Some(expected_result) = new(1, 125000) - reduce(calculated_result) == expected_result -} - -test pow_exponent_zero() { - expect Some(base) = new(50, 2500) - pow(base, 0) == new(1, 1) -} - -test pow_rational_zero_exponent_zero() { - expect Some(base) = new(0, 1) - pow(base, 0) == None -} - -/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of -/// an `Int` and `Rational` (n, f) such that: -/// -/// - `r = n + f`; -/// - `n` and `f` have the same sign as `r`; -/// - `f` has an absolute value less than 1. -pub fn proper_fraction(self: Rational) -> (Int, Rational) { - let Rational { numerator, denominator } = self - ( - builtin.quotient_integer(numerator, denominator), - Rational { - numerator: builtin.remainder_integer(numerator, denominator), - denominator, - }, - ) -} - -test proper_fraction_1() { - let r = ratio(10, 7) - let (n, f) = proper_fraction(r) - and { - (n == 1)?, - (f == ratio(3, 7))?, - (r == add(from_int(n), f))?, - } -} - -test proper_fraction_2() { - let r = ratio(-10, 7) - let (n, f) = proper_fraction(r) - and { - (n == -1)?, - (f == ratio(-3, 7))?, - (r == add(from_int(n), f))?, - } -} - -test proper_fraction_3() { - let r = ratio(4, 2) - let (n, f) = proper_fraction(r) - and { - (n == 2)?, - (f == ratio(0, 2))?, - (r == add(from_int(n), f))?, - } -} - -/// Round the argument to the nearest whole number. If the argument is -/// equidistant between two values, the greater value is returned (it -/// rounds half towards positive infinity). -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.round(x) == 1 -/// -/// expect Some(y) = rational.new(3, 2) -/// rational.round(y) == 2 -/// -/// expect Some(z) = rational.new(-3, 2) -/// rational.round(z) == -1 -/// ``` -/// -/// > [!CAUTION] -/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. -/// > If you need this behaviour, use [`round_even`](#round_even). 
-pub fn round(self: Rational) -> Int { - let (n, f) = proper_fraction(self) - - let is_negative = f.numerator < 0 - - when compare(abs(f), ratio(1, 2)) is { - Less -> n - Equal -> - if is_negative { - n - } else { - n + 1 - } - Greater -> - if is_negative { - n - 1 - } else { - n + 1 - } - } -} - -test round_1() { - and { - (round(ratio(10, 7)) == 1)?, - (round(ratio(11, 7)) == 2)?, - (round(ratio(3, 2)) == 2)?, - (round(ratio(5, 2)) == 3)?, - (round(ratio(-3, 2)) == -1)?, - (round(ratio(-2, 3)) == -1)?, - (round(ratio(-10, 7)) == -1)?, - (round(ratio(4, 2)) == 2)?, - } -} - -/// Round the argument to the nearest whole number. If the argument is -/// equidistant between two values, it returns the value that is even (it -/// rounds half to even, also known as 'banker's rounding'). -/// -/// ```aiken -/// expect Some(w) = rational.new(2, 3) -/// rational.round_even(w) == 1 -/// -/// expect Some(x) = rational.new(3, 2) -/// rational.round_even(x) == 2 -/// -/// expect Some(y) = rational.new(5, 2) -/// rational.round_even(y) == 2 -/// -/// expect Some(y) = rational.new(-3, 2) -/// rational.round_even(y) == -2 -/// ``` -pub fn round_even(self: Rational) -> Int { - let (n, f) = proper_fraction(self) - - let m = - when compare(f, ratio(0, 1)) is { - Less -> -1 - _ -> 1 - } - - let is_even = n % 2 == 0 - - when compare(abs(f), ratio(1, 2)) is { - Less -> n - Equal -> - if is_even { - n - } else { - n + m - } - Greater -> n + m - } -} - -test round_even_1() { - and { - (round_even(ratio(10, 7)) == 1)?, - (round_even(ratio(11, 7)) == 2)?, - (round_even(ratio(3, 2)) == 2)?, - (round_even(ratio(5, 2)) == 2)?, - (round_even(ratio(-3, 2)) == -2)?, - (round_even(ratio(-2, 3)) == -1)?, - (round_even(ratio(-10, 7)) == -1)?, - (round_even(ratio(4, 2)) == 2)?, - } -} - -/// Returns the nearest `Int` between zero and a given `Rational`. 
-/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.truncate(x) == 0 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.truncate(y) == 3 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.truncate(z) == -4 -/// ``` -pub fn truncate(self: Rational) -> Int { - let Rational { numerator: a_n, denominator: a_d } = self - builtin.quotient_integer(a_n, a_d) -} - -test truncate_1() { - and { - (truncate(ratio(5, 2)) == 2)?, - (truncate(ratio(5, 3)) == 1)?, - (truncate(ratio(5, 4)) == 1)?, - (truncate(ratio(5, 5)) == 1)?, - (truncate(ratio(5, 6)) == 0)?, - (truncate(ratio(8, 3)) == 2)?, - (truncate(ratio(-14, 3)) == -4)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak deleted file mode 100644 index ab8cbc17..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak +++ /dev/null @@ -1,65 +0,0 @@ -use aiken/fuzz.{both, either, map} -use aiken/math/rational.{Rational, new, pow} - -const any_positive_rational: Fuzzer = - either( - map( - both(fuzz.int_at_least(1), fuzz.int_at_least(1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - map( - both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - ) - -const any_negative_rational: Fuzzer = - either( - map( - both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - map( - both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - ) - -const any_non_zero_rational: Fuzzer = - either(any_negative_rational, any_positive_rational) - -test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { - expect Some(calculated_result) = pow(rational, 0) - expect Some(expected_result) = new(1, 1) - calculated_result == expected_result -} - -test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { - expect Some(calculated_result) = pow(rational, 1) - calculated_result == rational -} - -test prop_power_numerator_zero_exponent_negative_returns_none( - (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), -) { - expect Some(fraction) = new(0, denominator) - expect None = pow(fraction, exponent) -} - -test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { - expect Some(unit) = new(1, 1) - expect Some(calculated_result) = pow(unit, exponent) - calculated_result == unit -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak deleted file mode 100644 index cf5ef7dc..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/option.ak +++ /dev/null @@ -1,312 +0,0 @@ -//// A type to capture optional results; useful for handling errors. -//// -//// Note that the `Option` type and its constructors are readily available in Aiken. They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. - -// ## Inspecting - -/// Asserts whether an option is `None`. 
-pub fn is_none(self: Option) -> Bool { - when self is { - Some(_) -> False - _ -> True - } -} - -test is_none_1() { - is_none(Some(0)) == False -} - -test is_none_2() { - is_none(None) == True -} - -/// Asserts whether an option is `Some`, irrespective of the value it contains. -pub fn is_some(self: Option) -> Bool { - when self is { - Some(_) -> True - _ -> False - } -} - -test is_some_1() { - is_some(Some(0)) == True -} - -test is_some_2() { - is_some(None) == False -} - -// ## Combining - -/// Chain together many computations that may fail. -/// -/// ```aiken -/// self -/// |> dict.get(policy_id) -/// |> option.and_then(dict.get(_, asset_name)) -/// |> option.or_else(0) -/// ``` -pub fn and_then( - self: Option, - then: fn(a) -> Option, -) -> Option { - when self is { - None -> None - Some(a) -> then(a) - } -} - -fn try_decrement(n: Int) -> Option { - if n > 0 { - Some(n - 1) - } else { - None - } -} - -test and_then_1() { - let result = - None - |> and_then(try_decrement) - result == None -} - -test and_then_2() { - let result = - Some(14) - |> and_then(try_decrement) - result == Some(13) -} - -test and_then_3() { - let result = - Some(0) - |> and_then(try_decrement) - result == None -} - -/// Picks the first element which is not None. If there's no such element, return None. -/// -/// ```aiken -/// option.choice([]) == None -/// option.choice([Some(14), Some(42)]) == Some(14) -/// option.choice([None, Some(42)]) == Some(42) -/// option.choice([None, None]) == None -/// ``` -pub fn choice(self: List>) -> Option { - when self is { - [] -> None - [head, ..others] -> - when head is { - None -> choice(others) - _ -> head - } - } -} - -test choice_1() { - Some(1) == choice([Some(1), Some(2)]) -} - -test choice_2() { - None == choice([]) -} - -test choice_3() { - Some(1) == choice([None, Some(1)]) -} - -/// Converts from `Option>` to `Option`. -/// -/// ```aiken -/// option.flatten(Some(Some(42))) == Some(42) -/// option.flatten(Some(None)) == None -/// option.flatten(None) == None -/// ``` -/// -/// Flattening only removes one level of nesting at a time: -/// -/// ```aiken -/// flatten(Some(Some(Some(42)))) == Some(Some(42)) -/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) -/// ``` -pub fn flatten(opt: Option>) -> Option { - when opt is { - Some(inner) -> inner - None -> None - } -} - -test flatten_1() { - let x: Option> = Some(Some(6)) - Some(6) == flatten(x) -} - -test flatten_2() { - let x: Option> = Some(None) - None == flatten(x) -} - -test flatten_3() { - let x: Option> = None - None == flatten(x) -} - -test flatten_4() { - let x: Option>> = Some(Some(Some(6))) - - let result = - x - |> flatten - |> flatten - - Some(6) == result -} - -/// Apply a function to the inner value of an [`Option`](#option) -/// -/// ```aiken -/// option.map(None, fn(n) { n * 2 }) == None -/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) -/// ``` -pub fn map(self: Option, with: fn(a) -> result) -> Option { - when self is { - None -> None - Some(a) -> Some(with(a)) - } -} - -test map_1() { - map(None, fn(_) { Void }) == None -} - -test map_2() { - map(Some(14), fn(n) { n + 1 }) == Some(15) -} - -/// Combine two [`Option`](#option) together. 
-/// -/// ```aiken -/// type Foo { -/// Foo(Int, Int) -/// } -/// -/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) -/// option.map2(None, Some(42), Foo) == None -/// option.map2(Some(14), None, Foo) == None -/// ``` -pub fn map2( - opt_a: Option, - opt_b: Option, - with: fn(a, b) -> result, -) -> Option { - when opt_a is { - None -> None - Some(a) -> - when opt_b is { - None -> None - Some(b) -> Some(with(a, b)) - } - } -} - -test map2_1() { - map2(None, Some(42), fn(_, _) { 14 }) == None -} - -test map2_2() { - map2(Some(42), None, fn(_, _) { 14 }) == None -} - -test map2_3() { - map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) -} - -/// Combine three [`Option`](#option) together. -/// -/// ```aiken -/// type Foo { -/// Foo(Int, Int, Int) -/// } -/// -/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) -/// option.map3(None, Some(42), Some(1337), Foo) == None -/// option.map3(Some(14), None, None, Foo) == None -/// ``` -pub fn map3( - opt_a: Option, - opt_b: Option, - opt_c: Option, - with: fn(a, b, c) -> result, -) -> Option { - when opt_a is { - None -> None - Some(a) -> - when opt_b is { - None -> None - Some(b) -> - when opt_c is { - None -> None - Some(c) -> Some(with(a, b, c)) - } - } - } -} - -test map3_1() { - map3(None, Some(42), None, fn(_, _, _) { 14 }) == None -} - -test map3_2() { - map3(Some(42), None, None, fn(_, _, _) { 14 }) == None -} - -test map3_3() { - map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) -} - -/// Like [`or_else`](#or_else) but allows returning an `Option`. -/// This is effectively mapping the error branch. -/// -/// ```aiken -/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") -/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) -/// option.or_try(None, fn (_) { fail }) => 💥 -/// ``` -pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { - when self is { - None -> compute_default() - _ -> self - } -} - -test or_try_1() { - or_try(None, fn() { Some("aiken") }) == Some("aiken") -} - -test or_try_2() { - or_try(Some(42), fn() { fail }) == Some(42) -} - -// ## Transforming - -/// Provide a default value, turning an optional value into a normal value. -/// -/// ```aiken -/// option.or_else(None, "aiken") == "aiken" -/// option.or_else(Some(42), 14) == 42 -/// ``` -pub fn or_else(self: Option, default: a) -> a { - when self is { - None -> default - Some(a) -> a - } -} - -test or_else_1() { - or_else(None, "aiken") == "aiken" -} - -test or_else_2() { - or_else(Some(42), 14) == 42 -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak deleted file mode 100644 index d2f125f5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak +++ /dev/null @@ -1,668 +0,0 @@ -use aiken/builtin -use aiken/math -use aiken/option - -pub type Byte = - Int - -// ## Constructing - -/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. -/// The size is the expected size in number of bytes. -/// -/// > [!IMPORTANT] -/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the -/// > size is _too large_, the array is left-padded with zeroes. 
-/// -/// ```aiken -/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" -/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" -/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" -/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 -/// ``` -pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { - builtin.integer_to_bytearray(True, size, self) -} - -test from_int_big_endian_1() { - from_int_big_endian(1_000_000, 3) == #"0f4240" -} - -test from_int_big_endian_2() { - from_int_big_endian(1_000_000, 5) == #"00000f4240" -} - -test from_int_big_endian_3() { - from_int_big_endian(0, 8) == #"0000000000000000" -} - -test from_int_big_endian_4() fail { - from_int_big_endian(1_000_000, 1) == #"40" -} - -/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. -/// The size is the expected size in number of bytes. -/// -/// > [!IMPORTANT] -/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the -/// > size is _too large_, the array is right-padded with zeroes. -/// -/// ```aiken -/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" -/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" -/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" -/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 -/// ``` -pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { - builtin.integer_to_bytearray(False, size, self) -} - -test from_int_little_endian_1() { - from_int_little_endian(1_000_000, 3) == #"40420f" -} - -test from_int_little_endian_2() { - from_int_little_endian(1_000_000, 5) == #"40420f0000" -} - -test from_int_little_endian_3() { - from_int_little_endian(0, 8) == #"0000000000000000" -} - -test from_int_little_endian_4() fail { - from_int_little_endian(1_000_000, 1) == #"40" -} - -/// Convert a `String` into a `ByteArray`. -/// -/// ```aiken -/// bytearray.from_string(@"ABC") == #"414243" -/// ``` -pub fn from_string(str: String) -> ByteArray { - builtin.encode_utf8(str) -} - -test from_string_1() { - from_string(@"") == "" -} - -test from_string_2() { - from_string(@"ABC") == #"414243" -} - -/// Add a byte element in front of a `ByteArray`. When the given byte is -/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so -/// forth. -/// In PlutusV3 this will error instead of wrapping around. -/// -/// ```aiken -/// bytearray.push(#"", 0) == #"00" -/// bytearray.push(#"0203", 1) == #"010203" -/// bytearray.push(#"0203", 257) == #"010203" -/// ``` -pub fn push(self: ByteArray, byte: Byte) -> ByteArray { - builtin.cons_bytearray(byte, self) -} - -test push_1() { - push(#[], 0) == #[0] -} - -test push_2() { - push(#[2, 3], 1) == #[1, 2, 3] -} - -test push_3() fail { - let x = 257 - push(#[2, 3], x) == #[1, 2, 3] -} - -// ## Inspecting - -/// Get the `Byte` at the given index, or crash. -/// -/// > [!WARNING] -/// > This functions fails (i.e. halts the program) if there's no byte at the given index. -pub fn at(self: ByteArray, index: Int) -> Byte { - builtin.index_bytearray(self, index) -} - -/// Search the start and end positions of a sub-array in a `ByteArray`. 
-/// -/// ```aiken -/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) -/// bytearray.index_of("Hello, World!", "foo") == None -/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) -/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) -/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) -/// ``` -pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { - let offset = length(bytes) - - do_index_of(self, bytes, 0, offset, length(self)) - |> option.map(fn(ix) { (ix, ix + offset - 1) }) -} - -fn do_index_of( - self: ByteArray, - bytes: ByteArray, - cursor: Int, - offset: Int, - size: Int, -) -> Option { - if cursor + offset > size { - None - } else { - if builtin.slice_bytearray(cursor, offset, self) == bytes { - Some(cursor) - } else { - do_index_of(self, bytes, cursor + 1, offset, size) - } - } -} - -test index_of_1() { - index_of("Hello, World!", "World") == Some((7, 11)) -} - -test index_of_2() { - index_of("Hello, World!", "foo") == None -} - -test index_of_3() { - index_of("Hello, World!", "!") == Some((12, 12)) -} - -test index_of_4() { - index_of("Hello, World!", "o") == Some((4, 4)) -} - -test index_of_5() { - index_of("Hello, World!", "Hello, World!") == Some((0, 12)) -} - -/// Returns `True` when the given `ByteArray` is empty. -/// -/// ```aiken -/// bytearray.is_empty(#"") == True -/// bytearray.is_empty(#"00ff") == False -/// ``` -pub fn is_empty(self: ByteArray) -> Bool { - builtin.length_of_bytearray(self) == 0 -} - -test is_empty_1() { - is_empty(#"") == True -} - -test is_empty_2() { - is_empty(#"01") == False -} - -/// Returns the number of bytes in a `ByteArray`. -/// -/// ```aiken -/// bytearray.length(#[1, 2, 3]) == 3 -/// ``` -pub fn length(self: ByteArray) -> Int { - builtin.length_of_bytearray(self) -} - -test length_1() { - length(#"") == 0 -} - -test length_2() { - length(#"010203") == 3 -} - -/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. -/// -/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the -/// following bits sequence: -/// -/// `8` | `b` | `7` | `6` | `5` | `f` -/// --- | --- | --- | --- | --- | --- -/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` -/// -/// And thus, we have: -/// -/// ```aiken -/// test_bit(#"8b765f", 0) == True -/// test_bit(#"8b765f", 1) == False -/// test_bit(#"8b765f", 2) == False -/// test_bit(#"8b765f", 3) == False -/// test_bit(#"8b765f", 7) == True -/// test_bit(#"8b765f", 8) == False -/// test_bit(#"8b765f", 20) == True -/// test_bit(#"8b765f", 21) == True -/// test_bit(#"8b765f", 22) == True -/// test_bit(#"8b765f", 23) == True -/// ``` -pub fn test_bit(self: ByteArray, ix: Int) -> Bool { - builtin.less_than_equals_bytearray( - #[128], - builtin.cons_bytearray( - builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, - "", - ), - ) -} - -test test_bit_0() { - test_bit(#"8b765f", 0) -} - -test test_bit_1() { - !test_bit(#"8b765f", 1) -} - -test test_bit_2() { - !test_bit(#"8b765f", 2) -} - -test test_bit_3() { - !test_bit(#"8b765f", 3) -} - -test test_bit_7() { - test_bit(#"8b765f", 7) -} - -test test_bit_8() { - !test_bit(#"8b765f", 8) -} - -test test_bit_20_21_22_23() { - and { - test_bit(#"8b765f", 20), - test_bit(#"8b765f", 21), - test_bit(#"8b765f", 22), - test_bit(#"8b765f", 23), - } -} - -// ## Modifying - -/// Returns the suffix of a `ByteArray` after `n` elements. 
-/// -/// ```aiken -/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] -/// ``` -pub fn drop(self: ByteArray, n: Int) -> ByteArray { - builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) -} - -test drop_1() { - let x = #"01020304050607" - drop(x, 2) == #"0304050607" -} - -test drop_2() { - let x = #"01020304050607" - drop(x, 0) == x -} - -test drop_3() { - let x = #"01" - drop(x, 1) == #"" -} - -test drop_4() { - let x = #"" - drop(x, 2) == #"" -} - -/// Extract a `ByteArray` as a slice of another `ByteArray`. -/// -/// Indexes are 0-based and inclusive. -/// -/// ```aiken -/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] -/// ``` -pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { - builtin.slice_bytearray(start, end - start + 1, self) -} - -test slice_1() { - slice(#"", 1, 2) == #"" -} - -test slice_2() { - slice(#"010203", 1, 2) == #"0203" -} - -test slice_3() { - slice(#"010203", 0, 42) == #"010203" -} - -test slice_4() { - slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] -} - -test slice_5() { - slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] -} - -/// Returns the n-length prefix of a `ByteArray`. -/// -/// ```aiken -/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] -/// ``` -pub fn take(self: ByteArray, n: Int) -> ByteArray { - builtin.slice_bytearray(0, n, self) -} - -test take_1() { - let x = #"01020304050607" - take(x, 2) == #"0102" -} - -test take_2() { - let x = #"01020304050607" - take(x, 0) == #"" -} - -test take_3() { - let x = #"01" - take(x, 1) == x -} - -test take_4() { - let x = #"010203" - take(x, 0) == #"" -} - -// ## Combining - -/// Combine two `ByteArray` together. -/// -/// ```aiken -/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] -/// ``` -pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { - builtin.append_bytearray(left, right) -} - -test concat_1() { - concat(#"", #"") == #"" -} - -test concat_2() { - concat(#"", #"01") == #"01" -} - -test concat_3() { - concat(#"0102", #"") == #"0102" -} - -test concat_4() { - concat(#"0102", #"0304") == #"01020304" -} - -/// Compare two bytearrays lexicographically. -/// -/// ```aiken -/// bytearray.compare(#"00", #"FF") == Less -/// bytearray.compare(#"42", #"42") == Equal -/// bytearray.compare(#"FF", #"00") == Greater -/// ``` -pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { - if builtin.less_than_bytearray(left, right) { - Less - } else if builtin.equals_bytearray(left, right) { - Equal - } else { - Greater - } -} - -// ## Transforming - -/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
-/// -/// ```aiken -/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 -/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] -/// ``` -pub fn foldl( - self: ByteArray, - zero: result, - with: fn(Int, result) -> result, -) -> result { - do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) -} - -fn do_foldl( - self: ByteArray, - zero: result, - len: Int, - cursor: Int, - with: fn(Int, result) -> result, -) -> result { - if cursor == len { - zero - } else { - do_foldl( - self, - with(builtin.index_bytearray(self, cursor), zero), - len, - cursor + 1, - with, - ) - } -} - -test foldl_1() { - foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 -} - -test foldl_2() { - foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 -} - -test foldl_3() { - foldl( - #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", - 0, - fn(byte, acc) { acc * 256 + byte }, - ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 -} - -test foldl_4() { - foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] -} - -/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. -/// -/// ```aiken -/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 -/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] -/// ``` -pub fn foldr( - self: ByteArray, - zero: result, - with: fn(Int, result) -> result, -) -> result { - do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) -} - -fn do_foldr( - self: ByteArray, - zero: result, - cursor: Int, - with: fn(Int, result) -> result, -) -> result { - if cursor < 0 { - zero - } else { - do_foldr( - self, - with(builtin.index_bytearray(self, cursor), zero), - cursor - 1, - with, - ) - } -} - -test foldr_1() { - foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 -} - -test foldr_2() { - foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 -} - -test foldr_3() { - foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] -} - -/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. -/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. -/// -/// ```aiken -/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] -/// ``` -pub fn reduce( - self: ByteArray, - zero: result, - with: fn(result, Int) -> result, -) -> result { - foldl(self, zero, flip(with)) -} - -test reduce_1() { - reduce(#[], #[], push) == #[] -} - -test reduce_2() { - reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] -} - -/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 -/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 -/// bytearray.to_int_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn to_int_big_endian(self: ByteArray) -> Int { - builtin.bytearray_to_integer(True, self) -} - -test to_int_big_endian_1() { - to_int_big_endian(#"0f4240") == 1_000_000 -} - -test to_int_big_endian_2() { - to_int_big_endian(#"00000f4240") == 1_000_000 -} - -test to_int_big_endian_3() { - to_int_big_endian(#"0000000000000000") == 0 -} - -/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
-/// -/// ```aiken -/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 -/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 -/// bytearray.to_int_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn to_int_little_endian(self: ByteArray) -> Int { - builtin.bytearray_to_integer(False, self) -} - -test to_int_little_endian_1() { - to_int_little_endian(#"40420f") == 1_000_000 -} - -test to_int_little_endian_2() { - to_int_little_endian(#"40420f0000") == 1_000_000 -} - -test to_int_little_endian_3() { - to_int_little_endian(#"0000000000000000") == 0 -} - -/// Convert a `ByteArray` into a `String`. -/// -/// > [!WARNING] -/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. -/// > -/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). -/// -/// ```aiken -/// bytearray.to_string(#"414243") == "ABC" -/// bytearray.to_string(some_hash) => 💥 -/// ``` -pub fn to_string(self: ByteArray) -> String { - builtin.decode_utf8(self) -} - -test to_string_1() { - to_string("") == @"" -} - -test to_string_2() { - to_string("ABC") == @"ABC" -} - -/// Encode a `ByteArray` as a hexidecimal `String`. -/// -/// ```aiken -/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" -/// ``` -pub fn to_hex(self: ByteArray) -> String { - self - |> encode_base16(builtin.length_of_bytearray(self) - 1, "") - |> builtin.decode_utf8 -} - -test to_hex_1() { - to_hex("Hello world!") == @"48656C6C6F20776F726C6421" -} - -test to_hex_2() { - to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" -} - -/// Checks whether a `ByteArray` starts with a given prefix. -/// -/// ```aiken -/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True -/// bytearray.starts_with("", prefix: "") == True -/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False -/// ``` -pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { - let prefix_length = length(prefix) - if length(self) < prefix_length { - False - } else { - take(self, prefix_length) == prefix - } -} - -test starts_with_1() { - starts_with("", "") -} - -test starts_with_2() { - starts_with("Hello, World!", "Hello, World!") -} - -test starts_with_3() { - !starts_with("Hello, World!", "hello") -} - -test starts_with_4() { - !starts_with("", "World") -} - -test starts_with_5() { - starts_with("Hello, World", "Hello") -} - -test starts_with_6() { - !starts_with("foo", "foo_") -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak deleted file mode 100644 index 217749e9..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak +++ /dev/null @@ -1,156 +0,0 @@ -use aiken/builtin.{bytearray_to_integer, decode_utf8} -use aiken/math -use aiken/option -use aiken/primitive/bytearray - -// ## Combining - -/// Compare two integers. 
-/// -/// ```aiken -/// int.compare(14, 42) == Less -/// int.compare(14, 14) == Equal -/// int.compare(42, 14) == Greater -/// ``` -pub fn compare(left: Int, right: Int) -> Ordering { - if left < right { - Less - } else if left > right { - Greater - } else { - Equal - } -} - -// ## Transforming - -/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 -/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 -/// int.from_bytearray_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn from_bytearray_big_endian(self: ByteArray) -> Int { - bytearray_to_integer(True, self) -} - -test from_bytearray_big_endian_1() { - from_bytearray_big_endian(#"0f4240") == 1_000_000 -} - -test from_bytearray_big_endian_2() { - from_bytearray_big_endian(#"00000f4240") == 1_000_000 -} - -test from_bytearray_big_endian_3() { - from_bytearray_big_endian(#"0000000000000000") == 0 -} - -/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 -/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 -/// int.from_bytearray_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn from_bytearray_little_endian(self: ByteArray) -> Int { - bytearray_to_integer(False, self) -} - -test from_bytearray_little_endian_1() { - from_bytearray_little_endian(#"40420f") == 1_000_000 -} - -test from_bytearray_little_endian_2() { - from_bytearray_little_endian(#"40420f0000") == 1_000_000 -} - -test from_bytearray_little_endian_3() { - from_bytearray_little_endian(#"0000000000000000") == 0 -} - -/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. -/// -/// ```aiken -/// int.from_utf8("14") == Some(14) -/// int.from_utf8("-42") == Some(-42) -/// int.from_utf8("007") == Some(7) -/// int.from_utf8("foo") == None -/// int.from_utf8("1.0") == None -/// int.from_utf8("1-2") == None -/// ``` -pub fn from_utf8(bytes: ByteArray) -> Option { - bytes - |> bytearray.foldr( - Some((0, 0)), - fn(byte, st) { - when st is { - None -> None - Some((n, e)) -> - if byte < 48 || byte > 57 { - if byte == 45 { - Some((-n, 0)) - } else { - None - } - } else if n < 0 { - None - } else { - let digit = byte - 48 - Some((n + digit * math.pow(10, e), e + 1)) - } - } - }, - ) - |> option.map(fn(tuple) { tuple.1st }) -} - -test from_utf8_1() { - from_utf8("0017") == Some(17) -} - -test from_utf8_2() { - from_utf8("42") == Some(42) -} - -test from_utf8_3() { - from_utf8("1337") == Some(1337) -} - -test from_utf8_4() { - from_utf8("-14") == Some(-14) -} - -test from_utf8_5() { - from_utf8("foo") == None -} - -test from_utf8_6() { - from_utf8("1-2") == None -} - -/// Convert an `Int` to its `String` representation. 
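A minimal usage sketch (not part of this patch) tying together the byte-level fold and the big-endian conversions shown in the deleted `bytearray` and `int` modules above. It assumes a consumer module that imports `aiken/primitive/bytearray` and `aiken/primitive/int`; the test name is illustrative.

```aiken
use aiken/primitive/bytearray
use aiken/primitive/int

test big_endian_readings_agree() {
  // #"0f4240" is the big-endian encoding of 1_000_000.
  let bytes = #"0f4240"
  and {
    bytearray.to_int_big_endian(bytes) == 1_000_000,
    int.from_bytearray_big_endian(bytes) == 1_000_000,
    // Folding left with `acc * 256 + byte` re-implements the big-endian reading.
    bytearray.foldl(bytes, 0, fn(byte, acc) { acc * 256 + byte }) == 1_000_000,
    // Parsing the decimal text form yields the same number, wrapped in Some.
    int.from_utf8("1000000") == Some(1_000_000),
  }
}
```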
-/// -/// ```aiken -/// int.to_string(42) == @"42" -/// ``` -pub fn to_string(n: Int) -> String { - diagnostic(n, "") |> decode_utf8 -} - -test to_string_1() { - to_string(0) == @"0" -} - -test to_string_2() { - to_string(5) == @"5" -} - -test to_string_3() { - to_string(42) == @"42" -} - -test to_string_4() { - to_string(200) == @"200" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak deleted file mode 100644 index 35fa5567..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak +++ /dev/null @@ -1,139 +0,0 @@ -use aiken/builtin.{ - append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, -} - -// ## Constructing - -/// Convert a `ByteArray` into a `String` -/// -/// > [!WARNING] -/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. -/// > -/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). -/// -/// ```aiken -/// string.from_bytearray("foo") == @"foo" -/// string.from_bytearray(#"666f6f") == @"foo" -/// string.from_bytearray(some_hash) -> fail -/// ``` -pub fn from_bytearray(bytes: ByteArray) -> String { - decode_utf8(bytes) -} - -test from_bytearray_1() { - from_bytearray(#[]) == @"" -} - -test from_bytearray_2() { - from_bytearray(#[65, 66, 67]) == @"ABC" -} - -test from_bytearray_3() { - from_bytearray("ABC") == @"ABC" -} - -/// Convert an `Int` to its `String` representation. -/// -/// ```aiken -/// string.from_int(42) == @"42" -/// ``` -pub fn from_int(n: Int) -> String { - diagnostic(n, "") |> decode_utf8 -} - -test from_int_1() { - from_int(0) == @"0" -} - -test from_int_2() { - from_int(5) == @"5" -} - -test from_int_3() { - from_int(42) == @"42" -} - -test from_int_4() { - from_int(200) == @"200" -} - -// ## Combining - -/// Combine two `String` together. -/// -/// ```aiken -/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" -/// ``` -pub fn concat(left: String, right: String) -> String { - append_string(left, right) -} - -test concat_1() { - concat(@"", @"") == @"" -} - -test concat_2() { - concat(@"", @"foo") == concat(@"foo", @"") -} - -test concat_3() { - concat(left: @"Hello", right: @", World!") == @"Hello, World!" -} - -/// Join a list of strings, separated by a given _delimiter_. 
-/// -/// ```aiken -/// string.join([], @"+") == @"" -/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" -/// ``` -pub fn join(list: List, delimiter: String) -> String { - do_join(list, encode_utf8(delimiter), #"") - |> decode_utf8 -} - -fn do_join(xs, delimiter, bytes) { - when xs is { - [] -> bytes - [x, ..rest] -> - do_join( - rest, - delimiter, - if length_of_bytearray(bytes) == 0 { - encode_utf8(x) - } else { - append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) - }, - ) - } -} - -test join_1() { - join([], @",") == @"" -} - -test join_2() { - join([@"a", @"b", @"c"], @",") == @"a,b,c" -} - -// ## Transforming - -/// Convert a `String` into a `ByteArray` -/// -/// ```aiken -/// string.to_bytearray(@"foo") == "foo" -/// ``` -pub fn to_bytearray(self: String) -> ByteArray { - encode_utf8(self) -} - -test to_bytearray_1() { - to_bytearray(@"") == "" -} - -test to_bytearray_2() { - to_bytearray(@"ABC") == #[65, 66, 67] -} - -test to_bytearray_3() { - to_bytearray(@"ABC") == "ABC" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak deleted file mode 100644 index 0167b90f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address.ak +++ /dev/null @@ -1,86 +0,0 @@ -use aiken/crypto.{ - Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, -} - -/// A general structure for representing an on-chain `Credential`. -/// -/// Credentials are always one of two kinds: a direct public/private key -/// pair, or a script (native or Plutus). -pub type Credential { - VerificationKey(VerificationKeyHash) - Script(ScriptHash) -} - -// ## Constructing - -/// A Cardano `Address` typically holding one or two credential references. -/// -/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are -/// completely excluded from Plutus contexts. Thus, from an on-chain -/// perspective only exists addresses of type 00, 01, ..., 07 as detailed -/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). -pub type Address { - payment_credential: PaymentCredential, - stake_credential: Option, -} - -/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. -pub fn from_script(script: Hash) -> Address { - Address { payment_credential: Script(script), stake_credential: None } -} - -/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. -pub fn from_verification_key(vk: Hash) -> Address { - Address { payment_credential: VerificationKey(vk), stake_credential: None } -} - -/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). -pub fn with_delegation_key( - self: Address, - vk: Hash, -) -> Address { - Address { - payment_credential: self.payment_credential, - stake_credential: Some(Inline(VerificationKey(vk))), - } -} - -/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). 
-pub fn with_delegation_script( - self: Address, - script: Hash, -) -> Address { - Address { - payment_credential: self.payment_credential, - stake_credential: Some(Inline(Script(script))), - } -} - -/// Represent a type of object that can be represented either inline (by hash) -/// or via a reference (i.e. a pointer to an on-chain location). -/// -/// This is mainly use for capturing pointers to a stake credential -/// registration certificate in the case of so-called pointer addresses. -pub type Referenced { - Inline(a) - Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } -} - -/// A `StakeCredential` represents the delegation and rewards withdrawal conditions -/// associated with some stake address / account. -/// -/// A `StakeCredential` is either provided inline, or, by reference using an -/// on-chain pointer. -/// -/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). -pub type StakeCredential = - Referenced - -/// A 'PaymentCredential' represents the spending conditions associated with -/// some output. Hence, -/// -/// - a `VerificationKey` captures an output locked by a public/private key pair; -/// - and a `Script` captures an output locked by a native or Plutus script. -/// -pub type PaymentCredential = - Credential diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak deleted file mode 100644 index 2ebeaa91..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak +++ /dev/null @@ -1,30 +0,0 @@ -use aiken/primitive/bytearray -use cardano/address.{Credential, Script, VerificationKey} - -pub fn compare(left: Credential, right: Credential) -> Ordering { - when left is { - Script(left) -> - when right is { - Script(right) -> bytearray.compare(left, right) - _ -> Less - } - VerificationKey(left) -> - when right is { - Script(_) -> Greater - VerificationKey(right) -> bytearray.compare(left, right) - } - } -} - -test compare_matrix() { - and { - (compare(Script(""), Script("")) == Equal)?, - (compare(VerificationKey(""), VerificationKey("")) == Equal)?, - (compare(Script(""), VerificationKey("")) == Less)?, - (compare(VerificationKey(""), Script("")) == Greater)?, - (compare(Script("01"), Script("02")) == Less)?, - (compare(Script("02"), Script("01")) == Greater)?, - (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, - (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak deleted file mode 100644 index 664a3983..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak +++ /dev/null @@ -1,920 +0,0 @@ -use aiken/builtin -use aiken/collection/dict.{Dict, from_ascending_pairs_with} -use aiken/collection/list -use aiken/crypto.{Blake2b_224, Hash, Script} -use aiken/option - -/// Lovelace is now a type wrapper for Int. -pub type Lovelace = - Int - -/// A type-alias for a `PolicyId`. A `PolicyId` is always 28-byte long -pub type PolicyId = - Hash - -/// Ada, the native currency, isn't associated with any `PolicyId` (it's not -/// possible to mint Ada!). 
-/// -/// By convention, it is an empty `ByteArray`. -pub const ada_policy_id = "" - -/// A type-alias for 'AssetName`, which are free-form byte-arrays between -/// 0 and 32 bytes. -pub type AssetName = - ByteArray - -/// Ada, the native currency, isn't associated with any `AssetName` (it's not -/// possible to mint Ada!). -/// -/// By convention, it is an empty `ByteArray`. -pub const ada_asset_name = "" - -/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). -/// -/// > [!IMPORTANT] -/// > This type maintain some invariants by construction; in particular, a `Value` will never contain a -/// zero quantity of a particular token. -pub opaque type Value { - inner: Dict>, -} - -// ## Constructing - -/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) -/// and a given quantity. -pub fn from_asset( - policy_id: PolicyId, - asset_name: AssetName, - quantity: Int, -) -> Value { - if quantity == 0 { - zero - } else { - let asset = - dict.empty - |> dict.insert(asset_name, quantity) - dict.empty - |> dict.insert(policy_id, asset) - |> Value - } -} - -/// Promote an arbitrary list of assets into a `Value`. This function fails -/// (i.e. halts the program execution) if: -/// -/// - there's any duplicate amongst `PolicyId`; -/// - there's any duplicate amongst `AssetName`; -/// - the `AssetName` aren't sorted in ascending lexicographic order; or -/// - any asset quantity is null. -/// -/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, -/// while checking for internal invariants. -pub fn from_asset_list(xs: Pairs>) -> Value { - xs - |> list.foldr( - dict.empty, - fn(inner, acc) { - expect Pair(p, [_, ..] as x) = inner - x - |> from_ascending_pairs_with(fn(v) { v != 0 }) - |> dict.insert_with( - acc, - p, - _, - fn(_, _, _) { - fail @"Duplicate policy in the asset list." - }, - ) - }, - ) - |> Value -} - -test from_asset_list_1() { - let v = from_asset_list([]) - v == zero -} - -test from_asset_list_2() fail { - let v = from_asset_list([Pair(#"33", [])]) - v == zero -} - -test from_asset_list_3() fail { - let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) - v != zero -} - -test from_asset_list_4() { - let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) - flatten(v) == [(#"33", #"", 1)] -} - -test from_asset_list_5() { - let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) - flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] -} - -test from_asset_list_6() fail { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - ], - ) - v != zero -} - -test from_asset_list_7() fail { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), - ], - ) - v != zero -} - -test from_asset_list_8() { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), - ], - ) - flatten(v) == [ - (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), - ] -} - -test from_asset_list_9() { - let v = - from_asset_list( - [ - Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"31", 1)]), - ], - ) - flatten(v) == [ - (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), - ] -} - -/// Construct a `Value` from a lovelace quantity. 
-/// -/// Friendly reminder: 1 Ada = 1.000.000 Lovelace -pub fn from_lovelace(quantity: Int) -> Value { - from_asset(ada_policy_id, ada_asset_name, quantity) -} - -/// Construct an empty `Value` with nothing in it. -pub const zero: Value = Value { inner: dict.empty } - -// ## Inspecting - -/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. -pub fn is_zero(self: Value) -> Bool { - self == zero -} - -/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. -/// The second parameter is provided as `Data`, allowing to conveniently compare serialized -/// datums or similar structurually equivalent types (such as `Pairs>`). -/// -/// The third argument is a callback function to assert the left and right lovelace -/// quantities. Its first argument refers to the quantity of the first argument of -/// `match`, and the second argument of the callback to the quantity of the second -/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. -/// -/// ```aiken -/// const value: Value = -/// assets.from_lovelace(30) -/// |> assets.add("foo", "bar", 1) -/// |> assets.add("foo", "baz", 42) -/// -/// const datum: Data = -/// assets.from_lovelace(20) -/// |> assets.add("foo", "bar", 1) -/// |> assets.add("foo", "baz", 42) -/// -/// True == assets.match(value, datum, >=) -/// -/// False == assets.match(value, datum, ==) -/// -/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { -/// 2 * datum_lovelace >= value_lovelace -/// }) -/// ``` -pub fn match( - left: Value, - right: Data, - assert_lovelace: fn(Lovelace, Lovelace) -> Bool, -) -> Bool { - builtin.choose_data( - right, - False, - { - let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) - let left_assets: Data = left_assets - let left_lovelace = - when left_lovelace is { - Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd - None -> 0 - } - when builtin.un_map_data(right) is { - [] -> left_assets == right && assert_lovelace(left_lovelace, 0) - [first_asset, ..right_assets] -> - if first_asset.1st == builtin.b_data(ada_policy_id) { - and { - assert_lovelace( - left_lovelace, - builtin.un_i_data( - builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, - ), - ), - left_assets == builtin.map_data(right_assets), - } - } else { - and { - assert_lovelace(left_lovelace, 0), - left_assets == right, - } - } - } - }, - False, - False, - False, - ) -} - -const fixture_match_value: Value = - zero - |> add(ada_policy_id, ada_asset_name, 42) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data_missing_foo_02: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("bar", "01", 42) - -const fixture_match_data_altered_foo_01: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 14) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data_missing_bar: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - -const fixture_match_data_extra_policy: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - |> add("baz", "01", 1) - -const fixture_match_data_extra_asset: Data = - zero - |> 
add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("foo", "03", 1) - |> add("bar", "01", 42) - -const fixture_match_data_no_assets: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - -test match_1() { - match(fixture_match_value, fixture_match_data, fn(_, _) { True }) -} - -test match_2() { - !match( - fixture_match_value, - fixture_match_data, - fn(source, target) { source == target }, - ) -} - -test match_3() { - !match( - fixture_match_value, - fixture_match_data_missing_foo_02, - fn(_, _) { True }, - ) -} - -test match_4() { - !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) -} - -test match_5() { - !match( - fixture_match_value, - fixture_match_data_altered_foo_01, - fn(_, _) { True }, - ) -} - -test match_6() { - !match( - fixture_match_value, - fixture_match_data_extra_policy, - fn(_, _) { True }, - ) -} - -test match_7() { - !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) -} - -test match_8() { - !match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) -} - -test match_9() { - match(zero, zero, ==) -} - -test match_10() { - match( - without_lovelace(fixture_match_value), - without_lovelace(fixture_match_value), - fn(left, right) { left == 0 && right == 0 }, - ) -} - -test match_11() { - match( - without_lovelace(fixture_match_value), - fixture_match_value, - fn(left, right) { left == 0 && right > 0 }, - ) -} - -test match_12() { - match( - fixture_match_value, - without_lovelace(fixture_match_value), - fn(left, right) { left > 0 && right == 0 }, - ) -} - -test match_13() { - match( - zero |> add(ada_policy_id, ada_asset_name, 42), - zero, - fn(left, right) { left == 42 && right == 0 }, - ) -} - -test match_14() { - match( - zero, - zero |> add(ada_policy_id, ada_asset_name, 42), - fn(left, right) { left == 0 && right == 42 }, - ) -} - -const fixture_match_benchmark_left: Value = - zero - |> add(ada_policy_id, ada_asset_name, 1337) - |> add( - #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", - "MATTR", - 200, - ) - |> add( - #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", - "ProphecyPoster076", - 1, - ) - |> add( - #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", - "BISON", - 12_004_999_999, - ) - |> add( - #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", - "SNEK", - 1486, - ) - |> add( - #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", - "MAYZSilverFoundersEdition0035", - 1, - ) - |> add( - #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", - "CardanoSpaceSession", - 20, - ) - |> add( - #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", - "ADAOGovernanceToken", - 1, - ) - |> add( - #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", - "HOSKY", - 400_001_000, - ) - |> add( - #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", - "LQ", - 10_635_899, - ) - |> add( - #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", - "GrandmasterAdventurer659", - 1, - ) - |> add( - #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", - "AdventurerOfThiolden8105", - 1, - ) - |> add( - #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", - "DiscoSolaris3725", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld535", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld1213", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - 
"CardaWorld1518", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld1537", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld4199", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld3767", - 1, - ) - |> add( - #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", - "Algae1", - 1, - ) - |> add( - #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", - "Algae2", - 1, - ) - -const fixture_match_benchmark_right: Data = fixture_match_benchmark_left - -test match_benchmark() { - match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) -} - -test match_benchmark_vs() { - let data: Data = fixture_match_benchmark_right - expect pairs: Pairs> = data - fixture_match_benchmark_left == from_asset_list(pairs) -} - -/// A specialized version of `quantity_of` for the Ada currency. -pub fn lovelace_of(self: Value) -> Int { - quantity_of(self, ada_policy_id, ada_asset_name) -} - -/// A list of all token policies in that Value with non-zero tokens. -pub fn policies(self: Value) -> List { - dict.keys(self.inner) -} - -/// Extract the quantity of a given asset. -pub fn quantity_of( - self: Value, - policy_id: PolicyId, - asset_name: AssetName, -) -> Int { - self.inner - |> dict.get(policy_id) - |> option.and_then(dict.get(_, asset_name)) - |> option.or_else(0) -} - -/// Get all tokens associated with a given policy. -pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { - self.inner - |> dict.get(policy_id) - |> option.or_else(dict.empty) -} - -// ## Combining - -/// Add a (positive or negative) quantity of a single token to a assets. -/// This is more efficient than [`merge`](#merge) for a single asset. -pub fn add( - self: Value, - policy_id: PolicyId, - asset_name: AssetName, - quantity: Int, -) -> Value { - if quantity == 0 { - self - } else { - let helper = - fn(_, left, _right) { - let inner_result = - dict.insert_with( - left, - asset_name, - quantity, - fn(_k, ql, qr) { - let q = ql + qr - if q == 0 { - None - } else { - Some(q) - } - }, - ) - if dict.is_empty(inner_result) { - None - } else { - Some(inner_result) - } - } - - Value( - dict.insert_with( - self.inner, - policy_id, - dict.from_ascending_pairs([Pair(asset_name, quantity)]), - helper, - ), - ) - } -} - -test add_1() { - let v = - zero - |> add(#"acab", #"beef", 321) - |> add(#"acab", #"beef", -321) - v == zero -} - -test add_2() { - let v = - from_lovelace(123) - |> add(#"acab", #"beef", 321) - |> add(#"acab", #"beef", -1 * 321) - v == from_lovelace(123) -} - -test add_3() { - let v = - from_lovelace(1) - |> add(ada_policy_id, ada_asset_name, 2) - |> add(ada_policy_id, ada_asset_name, 3) - v == from_lovelace(6) -} - -test add_4() { - let v = - zero - |> add(#"acab", #"beef", 0) - v == zero -} - -test add_5() { - let v = - zero - |> add(#"acab", #"beef", 0) - |> add(#"acab", #"beef", 0) - v == zero -} - -/// Combine two `Value` together. 
-pub fn merge(left v0: Value, right v1: Value) -> Value { - Value( - dict.union_with( - v0.inner, - v1.inner, - fn(_, a0, a1) { - let result = - dict.union_with( - a0, - a1, - fn(_, q0, q1) { - let q = q0 + q1 - if q == 0 { - None - } else { - Some(q) - } - }, - ) - if dict.is_empty(result) { - None - } else { - Some(result) - } - }, - ), - ) -} - -test merge_1() { - let v1 = from_lovelace(1) - let v2 = from_lovelace(-1) - merge(v1, v2) == zero -} - -test merge_2() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"01", #"", 2) - let v3 = from_asset(#"02", #"", 3) - let v = - from_lovelace(42) - |> merge(v3) - |> merge(v1) - |> merge(v2) - - flatten(v) == [ - (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), - ] -} - -test merge_3() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"00", #"", -1) - let v3 = from_asset(#"01", #"", 1) - - let v = - zero - |> merge(v1) - |> merge(v2) - |> merge(v3) - - flatten(v) == [(#"01", #"", 1)] -} - -test merge_4() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"00", #"", -1) - - merge(v1, v2) == zero -} - -test merge_5() { - let v = - zero - |> add(#"acab", #"beef", 0) - - merge(zero, v) == zero -} - -/// Negates quantities of all tokens (including Ada) in that `Value`. -/// -/// ``` -/// v1 -/// |> assets.negate -/// |> assets.merge(v1) -/// |> assets.is_zero -/// // True -/// ``` -pub fn negate(self: Value) -> Value { - dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) - |> Value -} - -/// Get a subset of the assets restricted to the given policies. -pub fn restricted_to(self: Value, mask: List) -> Value { - list.foldr( - policies(self), - zero, - fn(policy_id, value) { - if list.has(mask, policy_id) { - dict.foldr( - tokens(self, policy_id), - value, - fn(asset_name, quantity, value) { - add(value, policy_id, asset_name, quantity) - }, - ) - } else { - value - } - }, - ) -} - -test restricted_to_1() { - let self = from_lovelace(42) |> add("foo", "", 1) - restricted_to(self, []) == zero -} - -test restricted_to_2() { - let self = from_lovelace(42) |> add("foo", "", 1) - restricted_to(self, [ada_policy_id]) == from_lovelace(42) -} - -test restricted_to_3() { - let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) - restricted_to(self, ["foo", "bar"]) == without_lovelace(self) -} - -test restricted_to_4() { - let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) - restricted_to(self, ["foo"]) == without_lovelace(self) -} - -test restricted_to_5() { - let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) - restricted_to(self, [ada_policy_id, "foo"]) == self -} - -/// Get a `Value` excluding Ada. -pub fn without_lovelace(self: Value) -> Value { - dict.delete(self.inner, ada_policy_id) - |> Value -} - -test without_lovelace_1() { - let v = from_lovelace(1000000) - without_lovelace(v) == zero -} - -test without_lovelace_2() { - let v = from_lovelace(1000000) - let v2 = from_lovelace(50000000) - without_lovelace(v) == without_lovelace(v2) -} - -test without_lovelace_3() { - let v = - from_asset(#"010203", #"040506", 100) - |> add(ada_policy_id, ada_asset_name, 100000000) - let v2 = from_asset(#"010203", #"040506", 100) - without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 -} - -// ## Transforming - -/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. -/// -/// Handy to manipulate values as uniform lists. 
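As an illustration of how the `Value` helpers deleted above compose from outside the module, here is a hedged sketch (not part of this patch) assuming a consumer module that imports `cardano/assets`; the policy and asset names are placeholders.

```aiken
use cardano/assets

test combine_and_inspect_value_example() {
  // 2 Ada plus three units of an illustrative (policy, asset) pair.
  let paid =
    assets.from_lovelace(2_000_000)
      |> assets.add(#"acab", #"beef", 3)
  // A negative quantity, once merged in, subtracts from the running total.
  let refund = assets.from_asset(#"acab", #"beef", -1)
  let total = assets.merge(paid, refund)
  and {
    assets.lovelace_of(total) == 2_000_000,
    assets.quantity_of(total, #"acab", #"beef") == 2,
    // flatten lists entries in ascending policy/asset order, Ada first.
    assets.flatten(total) == [(#"", #"", 2_000_000), (#"acab", #"beef", 2)],
  }
}
```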
-pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { - dict.foldr( - self.inner, - [], - fn(policy_id, asset_list, value) { - dict.foldr( - asset_list, - value, - fn(asset_name, quantity, xs) { - [(policy_id, asset_name, quantity), ..xs] - }, - ) - }, - ) -} - -/// Flatten a `Value` as a list of results, possibly discarding some along the way. -/// -/// When the transform function returns `None`, the result is discarded altogether. -pub fn flatten_with( - self: Value, - with: fn(PolicyId, AssetName, Int) -> Option, -) -> List { - dict.foldr( - self.inner, - [], - fn(policy_id, asset_list, value) { - dict.foldr( - asset_list, - value, - fn(asset_name, quantity, xs) { - when with(policy_id, asset_name, quantity) is { - None -> xs - Some(x) -> [x, ..xs] - } - }, - ) - }, - ) -} - -test flatten_with_1() { - flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == [] -} - -test flatten_with_2() { - let v = - zero - |> add("a", "1", 14) - |> add("b", "", 42) - |> add("a", "2", 42) - - flatten_with( - v, - fn(p, a, q) { - if q == 42 { - Some((p, a)) - } else { - None - } - }, - ) == [("a", "2"), ("b", "")] -} - -/// Reduce a value into a single result -/// -/// ``` -/// assets.zero -/// |> assets.add("a", "1", 10) -/// |> assets.add("b", "2", 20) -/// |> assets.reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) -/// // 30 -/// ``` -pub fn reduce( - self: Value, - start: result, - with: fn(PolicyId, AssetName, Int, result) -> result, -) -> result { - dict.foldr( - self.inner, - start, - fn(policy_id, asset_list, result) { - dict.foldr(asset_list, result, with(policy_id, _, _, _)) - }, - ) -} - -test reduce_1() { - let v = - zero - |> add("a", "1", 10) - |> add("b", "2", 20) - let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) - result == 30 -} - -test reduce_2() { - let v = - zero - |> add("a", "1", 5) - |> add("a", "2", 15) - |> add("b", "", 10) - let result = - reduce( - v, - [], - fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] }, - ) - result == [("a", "1"), ("a", "2"), ("b", "")] -} - -test reduce_3() { - let v = zero - let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity }) - result == 1 -} - -/// Convert the value into a dictionary of dictionaries. -pub fn to_dict(self: Value) -> Dict> { - self.inner -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak deleted file mode 100644 index f0b6d258..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak +++ /dev/null @@ -1,93 +0,0 @@ -use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash} -use cardano/address.{Credential} -use cardano/assets.{Lovelace} - -pub type StakePoolId = - Hash - -/// An on-chain certificate attesting of some operation. Publishing -/// certificates triggers different kind of rules; most of the time, -/// they require signatures from specific keys. -pub type Certificate { - /// Register a stake credential with an optional deposit amount. - /// The deposit is always present when using the new registration certificate - /// format available since the Conway era. - RegisterCredential { - credential: Credential, - /// > [!NOTE] - /// > The `deposit` ought to be an `Option`, but due to unfortunate - /// > circumstances it will always be instantiated to `None` even when set in - /// > the host transaction. 
This is what the `Never` type captures here. - deposit: Never, - } - /// Un-Register a stake credential with an optional refund amount - /// The deposit is always present when using the new de-registration certificate - /// format available since the Conway era. - UnregisterCredential { - credential: Credential, - /// > [!NOTE] - /// > The `refund` ought to be an `Option`, but due to unfortunate - /// > circumstances it will always be instantiated to `None` even when set in - /// > the host transaction. This is what the `Never` type captures here. - refund: Never, - } - /// Delegate stake to a [Delegate](#Delegate). - DelegateCredential { credential: Credential, delegate: Delegate } - /// Register and delegate staking credential to a Delegatee in one certificate. - RegisterAndDelegateCredential { - credential: Credential, - delegate: Delegate, - deposit: Lovelace, - } - /// Register a delegate representative (a.k.a DRep). The deposit is explicit and - /// is refunded when the delegate steps down (unregister). - RegisterDelegateRepresentative { - delegate_representative: Credential, - deposit: Lovelace, - } - /// Update a delegate representative (a.k.a DRep). The certificate also contains - /// metadata which aren't visible on-chain. - UpdateDelegateRepresentative { delegate_representative: Credential } - /// UnRegister a delegate representative, and refund back its past deposit. - UnregisterDelegateRepresentative { - delegate_representative: Credential, - refund: Lovelace, - } - /// Register a new stake pool - RegisterStakePool { - /// The hash digest of the stake pool's cold (public) key - stake_pool: StakePoolId, - /// The hash digest of the stake pool's VRF (public) key - vrf: VerificationKeyHash, - } - /// Retire a stake pool. 'at_epoch' indicates in which the retirement will take place - RetireStakePool { stake_pool: StakePoolId, at_epoch: Int } - /// Authorize a Hot credential for a specific Committee member's cold credential - AuthorizeConstitutionalCommitteeProxy { - constitutional_committee_member: Credential, - proxy: Credential, - } - /// Step down from the constitutional committee as a member. - RetireFromConstitutionalCommittee { - constitutional_committee_member: Credential, - } -} - -/// A type of stake delegation that can be either block-production, vote or -/// both. Note that delegation types aren't cancelling one another, so it is -/// possible to delegate block production in one transaction, and delegate vote -/// in another. This second delegation **does NOT** invalidate the first one. 
-pub type Delegate { - DelegateBlockProduction { stake_pool: StakePoolId } - DelegateVote { delegate_representative: DelegateRepresentative } - DelegateBoth { - stake_pool: StakePoolId, - delegate_representative: DelegateRepresentative, - } -} - -pub type DelegateRepresentative { - Registered(Credential) - AlwaysAbstain - AlwaysNoConfidence -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak deleted file mode 100644 index 3ec96800..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak +++ /dev/null @@ -1,109 +0,0 @@ -use aiken/collection.{Index} -use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} -use aiken/math/rational.{Rational} -use cardano/address.{Credential} -use cardano/assets.{Lovelace} -use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} - -pub type ProposalProcedure { - deposit: Lovelace, - return_address: Credential, - governance_action: GovernanceAction, -} - -pub type GovernanceAction { - ProtocolParameters { - /// The last governance action of type 'ProtocolParameters'. They must all - /// form a chain. - ancestor: Option, - /// The new proposed protocol parameters. Only values set to `Some` are relevant. - new_parameters: ProtocolParametersUpdate, - /// The optional guardrails script defined in the constitution. The script - /// is executed by the ledger in addition to the hard-coded ledger rules. - /// - /// It must pass for the new protocol parameters to be deemed valid. - guardrails: Option, - } - HardFork { - /// The last governance action of type `HardFork`. They must all - /// form a chain. - ancestor: Option, - /// The new proposed version. Few rules apply to proposing new versions: - /// - /// - The `major` component, if incremented, must be exactly one more than the current. - /// - The `minor` component, if incremented, must be exactly one more than the current. - /// - If the `major` component is incremented, `minor` must be set to `0`. - /// - Neither `minor` nor `major` can be decremented. - new_version: ProtocolVersion, - } - TreasuryWithdrawal { - /// A collection of beneficiaries, which can be plain verification key - /// hashes or script hashes (e.g. DAO). - beneficiaries: Pairs, - /// The optional guardrails script defined in the constitution. The script - /// is executed by the ledger in addition to the hard-coded ledger rules. - /// - /// It must pass for the withdrawals to be authorized. - guardrails: Option, - } - NoConfidence { - /// The last governance action of type `NoConfidence` or - /// `ConstitutionalCommittee`. They must all / form a chain. - ancestor: Option, - } - ConstitutionalCommittee { - /// The last governance action of type `NoConfidence` or - /// `ConstitutionalCommittee`. They must all / form a chain. - ancestor: Option, - /// Constitutional members to be removed. - evicted_members: List, - /// Constitutional members to be added. - added_members: Pairs, - /// The new quorum value, as a ratio of a numerator and a denominator. The - /// quorum specifies the threshold of 'Yes' votes necessary for the - /// constitutional committee to accept a proposal procedure. - quorum: Rational, - } - NewConstitution { - /// The last governance action of type `Constitution` or - /// `ConstitutionalCommittee`. They must all / form a chain. - ancestor: Option, - /// The new proposed constitution. 
- constitution: Constitution, - } - NicePoll -} - -pub type Vote { - No - Yes - Abstain -} - -pub type TransactionId = - Hash - -pub type GovernanceActionId { - transaction: TransactionId, - proposal_procedure: Index, -} - -pub type ProtocolVersion { - major: Int, - minor: Int, -} - -pub type Constitution { - guardrails: Option, -} - -/// An epoch number after which constitutional committee member -/// mandate expires. -pub type Mandate = - Int - -pub type Voter { - ConstitutionalCommitteeMember(Credential) - DelegateRepresentative(Credential) - StakePool(VerificationKeyHash) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak deleted file mode 100644 index d9e7be95..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak +++ /dev/null @@ -1,360 +0,0 @@ -use aiken/math/rational.{Rational} -use cardano/assets.{Lovelace} - -pub opaque type ProtocolParametersUpdate { - inner: Pairs, -} - -pub type ScriptExecutionPrices { - memory: Rational, - cpu: Rational, -} - -pub type ExecutionUnits { - memory: Int, - cpu: Int, -} - -pub type StakePoolOperatorVotingThresholds { - motion_of_no_confidence: Rational, - constitutional_committee: ConstitutionalCommitteeThresholds, - hard_fork: Rational, - protocol_parameters: ProtocolParametersThresholds< - Rational, - Void, - Void, - Void, - Void, - >, -} - -pub type DelegateRepresentativeVotingThresholds { - motion_of_no_confidence: Rational, - constitutional_committee: ConstitutionalCommitteeThresholds, - constitution: Rational, - hard_fork: Rational, - protocol_parameters: ProtocolParametersThresholds< - Void, - Rational, - Rational, - Rational, - Rational, - >, - treasury_withdrawal: Rational, -} - -pub type ProtocolParametersThresholds< - security, - network, - economic, - technical, - governance, -> { - security_group: security, - network_group: network, - economic_group: economic, - technical_group: technical, - governance_group: governance, -} - -pub type ConstitutionalCommitteeThresholds { - default: Rational, - under_no_confidence: Rational, -} - -/// The linear coefficient that intervenes in the transaction fee calculation. -/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. -pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 0, into_int) -} - -/// The constant factor that intervenes in the transaction fee calculation. It is -/// a flat cost of lovelace that is added to every fee calculation. -pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 1, into_int) -} - -/// The maximum size of a serialized block body, expressed in bytes. -pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 2, into_int) -} - -/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. -pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 3, into_int) -} - -/// The maximum size of a serialized block header, expressed in bytes. -pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 4, into_int) -} - -/// The required deposit amount when registering stake credentials, expressed in Lovelace. 
-pub fn stake_credential_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 5, into_int) -} - -/// The required deposit amount when registering a stake pool, expressed in Lovelace. -pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 6, into_int) -} - -/// The maximum number of epoch in the future allowed for a stake pool retirement to be scheduled. -pub fn stake_pool_retirement_horizon( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 7, into_int) -} - -/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_. -pub fn desired_number_of_stake_pools( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 8, into_int) -} - -/// A parameter controlling the influence of an pool owner's pledge on the rewards. Also known as _'a0'_. -pub fn stake_pool_pledge_influence( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 9, into_rational) -} - -/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentivies model. Also known as _'ρ'_. -pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 10, into_rational) -} - -/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_. -pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 11, into_rational) -} - -/// Minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace. -pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 16, into_int) -} - -/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per Byte, and is also known as the 'coins per utxo byte' parameter. -pub fn min_utxo_deposit_coefficient( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 17, into_int) -} - -/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version. -pub fn cost_models(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 18, identity) -} - -/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts. -pub fn script_execution_prices( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 19, into_script_execution_prices) -} - -/// The maximum execution units allowed for a single transaction. -pub fn max_transaction_execution_units( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 20, into_execution_units) -} - -/// The maximum execution units allowed for a single block. -pub fn max_block_execution_units( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 21, into_execution_units) -} - -/// The maximum size of a serialized value in a transaction output. This effectively limits -/// the maximum kinds of assets that can be sent in a single output. It is expressed in bytes. 
-pub fn max_value_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 22, into_int) -} - -/// The scaling factor applied to the transaction cost for defining the minimum collateral -/// amount. It is expressed in percent points (so 100 = 100%). -pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 23, into_int) -} - -/// The maximum number of collateral inputs allowed in the transaction. -pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 24, into_int) -} - -/// The various governance voting thresholds pertaining to stake pool operators. -pub fn stake_pool_operator_voting_thresholds( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 25, into_spo_voting_thresholds) -} - -/// The various governance voting thresholds pertaining to delegate representatives -/// (a.k.a DReps). -pub fn delegate_representative_voting_thresholds( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 26, into_drep_voting_thresholds) -} - -/// The minimum number of members in the constitutional committee. Any updates of the committee -/// must leave at least this number of members. -pub fn min_constitutional_committee_size( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 27, into_int) -} - -/// The maximum length of a constitutional committee member, expressed in number of epochs. -pub fn max_constitutional_committee_mandate( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 28, into_int) -} - -/// The lifetime of any governance proposal. An action that hasn't been approved beyond that -/// period is considered inactive and discarded. It is expressed in number of epochs. -pub fn governance_proposal_lifetime( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 29, into_int) -} - -/// The required deposit amount for governance proposal procedures, expressed in Lovelace. -pub fn governance_proposal_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 30, into_int) -} - -/// The required deposit amount when registering as a delegate representative, expressed in -/// Lovelace. -pub fn delegate_representative_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 31, into_int) -} - -/// The maximum number of epochs that a delegate representative can stay inactive (i.e. no -/// voting) without becoming _inactive_ and removed from thresholds calculations. -pub fn delegate_representative_max_idle_time( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 32, into_int) -} - -/// The base tier fee coefficient for reference scripts. Reference scripts gets increasingly -/// more expensives every ~24KB, the base coefficient is a multiplicating factor which grows -/// exponentially with each tier. 
-pub fn reference_scripts_tier_fee_initial_factor( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 33, into_rational) -} - -// Internals ------------------------------------------------------------------- - -type ProtocolParametersIndex = - Int - -fn get_protocol_param( - self: Pairs, - ix: ProtocolParametersIndex, - into: fn(Data) -> a, -) -> Option { - when self is { - [] -> None - [Pair(jx, param), ..tail] -> - if ix == jx { - Some(into(param)) - } else { - get_protocol_param(tail, ix, into) - } - } -} - -fn into_int(param: Data) -> Int { - expect param: Int = param - param -} - -fn into_rational(param: Data) -> Rational { - expect [numerator, denominator]: List = param - expect Some(r) = rational.new(numerator, denominator) - r -} - -fn into_execution_units(param: Data) -> ExecutionUnits { - expect [memory, cpu]: List = param - ExecutionUnits { memory, cpu } -} - -fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { - expect [memory, cpu]: List = param - let memory = into_rational(memory) - let cpu = into_rational(cpu) - ScriptExecutionPrices { memory, cpu } -} - -fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { - expect [ - motion_of_no_confidence, constitutional_committee, - constitutional_committee_under_no_confidence, hard_fork, - protocol_parameters_security_group, - ]: List = param - - StakePoolOperatorVotingThresholds { - motion_of_no_confidence: into_rational(motion_of_no_confidence), - constitutional_committee: ConstitutionalCommitteeThresholds { - default: into_rational(constitutional_committee), - under_no_confidence: into_rational( - constitutional_committee_under_no_confidence, - ), - }, - hard_fork: into_rational(hard_fork), - protocol_parameters: ProtocolParametersThresholds { - security_group: into_rational(protocol_parameters_security_group), - network_group: Void, - economic_group: Void, - technical_group: Void, - governance_group: Void, - }, - } -} - -fn into_drep_voting_thresholds( - param: Data, -) -> DelegateRepresentativeVotingThresholds { - expect [ - motion_of_no_confidence, constitutional_committee, - constitutional_committee_under_no_confidence, constitution, hard_fork, - protocol_parameters_network_group, protocol_parameters_economic_group, - protocol_parameters_technical_group, protocol_parameters_governance_group, - treasury_withdrawal, - ]: List = param - - DelegateRepresentativeVotingThresholds { - motion_of_no_confidence: into_rational(motion_of_no_confidence), - constitutional_committee: ConstitutionalCommitteeThresholds { - default: into_rational(constitutional_committee), - under_no_confidence: into_rational( - constitutional_committee_under_no_confidence, - ), - }, - constitution: into_rational(constitution), - hard_fork: into_rational(hard_fork), - protocol_parameters: ProtocolParametersThresholds { - security_group: Void, - network_group: into_rational(protocol_parameters_network_group), - economic_group: into_rational(protocol_parameters_economic_group), - technical_group: into_rational(protocol_parameters_technical_group), - governance_group: into_rational(protocol_parameters_governance_group), - }, - treasury_withdrawal: into_rational(treasury_withdrawal), - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak deleted file mode 100644 index e723e2d5..00000000 --- 
a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak +++ /dev/null @@ -1,62 +0,0 @@ -use aiken/primitive/bytearray -use cardano/address.{Script} -use cardano/address/credential -use cardano/governance.{ - ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, -} - -pub fn compare(left: Voter, right: Voter) -> Ordering { - when left is { - ConstitutionalCommitteeMember(left) -> - when right is { - ConstitutionalCommitteeMember(right) -> credential.compare(left, right) - _ -> Less - } - DelegateRepresentative(left) -> - when right is { - DelegateRepresentative(right) -> credential.compare(left, right) - ConstitutionalCommitteeMember(_) -> Greater - _ -> Less - } - StakePool(left) -> - when right is { - StakePool(right) -> bytearray.compare(left, right) - _ -> Greater - } - } -} - -test compare_matrix() { - let cc0 = ConstitutionalCommitteeMember(Script("0")) - let cc1 = ConstitutionalCommitteeMember(Script("1")) - - let drep0 = DelegateRepresentative(Script("0")) - let drep1 = DelegateRepresentative(Script("1")) - - let spo0 = StakePool("0") - let spo1 = StakePool("1") - - and { - (compare(cc0, cc0) == Equal)?, - (compare(cc0, cc1) == Less)?, - (compare(cc1, cc0) == Greater)?, - (compare(drep0, drep0) == Equal)?, - (compare(drep0, drep1) == Less)?, - (compare(drep1, drep0) == Greater)?, - (compare(spo0, spo0) == Equal)?, - (compare(spo0, spo1) == Less)?, - (compare(spo1, spo0) == Greater)?, - (compare(cc0, drep0) == Less)?, - (compare(cc0, drep1) == Less)?, - (compare(cc0, spo0) == Less)?, - (compare(cc0, spo1) == Less)?, - (compare(drep0, cc0) == Greater)?, - (compare(drep0, cc1) == Greater)?, - (compare(drep0, spo0) == Less)?, - (compare(drep0, spo1) == Less)?, - (compare(spo0, cc0) == Greater)?, - (compare(spo0, cc1) == Greater)?, - (compare(spo0, drep0) == Greater)?, - (compare(spo0, drep1) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak deleted file mode 100644 index ff73836a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak +++ /dev/null @@ -1,62 +0,0 @@ -//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. -//// -//// > [!NOTE] -//// > Unless you know what you're doing, you should prefer using named handlers: -//// > -//// > - `mint` -//// > - `spend` -//// > - `withdraw` -//// > - `publish` -//// > - `vote` -//// > - `propose` - -use aiken/collection.{Index} -use cardano/address.{Credential} -use cardano/assets.{PolicyId} -use cardano/certificate.{Certificate} -use cardano/governance.{ProposalProcedure, Voter} -use cardano/transaction.{OutputReference, Redeemer, Transaction} - -/// A context given to a script by the Cardano ledger when being executed. -/// -/// The context contains information about the entire transaction that contains -/// the script. The transaction may also contain other scripts; to distinguish -/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a -/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what -/// purpose) the transaction is being executed. 
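Since the `script_context` module removed here exists mainly to support custom `else` fallback handlers, the following is a small hedged sketch (not part of this patch) of a reusable predicate over `ScriptContext` that such a handler could delegate to. It uses only the types and constructors defined in this module; the function name is illustrative.

```aiken
use cardano/script_context.{ScriptContext, Spending}

/// A check a custom `else` handler could call: accept the execution only
/// when the script is being run to spend one of its own outputs.
pub fn only_spending(ctx: ScriptContext) -> Bool {
  when ctx.info is {
    Spending { .. } -> True
    _ -> False
  }
}
```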
-pub type ScriptContext { - transaction: Transaction, - redeemer: Redeemer, - info: ScriptInfo, -} - -/// Characterizes the script information. The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option<Data>`. -pub type ScriptInfo { - /// For scripts executed as minting/burning policies, to insert - /// or remove assets from circulation. It's parameterized by the identifier - /// of the associated policy. - Minting(PolicyId) - /// For scripts that are used as payment credentials for addresses in - /// transaction outputs. They govern the rule by which the output they - /// reference can be spent. - Spending { output: OutputReference, datum: Option<Data> } - /// For scripts that validate reward withdrawals from a reward account. - /// - /// The argument identifies the target reward account. - Withdrawing(Credential) - /// Needed when delegating to a pool using stake credentials defined as a - /// custom script. This purpose is also triggered when de-registering such - /// stake credentials. - /// - /// The Int is a 0-based index of the given `Certificate` in `certificates`. - Publishing { at: Index, certificate: Certificate } - /// Voting for a type of voter using a governance action id to vote - /// yes / no / abstain inside a transaction. - /// - /// The voter is who is doing the governance action. - Voting(Voter) - /// Used to propose a governance action. - /// - /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. - Proposing { at: Index, proposal_procedure: ProposalProcedure } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak deleted file mode 100644 index 6511a596..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak +++ /dev/null @@ -1,225 +0,0 @@ -use aiken/builtin -use aiken/collection.{Index} -use aiken/collection/dict.{Dict} -use aiken/collection/list -use aiken/crypto.{ - Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, -} -use aiken/interval.{Interval} -use aiken/option -use cardano/address.{Address, Credential, Script, VerificationKey} -use cardano/assets.{Lovelace, PolicyId, Value} -use cardano/certificate.{Certificate} -use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} - -pub type TransactionId = - Hash<Blake2b_256, Transaction> - -/// Characterizes the script purpose. -pub type ScriptPurpose { - /// For scripts executed as minting/burning policies, to insert - /// or remove assets from circulation. It's parameterized by the identifier - /// of the associated policy. - Mint(PolicyId) - /// For scripts that are used as payment credentials for addresses in - /// transaction outputs. They govern the rule by which the output they - /// reference can be spent. - Spend(OutputReference) - /// For scripts that validate reward withdrawals from a reward account. - /// - /// The argument identifies the target reward account. - Withdraw(Credential) - /// Needed when delegating to a pool using stake credentials defined as a - /// custom script. This purpose is also triggered when de-registering such - /// stake credentials. - /// - /// The Int is a 0-based index of the given `Certificate` in `certificates`.
- Publish { at: Index, certificate: Certificate } - /// Voting for a type of voter using a governance action id to vote - /// yes / no / abstain inside a transaction. - /// - /// The voter is who is doing the governance action. - Vote(Voter) - /// Used to propose a governance action. - /// - /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. - Propose { at: Index, proposal_procedure: ProposalProcedure } -} - -/// A Cardano `Transaction`, as seen by on-chain scripts. -/// -/// Note that this is a representation of a transaction, and not the 1:1 -/// translation of the transaction as seen by the ledger. In particular, -/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs -/// to bootstrap addresses or just transaction metadata. -pub type Transaction { - inputs: List<Input>, - reference_inputs: List<Input>, - outputs: List<Output>, - fee: Lovelace, - mint: Value, - certificates: List<Certificate>, - /// > [!IMPORTANT] - /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. - withdrawals: Pairs<Credential, Lovelace>, - validity_range: ValidityRange, - extra_signatories: List<VerificationKeyHash>, - /// > [!IMPORTANT] - /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). - redeemers: Pairs<ScriptPurpose, Redeemer>, - datums: Dict<DataHash, Data>, - id: TransactionId, - /// > [!IMPORTANT] - /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. - votes: Pairs<Voter, Pairs<GovernanceActionId, Vote>>, - proposal_procedures: List<ProposalProcedure>, - current_treasury_amount: Option<Lovelace>, - treasury_donation: Option<Lovelace>, -} - -/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. -pub type ValidityRange = - Interval<Int> - -/// An `Input` made of an output reference and, the resolved value associated with that output. -pub type Input { - output_reference: OutputReference, - output: Output, -} - -/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` -/// corresponds to the position in the output list of the transaction (identified by its id) -/// that produced that output -pub type OutputReference { - transaction_id: Hash<Blake2b_256, Transaction>, - output_index: Int, -} - -/// A transaction `Output`, with an address, a value and optional datums and script references. -pub type Output { - address: Address, - value: Value, - datum: Datum, - reference_script: Option<ScriptHash>, -} - -/// An output `Datum`. -pub type Datum { - NoDatum - /// A datum referenced by its hash digest. - DatumHash(DataHash) - /// A datum completely inlined in the output. - InlineDatum(Data) -} - -/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is -/// opaque because it is user-defined and it is the script's responsibility to -/// parse it into its expected form. -pub type Redeemer = - Data - -// ## Querying - -/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in -/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own -/// input. -/// -/// ```aiken
/// validator { -/// spend(datum, redeemer, my_output_reference, self) { -/// expect Some(input) = -/// self.inputs -/// |> transaction.find_input(my_output_reference) -/// } -/// } -/// ``` -pub fn find_input( - inputs: List<Input>, - output_reference: OutputReference, -) -> Option<Input> { - inputs - |> list.find(fn(input) { input.output_reference == output_reference }) -} - -/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for -/// datums in the witness set, and then for inline datums if it doesn't find any in -/// witnesses. -pub fn find_datum( - outputs: List<Output>, - datums: Dict<DataHash, Data>, - datum_hash: DataHash, -) -> Option<Data> { - datums - |> dict.get(datum_hash) - |> option.or_try( - fn() { - outputs - |> list.filter_map( - fn(output) { - when output.datum is { - InlineDatum(data) -> - if blake2b_256(builtin.serialise_data(data)) == datum_hash { - Some(data) - } else { - None - } - _ -> None - } - }, - ) - |> list.head - }, - ) -} - -/// Find all outputs that are paying into the given script hash, if any. This is useful for -/// contracts running over multiple transactions. -pub fn find_script_outputs( - outputs: List<Output>, - script_hash: ScriptHash, -) -> List<Output> { - outputs - |> list.filter( - fn(output) { - when output.address.payment_credential is { - Script(addr_script_hash) -> script_hash == addr_script_hash - VerificationKey(_) -> False - } - }, - ) -} - -// ## Testing - -/// A placeholder / empty `Transaction` to serve as a base in a transaction -/// builder. This is particularly useful for constructing test transactions.
-/// -/// Every field is empty or null, and we have in particular: -/// -/// ```aiken -/// use aiken/interval -/// -/// transaction.placeholder.id == -/// #"0000000000000000000000000000000000000000000000000000000000000000" -/// -/// transaction.placeholder.validity_range == interval.everything -/// ``` -pub const placeholder: Transaction = - Transaction { - inputs: [], - reference_inputs: [], - outputs: [], - fee: 0, - mint: assets.zero, - certificates: [], - withdrawals: [], - validity_range: interval.everything, - extra_signatories: [], - redeemers: [], - datums: dict.empty, - id: #"0000000000000000000000000000000000000000000000000000000000000000", - votes: [], - proposal_procedures: [], - current_treasury_amount: None, - treasury_donation: None, - } diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak deleted file mode 100644 index 70b7550d..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak +++ /dev/null @@ -1,23 +0,0 @@ -use aiken/primitive/bytearray -use aiken/primitive/int -use cardano/transaction.{OutputReference} - -pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { - when bytearray.compare(left.transaction_id, right.transaction_id) is { - Equal -> int.compare(left.output_index, right.output_index) - ordering -> ordering - } -} - -test compare_matrix() { - and { - (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, - (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, - (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, - (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, - (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, - (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, - (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, - (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak deleted file mode 100644 index 4fef2cbe..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak +++ /dev/null @@ -1,126 +0,0 @@ -use aiken/primitive/bytearray -use aiken/primitive/int -use cardano/address.{Script, VerificationKey} -use cardano/address/credential -use cardano/certificate.{RegisterCredential} -use cardano/governance.{NicePoll, ProposalProcedure, StakePool} -use cardano/governance/voter -use cardano/transaction.{ - Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, -} -use cardano/transaction/output_reference - -pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { - when left is { - Mint(left) -> - when right is { - Mint(right) -> bytearray.compare(left, right) - _ -> Less - } - - Spend(left) -> - when right is { - Spend(right) -> output_reference.compare(left, right) - Mint(_) -> Greater - _ -> Less - } - - Withdraw(left) -> - when right is { - Withdraw(right) -> credential.compare(left, right) - Spend(_) | Mint(_) -> Greater - _ -> Less - } - - Publish(left, _) -> - when right 
is { - Publish(right, _) -> int.compare(left, right) - Spend(_) | Mint(_) | Withdraw(_) -> Greater - _ -> Less - } - - Vote(left) -> - when right is { - Vote(right) -> voter.compare(left, right) - Propose(..) -> Less - _ -> Greater - } - - Propose(left, _) -> - when right is { - Propose(right, _) -> int.compare(left, right) - _ -> Greater - } - } -} - -test compare_matrix() { - let mint0 = Mint("0") - let mint1 = Mint("1") - - let spend0 = Spend(OutputReference("", 0)) - let spend1 = Spend(OutputReference("", 1)) - - let withdraw0 = Withdraw(VerificationKey("0")) - let withdraw1 = Withdraw(VerificationKey("1")) - - let publish0 = Publish(0, RegisterCredential(Script(""), Never)) - let publish1 = Publish(1, RegisterCredential(Script(""), Never)) - - let vote0 = Vote(StakePool("0")) - let vote1 = Vote(StakePool("1")) - - let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) - let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) - - and { - (compare(mint0, mint0) == Equal)?, - (compare(mint0, mint1) == Less)?, - (compare(mint1, mint0) == Greater)?, - (compare(mint0, spend0) == Less)?, - (compare(mint0, withdraw0) == Less)?, - (compare(mint0, publish0) == Less)?, - (compare(mint0, vote0) == Less)?, - (compare(mint0, propose0) == Less)?, - (compare(spend0, spend0) == Equal)?, - (compare(spend0, spend1) == Less)?, - (compare(spend1, spend0) == Greater)?, - (compare(spend0, mint0) == Greater)?, - (compare(spend0, withdraw0) == Less)?, - (compare(spend0, publish0) == Less)?, - (compare(spend0, vote0) == Less)?, - (compare(spend0, propose0) == Less)?, - (compare(withdraw0, withdraw0) == Equal)?, - (compare(withdraw0, withdraw1) == Less)?, - (compare(withdraw1, withdraw0) == Greater)?, - (compare(withdraw0, mint0) == Greater)?, - (compare(withdraw0, spend0) == Greater)?, - (compare(withdraw0, publish0) == Less)?, - (compare(withdraw0, vote0) == Less)?, - (compare(withdraw0, propose0) == Less)?, - (compare(publish0, publish0) == Equal)?, - (compare(publish0, publish1) == Less)?, - (compare(publish1, publish0) == Greater)?, - (compare(publish0, mint0) == Greater)?, - (compare(publish0, spend0) == Greater)?, - (compare(publish0, withdraw0) == Greater)?, - (compare(publish0, vote0) == Less)?, - (compare(publish0, propose0) == Less)?, - (compare(vote0, vote0) == Equal)?, - (compare(vote0, vote1) == Less)?, - (compare(vote1, vote0) == Greater)?, - (compare(vote0, mint0) == Greater)?, - (compare(vote0, spend0) == Greater)?, - (compare(vote0, withdraw0) == Greater)?, - (compare(vote0, publish0) == Greater)?, - (compare(vote0, propose0) == Less)?, - (compare(propose0, propose0) == Equal)?, - (compare(propose0, propose1) == Less)?, - (compare(propose1, propose0) == Greater)?, - (compare(propose0, mint0) == Greater)?, - (compare(propose0, spend0) == Greater)?, - (compare(propose0, withdraw0) == Greater)?, - (compare(propose0, publish0) == Greater)?, - (compare(propose0, vote0) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/packages.toml b/src/components/multisig/aiken-proxy/build/packages/packages.toml deleted file mode 100644 index a0f1cf2a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/packages.toml +++ /dev/null @@ -1,9 +0,0 @@ -[[packages]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" - -[[packages]] -name = "sidan-lab/vodka" -version = "0.1.13" -source = "github" diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml 
b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml deleted file mode 100644 index 89cc4e58..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/build_docs.yml +++ /dev/null @@ -1,50 +0,0 @@ -# Simple workflow for deploying static content to GitHub Pages -name: Build CI - -on: - # Runs on pushes targeting the default branch - push: - branches: ["main"] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages -permissions: - contents: read - pages: write - id-token: write - -# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. -# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. -concurrency: - group: "pages" - cancel-in-progress: false - -jobs: - # Single deploy job since we're just deploying - deploy: - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Pages - uses: actions/configure-pages@v5 - - - uses: aiken-lang/setup-aiken@v1 - with: - version: v1.1.9 - - run: aiken fmt --check - - run: aiken check -D - - run: aiken docs - - name: Upload artifact - uses: actions/upload-pages-artifact@v3 - with: - # Upload entire repository - path: "./docs" - - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v4 diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml deleted file mode 100644 index db43ff65..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.github/workflows/release.yml +++ /dev/null @@ -1,80 +0,0 @@ -name: Auto Release - -on: - pull_request: - types: - - closed - branches: - - main - -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - uses: aiken-lang/setup-aiken@v1 - with: - version: v1.1.9 - - run: aiken fmt --check - - run: aiken check -D - - run: aiken docs - - check-version: - runs-on: ubuntu-latest - if: github.event.pull_request.merged == true - outputs: - version-updated: ${{ steps.compare-versions.outputs.version-updated }} - version: ${{ steps.compare-versions.outputs.version }} - steps: - - name: Checkout main branch at commit before merge - uses: actions/checkout@v4 - with: - ref: ${{ github.event.pull_request.base.sha }} - - - name: Get package version from main branch before merge - id: pre-merge-version - run: | - PRE_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') - echo "pre_merge_version=$PRE_MERGE_VERSION" >> "$GITHUB_OUTPUT" - - - name: Checkout main branch at commit after merge - uses: actions/checkout@v4 - with: - ref: "main" - - - name: Get package version from main branch after merge - id: post-merge-version - run: | - POST_MERGE_VERSION=$(grep -m 1 '^version = ' aiken.toml | sed 's/version = "\(.*\)"/\1/') - echo "post_merge_version=$POST_MERGE_VERSION" >> "$GITHUB_OUTPUT" - - - name: Compare versions - id: compare-versions - run: | - if [[ "${{ steps.pre-merge-version.outputs.pre_merge_version }}" != "${{ steps.post-merge-version.outputs.post_merge_version }}" ]]; then - echo "version-updated=true" >> "$GITHUB_OUTPUT" - echo 
"version=${{ steps.post-merge-version.outputs.post_merge_version }}" >> "$GITHUB_OUTPUT" - else - echo "version-updated=false" >> "$GITHUB_OUTPUT" - fi - - release: - needs: [build, check-version] - if: needs.check-version.outputs.version-updated == 'true' - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Create a Release in a GitHub Action - uses: comnoco/create-release-action@v2.0.5 - with: - tag_name: ${{ needs.check-version.outputs.version }} - release_name: ${{ needs.check-version.outputs.version }} - draft: false - prerelease: false - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore deleted file mode 100644 index 7b31be95..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/.gitignore +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Cargo -# will have compiled files and executables -debug/ -target/ - -# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries -# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock - -# These are backup files generated by rustfmt -**/*.rs.bk - -# MSVC Windows builds of rustc generate these, which store debugging information -*.pdb - -docs \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE deleted file mode 100644 index 261eeb9e..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md deleted file mode 100644 index 7abc3ead..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/README.md +++ /dev/null @@ -1,136 +0,0 @@ -
- -# Aiken Vodka - Library for Aiken Development
- -[![Licence](https://img.shields.io/github/license/sidan-lab/vodka)](https://github.com/sidan-lab/vodka/blob/main/LICENSE) -[![Continuous Integration](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml/badge.svg?branch=main)](https://github.com/sidan-lab/vodka/actions/workflows/build_docs.yml) -[![Twitter/X](https://img.shields.io/badge/Follow%20us-@sidan__lab-blue?logo=x)](https://x.com/sidan_lab) - -
- -Vodka is a library built for [Aiken](https://aiken-lang.org/) development. It offers - -1. [Cocktail](https://sidan-lab.github.io/vodka/cocktail.html) - Validation utils for writing on-chain code in Aiken -2. [Mocktail](https://sidan-lab.github.io/vodka/mocktail.html) - Unit test utils for easily building mock values for unit tests - -## Start mixing - -Simply run - -```sh -aiken add sidan-lab/vodka --version 0.1.13 -``` - -or put the below in your `aiken.toml` - -```toml -[[dependencies]] -name = "sidan-lab/vodka" -version = "0.1.13" -source = "github" -``` - -## Version - -The latest Vodka release supports the latest PlutusV3. If you need an older release compatible with a legacy Aiken version, please refer to the table below. - -| Vodka | Aiken Compiler | `aiken-lang/stdlib` | -| ---------- | -------------- | ------------------- | -| 0.1.13 | ^v1.1.9 | v2.2.0 | -| 0.1.6 | ^v1.1.5 | v2.1.0 | -| 0.0.1-beta | v1.0.29-alpha | v1.9.0 | - -## Vodka is pure and simple - -For your transaction: - -```rs -let Transaction { inputs, outputs, extra_signatories, .. } = context.transaction -``` - -Locating inputs & outputs: - -```rs -when (inputs_at(inputs, target_address), outputs_at(outputs, target_address)) is { - ([only_input], [only_output]) -> ... - _ -> False -} -``` - -Checking signatures with: - -```rs -key_signed(extra_signatories, key_hash_required) -``` - -## Imports and function groups - -All on-chain utility functions can be imported from `cocktail` and are grouped under a `vodka_` naming convention. - -```ak -use cocktail.{} -``` - -| Type | Naming Convention | -| ------------------------------------ | ----------------------------------------- | -| Address | `vodka_address.{}` | -| Value | `vodka_value.{}` | -| transaction.extra_signatories | `vodka_extra_signatories.{}` | -| transaction.inputs | `vodka_inputs.{}` | -| transaction.mints | `vodka_mints.{}` | -| transaction.outputs | `vodka_outputs.{}` | -| transaction.redeemers | `vodka_redeemers.{}` | -| transaction.validity_range | `vodka_validity_range.{}` | -| ByteArray and Int conversion & utils | `vodka_converter.{}` | - -## Taste it before the vodka cocktail - mocktail can be mixed, blended and Mesh - -Building unit tests with vodka directly indicates how you should build the same transaction in [whisky](https://whisky.sidan.io/) and [Mesh](https://meshjs.dev/). - -You can taste whether your transaction passes your Aiken contract validation: - -```rs -# Mock transaction -let mock_tx: Transaction = mocktail_tx() - ... - |> required_signer_hash(is_key_provided, mock_pub_key_hex(1)) - |> complete() -``` - -Then move it to blend a whisky: - -```rs -let mut tx = MeshTxBuilder::new_core() -tx.spending_plutus_script_v2() - ... - .required_signer_hash(key_hash) - .complete(None) - -``` - -Or Mesh: - -```ts -const txBuilder = new MeshTxBuilder(); -await txBuilder - ... - .requiredSignerHash(keyHash) - .complete(); -``` - -## CIP Support - -All CIP-supporting utilities can be imported under `cip` - -```rs -use cip.{cip68_100} - -let reference_token_name = cip68_100(asset_name) -``` - -## Documentation - -Please refer to the [hosted documentation](https://sidan-lab.github.io/vodka/).
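Tying the cocktail snippets above together, a spend handler that locates its inputs and outputs and checks the owner's signature could look roughly like this. This is a sketch under assumptions, not code from this patch: `SketchDatum`, its fields, and the validator name are invented; only `inputs_at`, `outputs_at`, and `key_signed` come from the vodka cocktail shown above.

```aiken
use cardano/address.{Address}
use cardano/transaction.{OutputReference, Transaction}
use cocktail.{inputs_at, key_signed, outputs_at}

// Hypothetical datum: the owner's key hash and the address the funds sit at.
pub type SketchDatum {
  owner: ByteArray,
  vault_address: Address,
}

validator sketch {
  spend(datum: Option<SketchDatum>, _redeemer: Data, _own_ref: OutputReference, self: Transaction) {
    expect Some(SketchDatum { owner, vault_address }) = datum
    // Exactly one input and one output at the vault address, and the
    // transaction must be signed by the owner.
    when (inputs_at(self.inputs, vault_address), outputs_at(self.outputs, vault_address)) is {
      ([_only_input], [_only_output]) -> key_signed(self.extra_signatories, owner)
      _ -> False
    }
  }
}
```

The same shape then carries over one-to-one to the whisky and Mesh builders above via `required_signer_hash` / `requiredSignerHash`.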
- -![Alt](https://repobeats.axiom.co/api/embed/54410212b620c3299be792bde8965a3371348895.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock deleted file mode 100644 index 31951300..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.lock +++ /dev/null @@ -1,15 +0,0 @@ -# This file was generated by Aiken -# You typically do not need to edit this file - -[[requirements]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" - -[[packages]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -requirements = [] -source = "github" - -[etags] diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml deleted file mode 100644 index 2f35f2fb..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/aiken.toml +++ /dev/null @@ -1,18 +0,0 @@ -name = "sidan-lab/vodka" -version = "0.1.13" -compiler = "v1.1.9" -plutus = "v3" -license = "Apache-2.0" -description = "Aiken utils for project 'sidan-lab/vodka" - -[repository] -user = "sidan-lab" -project = "vodka" -platform = "github" - -[[dependencies]] -name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" - -[config] diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/assets/logo.png b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/assets/logo.png deleted file mode 100644 index 2c456908159575c5cef268b16780bba695dff570..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 74727 zcmYhicRbZ?{6BsMPGuztnW0p6Wn>+SkU}z&ot4ckdmM6y5<)4vZXvQ|k8?^H38jo< zos*e)PT?FJ=e&Q{>GS!1zrR0vxc|AYbB)({j@SF4nTY}Cq2q^OFc_zip`JMm#sdD6 z1$KZ9{K1IG#DhNv0}O3~U@*}S%ztpduff_dxMa4Go~}h$-r96%o@~E8fo^IUI<#?6 z^(?#50V63^V+8AA36FG^7q09+ygo=aA&y44i;pO7JT?U$f4wFg$Fk}4_&{UQsU+Da zIB}gzyDN*4!`<1-8xf3O*+DAT)~YtDa&|i%t2?M`exa7v$Cozbva>@+*Jn#|MUm9|$_`xw+i3y&oA_C`eR6@yGV# zxB}8wXd*;;qMvXm{CVu8v}4otPC~v71qd>yl%H%N}uxcZmbX3 z%*EzGI-@Io-Q6h!-|m|l9+P*+iC#nA(~VhCM=HK{j$!{aPB@JE@{9N;Ju4_TEEs~w z4255j2adc5mqtiPNX+(#;BLnDAVbaVYY4i*zZq&o`!e|rg4-Wx`4cT%S>kxgeenUw z)2x;{PTbAU&l-|Vhy{H^Rd5>aS>Kp#Y~qr#JItjVN<6-El!xbFHUb6~-Cj4d|AEd8b08X7br0j4Js}AL%%MynIJU ziUQ5CG}pfTQBtT@TfUN_uwb{#W=+FJ`p17xUZ2*;_;<{9&cR>K-&B@cXihZb#j4tA zzN28xi~4XF_<@y647>$?Puk3FtB}22)e_G*V_%6qoYN^*hkTS_u=F7OUa+(O3}ND| zniJ>iGMUVOy+>QIS7kAI#T(lX7f!f@^}b)V@6&quGBzp6M}ZZlr{|i>@#gBbBa3F{ z%6+H|=U8-G$N#!WC8A*&5Ao4**z;3Q#3n!fs^gWT;id$p)h` zV{x`e`a8OtRWEulUjB;?PDNtUuIlMY*&Tufd11NDsNsKUj{`~f zUJ%Y0#}F>gaJA=q!7qcWNO=Wgp5Z>VtZ*Bm$8i)3B z9tUBOe*>nYr*|jlVAw(O3EcNU&E%C?=;@m}qNx6MvqsFNQr$RDJEyu5*lz7l^0=By zo-waamiq9&J+9fb7rHCjr)r%ZcU|0cU)ph53S%cn;;P}?A+*zadU`82nJZt)LXdPD z_8126p`y=!2-AcoI(UfOTK2ufxvoG5LF90Hgy?R8KR!RY;EC{@{W6Fvw)yW@Fd!}I$ZGe!;$B!cL0ghfe>w9^~jg~SP7J{aqf zn?ZMMKx>p84p5sDwkhbk>vK^(q`0$1CpWTj=Z!tIiz@b-;rr*v zUgb`>>Y`Y1%Pg0eD=%ll3%H{+NAw}~vUB@ym;XqdyWaxcsN5F|%T=_o`#NQJ>#Mql zz``lP)FX;VZB`QEzhvvKj0>t|o^o@SYtr~q`m~t#v~QiQ8i9qk^X#ye$KslcPb4QL z$-ZX3<-I<$XPa1ou-$orY1PPZ-rLR>@`}-qRsvBubjQXPr6&Ta_Il!H4rRno{R4B$ zX_hcv3-cg;&X23NS-FT0qBU0nWLbhT-3bFrPyi>3|IoFh>fIaG8s%S(X zgVwU+f7yb6G!CV@!Zjpdcm%~YC)^j{no;C@zrjdb;HjT<(A;!>hVG(IPV;M7B{$E2 z0Ddw)Qz3TDY}%RlJPzXI@a8ho)0-D&ChEt;QnP2<73Ky_cn*i8Z4T|T(mBg8?G%F zc8092Lw!3EY7_*;ub=v{1{rzug#C98r)Cfl=JP#Xa<%S9N@yrp8@Iq8@>%M-{jBgQ zg=-_dl_xYO-`7hMIuGtBmwO`6?2o;;Ex_6df7k9kSuP97#B#G0jIQ3jK!u*F_|Zz& 
z*S@%ype-u=4|i~srU`4Imh}DUf;)|O+z76eK{qE51rHnPnM1l!0V^&kjko5k&uCC23XD;NyrkJn2{qo?i-)gQs%UG|ea%JUWKKP#mD$Rel+3gwDFy zI9BWKK?Ucyk^X2k=v82SgA}a~5d{4{^+7brO4xWz@#MbNqEeaU>If*5_-gwRmp`%p zQD#~?{TqR0!Amqw8|u|wHQj)u)hVHuBm5eEX0qA%NV5?2S8M4UkIC*2ZZ$rmFwdys zx4S|kfD3=Uohl1K1gAx^xk5QV=@IvwO_}O+UfApLu@7j+v?Kq1a{a)GO8-nQMkn$` z)6LffcC$NJtO6dqdnV0`%z3p=#=W}#l#aGL#lzCB zOM1a`ZZKm#<2_Sbd9*4dSw>L7dsSlnA>|o(lxU5Mt=4^n&`A%A+*!^Hk9j~gZ<`KH z(hd8ZWFfqs)Urd6$03`fSh3FJ-gaTa{#Ztnlh4CPI$}psMkLI^U(Yk$49ah&7rshx zi~PMuX$hJgtIsrp+uM0|81bqipQQGFzvAOW(N$oX-ljni4c=zro|6GKF1_gWs_|u2 zs8i5b4Kmc*o&$5+m-BTsEBSiP#5B5kQ)s#Ahd`6l2~OxUJDAniZGr{{tt7=MoV{2crqRGJ zX9(PnuiY8$`f|xlJVDAMhFtVcuLFFhwNGqf^Th0j1+8^TL8&q*508ORBVpua~thK$;X{pLFSJUSvNu}f$JGc)XvZooWYn=(voQ)nd4!}(ZB2T) z9^zeQ@VOEg944ILWAe$>DZU`D z{0vXL5CfJY?|M=pNVEP%(f+W;#OB!@)Uwnj+We2L%{^7{La7M>WE7U~Xjl0Vk=z~n zfl$ftq_2xEtv~0!SXP3r;nU-qGCs*oH{FI&oiVT3o%Wxcs42feWZViN3FL4nX!SsQ zt78qn+9XH=v82GSY1Xnxzk`P=?)mV)fgr&X-bh?+W>T~USMKL~Oz|;tH@&r7XOOBl z|1px8WI^u^9N+8R0Lp?I!lu;A!B(z|7pKdl%tspJ1O_^X~0b+7(fy5<1^ z9Lgyy+iRte=bG2oie`@xdx3}tgV{>zqFY74W;gKN1q}q0Ln1Pr)B2^0X(R+51^BZU zpLWb6ODItj0Xib24MG2dheSi#Gvhcu2*`-yPf_WHheO)y{FITeO}TG#qF8$gPJ$1; zEWE=!R2VE)7wp!8XocqlSD1@XNVoH!y$n~2}x1JKjP?DL$@3PJm@*0a;)#;+qlyI zW|z$$=`xV!#Jo(FD!UtENIrQDc)=%c;h6 z!`d+$lkQ9rn>GI!Q$j!o#JZW>TLnNdqExGi#>BgX_pJv+2-sj)1XqkqzFLZauT9ihw0r`F{VGQtk_DqlZH$ zJ7EEb45vn-#lrbnN79^G(Bx<6y5Zy>@#{zhv~hSc4D#JQbfu@go75HZh5ZOe%9Ko& zGh}J7F>|a!(txCCFSCv%@W@J^`8h%)90nR$d6Zi>z1P6UkqlMlw0V==@Tu#OQRH~- zJ^B)z;<9oZf)Hm62>$GW2KjhiyJI2Wxu!>CD)z_ab^AyPrqv2yEo41PPD}zWOt~3Q zU45>9KO?}{rm#z^rAf8;M+kG1sIeB?NwXA!lSc}!io_7JD>AnyAT-_6$}IPdiljI< z`Mo2Dg-x8>4o&(UFnw$W|Y9e-94zgkAW zz35g{#QuHK`X;y-TBCy9;C{!Tc$UIgMGPHGEX$vzi;{g-#9PPnGF&aOUHMb@Ft0_-Ls2W>tfC^=tNSeKAs_Rt*%26SW=XlJ)ku zK@fZb#oP%Pd+Axdi4nuyg(OK*W}iSYqq%?b=oDx0S^HE{?yWAWGuh+{B+z5pN=?rC zhUSe#g>cilL6vGfAKVh*v)&cb+XK_InYssLVCVNV5cOLbS)Jmk?$VcxyC9=vw=W+rA8!zqWNKJ?ver zRHo;N9#n+Ybo(;T!a%#OjPQdd_1jv8 zoD}P7pgI$9{8fIx#634!e8l;?3o1^wz6Y-Os6IGlB$I~?+8qx>;sQ%hPWip71<=TF**=Drl%AEqM1_z{5x@mmPgo!=Lv3+n(za<=|XHsDI~5R>%6NkPD#i|C?O1GWYF)?853~+iOp4 zo1Hr5_M!9F8asEbAHa8%Tfh5Ipa^tb?6Nd*nbR=`JXC;_fMZO z+;@VYslzLgbsi&xAtFiVVVhVQN%4V((e{#`!lzP;ixP@loG5mEjNM2WWrNh!e$ERb z=!2ork%>W<-mQBR^5IQ^H&yBuEg(mHlP#aV5?tPwai9jIbrnB-h)8Qr#s?JE9{&ik za1$;d9xy>}AVWn-5iBM4onTK1IePv##n1gzDgg*{dP&sT#pV0BC&a()=SB zM|t@-OIt!u{|wKDKHvOqk_r7OspMSc|IgH7XYQ`XOh{ISV3|TTYH1|I2OKQJGK*u& z|3naI*~jTU__IC}ky4Jv~6qrPyWyn^3K z!$Gf*KlE%o`<6yDWx;?-MFcu^}GmOnwV5abQ#bss6oa&zMojx9!FUx+d3 z!ft%xZt+vr7t8%daH0-41Spzb>nQd{wz2K~D0K0<)Zimra9r!opm?xcVsM?g@m-6% z2sp%T6D?YYWB>3ji%9VzxqHqk-1xd7AIBG&D+O6kHYlaTC3^%URo2Lo`G|ypV0>6W z#L{eWL9Eo8O(KF!mj*vd3*_&Nnx87}34aTnVV|3qZ4Q#+SP^4V%JlPQ1oc&Gb4bW=3YM$ETNm4^IHm6i~&ipM#-J9stpvqLXp z$ZQ_3kL`ZDfe*}z!mrk}4Aw7FqK=h)(xG^lBondxUhCZD9)+q+V;7^lVC|dR06$7c zo25ScUtuI({~@3TJ;pBVfJw4oybvlpNK@7%N(yBRQ{~pFig~FhI=r9nanth9zVjJg zzG~Tvw?PsZ&bXRIy@C%2TSV@f+H6+ZP}eS<%c{N3mk{YgLGRyOyCUd$^|+6X^z4y>0y3i2Gu4%dA+L37W!^E& zON8CTvIDf`6~T{1!tS+KvyB`wM%tD5UWXw5o*#G$6THp~T+nT0%(1|euA-IMYUdeW znX>&zcb~mKS&}TPw9IjQ(*MA_6I`^_=4{`(O;b z0z|fOBY_h0)ltbILB0!0r3TF83ZDSB_=nDh)Z$2YZdELSrGs50CiOdJ^lZ}iHTu|-_R1(g3X@YYTSIBG zHz~nxxr72&Hg$ffX5&*{+uQPcs|NL`L*r z;fv2A4Q5&z_@wfKWCI^?=}%-$wggUSLyj$0!v&H2aR-$b`Hqtv6U0oJ=qa*Dbpz@C zFMm%l1jOj1oAIvlB&lxI-MTPIj#ZJ?Ig}1a17~he9;1e{t0NTe!q+1U1-VgAzj4l-=%mU@tq5P?$OgIa@N zLgn-Q>sTpuowVJ)w*oJ<1w9(SD~)ra;whLAXWzY)An3YpafE^6L!tPqG==zGa1Crz z3Ih(O`${aqNB!{Kqr?xKtmii=Y6)The7ykSn@YLw!f@el>bLLVoF?ZEjaqxFo98-PP7&-2paH=iow5sNJ!SRS!iAVbF9a;CZ56xM6K6b zB?%;iG|=VmyW5~;Fgkr5_W`72$P^NBNp?S2A8z>A-T_3y$`kLA#m>yDK{r0u>}f#K 
zGuX|jKj+oN&%C!kB{=I^>=|xGAH{FFi5Cq0@hd?TPjJ5g@fR=k4Zee2)1tG~4J+gw zNfam;rjjjhWhc+B)w6zU@1t(9+*F1N{bJp=E!?s)lT%n$>Bs9XLo3{OedluS8$P%_ zzIwBKiH^)?>-6}*YiN(P`0#5@Z;$Ws^|3FCxP)DxNCZKh;CYb`gIm#J)~~RUK(qp} zsTc!U4zGmESDStm$P+o%DWI1{1VBJ(l&G9dy*nRx3De3H1>IES(X(HC~TDnJ+;; z;tlx}$M)z6`^Wa2CE~Z`14*N+<=z zN#HRvc<3z0<%U%!Z@~vJ>!T!F!v?_&{X{~ekf~d`DxS}!Rt2xnsM1LgYuHC0d?MF} zAmt=woLtm*-vRI)TGRa1SN;PV_biC3{*9RzHOe|DSfx4xGEvd1mB<V zCD#8#A9)dYc%wEHIdnJDk9Gbd)OQ8EBI;p@?@1F{PI*kHrDQR55C0^eC+2@JPJ7IQ z`kOL`JDE6p@n!a`a zfW-iD9E;NgrG%Y-imcehqYuQWlH9*I&%PI@rUE7|UTaWVqce3zG-k2P zc~m%NNH+ZFC1_)k{x4yo4izkQplI!KzbHQBb+mc(9c~Hm3pzdBb4nB|_7=i$vyTdL z8Sm)8gM;Uc!!~N-d~M)65d@Z%Nyo~7CF~eFcqOD8I#shY@PoOw3*})E!JvroIu`Ab zzB|`O-f2q=Yk2_^V7Rw=?a5PbZeOpRbYyKhzx{ZU)h;3Aqx0ogqWrsT%A+sWfsX{2(AVv~CfK_s_XSo^!=>#w(6eniBBP(Bds! zR^RW>+;e|GAP$Xb$8+DV7C}xlkK#41-}_jZ4%INj3#NN!ocHWmB^JI5HIRadcH`ZS zvST&znz*)*pm@Z1-Ia!CB&pUQul*eLaiFU$Fq&$Hca@ao3pvyd;??>CP-kFbBHJ(q zbrzkQ>r>_MoG!V~;TIu_xL+tTqdyQz4jr$9X5TTz9e|tz_GjAG?|-^!X@!g-AIb_e z0=V3XF86eJBFfyoO?^1WBldEa?5nm+mNMXc|;hh92*qg#Hq-(<^D z-O_{!VsAbheg5TnwM4Hz3l$|rZ@0U`+0S}zf_U)nuABU9KIQ54ysNU&{YO5v8tA?@ za^f&GxWgYYIvVoM$8AoZ2H#bc?vF^ji7b? z=Qd)rvUiJr?5BXXQ1B|Y zE{D!6?hEtA#J?pLJ>`TpEDds@ot2kfK`{m|iKX%NPP%Xkl*M6NzQQi%H>HEHM_wSdGF~@60SjmL-HLb|Cy2(tRk0ybR)2yqH z-Wgs-^=Ku-3eJ(2n8IQ;wz+a|murL~p$X8YtYqSpm}(-Uqcw1Nqsg_+a?_@Gmrej@ z7=QK3`-?@1MjQym)>~O0B7gCOvOfbA4Ka?pHTT&kT4ZYknd~iGhw4c){E9r)LI%be86lDM<=ekps@mOHSmY)V2UeCVjJq}7`)NoXLU;KijHYNG-Zxy%aGZ|!@ z(6S>}U7=U;C^?i$AIpsoEBC!q=OO{0t-T52b$0#ofvvbw;eRypfRg4ETKT^Jl`49E zzy#pCg-$&g^7gu(|JUx^T+Zbl{3YQ84Ekqhy@5|u&bR2tEl;O^(#R*m=Ckzgejgg z`{bGLaubMm#Y>&d%W*AoK@(=t1fx?YgpoLZ(_?8$08RnhY_%+3jz?(sW7nAzUs*JHO>b9><`#?b&PGG6da?{>kUx-K8*7E!n zVrDR1jF}pU0@_oYY**Hmp1h)ERN6Q(vQ&DNcd+RsQQ{bGN7As6@KOKbn?DY2uh$R5 zmOtcl4j<5l#dY_mn7N8oq^m#j1%GSI8A`e&ytd1O5cp{tb9#w7_l4`*b?G z!=dT!!tiUh9Na^7-T|G$ITCImwZoqX$D{!2_uLzw(?BBd5_@^ zhOkXb;ex~EAFIk7^kG!TN}8Y+)@JJ`k6soQAVy{09-7}07kYOvy#sG9-C<&dheuMg zchfT&d=FD-I^huZ9Cb6v#l0&HhJU7%LR~16{UtR)t;Aa3w)P{26+Jj}z!g*X zaaKL~-D_+)3!IMZ5UL`U7-yS&eh=(S(~Cm+C1~6mM#Xfx}N&R9r+ARj5UB=yA5n`jrjHH7by7% zmw#SW0^C^VTmyCWZOvcBeROte^13Gz1aCkcuHrc}%0bx_pV{Am+`x{3kfWN3dRMRv zBTT){e{_9k?r{8+LwKmZK@~kRXa{En5tGwVYnd<0-!=U29|EHBT8yFUU7bt(Ye}E8 z-0#C@_l|${JI~^}&BY*SX4nMZPE~cYXungL13uefhYbxQFaHD<+3bcyB%X)804CX! z9CM&n&2%Gk9$=?jMHsB!q*u=L|IR)% z7xcLKoP>Fkvf)Ee5hEn6254~0D6+(|j2}>*Mm31He196@$BB~Swz4JV2nUR(ZEAwG z_HZ&K5j%KPe~eUrMW-*yAxC^STPX;}9!Fp)tlTj|?fU$8#y&km0V9OUmdL`6oP1k% zM7qV^{P)-@19I|IrrBg+xLi>mMxk(KeOs152jr~ucjRL0J$9VM&fgb+-TB#XX6Cx< zhAI{CX>>xsWabvazyE&L`~voG=mS{yn%6bV%zhI`)|B})+Mf|bZvA_)?_cWMTOHpk z6^5?00nl-2?0xH~iPGxFH!tT12x!Eymle3}2S536Q+C@{576Y#V4&aQw=h!t`IM;a zL{&(6Bwm%IL9UV0=E1ygWL;C!W$A1XpK#ax2o=$`GXHYV?LLD}-18N;Z~~9M7W2eW z+jx)x|LX=p7@IHH?ekk^53_@~*8EYxYI<%MHcCmoNH2y2X%cZi2WF*BI3T0Y?)`-k z^F!X?eCCa5{17o^23Tiz830uU{`Mx!9J0doNO^g^W;xAU(((^7vCq z>ok+xG@DQAdh3_!MtZ%6K*~wof5`Ky$p@cEe4W|0bX3}gJcU5%sT{$qY(4KnOVtYwtL zos9mBQ%n`WULz~sBPJK8#k=sn0)6^2L+g4qs9O|Nk3EuNrpJy%<1lBm4&XexbdM;W zz3hT5XY}umu36f2+D<^M$3k+AF0MR?Uzv~|QB|2x@FvK&3G+KfD)u?r88b2ulS6gw+sdX>VKe}=zit|v}T~FY|BtiaFHuF)J^7uK7oN! 
zHpMrQ>tXQ{QUM^Nwj56YNK7Qb&q=v%YpiaOm*INn2@k#r@4K}v0_R$0a%f`r3p?mZX_ZzH)`UkK8|q3k*B>Cg?Y+P6E0qos!L86W=f{(TdH zEP6#D{SFvUHcmHA))j#368J(05lLEYHIHP(@%`kjETwcxlwy-?uRq?NZvuwG{ba!O zvcbcnk_=6Wf#^hR7kF&+p~;?f^h}I)H(B93hGMQHW&jbZHcTUNGC7T((JPjEU)b+b zWgMmud}LeXCAPaUuDco)CX}!aOvOc|b^xavYfI7){N5m+j&eQ9Ml*jO$+J2|9!FpX z`g&Gv;iC_deJS2no$(_AW7zM4c7VEPb@bBBik8FddvgJxLfx>aWcI^~@M40F)EIX_ zBLV(fUyz$c9+U_7NjIjzPYnv)Vuz=LH1XMNuqm0qeL92qXhLl&%x(I09#mfLN;P0M zdB4-o|EagSLCi>hWlTOxQ!x~IeTfIfs<|jpwm*6bvXyh;aj=rmi&5m*u(M?6pLO{o7{ZSw@yIY~T?3`3 z3L#S9vv=X)Xl*E_ENR1Y%k6mgc{M%Z6mXWlyH5~?zPl8@c{u(UjVf`@&N=D8^<)92 zkwz_=4@Ig|Lh76}0xWi$fV_o=)fVDMIZ;39>M|=57mErJBdQQ@BE0Wu|6GdBYS5+? zQ>8ED3Nk1uJ^|Fbc}6)v*We;vnAQ|g8f**Q-n!hkE6A6amz&lcdiesU)yV-kKHOP8Y_7`?wcDQ1E3_ zrv$5Wk^a^a0dJ2WW=8)xp(ep9nXq@blQw*0nl!UP#4OC$-Jj0$`N#O@;`M|yqbkwZ zbXtsR)HF$d&VTb0=US)6948Zu{z?K#4kfL-0IebjcFEHx6dQeCe9$q9;oiaAPJ)n6sn&EDFt-xcGhtB3u>Fsp2|7`n;) zqx$vNO z<<-z6#shhbJbJQ89U24HEQwJ^;MXgqcCl|Q&GQ(d-HcwpwpYbJX>U2Z>r`F#F`Mg( zq^AL2`unXo})lvNoKqjfj$;>yJ7~UJZD=V(n_&j z@a31^N3Ve07N^q{<8R7<-hudGmgOx8^P2>^Pzn7Biq9;d=%-k^T!Q3}Vr7z&ayyC` zv4ycc*US9^>e~o0PKbMz!h`nK#TH4Q`9{90@ru=;Bj23ed!ikk;jEC44p2@0+VYen zl`S~`f}{7i@e{4&&{#b;2Q%R;ZexpfD0Wo1$N7(u)!W6ue z(V^i1V4DLdrD;JXLyCVjK_cX`8NitH!dpmrq!wDvM~W>{fIo!1b#ZpjSR^4P`zC(s zISSa0s@=dCrL>so6ey!ym%CJzbCiqI>m`t<=0uUo!zw_)Q*!Dv!?skxgkqwEP=TuG z?fkRZDt({f7xkKBR!eWxb?m)B4w+O4Mp5DwU$rLAC(QpXUUu0wn3Lt zN-Oqe2h`hSq}Y4c{ux***@RDHpeJBY3g%CgmTULmf($$+v20wvQbFh)q zBv7Z9Y@QGnTFzrlPS%DK0>n1#SdQBQ7Bz;8*7hmo;Npg)FKI>!4r~rHY_5LI^z(d<% z6^y;{)5WIYJI8v8{J@!&6Kp8MUgPMh69|~kC~N@uk8bU2AQw#c9`lFrDRuN`IBL6a z!o&a4bLLk$S0@eMAW0AdTT+mRw!)94zzfvGI8m}Uhvl2+v4FZLaKm@9Sh1b5BLps# zaLDLF#W~{ZX30jD-dNhCs#DEoR0q~~p`8X`h5ncb5XQsEdc#n)Y-k8LC6Tey$8Nmv zzH|CBa&7E(QI_xG35*D+SAf!!4g3Gdd+)a>lVEN5nE^y$1zia%N?1h@Nd|J9H6SBN z5s4B66eJ@sNFK$E2!aTbm89gHlOjQoEOAJRB#Dyqyw&WUbFS}i_~s|Bi)VWJsjjZR ztLm<5!I;)bz1z-fv4;=)Pd_xkcg`fY`QBF#KjZfDPRZA~loMHeqayf3j2D)-EwP)* z1u3_+{M>x0yL;LE6}tG}Is%Int{2Tim&45fv>et#dyzF!l|GGF5}OZtQHG#8yZ zPT7W@peTQ0!jNPz<#V9qcWB=Evc}sg*=o4E`}ydGdM~O7khxzi`pA-Si@WHs*l_!A zKUB#zaD)nQ`q#IHRJEx$_+_{rjHYWD+IudQg?u$43{tjx&0x#Pbrn>Tu*Hkh-Bi9b zrBvS}tWCt({^xMskyaM+Li^N*z{^#SQx{|SGcA8^;yyzx@BNI^F*QL&_gW37qqmJX zoTf9snOpkZna(Ql!$j;~yi*J3?EUmrpurn))o53m?Y zXCa?uzv6MMk@GRFJHYD8{hY*#3L<@qs1?}UyP7XD4!<5r9hD&S0#nEVQYWqH{?pia zc5U8dF7|R?x zpJNR51(fa=iCEjA$$Xg4jGp?oWX-nq)|uzrDRv=~+L$KB1;GmOg7o>3ulZEF<;sXg-eyFU?Vq@>^vM>yGUtXn^VIwM z6myEB@lTS!uhhycx2~}eR^N5ZE}z7=2(n<9WQEtHg;ja?!P^d7?*sKB_ZDvVaB4=^ zxumOos`gh3eLdO*%i;Xd_Y0NFOBqMY*>;g3_C`r=7>|#}pK^>tjK|6bvpix8;7#p- z%E_(2(Jq{!U1Ib(@nP$T@147Au|i370%Y^rpjTE5l^5EoUsqG@z^SS0s{iRTGNyD% z0IPkJE8G-6p3B8wzo_*^Hp(b+Erm)2{!+;_PHTWM%!_DlQLEq^gNHQcLs?3W~D zuZA|ulg3b;|F0Kr*^Ic)L7=}W5z22CG z5@x~lgs+(rD~cy^*Nhf#J#6i$y5l?%8`=D9{UFu_uGi1SnGgAM8=57r@eWz1nJBox zVBuh@m7?T3jdY;wPJIIszJED8s8Y`ym>$kuVqkhK?HsuXz8&-_BYIduYAzRk_uQ`I zJ*-}#692|QZ59v5N1#wBSuRp-du5xo*_pe78S*JXEq4V8^j9%$KLkS8rd7s;Gk}Mq%6zH?Lg1C^zO~k&s8gcx8OQDuvb>Z=`tz2uf zxN1&Z`e7Dd!QgBu$HJrJhKM-J;8A7UA5V1yiE~fq?hx&wkeLsB5fw~bs`!D^#7M)U z*1v=ByVGl$`}VSrYHE?J3VP)#i(qH-F)!^6Yxx;&!X;2W?=2GXOjivA zzgqo=HSZK2B?Ot4&%20&5%dqZRyt=3vS%qw+oUfSjujLY{0M=am-dX~z5!j&&R@m$ zZ;24OEjNyk)6uM#sKj;obkRhmHfKBMrOdw!U);S&e#M`dRcL4cp1e_^4;-0}(3ss<8nwv)ecRncE<>0Q$TM%xF&yUmJs|dkWakX;N(Yp%W@0sY*z0TT)Fz2Ur#Fp&asNlr;u zZ^xt|$6b8P0wgf58FBU9j3GOFn11&Bt6~7{Y@|555q>UiT3O=H95S-JV}pT~rNB4q9-Jdq0r`Ppvn zjagtqWWBX!s8lm@ttOrTw_%BKi-S0LhMh)K z8NUzqCeI6)809+0>x8e3!f2 zwO+M<>Mlmyvhd9`ab)Utun_IDz0@;TXvrl3UC}UKA2xY)oC6zSFF>KYGK`fPCCs`L z9m~01FSrTnup=D3UdQ|7%>Kh}tU8)-O7g|C@89d1EN;f`@lg?6==b=p8pkty&U(*7 
zhA6GWV==__NTsg(3hqziT`v_ImE0U+Ax!PD<^~sD;Dl_Rfi2eu-BRPz89Fh1DFJy3 zeiq9C(IDT?;#WI@>vfE@MiU_K9e1eqg0yJf_jK}FG~LiA(D2+bp0-1>p^`8CqkSs} z`yBcsl0LtIbK_hc!NS4`_vF1#muVYGax`IIyn;cGnER%z9Pa;+-ml180(Kn#Dd6P(OX{N*8L-&f77d|H@1Vy*M zr+0L=U5b0l*)J!e)x8d-FybgO9Q#m0oFi#z>93~-w5A2MX~REbSP0eMy*4+ydmi83 zXh;+Nu_6ULqVn00MT;sIW<;7Jcx)HP+ka>`(f`X(R!B*Qxqgk9HIYv)slJ3OR9^Fs zpyVyd(hWVNX86IN!3%=aH1+!85WKQBn2{&bB*tKT_xI_b^2+8|>=&fsb0-CP<%O>q z+!RhTkN=o9e3mBeovw$?BKz;r#zW6p0W9BVAy^qV_f+^(Ol2`HDao}J83Cv58#(h% za)eXwI`&x4u@H=wTDlEzP)wDdyV3P-A7d_h`SQWPCoEFrS}b4U3q3Et$lzvm3Z>u^ z#RaUB{23u0vS+~sHg8+HJas=+{=%C;R}8QB-bhz%naQkc8u<6Nzy=!vcY;k7K)@Giv5EdlXk`TOI?C<#*ET;MBuf1 z14l}QVd(xK--Pkn1OplPrQKlWx9e2I_>1CgLr8+irU=?F#CDioc)&tXpX%{B+&jJL`)vApZOTjvuUCPG6Q+O$o3IC3ZAz`h!={M>%Zz@Uvi0sg{|Xle{L6(3^ocrlE~ig^X6j~t;(%Sv>7>m zLkSd0D=kIiSNVv8mKxOKwalny5Sv)L7_?pM#jMY@`|e2W;r?!So*}z*`xo~oAR(@) z1}atD{bOJh3E}2U7WGN+=+fh3;dlp|?gZN z&CTRW_{TJrY7@EaoSxo$q$W3(Api?Q6{t`=e_$H=dd}z>0eU@yft-#iJ9{-j&#%iF;&b!xLJslD&9L zjtQrI9a~gN4li9!fL=2X@DyBeb@M%+G^MeWt-dGaOFVbLh$D||(9awq%F!jRmX3aA z*dEaS_~|@-n?2oDMXWQV`n<~n1XFY!-8@tM{YyX9V3(;L`s6*wA*GyOcD z%@yi(axFmNAEu7<4%71`kUA0T{FNVU71Q_ogK^NJfCY2?Z<=u_ki+`63emoM2`dTv z%*0i!6?X5+l?P#)Jy8`)g2C)21r1ZtoWwr|utY!1AKz{kz67ncp0!`PHU`9{V^9Z$uuj(+mE zC-!bEZy;9eiDnI$0w~se(!)1*;IEm=7X8dkv+ut@H{H5GzB*=> zh;h6K{v45!CKHaI`+ci@L6@u8%TMY65C68#wvLN1-B6rV&@wY+T@IdPeQ|`wa8^lPsFlqb5;gx z;DG0$r|?rhBw&#~`47tw#%fjrT`ub%w zw0}b3tRlzjEO6$@v~TMQpjfmM8$snq0L zv{<&3zMF6Y26fvwD9p4=NW$w5c*B!-PRSkafrrT@GNVj=VjK#zVNb*e-+(@>Z17oV?UM1yHfy^qbgEs#AQN_Nz7 zEUz{Gd%{5eIW5mASf?3%3X=oJ7Ps7;o9R$w>{b*xbXRI$OO+m$)f+Et$`I?JF>p3E zBc6Kf6vs zLGq_6bnNDhYv!`_Hi&etZy9XD5X&)k{|^c-TsmTto_a9kyB-$d1}grln z4s)=vyjb5#Shr#eWb?=~vIU$_&)Vpp&dEA_=wh{vLl3|huV3ONX1)L^Emy|flhw8unu`67(iDu4$4YMuHeVSZ#A>w?E zdrExsGpE1xJhGCbmt&xa<6XA^W?{IY#0xq5=P=4q7{yzf!jvh;JY;E83)i~Q94{-| zPqg?EGOZgW&&WfOVATY~;_e$icxC#T`@}GBnSm@H)leNLwdvJ+kEOEdaiTLDq3WYt z0`_E!MjnEH<1r&7%$iqiOsR{>uXQQzjzUB*{(T!yZ(h&(%Wp!<6Sriwho8(%9`kzf zm~kRJAb639*u`FvwkpRTj#qXtc3mL7N!%$~M&Xwx8dEBP8Sv2h#I%fj5 zFv&7$#-V_x@a8B2whi*pA=Wgh#NEq^W4|siRVma=(=9TxM|E4CZLx1Ax-ZSkk%x)WZmV~$ zd{xO=k=uy~?Ihy|_M^}ho3U+jwcm!fUuA#6I_2-xpS@#>kC55;8H9HLsV5g&;4;U! z$DyG+79G6!Md?J6Q{3dm>eW$4>4--E73mwK14Z(CHDMKu4IXP^-%YCq_~;8dnx`>~ zU~2y$TLrXmxMU|jm}eMp6^o>EcqC87z@7&>yZaFYd)z*Jge?*`{4~+1iDrDU-c!|W zTBScB@4?xJ>PM2W_-$MI_eJxwK-Er9!aBLNr^l2QP3bIfsf;oBA#ERw7nWnLl5(u9 zI8&*{a&^3;=pd6=R0w5o*Rc{A?v%X(=DH(g%CK#OW|vrwKgcoK?O)aF{Nt4w8X!Da~eQ z!GzkSP$;-2jcsdrN#TArNK~|t()R|d;a1^xr&>};EtrBKFt3=1d&x^9@u)){H%*zj z%C*=Q+FscjYYFciI1d@sw`%uO3Oai4s^OIT`#U$KH;&qxCCIEaDk#w69Wtr%nhu|Gv&eL|QUA&5w>)89cYGzT^ z&0K6{2_y2DO$@deE)Wo>^=WC>P(w0z%_e-0Nc%NTnjLRYg~f_U(HY#gw=G`A@@tLWtl z0z>T9+jokhdN#zmUqYBE6(a#SHsL>7=A}P=ZTRiteZdjk(>ZRzk_HD;zsicc$T3#J z+Q%<1MaCR1WWnWo%5+U*0V?jHX)~obB{{+lw5G7sfjQ9=(a^QCOwL zdX-?!fa@YP7e1J_^NOVr^=c{I-Z6B61G*o}xeG$NNLW$`^%{_NrIh*jW}WsyMchY)!n_sdP$%HE$9 z^9%4||5&)VtO}8LL)vry2L0uObQtDR>ZKj7GgxfyGM#xil4a69bxF>*wB<(Edfiab#>})c;LKkY)B=ovG>yv@;+rD;&SffHW0K&an3hnd@0? 
zzd}R{HD8C;oNoVA_^@^1&D~=no6odzbM|K2AD9v%Cr~aq$yPRf@d;z89IaUS$X}vB zJrxLL$z$CuK-uHF!(g(~b#98AU<-@)&6KxMWveh=N1l|I8T?kfozZodvbARC*+ZIG zW6kZp!dokDk#w1_dP!J@AT_1b)4+6FzP_mcHT`)FTrSTVOXV^NG)oF1Xt5GUZ)^XW zuDWNm)!p%l$zRM>0N;(Akao$W4oIMVq~wXm=*DMr4mtK%1yi*_bqny$r|9;@dO$ob z&7KYpt4bs@_2=<=Z^xKwmqP-K1T3_bE?n}EqS&>6WFfm)c8^PsJ2J$68tKz6q2TVP zZDu@^UwbaCeP{Y~f9>o!5drd5n2s1^QwT@fi`+I6eCInr14b7#Pv{Kvz$^rM+CRiS zVPy{p!M}f>HjRz(Q`6EjH?}+mmsl#-t3GY_@z6QcU)*ofiiBPRLbtS$ken^_)+%5B zt}spz=SUH)tIigXm~Xy4cgm&~n#kMX60eQKWp z)@VE^Ih;K_X(!l3B2Q=e3<{9-r`rmJO^)uTcTvNM3E&IoAhIl?kF*1`hl$R8(!Xu= zD~j1z#K>zpGg4#B@#w^tri&#z?%78cg7_8SnlY4dn%K^Bdv;K2d9&TvL+I;d>Gmz> zlhcFls;`j#l3P-m;wI~WX5781`{8(_tg%CaXGciY9R+zc#K9_56>1AVDZyRa3n`yJ z4+&=FV6V@Dy(SGA56$ie1E%&PZ{hOcr)~;oS(dJ1=xw3Eku0gy)_1A%7bh&J;@Vvs zjl>K7On*lFTsth-qnmu1h%=|~LG`Oj!H6r;TtxxQ?ag>`yy3E|woW$-A*K4+kOhvG zh}*Y8zmFC)#ONR~M>A+Ky*7g_L+-G(gs^7BE0+>AEl!bMbGg?U)7X;z(nSWIVhC3@ z-C0x8x+Y9V0EIt^UaLR|A9inGYqjG?2>rRnTi1dyH;vMuzpNeEFaa{$CanyWsde9b z!|mwb%dPR#Id~&)>LvdD15k%pkn-+N%HgbS4zaGVV^-6K->Tobxnsb*xrm`zqFEDqETK?e6Aq=Z*OoZ3Ytn z7hzr9tF;UQimTjI-_OB@m2a+J2hIV% z@9P3X=`wH!_kmcQz?MD%Uz}_S+&RnlF;W~99pa4rX!0e}eAmYuEcINK(%^QOk8ANu zU|4W>j80>X%bOW>>91c8HOKNI@&A5$DaJ_Q2`s`mEW(rI+_uW`TK=OO0=EXQzfb5n zpZdj3E=^sg%xc=>>f7KyG_kgI5;-lH3J|P>a!rmgwj=)RgYw9Va>#*Q_ru~5T(kuG z1Jqwid09*I z7qVD+dbkt6(hwfVSX`u%gnJZpy}~L zw!I;=mJ@r;^s^kfhn>afDs}%UdHx2!yWI=&+OOASCwZnIj(?W>$RVVPL^wBjSeuFK z-_l}iS8s_fN3_cz_dxf9-F}|5YYXXlx8>>fX?=w-Tq@uTULX{v7%<<5c&OmkDLC%} zp|LooIUm>S%2)Xzm+wv^rBK;zo?pP1(fL~k3*m?b?!iPk4FpD_==MSQHT-KRaeXvD z1CPCi1yycbK7%_Qb9b6U&<@V@L!pkQ1rzgn*oF?fwcj;bu*u$J-X=V%+Y-6XKn6S+ zgtYS+DQ1tuiL|HJm28CoB#3K&p1nwcR&-szKFfULfqtaE=t+P*gyiV<#3O|K;L-;* zL7(-oT*vN|L>?B`gHRE$QURS*d0f2S%OMK;G1%d{!H7HC%o%7d(nhitPOgRu}qme9%A9eC$E zFIA$k8UG}YwUSN%TYEF^J{e2ohE{)RY=T^yk3baau#aK|!Qr;_%6@e_;=N4fX>RXS zdAc{}%Yk1U$Lv9RMY%LdzW|%T{ zlK5YJXQovx-W{YB!C-*{r&|mm!c!S9bFh4;ZG}Q{Ua%Fxpz!0#W4vCM^bZt0|KBqr z_vziokJxOaorO1b)2Htg*0B8Nb$X%c0PhYnT$UhaFsyH%qU^cJ3XM@riN%EBmMis; zZ&ZhNIoCIyyBXc z(kEM7+4HMe{$o9^b9>$f94|7H4aIR7n>{dZBt>7v^70+Eej$ZG>E+k}E2I$jN8V7R zK)p|DUp{peyHRW}K-ZF;H>oN>H0OcXkR`*+Wh0+F2D^c6BeM#^p`Z)|kdDcRP8j);=gsmrbHPoJ+|KRkPiz+;H~P$$ z02zRC5y*$Jv(m74rJ$q`A)-vJ&3>WhM!jIKU9tQ`AOEOTeyCl^ zEByQ8Uv-5!cZc0(Fxk4%5nb`&zl2P>@nA%enxf-dd=&hHIChdzSP{iqh^iOy$nOov%*~| zRV;+CnFV!RAxj5Gz*qq%tnYg$oWs72m2>j=vXD!Z1+ZSN%io2_=b31o|6v%~#QbaL zU#32dL7_>E3HBg4!U>WX;hTBi5@}LK`Z)vxat0)z#2Ttkkr<-EGGNr?LC(r^rBa6S&!X-3S&PeR0~gI2d?Ta(Gl)Q? 
zN3RGCw}4=Sf(oTQd~_{k;6~BoZwak-4c{P#b%pg=dCSAP^6Z0k-KRcXLi0I1vi2MU zkH7ln^AG@D=}`~f%q2P+R4VnVpwo~{hM5v{J9aG7PR{bl@^u||Em_Fkka%KeNnR?r z)m6!B8PF&R+Xo?WYIkJYB)eJ?HA>E!7{`NkMBKf&)C9 zIlAGaw>SkrDESGR=Jk>S8gqp&6k%v};}`EI9KzM1&Opw=N=f_e$qw{*u<7JF^q^)OJ(#6 zWt>*t+N{@CYnx4te*2v{=0dax%e9#82X#K_4y9CZO`ubtZ7bgoPLAGAD~4FplEdj) zZn7Yt7PJRj(iv%>nCeE87_&Ls;6)>-?3#Iq~FZ4#QVd2TkwgXHi z&086-du?r2b~^vTF&i3U8k&X~9$3gM3v(5An9ix*@hO!tkiK$N(=;ZKVOcV<9Pbok z&i{;3+$cpibmHszq91zLQ?A|JCYNC%JD@%A#U%i4&vsHNVf2*guIc-#b6(3_>t8Dh zvshtID?^Jz6boV&qRxQ4Wk3BndOC|C6~F+%{zMf6zTqibOmSZ z$3rW+KsLT|6+1=Y4Z}ZiVIy}{$VO9->@r?z`kQVDx}|J!BGsZrZzyKl#}ue-(^rem zM`shEP!@85BR+Re`%~)`;RNglT92oI`>+Z3OirHp2dBYLfW~n`A?6LDmG2d@b*P>j+tUm2%Dn;=-*j;BYdBm^&n} z)3V*wa$DbN%(p?f5|KkM1pp>}Z+HuezPCNlD%t=nBbRPwsyEYGdTEZ9sKrz8#bQ$y z(Aw{xM=a!|nH+VvEvs$N`S71u3p&68WBdzt|8e9u3mFJ3T$HJaCoF_(5!8Vs7k}&S z1!n9Zl7$c z4un-`4rdh_)XgX#?rax0zY@$s$RU+nQ^lDnhGieND`H>ZkbI6LFN0+2d~tdjyRGq4hC`m&Q&y51 zwR$k6>%S1#5Chsme%>!qFJRQ3T#Iyg z>713&7B>$lZquXX<<34uXpj+frOO-J_W~&P z(sU-A=umRd6Cm|4w1`JTlZd@T;QaemxFTTi7%73`R;Fjno_%Ts{81b6o*pG=+sW<; z=9*hyiH2RWh!k%;BaH&(i_UU%LwRZijHf>qJXkFk9@^eHtpbo>y(vYUJi7|kG{C&v ze~b?VcYmmR5A4>ROcdFlf-{A2&#U1++gy!v)Hl{xxUZUegzR^ii8i%`4xU4XZ>}LZ z_gv4WY3rc^n)4AYvU$HO8DeS7fTe6ol$O7e=8?ogc%9{wa)T0zVNu}gVsfX0b|ro* zm9yN?ye>A#1cCfoa9ULjDkL;9m4h|gX_q+b-@m%jvs{f4l0bl1fO@@(`^WFilP{}` z+sb|@Q9_4JA@?$QpvcfxNC2xz&bTN*{=S7$KOYZ50*D7ud#J-(LZn@?%Df$PN3rqv-?k%lf@ufm~aXQz%v?cjx zC%=-&&JP22CwcOi>URNbx>BM~W3%ZIV>{(@YRyNRrvKVo!9O>sxJp@2(S4Qgb~_M* z7N4awlfa_x$l(a*?(w#howm3`3o@P2_fQ(B&4-q!1@D@oUqHtnttnmy2A;`oYGY0a)!KA~8~cP*6o*n>y+B0e zsC|1w!UA{KwtYI7;>+#}#dCJS@RI4)9MKV*#;n0U*ls^~KibafX>8(GQ+y5tFYt{v zh!)Rkl4@f{HS2FpbIS7J1JDA^53wh)DioYeKQes(h5;Y%mJqFrUs{^_i~sYArO(bm zAIpYbn%I?3^ZBnj+av6!0Ij^m)OelQ$U1MKty+Qg&x0EUXmh9`c=Jw;f}0t6L;D_b z`ltS<&W#Ho&Yh@Z&=y$mwEvcIM>n!S5@;9HiP+H>N^Fbw3)|K4$~X4B2Sf6sx=0oX zP+q;BbLuBZhfGpC!2{To&u-(^d7VW~;rL~Se(ff*19Z&!G|Cx;%exE@Q6nJIshFA! z4UCNSEr9BuPn;6Mntj+0sg6!6%n9;t3D~ANitfpKshn=0dhq`1A>viqd!Y7|pZuA`r&U4oxI4901WWV@w`Ab_isggr8Pb#PM^rmQ6jyv38q21CIX zL{&;(bd>T0&sWes9uqF{j2g^SW__&gfyR*m-@^ns5 zBdZCyn{QLR&D|AciGQQPQ9MbxFMGF%x*sOVH{0Hi%-D$O0ttN3#u`W4snai3B;dr? 
z@*K#1zSTVK;@^02v1LxcfAI$Fl|UdeV}ZChP5u$qgL+%CIx(${UpAZz3I~>N7Ik~M zlEeEF4?#?2Pt#-g@luzF-+CCU%$JkP_TpXQ{*9dSVxCGzy|m7tq4uJoo?h5Ll+6p~ z#|@dE_AN9>P2E+T@+SSudz+0<7{-eB z&U2w>SST+sqi{aePIZ6+Cq7eOJ|05fE%&laqks=)#=rv$FYfPhk$G=)$K~dfmTPDZ z2hkkf&=avJKHNZx2Xvm0-2tI1DpsA;Z0fj zujiIq4HT*=06oMJ02QVWxiakZEiEheV$Puh&CjqdO4)Hq*w79JgfYiWvGg7<=BpSJ zdNc^(snZ30OeQjgoIEdP`ur5AK%ihDco)!pl~`8X0V3#hlQAjcth|(TfxKBj6A#BY z8zApH29-6SNKyBnqjyq85DBPOnDhChe46f%5}Nb-e+!Xm&8Cc=3A4)l*8cVd3e{bv zs5d~lTZqUuoRi7pto+puL6ux@Wy3j8@E9)!p5|^Uv}0XOeFRZ2%o#2}z{bPCm_0eV zK^%Jn7K?|DXXNmPr0e3`xZd8zg>VB9eai(jo^Nq3XJzs>iTo2fsz=iWcK&@I9AM$8 z{JJyCK6djyI%ia1lEsih!ko2Wl2Uz~mA3Q9@DAx=M!c_p7s*2xXg@{m`xz4gcmaCd zVl6jp=-(%SbBx})NH~@;L;U`96l^>UH6*~CuZQN-ydOpS-p`Q&q3m0FHQN@*jE176 zWnT1>C2&Ih$Y%<^?N-3TpkHUk^d>(NeKzt%-QQ&Z9DzgbdDc9~gG?hRTp=}@qbDR{WIm#!O7&szC+VoqS^Y_C42%Tth8ZMTyl@NI9ET^xQVXSlu>UqmT z?q82cTxK!<^-bX>a>rwYQY`v58ih&wW65r)1)8Nj}87 z&Dl33Pl$J2gViNFq2Zg;?C9@Shgi6HZCV!_^l2GfOp%doOKBV%@>*j0Mp$@^2L zRHcMl%f(9T@3{^t0-peLKrq|nrYyOmAwwZ0W!Sh2u+a-58QN0!ZY z@;RFFrY!lTn^C~?!S`>GNV#HRwt=~-`0FpPy!9FTb+uT=yJBWkkM?d9DRK{i!6ZS= zzs8@D{%QYy$o+d3#oJrTT|Vy@LKWl)FL5{Dd|os9;?^>UAiM!$^k5d+eprUC06xBK zS7~e08C+j^xHGJR|AQuc_t6G<>KTt~=bSVGp2ysrXU4jr-4-?!d2_!>W)N+ivux$S z8QiZOQuCu6Or+kSxk5f_bFd>WNsxNgL#;oJ7l&O3V}qskyr!+o+?~0^b`OgA0$3+s zU<1WzK5PV1!McI}mZH1eh+>IP3P#=yLd7KX)$ryS=oMDlVZz?r<*6~hLhfG z(qL9^;raIuN@(OVr5KS<-I-kEm#e)Pm-prLvZ7+dCM2G)Z^$%V?)gO`hsGSk*`27` zQ&$A!Lx`0Cb+9(?ZAw@Hoz3iIJ$xgtOYF33Gaoe-#zu7?*!!E}d*nwJtZa4iuu%Zjchf1DY4BO0$S(ogH5vDhWpdKku#%0Ti1*H&6bukR#q9O*XS`s zJ;7S)5Fpl;5yqEFb{!%3me9nzfvai6w*dXh*J`#wIL+2IKe_)6EE3F=5)wyl65IHV zzIe!zUAmb9=Q)w9({@QblGS|v#g;~NYh9nb{9M2nMJyJv@-R_kFlG8Vc~FjdtYU4e z7?#x!>1+BZI!@F5+8!65vQk=PD0JvHnJ8=&;{}90QMPtY=@vxxp z&BCw*@q8K~O>F5IwkIvz^{pByi`vn?%?&vdY&X=;V?0mFK{Q1I#Hwj_|KA~EIDrpV4snRhR@A`w2fb8$P(|wqRB(+ z;P-9bBfm*)dVC&Mp3H$X_}w^#M1Bu#HG6j42#P!#*gwn)Db$lh9}4cs0NcrbjDlz) zWWR_kSCr3dz&H0H#7NNc>Sb!lV;Fy8@;$h;2RK6x`^hLedo5!KxT>9GD7F zp#>Wcj+;V>s3%(hO0jr^>39x~<u>wn(SaD5oqCq=2;H(^+St}3>rdo)da&drof4i6Uic(;pW%rQa@9Z;!h-cVJ!(_d~eG)`6lQ{<5Mb#td{c~PyDWa%W%YO7rdL_EFg>vf! 
z#m0SgzoVxFUFmlV%^a540l8ld<#!%LE)mvaz=nHtD^a6xQHe7^A^t;I;#&dnlbsV) z=QnlVysRL$S!iz+pMmJoUmFGCue-z~xLbqKOgRyVY}uofFcS*f$Wv9cVpE2bX?EN@ zIwc>PxF7s@agPs=2^jCFnr(hC#t$E7D1t-F4z4#r2SrPqYqEEr?Y_(q6Q#M;?J-#%tVjO+#xuwy=gPd>>A_0xRq9>BabN~lCsp3oj zVJ)EfTr6yIp$Gkvto!`_^-vQ zYFqS>6rys=b_e!Pf@nr-G?u8U{U-seJAWxTZ9mj20c(5o-E;eHGxJVZ#x zS{S{cJOj{y#BewD(xNj+J&tYagq#4ngiYNXk`j zH_qo$k;_J?K&rQ>ZVd3ZXNv{wYs??WZ0%Sr6~jTMeE@;~ofkhq9E8DR2a(}%PXSjd zQsv;}-kj%KG1F^`-bzQEblT-A3Lx;aGeK(`dMW!VohU%5@ulg$-#N?1ZRSyo1|^ZO zvaThVB%85&eiqgD``x)xBj?3k{*ATZ1!AacHaxG-f=iz?ncH7B z)piv;Eg^f!m9(Jp{As`URqww&ElX}4aw}UM7f{>9-AarW_yf*@o!8}HyFgBn;TULb z5K+ttqBS|fQGchI+AWvC&MhD3`;jy1r`}y077KTQf|$S5{W5Y>d+f_RaWgcg)7(K4 zXwY9|{k9@X+<&*W%lzdzzNqaZ*9Ey(Uyd#iWN}9KZAHM zel(y1v}MdFtRa!HCR3{K*wT+-3+I!iPVPh8NK!ihHx`#qv$`^rONE~}g$`fOx9~;6 zIUD*p3-M)Y&~TD7Qso`;DL@&hZvog_@pJVcC(}$|B1jr#$g#W7;K4N|iy8_WrUw!i zWMlO6bCwWl;L9%2i+a_C_Sa6z5R)q@uG!2r8|`Q6R|cAe;UZD(lY`C>R>{^H3CC^6d&x*;C2$qfMY)q z9|7A_4=oGO4CWEcC7(AG!XJi^$t%MCp}u-W^pBl$nfxG)4X5zEtzCv|J8_1MIVJNz z-qqciC#KH3`aO0L+f>trAE-k~MdsER#)8N}sBX{k%kMe*NFmqFAtPfMzdVVSDm5$iymBpW}0h66S*_ZB3 zG&ozV|EP|?epPLk>&=^T0oLMqwI`;}KZdopoM$YSUc11#SK<8|`L^8BPiR$Ck<57x zC?zm>#J-dI*4@`};IZP{$NY*JvN`ONl1ZZN3k#3Mx%%4c`hq#w@u{QB%4a;{RjPba z41$fn<+Wy5onC#D-9=_^eM$ZbPwm~hHhT+GvWhCScf&wE=NjwH z3^|0~Z9`;VpYmvT!iT&Q#(+Nv%t!&?6HAZm2Ygx(I|Y5l5spBg|Gx}#(rN>wS}8sHDWmbV zvAz3dx}wvKmCdzZRLL!O9c~aT3f5s-7Wtk+ijv*))DC0oTB@s;U6+|DMeO6?3e_th zE2y54YP?Wd^P1dehmg$=4C=}>x!P2m!r71R*#&M>5rmFYqPsNh$WaZA%%C(Be6b8D6cZ9d}7w#9WFKs~&SHy84FzsSjP|{cZ zaZx{JNK|J@hyOtY-&BKf=)DY1?{7rSsq9UUQ*FzOHC9(F^mugpqc2-t`76v?-VMHr zh{l-asX(#LHP-c)T&l~G)r+pxAMdG2BuM4!vAud-VYKbDHrbKG@VfebQT&@G{y#>O zsNgg2;DIb*{&h2X-H+VS+ulX)qVi!w_PtemKVLq!Ua`4Ryy@W?yY%Cm)qaz;Q?gsJ zC*iO{z>DYA$OB8>+wbTPY>w1#-=Qv7f06e{dl`J9gQZt@0SXt|`$QY+&__^#Ual*>jS>_QP-7>~G}SRse9 z%s10_ifyJ3ouuFB88*fgxZETBkg&DTBDYRPvo87UYKM>h3?aOEb^CsYba?jR+042!8ir0E=B4+<0vKO;&s@T-+_X!a<{K z{I)tCTH0O_&ff#@3CVNl-68!{-t1q@KX0$uuO#06e*q{1*Zi*s09e6`0svr@(PH(G zRevB~MvH&LBQ3V`001C~0ssKuJgdb?;*+*71VAGP;ue7bfD0%9001t!Bk5yWOdxoj z0PXg1g1}IVWWcf@u#7D) z+t`&4pl{5Ek7coJInke;{(AX;000`J0002+X;~0hwJZQkv?vOU^`IcInijAdK7gDp zSMrx1(t}m~xz;EAg*apr01yxb002N}%hk)(y~@j&^D-eY)PsV+^7pHe=xv1oCQ4|7 zx$x`NkLBOV01zDo002N6mXo84X#r_5`CoplCIwat1*`j{FD3|d z;r~psP{-=usZIZ1{+%jU{=b~$FIV+f|BU_m0Dxpr0001_X1Qv*$?AR2SBnFy1p{;8 z|I0MNYN24&#bf~mgKk-!ys!Q}xcK?izXyfZC;F=s`ad^XBQ{(`@TQRzcLk9<`F;3&?*IUuLID5(&_$~gJ85z2v|TQy1uh=mwpuh;{aF6lvX4#w-&IgpUhA%} zW1rZQ)J+rgI*+;dxow4%)!&yF9#?981^(jQB`!|xS*ZsA^d1EO0062d3pTk}B+!;$ zUrZkC`dICI->*0Qe%p2K;!vHAy7bokRZVh>H1)psi001=f{{yGKK*)4z R+-?8>002ovPDHLkV1hagpV|Nb diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/aiken-compile.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/aiken-compile.lock deleted file mode 100644 index e69de29b..00000000 diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig deleted file mode 100644 index 0759674c..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.editorconfig +++ /dev/null @@ -1,9 +0,0 @@ -root = true - -[*.ak] -indent_style = space -indent_size = 2 -end_of_line = lf -charset = utf-8 -trim_trailing_whitespace = true -insert_final_newline = true diff --git 
diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes
deleted file mode 100644
index 99fefcf4..00000000
--- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitattributes
+++ /dev/null
@@ -1,2 +0,0 @@
-# Temp hack to get some syntax highlighting on github
-*.ak linguist-language=Gleam
diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml
deleted file mode 100644
index b0081ac7..00000000
--- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.github/workflows/continuous-integration.yml
+++ /dev/null
@@ -1,64 +0,0 @@
-name: Continuous Integration
-
-on:
-  workflow_dispatch:
-  push:
-    branches: ["main"]
-    tags: ["*.*.*"]
-  pull_request:
-    branches: ["main"]
-
-env:
-  CARGO_TERM_COLOR: always
-
-permissions:
-  contents: read
-  pages: write
-  id-token: write
-
-concurrency:
-  group: "pages"
-  cancel-in-progress: true
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - name: 📥 Checkout repository
-        uses: actions/checkout@v3
-
-      - name: 🧰 Setup Pages
-        uses: actions/configure-pages@v2
-
-      - name: 🧰 Install Aiken
-        uses: aiken-lang/setup-aiken@v1
-        with:
-          version: v1.1.9
-
-      - name: 📝 Run fmt
-        run: aiken fmt --check
-
-      - name: 🔬 Run tests
-        run: aiken check
-
-      - name: 📘 Generate documentation
-        shell: bash
-        working-directory: .
-        run: aiken docs -o docs
-
-      - name: 📦 Upload artifact
-        uses: actions/upload-pages-artifact@v2
-        with:
-          path: "docs/"
-
-  deploy:
-    if: ${{ startsWith(github.ref, 'refs/tags') }}
-    needs: build
-    runs-on: ubuntu-latest
-    environment:
-      name: github-pages
-      url: ${{ steps.deployment.outputs.page_url }}
-    steps:
-      - name: 🚀 Deploy to GitHub Pages
-        id: deployment
-        uses: actions/deploy-pages@v1
diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore
deleted file mode 100644
index 3a3d38e6..00000000
--- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-build/
-docs/
-.DS_Store
\ No newline at end of file
diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md
deleted file mode 100644
index 62345b32..00000000
--- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/CHANGELOG.md
+++ /dev/null
@@ -1,805 +0,0 @@
-# Changelog
-
-## v2.2.0 - 2024-12-13
-
-### Added
-
-- [`aiken/cbor.{deserialise}`](https://aiken-lang.github.io/stdlib/aiken/cbor.html#deserialise): to recover `Data` from CBOR bytes.
-- [`aiken/collection/pairs.{insert_with_by_ascending_key}`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_with_by_ascending_key): for inserting in pairs while specifying how to combine values on key conflict.
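As a quick illustration of the first entry above, here is a minimal, hypothetical round-trip sketch (not part of the deleted file, and not this patch's code), assuming `cbor.deserialise` returns `Option<Data>` as described and that the pre-existing `cbor.serialise` is its counterpart; the test name and values are illustrative only:

```aiken
use aiken/cbor

// Hypothetical sketch: serialise a value to CBOR bytes, then recover it
// as `Data` with the new `deserialise`. Assumes the `Option<Data>` return
// type documented in the changelog entry above.
test cbor_roundtrip_sketch() {
  let original: Data = [1, 2, 3]
  cbor.deserialise(cbor.serialise(original)) == Some(original)
}
```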
-
-## v2.1.0 - 2024-09-14
-
-### Added
-
-- Various new helper functions:
-  - [`aiken/collection/list.{for_each}`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html#for_each): for performing many side-effects.
-  - [`aiken/collection/dict.{pop}`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#pop): for accessing and removing a value from a dictionnary in a single op.
-  - [`aiken/primitive/bytearray.{starts_with}`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#starts_with): for matching bytearray prefixes.
-  - [`aiken/primitive/math/rational.{pow}`](https://aiken-lang.github.io/stdlib/aiken/primitive/math/rational.html#pow): for computing (int) powers of rational numbers.
-  - [`cardano/assets.{match}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#match): efficiently compare two value-like.
-  - [`cardano/assets.{restricted_to}`](https://aiken-lang.github.io/stdlib/cardano/assets.html#restricted_to): extracting value subsets from parent value.
-
-- Comparison functions for various Cardano types:
-  - [`cardano/address/credential.{compare}`](https://aiken-lang.github.io/stdlib/cardano/address/credential.html#compare): for ordering credentials.
-  - [`cardano/governance/voter.{compare}`](https://aiken-lang.github.io/stdlib/cardano/governacen/voter.html#compare): for ordering voters.
-  - [`cardano/transaction/output_reference.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/output_reference.html#compare): for ordering output references.
-  - [`cardano/transaction/script_purpose.{compare}`](https://aiken-lang.github.io/stdlib/cardano/transaction/script_purpose.html#compare): for ordering script purpose.
-
-- New BLS12-381 crypto modules:
-  - [`aiken/crypto/bls12_381/g1`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g1.html)
-  - [`aiken/crypto/bls12_381/g2`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/g2.html)
-  - [`aiken/crypto/bls12_381/scalar`](https://aiken-lang.github.io/stdlib/aiken/crypto/bls12_381/scalar.html)
-
-### Changed
-
-- N/A
-
-### Removed
-
-- N/A
-
-## v2.0.0 - 2024-09-01
-
-> [!NOTE]
-> Significant performance improvements (mostly on CPU) across all boards mostly due to the integration of Plutus V3.
->
->

see benchmarks -> -> test | cpu | mem -> --- | --- | --- -> aiken/cbor.{serialise_1} | -38.20% | ±0.00% -> aiken/cbor.{serialise_2} | -38.20% | ±0.00% -> aiken/cbor.{serialise_3} | -37.25% | ±0.00% -> aiken/cbor.{serialise_4} | -41.95% | ±0.00% -> aiken/cbor.{serialise_5} | -42.77% | ±0.00% -> aiken/cbor.{serialise_6} | -42.63% | ±0.00% -> aiken/cbor.{serialise_7} | -40.51% | ±0.00% -> aiken/cbor.{serialise_8} | -37.25% | ±0.00% -> aiken/cbor.{serialise_9} | -41.95% | ±0.00% -> aiken/cbor.{diagnostic_1} | -47.62% | -4.35% -> aiken/cbor.{diagnostic_2} | -45.16% | -2.87% -> aiken/cbor.{diagnostic_3} | -43.32% | -13.33% -> aiken/cbor.{diagnostic_4} | -38.28% | -8.03% -> aiken/cbor.{diagnostic_5} | -44.15% | -14.59% -> aiken/cbor.{diagnostic_6} | -42.77% | -12.21% -> aiken/cbor.{diagnostic_7} | -43.87% | -16.87% -> aiken/cbor.{diagnostic_7_alt} | -42.99% | -11.56% -> aiken/cbor.{diagnostic_8} | -46.00% | -10.23% -> aiken/cbor.{diagnostic_9} | -42.81% | -2.81% -> aiken/cbor.{diagnostic_10} | -38.28% | -8.03% -> aiken/cbor.{diagnostic_10_alt} | -38.43% | -8.03% -> aiken/cbor.{diagnostic_11} | -44.00% | -8.51% -> aiken/cbor.{diagnostic_12} | -45.65% | -11.56% -> aiken/cbor.{diagnostic_13} | -44.44% | -9.34% -> aiken/cbor.{diagnostic_14} | -43.59% | -19.77% -> aiken/cbor.{diagnostic_15} | -46.50% | -3.67% -> aiken/cbor.{diagnostic_16} | -41.89% | -13.41% -> aiken/collection/dict.{bench_from_ascending_pairs} | -20.48% | ±0.00% -> aiken/collection/dict.{from_list_1} | -20.16% | ±0.00% -> aiken/collection/dict.{from_list_2} | -18.28% | ±0.00% -> aiken/collection/dict.{from_list_3} | -17.83% | ±0.00% -> aiken/collection/dict.{from_list_4} | -18.97% | ±0.00% -> aiken/collection/dict.{bench_from_pairs} | -25.28% | ±0.00% -> aiken/collection/dict.{find_1} | -20.63% | ±0.00% -> aiken/collection/dict.{find_2} | -20.43% | ±0.00% -> aiken/collection/dict.{find_3} | -22.03% | ±0.00% -> aiken/collection/dict.{find_4} | -22.53% | ±0.00% -> aiken/collection/dict.{get_1} | -20.63% | ±0.00% -> aiken/collection/dict.{get_2} | -22.72% | ±0.00% -> aiken/collection/dict.{get_3} | -23.26% | ±0.00% -> aiken/collection/dict.{get_4} | -26.91% | ±0.00% -> aiken/collection/dict.{get_5} | -26.30% | ±0.00% -> aiken/collection/dict.{has_key_1} | -28.07% | ±0.00% -> aiken/collection/dict.{has_key_2} | -30.77% | ±0.00% -> aiken/collection/dict.{has_key_3} | -30.22% | ±0.00% -> aiken/collection/dict.{has_key_4} | -27.25% | ±0.00% -> aiken/collection/dict.{is_empty_1} | -27.86% | ±0.00% -> aiken/collection/dict.{keys_1} | -20.30% | ±0.00% -> aiken/collection/dict.{keys_2} | -17.48% | ±0.00% -> aiken/collection/dict.{size_1} | -37.90% | ±0.00% -> aiken/collection/dict.{size_2} | -32.34% | ±0.00% -> aiken/collection/dict.{size_3} | -27.97% | ±0.00% -> aiken/collection/dict.{values_1} | -20.30% | ±0.00% -> aiken/collection/dict.{values_2} | -17.58% | ±0.00% -> aiken/collection/dict.{delete_1} | -20.16% | ±0.00% -> aiken/collection/dict.{delete_2} | -24.29% | ±0.00% -> aiken/collection/dict.{delete_3} | -21.03% | ±0.00% -> aiken/collection/dict.{delete_4} | -25.03% | ±0.00% -> aiken/collection/dict.{delete_5} | -27.22% | ±0.00% -> aiken/collection/dict.{delete_6} | -25.83% | ±0.00% -> aiken/collection/dict.{filter_1} | -20.16% | ±0.00% -> aiken/collection/dict.{filter_2} | -19.61% | ±0.00% -> aiken/collection/dict.{filter_3} | -20.15% | ±0.00% -> aiken/collection/dict.{insert_1} | -22.83% | ±0.00% -> aiken/collection/dict.{insert_2} | -21.77% | ±0.00% -> aiken/collection/dict.{insert_with_1} | -17.21% | ±0.00% -> 
aiken/collection/dict.{insert_with_2} | -22.66% | ±0.00% -> aiken/collection/dict.{insert_with_3} | -25.81% | ±0.00% -> aiken/collection/dict.{map_1} | -19.56% | ±0.00% -> aiken/collection/dict.{map_2} | -23.66% | ±0.00% -> aiken/collection/dict.{union_1} | -17.91% | ±0.00% -> aiken/collection/dict.{union_2} | -8.67% | ±0.00% -> aiken/collection/dict.{union_3} | -22.82% | ±0.00% -> aiken/collection/dict.{union_4} | -22.77% | ±0.00% -> aiken/collection/dict.{union_with_1} | -22.90% | ±0.00% -> aiken/collection/dict.{fold_1} | -35.94% | ±0.00% -> aiken/collection/dict.{fold_2} | -22.31% | ±0.00% -> aiken/collection/dict.{foldr_1} | -36.21% | ±0.00% -> aiken/collection/dict.{foldr_2} | -21.93% | ±0.00% -> aiken/collection/dict.{to_list_1} | -98.69% | -66.72% -> aiken/collection/dict.{to_list_2} | -98.91% | -66.72% -> aiken/collection/list.{push_1} | -8.02% | ±0.00% -> aiken/collection/list.{push_2} | 1.25% | ±0.00% -> aiken/collection/list.{range_1} | -27.77% | ±0.00% -> aiken/collection/list.{range_2} | -27.39% | ±0.00% -> aiken/collection/list.{repeat_1} | -23.72% | ±0.00% -> aiken/collection/list.{repeat_2} | -27.96% | ±0.00% -> aiken/collection/list.{all_1} | -28.36% | ±0.00% -> aiken/collection/list.{all_2} | -27.59% | ±0.00% -> aiken/collection/list.{all_3} | -27.94% | ±0.00% -> aiken/collection/list.{any_1} | -28.23% | ±0.00% -> aiken/collection/list.{any_2} | -28.09% | ±0.00% -> aiken/collection/list.{any_3} | -26.95% | ±0.00% -> aiken/collection/list.{at_1} | -27.60% | ±0.00% -> aiken/collection/list.{at_2} | -19.96% | ±0.00% -> aiken/collection/list.{at_3} | -27.60% | ±0.00% -> aiken/collection/list.{at_4} | -20.77% | ±0.00% -> aiken/collection/list.{at_5} | -25.75% | ±0.00% -> aiken/collection/list.{count_empty} | -36.83% | ±0.00% -> aiken/collection/list.{count_all} | -32.37% | ±0.00% -> aiken/collection/list.{count_some} | -31.73% | ±0.00% -> aiken/collection/list.{count_none} | -30.44% | ±0.00% -> aiken/collection/list.{find_1} | -20.59% | ±0.00% -> aiken/collection/list.{find_2} | -25.53% | ±0.00% -> aiken/collection/list.{find_3} | -19.64% | ±0.00% -> aiken/collection/list.{has_1} | -27.88% | ±0.00% -> aiken/collection/list.{has_2} | -27.69% | ±0.00% -> aiken/collection/list.{has_3} | -26.95% | ±0.00% -> aiken/collection/list.{head_1} | -14.03% | ±0.00% -> aiken/collection/list.{head_2} | -16.90% | ±0.00% -> aiken/collection/list.{is_empty_1} | -26.48% | ±0.00% -> aiken/collection/list.{is_empty_2} | -25.35% | ±0.00% -> aiken/collection/list.{index_of_1} | -25.62% | ±0.00% -> aiken/collection/list.{index_of_2} | -27.52% | ±0.00% -> aiken/collection/list.{index_of_3} | -26.65% | ±0.00% -> aiken/collection/list.{index_of_4} | -19.96% | ±0.00% -> aiken/collection/list.{last_1} | -19.18% | ±0.00% -> aiken/collection/list.{last_2} | -16.26% | ±0.00% -> aiken/collection/list.{last_3} | -17.13% | ±0.00% -> aiken/collection/list.{length_1} | -37.90% | ±0.00% -> aiken/collection/list.{length_2} | -30.89% | ±0.00% -> aiken/collection/list.{delete_1} | -20.20% | ±0.00% -> aiken/collection/list.{delete_2} | -15.02% | ±0.00% -> aiken/collection/list.{delete_3} | -20.55% | ±0.00% -> aiken/collection/list.{delete_4} | -22.46% | ±0.00% -> aiken/collection/list.{drop_1} | -24.62% | ±0.00% -> aiken/collection/list.{drop_2} | -28.08% | ±0.00% -> aiken/collection/list.{drop_while_1} | -19.79% | ±0.00% -> aiken/collection/list.{drop_while_2} | -22.25% | ±0.00% -> aiken/collection/list.{drop_while_3} | 0.86% | ±0.00% -> aiken/collection/list.{drop_while_4} | -27.26% | ±0.00% -> 
aiken/collection/list.{filter_1} | -20.20% | ±0.00% -> aiken/collection/list.{filter_2} | -32.06% | ±0.00% -> aiken/collection/list.{filter_3} | -31.39% | ±0.00% -> aiken/collection/list.{filter_map_1} | -21.10% | ±0.00% -> aiken/collection/list.{filter_map_2} | -28.74% | ±0.00% -> aiken/collection/list.{init_1} | -19.64% | ±0.00% -> aiken/collection/list.{init_2} | -20.01% | ±0.00% -> aiken/collection/list.{init_3} | -13.72% | ±0.00% -> aiken/collection/list.{partition_1} | -14.63% | ±0.00% -> aiken/collection/list.{partition_2} | -16.85% | ±0.00% -> aiken/collection/list.{partition_3} | -16.63% | ±0.00% -> aiken/collection/list.{partition_4} | -16.87% | ±0.00% -> aiken/collection/list.{partition_5} | -22.94% | ±0.00% -> aiken/collection/list.{slice_1} | -29.08% | -2.81% -> aiken/collection/list.{slice_2} | -30.11% | -2.25% -> aiken/collection/list.{slice_3} | -30.29% | -1.46% -> aiken/collection/list.{slice_4} | -28.53% | -1.48% -> aiken/collection/list.{slice_5} | -29.73% | -1.64% -> aiken/collection/list.{slice_6} | -32.01% | -1.80% -> aiken/collection/list.{span_1} | -15.05% | ±0.00% -> aiken/collection/list.{span_2} | -18.03% | ±0.00% -> aiken/collection/list.{span_3} | -12.49% | ±0.00% -> aiken/collection/list.{span_4} | -18.13% | ±0.00% -> aiken/collection/list.{tail_1} | -8.88% | ±0.00% -> aiken/collection/list.{tail_2} | -16.90% | ±0.00% -> aiken/collection/list.{take_1} | -24.98% | ±0.00% -> aiken/collection/list.{take_2} | -24.35% | ±0.00% -> aiken/collection/list.{take_while_1} | -20.20% | ±0.00% -> aiken/collection/list.{take_while_2} | -21.56% | ±0.00% -> aiken/collection/list.{take_while_3} | -22.46% | ±0.00% -> aiken/collection/list.{take_while_4} | -21.02% | ±0.00% -> aiken/collection/list.{unique_1} | -20.20% | ±0.00% -> aiken/collection/list.{unique_2} | -24.34% | ±0.00% -> aiken/collection/list.{flat_map_1} | -19.79% | ±0.00% -> aiken/collection/list.{flat_map_2} | -13.36% | ±0.00% -> aiken/collection/list.{indexed_map_1} | -20.10% | ±0.00% -> aiken/collection/list.{indexed_map_2} | -23.36% | ±0.00% -> aiken/collection/list.{map_1} | -19.79% | ±0.00% -> aiken/collection/list.{map_2} | -16.75% | ±0.00% -> aiken/collection/list.{map2_1} | -20.10% | ±0.00% -> aiken/collection/list.{map2_2} | -17.46% | ±0.00% -> aiken/collection/list.{map2_3} | -15.92% | ±0.00% -> aiken/collection/list.{map3_1} | -20.39% | ±0.00% -> aiken/collection/list.{map3_2} | -19.22% | ±0.00% -> aiken/collection/list.{reverse_1} | -20.10% | ±0.00% -> aiken/collection/list.{reverse_2} | -12.26% | ±0.00% -> aiken/collection/list.{sort_1} | -22.31% | ±0.00% -> aiken/collection/list.{sort_2} | -17.93% | ±0.00% -> aiken/collection/list.{sort_3} | -23.09% | ±0.00% -> aiken/collection/list.{sort_4} | -20.20% | ±0.00% -> aiken/collection/list.{unzip_1} | -14.01% | ±0.00% -> aiken/collection/list.{unzip_2} | -5.48% | ±0.00% -> aiken/collection/list.{concat_1} | -6.56% | ±0.00% -> aiken/collection/list.{concat_2} | -11.25% | ±0.00% -> aiken/collection/list.{concat_3} | -9.35% | ±0.00% -> aiken/collection/list.{difference_1} | -24.23% | ±0.00% -> aiken/collection/list.{difference_2} | -22.59% | ±0.00% -> aiken/collection/list.{difference_3} | -10.64% | ±0.00% -> aiken/collection/list.{difference_4} | -21.68% | ±0.00% -> aiken/collection/list.{zip_1} | -20.10% | ±0.00% -> aiken/collection/list.{zip_2} | -19.17% | ±0.00% -> aiken/collection/list.{zip_3} | -10.35% | ±0.00% -> aiken/collection/list.{foldl_1} | -36.95% | ±0.00% -> aiken/collection/list.{foldl_2} | -26.90% | ±0.00% -> aiken/collection/list.{foldl_3} 
| -11.27% | ±0.00% -> aiken/collection/list.{foldr_1} | -26.68% | ±0.00% -> aiken/collection/list.{foldr_2} | -38.04% | ±0.00% -> aiken/collection/list.{foldr_3} | -10.14% | ±0.00% -> aiken/collection/list.{indexed_foldr_1} | -36.95% | ±0.00% -> aiken/collection/list.{indexed_foldr_2} | -11.06% | ±0.00% -> aiken/collection/list.{reduce_1} | -36.95% | ±0.00% -> aiken/collection/list.{reduce_2} | -27.99% | ±0.00% -> aiken/collection/list.{reduce_3} | -23.54% | ±0.00% -> aiken/collection/list.{reduce_4} | -24.84% | ±0.00% -> aiken/collection/pairs.{get_all_1} | -21.10% | ±0.00% -> aiken/collection/pairs.{get_all_2} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_all_3} | -19.53% | ±0.00% -> aiken/collection/pairs.{get_all_4} | -18.70% | ±0.00% -> aiken/collection/pairs.{get_all_5} | -21.19% | ±0.00% -> aiken/collection/pairs.{get_first_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{get_first_2} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_3} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_4} | -18.86% | ±0.00% -> aiken/collection/pairs.{get_first_5} | -21.05% | ±0.00% -> aiken/collection/pairs.{get_last_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{get_last_2} | -21.13% | ±0.00% -> aiken/collection/pairs.{get_last_3} | -21.16% | ±0.00% -> aiken/collection/pairs.{get_last_4} | -21.79% | ±0.00% -> aiken/collection/pairs.{get_last_5} | -21.05% | ±0.00% -> aiken/collection/pairs.{find_all_1} | -21.10% | ±0.00% -> aiken/collection/pairs.{find_all_2} | -18.33% | ±0.00% -> aiken/collection/pairs.{find_all_3} | -20.51% | ±0.00% -> aiken/collection/pairs.{find_all_4} | -17.79% | ±0.00% -> aiken/collection/pairs.{find_first_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{find_first_2} | -18.28% | ±0.00% -> aiken/collection/pairs.{find_first_3} | -20.22% | ±0.00% -> aiken/collection/pairs.{find_first_4} | -18.28% | ±0.00% -> aiken/collection/pairs.{find_last_1} | -20.63% | ±0.00% -> aiken/collection/pairs.{find_last_2} | -20.70% | ±0.00% -> aiken/collection/pairs.{find_last_3} | -20.22% | ±0.00% -> aiken/collection/pairs.{find_last_4} | -20.98% | ±0.00% -> aiken/collection/pairs.{has_key_1} | -28.07% | ±0.00% -> aiken/collection/pairs.{has_key_2} | -25.70% | ±0.00% -> aiken/collection/pairs.{has_key_3} | -25.80% | ±0.00% -> aiken/collection/pairs.{has_key_4} | -24.93% | ±0.00% -> aiken/collection/pairs.{has_key_5} | -25.70% | ±0.00% -> aiken/collection/pairs.{keys_1} | -20.30% | ±0.00% -> aiken/collection/pairs.{keys_2} | -13.89% | ±0.00% -> aiken/collection/pairs.{keys_3} | -10.43% | ±0.00% -> aiken/collection/pairs.{values_1} | -20.30% | ±0.00% -> aiken/collection/pairs.{values_2} | -14.02% | ±0.00% -> aiken/collection/pairs.{values_3} | -10.65% | ±0.00% -> aiken/collection/pairs.{values_4} | -8.53% | ±0.00% -> aiken/collection/pairs.{map_1} | -11.17% | ±0.00% -> aiken/collection/pairs.{map_2} | -12.89% | ±0.00% -> aiken/collection/pairs.{foldl_1} | -35.94% | ±0.00% -> aiken/collection/pairs.{foldl_2} | -22.31% | ±0.00% -> aiken/collection/pairs.{foldr_1} | -36.21% | ±0.00% -> aiken/collection/pairs.{foldr_2} | -21.93% | ±0.00% -> aiken/collection/pairs.{foldr_3} | -20.00% | ±0.00% -> aiken/interval.{contains_1} | -21.08% | -4.01% -> aiken/interval.{contains_2} | -31.22% | -13.95% -> aiken/interval.{contains_3} | -26.80% | -10.08% -> aiken/interval.{contains_4} | -31.02% | -13.67% -> aiken/interval.{contains_5} | -32.32% | -13.59% -> aiken/interval.{contains_6} | -28.15% | -9.81% -> aiken/interval.{contains_7} | -32.11% | -13.32% -> aiken/interval.{contains_8} | -29.56% | 
-12.59% -> aiken/interval.{contains_9} | -29.68% | -12.78% -> aiken/interval.{contains_10} | -29.68% | -12.78% -> aiken/interval.{contains_11} | -35.17% | -17.77% -> aiken/interval.{contains_12} | -21.09% | -3.86% -> aiken/interval.{is_entirely_after_1} | -29.89% | -13.81% -> aiken/interval.{is_entirely_after_2} | -29.63% | -13.39% -> aiken/interval.{is_entirely_after_3} | -29.63% | -13.39% -> aiken/interval.{is_entirely_after_4} | -29.48% | -11.81% -> aiken/interval.{is_entirely_after_5} | -29.70% | -12.14% -> aiken/interval.{is_entirely_after_6} | -36.09% | -19.77% -> aiken/interval.{is_entirely_after_7} | -24.19% | -3.99% -> aiken/interval.{is_entirely_after_8} | -24.19% | -3.99% -> aiken/interval.{is_entirely_after_9} | -24.19% | -3.99% -> aiken/interval.{is_entirely_before_1} | -28.44% | -13.48% -> aiken/interval.{is_entirely_before_2} | -28.24% | -13.09% -> aiken/interval.{is_entirely_before_3} | -28.24% | -13.09% -> aiken/interval.{is_entirely_before_4} | -28.44% | -11.88% -> aiken/interval.{is_entirely_before_5} | -28.26% | -11.57% -> aiken/interval.{is_entirely_before_6} | -34.63% | -19.34% -> aiken/interval.{is_entirely_before_7} | -22.97% | -4.02% -> aiken/interval.{is_entirely_before_8} | -22.97% | -4.02% -> aiken/interval.{is_entirely_before_9} | -22.97% | -4.02% -> aiken/interval.{hull_1} | -21.51% | -0.73% -> aiken/interval.{hull_2} | -23.06% | -0.80% -> aiken/interval.{hull_3} | -22.00% | -0.86% -> aiken/interval.{intersection_1} | -21.51% | -0.73% -> aiken/interval.{intersection_2} | -21.51% | -0.73% -> aiken/interval.{intersection_3} | -26.55% | -4.65% -> aiken/interval.{intersection_4} | -26.45% | -4.51% -> aiken/interval.{intersection_5} | -22.87% | -0.76% -> aiken/interval.{intersection_6} | -19.73% | -0.98% -> aiken/math.{abs_1} | -61.39% | -21.07% -> aiken/math.{abs_2} | -70.90% | -34.84% -> aiken/math.{clamp_1} | -60.95% | -23.55% -> aiken/math.{clamp_2} | -60.95% | -23.55% -> aiken/math.{clamp_3} | -59.22% | -18.20% -> aiken/math.{gcd_test1} | -47.20% | ±0.00% -> aiken/math.{gcd_test2} | -47.81% | ±0.00% -> aiken/math.{gcd_test3} | -46.10% | ±0.00% -> aiken/math.{is_sqrt1} | -87.41% | -68.64% -> aiken/math.{is_sqrt2} | -87.41% | -68.64% -> aiken/math.{log_10_2} | -51.35% | -8.40% -> aiken/math.{log_42_2} | -51.46% | -8.24% -> aiken/math.{log_42_3} | -51.05% | -7.81% -> aiken/math.{log_5_0} | -54.05% | -12.92% -> aiken/math.{log_4_4} | -50.59% | -9.31% -> aiken/math.{log_4_43} | -49.14% | -7.28% -> aiken/math.{max_1} | -61.39% | -21.07% -> aiken/math.{max_2} | -61.39% | -21.07% -> aiken/math.{max_3} | -61.39% | -21.07% -> aiken/math.{min_1} | -61.39% | -21.07% -> aiken/math.{min_2} | -61.39% | -21.07% -> aiken/math.{min_3} | -61.39% | -21.07% -> aiken/math.{pow_3_5} | -46.34% | ±0.00% -> aiken/math.{pow_7_2} | -46.38% | ±0.00% -> aiken/math.{pow_3__4} | -43.50% | ±0.00% -> aiken/math.{pow_0_0} | -43.95% | ±0.00% -> aiken/math.{pow_513_3} | -45.80% | ±0.00% -> aiken/math.{pow_2_4} | -46.79% | ±0.00% -> aiken/math.{pow_2_42} | -46.77% | ±0.00% -> aiken/math.{pow2_neg} | -44.71% | ±0.00% -> aiken/math.{pow2_0} | -45.00% | ±0.00% -> aiken/math.{pow2_1} | -45.00% | ±0.00% -> aiken/math.{pow2_4} | -45.00% | ±0.00% -> aiken/math.{pow2_42} | -42.01% | ±0.00% -> aiken/math.{pow2_256} | -41.40% | ±0.00% -> aiken/math.{sqrt1} | -32.56% | -17.18% -> aiken/math.{sqrt2} | -32.56% | -17.18% -> aiken/math.{sqrt3} | -49.99% | -8.90% -> aiken/math.{sqrt4} | -51.76% | -3.90% -> aiken/math.{sqrt5} | -52.63% | -1.33% -> aiken/math.{sqrt6} | -28.16% | -15.41% -> 
aiken/math/rational.{from_int_1} | -14.32% | ±0.00% -> aiken/math/rational.{new_1} | -22.98% | ±0.00% -> aiken/math/rational.{zero_1} | -8.08% | ±0.00% -> aiken/math/rational.{denominator_1} | -28.33% | ±0.00% -> aiken/math/rational.{numerator_1} | -29.34% | ±0.00% -> aiken/math/rational.{abs_examples} | -18.25% | ±0.00% -> aiken/math/rational.{negate_1} | -15.39% | ±0.00% -> aiken/math/rational.{reciprocal_1} | -23.28% | ±0.00% -> aiken/math/rational.{reduce_1} | -31.89% | ±0.00% -> aiken/math/rational.{add_1} | -15.11% | ±0.00% -> aiken/math/rational.{add_2} | -15.11% | ±0.00% -> aiken/math/rational.{div_1} | -22.31% | -2.75% -> aiken/math/rational.{div_2} | -22.37% | -2.79% -> aiken/math/rational.{mul_1} | -13.37% | ±0.00% -> aiken/math/rational.{mul_2} | -13.37% | ±0.00% -> aiken/math/rational.{mul_3} | -26.25% | ±0.00% -> aiken/math/rational.{sub_1} | -15.11% | ±0.00% -> aiken/math/rational.{sub_2} | -15.11% | ±0.00% -> aiken/math/rational.{sub_3} | -15.11% | ±0.00% -> aiken/math/rational.{compare_1} | -21.70% | ±0.00% -> aiken/math/rational.{compare_with_eq} | -23.05% | ±0.00% -> aiken/math/rational.{compare_with_neq} | -22.33% | ±0.00% -> aiken/math/rational.{compare_with_gte} | -22.48% | ±0.00% -> aiken/math/rational.{compare_with_gt} | -23.18% | ±0.00% -> aiken/math/rational.{compare_with_lte} | -22.48% | ±0.00% -> aiken/math/rational.{compare_with_lt} | -23.18% | ±0.00% -> aiken/math/rational.{arithmetic_mean_1} | -23.31% | ±0.00% -> aiken/math/rational.{arithmetic_mean_2} | -23.31% | ±0.00% -> aiken/math/rational.{arithmetic_mean_3} | -20.58% | ±0.00% -> aiken/math/rational.{geometric_mean1} | -29.87% | ±0.00% -> aiken/math/rational.{geometric_mean2} | -24.52% | ±0.00% -> aiken/math/rational.{geometric_mean3} | -24.52% | ±0.00% -> aiken/math/rational.{geometric_mean4} | -33.55% | ±0.00% -> aiken/math/rational.{geometric_mean5} | -45.34% | ±0.00% -> aiken/math/rational.{ceil_1} | -36.26% | ±0.00% -> aiken/math/rational.{floor_1} | -29.49% | ±0.00% -> aiken/math/rational.{proper_fraction_1} | -18.44% | ±0.00% -> aiken/math/rational.{proper_fraction_2} | -18.44% | ±0.00% -> aiken/math/rational.{proper_fraction_3} | -18.44% | ±0.00% -> aiken/math/rational.{round_1} | -25.17% | ±0.00% -> aiken/math/rational.{round_even_1} | -25.91% | ±0.00% -> aiken/math/rational.{truncate_1} | -29.49% | ±0.00% -> aiken/option.{is_none_1} | -26.56% | ±0.00% -> aiken/option.{is_none_2} | -27.52% | ±0.00% -> aiken/option.{is_some_1} | -27.52% | ±0.00% -> aiken/option.{is_some_2} | -26.56% | ±0.00% -> aiken/option.{and_then_1} | -20.19% | ±0.00% -> aiken/option.{and_then_2} | -22.15% | ±0.00% -> aiken/option.{and_then_3} | -21.85% | ±0.00% -> aiken/option.{choice_1} | -17.11% | ±0.00% -> aiken/option.{choice_2} | -19.75% | ±0.00% -> aiken/option.{choice_3} | -18.68% | ±0.00% -> aiken/option.{flatten_1} | -12.25% | ±0.00% -> aiken/option.{flatten_2} | -15.41% | ±0.00% -> aiken/option.{flatten_3} | -19.46% | ±0.00% -> aiken/option.{flatten_4} | -14.31% | ±0.00% -> aiken/option.{map_1} | -19.89% | ±0.00% -> aiken/option.{map_2} | -18.18% | ±0.00% -> aiken/option.{map2_1} | -20.47% | ±0.00% -> aiken/option.{map2_2} | -19.93% | ±0.00% -> aiken/option.{map2_3} | -13.64% | ±0.00% -> aiken/option.{map3_1} | -20.74% | ±0.00% -> aiken/option.{map3_2} | -20.00% | ±0.00% -> aiken/option.{map3_3} | -19.90% | ±0.00% -> aiken/option.{or_try_1} | -14.36% | ±0.00% -> aiken/option.{or_try_2} | -14.36% | ±0.00% -> aiken/option.{or_else_1} | -38.16% | ±0.00% -> aiken/option.{or_else_2} | -27.62% | ±0.00% -> 
aiken/primitive/bytearray.{from_string_1} | -62.36% | ±0.00% -> aiken/primitive/bytearray.{from_string_2} | -41.62% | ±0.00% -> aiken/primitive/bytearray.{push_1} | -97.51% | -80.06% -> aiken/primitive/bytearray.{push_2} | -97.51% | -80.06% -> aiken/primitive/bytearray.{push_3} | -88.82% | -89.83% -> aiken/primitive/bytearray.{index_of_1} | -39.75% | ±0.00% -> aiken/primitive/bytearray.{index_of_2} | -43.19% | ±0.00% -> aiken/primitive/bytearray.{index_of_3} | -41.70% | ±0.00% -> aiken/primitive/bytearray.{index_of_4} | -37.24% | ±0.00% -> aiken/primitive/bytearray.{index_of_5} | -26.02% | ±0.00% -> aiken/primitive/bytearray.{is_empty_1} | -37.52% | ±0.00% -> aiken/primitive/bytearray.{is_empty_2} | -33.77% | ±0.00% -> aiken/primitive/bytearray.{length_1} | -49.73% | ±0.00% -> aiken/primitive/bytearray.{length_2} | -49.73% | ±0.00% -> aiken/primitive/bytearray.{test_bit_0} | -45.48% | 5.88% -> aiken/primitive/bytearray.{test_bit_1} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_2} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_3} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_7} | -58.31% | -11.81% -> aiken/primitive/bytearray.{test_bit_8} | -56.22% | -10.85% -> aiken/primitive/bytearray.{test_bit_20_21_22_23} | -44.38% | 5.52% -> aiken/primitive/bytearray.{drop_1} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_2} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_3} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{drop_4} | -58.79% | ±0.00% -> aiken/primitive/bytearray.{slice_1} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_2} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_3} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_4} | -98.79% | -90.04% -> aiken/primitive/bytearray.{slice_5} | -98.79% | -90.04% -> aiken/primitive/bytearray.{take_1} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_2} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_3} | -97.81% | -83.40% -> aiken/primitive/bytearray.{take_4} | -97.81% | -83.40% -> aiken/primitive/bytearray.{concat_1} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_2} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_3} | -96.22% | -80.06% -> aiken/primitive/bytearray.{concat_4} | -96.22% | -80.06% -> aiken/primitive/bytearray.{foldl_1} | -40.96% | ±0.00% -> aiken/primitive/bytearray.{foldl_2} | -40.09% | ±0.00% -> aiken/primitive/bytearray.{foldl_3} | -40.29% | ±0.00% -> aiken/primitive/bytearray.{foldl_4} | -44.76% | ±0.00% -> aiken/primitive/bytearray.{foldr_1} | -42.56% | ±0.00% -> aiken/primitive/bytearray.{foldr_2} | -40.93% | ±0.00% -> aiken/primitive/bytearray.{foldr_3} | -45.34% | ±0.00% -> aiken/primitive/bytearray.{reduce_1} | -42.95% | ±0.00% -> aiken/primitive/bytearray.{reduce_2} | -44.60% | ±0.00% -> aiken/primitive/bytearray.{to_string_1} | -69.56% | ±0.00% -> aiken/primitive/bytearray.{to_string_2} | -53.54% | ±0.00% -> aiken/primitive/bytearray.{to_hex_1} | -48.15% | ±0.00% -> aiken/primitive/bytearray.{to_hex_2} | -48.15% | ±0.00% -> aiken/primitive/int.{from_utf8_1} | -37.06% | ±0.00% -> aiken/primitive/int.{from_utf8_2} | -33.40% | ±0.00% -> aiken/primitive/int.{from_utf8_3} | -37.06% | ±0.00% -> aiken/primitive/int.{from_utf8_4} | -32.78% | ±0.00% -> aiken/primitive/int.{from_utf8_5} | -32.05% | ±0.00% -> aiken/primitive/int.{from_utf8_6} | -31.36% | ±0.00% -> aiken/primitive/string.{from_bytearray_1} | -69.56% | ±0.00% -> aiken/primitive/string.{from_bytearray_2} | -53.54% | ±0.00% -> aiken/primitive/string.{from_bytearray_3} 
| -53.54% | ±0.00% -> aiken/primitive/string.{from_int_1} | -40.54% | -7.05% -> aiken/primitive/string.{from_int_2} | -45.93% | -5.30% -> aiken/primitive/string.{from_int_3} | -47.62% | -4.35% -> aiken/primitive/string.{from_int_4} | -48.58% | -3.69% -> aiken/primitive/string.{concat_1} | -92.30% | -80.10% -> aiken/primitive/string.{concat_2} | -97.34% | -85.87% -> aiken/primitive/string.{concat_3} | -98.67% | -80.35% -> aiken/primitive/string.{join_1} | -42.87% | ±0.00% -> aiken/primitive/string.{join_2} | -37.65% | ±0.00% -> aiken/primitive/string.{to_bytearray_1} | -62.36% | ±0.00% -> aiken/primitive/string.{to_bytearray_2} | -41.62% | ±0.00% -> aiken/primitive/string.{to_bytearray_3} | -41.62% | ±0.00% -> cardano/assets.{from_asset_list_1} | -20.51% | ±0.00% -> cardano/assets.{from_asset_list_2} | -10.09% | ±0.00% -> cardano/assets.{from_asset_list_3} | -12.21% | ±0.00% -> cardano/assets.{from_asset_list_4} | -16.22% | ±0.00% -> cardano/assets.{from_asset_list_5} | -14.60% | ±0.00% -> cardano/assets.{from_asset_list_6} | -20.97% | ±0.00% -> cardano/assets.{from_asset_list_7} | -20.25% | ±0.00% -> cardano/assets.{from_asset_list_8} | -14.51% | ±0.00% -> cardano/assets.{from_asset_list_9} | -16.07% | ±0.00% -> cardano/assets.{add_1} | -27.84% | ±0.00% -> cardano/assets.{add_2} | -27.56% | -0.54% -> cardano/assets.{add_3} | -26.39% | ±0.00% -> cardano/assets.{add_4} | -29.75% | -10.41% -> cardano/assets.{add_5} | -27.80% | ±0.00% -> cardano/assets.{merge_1} | -26.02% | ±0.00% -> cardano/assets.{merge_2} | -19.60% | ±0.00% -> cardano/assets.{merge_3} | -23.80% | ±0.00% -> cardano/assets.{merge_4} | -25.92% | ±0.00% -> cardano/assets.{merge_5} | -27.61% | -1.98% -> cardano/assets.{without_lovelace_1} | -28.00% | -2.24% -> cardano/assets.{without_lovelace_2} | -27.49% | ±0.00% -> cardano/assets.{without_lovelace_3} | -23.40% | -0.34% -> cardano/assets.{flatten_with_1} | -21.10% | ±0.00% -> cardano/assets.{flatten_with_2} | -22.77% | ±0.00% -> cardano/assets.{reduce_1} | -24.31% | ±0.00% -> cardano/assets.{reduce_2} | -20.89% | ±0.00% -> cardano/assets.{reduce_3} | -36.21% | ±0.00% ->
- -### Added - -- New modules covering Conway-related features (i.e. governance) - - [`cardano/governance`](https://aiken-lang.github.io/stdlib/cardano/governance.html) - - [`cardano/governance/protocol_parameters`](https://aiken-lang.github.io/stdlib/cardano/governance/protocol_parameters.html) - -- New primitives in `aiken/collection/pairs`: - - [`insert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#insert_by_ascending_key) - - [`repsert_by_ascending_key`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html#repsert_by_ascending_key) - -- New primitives in `aiken/crypto`: - - [`blake2b_224`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#blake2b_224) - - [`keccak_256`](https://aiken-lang.github.io/stdlib/aiken/crypto.html#keccak_256) - -- New primitives in `aiken/math`: - - [`log2`](https://aiken-lang.github.io/stdlib/aiken/math.html#log2) - -- New primitives in `aiken/primitive/bytearray`: - - [`at`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#at) - - [`from_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_big_endian) - - [`from_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#from_int_little_endian) - - [`to_int_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_big_endian) - - [`to_int_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html#to_int_little_endian) - -- New primitives in `aiken/primitive/int`: - - [`from_bytearray_big_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_big_endian) - - [`from_bytearray_little_endian`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html#from_bytearray_little_endian) - -- New primitives in `aiken/crypto`: - - [`verify_ecdsa_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_ecdsa_signature) - - [`verify_schnorr_signature`](https://aiken-lang.github.io/stdlib/cardano/credential.html#verify_schnorr_signature) - -### Changed - -- Few modules have been relocated and better organized: - - `aiken/hash` -> [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) - - **collections** - - `aiken/dict` -> [`aiken/collection/dict`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html) - - `aiken/list` -> [`aiken/collection/list`](https://aiken-lang.github.io/stdlib/aiken/collection/list.html) - - `aiken/pairs` -> [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) - - **primitive** - - `aiken/bytearray` -> [`aiken/primitive/bytearray`](https://aiken-lang.github.io/stdlib/aiken/primitive/bytearray.html) - - `aiken/int` -> [`aiken/primitive/int`](https://aiken-lang.github.io/stdlib/aiken/primitive/int.html) - - `aiken/string` -> [`aiken/primitive/string`](https://aiken-lang.github.io/stdlib/aiken/primitive/string.html) - - **cardano** - - `aiken/transaction` -> [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) - - `aiken/transaction/certificate` -> [`cardano/certificate`](https://aiken-lang.github.io/stdlib/cardano/certificate.html) - - `aiken/transaction/credential` -> [`cardano/address`](https://aiken-lang.github.io/stdlib/cardano/address.html) & `aiken/crypto` - - `aiken/transaction/value` -> [`cardano/assets`](https://aiken-lang.github.io/stdlib/cardano/assets.html) - -- Several zero-argument functions have been turned into top-level constants - - 
`aiken/dict.new()` -> [`aiken/collection/dict.empty`](https://aiken-lang.github.io/stdlib/aiken/collection/dict.html#empty) - - `aiken/interval.empty()` -> [`aiken/interval.empty`](https://aiken-lang.github.io/stdlib/aiken/interval.html#empty) - - `aiken/interval.everything()` -> [`aiken/interval.everything`](https://aiken-lang.github.io/stdlib/aiken/interval.html#everything) - - `aiken/math/rational.zero()` -> [`aiken/math/rational.zero`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#zero) - - `aiken/transaction/value.zero()` -> [`cardano/assets.zero`](https://aiken-lang.github.io/stdlib/cardano/assets.html#zero) - -- The `Transaction` type from [`cardano/transaction`](https://aiken-lang.github.io/stdlib/cardano/transaction.html) (originally `aiken/transaction`) has been greatly reworked to match the new transaction format in Plutus V3. - -- The `ScriptContext` type has split from `cardano/transaction` (originally `aiken/transaction`) and moved into its own module [`cardano/script_context`](https://aiken-lang.github.io/stdlib/cardano/script_context.html) and adjusted to its new form as per Plutus V3. - -- The constructors of [`Credential`](https://aiken-lang.github.io/stdlib/cardano/address.html#credential) have been renamed from `VerificationKeyCredential` and `ScriptCredential` into `VerificationKey` and `Script` respectively. - -- The function `remove_all`, `remove_first` and `remove_last` from [`aiken/collection/pairs`](https://aiken-lang.github.io/stdlib/aiken/collection/pairs.html) (originally `aiken/pairs`) have been renamed to `delete_all`, `delete_first` and `delete_last` respectively. - -- The function `verify_signature` from [`aiken/crypto`](https://aiken-lang.github.io/stdlib/aiken/crypto.html) (originally `aiken/credential`) has been renamed to `verify_ed25519_signature`. - -### Removed - -- The module `aiken/time`. The `PosixTime` alias is no longer used anywhere. - -- `MintedValue` (from `aiken/transaction/value` originally) and its associated functions are no longer needed and, therefore, gone. - -## v1.9.0 - 2024-05-24 - -### Added - -- A new module [`aiken/pairs`](https://aiken-lang.github.io/stdlib/aiken/pairs.html) to work with associative lists (a.k.a. `Pairs`). - -### Changed - -- **BREAKING-CHANGE**
- Specialized all `Dict`'s key to `ByteArray`, and thus remove the need for passing an extra comparison function in many functions. `Dict` are however still specialized with a phantom type for keys. - -- **BREAKING-CHANGE**
- Few functions from `Dict` have been renamed for consistency: - - `from_list` -> `from_pairs` - - `from_ascending_list` -> `from_ascending_pairs` - - `to_list` -> `to_pairs` - -### Removed - -N/A - -## v1.8.0 - 2024-03-28 - -### Added - -- [`value.reduce`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#reduce) to efficiently fold over a value and its elements. - -- [`value.from_asset_list`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_asset_list) to turn an asset list into a Value while enforcing invariants expected of `Value`. - -- [`math.is_sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#is_sqrt) as a more efficient alternative to `sqrt`. - -### Changed - -- Disclaimers in documentation to [`bytearray.to_string`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_string) and [`string.from_bytearray`](https://aiken-lang.github.io/stdlib/aiken/string.html#from_bytearray) regarding UTF-8 encoding. - -### Removed - -N/A - -## v1.7.0 - 2023-11-07 - -### Added - -- [`list.index_of`](https://aiken-lang.github.io/stdlib/aiken/list.html#index_of): For getting a values index in a list. -- [`transaction.placeholder`](https://aiken-lang.github.io/stdlib/aiken/transaction.html#placeholder): For constructing test transactions. -- [`transaction.value.is_zero`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#is_zero): For checking whether a value is null. - -### Changed - -- [`value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value) now correctly preserves the invariant of `MintedValue`: it always contain a null quantity of Ada. - -### Removed - -N/A - -## v1.6.0 - 2023-09-08 - -### Added - -- [`math.pow2`](https://aiken-lang.github.io/stdlib/aiken/math.html#pow2): For faster exponentions for powers of two. -- [`bytearray.test_bit`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#test_bit): For testing if a bit is set in a bytearray (MSB). - -## v1.5.0 - 2023-08-16 - -### Removed - -- retired `list.and` and `list.or` because of the new keywords for logical op chaining. - -## v1.4.0 - 2023-07-21 - -### Changed - -- Fixed missing null-check on `value.add`. Adding a null quantity of token is now correctly a no-op. - -## v1.3.0 - 2023-06-30 - -### Added - -- [`math.sqrt`](https://aiken-lang.github.io/stdlib/aiken/math.html#sqrt): For calculating integer square roots using a quadratically convergent method. -- [`math/rational.numerator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator) & [`math/rational.denominator`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#numerator): For accessing parts of a rational value. -- [`math/rational.arithmetic_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#arithmetic_mean): For computing [arithmetic mean](https://en.wikipedia.org/wiki/Arithmetic_mean) of rational values. -- [`math/rational.geometric_mean`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html#geometric_mean): For computing [geometric mean](https://en.wikipedia.org/wiki/Geometric_mean) of two rational values. - -### Changed - -- Clear empty asset lists in [`Value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#Value) on various operations. Before that fix, it could happen that removing all assets from a given policy would lead to an empty dictionnary of assets still be present in the `Value`. 
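As a quick illustration of the square-root helpers listed above (`math.sqrt`, added in v1.3.0, and `math.is_sqrt`, added in v1.8.0 as a cheaper alternative), here is a minimal sketch. The exact signatures and behaviour are assumed from the changelog entries and function names, not taken from this patch: `sqrt` is assumed to return the rounded-down integer square root wrapped in an `Option`, and `is_sqrt(self, n)` to merely check that `n` is that root rather than compute it.

```aiken
use aiken/math

// Minimal sketch; signatures and edge-case behaviour are assumed,
// not confirmed by this patch.
test sqrt_helpers_sketch() {
  and {
    // Integer square root, rounded down, wrapped in an Option.
    math.sqrt(17) == Some(4),
    // Negative inputs are assumed to yield None.
    math.sqrt(-1) == None,
    // is_sqrt only verifies the candidate root, which is cheaper
    // than computing it from scratch.
    math.is_sqrt(17, 4),
  }
}
```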
- -## v1.2.0 - 2023-06-17 - -### Added - -- [`transaction/value.MintedValue`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#MintedValue) -- [`transaction/value.from_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#from_minted_value): Convert from `MintedValue` to `Value` -- [`transaction/value.to_minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_minted_value): Convert from `Value` to `MintedValue` -- [`transaction/bytearray.to_hex`](https://aiken-lang.github.io/stdlib/aiken/bytearray.html#to_hex): Convert a `ByteArray` to a hex encoded `String` -- [`math/rational`](https://aiken-lang.github.io/stdlib/aiken/math/rational.html): Working with rational numbers. - - [x] `abs` - - [x] `add` - - [x] `ceil` - - [x] `compare` - - [x] `compare_with` - - [x] `div` - - [x] `floor` - - [x] `from_int` - - [x] `mul` - - [x] `negate` - - [x] `new` - - [x] `proper_fraction` - - [x] `reciprocal` - - [x] `reduce` - - [x] `round` - - [x] `round_even` - - [x] `sub` - - [x] `truncate` - - [x] `zero` - -### Removed - -- module `MintedValue` was merged with `Value` - -## v1.1.0 - 2023-06-06 - -### Added - -- [`list.count`](https://aiken-lang.github.io/stdlib/aiken/list.html#count): Count how many items in the list satisfy the given predicate. - -- [`int.from_utf8`](https://aiken-lang.github.io/stdlib/aiken/int.html#from_utf8): Parse an integer from a utf-8 encoded `ByteArray`, when possible. - -- [`dict.foldl`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldl) & [`dict.foldr`](https://aiken-lang.github.io/stdlib/aiken/dict.html#foldr): for left and right folds over dictionnary elements in ascending key order. - -- [`dict.insert_with`](https://aiken-lang.github.io/stdlib/aiken/dict.html#insert_with): Insert a value in the dictionary at a given key. When the key already exist, the provided merge function is called. - -- [`transaction/value.add`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#add): Add a (positive or negative) quantity of a single token to a value. This is more efficient than `merge` for a single asset. - -- [`transaction/value.to_dict`](https://aiken-lang.github.io/stdlib/aiken/transaction/value.html#to_dict): Convert a `Value` into a dictionnary of dictionnaries. - -- A new module [`transaction/minted_value`](https://aiken-lang.github.io/stdlib/aiken/transaction/minted_value.html): This is used exclusively for representing values present in the `mint` field of transactions. This allows to simplify some of the implementation for `Value` which no longer needs to handle the special case where null-quantity tokens would be present. It isn't possible to construct `MintedValue` by hand, they come from the script context entirely and are 'read-only'. - -- More documentation for `dict` and `interval` modules. - -### Changed - -> **Warning** -> -> Most of those changes are breaking-changes. Though, given we're still in an -> alpha state, only the `minor` component is bumped from the version number. -> Please forgive us. - -- Rework `list.{foldl, foldr, reduce, indexed_foldr}`, `dict.{fold}`, `bytearray.{foldl, foldr, reduce}` to take the iterator as last argument. For example: - - ``` - fn foldl(self: List
, with: fn(a, b) -> b, zero: b) -> b - - ↓ becomes - - fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b - ``` - -- Fixed implementation of `bytearray.slice`; `slice` would otherwise behave as if the second argument were an offset. - -- Rename `transaction/value.add` into `transaction/value.merge`. - -- Swap arguments of the merge function in `dict.union_with`; the first value received now corresponds to the value already present in the dictionnary. - -- Fixed various examples from the documentation - -### Removed - -- Removed `dict.fold`; replaced with `dict.foldl` and `dict.foldr` to remove ambiguity. - -## v1.0.0 - 2023-04-13 - -### Added - -N/A - -### Changed - -N/A - -### Removed - -N/A diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE deleted file mode 100644 index 4a1de273..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2022 Lucas Rosa - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md deleted file mode 100644 index 4cd6fef2..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/README.md +++ /dev/null @@ -1,71 +0,0 @@ -
-[Aiken logo] Aiken Standard Library
- -[![Licence](https://img.shields.io/github/license/aiken-lang/stdlib?style=for-the-badge)](https://github.com/aiken-lang/stdlib/blob/main/LICENSE) -[![Continuous Integration](https://img.shields.io/github/actions/workflow/status/aiken-lang/stdlib/continuous-integration.yml?style=for-the-badge)](https://github.com/aiken-lang/stdlib/actions/workflows/continuous-integration.yml) - -
-
- -## Getting started - -``` -aiken add aiken-lang/stdlib --version v2 -``` - -## Compatibility - -aiken's version | stdlib's version(s) ---- | --- -`v1.1.3`
`v1.1.4` `v1.1.5` `v1.1.6` `v1.1.7` | `>= 2.1.0`
-`v1.1.1` `v1.1.2` | `>= 2.0.0` && `< 2.1.0`
-`v1.0.29-alpha`
`v1.0.28-alpha` | `>= 1.9.0` && `< 2.0.0` -`v1.0.26-alpha` | `<= 1.8.0` && `< 1.9.0` - -## Overview - -The official standard library for the [Aiken](https://aiken-lang.org) Cardano -smart-contract language. - -It extends the language builtins with useful data-types, functions, constants -and aliases that make using Aiken a bliss. - -```aiken -use aiken/collection/list -use aiken/crypto.{VerificationKeyHash} -use cardano/transaction.{OutputReference, Transaction} - -pub type Datum { - owner: VerificationKeyHash, -} - -pub type Redeemer { - msg: ByteArray, -} - -/// A simple validator which replicates a basic public/private signature lock. -/// -/// - The key (hash) is set as datum when the funds are sent to the script address. -/// - The spender is expected to provide a signature, and the string 'Hello, World!' as message -/// - The signature is implicitly verified by the ledger, and included as 'extra_signatories' -/// -validator hello_world { - spend(datum: Option, redeemer: Redeemer, _, self: Transaction) { - expect Some(Datum { owner }) = datum - - let must_say_hello = redeemer.msg == "Hello, World!" - - let must_be_signed = list.has(self.extra_signatories, owner) - - and { - must_say_hello, - must_be_signed, - } - } -} -``` - -## Stats - -![Alt](https://repobeats.axiom.co/api/embed/f0a17e7f6133630e165b9e56ec5447bef32fe831.svg "Repobeats analytics image") diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock deleted file mode 100644 index 769ac20f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file was generated by Aiken -# You typically do not need to edit this file - -[[requirements]] -name = "aiken-lang/fuzz" -version = "v2" -source = "github" - -[[packages]] -name = "aiken-lang/fuzz" -version = "v2" -requirements = [] -source = "github" - -[etags] -"aiken-lang/fuzz@v2" = [{ secs_since_epoch = 1734106349, nanos_since_epoch = 450591000 }, "64a32283418d58cade34059d3855b857e84505541158c541c460cafa0d355475"] diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml deleted file mode 100644 index cbc76a0b..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/aiken.toml +++ /dev/null @@ -1,15 +0,0 @@ -name = "aiken-lang/stdlib" -version = "2.2.0" -compiler = "v1.1.9" -plutus = "v3" -description = "The Aiken Standard Library" - -[repository] -user = "aiken-lang" -project = "stdlib" -platform = "github" - -[[dependencies]] -name = "aiken-lang/fuzz" -version = "v2" -source = "github" diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak deleted file mode 100644 index f0c66d69..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.ak +++ /dev/null @@ -1,293 +0,0 @@ -use aiken -use aiken/builtin.{decode_utf8, serialise_data} -use aiken/primitive/bytearray - -/// Obtain a String representation 
of _anything_. This is particularly (and only) useful for tracing -/// and debugging. This function is expensive and should not be used in any production code as it -/// will very likely explodes the validator's budget. -/// -/// The output is a [CBOR diagnostic](https://www.rfc-editor.org/rfc/rfc8949#name-diagnostic-notation) -/// of the underlying on-chain binary representation of the data. It's not as -/// easy to read as plain Aiken code, but it is handy for troubleshooting values -/// _at runtime_. Incidentally, getting familiar with reading CBOR diagnostic is -/// a good idea in the Cardano world. -/// -/// ```aiken -/// cbor.diagnostic(42) == "42" -/// cbor.diagnostic(#"a1b2") == "h'A1B2'" -/// cbor.diagnostic([1, 2, 3]) == "[_ 1, 2, 3]" -/// cbor.diagnostic([]) == "[]" -/// cbor.diagnostic((1, 2)) == "[_ 1, 2]" -/// cbor.diagnostic((1, #"ff", 3)) == "[_ 1, h'FF', 3]" -/// cbor.diagnostic([(1, #"ff")]) == "{_ 1: h'FF' }" -/// cbor.diagnostic(Some(42)) == "121([_ 42])" -/// cbor.diagnostic(None) == "122([])" -/// ``` -pub fn diagnostic(self: Data) -> String { - aiken.diagnostic(self, #"") - |> decode_utf8 -} - -/// Deserialise a [CBOR](https://www.rfc-editor.org/rfc/rfc8949) Data. This is the reverse operation of [serialise](#serialise). -/// In particular, we have the following property: -/// -/// ```aiken -/// cbor.deserialise(cbor.serialise(any_data)) == Some(any_data) -/// ``` -/// -/// > [!CAUTION] -/// > Unfortunately, this function isn't derived from a builtin primitive. It -/// > is therefore an order of magnitude more expensive than its counterpart -/// > and shall be used with care. -/// > -/// > In general, one might prefer avoiding deserialisation unless truly necessary. -/// > Yet, it may come in handy for testing and in rare scenarios. -pub fn deserialise(bytes: ByteArray) -> Option { - let length = bytearray.length(bytes) - - let peek = - fn(offset: Int, callback: fn(Byte) -> Decoder) -> Decoder { - fn(cursor) { - if 0 >= cursor { - deserialise_failure - } else { - callback(bytearray.at(bytes, length - cursor))(cursor - offset) - } - } - } - - let take = - fn(n: Int, callback: fn(ByteArray) -> Decoder) -> Decoder { - fn(cursor) { - if 0 >= cursor { - deserialise_failure - } else { - callback(builtin.slice_bytearray(length - cursor, n, bytes))( - cursor - n, - ) - } - } - } - - if length == 0 { - None - } else { - let Pair(result, consumed) = decode_data(peek, take)(length) - if consumed != 0 { - None - } else { - Some(result) - } - } -} - -/// Serialise any value to binary, encoding using [CBOR](https://www.rfc-editor.org/rfc/rfc8949). -/// -/// This is particularly useful in combination with hashing functions, as a way -/// to obtain a byte representation that matches the serialised representation -/// used by the ledger in the context of on-chain code. -/// -/// Note that the output matches the output of [`diagnostic`](#diagnostic), -/// though with a different encoding. [`diagnostic`](#diagnostic) is merely a -/// textual representation of the CBOR encoding that is human friendly and -/// useful for debugging. 
-/// -/// ```aiken -/// cbor.serialise(42) == #"182a" -/// cbor.serialise(#"a1b2") == #"42a1b2" -/// cbor.serialise([]) == #"80" -/// cbor.serialise((1, 2)) == #"9f0102ff" -/// cbor.serialise((1, #"ff", 3)) == #"9f0141ff03ff" -/// cbor.serialise([(1, #"ff")]) == #"a10141ff" -/// cbor.serialise(Some(42)) == #"d8799f182aff" -/// cbor.serialise(None) == #"d87a80" -/// ``` -pub fn serialise(self: Data) -> ByteArray { - serialise_data(self) -} - -type Byte = - Int - -type Decoder
= - fn(Int) -> Pair - -type Peek = - fn(Int, fn(Byte) -> Decoder) -> Decoder - -type Take = - fn(Int, fn(ByteArray) -> Decoder) -> Decoder - -fn return(data: Data) -> Decoder { - fn(cursor) { Pair(data, cursor) } -} - -const deserialise_failure: Pair = { - let empty: Data = "" - Pair(empty, -1) - } - -const token_begin_bytes = 0x5f - -const token_begin_list = 0x9f - -const token_begin_map = 0xbf - -const token_break = 0xff - -fn decode_data(peek: Peek, take: Take) -> Decoder { - let next <- peek(1) - let major_type = next / 32 - if major_type <= 2 { - if major_type == 0 { - let i <- decode_uint(peek, take, next) - return(builtin.i_data(i)) - } else if major_type == 1 { - let i <- decode_uint(peek, take, next - 32) - return(builtin.i_data(-i - 1)) - } else { - if next == token_begin_bytes { - let b <- decode_chunks(peek, take) - return(builtin.b_data(b)) - } else { - let b <- decode_bytes(peek, take, next - 64) - return(builtin.b_data(b)) - } - } - } else if major_type == 6 { - let tag <- decode_uint(peek, take, next - 192) - let next <- peek(1) - if tag == 102 { - fn(_) { deserialise_failure } - } else { - let ix = - if tag >= 1280 { - tag - 1280 + 7 - } else { - tag - 121 - } - if next == token_begin_list { - let fields <- decode_indefinite(peek, take, decode_data) - return(builtin.constr_data(ix, fields)) - } else { - let size <- decode_uint(peek, take, next - 128) - let fields <- decode_definite(peek, take, decode_data, size) - return(builtin.constr_data(ix, fields)) - } - } - } else if major_type == 4 { - if next == token_begin_list { - let xs <- decode_indefinite(peek, take, decode_data) - return(builtin.list_data(xs)) - } else { - let size <- decode_uint(peek, take, next - 128) - let xs <- decode_definite(peek, take, decode_data, size) - return(builtin.list_data(xs)) - } - } else if major_type == 5 { - if next == token_begin_map { - let xs <- decode_indefinite(peek, take, decode_pair) - return(builtin.map_data(xs)) - } else { - let size <- decode_uint(peek, take, next - 160) - let xs <- decode_definite(peek, take, decode_pair, size) - return(builtin.map_data(xs)) - } - } else { - fn(_) { deserialise_failure } - } -} - -fn decode_pair(peek: Peek, take: Take) -> Decoder> { - fn(cursor) { - let Pair(k, cursor) = decode_data(peek, take)(cursor) - let Pair(v, cursor) = decode_data(peek, take)(cursor) - Pair(Pair(k, v), cursor) - } -} - -fn decode_uint( - peek: Peek, - take: Take, - header: Int, - and_then: fn(Int) -> Decoder, -) -> Decoder { - if header < 24 { - and_then(header) - } else if header == 24 { - let payload <- peek(1) - and_then(payload) - } else if header < 28 { - let width = bytearray.at(#[2, 4, 8], header - 25) - let payload <- take(width) - and_then(bytearray.to_int_big_endian(payload)) - } else { - fn(_) { deserialise_failure } - } -} - -fn decode_bytes( - peek: Peek, - take: Take, - header: Int, - and_then: fn(ByteArray) -> Decoder, -) -> Decoder { - let width <- decode_uint(peek, take, header) - let bytes <- take(width) - and_then(bytes) -} - -fn decode_chunks( - peek: Peek, - take: Take, - and_then: fn(ByteArray) -> Decoder, -) -> Decoder { - let next <- peek(1) - if next == token_break { - and_then("") - } else { - let chunk <- decode_bytes(peek, take, next - 64) - let chunks <- decode_chunks(peek, take) - and_then(builtin.append_bytearray(chunk, chunks)) - } -} - -fn decode_definite( - peek: Peek, - take: Take, - decode_one: fn(Peek, Take) -> Decoder, - size: Int, - and_then: fn(List) -> Decoder, -) -> Decoder { - if size <= 0 { - and_then([]) - } else { - fn(cursor) 
{ - let Pair(elem, cursor) = decode_one(peek, take)(cursor) - { - let elems <- decode_definite(peek, take, decode_one, size - 1) - and_then([elem, ..elems]) - }(cursor) - } - } -} - -fn decode_indefinite( - peek: Peek, - take: Take, - decode_one: fn(Peek, Take) -> Decoder, - and_then: fn(List) -> Decoder, -) -> Decoder { - let next <- peek(1) - if next == token_break { - and_then([]) - } else { - fn(cursor) { - let Pair(elem, cursor) = decode_one(peek, take)(cursor + 1) - { - let elems <- decode_indefinite(peek, take, decode_one) - and_then([elem, ..elems]) - }(cursor) - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak deleted file mode 100644 index 28d9f5bb..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/cbor.test.ak +++ /dev/null @@ -1,297 +0,0 @@ -use aiken/cbor.{deserialise, diagnostic, serialise} -use aiken/fuzz - -// ------------------------------------------------------------------ diagnostic - -test diagnostic_1() { - diagnostic(42) == @"42" -} - -test diagnostic_2() { - diagnostic(#"a1b2") == @"h'A1B2'" -} - -test diagnostic_3() { - diagnostic([1, 2, 3]) == @"[_ 1, 2, 3]" -} - -test diagnostic_4() { - diagnostic([]) == @"[]" -} - -test diagnostic_5() { - diagnostic((1, 2)) == @"[_ 1, 2]" -} - -test diagnostic_6() { - diagnostic((1, #"ff", 3)) == @"[_ 1, h'FF', 3]" -} - -test diagnostic_7() { - diagnostic([(1, #"ff")]) == @"[_ [_ 1, h'FF']]" -} - -test diagnostic_7_alt() { - diagnostic([Pair(1, #"ff")]) == @"{_ 1: h'FF' }" -} - -test diagnostic_8() { - diagnostic(Some(42)) == @"121([_ 42])" -} - -test diagnostic_9() { - diagnostic(None) == @"122([])" -} - -test diagnostic_10() { - let xs: List<(Int, Int)> = [] - diagnostic(xs) == @"[]" -} - -test diagnostic_10_alt() { - let xs: Pairs = [] - diagnostic(xs) == @"{}" -} - -type Foo { - foo: Bar, -} - -type Bar { - A - B(Int) -} - -test diagnostic_11() { - diagnostic(Foo { foo: A }) == @"121([_ 121([])])" -} - -test diagnostic_12() { - diagnostic(Foo { foo: B(42) }) == @"121([_ 122([_ 42])])" -} - -type Baz { - a0: Int, - b0: ByteArray, -} - -test diagnostic_13() { - diagnostic(Baz { a0: 14, b0: #"ff" }) == @"121([_ 14, h'FF'])" -} - -test diagnostic_14() { - diagnostic([0]) == @"[_ 0]" -} - -test diagnostic_15() { - diagnostic(-42) == @"-42" -} - -test diagnostic_16() { - diagnostic([-1, 0, 1]) == @"[_ -1, 0, 1]" -} - -// ------------------------------------------------------------------ serialise - -test serialise_1() { - serialise(42) == #"182a" -} - -test serialise_2() { - serialise(#"a1b2") == #"42a1b2" -} - -test serialise_3() { - serialise([]) == #"80" -} - -test serialise_4() { - serialise((1, 2)) == #"9f0102ff" -} - -test serialise_5() { - serialise((1, #"ff", 3)) == #"9f0141ff03ff" -} - -test serialise_6() { - serialise([(1, #"ff")]) == #"9f9f0141ffffff" -} - -test serialise_7() { - serialise(Some(42)) == #"d8799f182aff" -} - -test serialise_8() { - serialise(None) == #"d87a80" -} - -test serialise_9() { - serialise([Pair(1, #"ff")]) == #"a10141ff" -} - -// ------------------------------------------------------------------ deserialise - -type AnyData { - AnyInt(Int) - AnyByteArray(ByteArray) - AnyList(List) - AnyPairs(Pairs) - AnyUnaryConstr0(UnaryConstr0) - AnyUnaryConstr1(UnaryConstr1) - AnyUnaryConstr2(UnaryConstr2) - 
AnyBinaryConstr0(BinaryConstr0) - AnyBinaryConstr1(BinaryConstr1) -} - -type UnaryConstr0 { - UnaryConstr0 -} - -type UnaryConstr1 { - field0: String, -} - -type UnaryConstr2 { - field0: Int, - field1: List>, -} - -type BinaryConstr0 = - Bool - -type BinaryConstr1 = - Option - -fn any_pair(any_key: Fuzzer, any_value: Fuzzer) -> Fuzzer> { - let k <- fuzz.and_then(any_key) - let v <- fuzz.map(any_value) - Pair(k, v) -} - -fn any_data() -> Fuzzer { - fuzz.either6( - { - let i <- fuzz.map(fuzz.int()) - AnyInt(i) - }, - { - let bs <- fuzz.map(fuzz.bytearray()) - AnyByteArray(bs) - }, - { - let xs <- fuzz.map(fuzz.list(fuzz.int())) - AnyList(xs) - }, - { - let ps <- fuzz.map(fuzz.list(any_pair(fuzz.bytearray(), fuzz.int()))) - AnyPairs(ps) - }, - fuzz.either3( - fuzz.constant(AnyUnaryConstr0(UnaryConstr0)), - fuzz.constant(AnyUnaryConstr1(UnaryConstr1(@"lorem ipsum"))), - { - let i <- fuzz.and_then(fuzz.int()) - let xs <- fuzz.map(fuzz.list(fuzz.list(fuzz.bytearray()))) - AnyUnaryConstr2(UnaryConstr2(i, xs)) - }, - ), - fuzz.either( - { - let b <- fuzz.map(fuzz.bool()) - AnyBinaryConstr0(b) - }, - { - let o <- fuzz.map(fuzz.option(fuzz.int())) - AnyBinaryConstr1(o) - }, - ), - ) -} - -test unit_deserialise_not_enough_bytes_1() { - expect None = deserialise(#"") -} - -test unit_deserialise_not_enough_bytes_2() { - expect None = deserialise(#"82") -} - -test unit_deserialise_non_empty_leftovers() { - expect None = deserialise(#"811442") -} - -test unit_deserialise_invalid_header() { - expect None = deserialise(#"f1") -} - -test unit_deserialise_invalid_uint() { - expect None = deserialise(#"1d0013bdae") -} - -/// A full script context with a minting policy and various assets. Meant to be -/// non-trivial and cover many things we might encounter in a transaction. 
-test bench_deserialise_script_context() { - expect Some(_) = - deserialise( - #"d8799fd8799f9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799f5820000000000000000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87980d87a80ffffff9fd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd87a80ffa140a1401a000f4240d87a9f5820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecffd87a80ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa340a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14561696b656e182a581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a4000d87980d87a80ffd8799fd8799fd87a9f581c00000000000000000000000000000000000000000000000000000000ffd8799fd8799fd8799f581c00000000000000000000000000000000000000000000000000000000ffffffffa240a1401a000f4240581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea14763617264616e6f01d87980d8799f581c68ad54b3a8124d9fe5caaaf2011a85d72096e696a2fb3d7f86c41717ffffff182aa2581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262ea24561696b656e2d4763617264616e6f01581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874a14474756e611b00005af3107a400080a0d8799fd8799fd87980d87a80ffd8799fd87b80d87a80ffff80a2d8799f581c0c8eaf490c53afbf27e3d84a3b57da51fbafe5aa78443fcec2dc262effd87980d8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ff182aa15820923918e403bf43c34b4ef6b48eb2ee04babed17320d8d1b9ff9ad086e86f44ecd879805820e757985e48e43a95a185ddba08c814bc20f81cb68544ac937a9b992e4e6c38a0a080d87a80d87a80ff182ad8799f581c12593b4cbf7fdfd8636db99fe356437cd6af8539aadaa0a401964874ffff", - ) -} - -test prop_deserialise_any_data(any via any_data()) { - when any is { - AnyInt(i) -> { - fuzz.label(@"Int") - expect Some(data) = deserialise(serialise(i)) - expect i_decoded: Int = data - i_decoded == i - } - AnyByteArray(bs) -> { - fuzz.label(@"ByteArray") - expect Some(data) = deserialise(serialise(bs)) - expect bs_decoded: ByteArray = data - bs_decoded == bs - } - AnyList(xs) -> { - fuzz.label(@"List") - expect Some(data) = deserialise(serialise(xs)) - expect xs_decoded: List = data - xs_decoded == xs - } - AnyPairs(ps) -> { - fuzz.label(@"Pairs") - expect Some(data) = deserialise(serialise(ps)) - expect ps_decoded: Pairs = data - ps_decoded == ps - } - AnyUnaryConstr0(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr0 = data - constr_decoded == constr - } - AnyUnaryConstr1(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr1 = data - constr_decoded == constr - } - AnyUnaryConstr2(constr) -> { - fuzz.label(@"(unary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: UnaryConstr2 = data - constr_decoded == constr - } - AnyBinaryConstr0(constr) -> { - fuzz.label(@"(binary) Constr") - expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: BinaryConstr0 = data - constr_decoded == constr - } - AnyBinaryConstr1(constr) -> { - fuzz.label(@"(binary) Constr") - 
expect Some(data) = deserialise(serialise(constr)) - expect constr_decoded: BinaryConstr1 = data - constr_decoded == constr - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak deleted file mode 100644 index 3d4d332e..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection.ak +++ /dev/null @@ -1,4 +0,0 @@ -/// A non negative integer that materializes the position of an element in a -/// collection. -pub type Index = - Int diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak deleted file mode 100644 index 681d0bae..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/dict.ak +++ /dev/null @@ -1,1174 +0,0 @@ -//// A module for working with bytearray dictionaries. -//// -//// -//// > [!IMPORTANT] -//// > -//// > Dictionaries are **ordered sets** of key-value pairs, which thus -//// > preserve some invariants. Specifically, each key is only present once in -//// > the dictionary and all keys are stored in ascending lexicographic order. -//// > -//// > These invariants allow for more optimized functions to operate on `Dict`, -//// > but as a trade-offs, prevent `Dict` from being serializable. To recover a `Dict` -//// > from an unknown `Data`, you must first recover an `Pairs` and use -//// > [`dict.from_ascending_list`](#from_ascending_list). - -use aiken/builtin - -/// An opaque `Dict`. The type is opaque because the module maintains some -/// invariant, namely: there's only one occurrence of a given key in the dictionary. -/// -/// Note that the `key` parameter is a phantom-type, and only present as a -/// means of documentation. Keys can be any type, yet will need to comparable -/// to use functions like `insert`. -/// -/// See for example: -/// -/// ```aiken -/// pub type Value = -/// Dict> -/// ``` -pub opaque type Dict { - inner: Pairs, -} - -// ## Constructing - -/// An empty dictionnary. -/// ```aiken -/// dict.to_pairs(dict.empty) == [] -/// ``` -pub const empty: Dict = Dict { inner: [] } - -const foo = #"666f6f" - -const bar = #"626172" - -const baz = #"62617a" - -const fixture_1 = - empty - |> insert(foo, 42) - |> insert(bar, 14) - -/// Like ['from_pairs'](#from_pairs), but from an already sorted list by ascending -/// keys. This function fails (i.e. halts the program execution) if the list isn't -/// sorted. -/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// -/// let result = -/// dict.from_ascending_pairs(pairs) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -/// -/// This is meant to be used to turn a list constructed off-chain into a `Dict` -/// which has taken care of maintaining interval invariants. This function still -/// performs a sanity check on all keys to avoid silly mistakes. 
It is, however, -/// considerably faster than ['from_pairs'](from_pairs) -pub fn from_ascending_pairs(xs: Pairs) -> Dict { - let Void = check_ascending_list(xs) - Dict { inner: xs } -} - -fn check_ascending_list(xs: Pairs) { - when xs is { - [] -> Void - [_] -> Void - [Pair(x0, _), Pair(x1, _) as e, ..rest] -> - if builtin.less_than_bytearray(x0, x1) { - check_ascending_list([e, ..rest]) - } else { - fail @"keys in associative list aren't in ascending order" - } - } -} - -/// Like [`from_ascending_pairs`](#from_ascending_pairs) but fails if **any** -/// value doesn't satisfy the predicate. -/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// -/// dict.from_ascending_pairs_with(pairs, fn(x) { x <= 250 }) // fail -/// ``` -pub fn from_ascending_pairs_with( - xs: Pairs, - predicate: fn(value) -> Bool, -) -> Dict { - let Void = check_ascending_pairs_with(xs, predicate) - Dict { inner: xs } -} - -fn check_ascending_pairs_with( - xs: Pairs, - predicate: fn(value) -> Bool, -) { - when xs is { - [] -> Void - [Pair(_, v)] -> - if predicate(v) { - Void - } else { - fail @"value doesn't satisfy predicate" - } - [Pair(x0, v0), Pair(x1, _) as e, ..rest] -> - if builtin.less_than_bytearray(x0, x1) { - if predicate(v0) { - check_ascending_pairs_with([e, ..rest], predicate) - } else { - fail @"value doesn't satisfy predicate" - } - } else { - fail @"keys in pairs aren't in ascending order" - } - } -} - -test bench_from_ascending_pairs() { - let dict = - from_ascending_pairs( - [ - Pair("aaaa", 1), Pair("aaab", 9), Pair("aaba", 5), Pair("aabb", 13), - Pair("abaa", 2), Pair("abab", 10), Pair("abba", 6), Pair("abbb", 14), - Pair("baaa", 3), Pair("baab", 11), Pair("baba", 7), Pair("babb", 15), - Pair("bbaa", 4), Pair("bbab", 12), Pair("bbba", 8), Pair("bbbb", 16), - ], - ) - - size(dict) == 16 -} - -/// Construct a dictionary from a list of key-value pairs. Note that when a key is present -/// multiple times, the first occurrence prevails. -/// -/// ```aiken -/// let pairs = [Pair("a", 100), Pair("c", 300), Pair("b", 200)] -/// -/// let result = -/// dict.from_pairs(pairs) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn from_pairs(self: Pairs) -> Dict { - Dict { inner: do_from_pairs(self) } -} - -fn do_from_pairs(xs: Pairs) -> Pairs { - when xs is { - [] -> [] - [Pair(k, v), ..rest] -> do_insert(do_from_pairs(rest), k, v) - } -} - -test from_list_1() { - from_pairs([]) == empty -} - -test from_list_2() { - from_pairs([Pair(foo, 42), Pair(bar, 14)]) == from_pairs( - [Pair(bar, 14), Pair(foo, 42)], - ) -} - -test from_list_3() { - from_pairs([Pair(foo, 42), Pair(bar, 14)]) == fixture_1 -} - -test from_list_4() { - from_pairs([Pair(foo, 42), Pair(bar, 14), Pair(foo, 1337)]) == fixture_1 -} - -test bench_from_pairs() { - let dict = - from_pairs( - [ - Pair("bbba", 8), Pair("bbab", 12), Pair("aabb", 13), Pair("aaab", 9), - Pair("bbbb", 16), Pair("aaaa", 1), Pair("aaba", 5), Pair("abab", 10), - Pair("baba", 7), Pair("baab", 11), Pair("abaa", 2), Pair("baaa", 3), - Pair("bbaa", 4), Pair("babb", 15), Pair("abbb", 14), Pair("abba", 6), - ], - ) - - size(dict) == 16 -} - -// ## Inspecting - -/// Finds a value in the dictionary, and returns the first key found to have that value. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 42) -/// |> dict.insert(key: "b", value: 14) -/// |> dict.insert(key: "c", value: 42) -/// |> dict.find(42) -/// -/// result == Some("a") -/// ``` -pub fn find(self: Dict, value v: value) -> Option { - do_find(self.inner, v) -} - -fn do_find(self: Pairs, value v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - Some(k2) - } else { - do_find(rest, v) - } - } -} - -test find_1() { - find(empty, foo) == None -} - -test find_2() { - find( - empty - |> insert(foo, 14), - 14, - ) == Some(foo) -} - -test find_3() { - find( - empty - |> insert(foo, 14), - 42, - ) == None -} - -test find_4() { - find( - empty - |> insert(foo, 14) - |> insert(bar, 42) - |> insert(baz, 14), - 14, - ) == Some(baz) -} - -/// Get a value in the dict by its key. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: "Aiken") -/// |> dict.get(key: "a") -/// -/// result == Some("Aiken") -/// ``` -pub fn get(self: Dict, key: ByteArray) -> Option { - do_get(self.inner, key) -} - -fn do_get(self: Pairs, key k: ByteArray) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - Some(v) - } else { - None - } - } else { - do_get(rest, k) - } - } -} - -test get_1() { - get(empty, foo) == None -} - -test get_2() { - let m = - empty - |> insert(foo, "Aiken") - |> insert(bar, "awesome") - get(m, key: foo) == Some("Aiken") -} - -test get_3() { - let m = - empty - |> insert(foo, "Aiken") - |> insert(bar, "awesome") - get(m, key: baz) == None -} - -test get_4() { - let m = - empty - |> insert("aaa", "1") - |> insert("bbb", "2") - |> insert("ccc", "3") - |> insert("ddd", "4") - |> insert("eee", "5") - |> insert("fff", "6") - |> insert("ggg", "7") - |> insert("hhh", "8") - |> insert("iii", "9") - |> insert("jjj", "10") - - get(m, "bcd") == None -} - -test get_5() { - let m = - empty - |> insert("aaa", "1") - |> insert("bbb", "2") - |> insert("ccc", "3") - |> insert("ddd", "4") - |> insert("eee", "5") - |> insert("fff", "6") - |> insert("ggg", "7") - |> insert("hhh", "8") - |> insert("iii", "9") - |> insert("jjj", "10") - - get(m, "kkk") == None -} - -/// Check if a key exists in the dictionary. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: "Aiken") -/// |> dict.has_key("a") -/// -/// result == True -/// ``` -pub fn has_key(self: Dict, key k: ByteArray) -> Bool { - do_has_key(self.inner, k) -} - -fn do_has_key(self: Pairs, key k: ByteArray) -> Bool { - when self is { - [] -> False - [Pair(k2, _), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - k == k2 - } else { - do_has_key(rest, k) - } - } -} - -test has_key_1() { - !has_key(empty, foo) -} - -test has_key_2() { - has_key( - empty - |> insert(foo, 14), - foo, - ) -} - -test has_key_3() { - !has_key( - empty - |> insert(foo, 14), - bar, - ) -} - -test has_key_4() { - has_key( - empty - |> insert(foo, 14) - |> insert(bar, 42), - bar, - ) -} - -/// Efficiently checks whether a dictionary is empty. -/// ```aiken -/// dict.is_empty(dict.empty) == True -/// ``` -pub fn is_empty(self: Dict) -> Bool { - when self.inner is { - [] -> True - _ -> False - } -} - -test is_empty_1() { - is_empty(empty) -} - -/// Extract all the keys present in a given `Dict`. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 14) -/// |> dict.insert("b", 42) -/// |> dict.insert("a", 1337) -/// |> dict.keys() -/// -/// result == ["a", "b"] -/// ``` -pub fn keys(self: Dict) -> List { - do_keys(self.inner) -} - -fn do_keys(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(k, _), ..rest] -> [k, ..do_keys(rest)] - } -} - -test keys_1() { - keys(empty) == [] -} - -test keys_2() { - keys( - empty - |> insert(foo, 0) - |> insert(bar, 0), - ) == [bar, foo] -} - -/// Return the number of key-value pairs in the dictionary. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.size() -/// -/// result == 3 -/// ``` -pub fn size(self: Dict) -> Int { - do_size(self.inner) -} - -fn do_size(self: Pairs) -> Int { - when self is { - [] -> 0 - [_, ..rest] -> 1 + do_size(rest) - } -} - -test size_1() { - size(empty) == 0 -} - -test size_2() { - size( - empty - |> insert(foo, 14), - ) == 1 -} - -test size_3() { - size( - empty - |> insert(foo, 14) - |> insert(bar, 42), - ) == 2 -} - -/// Extract all the values present in a given `Dict`. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 14) -/// |> dict.insert("b", 42) -/// |> dict.insert("c", 1337) -/// |> dict.values() -/// -/// result == [14, 42, 1337] -/// ``` -pub fn values(self: Dict) -> List { - do_values(self.inner) -} - -fn do_values(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(_, v), ..rest] -> [v, ..do_values(rest)] - } -} - -test values_1() { - values(empty) == [] -} - -test values_2() { - values( - empty - |> insert(foo, 3) - |> insert(bar, 4), - ) == [4, 3] -} - -// ## Modifying - -/// Remove a key-value pair from the dictionary. If the key is not found, no changes are made. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.delete(key: "a") -/// |> dict.to_pairs() -/// -/// result == [Pair("b", 200)] -/// ``` -pub fn delete(self: Dict, key: ByteArray) -> Dict { - Dict { inner: do_delete(self.inner, key) } -} - -fn do_delete( - self: Pairs, - key k: ByteArray, -) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - rest - } else { - self - } - } else { - [Pair(k2, v2), ..do_delete(rest, k)] - } - } -} - -test delete_1() { - delete(empty, foo) == empty -} - -test delete_2() { - let m = - empty - |> insert(foo, 14) - delete(m, foo) == empty -} - -test delete_3() { - let m = - empty - |> insert(foo, 14) - delete(m, bar) == m -} - -test delete_4() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - !has_key(delete(m, foo), foo) -} - -test delete_5() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - has_key(delete(m, bar), foo) -} - -test delete_6() { - let m = - empty - |> insert("aaa", 1) - |> insert("bbb", 2) - |> insert("ccc", 3) - |> insert("ddd", 4) - |> insert("eee", 5) - |> insert("fff", 6) - |> insert("ggg", 7) - |> insert("hhh", 8) - |> insert("iii", 9) - |> insert("jjj", 10) - - delete(m, "bcd") == m -} - -/// Keep only the key-value pairs that pass the given predicate. 
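// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// showing that `filter`, described above, keeps the remaining entries in their
// ascending key order.
test filter_preserves_key_order() {
  let result =
    from_pairs([Pair("c", 3), Pair("a", 1), Pair("b", 2)])
      |> filter(fn(_k, v) { v != 2 })
      |> to_pairs()
  result == [Pair("a", 1), Pair("c", 3)]
}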
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.filter(fn(k, _v) { k != "a" }) -/// |> dict.to_pairs() -/// -/// result == [Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn filter( - self: Dict, - with: fn(ByteArray, value) -> Bool, -) -> Dict { - Dict { inner: do_filter(self.inner, with) } -} - -fn do_filter( - self: Pairs, - with: fn(ByteArray, value) -> Bool, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> - if with(k, v) { - [Pair(k, v), ..do_filter(rest, with)] - } else { - do_filter(rest, with) - } - } -} - -test filter_1() { - filter(empty, fn(_, _) { True }) == empty -} - -test filter_2() { - let expected = - empty - |> insert(foo, 42) - filter(fixture_1, fn(_, v) { v > 14 }) == expected -} - -test filter_3() { - let expected = - empty - |> insert(bar, 14) - filter(fixture_1, fn(k, _) { k == bar }) == expected -} - -/// Insert a value in the dictionary at a given key. If the key already exists, its value is **overridden**. If you need ways to combine keys together, use (`insert_with`)[#insert_with]. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 1) -/// |> dict.insert(key: "b", value: 2) -/// |> dict.insert(key: "a", value: 3) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 3), Pair("b", 2)] -/// ``` -pub fn insert( - self: Dict, - key k: ByteArray, - value v: value, -) -> Dict { - Dict { inner: do_insert(self.inner, k, v) } -} - -fn do_insert( - self: Pairs, - key k: ByteArray, - value v: value, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_bytearray(k, k2) { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..rest] - } else { - [Pair(k2, v2), ..do_insert(rest, k, v)] - } - } - } -} - -test insert_1() { - let m1 = - empty - |> insert(foo, 42) - let m2 = - empty - |> insert(foo, 14) - insert(m1, foo, 14) == m2 -} - -test insert_2() { - let m1 = - empty - |> insert(foo, 42) - let m2 = - empty - |> insert(bar, 14) - insert(m1, bar, 14) == insert(m2, foo, 42) -} - -/// Insert a value in the dictionary at a given key. When the key already exist, the provided -/// merge function is called. The value existing in the dictionary is passed as the second argument -/// to the merge function, and the new value is passed as the third argument. 
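// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `insert_with`, making the argument order explicit. The callback receives
// the stored value as its second argument and the incoming value as its third,
// so returning the second argument keeps whatever was inserted first.
test insert_with_keeps_existing_value() {
  let keep_old = fn(_k, old, _new) { Some(old) }
  let result =
    empty
      |> insert_with(key: "a", value: 1, with: keep_old)
      |> insert_with(key: "a", value: 2, with: keep_old)
      |> to_pairs()
  result == [Pair("a", 1)]
}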
-/// -/// ```aiken -/// let sum = -/// fn (_k, a, b) { Some(a + b) } -/// -/// let result = -/// dict.empty -/// |> dict.insert_with(key: "a", value: 1, with: sum) -/// |> dict.insert_with(key: "b", value: 2, with: sum) -/// |> dict.insert_with(key: "a", value: 3, with: sum) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 4), Pair("b", 2)] -/// ``` -pub fn insert_with( - self: Dict, - key k: ByteArray, - value v: value, - with: fn(ByteArray, value, value) -> Option, -) -> Dict { - Dict { - inner: do_insert_with(self.inner, k, v, fn(k, v1, v2) { with(k, v2, v1) }), - } -} - -test insert_with_1() { - let sum = - fn(_k, a, b) { Some(a + b) } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: sum) - |> insert_with(key: "bar", value: 2, with: sum) - |> to_pairs() - - result == [Pair("bar", 2), Pair("foo", 1)] -} - -test insert_with_2() { - let sum = - fn(_k, a, b) { Some(a + b) } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: sum) - |> insert_with(key: "bar", value: 2, with: sum) - |> insert_with(key: "foo", value: 3, with: sum) - |> to_pairs() - - result == [Pair("bar", 2), Pair("foo", 4)] -} - -test insert_with_3() { - let with = - fn(k, a, _b) { - if k == "foo" { - Some(a) - } else { - None - } - } - - let result = - empty - |> insert_with(key: "foo", value: 1, with: with) - |> insert_with(key: "bar", value: 2, with: with) - |> insert_with(key: "foo", value: 3, with: with) - |> insert_with(key: "bar", value: 4, with: with) - |> to_pairs() - - result == [Pair("foo", 1)] -} - -/// Apply a function to all key-value pairs in a Dict. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.map(fn(_k, v) { v * 2 }) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 200), Pair("b", 400), Pair("c", 600)] -/// ``` -pub fn map(self: Dict, with: fn(ByteArray, a) -> b) -> Dict { - Dict { inner: do_map(self.inner, with) } -} - -fn do_map( - self: Pairs, - with: fn(ByteArray, a) -> b, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..do_map(rest, with)] - } -} - -test map_1() { - let result = - fixture_1 - |> map(with: fn(k, _) { k }) - get(result, foo) == Some(foo) -} - -test map_2() { - let result = - fixture_1 - |> map(with: fn(_, v) { v + 1 }) - get(result, foo) == Some(43) && size(result) == size(fixture_1) -} - -/// Remove a key-value pair from the dictionary and return its value. If the key is not found, no changes are made. 
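// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `pop`, described above. It returns the removed value wrapped in `Option`
// together with the dictionary stripped of that key.
test pop_returns_option_and_rest() {
  let expected_rest = empty |> insert(key: "b", value: 200)
  let (value, rest) =
    empty
      |> insert(key: "a", value: 100)
      |> insert(key: "b", value: 200)
      |> pop(key: "a")
  and {
    value == Some(100),
    rest == expected_rest,
  }
}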
-/// -/// ```aiken -/// let (value, _) = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.pop(key: "a") -/// -/// result == 100 -/// ``` -pub fn pop( - self: Dict, - key: ByteArray, -) -> (Option, Dict) { - do_pop(self.inner, key, fn(value, inner) { (value, Dict { inner }) }) -} - -fn do_pop( - self: Pairs, - key k: ByteArray, - return: fn(Option, Pairs) -> result, -) -> result { - when self is { - [] -> return(None, []) - [Pair(k2, v2), ..rest] -> - if builtin.less_than_equals_bytearray(k, k2) { - if k == k2 { - return(Some(v2), rest) - } else { - return(None, self) - } - } else { - do_pop( - rest, - k, - fn(value, inner) { return(value, [Pair(k2, v2), ..inner]) }, - ) - } - } -} - -test pop_1() { - pop(empty, foo) == (None, empty) -} - -test pop_2() { - let m = - empty - |> insert(foo, 14) - pop(m, foo) == (Some(14), empty) -} - -test pop_3() { - let m = - empty - |> insert(foo, 14) - pop(m, bar) == (None, m) -} - -test pop_4() { - let m = - empty - |> insert(foo, 14) - |> insert(bar, 14) - pop(m, foo) == (Some(14), empty |> insert(bar, 14)) -} - -test pop_6() { - let m = - empty - |> insert("aaa", 1) - |> insert("bbb", 2) - |> insert("ccc", 3) - |> insert("ddd", 4) - |> insert("eee", 5) - |> insert("fff", 6) - |> insert("ggg", 7) - |> insert("hhh", 8) - |> insert("iii", 9) - |> insert("jjj", 10) - - pop(m, "bcd") == (None, m) -} - -// ## Combining - -/// Combine two dictionaries. If the same key exist in both the left and -/// right dictionary, values from the left are preferred (i.e. left-biaised). -/// -/// ```aiken -/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) -/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) -/// -/// let result = -/// dict.union(left_dict, right_dict) |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn union( - left: Dict, - right: Dict, -) -> Dict { - Dict { inner: do_union(left.inner, right.inner) } -} - -fn do_union( - left: Pairs, - right: Pairs, -) -> Pairs { - when left is { - [] -> right - [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) - } -} - -test union_1() { - union(fixture_1, empty) == fixture_1 -} - -test union_2() { - union(empty, fixture_1) == fixture_1 -} - -test union_3() { - let left = - empty - |> insert(foo, 14) - let right = - empty - |> insert(bar, 42) - |> insert(baz, 1337) - union(left, right) == from_pairs( - [Pair(foo, 14), Pair(baz, 1337), Pair(bar, 42)], - ) -} - -test union_4() { - let left = - empty - |> insert(foo, 14) - let right = - empty - |> insert(bar, 42) - |> insert(foo, 1337) - union(left, right) == from_pairs([Pair(foo, 14), Pair(bar, 42)]) -} - -/// Like [`union`](#union) but allows specifying the behavior to adopt when a key is present -/// in both dictionaries. The first value received correspond to the value in the left -/// dictionnary, whereas the second argument corresponds to the value in the right dictionnary. -/// -/// When passing `None`, the value is removed and not present in the union. 
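// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// showing that returning `None` from the `union_with` callback removes the
// conflicting key from the result entirely, as stated above.
test union_with_none_drops_conflicting_key() {
  let left = empty |> insert("a", 1)
  let right = empty |> insert("a", 2) |> insert("b", 3)
  let result = union_with(left, right, with: fn(_k, _left, _right) { None })
  result == from_pairs([Pair("b", 3)])
}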
-/// -/// ```aiken -/// let left_dict = dict.from_pairs([Pair("a", 100), Pair("b", 200)]) -/// let right_dict = dict.from_pairs([Pair("a", 150), Pair("c", 300)]) -/// -/// let result = -/// dict.union_with( -/// left_dict, -/// right_dict, -/// fn(_k, v1, v2) { Some(v1 + v2) }, -/// ) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 250), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn union_with( - left: Dict, - right: Dict, - with: fn(ByteArray, value, value) -> Option, -) -> Dict { - Dict { inner: do_union_with(left.inner, right.inner, with) } -} - -fn do_union_with( - left: Pairs, - right: Pairs, - with: fn(ByteArray, value, value) -> Option, -) -> Pairs { - when left is { - [] -> right - [Pair(k, v), ..rest] -> - do_union_with(rest, do_insert_with(right, k, v, with), with) - } -} - -fn do_insert_with( - self: Pairs, - key k: ByteArray, - value v: value, - with: fn(ByteArray, value, value) -> Option, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if builtin.less_than_bytearray(k, k2) { - [Pair(k, v), ..self] - } else { - if k == k2 { - when with(k, v, v2) is { - Some(combined) -> [Pair(k, combined), ..rest] - None -> rest - } - } else { - [Pair(k2, v2), ..do_insert_with(rest, k, v, with)] - } - } - } -} - -test union_with_1() { - let left = - empty - |> insert(foo, 14) - - let right = - empty - |> insert(bar, 42) - |> insert(foo, 1337) - - let result = union_with(left, right, with: fn(_, l, r) { Some(l + r) }) - - result == from_pairs([Pair(foo, 1351), Pair(bar, 42)]) -} - -// ## Transforming - -/// Fold over the key-value pairs in a dictionary. The fold direction follows keys -/// in ascending order and is done from left-to-right. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.foldl(0, fn(_k, v, r) { v + r }) -/// -/// result == 600 -/// ``` -pub fn foldl( - self: Dict, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - do_foldl(self.inner, zero, with) -} - -fn do_foldl( - self: Pairs, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> do_foldl(rest, with(k, v, zero), with) - } -} - -test fold_1() { - foldl(empty, 14, fn(_, _, _) { 42 }) == 14 -} - -test fold_2() { - foldl(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 -} - -/// Fold over the key-value pairs in a dictionary. The fold direction follows keys -/// in ascending order and is done from right-to-left. -/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert(key: "a", value: 100) -/// |> dict.insert(key: "b", value: 200) -/// |> dict.insert(key: "c", value: 300) -/// |> dict.foldr(0, fn(_k, v, r) { v + r }) -/// -/// result == 600 -/// ``` -pub fn foldr( - self: Dict, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - do_foldr(self.inner, zero, with) -} - -fn do_foldr( - self: Pairs, - zero: result, - with: fn(ByteArray, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> with(k, v, do_foldr(rest, zero, with)) - } -} - -test foldr_1() { - foldr(empty, 14, fn(_, _, _) { 42 }) == 14 -} - -test foldr_2() { - foldr(fixture_1, zero: 0, with: fn(_, v, total) { v + total }) == 56 -} - -/// Get the inner list holding the dictionary data. 
-/// -/// ```aiken -/// let result = -/// dict.empty -/// |> dict.insert("a", 100) -/// |> dict.insert("b", 200) -/// |> dict.insert("c", 300) -/// |> dict.to_pairs() -/// -/// result == [Pair("a", 100), Pair("b", 200), Pair("c", 300)] -/// ``` -pub fn to_pairs(self: Dict) -> Pairs { - self.inner -} - -test to_list_1() { - to_pairs(empty) == [] -} - -test to_list_2() { - to_pairs(fixture_1) == [Pair(bar, 14), Pair(foo, 42)] -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak deleted file mode 100644 index b8e7f675..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/list.ak +++ /dev/null @@ -1,1411 +0,0 @@ -use aiken/builtin -use aiken/primitive/bytearray -use aiken/primitive/int - -// ## Constructing - -/// Add an element in front of the list. Sometimes useful when combined with -/// other functions. -/// -/// ```aiken -/// list.push([2, 3], 1) == [1, ..[2, 3]] == [1, 2, 3] -/// ``` -pub fn push(self: List, elem: a) -> List { - [elem, ..self] -} - -test push_1() { - push([], 0) == [0] -} - -test push_2() { - push([2, 3], 1) == [1, 2, 3] -} - -/// Construct a list of a integer from a given range. -/// -/// ```aiken -/// list.range(0, 3) == [0, 1, 2, 3] -/// list.range(-1, 1) == [-1, 0, 1] -/// ``` -pub fn range(from: Int, to: Int) -> List { - if from > to { - [] - } else { - [from, ..range(from + 1, to)] - } -} - -test range_1() { - range(0, 3) == [0, 1, 2, 3] -} - -test range_2() { - range(-1, 1) == [-1, 0, 1] -} - -/// Construct a list filled with n copies of a value. -/// -/// ```aiken -/// list.repeat("na", 3) == ["na", "na", "na"] -/// ``` -pub fn repeat(elem: a, n_times: Int) -> List { - if n_times <= 0 { - [] - } else { - [elem, ..repeat(elem, n_times - 1)] - } -} - -test repeat_1() { - repeat(42, 0) == [] -} - -test repeat_2() { - repeat(14, 3) == [14, 14, 14] -} - -// ## Inspecting - -/// Determine if all elements of the list satisfy the given predicate. -/// -/// Note: an empty list always satisfies the predicate. -/// -/// ```aiken -/// list.all([], fn(n) { n > 0 }) == True -/// list.all([1, 2, 3], fn(n) { n > 0 }) == True -/// list.all([1, 2, 3], fn(n) { n == 2 }) == False -/// ``` -pub fn all(self: List, predicate: fn(a) -> Bool) -> Bool { - when self is { - [] -> True - [x, ..xs] -> predicate(x) && all(xs, predicate) - } -} - -test all_1() { - all([1, 2, 3], fn(n) { n > 0 }) == True -} - -test all_2() { - all([1, 2, 3], fn(n) { n > 42 }) == False -} - -test all_3() { - all([], fn(n) { n == 42 }) == True -} - -/// Determine if at least one element of the list satisfies the given predicate. -/// -/// Note: an empty list never satisfies the predicate. -/// -/// ```aiken -/// list.any([], fn(n) { n > 2 }) == False -/// list.any([1, 2, 3], fn(n) { n > 0 }) == True -/// list.any([1, 2, 3], fn(n) { n == 2 }) == True -/// list.any([1, 2, 3], fn(n) { n < 0 }) == False -/// ``` -pub fn any(self: List, predicate: fn(a) -> Bool) -> Bool { - when self is { - [] -> False - [x, ..xs] -> predicate(x) || any(xs, predicate) - } -} - -test any_1() { - any([1, 2, 3], fn(n) { n > 0 }) == True -} - -test any_2() { - any([1, 2, 3], fn(n) { n > 42 }) == False -} - -test any_3() { - any([], fn(n) { n == 42 }) == False -} - -/// Return Some(item) at the index or None if the index is out of range. 
The index is 0-based. -/// -/// ```aiken -/// list.at([1, 2, 3], 1) == Some(2) -/// list.at([1, 2, 3], 42) == None -/// ``` -pub fn at(self: List, index: Int) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if index == 0 { - Some(x) - } else { - at(xs, index - 1) - } - } -} - -test at_1() { - at([1, 2, 3], -1) == None -} - -test at_2() { - at([], 0) == None -} - -test at_3() { - at([1, 2, 3], 3) == None -} - -test at_4() { - at([1], 0) == Some(1) -} - -test at_5() { - at([1, 2, 3], 2) == Some(3) -} - -/// Count how many items in the list satisfy the given predicate. -/// -/// ```aiken -/// list.count([], fn(a) { a > 2}) == 0 -/// list.count([1, 2, 3], fn(a) { n > 0 }) == 3 -/// list.count([1, 2, 3], fn(a) { n >= 2 }) == 2 -/// list.count([1, 2, 3], fn(a) { n > 5 }) == 0 -/// ``` -pub fn count(self: List, predicate: fn(a) -> Bool) -> Int { - foldr( - self, - 0, - fn(item, total) { - if predicate(item) { - total + 1 - } else { - total - } - }, - ) -} - -test count_empty() { - count([], fn(a) { a > 2 }) == 0 -} - -test count_all() { - count([1, 2, 3], fn(a) { a > 0 }) == 3 -} - -test count_some() { - count([1, 2, 3], fn(a) { a >= 2 }) == 2 -} - -test count_none() { - count([1, 2, 3], fn(a) { a > 5 }) == 0 -} - -/// Find the first element satisfying the given predicate, if any. -/// -/// ```aiken -/// list.find([1, 2, 3], fn(x) { x == 2 }) == Some(2) -/// list.find([4, 5, 6], fn(x) { x == 2 }) == None -/// ``` -pub fn find(self: List, predicate: fn(a) -> Bool) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if predicate(x) { - Some(x) - } else { - find(xs, predicate) - } - } -} - -test find_1() { - find([1, 2, 3], fn(x) { x == 1 }) == Some(1) -} - -test find_2() { - find([1, 2, 3], fn(x) { x > 42 }) == None -} - -test find_3() { - find([], fn(_) { True }) == None -} - -/// Figures out whether a list contain the given element. -/// -/// ```aiken -/// list.has([1, 2, 3], 2) == True -/// list.has([1, 2, 3], 14) == False -/// list.has([], 14) == False -/// ``` -pub fn has(self: List, elem: a) -> Bool { - when self is { - [] -> False - [x, ..xs] -> - if x == elem { - True - } else { - has(xs, elem) - } - } -} - -test has_1() { - has([1, 2, 3], 1) == True -} - -test has_2() { - has([1, 2, 3], 14) == False -} - -test has_3() { - has([], 14) == False -} - -/// Get the first element of a list -/// -/// ```aiken -/// list.head([1, 2, 3]) == Some(1) -/// list.head([]) == None -/// ``` -pub fn head(self: List) -> Option { - when self is { - [] -> None - _ -> Some(builtin.head_list(self)) - } -} - -test head_1() { - head([1, 2, 3]) == Some(1) -} - -test head_2() { - head([]) == None -} - -/// Checks whether a list is empty. -/// -/// ```aiken -/// list.is_empty([]) == True -/// list.is_empty([1, 2, 3]) == False -/// ``` -pub fn is_empty(self: List) -> Bool { - when self is { - [] -> True - _ -> False - } -} - -test is_empty_1() { - is_empty([]) == True -} - -test is_empty_2() { - is_empty([1, 2, 3]) == False -} - -/// Gets the index of an element of a list, if any. Otherwise, returns None. 
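// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `index_of`, described above. The position is 0-based, wrapped in
// `Option`, and only the first occurrence is reported.
test index_of_reports_first_occurrence() {
  index_of([5, 7, 5], 5) == Some(0)
}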
-/// -/// ```aiken -/// list.index_of([1, 5, 2], 2) == Some(2) -/// list.index_of([1, 7, 3], 4) == None -/// list.index_of([1, 0, 9, 6], 6) == 3 -/// list.index_of([], 6) == None -/// ``` -pub fn index_of(self: List, elem: a) -> Option { - do_index_of(self, elem, 0) -} - -fn do_index_of(self: List, elem: a, i: Int) -> Option { - when self is { - [] -> None - [x, ..xs] -> - if x == elem { - Some(i) - } else { - do_index_of(xs, elem, i + 1) - } - } -} - -test index_of_1() { - index_of([1, 5, 2], 2) == Some(2) -} - -test index_of_2() { - index_of([1, 7, 3], 4) == None -} - -test index_of_3() { - index_of([1, 0, 9, 6], 6) == Some(3) -} - -test index_of_4() { - index_of([], 6) == None -} - -/// Get the last in the given list, if any. -/// -/// ```aiken -/// list.last([]) == None -/// list.last([1, 2, 3]) == Some(3) -/// ``` -pub fn last(self: List) -> Option { - when self is { - [] -> None - [x] -> Some(x) - [_, ..xs] -> last(xs) - } -} - -test last_1() { - last([]) == None -} - -test last_2() { - last([1]) == Some(1) -} - -test last_3() { - last([1, 2, 3, 4]) == Some(4) -} - -/// Get the number of elements in the given list. -/// -/// ```aiken -/// list.length([]) == 0 -/// list.length([1, 2, 3]) == 3 -/// ``` -pub fn length(self: List) -> Int { - when self is { - [] -> 0 - [_, ..xs] -> 1 + length(xs) - } -} - -test length_1() { - length([]) == 0 -} - -test length_2() { - length([1, 2, 3]) == 3 -} - -// ## Modifying - -// ### Extracting - -/// Remove the first occurrence of the given element from the list. -/// -/// ```aiken -/// list.delete([1, 2, 3, 1], 1) == [2, 3, 1] -/// list.delete([1, 2, 3], 14) == [1, 2, 3] -/// ``` -pub fn delete(self: List, elem: a) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if x == elem { - xs - } else { - [x, ..delete(xs, elem)] - } - } -} - -test delete_1() { - delete([], 42) == [] -} - -test delete_2() { - delete([1, 2, 3, 1], 1) == [2, 3, 1] -} - -test delete_3() { - delete([1, 2, 3], 14) == [1, 2, 3] -} - -test delete_4() { - delete([2], 2) == [] -} - -/// Drop the first `n` elements of a list. -/// -/// ```aiken -/// list.drop([1, 2, 3], 2) == [3] -/// list.drop([], 42) == [] -/// list.drop([1, 2, 3], 42) == [] -/// ``` -pub fn drop(self: List, n: Int) -> List { - if n <= 0 { - self - } else { - when self is { - [] -> [] - [_x, ..xs] -> drop(xs, n - 1) - } - } -} - -test drop_1() { - drop([], 42) == [] -} - -test drop_2() { - drop([1, 2, 3], 2) == [3] -} - -/// Returns the suffix of the given list after removing all elements that satisfy the predicate. -/// -/// ```aiken -/// list.drop_while([1, 2, 3], fn(x) { x < 2 }) == [2, 3] -/// list.drop_while([], fn(x) { x > 2 }) == [] -/// list.drop_while([1, 2, 3], fn(x) { x == 3 }) == [1, 2, 3] -/// ``` -pub fn drop_while(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - drop_while(xs, predicate) - } else { - self - } - } -} - -test drop_while_1() { - drop_while([], fn(x) { x > 2 }) == [] -} - -test drop_while_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x > 5 }) == [5, 4, 3, 2, 1] -} - -test drop_while_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x == 42 }) == xs -} - -test drop_while_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - drop_while(xs, fn(x) { x < 42 }) == [] -} - -/// Produce a list of elements that satisfy a predicate. 
-/// -/// ```aiken -/// list.filter([1, 2, 3], fn(x) { x >= 2 }) == [2, 3] -/// list.filter([], fn(x) { x > 2 }) == [] -/// list.filter([1, 2, 3], fn(x) { x == 3 }) == [3] -/// ``` -pub fn filter(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - [x, ..filter(xs, predicate)] - } else { - filter(xs, predicate) - } - } -} - -test filter_1() { - filter([], fn(x) { x > 0 }) == [] -} - -test filter_2() { - let xs = [1, 2, 3, 4, 5, 6] - filter(xs, fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] -} - -test filter_3() { - let filter_foldr = - fn(xs, f) { - foldr( - xs, - [], - fn(x, ys) { - if f(x) { - [x, ..ys] - } else { - ys - } - }, - ) - } - - let is_odd = - fn(n) { builtin.mod_integer(n, 2) != 0 } - - filter_foldr([1, 2, 3], is_odd) == filter([1, 2, 3], is_odd) -} - -/// Produce a list of transformed elements that satisfy a predicate. -/// -/// ```aiken -/// let transform = fn(x) { if x % 2 == 0 { None } else { Some(3*x) } } -/// list.filter_map([1, 2, 3], transform) == [3, 9] -/// ``` -pub fn filter_map(self: List, predicate: fn(a) -> Option) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when predicate(x) is { - None -> filter_map(xs, predicate) - Some(y) -> [y, ..filter_map(xs, predicate)] - } - } -} - -test filter_map_1() { - filter_map([], fn(_) { Some(42) }) == [] -} - -test filter_map_2() { - filter_map( - [1, 2, 3, 4, 5, 6], - fn(x) { - if builtin.mod_integer(x, 2) != 0 { - Some(3 * x) - } else { - None - } - }, - ) == [3, 9, 15] -} - -/// Return all elements except the last one. -/// -/// ```aiken -/// list.init([]) == None -/// list.init([1, 2, 3]) == Some([1, 2]) -/// ``` -pub fn init(self: List) -> Option> { - when self is { - [] -> None - _ -> Some(do_init(self)) - } -} - -fn do_init(self: List) -> List { - when self is { - [] -> fail @"unreachable" - [_] -> [] - [x, ..xs] -> [x, ..do_init(xs)] - } -} - -test init_1() { - init([]) == None -} - -test init_2() { - init([1]) == Some([]) -} - -test init_3() { - init([1, 2, 3, 4]) == Some([1, 2, 3]) -} - -/// Returns a tuple with all elements that satisfy the predicate at first -/// element, and the rest as second element. -/// -/// ```aiken -/// list.partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) -/// ``` -pub fn partition(self: List, predicate: fn(a) -> Bool) -> (List, List) { - when self is { - [] -> ([], []) - [x, ..xs] -> { - let (left, right) = partition(xs, predicate) - if predicate(x) { - ([x, ..left], right) - } else { - (left, [x, ..right]) - } - } - } -} - -test partition_1() { - partition([], fn(x) { x > 2 }) == ([], []) -} - -test partition_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x > 5 }) == ([10, 9, 8, 7, 6], [5, 4, 3, 2, 1]) -} - -test partition_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x == 42 }) == ([], xs) -} - -test partition_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - partition(xs, fn(x) { x < 42 }) == (xs, []) -} - -test partition_5() { - partition([1, 2, 3, 4], fn(x) { x % 2 == 0 }) == ([2, 4], [1, 3]) -} - -/// Extract a sublist from the given list using 0-based indexes. Negative -/// indexes wrap over, so `-1` refers to the last element of the list. 
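// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `slice`, described above. A negative `from` counts back from the end of
// the list, so `-3` starts at the third element from the last.
test slice_negative_from_counts_from_end() {
  slice([1, 2, 3, 4, 5], from: -3, to: 4) == [3, 4, 5]
}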
-/// -/// ```aiken -/// list.slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] -/// list.slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] -/// list.slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] -/// ``` -pub fn slice(self: List, from: Int, to: Int) -> List { - let (i, l) = - if from >= 0 { - (from, None) - } else { - let l = length(self) - (l + from, Some(l)) - } - - let j = - if to >= 0 { - to - i + 1 - } else { - when l is { - Some(l) -> l + to - i + 1 - None -> length(self) + to - i + 1 - } - } - - self - |> drop(i) - |> take(j) -} - -test slice_1() { - slice([1, 2, 3], 0, 2) == [1, 2, 3] -} - -test slice_2() { - slice([1, 2, 3, 4, 5, 6], from: 2, to: 4) == [3, 4, 5] -} - -test slice_3() { - slice([1, 2, 3, 4, 5, 6], from: -2, to: -1) == [5, 6] -} - -test slice_4() { - slice([1, 2, 3, 4, 5, 6], from: 1, to: -1) == [2, 3, 4, 5, 6] -} - -test slice_5() { - slice([1, 2, 3, 4, 5, 6], from: -4, to: -3) == [3, 4] -} - -test slice_6() { - slice([1, 2, 3, 4, 5, 6], from: -2, to: 1) == [] -} - -/// Cut a list in two, such that the first list contains the given number of / -/// elements and the second list contains the rest. -/// -/// Fundamentally equivalent to (but more efficient): -/// -/// ```aiken -/// // span(xs, n) == (take(xs, n), drop(xs, n)) -/// span([1, 2, 3, 4, 5], 3) == ([1, 2, 3], [4, 5]) -/// ``` -pub fn span(self: List, n: Int) -> (List, List) { - when self is { - [] -> ([], []) - [x, ..xs] -> - if n <= 0 { - ([], self) - } else { - let (left, right) = span(xs, n - 1) - ([x, ..left], right) - } - } -} - -test span_1() { - span([], 2) == ([], []) -} - -test span_2() { - span([1, 2, 3], 2) == ([1, 2], [3]) -} - -test span_3() { - span([1, 2, 3], -1) == ([], [1, 2, 3]) -} - -test span_4() { - span([1, 2, 3], 42) == ([1, 2, 3], []) -} - -/// Get elements of a list after the first one, if any. -/// -/// ```aiken -/// list.tail([]) == None -/// list.tail([1, 2, 3]) == Some([2, 3]) -/// ``` -pub fn tail(self: List) -> Option> { - when self is { - [] -> None - [_, ..xs] -> Some(xs) - } -} - -test tail_1() { - tail([1, 2, 3]) == Some([2, 3]) -} - -test tail_2() { - tail([]) == None -} - -/// Get the first `n` elements of a list. -/// -/// ```aiken -/// list.take([1, 2, 3], 2) == [1, 2] -/// list.take([1, 2, 3], 14) == [1, 2, 3] -/// ``` -pub fn take(self: List, n: Int) -> List { - if n <= 0 { - [] - } else { - when self is { - [] -> [] - [x, ..xs] -> [x, ..take(xs, n - 1)] - } - } -} - -test take_1() { - take([], 42) == [] -} - -test take_2() { - take([1, 2, 3], 2) == [1, 2] -} - -/// Returns the longest prefix of the given list where all elements satisfy the predicate. -/// -/// ```aiken -/// list.take_while([1, 2, 3], fn(x) { x > 2 }) == [] -/// list.take_while([1, 2, 3], fn(x) { x < 2 }) == [1] -/// ``` -pub fn take_while(self: List, predicate: fn(a) -> Bool) -> List { - when self is { - [] -> [] - [x, ..xs] -> - if predicate(x) { - [x, ..take_while(xs, predicate)] - } else { - [] - } - } -} - -test take_while_1() { - take_while([], fn(x) { x > 2 }) == [] -} - -test take_while_2() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x > 5 }) == [10, 9, 8, 7, 6] -} - -test take_while_3() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x == 42 }) == [] -} - -test take_while_4() { - let xs = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1] - take_while(xs, fn(x) { x < 42 }) == xs -} - -/// Removes duplicate elements from a list. 
-/// -/// ```aiken -/// list.unique([1, 2, 3, 1]) == [1, 2, 3] -/// ``` -pub fn unique(self: List) -> List { - when self is { - [] -> [] - [x, ..xs] -> [x, ..unique(filter(xs, fn(y) { y != x }))] - } -} - -test unique_1() { - unique([]) == [] -} - -test unique_2() { - let xs = [1, 2, 3, 1, 1, 3, 4, 1, 2, 3, 2, 4, 5, 6, 7, 8, 9, 10, 9] - unique(xs) == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] -} - -// ### Mapping - -/// Map elements of a list into a new list and flatten the result. -/// -/// ```aiken -/// list.flat_map([1, 2, 3], fn(a) { [a, 2*a] }) == [1, 2, 2, 4, 3, 6] -/// ``` -pub fn flat_map(self: List, with: fn(a) -> List) -> List { - foldr(self, [], fn(x, xs) { concat(with(x), xs) }) -} - -test flat_map_1() { - flat_map([], fn(a) { [a] }) == [] -} - -test flat_map_2() { - flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] -} - -/// Perform an action for each element of a list. -/// -/// ```aiken -/// list.for_each(labels, do: fn(lbl) { trace lbl Void }) -/// ``` -pub fn for_each(self: List, do: fn(a) -> Void) -> Void { - foldr(self, Void, fn(x, _) { do(x) }) -} - -test for_each_1() { - for_each( - [@"hello", @"world"], - do: fn(lbl) { - trace lbl - Void - }, - ) -} - -/// List [`map`](#map) but provides the position (0-based) of the elements while iterating. -/// -/// ```aiken -/// list.indexed_map([1, 2, 3], fn(i, x) { i + x }) == [1, 3, 5] -/// ``` -pub fn indexed_map(self: List, with: fn(Int, a) -> result) -> List { - do_indexed_map(0, self, with) -} - -fn do_indexed_map( - n: Int, - self: List, - with: fn(Int, a) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> [with(n, x), ..do_indexed_map(n + 1, xs, with)] - } -} - -test indexed_map_1() { - indexed_map([], fn(i, _n) { i }) == [] -} - -test indexed_map_2() { - indexed_map( - [4, 8, 13, 2], - fn(i, n) { - if n == 8 { - n - } else { - i - } - }, - ) == [0, 8, 2, 3] -} - -/// Apply a function to each element of a list. -/// -/// ```aiken -/// list.map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] -/// ``` -pub fn map(self: List, with: fn(a) -> result) -> List { - when self is { - [] -> [] - [x, ..xs] -> [with(x), ..map(xs, with)] - } -} - -test map_1() { - map([], fn(n) { n + 1 }) == [] -} - -test map_2() { - map([1, 2, 3, 4], fn(n) { n + 1 }) == [2, 3, 4, 5] -} - -/// Apply a function of two arguments, combining elements from two lists. -/// -/// Note: if one list is longer, the extra elements are dropped. -/// -/// ```aiken -/// list.map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] -/// ``` -pub fn map2( - self: List, - bs: List, - with: fn(a, b) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> [with(x, y), ..map2(xs, ys, with)] - } - } -} - -test map2_1() { - map2([], [1, 2, 3], fn(a, b) { a + b }) == [] -} - -test map2_2() { - map2([1, 2, 3], [1, 2], fn(a, b) { a + b }) == [2, 4] -} - -test map2_3() { - map2([42], [1, 2, 3], fn(_a, b) { Some(b) }) == [Some(1)] -} - -/// Apply a function of three arguments, combining elements from three lists. -/// -/// Note: if one list is longer, the extra elements are dropped. 
-/// -/// ```aiken -/// list.map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] -/// ``` -pub fn map3( - self: List, - bs: List, - cs: List, - with: fn(a, b, c) -> result, -) -> List { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> - when cs is { - [] -> [] - [z, ..zs] -> [with(x, y, z), ..map3(xs, ys, zs, with)] - } - } - } -} - -test map3_1() { - map3([], [], [1, 2, 3], fn(a, b, c) { a + b + c }) == [] -} - -test map3_2() { - map3([1, 2, 3], [1, 2], [1, 2, 3], fn(a, b, c) { a + b + c }) == [3, 6] -} - -/// Return the list with its elements in the reserve order. -/// -/// ```aiken -/// list.reverse([1, 2, 3]) == [3, 2, 1] -/// ``` -pub fn reverse(self: List) -> List { - foldl(self, [], fn(x, xs) { [x, ..xs] }) -} - -test reverse_1() { - reverse([]) == [] -} - -test reverse_2() { - reverse([1, 2, 3]) == [3, 2, 1] -} - -/// Sort a list in ascending order using the given comparison function. -/// -/// ```aiken -/// use aiken/int -/// -/// sort([3, 1, 4, 0, 2], int.compare) == [0, 1, 2, 3, 4] -/// sort([1, 2, 3], int.compare) == [1, 2, 3] -/// ``` -pub fn sort(self: List, compare: fn(a, a) -> Ordering) -> List { - when self is { - [] -> [] - [x, ..xs] -> insert(sort(xs, compare), x, compare) - } -} - -fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { - when self is { - [] -> [e] - [x, ..xs] -> - if compare(e, x) == Less { - [e, ..self] - } else { - [x, ..insert(xs, e, compare)] - } - } -} - -test sort_1() { - let xs = [6, 7, 5, 4, 1, 3, 9, 8, 0, 2] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_2() { - let xs = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_3() { - let xs = [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] - sort(xs, int.compare) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] -} - -test sort_4() { - sort([], int.compare) == [] -} - -/// Decompose a list of tuples into a tuple of lists. -/// -/// ``` -/// list.unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) -/// ``` -pub fn unzip(self: List<(a, b)>) -> (List, List) { - when self is { - [] -> ([], []) - [(a, b), ..xs] -> { - let (a_tail, b_tail) = unzip(xs) - ([a, ..a_tail], [b, ..b_tail]) - } - } -} - -test unzip_1() { - unzip([]) == ([], []) -} - -test unzip_2() { - unzip([(1, "a"), (2, "b")]) == ([1, 2], ["a", "b"]) -} - -// ## Combining - -/// Merge two lists together. -/// -/// ```aiken -/// list.concat([], []) == [] -/// list.concat([], [1, 2, 3]) == [1, 2, 3] -/// list.concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] -/// ``` -pub fn concat(left: List, right: List) -> List { - when left is { - [] -> right - [x, ..xs] -> [x, ..concat(xs, right)] - } -} - -test concat_1() { - concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] -} - -test concat_2() { - concat([1, 2, 3], []) == [1, 2, 3] -} - -test concat_3() { - concat([], [1, 2, 3]) == [1, 2, 3] -} - -/// Remove the first occurrence of each element of the second list from the first one. 
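// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `difference`, described above. Each element of the second list removes at
// most one matching occurrence from the first list.
test difference_removes_one_occurrence_per_element() {
  difference([1, 1, 1], [1]) == [1, 1]
}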
-/// -/// ``` -/// list.difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] -/// list.difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] -/// list.difference([1, 2, 3], []) == [1, 2, 3] -/// ``` -pub fn difference(self: List, with: List) -> List { - when with is { - [] -> self - [x, ..xs] -> difference(delete(self, x), xs) - } -} - -test difference_1() { - difference(["h", "e", "l", "l", "o"], ["l", "e", "l"]) == ["h", "o"] -} - -test difference_2() { - difference([1, 2, 3, 4, 5], [1, 1, 2]) == [3, 4, 5] -} - -test difference_3() { - difference([1, 2, 3], []) == [1, 2, 3] -} - -test difference_4() { - difference([], [1, 2, 3]) == [] -} - -/// Combine two lists together. -/// -/// Note: if one list is longer, the extra elements are dropped. -/// -/// ```aiken -/// list.zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] -/// ``` -pub fn zip(self: List, bs: List) -> List<(a, b)> { - when self is { - [] -> [] - [x, ..xs] -> - when bs is { - [] -> [] - [y, ..ys] -> [(x, y), ..zip(xs, ys)] - } - } -} - -test zip_1() { - zip([], [1, 2, 3]) == [] -} - -test zip_2() { - zip([1, 2, 3], []) == [] -} - -test zip_3() { - zip([1, 2], ["a", "b", "c"]) == [(1, "a"), (2, "b")] -} - -// ## Transforming - -/// Reduce a list from left to right. -/// -/// ```aiken -/// list.foldl([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -/// list.foldl([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [3, 2, 1] -/// ``` -pub fn foldl(self: List, zero: b, with: fn(a, b) -> b) -> b { - when self is { - [] -> zero - [x, ..xs] -> foldl(xs, with(x, zero), with) - } -} - -type Fold2 = - fn(a, b) -> result - -pub fn foldl2( - self: List, - zero_a: a, - zero_b: b, - with: fn(elem, a, b, Fold2) -> result, - return: Fold2, -) -> result { - do_foldl2(self, with, return)(zero_a, zero_b) -} - -fn do_foldl2( - self: List, - with: fn(elem, a, b, Fold2) -> result, - return: Fold2, -) -> Fold2 { - when self is { - [] -> return - [x, ..xs] -> do_foldl2(xs, with, fn(a, b) { with(x, a, b, return) }) - } -} - -test foldl2_optimized() { - let - len, - sum, - <- - foldl2( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - 0, - 0, - fn(n, len, sum, return) { return(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl2_classic() { - let (len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - (0, 0), - fn(n, (len, sum)) { (len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -type Foo { - Foo(Int, Int) -} - -test foldl2_pair() { - let Pair(len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - Pair(0, 0), - fn(n, Pair(len, sum)) { Pair(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl2_foo() { - let Foo(len, sum) = - foldl( - [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], - Foo(0, 0), - fn(n, Foo(len, sum)) { Foo(len + 1, sum + n) }, - ) - - and { - len == 10, - sum == 55, - } -} - -test foldl_1() { - foldl([], 0, fn(_, _) { 1 }) == 0 -} - -test foldl_2() { - foldl([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 -} - -test foldl_3() { - foldl([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [4, 3, 2, 1] -} - -/// Reduce a list from right to left. 
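// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// contrasting `foldl` with `foldr`, described above. The fold direction matters
// for non-commutative operations such as subtraction.
test foldl_vs_foldr_direction() {
  and {
    foldl([1, 2, 3], 0, fn(x, acc) { acc - x }) == -6,
    foldr([1, 2, 3], 0, fn(x, acc) { x - acc }) == 2,
  }
}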
-/// -/// ```aiken -/// list.foldr([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -/// list.foldr([1, 2, 3], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3] -/// ``` -pub fn foldr(self: List, zero: b, with: fn(a, b) -> b) -> b { - when self is { - [] -> zero - [x, ..xs] -> with(x, foldr(xs, zero, with)) - } -} - -test foldr_1() { - foldr([1, 2, 3, 4, 5], 0, fn(n, total) { n + total }) == 15 -} - -test foldr_2() { - foldr( - [1, 2, 3], - "", - fn(n, _str) { - if builtin.mod_integer(n, 2) == 0 { - "foo" - } else { - "bar" - } - }, - ) == "bar" -} - -test foldr_3() { - foldr([1, 2, 3, 4], [], fn(x, xs) { [x, ..xs] }) == [1, 2, 3, 4] -} - -/// Like [`foldr`](#foldr), but also provides the position (0-based) of the elements when iterating. -/// -/// ```aiken -/// let group = fn(i, x, xs) { [(i, x), ..xs] } -/// list.indexed_foldr(["a", "b", "c"], [], group) == [ -/// (0, "a"), -/// (1, "b"), -/// (2, "c") -/// ] -/// ``` -pub fn indexed_foldr( - self: List, - zero: result, - with: fn(Int, a, result) -> result, -) -> result { - do_indexed_foldr(0, self, zero, with) -} - -fn do_indexed_foldr( - n: Int, - self: List, - zero: result, - with: fn(Int, a, result) -> result, -) -> result { - when self is { - [] -> zero - [x, ..xs] -> with(n, x, do_indexed_foldr(n + 1, xs, zero, with)) - } -} - -test indexed_foldr_1() { - indexed_foldr([], 0, fn(i, x, xs) { i + x + xs }) == 0 -} - -test indexed_foldr_2() { - let letters = ["a", "b", "c"] - indexed_foldr(letters, [], fn(i, x, xs) { [(i, x), ..xs] }) == [ - (0, "a"), (1, "b"), (2, "c"), - ] -} - -/// Reduce a list from left to right using the accumulator as left operand. -/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. -/// -/// ```aiken -/// list.reduce([#[1], #[2], #[3]], #[0], bytearray.concat) == #[0, 1, 2, 3] -/// list.reduce([True, False, True], False, fn(b, a) { or { b, a } }) == True -/// ``` -pub fn reduce(self: List, zero: b, with: fn(b, a) -> b) -> b { - foldl(self, zero, flip(with)) -} - -test reduce_1() { - reduce([], 0, fn(n, total) { n + total }) == 0 -} - -test reduce_2() { - reduce([1, 2, 3], 0, fn(n, total) { n + total }) == 6 -} - -test reduce_3() { - reduce([True, False, True], False, fn(left, right) { left || right }) == True -} - -test reduce_4() { - reduce( - [#[1], #[2], #[3]], - #[9], - fn(left, right) { bytearray.concat(left, right) }, - ) == #[9, 1, 2, 3] -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak deleted file mode 100644 index 01bfe763..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/collection/pairs.ak +++ /dev/null @@ -1,833 +0,0 @@ -//// A module for working with associative lists (a.k.a `Pairs`). -//// -//// While any function that works on `List` also work on `Pairs`, this module provides some extra helpers -//// that are specifically tailored to working with associative lists. Fundamentally, a `Pairs` is -//// a type-alias to `List>`. -//// -//// > [!CAUTION] -//// > -//// > Unlike dictionnaries (a.k.a. [`Dict`](./dict.html#Dict), associative lists make no assumption -//// > about the ordering of elements within the list. As a result, lookup -//// > functions do traverse the entire list when invoked. They are also not _sets_, -//// > and thus allow for duplicate keys. 
This is reflected in the functions used -//// > to interact with them. - -use aiken/builtin -use aiken/primitive/bytearray - -// ## Inspecting - -/// Get all values in the alist associated with a given key. -/// -/// ```aiken -/// pairs.get_all([], "a") == [] -/// pairs.get_all([Pair("a", 1)], "a") == [1] -/// pairs.get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] -/// pairs.get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] -/// ``` -pub fn get_all(self: Pairs, key k: key) -> List { - when self is { - [] -> [] - [Pair(k2, v), ..rest] -> - if k == k2 { - [v, ..get_all(rest, k)] - } else { - get_all(rest, k) - } - } -} - -test get_all_1() { - get_all([], "a") == [] -} - -test get_all_2() { - get_all([Pair("a", 1)], "a") == [1] -} - -test get_all_3() { - get_all([Pair("a", 1), Pair("b", 2)], "a") == [1] -} - -test get_all_4() { - get_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [1, 3] -} - -test get_all_5() { - get_all([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == [] -} - -/// Get the value in the alist by its key. -/// If multiple values with the same key exist, only the first one is returned. -/// -/// ```aiken -/// pairs.get_first([], "a") == None -/// pairs.get_first([Pair("a", 1)], "a") == Some(1) -/// pairs.get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -/// pairs.get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) -/// ``` -pub fn get_first(self: Pairs, key k: key) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if k == k2 { - Some(v) - } else { - get_first(rest, k) - } - } -} - -test get_first_1() { - get_first([], "a") == None -} - -test get_first_2() { - get_first([Pair("a", 1)], "a") == Some(1) -} - -test get_first_3() { - get_first([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -} - -test get_first_4() { - get_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(1) -} - -test get_first_5() { - get_first([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None -} - -/// Get the value in the alist by its key. -/// If multiple values with the same key exist, only the last one is returned. -/// -/// ```aiken -/// pairs.get_last([], "a") == None -/// pairs.get_last([Pair("a", 1)], "a") == Some(1) -/// pairs.get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -/// pairs.get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) -/// ``` -pub fn get_last(self: Pairs, key k: key) -> Option { - when self is { - [] -> None - [Pair(k2, v), ..rest] -> - if k == k2 { - when get_last(rest, k) is { - None -> Some(v) - some -> some - } - } else { - get_last(rest, k) - } - } -} - -test get_last_1() { - get_last([], "a") == None -} - -test get_last_2() { - get_last([Pair("a", 1)], "a") == Some(1) -} - -test get_last_3() { - get_last([Pair("a", 1), Pair("b", 2)], "a") == Some(1) -} - -test get_last_4() { - get_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == Some(3) -} - -test get_last_5() { - get_last([Pair("a", 1), Pair("b", 2), Pair("c", 3)], "d") == None -} - -/// Finds all keys in the alist associated with a given value. 
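// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `find_all`, described above. Because associative lists may contain
// duplicate keys, the same key can be returned more than once.
test find_all_returns_duplicate_keys() {
  find_all([Pair("a", 1), Pair("b", 2), Pair("a", 1)], 1) == ["a", "a"]
}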
-/// -/// ```aiken -/// pairs.find_all([], 1) == [] -/// pairs.find_all([Pair("a", 1)], 1) == ["a"] -/// pairs.find_all([Pair("a", 1), Pair("b", 2)], 1) == ["a"] -/// pairs.find_all([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == ["a", "c"] -/// ``` -pub fn find_all(self: Pairs, v: value) -> List { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if v == v2 { - [k2, ..find_all(rest, v)] - } else { - find_all(rest, v) - } - } -} - -test find_all_1() { - find_all([], "a") == [] -} - -test find_all_2() { - find_all([Pair("a", 14)], 14) == ["a"] -} - -test find_all_3() { - find_all([Pair("a", 14)], 42) == [] -} - -test find_all_4() { - find_all([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == ["a", "c"] -} - -/// Finds the first key in the alist associated with a given value, if any. -/// -/// ```aiken -/// pairs.find_first([], 1) == None -/// pairs.find_first([Pair("a", 1)], 1) == Some("a") -/// pairs.find_first([Pair("a", 1), Pair("b", 2)], 1) == Some("a") -/// pairs.find_first([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("a") -/// ``` -pub fn find_first(self: Pairs, v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - Some(k2) - } else { - find_first(rest, v) - } - } -} - -test find_first_1() { - find_first([], "a") == None -} - -test find_first_2() { - find_first([Pair("a", 14)], 14) == Some("a") -} - -test find_first_3() { - find_first([Pair("a", 14)], 42) == None -} - -test find_first_4() { - find_first([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("a") -} - -/// Finds the last key in the alist associated with a given value, if any. -/// -/// ```aiken -/// pairs.find_last([], 1) == None -/// pairs.find_last([Pair("a", 1)], 1) == Some("a") -/// pairs.find_last([Pair("a", 1), Pair("b", 2)], 1) == Some("a") -/// pairs.find_last([Pair("a", 1), Pair("b", 2), Pair("c", 1)], 1) == Some("c") -/// ``` -pub fn find_last(self: Pairs, v: value) -> Option { - when self is { - [] -> None - [Pair(k2, v2), ..rest] -> - if v == v2 { - when find_last(rest, v) is { - None -> Some(k2) - some -> some - } - } else { - find_last(rest, v) - } - } -} - -test find_last_1() { - find_last([], "a") == None -} - -test find_last_2() { - find_last([Pair("a", 14)], 14) == Some("a") -} - -test find_last_3() { - find_last([Pair("a", 14)], 42) == None -} - -test find_last_4() { - find_last([Pair("a", 14), Pair("b", 42), Pair("c", 14)], 14) == Some("c") -} - -/// Check if a key exists in the pairs. -/// -/// ```aiken -/// pairs.has_key([], "a") == False -/// pairs.has_key([Pair("a", 1)], "a") == True -/// pairs.has_key([Pair("a", 1), Pair("b", 2)], "a") == True -/// pairs.has_key([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == True -/// ``` -pub fn has_key(self: Pairs, k: key) -> Bool { - when self is { - [] -> False - // || is lazy so this is fine - [Pair(k2, _), ..rest] -> k == k2 || has_key(rest, k) - } -} - -test has_key_1() { - !has_key([], "a") -} - -test has_key_2() { - has_key([Pair("a", 14)], "a") -} - -test has_key_3() { - !has_key([Pair("a", 14)], "b") -} - -test has_key_4() { - has_key([Pair("a", 14), Pair("b", 42)], "b") -} - -test has_key_5() { - has_key([Pair("a", 14), Pair("b", 42), Pair("a", 42)], "a") -} - -/// Extract all the keys present in a given `Pairs`. 
-/// -/// ```aiken -/// pairs.keys([]) == [] -/// pairs.keys([Pair("a", 1)]) == ["a"] -/// pairs.keys([Pair("a", 1), Pair("b", 2)]) == ["a", "b"] -/// pairs.keys([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == ["a", "b", "a"] -/// ``` -pub fn keys(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(k, _), ..rest] -> [k, ..keys(rest)] - } -} - -test keys_1() { - keys([]) == [] -} - -test keys_2() { - keys([Pair("a", 0)]) == ["a"] -} - -test keys_3() { - keys([Pair("a", 0), Pair("b", 0)]) == ["a", "b"] -} - -/// Extract all the values present in a given `Pairs`. -/// -/// ```aiken -/// pairs.values([]) == [] -/// pairs.values([Pair("a", 1)]) == [1] -/// pairs.values([Pair("a", 1), Pair("b", 2)]) == [1, 2] -/// pairs.values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] -/// ``` -pub fn values(self: Pairs) -> List { - when self is { - [] -> [] - [Pair(_, v), ..rest] -> [v, ..values(rest)] - } -} - -test values_1() { - values([]) == [] -} - -test values_2() { - values([Pair("a", 1)]) == [1] -} - -test values_3() { - values([Pair("a", 1), Pair("b", 2)]) == [1, 2] -} - -test values_4() { - values([Pair("a", 1), Pair("b", 2), Pair("a", 3)]) == [1, 2, 3] -} - -// ## Modifying - -/// Remove all key-value pairs matching the key from the Pairs. If the key is not found, no changes are made. -/// -/// ```aiken -/// pairs.delete_all([], "a") == [] -/// pairs.delete_all([Pair("a", 1)], "a") == [] -/// pairs.delete_all([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_all([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2)] -/// ``` -pub fn delete_all(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - delete_all(rest, k) - } else { - [Pair(k2, v2), ..delete_all(rest, k)] - } - } -} - -test delete_all_1() { - delete_all([], "a") == [] -} - -test delete_all_2() { - delete_all([Pair("a", 14)], "a") == [] -} - -test delete_all_3() { - let fixture = [Pair("a", 14)] - delete_all(fixture, "b") == fixture -} - -test delete_all_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_all(fixture, "a") == [Pair("b", 2)] -} - -/// Remove a single key-value pair from the `Pairs`. If the key is not found, no changes are made. -/// Duplicate keys are not deleted. Only the **first** key found is deleted. -/// -/// ```aiken -/// pairs.delete_first([], "a") == [] -/// pairs.delete_first([Pair("a", 1)], "a") == [] -/// pairs.delete_first([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_first([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("b", 2), Pair("a", 3)] -/// ``` -pub fn delete_first(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - rest - } else { - [Pair(k2, v2), ..delete_first(rest, k)] - } - } -} - -test delete_first_1() { - delete_first([], "a") == [] -} - -test delete_first_2() { - delete_first([Pair("a", 14)], "a") == [] -} - -test delete_first_3() { - let fixture = [Pair("a", 14)] - delete_first(fixture, "b") == fixture -} - -test delete_first_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_first(fixture, "a") == [Pair("b", 2), Pair("a", 3)] -} - -/// Remove a single key-value pair from the Pairs. If the key is not found, no changes are made. -/// Duplicate keys are not deleted. Only the **last** key found is deleted. 
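// Illustrative sketch (not from the upstream stdlib): a hypothetical extra test
// for `delete_last`, described above. Earlier occurrences of the key are left
// untouched; only the last one is removed.
test delete_last_keeps_earlier_duplicates() {
  let fixture = [Pair("a", 1), Pair("a", 2), Pair("b", 3)]
  delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 3)]
}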
-/// -/// ```aiken -/// pairs.delete_last([], "a") == [] -/// pairs.delete_last([Pair("a", 1)], "a") == [] -/// pairs.delete_last([Pair("a", 1), Pair("b", 2)], "a") == [Pair("b", 2)] -/// pairs.delete_last([Pair("a", 1), Pair("b", 2), Pair("a", 3)], "a") == [Pair("a", 1), Pair("b", 2)] -/// ``` -pub fn delete_last(self: Pairs, key k: key) -> Pairs { - when self is { - [] -> [] - [Pair(k2, v2), ..rest] -> - if k == k2 { - let tail = delete_last(rest, k) - if tail == rest { - rest - } else { - [Pair(k2, v2), ..tail] - } - } else { - [Pair(k2, v2), ..delete_last(rest, k)] - } - } -} - -test delete_last_1() { - delete_last([], "a") == [] -} - -test delete_last_2() { - delete_last([Pair("a", 14)], "a") == [] -} - -test delete_last_3() { - let fixture = [Pair("a", 14)] - delete_last(fixture, "b") == fixture -} - -test delete_last_4() { - let fixture = [Pair("a", 1), Pair("b", 2), Pair("a", 3)] - delete_last(fixture, "a") == [Pair("a", 1), Pair("b", 2)] -} - -/// Insert a value in the `Pairs` at a given key. If the key already exists, -/// the value is added in front. -/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. -/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let result = -/// [] -/// |> pairs.insert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) -/// |> pairs.insert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) -/// |> pairs.insert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) -/// -/// result == [Pair("bar", 2), Pair("foo", 3), Pair("foo", 1)] -/// ``` -pub fn insert_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..self] - } else { - [Pair(k2, v2), ..insert_by_ascending_key(rest, k, v, compare)] - } - } - } -} - -test insert_by_ascending_key_1() { - let m = - [] - |> insert_by_ascending_key("foo", 42, bytearray.compare) - |> insert_by_ascending_key("foo", 14, bytearray.compare) - - m == [Pair("foo", 14), Pair("foo", 42)] -} - -test insert_by_ascending_key_2() { - let m = - [] - |> insert_by_ascending_key("foo", 42, bytearray.compare) - |> insert_by_ascending_key("bar", 14, bytearray.compare) - |> insert_by_ascending_key("baz", 1337, bytearray.compare) - - m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] -} - -/// Like [`insert_by_ascending_key`](#insert_by_ascending_key) but specifies -/// how to combine two values on a key conflict. -/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. 
-/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let add_integer = fn(x, y) { x + y } -/// -/// let result = -/// [] -/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare, with: add_integer) -/// |> pairs.insert_with_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare, with: add_integer) -/// |> pairs.insert_with_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare, with: add_integer) -/// -/// result == [Pair("bar", 2), Pair("foo", 4)] -/// ``` -pub fn insert_with_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, - with: fn(value, value) -> value, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, with(v, v2)), ..rest] - } else { - [ - Pair(k2, v2), - ..insert_with_by_ascending_key(rest, k, v, compare, with) - ] - } - } - } -} - -test insert_with_by_ascending_key_1() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let m = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 42, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 14, - compare_un_b_data, - builtin.add_integer, - ) - - m == [Pair("foo" |> builtin.b_data, 56)] -} - -test insert_with_by_ascending_key_2() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let m = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 42, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "bar" |> builtin.b_data, - 14, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "baz" |> builtin.b_data, - 1337, - compare_un_b_data, - builtin.add_integer, - ) - - m == [ - Pair("bar" |> builtin.b_data, 14), - Pair("baz" |> builtin.b_data, 1337), - Pair("foo" |> builtin.b_data, 42), - ] -} - -test insert_with_by_ascending_key_3() { - let compare_un_b_data = - fn(l, r) { - bytearray.compare(l |> builtin.un_b_data, r |> builtin.un_b_data) - } - - let result = - [] - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 1, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "bar" |> builtin.b_data, - 2, - compare_un_b_data, - builtin.add_integer, - ) - |> insert_with_by_ascending_key( - "foo" |> builtin.b_data, - 3, - compare_un_b_data, - builtin.add_integer, - ) - - result == [Pair("bar" |> builtin.b_data, 2), Pair("foo" |> builtin.b_data, 4)] -} - -/// Apply a function to all key-value pairs in a alist, replacing the values. -/// -/// ```aiken -/// let fixture = [Pair("a", 100), Pair("b", 200)] -/// -/// pairs.map(fixture, fn(_k, v) { v * 2 }) == [Pair("a", 200), Pair("b", 400)] -/// ``` -pub fn map( - self: Pairs, - with: fn(key, value) -> result, -) -> Pairs { - when self is { - [] -> [] - [Pair(k, v), ..rest] -> [Pair(k, with(k, v)), ..map(rest, with)] - } -} - -test map_1() { - let fixture = [Pair("a", 1), Pair("b", 2)] - - map(fixture, with: fn(k, _) { k }) == [Pair("a", "a"), Pair("b", "b")] -} - -test map_2() { - let fixture = [Pair("a", 1), Pair("b", 2)] - - map(fixture, with: fn(_, v) { v + 1 }) == [Pair("a", 2), Pair("b", 3)] -} - -/// Insert a value in the `Pairs` at a given key. If the key already exists, -/// its value is replaced. 
-/// -/// > [!CAUTION] -/// > The list is only traversed up to the given key and the traversal -/// > stops as soon as a higher key is encountered. Said differently, the list -/// > is assumed to **be ordered by ascending keys**! If it is not, expect the -/// > unexpected. -/// -/// ```aiken -/// use aiken/primitive/bytearray -/// -/// let result = -/// [] -/// |> pairs.repsert_by_ascending_key(key: "foo", value: 1, compare: bytearray.compare) -/// |> pairs.repsert_by_ascending_key(key: "bar", value: 2, compare: bytearray.compare) -/// |> pairs.repsert_by_ascending_key(key: "foo", value: 3, compare: bytearray.compare) -/// -/// result == [Pair("bar", 2), Pair("foo", 3)] -/// ``` -pub fn repsert_by_ascending_key( - self: Pairs, - key k: key, - value v: value, - compare: fn(key, key) -> Ordering, -) -> Pairs { - when self is { - [] -> [Pair(k, v)] - [Pair(k2, v2), ..rest] -> - if compare(k, k2) == Less { - [Pair(k, v), ..self] - } else { - if k == k2 { - [Pair(k, v), ..rest] - } else { - [Pair(k2, v2), ..repsert_by_ascending_key(rest, k, v, compare)] - } - } - } -} - -test repsert_by_ascending_key_1() { - let m = - [] - |> repsert_by_ascending_key("foo", 42, bytearray.compare) - |> repsert_by_ascending_key("foo", 14, bytearray.compare) - - m == [Pair("foo", 14)] -} - -test repsert_by_ascending_key_2() { - let m = - [] - |> repsert_by_ascending_key("foo", 42, bytearray.compare) - |> repsert_by_ascending_key("bar", 14, bytearray.compare) - |> repsert_by_ascending_key("baz", 1337, bytearray.compare) - - m == [Pair("bar", 14), Pair("baz", 1337), Pair("foo", 42)] -} - -// ## Transforming - -/// Fold over the key-value pairs in a pairs. The fold direction follows keys -/// in ascending order and is done from left-to-right. -/// -/// ```aiken -/// let fixture = [ -/// Pair(1, 100), -/// Pair(2, 200), -/// Pair(3, 300), -/// ] -/// -/// pairs.foldl(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -/// ``` -pub fn foldl( - self: Pairs, - zero: result, - with: fn(key, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> foldl(rest, with(k, v, zero), with) - } -} - -test foldl_1() { - foldl([], 14, fn(_, _, _) { 42 }) == 14 -} - -test foldl_2() { - foldl( - [Pair("a", 42), Pair("b", 14)], - zero: 0, - with: fn(_, v, total) { v + total }, - ) == 56 -} - -/// Fold over the key-value pairs in a Pairs. The fold direction follows the -/// order of elements in the Pairs and is done from right-to-left. 
-/// -/// ```aiken -/// let fixture = [ -/// Pair(1, 100), -/// Pair(2, 200), -/// Pair(3, 300), -/// ] -/// -/// pairs.foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -/// ``` -pub fn foldr( - self: Pairs, - zero: result, - with: fn(key, value, result) -> result, -) -> result { - when self is { - [] -> zero - [Pair(k, v), ..rest] -> with(k, v, foldr(rest, zero, with)) - } -} - -test foldr_1() { - foldr([], 14, fn(_, _, _) { 42 }) == 14 -} - -test foldr_2() { - foldr( - [Pair("a", 42), Pair("b", 14)], - zero: 0, - with: fn(_, v, total) { v + total }, - ) == 56 -} - -test foldr_3() { - let fixture = [Pair(1, 100), Pair(2, 200), Pair(3, 300)] - - foldr(fixture, 0, fn(k, v, result) { k * v + result }) == 1400 -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak deleted file mode 100644 index 46a7dda5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto.ak +++ /dev/null @@ -1,147 +0,0 @@ -use aiken/builtin - -pub type VerificationKey = - ByteArray - -pub type VerificationKeyHash = - Hash - -pub type Script = - ByteArray - -pub type ScriptHash = - Hash - -pub type Signature = - ByteArray - -pub type DataHash = - Hash - -/// A `Hash` is nothing more than a `ByteArray`, but it carries extra -/// information for readability. -/// -/// On-chain, any hash digest value is represented as a plain 'ByteArray'. -/// Though in practice, hashes come from different sources and have -/// different semantics. -/// -/// Hence, while this type-alias doesn't provide any strong type-guarantees, -/// it helps writing functions signatures with more meaningful types than mere -/// 'ByteArray'. -/// -/// Compare for example: -/// -/// ```aiken -/// pub type Credential { -/// VerificationKey(ByteArray) -/// Script(ByteArray) -/// } -/// ``` -/// -/// with -/// -/// ```aiken -/// pub type Credential { -/// VerificationKey(Hash) -/// Script(Hash) -/// } -/// ``` -/// -/// Both are strictly equivalent, but the second reads much better. -pub type Hash = - ByteArray - -// ## Hashing - -/// A blake2b-224 hash algorithm. -/// -/// Typically used for: -/// -/// - [`Credential`](../cardano/address.html#Credential) -/// - [`PolicyId`](../cardano/assets.html#PolicyId) -/// -/// Note: there's no function to calculate blake2b-224 hash digests on-chain. -pub opaque type Blake2b_224 { - Blake2b_224 -} - -/// Compute the blake2b-224 hash digest (28 bytes) of some data. -pub fn blake2b_224(bytes: ByteArray) -> Hash { - builtin.blake2b_224(bytes) -} - -/// A blake2b-256 hash algorithm. -/// -/// Typically used for: -/// -/// - [`TransactionId`](../cardano/transaction.html#TransactionId) -pub opaque type Blake2b_256 { - Blake2b_256 -} - -/// Compute the blake2b-256 hash digest (32 bytes) of some data. -pub fn blake2b_256(bytes: ByteArray) -> Hash { - builtin.blake2b_256(bytes) -} - -/// A Keccak-256 hash algorithm. -pub opaque type Keccak_256 { - Keccak_256 -} - -/// Compute the keccak-256 hash digest (32 bytes) of some data. -pub fn keccak_256(bytes: ByteArray) -> Hash { - builtin.keccak_256(bytes) -} - -/// A SHA2-256 hash algorithm. -pub opaque type Sha2_256 { - Sha2_256 -} - -/// Compute the sha2-256 hash digest (32 bytes) of some data. 
-pub fn sha2_256(bytes: ByteArray) -> Hash { - builtin.sha2_256(bytes) -} - -/// A SHA3-256 hash algorithm. -pub opaque type Sha3_256 { - Sha3_256 -} - -/// Compute the sha3-256 hash digest (32 bytes) of some data. -pub fn sha3_256(bytes: ByteArray) -> Hash { - builtin.sha3_256(bytes) -} - -// ## Verifying signatures - -/// Verify an ECDCA signature (over secp256k1) using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_ecdsa_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_ecdsa_secp256k1_signature(key, msg, sig) -} - -/// Verify an Ed25519 signature using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_ed25519_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_ed25519_signature(key, msg, sig) -} - -/// Verify a Schnorr signature (over secp256k1) using the given verification key. -/// Returns `True` when the signature is valid. -pub fn verify_schnorr_signature( - key: VerificationKey, - msg: ByteArray, - sig: Signature, -) -> Bool { - builtin.verify_schnorr_secp256k1_signature(key, msg, sig) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak deleted file mode 100644 index d7b4cc19..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g1.ak +++ /dev/null @@ -1,115 +0,0 @@ -//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G1 group of the curve. -//// -//// The key functionalities provided by this module include: -//// - Defining the generator of the G1 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. -//// - Implementing the additive identity (zero) in the G1 group, which plays a crucial role in elliptic curve arithmetic. -//// - Providing functions to compress and decompress points in the G1 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. -//// - Implementing basic arithmetic operations on the points in the G1 group, such as addition and subtraction. -//// - Enabling the exponentiation of a point in the G1 group with a scalar, which is a fundamental operation in elliptic curve cryptography. -//// - Offering a function to hash arbitrary data to a point in the G1 group, a process important in several cryptographic protocols. -//// -//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G1 group. - -use aiken/builtin -use aiken/crypto/bls12_381/scalar.{Scalar} - -/// The compressed generator of the G1 group of the BLS12-381 curve. -/// This constant represents a fixed base point on the elliptic curve. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. 
-pub const generator: G1Element = - #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" - -test generator_1() { - builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" -} - -/// Represents the additive identity (zero) in the G1 group. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const zero: G1Element = - #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - -test zero_1() { - and { - zero == builtin.bls12_381_g1_scalar_mul(scalar.field_prime, generator), - zero == builtin.bls12_381_g1_scalar_mul( - scalar.field_prime, - #"88c7e388ee58f1db9a24d7098b01d13634298bebf2d159254975bd450cb0d287fcc622eb71edde8b469a8513551baf1f", - ), - zero == builtin.bls12_381_g1_scalar_mul( - scalar.field_prime, - #"a6ac32e625dc30b8d31bacf5f4c89c27b0388b15f57ae10de8d5cec02dd1f113c9a31077be05ab587ca57a88d34deb75", - ), - } -} - -/// Compresses a point in the G1 group into a more compact representation. -/// The compressed representation is a 48-byte string, corresponding to a modified `x` coordinate. -/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. -/// -/// > [!NOTE] -/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): -/// > -/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: -/// > -/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. -/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. -/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. -pub fn compress(point) { - builtin.bls12_381_g1_compress(point) -} - -test compress_1() { - compress( - #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb", - ) == #"97f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb" -} - -/// Decompresses a point in the G1 group from its compressed form. -pub fn decompress(bytes) { - builtin.bls12_381_g1_uncompress(bytes) -} - -pub fn equal(left, right) { - builtin.bls12_381_g1_equal(left, right) -} - -test equal_1() { - equal(generator, generator) -} - -/// Adds two points in the G1 group. -pub fn add(left, right) { - builtin.bls12_381_g1_add(left, right) -} - -/// Subtracts one point in the G1 group from another. -pub fn sub(left, right) { - builtin.bls12_381_g1_add(left, builtin.bls12_381_g1_neg(right)) -} - -test sub_1() { - generator == sub(add(generator, generator), generator) -} - -/// Exponentiates a point in the G1 group with a `scalar`. -/// This operation is equivalent to the repeated addition of the point with itself `e` times. 
-pub fn scale(point, e: Scalar) { - builtin.bls12_381_g1_scalar_mul(scalar.to_int(e), point) -} - -test scale_1() { - expect Some(x) = scalar.new(2) - builtin.bls12_381_g1_add(generator, generator) == scale(generator, x) -} - -/// Hashes arbitrary data to a point in the G1 group. -/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. -pub fn hash_to_group(bytes: ByteArray, domain_separation_tag: ByteArray) { - builtin.bls12_381_g1_hash_to_group(bytes, domain_separation_tag) -} - -test hash_to_group_1() { - hash_to_group("hello", "world") == #"89223b03c629cc6bcbbdccbba46b6679bc6a79db82f2d3bd115899a45a5a38c391587b59d3d1e297f977d1c4ee9e3388" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak deleted file mode 100644 index 7a2013db..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/g2.ak +++ /dev/null @@ -1,124 +0,0 @@ -//// This module is designed for cryptographic operations involving the BLS12-381 elliptic curve, particularly focusing on the G2 group of the curve. -//// -//// The key functionalities provided by this module include: -//// - Defining the generator of the G2 group, which is a fixed base point on the elliptic curve used for various cryptographic computations. -//// - Implementing the additive identity (zero) in the G2 group, which plays a crucial role in elliptic curve arithmetic. -//// - Providing functions to compress and decompress points in the G2 group. Compression reduces the size of the point representation, which is useful for efficient storage and transmission. Decompression restores the original point from its compressed form. -//// - Implementing basic arithmetic operations on the points in the G2 group, such as addition and subtraction. -//// - Enabling the exponentiation of a point in the G2 group with a scalar, which is a fundamental operation in elliptic curve cryptography. -//// - Offering a function to hash arbitrary data to a point in the G2 group, a process important in several cryptographic protocols. -//// -//// This module ensures that all operations respect the properties of the BLS12-381 curve and the mathematical structure of the G2 group. - -use aiken/builtin -use aiken/crypto/bls12_381/scalar.{Scalar} - -/// The compressed generator of the G2 group of the BLS12-381 curve. -/// This constant represents a fixed base point on the elliptic curve. -/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const generator: G2Element = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - -test generator_1() { - builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator) == #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" -} - -/// Represents the additive identity (zero) in the G2 group. 
-/// Note that flat encoded plutus does not allow for the direct usage of BLS12-381 points. -/// More explicit, any points in plutus data or scripts must be decompressed before usage onchain. -pub const zero: G2Element = - #"c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - -test zero_1() { - and { - zero == builtin.bls12_381_g2_scalar_mul(scalar.field_prime, generator), - zero == builtin.bls12_381_g2_scalar_mul( - scalar.field_prime, - #"9964a9ac2ee28a4dab595ff0970d446373bf46701c5d0b29ce8e1ba995d811a1c7b193c928269192c64ba1fbe4b1940207c251e086b452b920bc72e3cebab46ce672b9b088ca620a471d3b888d9737f6abd165319aa457dbf8835e3d34196051", - ), - zero == builtin.bls12_381_g2_scalar_mul( - scalar.field_prime, - #"a900e25cb53cf1eeb1a82c0c83292937c49c97966351273767a204256a7ef6e95aa391404387075d361e7b13ccd694db03aa73ee0e1bd2c3dd735582b99fdf71696de72e4eda18ae99ea45995f1c9605aa0057008ee9a4da604b5716fb4a345b", - ), - } -} - -/// Compresses a point in the G2 group into a more compact representation. -/// The compressed representation is the concatenation of two 48-byte strings, corresponding to a modified and complexified `x` coordinate. -/// The leading most significant 3 bits of this string indicate how to reconstruct the `y` coordinate. -/// -/// > [!NOTE] -/// > More explicitly via [Zcash's spec](https://github.com/supranational/blst#serialization-format): -/// > -/// > The most-significant three bits of a G1 or G2 encoding should be masked away before the coordinate(s) are interpreted. These bits are used to unambiguously represent the underlying element: -/// > -/// > - The most significant bit, when set, indicates that the point is in compressed form. Otherwise, the point is in uncompressed form. -/// > - The second-most significant bit indicates that the point is at infinity. If this bit is set, the remaining bits of the group element's encoding should be set to zero. -/// > - The third-most significant bit is set if (and only if) this point is in compressed form and it is not the point at infinity and its y-coordinate is the lexicographically largest of the two associated with the encoded x-coordinate. -pub fn compress(point) { - builtin.bls12_381_g2_compress(point) -} - -test compress_1() { - let g2 = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - compress(g2) == #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" -} - -/// Decompresses a point in the G2 group from its compressed form. -pub fn decompress(bytes) { - builtin.bls12_381_g2_uncompress(bytes) -} - -test decompress_1() { - let g2 = - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8" - generator == g2 -} - -pub fn equal(left, right) { - builtin.bls12_381_g2_equal(left, right) -} - -test equal_1() { - equal( - generator, - #"93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8", - ) -} - -/// Adds two points in the G2 group. 
-pub fn add(left, right) { - builtin.bls12_381_g2_add(left, right) -} - -/// Subtracts one point in the G2 group from another. -pub fn sub(left, right) { - builtin.bls12_381_g2_add(left, builtin.bls12_381_g2_neg(right)) -} - -test sub_1() { - generator == sub(add(generator, generator), generator) -} - -/// Exponentiates a point in the G2 group with a `scalar`. -/// This operation is equivalent to the repeated addition of the point with itself `e` times. -pub fn scale(point, e: Scalar) { - builtin.bls12_381_g2_scalar_mul(scalar.to_int(e), point) -} - -test scale_1() { - expect Some(x) = scalar.new(2) - builtin.bls12_381_g2_add(generator, generator) == scale(generator, x) -} - -/// Hashes arbitrary data to a point in the G2 group. -/// You can use the `domain_separation_tag` parameter to cryptographically separate different uses of the hash function between applications. -pub fn hash_to_group(bytes, domain_separation_tag) { - builtin.bls12_381_g2_hash_to_group(bytes, domain_separation_tag) -} - -test hash_to_group_1() { - hash_to_group("hello", "world") == #"a18486bba1dc8321f4998ed4268c6df8dfa5618dd5c91595844059d517f8104bf8031d3e766f9c99db1d6f58b201ee9614de92fc08f9e5cc3a6cd814e871857cb6e3924e8a4fa48775116c5f158d58ceda63614d62f6b7bc47db798d656969a5" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak deleted file mode 100644 index cf028ad7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/crypto/bls12_381/scalar.ak +++ /dev/null @@ -1,255 +0,0 @@ -//// This module implements arithmetic operations in the scalar field associated with the BLS12-381 elliptic curve. -//// The scalar field, defined over a prime number `q`, is derived from the order of the subgroup G1. -//// -//// More explicitly, we have the identity: -//// -//// ```aiken -//// builtin.bls12_381_g1_scalar_mul(q, bls12_381_g1_generator) == 1 -//// ``` -//// -//// where, -//// -//// ```aiken -//// q = 52435875175126190479447740508185965837690552500527637822603658699938581184513 -//// ``` -//// -//// This module provides functionality for basic arithmetic operations (addition, subtraction, multiplication, division) within this scalar field. -//// Additionally, it includes advanced operations such as exponentiation and calculation of multiplicative inverses, tailored for cryptographic applications. - -use aiken/builtin - -/// The prime number defining the scalar field of the BLS12-381 curve. -pub const field_prime = - 52435875175126190479447740508185965837690552500527637822603658699938581184513 - -/// Represents the additive identity (zero) in the `Scalar` field. -pub const zero: Scalar = Scalar(0) - -/// Represents the multiplicative identity (one) in the `Scalar` field. -pub const one: Scalar = Scalar(1) - -/// Opaque type representing an element of the finite field `Scalar`. -pub opaque type Scalar { - integer: Int, -} - -// ## Constructing - -/// Constructs a new `Scalar` element from an integer, ensuring it's within the valid range of the field. -/// Returns `None` if the integer is negative or greater than the prime number defining the field. 
-pub fn new(n: Int) -> Option { - if n >= 0 && n < field_prime { - Some(Scalar(n)) - } else { - None - } -} - -test new_1() { - and { - new(-1) == None, - new(field_prime) == None, - new(834884848) == Some(Scalar(834884848)), - } -} - -/// Constructs a new `Scalar` element from a Big-Endian (most-significant bits first) `ByteArray`. -pub fn from_bytearray_big_endian(bytes: ByteArray) -> Option { - new(builtin.bytearray_to_integer(True, bytes)) -} - -test from_bytearray_big_endian_1() { - from_bytearray_big_endian(#"ffff00") == Some(Scalar(16776960)) -} - -/// Constructs a new `Scalar` element from a Little-Endian (least-significant bits first) `ByteArray`. -pub fn from_bytearray_little_endian(bytes: ByteArray) -> Option { - new(builtin.bytearray_to_integer(False, bytes)) -} - -test from_bytearray_little_endian_1() { - from_bytearray_little_endian(#"ffff00") == Some(Scalar(65535)) -} - -// ## Modifying - -/// Exponentiates an `Scalar` element by a non-negative integer exponent, using repeated squaring. -/// Note that this function returns `scalar.zero` for negative exponents. -/// A dedicated builtin function for this is in the making, see CIP 109. -pub fn scale(self: Scalar, e: Int) -> Scalar { - if e < 0 { - zero - } else if e == 0 { - one - } else if e % 2 == 0 { - scale(mul(self, self), e / 2) - } else { - mul(self, scale(mul(self, self), ( e - 1 ) / 2)) - } -} - -test scale_1() { - and { - scale(Scalar(834884848), -1) == zero, - scale(Scalar(834884848), 0) == one, - scale(Scalar(834884848), 1) == Scalar(834884848), - scale(Scalar(834884848), 2) == Scalar(697032709419983104), - scale(Scalar(834884848), 3) == Scalar(581942047655130761945608192), - scale(Scalar(field_prime - 4), 200) == Scalar( - 12843927705572658539565969578937286576443167978938369866871449552629978143484, - ), - } -} - -/// A faster version of `scale` for the case where the exponent is a power of two. -/// That is, the exponent `e = 2^k` for some non-negative integer `k`. Which is used alot in zk-SNARKs. -pub fn scale2(self: Scalar, k: Int) -> Scalar { - if k < 0 { - zero - } else { - do_scale2(self, k) - } -} - -fn do_scale2(self: Scalar, k: Int) -> Scalar { - if k == 0 { - self - } else { - do_scale2(mul(self, self), k - 1) - } -} - -test scale2_1() { - and { - scale2(Scalar(834884848), -1) == zero, - scale2(Scalar(834884848), 0) == scale(Scalar(834884848), 1), - scale2(Scalar(834884848), 1) == scale(Scalar(834884848), 2), - scale2(Scalar(834884848), 2) == scale(Scalar(834884848), 4), - scale2(Scalar(834884848), 3) == scale(Scalar(834884848), 8), - scale2(Scalar(834884848), 4) == scale(Scalar(834884848), 16), - } -} - -// ## Combining - -/// Adds two `Scalar` elements, ensuring the result stays within the finite field range. -pub fn add(left: Scalar, right: Scalar) -> Scalar { - Scalar(( left.integer + right.integer ) % field_prime) -} - -test add_1() { - and { - (add(Scalar(834884848), Scalar(834884848)) == Scalar(1669769696))?, - (add(Scalar(field_prime - 1), Scalar(1)) == Scalar(0))?, - (add(Scalar(3), Scalar(field_prime)) == Scalar(3))?, - } -} - -/// Divides one `Scalar` element by another, returning `None` if the divisor is zero. 
-pub fn div(left: Scalar, right: Scalar) -> Option { - if right == zero { - None - } else { - Some(mul(left, scale(right, field_prime - 2))) - } -} - -test div_1() { - and { - div(Scalar(834884848), Scalar(834884848)) == Some(Scalar(1)), - div(Scalar(834884848), zero) == None, - div(Scalar(field_prime - 1), Scalar(2)) == Some( - Scalar( - 26217937587563095239723870254092982918845276250263818911301829349969290592256, - ), - ), - } -} - -/// Multiplies two `Scalar` elements, with the result constrained within the finite field. -pub fn mul(left: Scalar, right: Scalar) -> Scalar { - Scalar(left.integer * right.integer % field_prime) -} - -test mul_1() { - and { - mul(Scalar(834884848), Scalar(834884848)) == Scalar(697032709419983104), - mul(zero, Scalar(834884848)) == zero, - mul(Scalar(field_prime - 1), Scalar(2)) == Scalar( - 52435875175126190479447740508185965837690552500527637822603658699938581184511, - ), - } -} - -/// Calculates the additive inverse of a `Scalar` element. -pub fn neg(self: Scalar) -> Scalar { - // this is basicly sub(zero, self), but more efficient as it saves one modulo operation - if self.integer == 0 { - self - } else { - Scalar(field_prime - self.integer) - } -} - -test neg_1() { - and { - neg(Scalar(834884848)) == Scalar( - 52435875175126190479447740508185965837690552500527637822603658699937746299665, - ), - neg(zero) == zero, - neg(one) == Scalar(field_prime - 1), - } -} - -/// Calculates the multiplicative inverse of an `Scalar` element, returning `None` if the element is zero. -pub fn recip(self: Scalar) -> Option { - div(one, self) -} - -test recip_1() { - and { - recip(Scalar(834884848)) == Some( - Scalar( - 35891248691642227249400403463796410930702563777316955162085759263735363466421, - ), - ), - recip(zero) == None, - } -} - -/// Subtracts one `Scalar` element from another, with the result wrapped within the finite field range. -pub fn sub(left: Scalar, right: Scalar) -> Scalar { - Scalar(( left.integer - right.integer ) % field_prime) -} - -test sub_1() { - and { - (sub(Scalar(834884848), Scalar(834884848)) == zero)?, - (sub(zero, Scalar(5)) == Scalar(field_prime - 5))?, - } -} - -// ## Transforming - -/// Converts a `Scalar` element back to its integer representation. -pub fn to_int(self: Scalar) -> Int { - self.integer -} - -test to_int_1() { - to_int(Scalar(834884848)) == 834884848 -} - -/// Converts a `Scalar` element to a Big-Endian (most-significant bits first) `ByteArray`. -pub fn to_bytearray_big_endian(self: Scalar, size: Int) -> ByteArray { - builtin.integer_to_bytearray(True, size, self.integer) -} - -/// Converts a `Scalar` element to a Little-Endian (least-significant bits first) `ByteArray`. -pub fn to_bytearray_little_endian(self: Scalar, size: Int) -> ByteArray { - builtin.integer_to_bytearray(False, size, self.integer) -} - -test to_bytearray_1() { - to_bytearray_big_endian(Scalar(16777215), 3) == #"ffffff" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak deleted file mode 100644 index 96179f9b..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/interval.ak +++ /dev/null @@ -1,680 +0,0 @@ -//// In a eUTxO-based blockchain like Cardano, the management of time can be -//// finicky. 
-//// -//// Indeed, in order to maintain a complete determinism in the execution of -//// scripts, it is impossible to introduce a notion of _"current time"_ since -//// the execution would then depend on factor that are external to the -//// transaction itself: the ineluctable stream of time flowing in our universe. -//// -//// Hence, to work around that, we typically define time intervals, which gives -//// window -- a.k.a intervals -- within which the transaction can be executed. -//// From within a script, it isn't possible to know when exactly the script is -//// executed, but we can reason about the interval bounds to validate pieces of -//// logic. - -// TODO: Replace 'Int' with a generic 'a' once we have comparable traits. - -/// A type to represent intervals of values. Interval are inhabited by a type -/// `a` which is useful for non-infinite intervals that have a finite -/// lower-bound and/or upper-bound. -/// -/// This allows to represent all kind of mathematical intervals: -/// -/// ```aiken -/// // [1; 10] -/// let i0: Interval = Interval -/// { lower_bound: -/// IntervalBound { bound_type: Finite(1), is_inclusive: True } -/// , upper_bound: -/// IntervalBound { bound_type: Finite(10), is_inclusive: True } -/// } -/// ``` -/// -/// ```aiken -/// // (20; infinity) -/// let i1: Interval = Interval -/// { lower_bound: -/// IntervalBound { bound_type: Finite(20), is_inclusive: False } -/// , upper_bound: -/// IntervalBound { bound_type: PositiveInfinity, is_inclusive: False } -/// } -/// ``` -pub type Interval { - lower_bound: IntervalBound, - upper_bound: IntervalBound, -} - -/// An interval bound, either inclusive or exclusive. -pub type IntervalBound { - bound_type: IntervalBoundType, - is_inclusive: Bool, -} - -/// A type of interval bound. Where finite, a value of type `a` must be -/// provided. `a` will typically be an `Int`, representing a number of seconds or -/// milliseconds. -pub type IntervalBoundType { - NegativeInfinity - Finite(a) - PositiveInfinity -} - -// ## Constructing - -/// Create an interval that includes all values greater than the given bound. i.e [lower_bound, +INF) -/// -/// ```aiken -/// interval.after(10) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, -/// } -/// ``` -pub fn after(lower_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values after (and not including) the given bound. i.e (lower_bound, +INF) -/// -/// ```aiken -/// interval.entirely_after(10) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: PositiveInfinity, is_inclusive: True }, -/// } -/// ``` -pub fn entirely_after(lower_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: False, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values before (and including) the given bound. 
i.e (-INF, upper_bound] -/// -/// ```aiken -/// interval.before(100) == Interval { -/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, -/// } -/// ``` -pub fn before(upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values before (and not including) the given bound. i.e (-INF, upper_bound) -/// -/// ```aiken -/// interval.entirely_before(10) == Interval { -/// lower_bound: IntervalBound { bound_type: NegativeInfinity, is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// } -/// ``` -pub fn entirely_before(upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: False, - }, - } -} - -/// Create an interval that includes all values between two bounds, including the bounds. i.e. [lower_bound, upper_bound] -/// -/// ```aiken -/// interval.between(10, 100) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: True }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: True }, -/// } -/// ``` -pub fn between(lower_bound: a, upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: True, - }, - } -} - -/// Create an interval that includes all values between two bounds, excluding the bounds. i.e. (lower_bound, upper_bound) -/// -/// ```aiken -/// interval.entirely_between(10, 100) == Interval { -/// lower_bound: IntervalBound { bound_type: Finite(10), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, -/// } -/// ``` -pub fn entirely_between(lower_bound: a, upper_bound: a) -> Interval { - Interval { - lower_bound: IntervalBound { - bound_type: Finite(lower_bound), - is_inclusive: False, - }, - upper_bound: IntervalBound { - bound_type: Finite(upper_bound), - is_inclusive: False, - }, - } -} - -/// Create an empty interval that contains no value. -/// -/// ```aiken -/// interval.contains(empty, 0) == False -/// interval.contains(empty, 1000) == False -/// ``` -pub const empty: Interval = - Interval { - lower_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - } - -/// Create an interval that contains every possible values. i.e. (-INF, +INF) -/// -/// ```aiken -/// interval.contains(everything, 0) == True -/// interval.contains(everything, 1000) == True -/// ``` -pub const everything: Interval = - Interval { - lower_bound: IntervalBound { - bound_type: NegativeInfinity, - is_inclusive: True, - }, - upper_bound: IntervalBound { - bound_type: PositiveInfinity, - is_inclusive: True, - }, - } - -// ## Inspecting - -/// Checks whether an element is contained within the interval. 
-/// -/// ```aiken -/// let iv = -/// Interval { -/// lower_bound: IntervalBound { -/// bound_type: Finite(14), -/// is_inclusive: True -/// }, -/// upper_bound: IntervalBound { -/// bound_type: Finite(42), -/// is_inclusive: False -/// }, -/// } -/// -/// interval.contains(iv, 25) == True -/// interval.contains(iv, 0) == False -/// interval.contains(iv, 14) == True -/// interval.contains(iv, 42) == False -/// ``` -pub fn contains(self: Interval, elem: Int) -> Bool { - let is_greater_than_lower_bound = - when self.lower_bound.bound_type is { - NegativeInfinity -> True - Finite(lower_bound) -> - if self.lower_bound.is_inclusive { - elem >= lower_bound - } else { - elem > lower_bound - } - PositiveInfinity -> False - } - - let is_smaller_than_upper_bound = - when self.upper_bound.bound_type is { - NegativeInfinity -> False - Finite(upper_bound) -> - if self.upper_bound.is_inclusive { - elem <= upper_bound - } else { - elem < upper_bound - } - PositiveInfinity -> True - } - - is_greater_than_lower_bound && is_smaller_than_upper_bound -} - -test contains_1() { - let iv = everything - contains(iv, 14) -} - -test contains_2() { - let iv = entirely_before(15) - contains(iv, 14) -} - -test contains_3() { - let iv = before(14) - contains(iv, 14) -} - -test contains_4() { - let iv = entirely_before(14) - !contains(iv, 14) -} - -test contains_5() { - let iv = entirely_after(13) - contains(iv, 14) -} - -test contains_6() { - let iv = after(14) - contains(iv, 14) -} - -test contains_7() { - let iv = entirely_after(14) - !contains(iv, 14) -} - -test contains_8() { - let iv = between(42, 1337) - !contains(iv, 14) -} - -test contains_9() { - let iv = between(0, 42) - contains(iv, 14) -} - -test contains_10() { - let iv = between(0, 42) - contains(iv, 42) -} - -test contains_11() { - let iv = entirely_between(0, 42) - !contains(iv, 0) -} - -test contains_12() { - let iv = empty - !contains(iv, 14) -} - -/// Tells whether an interval is empty; i.e. that is contains no value. -/// -/// ```aiken -/// let iv1 = interval.empty -/// -/// let iv2 = Interval { -/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// } -/// -/// let iv3 = Interval { -/// lower_bound: IntervalBound { bound_type: Finite(0), is_inclusive: False }, -/// upper_bound: IntervalBound { bound_type: Finite(100), is_inclusive: False }, -/// } -/// -/// interval.is_empty(iv1) == True -/// interval.is_empty(iv2) == True -/// interval.is_empty(iv3) == False -/// -/// // Note: Two empty intervals are not necessarily equal. 
-/// iv1 != iv2 -/// ``` -pub fn is_empty(self: Interval) -> Bool { - let ordering = - compare_bound_type(self.lower_bound.bound_type, self.upper_bound.bound_type) - - when ordering is { - Greater -> True - Equal -> !(self.lower_bound.is_inclusive && self.upper_bound.is_inclusive) - Less -> { - let is_open_interval = - !self.lower_bound.is_inclusive && !self.upper_bound.is_inclusive - if is_open_interval { - when (self.lower_bound.bound_type, self.upper_bound.bound_type) is { - (Finite(lower_bound), Finite(upper_bound)) -> - lower_bound + 1 == upper_bound - _ -> False - } - } else { - False - } - } - } -} - -/// Check whether the interval is entirely after the point "a" -/// -/// ```aiken -/// interval.is_entirely_after(interval.after(10), 5) == True -/// interval.is_entirely_after(interval.after(10), 10) == False -/// interval.is_entirely_after(interval.after(10), 15) == False -/// interval.is_entirely_after(interval.between(10, 20), 30) == False -/// interval.is_entirely_after(interval.between(10, 20), 5) == True -pub fn is_entirely_after(self: Interval, point: Int) -> Bool { - when self.lower_bound.bound_type is { - Finite(low) -> - if self.lower_bound.is_inclusive { - point < low - } else { - point <= low - } - _ -> False - } -} - -test is_entirely_after_1() { - is_entirely_after(after(10), 5) -} - -test is_entirely_after_2() { - !is_entirely_after(after(10), 10) -} - -test is_entirely_after_3() { - !is_entirely_after(after(10), 15) -} - -test is_entirely_after_4() { - !is_entirely_after(between(10, 20), 30) -} - -test is_entirely_after_5() { - is_entirely_after(between(10, 20), 5) -} - -test is_entirely_after_6() { - is_entirely_after(entirely_after(10), 10) -} - -test is_entirely_after_7() { - !is_entirely_after(before(10), 5) -} - -test is_entirely_after_8() { - !is_entirely_after(before(10), 15) -} - -test is_entirely_after_9() { - !is_entirely_after(entirely_before(10), 5) -} - -/// Check whether the interval is entirely before the point "a" -/// -/// ```aiken -/// interval.is_entirely_before(interval.before(10), 15) == True -/// interval.is_entirely_before(interval.before(10), 10) == False -/// interval.is_entirely_before(interval.before(10), 5) == False -/// interval.is_entirely_before(interval.between(10, 20), 30) == True -/// interval.is_entirely_before(interval.between(10, 20), 5) == False -pub fn is_entirely_before(self: Interval, point: Int) -> Bool { - when self.upper_bound.bound_type is { - Finite(hi) -> - if self.upper_bound.is_inclusive { - hi < point - } else { - hi <= point - } - _ -> False - } -} - -test is_entirely_before_1() { - is_entirely_before(before(10), 15) -} - -test is_entirely_before_2() { - !is_entirely_before(before(10), 10) -} - -test is_entirely_before_3() { - !is_entirely_before(before(10), 5) -} - -test is_entirely_before_4() { - is_entirely_before(between(10, 20), 30) -} - -test is_entirely_before_5() { - !is_entirely_before(between(10, 20), 5) -} - -test is_entirely_before_6() { - is_entirely_before(entirely_before(10), 10) -} - -test is_entirely_before_7() { - !is_entirely_before(after(10), 15) -} - -test is_entirely_before_8() { - !is_entirely_before(after(10), 5) -} - -test is_entirely_before_9() { - !is_entirely_before(entirely_after(10), 5) -} - -// ## Combining - -/// Computes the smallest interval containing the two given intervals, if any -/// -/// ```aiken -/// let iv1 = between(0, 10) -/// let iv2 = between(2, 14) -/// hull(iv1, iv2) == between(0, 14) -/// -/// let iv1 = between(5, 10) -/// let iv2 = before(0) -/// hull(iv1, iv2) == 
before(10) -/// -/// let iv1 = entirely_after(0) -/// let iv2 = between(10, 42) -/// hull(iv1, iv2) = entirely_after(0) -/// ``` -pub fn hull(iv1: Interval, iv2: Interval) -> Interval { - Interval { - lower_bound: min(iv1.lower_bound, iv2.lower_bound), - upper_bound: max(iv1.upper_bound, iv2.upper_bound), - } -} - -test hull_1() { - let iv1 = between(0, 10) - let iv2 = between(2, 14) - hull(iv1, iv2) == between(0, 14) -} - -test hull_2() { - let iv1 = between(5, 10) - let iv2 = before(0) - hull(iv1, iv2) == before(10) -} - -test hull_3() { - let iv1 = entirely_after(0) - let iv2 = between(10, 42) - hull(iv1, iv2) == entirely_after(0) -} - -/// Computes the largest interval contains in the two given intervals, if any. -/// -/// ```aiken -/// let iv1 = interval.between(0, 10) -/// let iv2 = interval.between(2, 14) -/// interval.intersection(iv1, iv2) == interval.between(2, 10) -/// -/// let iv1 = interval.entirely_before(10) -/// let iv2 = interval.entirely_after(0) -/// interval.intersection(iv1, iv2) == interval.entirely_between(0, 10) -/// -/// let iv1 = interval.between(0, 1) -/// let iv2 = interval.between(2, 3) -/// interval.intersection(iv1, iv2) |> interval.is_empty -/// ``` -pub fn intersection(iv1: Interval, iv2: Interval) -> Interval { - Interval { - lower_bound: max(iv1.lower_bound, iv2.lower_bound), - upper_bound: min(iv1.upper_bound, iv2.upper_bound), - } -} - -test intersection_1() { - let iv1 = between(0, 10) - let iv2 = between(2, 14) - intersection(iv1, iv2) == between(2, 10) -} - -test intersection_2() { - let iv1 = between(0, 1) - let iv2 = between(1, 2) - intersection(iv1, iv2) == between(1, 1) -} - -test intersection_3() { - let iv1 = between(0, 1) - let iv2 = entirely_between(1, 2) - intersection(iv1, iv2) - |> is_empty -} - -test intersection_4() { - let iv1 = entirely_between(0, 1) - let iv2 = entirely_between(1, 2) - intersection(iv1, iv2) - |> is_empty -} - -test intersection_5() { - let iv1 = between(0, 10) - let iv2 = before(4) - intersection(iv1, iv2) == between(0, 4) -} - -test intersection_6() { - let iv1 = entirely_before(10) - let iv2 = entirely_after(0) - intersection(iv1, iv2) == entirely_between(0, 10) -} - -/// Return the highest bound of the two. -/// -/// ```aiken -/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } -/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } -/// -/// interval.max(ib1, ib2) == ib2 -/// ``` -pub fn max( - left: IntervalBound, - right: IntervalBound, -) -> IntervalBound { - when compare_bound(left, right) is { - Less -> right - Equal -> left - Greater -> left - } -} - -/// Return the smallest bound of the two. 
-/// -/// ```aiken -/// let ib1 = IntervalBound { bound_type: Finite(0), is_inclusive: False } -/// let ib2 = IntervalBound { bound_type: Finite(1), is_inclusive: False } -/// -/// interval.min(ib1, ib2) == ib1 -/// ``` -pub fn min( - left: IntervalBound, - right: IntervalBound, -) -> IntervalBound { - when compare_bound(left, right) is { - Less -> left - Equal -> left - Greater -> right - } -} - -fn compare_bound( - left: IntervalBound, - right: IntervalBound, -) -> Ordering { - when compare_bound_type(left.bound_type, right.bound_type) is { - Less -> Less - Greater -> Greater - Equal -> - if left.is_inclusive == right.is_inclusive { - Equal - } else if left.is_inclusive { - Greater - } else { - Less - } - } -} - -fn compare_bound_type( - left: IntervalBoundType, - right: IntervalBoundType, -) -> Ordering { - when left is { - NegativeInfinity -> - when right is { - NegativeInfinity -> Equal - _ -> Less - } - PositiveInfinity -> - when right is { - PositiveInfinity -> Equal - _ -> Greater - } - Finite(left) -> - when right is { - NegativeInfinity -> Greater - PositiveInfinity -> Less - Finite(right) -> - if left < right { - Less - } else if left == right { - Equal - } else { - Greater - } - } - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak deleted file mode 100644 index dd575e7a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math.ak +++ /dev/null @@ -1,424 +0,0 @@ -//// This module contains some basic Math utilities. Standard arithmetic -//// operations on integers are available through native operators: -//// -//// Operator | Description -//// --- | :--- -//// `+` | Arithmetic sum -//// `-` | Arithmetic difference -//// `/` | Whole division -//// `*` | Arithmetic multiplication -//// `%` | Remainder by whole division -//// -//// Here are a few examples: -//// -//// ```aiken -//// 1 + 1 // 2 -//// 10 - 2 // 8 -//// 40 / 14 // 2 -//// 3 * 4 // 12 -//// 10 % 3 // 1 - -use aiken/builtin - -/// Calculate the absolute value of an integer. -/// -/// ```aiken -/// math.abs(-42) == 42 -/// math.abs(14) == 14 -/// ``` -pub fn abs(self: Int) -> Int { - if self < 0 { - 0 - self - } else { - self - } -} - -test abs_1() { - abs(14) == 14 -} - -test abs_2() { - abs(-42) == 42 -} - -/// Restrict the value of an integer between two min and max bounds -/// -/// ```aiken -/// math.clamp(14, min: 0, max: 10) == 10 -/// ``` -pub fn clamp(self: Int, min: Int, max: Int) -> Int { - if self < min { - min - } else { - if self > max { - max - } else { - self - } - } -} - -test clamp_1() { - clamp(14, min: 0, max: 10) == 10 -} - -test clamp_2() { - clamp(7, min: 0, max: 10) == 7 -} - -test clamp_3() { - clamp(7, min: 10, max: 100) == 10 -} - -/// The greatest common divisor of two integers. -/// -/// ```aiken -/// math.gcd(42, 14) == 14 -/// math.gcd(14, 42) == 14 -/// math.gcd(0, 0) == 0 -/// ``` -pub fn gcd(x: Int, y: Int) -> Int { - abs(do_gcd(x, y)) -} - -fn do_gcd(x: Int, y: Int) -> Int { - when y is { - 0 -> x - _ -> do_gcd(y, x % y) - } -} - -test gcd_test1() { - gcd(10, 300) == 10 -} - -test gcd_test2() { - gcd(-10, 300) == 10 -} - -test gcd_test3() { - gcd(42, 14) == 14 -} - -/// Checks if an integer has a given integer square root x. -/// The check has constant time complexity $O(1)$. 
-/// -/// ```aiken -/// math.is_sqrt(0, 0) -/// math.is_sqrt(25, 5) -/// !math.is_sqrt(25, -5) -/// math.is_sqrt(44203, 210) -/// ``` -pub fn is_sqrt(self: Int, x: Int) -> Bool { - x * x <= self && ( x + 1 ) * ( x + 1 ) > self -} - -test is_sqrt1() { - is_sqrt(44203, 210) -} - -test is_sqrt2() { - is_sqrt(975461057789971041, 987654321) -} - -/// The logarithm in base `b` of an element using integer divisions. -/// -/// ```aiken -/// math.log(10, base: 2) == 3 -/// math.log(42, base: 2) == 5 -/// math.log(42, base: 3) == 3 -/// math.log(5, base: 0) == 0 -/// math.log(4, base: 4) == 1 -/// math.log(4, base: 42) == 0 -/// ``` -pub fn log(self: Int, base: Int) -> Int { - if base <= 0 { - 0 - } else if self == base { - 1 - } else if self < base { - 0 - } else { - 1 + log(self / base, base) - } -} - -test log_10_2() { - log(10, base: 2) == 3 -} - -test log_42_2() { - log(42, base: 2) == 5 -} - -test log_42_3() { - log(42, base: 3) == 3 -} - -test log_5_0() { - log(5, base: 0) == 0 -} - -test log_4_4() { - log(4, base: 4) == 1 -} - -test log_4_43() { - log(4, base: 43) == 0 -} - -/// The integer logarithm in base 2. Faster than [`log`](#log) in this particular case. -/// -/// ```aiken -/// math.log2(1) == 0 -/// math.log2(2) == 1 -/// math.log2(3) == 1 -/// math.log2(4) == 2 -/// math.log2(256) == 8 -/// math.log2(257) == 8 -/// math.log2(511) == 8 -/// math.log2(1025) == 10 -/// ``` -pub fn log2(x: Int) -> Int { - expect x > 0 - let s = builtin.integer_to_bytearray(True, 0, x) - let len = builtin.length_of_bytearray(s) - let b = builtin.index_bytearray(s, 0) - len * 8 - if b < 2 { - 8 - } else if b < 4 { - 7 - } else if b < 8 { - 6 - } else if b < 16 { - 5 - } else if b < 32 { - 4 - } else if b < 64 { - 3 - } else if b < 128 { - 2 - } else { - 1 - } -} - -test log2_matrix() { - and { - log2(1) == 0, - log2(2) == 1, - log2(3) == 1, - log2(4) == 2, - log2(256) == 8, - log2(257) == 8, - log2(511) == 8, - log2(1025) == 10, - } -} - -/// Return the maximum of two integers. -pub fn max(a: Int, b: Int) -> Int { - if a > b { - a - } else { - b - } -} - -test max_1() { - max(0, 0) == 0 -} - -test max_2() { - max(14, 42) == 42 -} - -test max_3() { - max(42, 14) == 42 -} - -/// Return the minimum of two integers. -pub fn min(a: Int, b: Int) -> Int { - if a > b { - b - } else { - a - } -} - -test min_1() { - min(0, 0) == 0 -} - -test min_2() { - min(14, 42) == 14 -} - -test min_3() { - min(42, 14) == 14 -} - -/// Calculates a number to the power of `e` using the exponentiation by -/// squaring method. -/// -/// ```aiken -/// math.pow(3, 5) == 243 -/// math.pow(7, 2) == 49 -/// math.pow(3, -4) == 0 -/// math.pow(0, 0) == 1 -/// math.pow(513, 3) == 135005697 -/// ``` -pub fn pow(self: Int, e: Int) -> Int { - if e < 0 { - 0 - } else if e == 0 { - 1 - } else if e % 2 == 0 { - pow(self * self, e / 2) - } else { - self * pow(self * self, ( e - 1 ) / 2) - } -} - -test pow_3_5() { - pow(3, 5) == 243 -} - -test pow_7_2() { - pow(7, 2) == 49 -} - -test pow_3__4() { - // negative powers round to zero - pow(3, -4) == 0 -} - -test pow_0_0() { - // sorry math - pow(0, 0) == 1 -} - -test pow_513_3() { - pow(513, 3) == 135005697 -} - -test pow_2_4() { - pow(2, 4) == 16 -} - -test pow_2_42() { - pow(2, 42) == 4398046511104 -} - -/// Calculates the power of 2 for a given exponent `e`. Much cheaper than -/// using `pow(2, _)` for small exponents $0 < e < 256$. 
-/// -/// ```aiken -/// math.pow2(-2) == 0 -/// math.pow2(0) == 1 -/// math.pow2(1) == 2 -/// math.pow2(4) == 16 -/// math.pow2(42) == 4398046511104 -/// ``` -pub fn pow2(e: Int) -> Int { - // do_pow2(e, 1) - if e < 8 { - if e < 0 { - 0 - } else { - builtin.index_bytearray(#[1, 2, 4, 8, 16, 32, 64, 128], e) - } - } else if e < 32 { - 256 * pow2(e - 8) - } else { - 4294967296 * pow2(e - 32) - } -} - -test pow2_neg() { - pow2(-2) == 0 -} - -test pow2_0() { - pow2(0) == 1 -} - -test pow2_1() { - pow2(1) == 2 -} - -test pow2_4() { - pow2(4) == 16 -} - -test pow2_42() { - pow2(42) == 4398046511104 -} - -test pow2_256() { - pow2(256) == 115792089237316195423570985008687907853269984665640564039457584007913129639936 -} - -/// Calculates the square root of an integer using the [Babylonian -/// method](https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Babylonian_method). This returns either the exact result or the smallest integer -/// nearest to the square root. -/// -/// Returns `None` for negative values. -/// -/// ```aiken -/// math.sqrt(0) == Some(0) -/// math.sqrt(25) == Some(5) -/// math.sqrt(44203) == Some(210) -/// math.sqrt(-42) == None -/// ``` -/// -/// > [!TIP] -/// > This function can be quite expensive to perform on-chain. Prefer using [`is_sqrt`](#is_sqrt) whenever possible. -pub fn sqrt(self: Int) -> Option { - if self < 0 { - None - } else if self <= 1 { - Some(self) - } else { - Some(sqrt_babylonian(self, self, ( self + 1 ) / 2)) - } -} - -// The basic idea is that if x is an overestimate to the square root of a -// non-negative real number S then S/x will be an underestimate, or vice versa, -// and so the average of these two numbers may reasonably be expected to provide a -// better approximation (though the formal proof of that assertion depends on the -// inequality of arithmetic and geometric means that shows this average is always -// an overestimate of the square root. -fn sqrt_babylonian(self: Int, x: Int, y: Int) -> Int { - if y >= x { - x - } else { - sqrt_babylonian(self, y, ( y + self / y ) / 2) - } -} - -test sqrt1() { - sqrt(0) == Some(0) -} - -test sqrt2() { - sqrt(1) == Some(1) -} - -test sqrt3() { - sqrt(25) == Some(5) -} - -test sqrt4() { - sqrt(44203) == Some(210) -} - -test sqrt5() { - sqrt(975461057789971041) == Some(987654321) -} - -test sqrt6() { - sqrt(-42) == None -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak deleted file mode 100644 index 88fe7ab7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.ak +++ /dev/null @@ -1,871 +0,0 @@ -//// This module implements operations between rational numbers. -//// -//// > [!CAUTION] -//// > Internally, rational aren't automatically reduced as this is **only done on-demand**. -//// > -//// > Thus, for example: -//// > -//// > ```aiken -//// > rational.new(2, 3) != rational.new(4, 6) -//// > ``` -//// > -//// > Comparing rational values should, therefore, only happen after reduction (see [reduce](#reduce)) or via the [compare](#compare) method. - -use aiken/builtin -use aiken/collection/list -use aiken/math -use aiken/option - -/// Opaque type used to ensure the sign of the Rational is managed strictly in the numerator. 
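The deleted TIP above recommends `is_sqrt` over `sqrt` for on-chain use. A hedged sketch of that pattern, where the candidate root is assumed to be supplied by off-chain code (for instance via a redeemer):

```aiken
use aiken/math

// The claimed root is assumed to come from off-chain code; is_sqrt only has
// to verify it, while sqrt would recompute it on-chain.
test verify_supplied_root() {
  let n = 44203
  let claimed_root = 210
  and {
    math.is_sqrt(n, claimed_root),
    math.sqrt(n) == Some(claimed_root),
  }
}
```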
-pub opaque type Rational { - numerator: Int, - denominator: Int, -} - -// ## Constructing - -/// Create a new `Rational` from an `Int`. -/// -/// ```aiken -/// Some(rational.from_int(14)) == rational.new(14, 1) -/// Some(rational.from_int(-5)) == rational.new(-5, 1) -/// Some(rational.from_int(0)) == rational.new(0, 1) -/// ``` -pub fn from_int(numerator: Int) -> Rational { - Rational { numerator, denominator: 1 } -} - -test from_int_1() { - and { - (from_int(14) == ratio(14, 1))?, - (from_int(-5) == ratio(-5, 1))?, - (from_int(0) == ratio(0, 1))?, - } -} - -/// An unsafe constructor for `Rational` values. Assumes that the following invariants are -/// enforced: -/// -/// - the denominator is positive (the sign is managed in the numerator); -/// - the denominator is not null. -/// -/// This function is mainly used as a quick way to construct rationals from literal values. -fn ratio(numerator: Int, denominator: Int) -> Rational { - Rational { numerator, denominator } -} - -/// Make a `Rational` number from the ratio of two integers. -/// -/// Returns `None` when the denominator is null. -/// -/// ```aiken -/// rational.new(14, 42) == Some(r) -/// rational.new(14, 0) == None -/// ``` -pub fn new(numerator: Int, denominator: Int) -> Option { - if denominator == 0 { - None - } else if denominator < 0 { - Some(Rational { numerator: -numerator, denominator: -denominator }) - } else { - Some(Rational { numerator, denominator }) - } -} - -test new_1() { - and { - (new(2, 0) == None)?, - (new(2, 3) == Some(ratio(2, 3)))?, - (new(-2, 3) == Some(ratio(-2, 3)))?, - (new(2, -3) == Some(ratio(-2, 3)))?, - (new(2, 4) == Some(ratio(2, 4)))?, - (new(-2, -3) == Some(ratio(2, 3)))?, - (new(-2, -4) == Some(ratio(2, 4)))?, - } -} - -/// A null `Rational`. -pub const zero: Rational = Rational { numerator: 0, denominator: 1 } - -test zero_1() { - zero == ratio(0, 1) -} - -// ## Inspecting - -/// Get the denominator of a rational value. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.denominator(x) == 3 -/// ``` -pub fn denominator(self: Rational) -> Int { - self.denominator -} - -test denominator_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(-2, 3) - expect Some(z) = new(2, -3) - expect Some(w) = new(-2, -3) - and { - (denominator(x) == 3)?, - (denominator(y) == 3)?, - (denominator(z) == 3)?, - (denominator(w) == 3)?, - } -} - -/// Get the numerator of a rational value. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.numerator(x) == 2 -/// ``` -pub fn numerator(self: Rational) -> Int { - self.numerator -} - -test numerator_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(-2, 3) - expect Some(z) = new(2, -3) - expect Some(w) = new(-2, -3) - - and { - (numerator(x) == 2)?, - (numerator(y) == -2)?, - (numerator(z) == -2)?, - (numerator(w) == 2)?, - } -} - -// ## Modifying - -/// Absolute value of a `Rational`. -/// -/// ```aiken -/// expect Some(x) = rational.new(3, 2) -/// expect Some(y) = rational.new(-3, 2) -/// -/// rational.abs(x) == x -/// rational.abs(y) == x -/// ``` -pub fn abs(self: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = self - Rational { numerator: math.abs(a_n), denominator: a_d } -} - -test abs_examples() { - and { - (abs(ratio(5, 2)) == ratio(5, 2))?, - (abs(ratio(-5, 2)) == ratio(5, 2))?, - (abs(ratio(5, 2)) == abs(ratio(-5, 2)))?, - } -} - -/// Change the sign of a `Rational`. 
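The module header being removed here warns that rationals are not automatically reduced. An illustrative check of that caveat, using only `new`, `reduce` and `compare` from the module above:

```aiken
use aiken/math/rational

// 2/3 and 4/6 are structurally different but semantically equal; reduce or
// compare is needed before comparing.
test equality_needs_reduce_or_compare() {
  expect Some(a) = rational.new(2, 3)
  expect Some(b) = rational.new(4, 6)
  and {
    a != b,
    rational.reduce(b) == a,
    rational.compare(a, b) == Equal,
  }
}
```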
-/// -/// ```aiken -/// expect Some(x) = rational.new(3, 2) -/// expect Some(y) = rational.new(-3, 2) -/// -/// rational.negate(x) == y -/// rational.negate(y) == x -/// ``` -pub fn negate(a: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = a - Rational { numerator: -a_n, denominator: a_d } -} - -test negate_1() { - and { - (negate(ratio(5, 2)) == ratio(-5, 2))?, - (negate(ratio(-5, 2)) == ratio(5, 2))?, - (negate(negate(ratio(5, 2))) == ratio(5, 2))?, - } -} - -/// Reciprocal of a `Rational` number. That is, a new `Rational` where the -/// numerator and denominator have been swapped. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 5) -/// rational.reciprocal(x) == rational.new(5, 2) -/// -/// let y = rational.zero -/// rational.reciprocal(y) == None -/// ``` -pub fn reciprocal(self: Rational) -> Option { - let Rational { numerator: a_n, denominator: a_d } = self - if a_n < 0 { - Some(Rational { numerator: -a_d, denominator: -a_n }) - } else if a_n > 0 { - Some(Rational { numerator: a_d, denominator: a_n }) - } else { - None - } -} - -test reciprocal_1() { - and { - (reciprocal(ratio(5, 2)) == new(2, 5))?, - (reciprocal(ratio(-5, 2)) == new(-2, 5))?, - (reciprocal(ratio(0, 2)) == None)?, - (reciprocal(ratio(2, 3)) == new(3, 2))?, - (reciprocal(ratio(-2, 3)) == new(-3, 2))?, - } -} - -/// Reduce a rational to its irreducible form. This operation makes the -/// numerator and denominator coprime. -/// -/// ```aiken -/// expect Some(x) = rational.new(80, 200) -/// Some(rational.reduce(x)) == rational.new(2, 5) -/// ``` -pub fn reduce(self: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = self - let d = math.gcd(a_n, a_d) - Rational { numerator: a_n / d, denominator: a_d / d } -} - -test reduce_1() { - and { - (reduce(ratio(80, 200)) == ratio(2, 5))?, - (reduce(ratio(-5, 1)) == ratio(-5, 1))?, - (reduce(ratio(0, 3)) == ratio(0, 1))?, - } -} - -// ## Combining - -// ### Arithmetic operations - -/// Addition: sum of two rational values -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.add(x, y)) == rational.new(17, 12) -/// ``` -pub fn add(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_d + b_n * a_d, denominator: a_d * b_d } -} - -test add_1() { - add(ratio(2, 3), ratio(3, 4)) == ratio(17, 12) -} - -test add_2() { - add(ratio(-2, 3), ratio(3, 4)) == ratio(1, 12) -} - -/// Division: quotient of two rational values. Returns `None` when the second -/// value is null. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// rational.div(x, y) == rational.new(8, 9) -/// ``` -pub fn div(left: Rational, right: Rational) -> Option { - reciprocal(right) |> option.map(mul(left, _)) -} - -test div_1() { - div(ratio(2, 3), ratio(3, 4)) == new(8, 9) -} - -test div_2() { - div(ratio(2, 3), ratio(-3, 4)) == new(-8, 9) -} - -/// Multiplication: the product of two rational values. 
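Not part of the diff: a small round-trip identity combining the removed `add`, `sub` and `compare` helpers:

```aiken
use aiken/math/rational

// (x + y) - y is semantically x again, even though the intermediate value is
// not reduced.
test add_then_sub_round_trips() {
  expect Some(x) = rational.new(2, 3)
  expect Some(y) = rational.new(3, 4)
  let z = rational.sub(rational.add(x, y), y)
  rational.compare(z, x) == Equal
}
```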
-/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.mul(x, y)) == rational.new(6, 12) -/// ``` -pub fn mul(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_n, denominator: a_d * b_d } -} - -test mul_1() { - mul(ratio(2, 3), ratio(3, 4)) == ratio(6, 12) -} - -test mul_2() { - mul(ratio(-2, 3), ratio(-3, 4)) == ratio(6, 12) -} - -test mul_3() { - let result = - ratio(2, 5) - |> mul(ratio(1, 8)) - |> mul(ratio(3, 10)) - |> mul(ratio(21, 100)) - |> mul(ratio(3, 5)) - |> mul(ratio(2, 8)) - |> mul(ratio(4, 10)) - |> mul(ratio(22, 100)) - |> reduce - - result == ratio(2079, 50000000) -} - -/// Subtraction: difference of two rational values -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// Some(rational.sub(x, y)) == rational.new(-1, 12) -/// ``` -pub fn sub(left: Rational, right: Rational) -> Rational { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - Rational { numerator: a_n * b_d - b_n * a_d, denominator: a_d * b_d } -} - -test sub_1() { - sub(ratio(2, 3), ratio(3, 4)) == ratio(-1, 12) -} - -test sub_2() { - sub(ratio(2, 3), ratio(-3, 4)) == ratio(17, 12) -} - -test sub_3() { - sub(ratio(-2, 3), ratio(3, 4)) == ratio(-17, 12) -} - -// ### Ordering - -/// Compare two rationals for an ordering. This is safe to use even for -/// non-reduced rationals. -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// expect Some(z) = rational.new(4, 6) -/// -/// compare(x, y) == Less -/// compare(y, x) == Greater -/// compare(x, x) == Equal -/// compare(x, z) == Equal -/// ``` -pub fn compare(left: Rational, right: Rational) -> Ordering { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - - let l = a_n * b_d - let r = b_n * a_d - - if l < r { - Less - } else if l > r { - Greater - } else { - Equal - } -} - -test compare_1() { - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - expect Some(z) = new(4, 6) - and { - compare(x, y) == Less, - compare(y, x) == Greater, - compare(x, x) == Equal, - compare(x, z) == Equal, - } -} - -/// Comparison of two rational values using a chosen heuristic. For example: -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// expect Some(y) = rational.new(3, 4) -/// -/// rational.compare_with(x, >, y) == False -/// rational.compare_with(y, >, x) == True -/// rational.compare_with(x, >, x) == False -/// rational.compare_with(x, >=, x) == True -/// rational.compare_with(x, ==, x) == True -/// rational.compare_with(x, ==, y) == False -/// ``` -pub fn compare_with( - left: Rational, - with: fn(Int, Int) -> Bool, - right: Rational, -) -> Bool { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - with(a_n * b_d, b_n * a_d) -} - -// TODO: Rewrite tests using binary-operator as first-class functions once aiken-lang/aiken#619 is merged. - -test compare_with_eq() { - let eq = - compare_with(_, fn(l, r) { l == r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !eq(x, y)? && !eq(y, x)? && eq(x, x)? 
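Because `compare_with` cross-multiplies numerators and denominators, it is safe on non-reduced values, unlike raw structural equality. A short sketch using the same placeholder-application style as the removed tests:

```aiken
use aiken/math/rational

// compare_with multiplies numerators across denominators, so the equality
// predicate below holds for the non-reduced pair 2/3 and 4/6.
test compare_with_handles_non_reduced_values() {
  let eq = rational.compare_with(_, fn(l, r) { l == r }, _)
  expect Some(x) = rational.new(2, 3)
  expect Some(y) = rational.new(4, 6)
  eq(x, y)
}
```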
-} - -test compare_with_neq() { - let neq = - compare_with(_, fn(l, r) { l != r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - neq(x, y)? && neq(y, x)? && !neq(x, x)? -} - -test compare_with_gte() { - let gte = - compare_with(_, fn(l, r) { l >= r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !gte(x, y)? && gte(y, x)? && gte(x, x)? -} - -test compare_with_gt() { - let gt = - compare_with(_, fn(l, r) { l > r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - !gt(x, y)? && gt(y, x)? && !gt(x, x)? -} - -test compare_with_lte() { - let lte = - compare_with(_, fn(l, r) { l <= r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - lte(x, y)? && !lte(y, x)? && lte(x, x)? -} - -test compare_with_lt() { - let lt = - compare_with(_, fn(l, r) { l < r }, _) - - expect Some(x) = new(2, 3) - expect Some(y) = new(3, 4) - - lt(x, y)? && !lt(y, x)? && !lt(x, x)? -} - -// ### Means - -/// Calculate the arithmetic mean between two `Rational` values. -/// -/// ```aiken -/// let x = rational.from_int(0) -/// let y = rational.from_int(1) -/// let z = rational.from_int(2) -/// -/// expect Some(result) = rational.arithmetic_mean([x, y, z]) -/// -/// rational.compare(result, y) == Equal -/// ``` -pub fn arithmetic_mean(self: List) -> Option { - div(list.foldr(self, zero, add), from_int(list.length(self))) -} - -test arithmetic_mean_1() { - let x = ratio(1, 2) - let y = ratio(1, 2) - expect Some(z) = arithmetic_mean([x, y]) - reduce(z) == ratio(1, 2) -} - -test arithmetic_mean_2() { - let x = ratio(1, 1) - let y = ratio(2, 1) - expect Some(z) = arithmetic_mean([x, y]) - reduce(z) == ratio(3, 2) -} - -test arithmetic_mean_3() { - let xs = - [ - ratio(1, 1), - ratio(2, 1), - ratio(3, 1), - ratio(4, 1), - ratio(5, 1), - ratio(6, 1), - ] - expect Some(z) = arithmetic_mean(xs) - reduce(z) == ratio(7, 2) -} - -/// Calculate the geometric mean between two `Rational` values. This returns -/// either the exact result or the smallest integer nearest to the square root -/// for the numerator and denominator. 
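Illustration only: the removed `arithmetic_mean`, `negate` and `zero` combine as expected, in that a value and its negation average to zero (up to reduction):

```aiken
use aiken/math/rational

// The mean of a value and its negation is zero (up to reduction).
test mean_of_value_and_negation_is_zero() {
  expect Some(x) = rational.new(3, 2)
  let y = rational.negate(x)
  expect Some(m) = rational.arithmetic_mean([x, y])
  rational.compare(m, rational.zero) == Equal
}
```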
-/// -/// ```aiken -/// expect Some(x) = rational.new(1, 3) -/// expect Some(y) = rational.new(1, 6) -/// -/// rational.geometric_mean(x, y) == rational.new(1, 4) -/// ``` -pub fn geometric_mean(left: Rational, right: Rational) -> Option { - let Rational { numerator: a_n, denominator: a_d } = left - let Rational { numerator: b_n, denominator: b_d } = right - when math.sqrt(a_n * b_n) is { - Some(numerator) -> - when math.sqrt(a_d * b_d) is { - Some(denominator) -> Some(Rational { numerator, denominator }) - None -> None - } - None -> None - } -} - -test geometric_mean1() { - expect Some(x) = new(1, 2) - expect Some(y) = new(1, 2) - geometric_mean(x, y) == new(1, 2) -} - -test geometric_mean2() { - expect Some(x) = new(-1, 2) - expect Some(y) = new(1, 2) - geometric_mean(x, y) == None -} - -test geometric_mean3() { - expect Some(x) = new(1, 2) - expect Some(y) = new(-1, 2) - geometric_mean(x, y) == None -} - -test geometric_mean4() { - expect Some(x) = new(1, 3) - expect Some(y) = new(1, 6) - geometric_mean(x, y) == new(1, 4) -} - -test geometric_mean5() { - expect Some(x) = new(67, 2500) - expect Some(y) = new(35331, 1000) - expect Some(yi) = reciprocal(y) - geometric_mean(x, yi) == new(258, 9398) -} - -// ## Transforming - -/// Returns the smallest `Int` not less than a given `Rational` -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.ceil(x) == 1 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.ceil(y) == 4 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.ceil(z) == -4 -/// ``` -pub fn ceil(self: Rational) -> Int { - let Rational { numerator, denominator } = self - if builtin.remainder_integer(numerator, denominator) > 0 { - builtin.quotient_integer(numerator, denominator) + 1 - } else { - builtin.quotient_integer(numerator, denominator) - } -} - -test ceil_1() { - and { - (ceil(ratio(13, 5)) == 3)?, - (ceil(ratio(15, 5)) == 3)?, - (ceil(ratio(16, 5)) == 4)?, - (ceil(ratio(-3, 5)) == 0)?, - (ceil(ratio(-5, 5)) == -1)?, - (ceil(ratio(-14, 3)) == -4)?, - (ceil(ratio(-14, 6)) == -2)?, - (ceil(ratio(44, 14)) == 4)?, - } -} - -/// Returns the greatest `Int` no greater than a given `Rational` -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.floor(x) == 0 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.floor(y) == 3 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.floor(z) == -5 -/// ``` -pub fn floor(self: Rational) -> Int { - let Rational { numerator: a_n, denominator: a_d } = self - a_n / a_d -} - -test floor_1() { - and { - (floor(ratio(5, 2)) == 2)?, - (floor(ratio(5, 3)) == 1)?, - (floor(ratio(5, 4)) == 1)?, - (floor(ratio(5, 5)) == 1)?, - (floor(ratio(5, 6)) == 0)?, - (floor(ratio(8, 3)) == 2)?, - (floor(ratio(-14, 3)) == -5)?, - } -} - -/// Computes the rational number x raised to the power y. Returns `None` for -/// invalid exponentiation. 
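For context, a hedged sketch showing that the removed `floor` and `ceil` bracket a non-integer rational and differ by exactly one; the values come from the deleted doc examples:

```aiken
use aiken/math/rational

// 44/14 is not a whole number, so floor and ceil differ by exactly one.
test floor_and_ceil_bracket_non_integers() {
  expect Some(x) = rational.new(44, 14)
  and {
    rational.floor(x) == 3,
    rational.ceil(x) == 4,
    rational.ceil(x) - rational.floor(x) == 1,
  }
}
```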
-/// -/// ```aiken -/// expect Some(x) = rational.new(50, 2500) -/// rational.reduce(rational.pow(x, 3)) == rational.new(1, 125000) -/// -/// expect Some(x) = rational.new(50, 2500) -/// rational.reduce(rational.pow(x, -3)) == rational.new(125000, 1) -/// ``` -pub fn pow(x: Rational, y: Int) -> Option { - let Rational { numerator: a, denominator: b } = x - - if a == 0 && y <= 0 { - None - } else if y > 0 { - Some(Rational { numerator: math.pow(a, y), denominator: math.pow(b, y) }) - } else if y < 0 { - Some(Rational { numerator: math.pow(b, -y), denominator: math.pow(a, -y) }) - } else { - Some(Rational { numerator: 1, denominator: 1 }) - } -} - -test pow_negative_exponent_non_zero_fraction() { - expect Some(base) = new(50, 2500) - expect Some(calculated_result) = pow(base, -3) - expect Some(expected_result) = new(125000, 1) - reduce(calculated_result) == expected_result -} - -test pow_positive_exponent() { - expect Some(base) = new(50, 2500) - expect Some(calculated_result) = pow(base, 3) - expect Some(expected_result) = new(1, 125000) - reduce(calculated_result) == expected_result -} - -test pow_exponent_zero() { - expect Some(base) = new(50, 2500) - pow(base, 0) == new(1, 1) -} - -test pow_rational_zero_exponent_zero() { - expect Some(base) = new(0, 1) - pow(base, 0) == None -} - -/// Returns the proper fraction of a given `Rational` `r`. That is, a 2-tuple of -/// an `Int` and `Rational` (n, f) such that: -/// -/// - `r = n + f`; -/// - `n` and `f` have the same sign as `r`; -/// - `f` has an absolute value less than 1. -pub fn proper_fraction(self: Rational) -> (Int, Rational) { - let Rational { numerator, denominator } = self - ( - builtin.quotient_integer(numerator, denominator), - Rational { - numerator: builtin.remainder_integer(numerator, denominator), - denominator, - }, - ) -} - -test proper_fraction_1() { - let r = ratio(10, 7) - let (n, f) = proper_fraction(r) - and { - (n == 1)?, - (f == ratio(3, 7))?, - (r == add(from_int(n), f))?, - } -} - -test proper_fraction_2() { - let r = ratio(-10, 7) - let (n, f) = proper_fraction(r) - and { - (n == -1)?, - (f == ratio(-3, 7))?, - (r == add(from_int(n), f))?, - } -} - -test proper_fraction_3() { - let r = ratio(4, 2) - let (n, f) = proper_fraction(r) - and { - (n == 2)?, - (f == ratio(0, 2))?, - (r == add(from_int(n), f))?, - } -} - -/// Round the argument to the nearest whole number. If the argument is -/// equidistant between two values, the greater value is returned (it -/// rounds half towards positive infinity). -/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.round(x) == 1 -/// -/// expect Some(y) = rational.new(3, 2) -/// rational.round(y) == 2 -/// -/// expect Some(z) = rational.new(-3, 2) -/// rational.round(z) == -1 -/// ``` -/// -/// > [!CAUTION] -/// > This behaves differently than _Haskell_. If you're coming from `PlutusTx`, beware that in Haskell, rounding on equidistant values depends on the whole number being odd or even. -/// > If you need this behaviour, use [`round_even`](#round_even). 
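The `proper_fraction` contract described above (the whole part plus the fractional part gives back the original) can be sketched as follows; the values are taken from the removed tests:

```aiken
use aiken/math/rational

// The whole and fractional parts returned by proper_fraction add back up to
// the original value, with matching signs.
test proper_fraction_recombines() {
  expect Some(r) = rational.new(-10, 7)
  let (n, f) = rational.proper_fraction(r)
  and {
    n == -1,
    rational.compare(rational.add(rational.from_int(n), f), r) == Equal,
  }
}
```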
-pub fn round(self: Rational) -> Int { - let (n, f) = proper_fraction(self) - - let is_negative = f.numerator < 0 - - when compare(abs(f), ratio(1, 2)) is { - Less -> n - Equal -> - if is_negative { - n - } else { - n + 1 - } - Greater -> - if is_negative { - n - 1 - } else { - n + 1 - } - } -} - -test round_1() { - and { - (round(ratio(10, 7)) == 1)?, - (round(ratio(11, 7)) == 2)?, - (round(ratio(3, 2)) == 2)?, - (round(ratio(5, 2)) == 3)?, - (round(ratio(-3, 2)) == -1)?, - (round(ratio(-2, 3)) == -1)?, - (round(ratio(-10, 7)) == -1)?, - (round(ratio(4, 2)) == 2)?, - } -} - -/// Round the argument to the nearest whole number. If the argument is -/// equidistant between two values, it returns the value that is even (it -/// rounds half to even, also known as 'banker's rounding'). -/// -/// ```aiken -/// expect Some(w) = rational.new(2, 3) -/// rational.round_even(w) == 1 -/// -/// expect Some(x) = rational.new(3, 2) -/// rational.round_even(x) == 2 -/// -/// expect Some(y) = rational.new(5, 2) -/// rational.round_even(y) == 2 -/// -/// expect Some(y) = rational.new(-3, 2) -/// rational.round_even(y) == -2 -/// ``` -pub fn round_even(self: Rational) -> Int { - let (n, f) = proper_fraction(self) - - let m = - when compare(f, ratio(0, 1)) is { - Less -> -1 - _ -> 1 - } - - let is_even = n % 2 == 0 - - when compare(abs(f), ratio(1, 2)) is { - Less -> n - Equal -> - if is_even { - n - } else { - n + m - } - Greater -> n + m - } -} - -test round_even_1() { - and { - (round_even(ratio(10, 7)) == 1)?, - (round_even(ratio(11, 7)) == 2)?, - (round_even(ratio(3, 2)) == 2)?, - (round_even(ratio(5, 2)) == 2)?, - (round_even(ratio(-3, 2)) == -2)?, - (round_even(ratio(-2, 3)) == -1)?, - (round_even(ratio(-10, 7)) == -1)?, - (round_even(ratio(4, 2)) == 2)?, - } -} - -/// Returns the nearest `Int` between zero and a given `Rational`. 
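Not part of the patch: a small contrast between the two rounding modes removed here, half-up (`round`) versus half-to-even (`round_even`):

```aiken
use aiken/math/rational

// 5/2 is exactly halfway: round goes up to 3, round_even goes to the even
// neighbour 2.
test rounding_modes_differ_on_halves() {
  expect Some(half_odd) = rational.new(5, 2)
  and {
    rational.round(half_odd) == 3,
    rational.round_even(half_odd) == 2,
  }
}
```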
-/// -/// ```aiken -/// expect Some(x) = rational.new(2, 3) -/// rational.truncate(x) == 0 -/// -/// expect Some(y) = rational.new(44, 14) -/// rational.truncate(y) == 3 -/// -/// expect Some(z) = rational.new(-14, 3) -/// rational.truncate(z) == -4 -/// ``` -pub fn truncate(self: Rational) -> Int { - let Rational { numerator: a_n, denominator: a_d } = self - builtin.quotient_integer(a_n, a_d) -} - -test truncate_1() { - and { - (truncate(ratio(5, 2)) == 2)?, - (truncate(ratio(5, 3)) == 1)?, - (truncate(ratio(5, 4)) == 1)?, - (truncate(ratio(5, 5)) == 1)?, - (truncate(ratio(5, 6)) == 0)?, - (truncate(ratio(8, 3)) == 2)?, - (truncate(ratio(-14, 3)) == -4)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak deleted file mode 100644 index ab8cbc17..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/math/rational.tests.ak +++ /dev/null @@ -1,65 +0,0 @@ -use aiken/fuzz.{both, either, map} -use aiken/math/rational.{Rational, new, pow} - -const any_positive_rational: Fuzzer = - either( - map( - both(fuzz.int_at_least(1), fuzz.int_at_least(1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - map( - both(fuzz.int_at_most(-1), fuzz.int_at_most(-1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - ) - -const any_negative_rational: Fuzzer = - either( - map( - both(fuzz.int_at_most(-1), fuzz.int_at_least(1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - map( - both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), - fn((num, den)) { - expect Some(new_fraction) = new(num, den) - new_fraction - }, - ), - ) - -const any_non_zero_rational: Fuzzer = - either(any_negative_rational, any_positive_rational) - -test prop_power_of_zero_returns_one(rational via any_non_zero_rational) { - expect Some(calculated_result) = pow(rational, 0) - expect Some(expected_result) = new(1, 1) - calculated_result == expected_result -} - -test prop_power_of_one_returns_same_fraction(rational via any_non_zero_rational) { - expect Some(calculated_result) = pow(rational, 1) - calculated_result == rational -} - -test prop_power_numerator_zero_exponent_negative_returns_none( - (denominator, exponent) via both(fuzz.int_at_least(1), fuzz.int_at_most(-1)), -) { - expect Some(fraction) = new(0, denominator) - expect None = pow(fraction, exponent) -} - -test prop_power_unit_fraction_is_immutable(exponent via fuzz.int()) { - expect Some(unit) = new(1, 1) - expect Some(calculated_result) = pow(unit, exponent) - calculated_result == unit -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak deleted file mode 100644 index cf5ef7dc..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/option.ak +++ /dev/null @@ -1,312 +0,0 @@ -//// A type to capture optional results; useful for handling errors. -//// -//// Note that the `Option` type and its constructors are readily available in Aiken. 
They are part of the [Prelude](https://aiken-lang.github.io/prelude/aiken.html#Option) module imported by default in every module. - -// ## Inspecting - -/// Asserts whether an option is `None`. -pub fn is_none(self: Option) -> Bool { - when self is { - Some(_) -> False - _ -> True - } -} - -test is_none_1() { - is_none(Some(0)) == False -} - -test is_none_2() { - is_none(None) == True -} - -/// Asserts whether an option is `Some`, irrespective of the value it contains. -pub fn is_some(self: Option) -> Bool { - when self is { - Some(_) -> True - _ -> False - } -} - -test is_some_1() { - is_some(Some(0)) == True -} - -test is_some_2() { - is_some(None) == False -} - -// ## Combining - -/// Chain together many computations that may fail. -/// -/// ```aiken -/// self -/// |> dict.get(policy_id) -/// |> option.and_then(dict.get(_, asset_name)) -/// |> option.or_else(0) -/// ``` -pub fn and_then( - self: Option, - then: fn(a) -> Option, -) -> Option { - when self is { - None -> None - Some(a) -> then(a) - } -} - -fn try_decrement(n: Int) -> Option { - if n > 0 { - Some(n - 1) - } else { - None - } -} - -test and_then_1() { - let result = - None - |> and_then(try_decrement) - result == None -} - -test and_then_2() { - let result = - Some(14) - |> and_then(try_decrement) - result == Some(13) -} - -test and_then_3() { - let result = - Some(0) - |> and_then(try_decrement) - result == None -} - -/// Picks the first element which is not None. If there's no such element, return None. -/// -/// ```aiken -/// option.choice([]) == None -/// option.choice([Some(14), Some(42)]) == Some(14) -/// option.choice([None, Some(42)]) == Some(42) -/// option.choice([None, None]) == None -/// ``` -pub fn choice(self: List>) -> Option { - when self is { - [] -> None - [head, ..others] -> - when head is { - None -> choice(others) - _ -> head - } - } -} - -test choice_1() { - Some(1) == choice([Some(1), Some(2)]) -} - -test choice_2() { - None == choice([]) -} - -test choice_3() { - Some(1) == choice([None, Some(1)]) -} - -/// Converts from `Option>` to `Option`. -/// -/// ```aiken -/// option.flatten(Some(Some(42))) == Some(42) -/// option.flatten(Some(None)) == None -/// option.flatten(None) == None -/// ``` -/// -/// Flattening only removes one level of nesting at a time: -/// -/// ```aiken -/// flatten(Some(Some(Some(42)))) == Some(Some(42)) -/// Some(Some(Some(42))) |> flatten |> flatten == Some(42) -/// ``` -pub fn flatten(opt: Option>) -> Option { - when opt is { - Some(inner) -> inner - None -> None - } -} - -test flatten_1() { - let x: Option> = Some(Some(6)) - Some(6) == flatten(x) -} - -test flatten_2() { - let x: Option> = Some(None) - None == flatten(x) -} - -test flatten_3() { - let x: Option> = None - None == flatten(x) -} - -test flatten_4() { - let x: Option>> = Some(Some(Some(6))) - - let result = - x - |> flatten - |> flatten - - Some(6) == result -} - -/// Apply a function to the inner value of an [`Option`](#option) -/// -/// ```aiken -/// option.map(None, fn(n) { n * 2 }) == None -/// option.map(Some(14), fn(n) { n * 2 }) == Some(28) -/// ``` -pub fn map(self: Option, with: fn(a) -> result) -> Option { - when self is { - None -> None - Some(a) -> Some(with(a)) - } -} - -test map_1() { - map(None, fn(_) { Void }) == None -} - -test map_2() { - map(Some(14), fn(n) { n + 1 }) == Some(15) -} - -/// Combine two [`Option`](#option) together. 
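The deleted doc comments show `and_then`/`or_else` pipelines; a self-contained sketch of that style, reusing the rational helpers removed earlier in this patch:

```aiken
use aiken/math/rational
use aiken/option

// Chain two computations that may fail, then fall back to a default.
test option_pipeline_example() {
  let result =
    rational.new(1, 2)
      |> option.and_then(rational.reciprocal)
      |> option.map(rational.truncate)
      |> option.or_else(0)
  result == 2
}
```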
-/// -/// ```aiken -/// type Foo { -/// Foo(Int, Int) -/// } -/// -/// option.map2(Some(14), Some(42), Foo) == Some(Foo(14, 42)) -/// option.map2(None, Some(42), Foo) == None -/// option.map2(Some(14), None, Foo) == None -/// ``` -pub fn map2( - opt_a: Option, - opt_b: Option, - with: fn(a, b) -> result, -) -> Option { - when opt_a is { - None -> None - Some(a) -> - when opt_b is { - None -> None - Some(b) -> Some(with(a, b)) - } - } -} - -test map2_1() { - map2(None, Some(42), fn(_, _) { 14 }) == None -} - -test map2_2() { - map2(Some(42), None, fn(_, _) { 14 }) == None -} - -test map2_3() { - map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) -} - -/// Combine three [`Option`](#option) together. -/// -/// ```aiken -/// type Foo { -/// Foo(Int, Int, Int) -/// } -/// -/// option.map3(Some(14), Some(42), Some(1337), Foo) == Some(Foo(14, 42, 1337)) -/// option.map3(None, Some(42), Some(1337), Foo) == None -/// option.map3(Some(14), None, None, Foo) == None -/// ``` -pub fn map3( - opt_a: Option, - opt_b: Option, - opt_c: Option, - with: fn(a, b, c) -> result, -) -> Option { - when opt_a is { - None -> None - Some(a) -> - when opt_b is { - None -> None - Some(b) -> - when opt_c is { - None -> None - Some(c) -> Some(with(a, b, c)) - } - } - } -} - -test map3_1() { - map3(None, Some(42), None, fn(_, _, _) { 14 }) == None -} - -test map3_2() { - map3(Some(42), None, None, fn(_, _, _) { 14 }) == None -} - -test map3_3() { - map3(Some(14), Some(42), Some(1337), fn(a, b, c) { c - a + b }) == Some(1365) -} - -/// Like [`or_else`](#or_else) but allows returning an `Option`. -/// This is effectively mapping the error branch. -/// -/// ```aiken -/// option.or_try(None, fn(_) { Some("aiken") }) == Some("aiken") -/// option.or_try(Some(42), fn(_) { Some(14) }) == Some(42) -/// option.or_try(None, fn (_) { fail }) => 💥 -/// ``` -pub fn or_try(self: Option, compute_default: fn() -> Option) -> Option { - when self is { - None -> compute_default() - _ -> self - } -} - -test or_try_1() { - or_try(None, fn() { Some("aiken") }) == Some("aiken") -} - -test or_try_2() { - or_try(Some(42), fn() { fail }) == Some(42) -} - -// ## Transforming - -/// Provide a default value, turning an optional value into a normal value. -/// -/// ```aiken -/// option.or_else(None, "aiken") == "aiken" -/// option.or_else(Some(42), 14) == 42 -/// ``` -pub fn or_else(self: Option, default: a) -> a { - when self is { - None -> default - Some(a) -> a - } -} - -test or_else_1() { - or_else(None, "aiken") == "aiken" -} - -test or_else_2() { - or_else(Some(42), 14) == 42 -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak deleted file mode 100644 index d2f125f5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/bytearray.ak +++ /dev/null @@ -1,668 +0,0 @@ -use aiken/builtin -use aiken/math -use aiken/option - -pub type Byte = - Int - -// ## Constructing - -/// Encode an integer value as a Big-Endian (most-significant bytes first) `ByteArray`. -/// The size is the expected size in number of bytes. -/// -/// > [!IMPORTANT] -/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the -/// > size is _too large_, the array is left-padded with zeroes. 
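Illustration of the `or_try`/`or_else` fallback pattern above; `primary` and `secondary` are made-up placeholder values:

```aiken
use aiken/option

// Try a primary lookup, fall back to a secondary one, then to a constant.
test fallback_chain_example() {
  let primary: Option<Int> = None
  let secondary: Option<Int> = Some(42)
  let result =
    primary
      |> option.or_try(fn() { secondary })
      |> option.or_else(14)
  result == 42
}
```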
-/// -/// ```aiken -/// bytearray.from_int_big_endian(1_000_000, 3) == #"0f4240" -/// bytearray.from_int_big_endian(1_000_000, 5) == #"00000f4240" -/// bytearray.from_int_big_endian(0, 8) == #"0000000000000000" -/// bytearray.from_int_big_endian(1_000_000, 1) => 💥 -/// ``` -pub fn from_int_big_endian(self: Int, size: Int) -> ByteArray { - builtin.integer_to_bytearray(True, size, self) -} - -test from_int_big_endian_1() { - from_int_big_endian(1_000_000, 3) == #"0f4240" -} - -test from_int_big_endian_2() { - from_int_big_endian(1_000_000, 5) == #"00000f4240" -} - -test from_int_big_endian_3() { - from_int_big_endian(0, 8) == #"0000000000000000" -} - -test from_int_big_endian_4() fail { - from_int_big_endian(1_000_000, 1) == #"40" -} - -/// Encode an integer value as a Little-Endian (least-significant bytes first) `ByteArray`. -/// The size is the expected size in number of bytes. -/// -/// > [!IMPORTANT] -/// > This function fails (i.e. halts the program) if the value cannot fit in the given size. When the -/// > size is _too large_, the array is right-padded with zeroes. -/// -/// ```aiken -/// bytearray.from_int_little_endian(1_000_000, 3) == #"40420f" -/// bytearray.from_int_little_endian(1_000_000, 5) == #"40420f0000" -/// bytearray.from_int_little_endian(0, 8) == #"0000000000000000" -/// bytearray.from_int_little_endian(1_000_000, 1) => 💥 -/// ``` -pub fn from_int_little_endian(self: Int, size: Int) -> ByteArray { - builtin.integer_to_bytearray(False, size, self) -} - -test from_int_little_endian_1() { - from_int_little_endian(1_000_000, 3) == #"40420f" -} - -test from_int_little_endian_2() { - from_int_little_endian(1_000_000, 5) == #"40420f0000" -} - -test from_int_little_endian_3() { - from_int_little_endian(0, 8) == #"0000000000000000" -} - -test from_int_little_endian_4() fail { - from_int_little_endian(1_000_000, 1) == #"40" -} - -/// Convert a `String` into a `ByteArray`. -/// -/// ```aiken -/// bytearray.from_string(@"ABC") == #"414243" -/// ``` -pub fn from_string(str: String) -> ByteArray { - builtin.encode_utf8(str) -} - -test from_string_1() { - from_string(@"") == "" -} - -test from_string_2() { - from_string(@"ABC") == #"414243" -} - -/// Add a byte element in front of a `ByteArray`. When the given byte is -/// greater than 255, it wraps-around. **PlutusV2 behavior** So 256 is mapped to 0, 257 to 1, and so -/// forth. -/// In PlutusV3 this will error instead of wrapping around. -/// -/// ```aiken -/// bytearray.push(#"", 0) == #"00" -/// bytearray.push(#"0203", 1) == #"010203" -/// bytearray.push(#"0203", 257) == #"010203" -/// ``` -pub fn push(self: ByteArray, byte: Byte) -> ByteArray { - builtin.cons_bytearray(byte, self) -} - -test push_1() { - push(#[], 0) == #[0] -} - -test push_2() { - push(#[2, 3], 1) == #[1, 2, 3] -} - -test push_3() fail { - let x = 257 - push(#[2, 3], x) == #[1, 2, 3] -} - -// ## Inspecting - -/// Get the `Byte` at the given index, or crash. -/// -/// > [!WARNING] -/// > This functions fails (i.e. halts the program) if there's no byte at the given index. -pub fn at(self: ByteArray, index: Int) -> Byte { - builtin.index_bytearray(self, index) -} - -/// Search the start and end positions of a sub-array in a `ByteArray`. 
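For reference, a round-trip sketch pairing the fixed-size integer encoders above with the decoders defined further down the same removed module:

```aiken
use aiken/primitive/bytearray

// Encoding then decoding with the same endianness returns the original value,
// regardless of the amount of zero padding requested.
test int_encoding_round_trips() {
  and {
    bytearray.to_int_big_endian(bytearray.from_int_big_endian(1_000_000, 5)) == 1_000_000,
    bytearray.to_int_little_endian(bytearray.from_int_little_endian(1_000_000, 3)) == 1_000_000,
  }
}
```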
-/// -/// ```aiken -/// bytearray.index_of("Hello, World!", "World") == Some((7, 11)) -/// bytearray.index_of("Hello, World!", "foo") == None -/// bytearray.index_of("Hello, World!", "!") == Some((12, 12)) -/// bytearray.index_of("Hello, World!", "o") == Some((4, 4)) -/// bytearray.index_of("Hello, World!", "Hello, World!") == Some((0, 12)) -/// ``` -pub fn index_of(self: ByteArray, bytes: ByteArray) -> Option<(Int, Int)> { - let offset = length(bytes) - - do_index_of(self, bytes, 0, offset, length(self)) - |> option.map(fn(ix) { (ix, ix + offset - 1) }) -} - -fn do_index_of( - self: ByteArray, - bytes: ByteArray, - cursor: Int, - offset: Int, - size: Int, -) -> Option { - if cursor + offset > size { - None - } else { - if builtin.slice_bytearray(cursor, offset, self) == bytes { - Some(cursor) - } else { - do_index_of(self, bytes, cursor + 1, offset, size) - } - } -} - -test index_of_1() { - index_of("Hello, World!", "World") == Some((7, 11)) -} - -test index_of_2() { - index_of("Hello, World!", "foo") == None -} - -test index_of_3() { - index_of("Hello, World!", "!") == Some((12, 12)) -} - -test index_of_4() { - index_of("Hello, World!", "o") == Some((4, 4)) -} - -test index_of_5() { - index_of("Hello, World!", "Hello, World!") == Some((0, 12)) -} - -/// Returns `True` when the given `ByteArray` is empty. -/// -/// ```aiken -/// bytearray.is_empty(#"") == True -/// bytearray.is_empty(#"00ff") == False -/// ``` -pub fn is_empty(self: ByteArray) -> Bool { - builtin.length_of_bytearray(self) == 0 -} - -test is_empty_1() { - is_empty(#"") == True -} - -test is_empty_2() { - is_empty(#"01") == False -} - -/// Returns the number of bytes in a `ByteArray`. -/// -/// ```aiken -/// bytearray.length(#[1, 2, 3]) == 3 -/// ``` -pub fn length(self: ByteArray) -> Int { - builtin.length_of_bytearray(self) -} - -test length_1() { - length(#"") == 0 -} - -test length_2() { - length(#"010203") == 3 -} - -/// Checks whether a bit (Most-Significant-Bit first) is set in the given 'ByteArray'. -/// -/// For example, consider the following bytearray: `#"8b765f"`. It can also be written as the -/// following bits sequence: -/// -/// `8` | `b` | `7` | `6` | `5` | `f` -/// --- | --- | --- | --- | --- | --- -/// `1000` | `1011` | `0111` | `0110` | `0101` | `1111` -/// -/// And thus, we have: -/// -/// ```aiken -/// test_bit(#"8b765f", 0) == True -/// test_bit(#"8b765f", 1) == False -/// test_bit(#"8b765f", 2) == False -/// test_bit(#"8b765f", 3) == False -/// test_bit(#"8b765f", 7) == True -/// test_bit(#"8b765f", 8) == False -/// test_bit(#"8b765f", 20) == True -/// test_bit(#"8b765f", 21) == True -/// test_bit(#"8b765f", 22) == True -/// test_bit(#"8b765f", 23) == True -/// ``` -pub fn test_bit(self: ByteArray, ix: Int) -> Bool { - builtin.less_than_equals_bytearray( - #[128], - builtin.cons_bytearray( - builtin.index_bytearray(self, ix / 8) * math.pow2(ix % 8) % 256, - "", - ), - ) -} - -test test_bit_0() { - test_bit(#"8b765f", 0) -} - -test test_bit_1() { - !test_bit(#"8b765f", 1) -} - -test test_bit_2() { - !test_bit(#"8b765f", 2) -} - -test test_bit_3() { - !test_bit(#"8b765f", 3) -} - -test test_bit_7() { - test_bit(#"8b765f", 7) -} - -test test_bit_8() { - !test_bit(#"8b765f", 8) -} - -test test_bit_20_21_22_23() { - and { - test_bit(#"8b765f", 20), - test_bit(#"8b765f", 21), - test_bit(#"8b765f", 22), - test_bit(#"8b765f", 23), - } -} - -// ## Modifying - -/// Returns the suffix of a `ByteArray` after `n` elements. 
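A sketch combining the removed `index_of` and `slice`: the inclusive positions returned by one can be fed straight into the other.

```aiken
use aiken/primitive/bytearray

// index_of returns inclusive start/end positions, which slice accepts as-is.
test index_of_then_slice() {
  expect Some((start, end)) = bytearray.index_of("Hello, World!", "World")
  bytearray.slice("Hello, World!", start, end) == "World"
}
```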
-/// -/// ```aiken -/// bytearray.drop(#[1, 2, 3], n: 2) == #[3] -/// ``` -pub fn drop(self: ByteArray, n: Int) -> ByteArray { - builtin.slice_bytearray(n, builtin.length_of_bytearray(self) - n, self) -} - -test drop_1() { - let x = #"01020304050607" - drop(x, 2) == #"0304050607" -} - -test drop_2() { - let x = #"01020304050607" - drop(x, 0) == x -} - -test drop_3() { - let x = #"01" - drop(x, 1) == #"" -} - -test drop_4() { - let x = #"" - drop(x, 2) == #"" -} - -/// Extract a `ByteArray` as a slice of another `ByteArray`. -/// -/// Indexes are 0-based and inclusive. -/// -/// ```aiken -/// bytearray.slice(#[0, 1, 2, 3, 4, 5, 6], start: 1, end: 3) == #[1, 2, 3] -/// ``` -pub fn slice(self: ByteArray, start: Int, end: Int) -> ByteArray { - builtin.slice_bytearray(start, end - start + 1, self) -} - -test slice_1() { - slice(#"", 1, 2) == #"" -} - -test slice_2() { - slice(#"010203", 1, 2) == #"0203" -} - -test slice_3() { - slice(#"010203", 0, 42) == #"010203" -} - -test slice_4() { - slice(#[0, 1, 2, 3, 4], 0, 3) == #[0, 1, 2, 3] -} - -test slice_5() { - slice(#[0, 1, 2, 3, 4], 1, 2) == #[1, 2] -} - -/// Returns the n-length prefix of a `ByteArray`. -/// -/// ```aiken -/// bytearray.take(#[1, 2, 3], n: 2) == #[1, 2] -/// ``` -pub fn take(self: ByteArray, n: Int) -> ByteArray { - builtin.slice_bytearray(0, n, self) -} - -test take_1() { - let x = #"01020304050607" - take(x, 2) == #"0102" -} - -test take_2() { - let x = #"01020304050607" - take(x, 0) == #"" -} - -test take_3() { - let x = #"01" - take(x, 1) == x -} - -test take_4() { - let x = #"010203" - take(x, 0) == #"" -} - -// ## Combining - -/// Combine two `ByteArray` together. -/// -/// ```aiken -/// bytearray.concat(left: #[1, 2, 3], right: #[4, 5, 6]) == #[1, 2, 3, 4, 5, 6] -/// ``` -pub fn concat(left: ByteArray, right: ByteArray) -> ByteArray { - builtin.append_bytearray(left, right) -} - -test concat_1() { - concat(#"", #"") == #"" -} - -test concat_2() { - concat(#"", #"01") == #"01" -} - -test concat_3() { - concat(#"0102", #"") == #"0102" -} - -test concat_4() { - concat(#"0102", #"0304") == #"01020304" -} - -/// Compare two bytearrays lexicographically. -/// -/// ```aiken -/// bytearray.compare(#"00", #"FF") == Less -/// bytearray.compare(#"42", #"42") == Equal -/// bytearray.compare(#"FF", #"00") == Greater -/// ``` -pub fn compare(left: ByteArray, right: ByteArray) -> Ordering { - if builtin.less_than_bytearray(left, right) { - Less - } else if builtin.equals_bytearray(left, right) { - Equal - } else { - Greater - } -} - -// ## Transforming - -/// Left-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. 
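Not part of the diff: the usual split/concat invariant for the `take`, `drop` and `concat` helpers above.

```aiken
use aiken/primitive/bytearray

// Splitting at an index with take/drop and re-joining with concat is lossless.
test take_drop_concat_round_trip() {
  let bytes = #"01020304050607"
  bytearray.concat(bytearray.take(bytes, 3), bytearray.drop(bytes, 3)) == bytes
}
```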
-/// -/// ```aiken -/// bytearray.foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 -/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[3, 2, 1] -/// ``` -pub fn foldl( - self: ByteArray, - zero: result, - with: fn(Int, result) -> result, -) -> result { - do_foldl(self, zero, builtin.length_of_bytearray(self), 0, with) -} - -fn do_foldl( - self: ByteArray, - zero: result, - len: Int, - cursor: Int, - with: fn(Int, result) -> result, -) -> result { - if cursor == len { - zero - } else { - do_foldl( - self, - with(builtin.index_bytearray(self, cursor), zero), - len, - cursor + 1, - with, - ) - } -} - -test foldl_1() { - foldl(#[], 42, fn(byte, acc) { byte + acc }) == 42 -} - -test foldl_2() { - foldl(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 44203 -} - -test foldl_3() { - foldl( - #"356cf088720a169dae0ce0bb1df8588944389fa43322f0d6ef4ed8c069bfd405", - 0, - fn(byte, acc) { acc * 256 + byte }, - ) == 24165060555594911913195642527692216679757672038384202527929620681761931383813 -} - -test foldl_4() { - foldl(#[1, 2, 3, 4, 5], #"", flip(push)) == #[5, 4, 3, 2, 1] -} - -/// Right-fold over bytes of a [`ByteArray`](https://aiken-lang.github.io/prelude/aiken.html#ByteArray). Note that every byte given to the callback function is comprised between 0 and 255. -/// -/// ```aiken -/// bytearray.foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 -/// bytearray.foldl(#[1, 2, 3], #"", flip(bytearray.push)) == #[1, 2, 3] -/// ``` -pub fn foldr( - self: ByteArray, - zero: result, - with: fn(Int, result) -> result, -) -> result { - do_foldr(self, zero, builtin.length_of_bytearray(self) - 1, with) -} - -fn do_foldr( - self: ByteArray, - zero: result, - cursor: Int, - with: fn(Int, result) -> result, -) -> result { - if cursor < 0 { - zero - } else { - do_foldr( - self, - with(builtin.index_bytearray(self, cursor), zero), - cursor - 1, - with, - ) - } -} - -test foldr_1() { - foldr(#[], 42, fn(byte, acc) { byte + acc }) == 42 -} - -test foldr_2() { - foldr(#"acab", 0, fn(byte, acc) { acc * 256 + byte }) == 43948 -} - -test foldr_3() { - foldr(#[1, 2, 3, 4, 5], #"", flip(push)) == #[1, 2, 3, 4, 5] -} - -/// Reduce bytes in a ByteArray from left to right using the accumulator as left operand. -/// Said differently, this is [`foldl`](#foldl) with callback arguments swapped. -/// -/// ```aiken -/// bytearray.reduce(#[1,2,3], #[], bytearray.push) == #[3, 2, 1] -/// ``` -pub fn reduce( - self: ByteArray, - zero: result, - with: fn(result, Int) -> result, -) -> result { - foldl(self, zero, flip(with)) -} - -test reduce_1() { - reduce(#[], #[], push) == #[] -} - -test reduce_2() { - reduce(#[1, 2, 3], #[], push) == #[3, 2, 1] -} - -/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// bytearray.to_int_big_endian(#"0f4240") == 1_000_000 -/// bytearray.to_int_big_endian(#"00000f4240") == 1_000_000 -/// bytearray.to_int_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn to_int_big_endian(self: ByteArray) -> Int { - builtin.bytearray_to_integer(True, self) -} - -test to_int_big_endian_1() { - to_int_big_endian(#"0f4240") == 1_000_000 -} - -test to_int_big_endian_2() { - to_int_big_endian(#"00000f4240") == 1_000_000 -} - -test to_int_big_endian_3() { - to_int_big_endian(#"0000000000000000") == 0 -} - -/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. 
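The removed `foldl` doc example accumulates bytes as a big-endian integer; a hedged check that this agrees with the dedicated decoder from the same module:

```aiken
use aiken/primitive/bytearray

// Folding bytes left-to-right with acc * 256 + byte is exactly a big-endian
// decode.
test foldl_matches_big_endian_decoder() {
  let bytes = #"0f4240"
  let via_fold = bytearray.foldl(bytes, 0, fn(byte, acc) { acc * 256 + byte })
  via_fold == bytearray.to_int_big_endian(bytes)
}
```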
-/// -/// ```aiken -/// bytearray.to_int_big_endian(#"40420f") == 1_000_000 -/// bytearray.to_int_big_endian(#"40420f0000") == 1_000_000 -/// bytearray.to_int_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn to_int_little_endian(self: ByteArray) -> Int { - builtin.bytearray_to_integer(False, self) -} - -test to_int_little_endian_1() { - to_int_little_endian(#"40420f") == 1_000_000 -} - -test to_int_little_endian_2() { - to_int_little_endian(#"40420f0000") == 1_000_000 -} - -test to_int_little_endian_3() { - to_int_little_endian(#"0000000000000000") == 0 -} - -/// Convert a `ByteArray` into a `String`. -/// -/// > [!WARNING] -/// > This functions fails (i.e. halts the program) if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. -/// > -/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](#to_hex). -/// -/// ```aiken -/// bytearray.to_string(#"414243") == "ABC" -/// bytearray.to_string(some_hash) => 💥 -/// ``` -pub fn to_string(self: ByteArray) -> String { - builtin.decode_utf8(self) -} - -test to_string_1() { - to_string("") == @"" -} - -test to_string_2() { - to_string("ABC") == @"ABC" -} - -/// Encode a `ByteArray` as a hexidecimal `String`. -/// -/// ```aiken -/// bytearray.to_hex("Hello world!") == @"48656c6c6f20776f726c6421" -/// ``` -pub fn to_hex(self: ByteArray) -> String { - self - |> encode_base16(builtin.length_of_bytearray(self) - 1, "") - |> builtin.decode_utf8 -} - -test to_hex_1() { - to_hex("Hello world!") == @"48656C6C6F20776F726C6421" -} - -test to_hex_2() { - to_hex("The quick brown fox jumps over the lazy dog") == @"54686520717569636B2062726F776E20666F78206A756D7073206F76657220746865206C617A7920646F67" -} - -/// Checks whether a `ByteArray` starts with a given prefix. -/// -/// ```aiken -/// bytearray.starts_with("Hello, World!", prefix: "Hello") == True -/// bytearray.starts_with("", prefix: "") == True -/// bytearray.starts_with("Hello", prefix: "Hello, World!") == False -/// ``` -pub fn starts_with(self: ByteArray, prefix: ByteArray) -> Bool { - let prefix_length = length(prefix) - if length(self) < prefix_length { - False - } else { - take(self, prefix_length) == prefix - } -} - -test starts_with_1() { - starts_with("", "") -} - -test starts_with_2() { - starts_with("Hello, World!", "Hello, World!") -} - -test starts_with_3() { - !starts_with("Hello, World!", "hello") -} - -test starts_with_4() { - !starts_with("", "World") -} - -test starts_with_5() { - starts_with("Hello, World", "Hello") -} - -test starts_with_6() { - !starts_with("foo", "foo_") -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak deleted file mode 100644 index 217749e9..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/int.ak +++ /dev/null @@ -1,156 +0,0 @@ -use aiken/builtin.{bytearray_to_integer, decode_utf8} -use aiken/math -use aiken/option -use aiken/primitive/bytearray - -// ## Combining - -/// Compare two integers. 
-/// -/// ```aiken -/// int.compare(14, 42) == Less -/// int.compare(14, 14) == Equal -/// int.compare(42, 14) == Greater -/// ``` -pub fn compare(left: Int, right: Int) -> Ordering { - if left < right { - Less - } else if left > right { - Greater - } else { - Equal - } -} - -// ## Transforming - -/// Interpret a Big-Endian (most-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// int.from_bytearray_big_endian(#"0f4240") == 1_000_000 -/// int.from_bytearray_big_endian(#"00000f4240") == 1_000_000 -/// int.from_bytearray_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn from_bytearray_big_endian(self: ByteArray) -> Int { - bytearray_to_integer(True, self) -} - -test from_bytearray_big_endian_1() { - from_bytearray_big_endian(#"0f4240") == 1_000_000 -} - -test from_bytearray_big_endian_2() { - from_bytearray_big_endian(#"00000f4240") == 1_000_000 -} - -test from_bytearray_big_endian_3() { - from_bytearray_big_endian(#"0000000000000000") == 0 -} - -/// Interpret a Little-Endian (least-significant bytes first) `ByteArray` as an `Int`. -/// -/// ```aiken -/// int.from_bytearray_big_endian(#"40420f") == 1_000_000 -/// int.from_bytearray_big_endian(#"40420f0000") == 1_000_000 -/// int.from_bytearray_big_endian(#"0000000000000000") == 0 -/// ``` -pub fn from_bytearray_little_endian(self: ByteArray) -> Int { - bytearray_to_integer(False, self) -} - -test from_bytearray_little_endian_1() { - from_bytearray_little_endian(#"40420f") == 1_000_000 -} - -test from_bytearray_little_endian_2() { - from_bytearray_little_endian(#"40420f0000") == 1_000_000 -} - -test from_bytearray_little_endian_3() { - from_bytearray_little_endian(#"0000000000000000") == 0 -} - -/// Parse an integer from a utf-8 encoded `ByteArray`, when possible. -/// -/// ```aiken -/// int.from_utf8("14") == Some(14) -/// int.from_utf8("-42") == Some(-42) -/// int.from_utf8("007") == Some(7) -/// int.from_utf8("foo") == None -/// int.from_utf8("1.0") == None -/// int.from_utf8("1-2") == None -/// ``` -pub fn from_utf8(bytes: ByteArray) -> Option { - bytes - |> bytearray.foldr( - Some((0, 0)), - fn(byte, st) { - when st is { - None -> None - Some((n, e)) -> - if byte < 48 || byte > 57 { - if byte == 45 { - Some((-n, 0)) - } else { - None - } - } else if n < 0 { - None - } else { - let digit = byte - 48 - Some((n + digit * math.pow(10, e), e + 1)) - } - } - }, - ) - |> option.map(fn(tuple) { tuple.1st }) -} - -test from_utf8_1() { - from_utf8("0017") == Some(17) -} - -test from_utf8_2() { - from_utf8("42") == Some(42) -} - -test from_utf8_3() { - from_utf8("1337") == Some(1337) -} - -test from_utf8_4() { - from_utf8("-14") == Some(-14) -} - -test from_utf8_5() { - from_utf8("foo") == None -} - -test from_utf8_6() { - from_utf8("1-2") == None -} - -/// Convert an `Int` to its `String` representation. 
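Illustration only: parsing with `from_utf8` inverts `to_string`, going through `string.to_bytearray` from the neighbouring removed module:

```aiken
use aiken/primitive/int
use aiken/primitive/string

// Rendering an Int to text and parsing it back yields the original value.
test int_to_string_round_trips() {
  int.from_utf8(string.to_bytearray(int.to_string(42))) == Some(42)
}
```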
-/// -/// ```aiken -/// int.to_string(42) == @"42" -/// ``` -pub fn to_string(n: Int) -> String { - diagnostic(n, "") |> decode_utf8 -} - -test to_string_1() { - to_string(0) == @"0" -} - -test to_string_2() { - to_string(5) == @"5" -} - -test to_string_3() { - to_string(42) == @"42" -} - -test to_string_4() { - to_string(200) == @"200" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak deleted file mode 100644 index 35fa5567..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/aiken/primitive/string.ak +++ /dev/null @@ -1,139 +0,0 @@ -use aiken/builtin.{ - append_bytearray, append_string, decode_utf8, encode_utf8, length_of_bytearray, -} - -// ## Constructing - -/// Convert a `ByteArray` into a `String` -/// -/// > [!WARNING] -/// > This functions fails if the underlying `ByteArray` isn't UTF-8-encoded. In particular, you cannot convert arbitrary hash digests using this function. -/// > -/// > For converting arbitrary `ByteArray`s, use [bytearray.to_hex](./bytearray.html#to_hex). -/// -/// ```aiken -/// string.from_bytearray("foo") == @"foo" -/// string.from_bytearray(#"666f6f") == @"foo" -/// string.from_bytearray(some_hash) -> fail -/// ``` -pub fn from_bytearray(bytes: ByteArray) -> String { - decode_utf8(bytes) -} - -test from_bytearray_1() { - from_bytearray(#[]) == @"" -} - -test from_bytearray_2() { - from_bytearray(#[65, 66, 67]) == @"ABC" -} - -test from_bytearray_3() { - from_bytearray("ABC") == @"ABC" -} - -/// Convert an `Int` to its `String` representation. -/// -/// ```aiken -/// string.from_int(42) == @"42" -/// ``` -pub fn from_int(n: Int) -> String { - diagnostic(n, "") |> decode_utf8 -} - -test from_int_1() { - from_int(0) == @"0" -} - -test from_int_2() { - from_int(5) == @"5" -} - -test from_int_3() { - from_int(42) == @"42" -} - -test from_int_4() { - from_int(200) == @"200" -} - -// ## Combining - -/// Combine two `String` together. -/// -/// ```aiken -/// string.concat(left: @"Hello", right: @", World!") == @"Hello, World!" -/// ``` -pub fn concat(left: String, right: String) -> String { - append_string(left, right) -} - -test concat_1() { - concat(@"", @"") == @"" -} - -test concat_2() { - concat(@"", @"foo") == concat(@"foo", @"") -} - -test concat_3() { - concat(left: @"Hello", right: @", World!") == @"Hello, World!" -} - -/// Join a list of strings, separated by a given _delimiter_. 
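A small usage sketch for the removed `join` and `from_int`; the `utxo#42` label is a made-up example value:

```aiken
use aiken/primitive/string

// Build a small label from parts; the "utxo#42" value is only an example.
test build_label_with_join() {
  string.join([@"utxo", string.from_int(42)], @"#") == @"utxo#42"
}
```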
-/// -/// ```aiken -/// string.join([], @"+") == @"" -/// string.join([@"a", @"b", @"c"], @",") == @"a,b,c" -/// ``` -pub fn join(list: List, delimiter: String) -> String { - do_join(list, encode_utf8(delimiter), #"") - |> decode_utf8 -} - -fn do_join(xs, delimiter, bytes) { - when xs is { - [] -> bytes - [x, ..rest] -> - do_join( - rest, - delimiter, - if length_of_bytearray(bytes) == 0 { - encode_utf8(x) - } else { - append_bytearray(bytes, append_bytearray(delimiter, encode_utf8(x))) - }, - ) - } -} - -test join_1() { - join([], @",") == @"" -} - -test join_2() { - join([@"a", @"b", @"c"], @",") == @"a,b,c" -} - -// ## Transforming - -/// Convert a `String` into a `ByteArray` -/// -/// ```aiken -/// string.to_bytearray(@"foo") == "foo" -/// ``` -pub fn to_bytearray(self: String) -> ByteArray { - encode_utf8(self) -} - -test to_bytearray_1() { - to_bytearray(@"") == "" -} - -test to_bytearray_2() { - to_bytearray(@"ABC") == #[65, 66, 67] -} - -test to_bytearray_3() { - to_bytearray(@"ABC") == "ABC" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak deleted file mode 100644 index 0167b90f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address.ak +++ /dev/null @@ -1,86 +0,0 @@ -use aiken/crypto.{ - Blake2b_224, Hash, Script, ScriptHash, VerificationKey, VerificationKeyHash, -} - -/// A general structure for representing an on-chain `Credential`. -/// -/// Credentials are always one of two kinds: a direct public/private key -/// pair, or a script (native or Plutus). -pub type Credential { - VerificationKey(VerificationKeyHash) - Script(ScriptHash) -} - -// ## Constructing - -/// A Cardano `Address` typically holding one or two credential references. -/// -/// Note that legacy bootstrap addresses (a.k.a. 'Byron addresses') are -/// completely excluded from Plutus contexts. Thus, from an on-chain -/// perspective only exists addresses of type 00, 01, ..., 07 as detailed -/// in [CIP-0019 :: Shelley Addresses](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#shelley-addresses). -pub type Address { - payment_credential: PaymentCredential, - stake_credential: Option, -} - -/// Smart-constructor for an [Address](#Address) from a [script](#Script) hash. The address has no delegation rights whatsoever. -pub fn from_script(script: Hash) -> Address { - Address { payment_credential: Script(script), stake_credential: None } -} - -/// Smart-constructor for an [Address](#Address) from a [verification key](#VerificationKey) hash. The resulting address has no delegation rights whatsoever. -pub fn from_verification_key(vk: Hash) -> Address { - Address { payment_credential: VerificationKey(vk), stake_credential: None } -} - -/// Set (or reset) the delegation part of an [Address](#Address) using a [verification key](#VerificationKey) hash. This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). -pub fn with_delegation_key( - self: Address, - vk: Hash, -) -> Address { - Address { - payment_credential: self.payment_credential, - stake_credential: Some(Inline(VerificationKey(vk))), - } -} - -/// Set (or reset) the delegation part of an [Address](#Address) using a [script](#Script) hash. 
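For context on the smart constructors above, a hedged sketch building a base address; both 28-byte hashes are hypothetical placeholders:

```aiken
use cardano/address.{Inline, VerificationKey}

// Both hashes below are hypothetical 28-byte placeholders.
test base_address_from_smart_constructors() {
  let payment_vkh = #"00000000000000000000000000000000000000000000000000000000"
  let stake_vkh = #"11111111111111111111111111111111111111111111111111111111"
  let addr =
    address.from_verification_key(payment_vkh)
      |> address.with_delegation_key(stake_vkh)
  and {
    addr.payment_credential == VerificationKey(payment_vkh),
    addr.stake_credential == Some(Inline(VerificationKey(stake_vkh))),
  }
}
```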
This is useful when combined with [`from_verification_key`](#from_verification_key) and/or [`from_script`](#from_script). -pub fn with_delegation_script( - self: Address, - script: Hash, -) -> Address { - Address { - payment_credential: self.payment_credential, - stake_credential: Some(Inline(Script(script))), - } -} - -/// Represent a type of object that can be represented either inline (by hash) -/// or via a reference (i.e. a pointer to an on-chain location). -/// -/// This is mainly use for capturing pointers to a stake credential -/// registration certificate in the case of so-called pointer addresses. -pub type Referenced { - Inline(a) - Pointer { slot_number: Int, transaction_index: Int, certificate_index: Int } -} - -/// A `StakeCredential` represents the delegation and rewards withdrawal conditions -/// associated with some stake address / account. -/// -/// A `StakeCredential` is either provided inline, or, by reference using an -/// on-chain pointer. -/// -/// Read more about pointers in [CIP-0019 :: Pointers](https://github.com/cardano-foundation/CIPs/tree/master/CIP-0019/#pointers). -pub type StakeCredential = - Referenced - -/// A 'PaymentCredential' represents the spending conditions associated with -/// some output. Hence, -/// -/// - a `VerificationKey` captures an output locked by a public/private key pair; -/// - and a `Script` captures an output locked by a native or Plutus script. -/// -pub type PaymentCredential = - Credential diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak deleted file mode 100644 index 2ebeaa91..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/address/credential.ak +++ /dev/null @@ -1,30 +0,0 @@ -use aiken/primitive/bytearray -use cardano/address.{Credential, Script, VerificationKey} - -pub fn compare(left: Credential, right: Credential) -> Ordering { - when left is { - Script(left) -> - when right is { - Script(right) -> bytearray.compare(left, right) - _ -> Less - } - VerificationKey(left) -> - when right is { - Script(_) -> Greater - VerificationKey(right) -> bytearray.compare(left, right) - } - } -} - -test compare_matrix() { - and { - (compare(Script(""), Script("")) == Equal)?, - (compare(VerificationKey(""), VerificationKey("")) == Equal)?, - (compare(Script(""), VerificationKey("")) == Less)?, - (compare(VerificationKey(""), Script("")) == Greater)?, - (compare(Script("01"), Script("02")) == Less)?, - (compare(Script("02"), Script("01")) == Greater)?, - (compare(VerificationKey("01"), VerificationKey("02")) == Less)?, - (compare(VerificationKey("02"), VerificationKey("01")) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak deleted file mode 100644 index 664a3983..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/assets.ak +++ /dev/null @@ -1,920 +0,0 @@ -use aiken/builtin -use aiken/collection/dict.{Dict, from_ascending_pairs_with} -use aiken/collection/list -use aiken/crypto.{Blake2b_224, Hash, Script} -use aiken/option - -/// 
Lovelace is now a type wrapper for Int. -pub type Lovelace = - Int - -/// A type-alias for a `PolicyId`. A `PolicyId` is always 28-byte long -pub type PolicyId = - Hash<Blake2b_224, Script> - -/// Ada, the native currency, isn't associated with any `PolicyId` (it's not -/// possible to mint Ada!). -/// -/// By convention, it is an empty `ByteArray`. -pub const ada_policy_id = "" - -/// A type-alias for `AssetName`, which is a free-form byte-array between -/// 0 and 32 bytes. -pub type AssetName = - ByteArray - -/// Ada, the native currency, isn't associated with any `AssetName` (it's not -/// possible to mint Ada!). -/// -/// By convention, it is an empty `ByteArray`. -pub const ada_asset_name = "" - -/// A multi-asset output `Value`. Contains tokens indexed by [PolicyId](#PolicyId) and [AssetName](#AssetName). -/// -/// > [!IMPORTANT] -/// > This type maintains some invariants by construction; in particular, a `Value` will never contain a -/// > zero quantity of a particular token. -pub opaque type Value { - inner: Dict<PolicyId, Dict<AssetName, Int>>, -} - -// ## Constructing - -/// Construct a `Value` from an asset identifier (i.e. `PolicyId` + `AssetName`) -/// and a given quantity. -pub fn from_asset( - policy_id: PolicyId, - asset_name: AssetName, - quantity: Int, -) -> Value { - if quantity == 0 { - zero - } else { - let asset = - dict.empty - |> dict.insert(asset_name, quantity) - dict.empty - |> dict.insert(policy_id, asset) - |> Value - } -} - -/// Promote an arbitrary list of assets into a `Value`. This function fails -/// (i.e. halts the program execution) if: -/// -/// - there's any duplicate amongst `PolicyId`; -/// - there's any duplicate amongst `AssetName`; -/// - the `AssetName` aren't sorted in ascending lexicographic order; or -/// - any asset quantity is null. -/// -/// This function is meant to turn arbitrary user-defined `Data` into safe `Value`, -/// while checking for internal invariants. -pub fn from_asset_list(xs: Pairs<PolicyId, Pairs<AssetName, Int>>) -> Value { - xs - |> list.foldr( - dict.empty, - fn(inner, acc) { - expect Pair(p, [_, ..] as x) = inner - x - |> from_ascending_pairs_with(fn(v) { v != 0 }) - |> dict.insert_with( - acc, - p, - _, - fn(_, _, _) { - fail @"Duplicate policy in the asset list." 
- }, - ) - }, - ) - |> Value -} - -test from_asset_list_1() { - let v = from_asset_list([]) - v == zero -} - -test from_asset_list_2() fail { - let v = from_asset_list([Pair(#"33", [])]) - v == zero -} - -test from_asset_list_3() fail { - let v = from_asset_list([Pair(#"33", [Pair(#"", 0)])]) - v != zero -} - -test from_asset_list_4() { - let v = from_asset_list([Pair(#"33", [Pair(#"", 1)])]) - flatten(v) == [(#"33", #"", 1)] -} - -test from_asset_list_5() { - let v = from_asset_list([Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)])]) - flatten(v) == [(#"33", #"", 1), (#"33", #"33", 1)] -} - -test from_asset_list_6() fail { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - ], - ) - v != zero -} - -test from_asset_list_7() fail { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"", 1), Pair(#"", 1)]), - ], - ) - v != zero -} - -test from_asset_list_8() { - let v = - from_asset_list( - [ - Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"31", 1)]), Pair(#"35", [Pair(#"", 1)]), - ], - ) - flatten(v) == [ - (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), - ] -} - -test from_asset_list_9() { - let v = - from_asset_list( - [ - Pair(#"35", [Pair(#"", 1)]), Pair(#"33", [Pair(#"", 1), Pair(#"33", 1)]), - Pair(#"34", [Pair(#"31", 1)]), - ], - ) - flatten(v) == [ - (#"33", #"", 1), (#"33", #"33", 1), (#"34", #"31", 1), (#"35", #"", 1), - ] -} - -/// Construct a `Value` from a lovelace quantity. -/// -/// Friendly reminder: 1 Ada = 1.000.000 Lovelace -pub fn from_lovelace(quantity: Int) -> Value { - from_asset(ada_policy_id, ada_asset_name, quantity) -} - -/// Construct an empty `Value` with nothing in it. -pub const zero: Value = Value { inner: dict.empty } - -// ## Inspecting - -/// Check is a `Value` is zero. That is, it has no assets and holds no Ada/Lovelace. -pub fn is_zero(self: Value) -> Bool { - self == zero -} - -/// Efficiently compare two values together, allowing a custom behaviour for Ada/Lovelace. -/// The second parameter is provided as `Data`, allowing to conveniently compare serialized -/// datums or similar structurually equivalent types (such as `Pairs>`). -/// -/// The third argument is a callback function to assert the left and right lovelace -/// quantities. Its first argument refers to the quantity of the first argument of -/// `match`, and the second argument of the callback to the quantity of the second -/// argument of `match`. In the absence of lovelace in any value, it defaults to `0`. 
-/// -/// ```aiken -/// const value: Value = -/// assets.from_lovelace(30) -/// |> assets.add("foo", "bar", 1) -/// |> assets.add("foo", "baz", 42) -/// -/// const datum: Data = -/// assets.from_lovelace(20) -/// |> assets.add("foo", "bar", 1) -/// |> assets.add("foo", "baz", 42) -/// -/// True == assets.match(value, datum, >=) -/// -/// False == assets.match(value, datum, ==) -/// -/// True == assets.match(value, datum, fn(value_lovelace, datum_lovelace) { -/// 2 * datum_lovelace >= value_lovelace -/// }) -/// ``` -pub fn match( - left: Value, - right: Data, - assert_lovelace: fn(Lovelace, Lovelace) -> Bool, -) -> Bool { - builtin.choose_data( - right, - False, - { - let (left_lovelace, left_assets) = dict.pop(left.inner, ada_policy_id) - let left_assets: Data = left_assets - let left_lovelace = - when left_lovelace is { - Some(tokens) -> builtin.head_list(dict.to_pairs(tokens)).2nd - None -> 0 - } - when builtin.un_map_data(right) is { - [] -> left_assets == right && assert_lovelace(left_lovelace, 0) - [first_asset, ..right_assets] -> - if first_asset.1st == builtin.b_data(ada_policy_id) { - and { - assert_lovelace( - left_lovelace, - builtin.un_i_data( - builtin.head_list(builtin.un_map_data(first_asset.2nd)).2nd, - ), - ), - left_assets == builtin.map_data(right_assets), - } - } else { - and { - assert_lovelace(left_lovelace, 0), - left_assets == right, - } - } - } - }, - False, - False, - False, - ) -} - -const fixture_match_value: Value = - zero - |> add(ada_policy_id, ada_asset_name, 42) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data_missing_foo_02: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("bar", "01", 42) - -const fixture_match_data_altered_foo_01: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 14) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - -const fixture_match_data_missing_bar: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - -const fixture_match_data_extra_policy: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("bar", "01", 42) - |> add("baz", "01", 1) - -const fixture_match_data_extra_asset: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - |> add("foo", "01", 1) - |> add("foo", "02", 1) - |> add("foo", "03", 1) - |> add("bar", "01", 42) - -const fixture_match_data_no_assets: Data = - zero - |> add(ada_policy_id, ada_asset_name, 14) - -test match_1() { - match(fixture_match_value, fixture_match_data, fn(_, _) { True }) -} - -test match_2() { - !match( - fixture_match_value, - fixture_match_data, - fn(source, target) { source == target }, - ) -} - -test match_3() { - !match( - fixture_match_value, - fixture_match_data_missing_foo_02, - fn(_, _) { True }, - ) -} - -test match_4() { - !match(fixture_match_value, fixture_match_data_missing_bar, fn(_, _) { True }) -} - -test match_5() { - !match( - fixture_match_value, - fixture_match_data_altered_foo_01, - fn(_, _) { True }, - ) -} - -test match_6() { - !match( - fixture_match_value, - fixture_match_data_extra_policy, - fn(_, _) { True }, - ) -} - -test match_7() { - !match(fixture_match_value, fixture_match_data_extra_asset, fn(_, _) { True }) -} - -test match_8() { - 
!match(fixture_match_value, fixture_match_data_no_assets, fn(_, _) { True }) -} - -test match_9() { - match(zero, zero, ==) -} - -test match_10() { - match( - without_lovelace(fixture_match_value), - without_lovelace(fixture_match_value), - fn(left, right) { left == 0 && right == 0 }, - ) -} - -test match_11() { - match( - without_lovelace(fixture_match_value), - fixture_match_value, - fn(left, right) { left == 0 && right > 0 }, - ) -} - -test match_12() { - match( - fixture_match_value, - without_lovelace(fixture_match_value), - fn(left, right) { left > 0 && right == 0 }, - ) -} - -test match_13() { - match( - zero |> add(ada_policy_id, ada_asset_name, 42), - zero, - fn(left, right) { left == 42 && right == 0 }, - ) -} - -test match_14() { - match( - zero, - zero |> add(ada_policy_id, ada_asset_name, 42), - fn(left, right) { left == 0 && right == 42 }, - ) -} - -const fixture_match_benchmark_left: Value = - zero - |> add(ada_policy_id, ada_asset_name, 1337) - |> add( - #"0246a14d04c3a0e9b65f6b90a3d1aa5faee5d56ab1e30ec7e8b02f29", - "MATTR", - 200, - ) - |> add( - #"0a9e126256cb38c4865cdac6eb2ada51c328ba0df2ebde22ae126c0d", - "ProphecyPoster076", - 1, - ) - |> add( - #"1774343241680e4daef7cbfe3536fc857ce23fb66cd0b66320b2e3dd", - "BISON", - 12_004_999_999, - ) - |> add( - #"279c909f348e533da5808898f87f9a14bb2c3dfbbacccd631d927a3f", - "SNEK", - 1486, - ) - |> add( - #"651dfc074202423585996ffa717cb45237d307e705e2cc3dab1ccabd", - "MAYZSilverFoundersEdition0035", - 1, - ) - |> add( - #"63df49056617dd14034986cf7c250bad6552fd2f0f9c71d797932008", - "CardanoSpaceSession", - 20, - ) - |> add( - #"5b01968867e13432afaa2f814e1d15e332d6cd0aa77e350972b0967d", - "ADAOGovernanceToken", - 1, - ) - |> add( - #"a0028f350aaabe0545fdcb56b039bfb08e4bb4d8c4d7c3c7d481c235", - "HOSKY", - 400_001_000, - ) - |> add( - #"da8c30857834c6ae7203935b89278c532b3995245295456f993e1d24", - "LQ", - 10_635_899, - ) - |> add( - #"95d9a98c2f7999a3d5e0f4d795cb1333837c09eb0f24835cd2ce954c", - "GrandmasterAdventurer659", - 1, - ) - |> add( - #"702cbdb06a81ef2fa4f85f9e32159c03f502539d762a71194fc11eb3", - "AdventurerOfThiolden8105", - 1, - ) - |> add( - #"d0112837f8f856b2ca14f69b375bc394e73d146fdadcc993bb993779", - "DiscoSolaris3725", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld535", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld1213", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld1518", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld1537", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld4199", - 1, - ) - |> add( - #"8dd5717e7d4d993019dbd788c19837910e3fcf647ab282f828c80a7a", - "CardaWorld3767", - 1, - ) - |> add( - #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", - "Algae1", - 1, - ) - |> add( - #"7597444754551a8c17edbf7291cdaeca898ca02ee4e732b09a949396", - "Algae2", - 1, - ) - -const fixture_match_benchmark_right: Data = fixture_match_benchmark_left - -test match_benchmark() { - match(fixture_match_benchmark_left, fixture_match_benchmark_right, ==) -} - -test match_benchmark_vs() { - let data: Data = fixture_match_benchmark_right - expect pairs: Pairs> = data - fixture_match_benchmark_left == from_asset_list(pairs) -} - -/// A specialized version of `quantity_of` for the Ada currency. 
-pub fn lovelace_of(self: Value) -> Int { - quantity_of(self, ada_policy_id, ada_asset_name) -} - -/// A list of all token policies in that Value with non-zero tokens. -pub fn policies(self: Value) -> List { - dict.keys(self.inner) -} - -/// Extract the quantity of a given asset. -pub fn quantity_of( - self: Value, - policy_id: PolicyId, - asset_name: AssetName, -) -> Int { - self.inner - |> dict.get(policy_id) - |> option.and_then(dict.get(_, asset_name)) - |> option.or_else(0) -} - -/// Get all tokens associated with a given policy. -pub fn tokens(self: Value, policy_id: PolicyId) -> Dict { - self.inner - |> dict.get(policy_id) - |> option.or_else(dict.empty) -} - -// ## Combining - -/// Add a (positive or negative) quantity of a single token to a assets. -/// This is more efficient than [`merge`](#merge) for a single asset. -pub fn add( - self: Value, - policy_id: PolicyId, - asset_name: AssetName, - quantity: Int, -) -> Value { - if quantity == 0 { - self - } else { - let helper = - fn(_, left, _right) { - let inner_result = - dict.insert_with( - left, - asset_name, - quantity, - fn(_k, ql, qr) { - let q = ql + qr - if q == 0 { - None - } else { - Some(q) - } - }, - ) - if dict.is_empty(inner_result) { - None - } else { - Some(inner_result) - } - } - - Value( - dict.insert_with( - self.inner, - policy_id, - dict.from_ascending_pairs([Pair(asset_name, quantity)]), - helper, - ), - ) - } -} - -test add_1() { - let v = - zero - |> add(#"acab", #"beef", 321) - |> add(#"acab", #"beef", -321) - v == zero -} - -test add_2() { - let v = - from_lovelace(123) - |> add(#"acab", #"beef", 321) - |> add(#"acab", #"beef", -1 * 321) - v == from_lovelace(123) -} - -test add_3() { - let v = - from_lovelace(1) - |> add(ada_policy_id, ada_asset_name, 2) - |> add(ada_policy_id, ada_asset_name, 3) - v == from_lovelace(6) -} - -test add_4() { - let v = - zero - |> add(#"acab", #"beef", 0) - v == zero -} - -test add_5() { - let v = - zero - |> add(#"acab", #"beef", 0) - |> add(#"acab", #"beef", 0) - v == zero -} - -/// Combine two `Value` together. -pub fn merge(left v0: Value, right v1: Value) -> Value { - Value( - dict.union_with( - v0.inner, - v1.inner, - fn(_, a0, a1) { - let result = - dict.union_with( - a0, - a1, - fn(_, q0, q1) { - let q = q0 + q1 - if q == 0 { - None - } else { - Some(q) - } - }, - ) - if dict.is_empty(result) { - None - } else { - Some(result) - } - }, - ), - ) -} - -test merge_1() { - let v1 = from_lovelace(1) - let v2 = from_lovelace(-1) - merge(v1, v2) == zero -} - -test merge_2() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"01", #"", 2) - let v3 = from_asset(#"02", #"", 3) - let v = - from_lovelace(42) - |> merge(v3) - |> merge(v1) - |> merge(v2) - - flatten(v) == [ - (#"", #"", 42), (#"00", #"", 1), (#"01", #"", 2), (#"02", #"", 3), - ] -} - -test merge_3() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"00", #"", -1) - let v3 = from_asset(#"01", #"", 1) - - let v = - zero - |> merge(v1) - |> merge(v2) - |> merge(v3) - - flatten(v) == [(#"01", #"", 1)] -} - -test merge_4() { - let v1 = from_asset(#"00", #"", 1) - let v2 = from_asset(#"00", #"", -1) - - merge(v1, v2) == zero -} - -test merge_5() { - let v = - zero - |> add(#"acab", #"beef", 0) - - merge(zero, v) == zero -} - -/// Negates quantities of all tokens (including Ada) in that `Value`. 
-/// -/// ``` -/// v1 -/// |> assets.negate -/// |> assets.merge(v1) -/// |> assets.is_zero -/// // True -/// ``` -pub fn negate(self: Value) -> Value { - dict.map(self.inner, fn(_, a) { dict.map(a, fn(_, q) { 0 - q }) }) - |> Value -} - -/// Get a subset of the assets restricted to the given policies. -pub fn restricted_to(self: Value, mask: List) -> Value { - list.foldr( - policies(self), - zero, - fn(policy_id, value) { - if list.has(mask, policy_id) { - dict.foldr( - tokens(self, policy_id), - value, - fn(asset_name, quantity, value) { - add(value, policy_id, asset_name, quantity) - }, - ) - } else { - value - } - }, - ) -} - -test restricted_to_1() { - let self = from_lovelace(42) |> add("foo", "", 1) - restricted_to(self, []) == zero -} - -test restricted_to_2() { - let self = from_lovelace(42) |> add("foo", "", 1) - restricted_to(self, [ada_policy_id]) == from_lovelace(42) -} - -test restricted_to_3() { - let self = from_lovelace(42) |> add("foo", "", 1) |> add("bar", "", 1) - restricted_to(self, ["foo", "bar"]) == without_lovelace(self) -} - -test restricted_to_4() { - let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) - restricted_to(self, ["foo"]) == without_lovelace(self) -} - -test restricted_to_5() { - let self = from_lovelace(42) |> add("foo", "bar", 1) |> add("foo", "baz", 1) - restricted_to(self, [ada_policy_id, "foo"]) == self -} - -/// Get a `Value` excluding Ada. -pub fn without_lovelace(self: Value) -> Value { - dict.delete(self.inner, ada_policy_id) - |> Value -} - -test without_lovelace_1() { - let v = from_lovelace(1000000) - without_lovelace(v) == zero -} - -test without_lovelace_2() { - let v = from_lovelace(1000000) - let v2 = from_lovelace(50000000) - without_lovelace(v) == without_lovelace(v2) -} - -test without_lovelace_3() { - let v = - from_asset(#"010203", #"040506", 100) - |> add(ada_policy_id, ada_asset_name, 100000000) - let v2 = from_asset(#"010203", #"040506", 100) - without_lovelace(v) == without_lovelace(v2) && without_lovelace(v) == v2 -} - -// ## Transforming - -/// Flatten a `Value` as list of 3-tuple `(PolicyId, AssetName, Quantity)`. -/// -/// Handy to manipulate values as uniform lists. -pub fn flatten(self: Value) -> List<(PolicyId, AssetName, Int)> { - dict.foldr( - self.inner, - [], - fn(policy_id, asset_list, value) { - dict.foldr( - asset_list, - value, - fn(asset_name, quantity, xs) { - [(policy_id, asset_name, quantity), ..xs] - }, - ) - }, - ) -} - -/// Flatten a `Value` as a list of results, possibly discarding some along the way. -/// -/// When the transform function returns `None`, the result is discarded altogether. 
-pub fn flatten_with( - self: Value, - with: fn(PolicyId, AssetName, Int) -> Option, -) -> List { - dict.foldr( - self.inner, - [], - fn(policy_id, asset_list, value) { - dict.foldr( - asset_list, - value, - fn(asset_name, quantity, xs) { - when with(policy_id, asset_name, quantity) is { - None -> xs - Some(x) -> [x, ..xs] - } - }, - ) - }, - ) -} - -test flatten_with_1() { - flatten_with(zero, fn(p, a, q) { Some((p, a, q)) }) == [] -} - -test flatten_with_2() { - let v = - zero - |> add("a", "1", 14) - |> add("b", "", 42) - |> add("a", "2", 42) - - flatten_with( - v, - fn(p, a, q) { - if q == 42 { - Some((p, a)) - } else { - None - } - }, - ) == [("a", "2"), ("b", "")] -} - -/// Reduce a value into a single result -/// -/// ``` -/// assets.zero -/// |> assets.add("a", "1", 10) -/// |> assets.add("b", "2", 20) -/// |> assets.reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) -/// // 30 -/// ``` -pub fn reduce( - self: Value, - start: result, - with: fn(PolicyId, AssetName, Int, result) -> result, -) -> result { - dict.foldr( - self.inner, - start, - fn(policy_id, asset_list, result) { - dict.foldr(asset_list, result, with(policy_id, _, _, _)) - }, - ) -} - -test reduce_1() { - let v = - zero - |> add("a", "1", 10) - |> add("b", "2", 20) - let result = reduce(v, 0, fn(_, _, quantity, acc) { acc + quantity }) - result == 30 -} - -test reduce_2() { - let v = - zero - |> add("a", "1", 5) - |> add("a", "2", 15) - |> add("b", "", 10) - let result = - reduce( - v, - [], - fn(policy_id, asset_name, _, acc) { [(policy_id, asset_name), ..acc] }, - ) - result == [("a", "1"), ("a", "2"), ("b", "")] -} - -test reduce_3() { - let v = zero - let result = reduce(v, 1, fn(_, _, quantity, acc) { acc + quantity }) - result == 1 -} - -/// Convert the value into a dictionary of dictionaries. -pub fn to_dict(self: Value) -> Dict> { - self.inner -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak deleted file mode 100644 index f0b6d258..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/certificate.ak +++ /dev/null @@ -1,93 +0,0 @@ -use aiken/crypto.{Blake2b_224, Hash, VerificationKey, VerificationKeyHash} -use cardano/address.{Credential} -use cardano/assets.{Lovelace} - -pub type StakePoolId = - Hash - -/// An on-chain certificate attesting of some operation. Publishing -/// certificates triggers different kind of rules; most of the time, -/// they require signatures from specific keys. -pub type Certificate { - /// Register a stake credential with an optional deposit amount. - /// The deposit is always present when using the new registration certificate - /// format available since the Conway era. - RegisterCredential { - credential: Credential, - /// > [!NOTE] - /// > The `deposit` ought to be an `Option`, but due to unfortunate - /// > circumstances it will always be instantiated to `None` even when set in - /// > the host transaction. This is what the `Never` type captures here. - deposit: Never, - } - /// Un-Register a stake credential with an optional refund amount - /// The deposit is always present when using the new de-registration certificate - /// format available since the Conway era. 
- UnregisterCredential { - credential: Credential, - /// > [!NOTE] - /// > The `refund` ought to be an `Option<Lovelace>`, but due to unfortunate - /// > circumstances it will always be instantiated to `None` even when set in - /// > the host transaction. This is what the `Never` type captures here. - refund: Never, - } - /// Delegate stake to a [Delegate](#Delegate). - DelegateCredential { credential: Credential, delegate: Delegate } - /// Register and delegate a staking credential to a Delegatee in one certificate. - RegisterAndDelegateCredential { - credential: Credential, - delegate: Delegate, - deposit: Lovelace, - } - /// Register a delegate representative (a.k.a. DRep). The deposit is explicit and - /// is refunded when the delegate steps down (unregister). - RegisterDelegateRepresentative { - delegate_representative: Credential, - deposit: Lovelace, - } - /// Update a delegate representative (a.k.a. DRep). The certificate also contains - /// metadata which aren't visible on-chain. - UpdateDelegateRepresentative { delegate_representative: Credential } - /// Unregister a delegate representative, and refund its past deposit. - UnregisterDelegateRepresentative { - delegate_representative: Credential, - refund: Lovelace, - } - /// Register a new stake pool. - RegisterStakePool { - /// The hash digest of the stake pool's cold (public) key - stake_pool: StakePoolId, - /// The hash digest of the stake pool's VRF (public) key - vrf: VerificationKeyHash, - } - /// Retire a stake pool. 'at_epoch' indicates in which epoch the retirement will take place. - RetireStakePool { stake_pool: StakePoolId, at_epoch: Int } - /// Authorize a hot credential for a specific committee member's cold credential. - AuthorizeConstitutionalCommitteeProxy { - constitutional_committee_member: Credential, - proxy: Credential, - } - /// Step down from the constitutional committee as a member. - RetireFromConstitutionalCommittee { - constitutional_committee_member: Credential, - } -} - -/// A type of stake delegation that can be either block-production, vote or -/// both. Note that delegation types aren't cancelling one another, so it is -/// possible to delegate block production in one transaction, and delegate vote -/// in another. This second delegation **does NOT** invalidate the first one.
-pub type Delegate { - DelegateBlockProduction { stake_pool: StakePoolId } - DelegateVote { delegate_representative: DelegateRepresentative } - DelegateBoth { - stake_pool: StakePoolId, - delegate_representative: DelegateRepresentative, - } -} - -pub type DelegateRepresentative { - Registered(Credential) - AlwaysAbstain - AlwaysNoConfidence -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak deleted file mode 100644 index 3ec96800..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance.ak +++ /dev/null @@ -1,109 +0,0 @@ -use aiken/collection.{Index} -use aiken/crypto.{Blake2b_256, Hash, ScriptHash, VerificationKeyHash} -use aiken/math/rational.{Rational} -use cardano/address.{Credential} -use cardano/assets.{Lovelace} -use cardano/governance/protocol_parameters.{ProtocolParametersUpdate} - -pub type ProposalProcedure { - deposit: Lovelace, - return_address: Credential, - governance_action: GovernanceAction, -} - -pub type GovernanceAction { - ProtocolParameters { - /// The last governance action of type 'ProtocolParameters'. They must all - /// form a chain. - ancestor: Option, - /// The new proposed protocol parameters. Only values set to `Some` are relevant. - new_parameters: ProtocolParametersUpdate, - /// The optional guardrails script defined in the constitution. The script - /// is executed by the ledger in addition to the hard-coded ledger rules. - /// - /// It must pass for the new protocol parameters to be deemed valid. - guardrails: Option, - } - HardFork { - /// The last governance action of type `HardFork`. They must all - /// form a chain. - ancestor: Option, - /// The new proposed version. Few rules apply to proposing new versions: - /// - /// - The `major` component, if incremented, must be exactly one more than the current. - /// - The `minor` component, if incremented, must be exactly one more than the current. - /// - If the `major` component is incremented, `minor` must be set to `0`. - /// - Neither `minor` nor `major` can be decremented. - new_version: ProtocolVersion, - } - TreasuryWithdrawal { - /// A collection of beneficiaries, which can be plain verification key - /// hashes or script hashes (e.g. DAO). - beneficiaries: Pairs, - /// The optional guardrails script defined in the constitution. The script - /// is executed by the ledger in addition to the hard-coded ledger rules. - /// - /// It must pass for the withdrawals to be authorized. - guardrails: Option, - } - NoConfidence { - /// The last governance action of type `NoConfidence` or - /// `ConstitutionalCommittee`. They must all / form a chain. - ancestor: Option, - } - ConstitutionalCommittee { - /// The last governance action of type `NoConfidence` or - /// `ConstitutionalCommittee`. They must all / form a chain. - ancestor: Option, - /// Constitutional members to be removed. - evicted_members: List, - /// Constitutional members to be added. - added_members: Pairs, - /// The new quorum value, as a ratio of a numerator and a denominator. The - /// quorum specifies the threshold of 'Yes' votes necessary for the - /// constitutional committee to accept a proposal procedure. - quorum: Rational, - } - NewConstitution { - /// The last governance action of type `Constitution` or - /// `ConstitutionalCommittee`. 
They must all / form a chain. - ancestor: Option, - /// The new proposed constitution. - constitution: Constitution, - } - NicePoll -} - -pub type Vote { - No - Yes - Abstain -} - -pub type TransactionId = - Hash - -pub type GovernanceActionId { - transaction: TransactionId, - proposal_procedure: Index, -} - -pub type ProtocolVersion { - major: Int, - minor: Int, -} - -pub type Constitution { - guardrails: Option, -} - -/// An epoch number after which constitutional committee member -/// mandate expires. -pub type Mandate = - Int - -pub type Voter { - ConstitutionalCommitteeMember(Credential) - DelegateRepresentative(Credential) - StakePool(VerificationKeyHash) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak deleted file mode 100644 index d9e7be95..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/protocol_parameters.ak +++ /dev/null @@ -1,360 +0,0 @@ -use aiken/math/rational.{Rational} -use cardano/assets.{Lovelace} - -pub opaque type ProtocolParametersUpdate { - inner: Pairs, -} - -pub type ScriptExecutionPrices { - memory: Rational, - cpu: Rational, -} - -pub type ExecutionUnits { - memory: Int, - cpu: Int, -} - -pub type StakePoolOperatorVotingThresholds { - motion_of_no_confidence: Rational, - constitutional_committee: ConstitutionalCommitteeThresholds, - hard_fork: Rational, - protocol_parameters: ProtocolParametersThresholds< - Rational, - Void, - Void, - Void, - Void, - >, -} - -pub type DelegateRepresentativeVotingThresholds { - motion_of_no_confidence: Rational, - constitutional_committee: ConstitutionalCommitteeThresholds, - constitution: Rational, - hard_fork: Rational, - protocol_parameters: ProtocolParametersThresholds< - Void, - Rational, - Rational, - Rational, - Rational, - >, - treasury_withdrawal: Rational, -} - -pub type ProtocolParametersThresholds< - security, - network, - economic, - technical, - governance, -> { - security_group: security, - network_group: network, - economic_group: economic, - technical_group: technical, - governance_group: governance, -} - -pub type ConstitutionalCommitteeThresholds { - default: Rational, - under_no_confidence: Rational, -} - -/// The linear coefficient that intervenes in the transaction fee calculation. -/// It is multiplied by the size of the transaction in bytes to obtain a Lovelace value. -pub fn min_fee_coefficient(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 0, into_int) -} - -/// The constant factor that intervenes in the transaction fee calculation. It is -/// a flat cost of lovelace that is added to every fee calculation. -pub fn min_fee_constant(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 1, into_int) -} - -/// The maximum size of a serialized block body, expressed in bytes. -pub fn max_block_body_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 2, into_int) -} - -/// The maximum size of a serialized transaction (body + witnesses), expressed in bytes. -pub fn max_transaction_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 3, into_int) -} - -/// The maximum size of a serialized block header, expressed in bytes. 
-pub fn max_block_header_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 4, into_int) -} - -/// The required deposit amount when registering stake credentials, expressed in Lovelace. -pub fn stake_credential_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 5, into_int) -} - -/// The required deposit amount when registering a stake pool, expressed in Lovelace. -pub fn stake_pool_deposit(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 6, into_int) -} - -/// The maximum number of epoch in the future allowed for a stake pool retirement to be scheduled. -pub fn stake_pool_retirement_horizon( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 7, into_int) -} - -/// The desired/optimal number of fully saturated stake pools in the system. Also known as the _'k-parameter'_. -pub fn desired_number_of_stake_pools( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 8, into_int) -} - -/// A parameter controlling the influence of an pool owner's pledge on the rewards. Also known as _'a0'_. -pub fn stake_pool_pledge_influence( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 9, into_rational) -} - -/// The monetary expansion parameter, controlling the fraction of Ada put in circulation on every epoch through the incentivies model. Also known as _'ρ'_. -pub fn monetary_expansion(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 10, into_rational) -} - -/// The parameter controlling what fraction (%) of available rewards is sent to the treasury on every epoch. Also known as _'τ'_. -pub fn treasury_expansion(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 11, into_rational) -} - -/// Minimum authorized constant cost that stake pools can declare when registering, expressed in Lovelace. -pub fn min_stake_pool_cost(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 16, into_int) -} - -/// The linear coefficient that intervenes in the calculation of the minimum Ada value that any UTxO must hold. It is expressed in Lovelace per Byte, and is also known as the 'coins per utxo byte' parameter. -pub fn min_utxo_deposit_coefficient( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 17, into_int) -} - -/// The costs associated with the various operations of the Plutus Virtual Machine, which can be different for each Plutus version. -pub fn cost_models(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 18, identity) -} - -/// The price, in Lovelace per unit, of the execution units corresponding to cpu and memory usage of on-chain scripts. -pub fn script_execution_prices( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 19, into_script_execution_prices) -} - -/// The maximum execution units allowed for a single transaction. -pub fn max_transaction_execution_units( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 20, into_execution_units) -} - -/// The maximum execution units allowed for a single block. -pub fn max_block_execution_units( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 21, into_execution_units) -} - -/// The maximum size of a serialized value in a transaction output. This effectively limits -/// the maximum kinds of assets that can be sent in a single output. 
It is expressed in bytes. -pub fn max_value_size(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 22, into_int) -} - -/// The scaling factor applied to the transaction cost for defining the minimum collateral -/// amount. It is expressed in percent points (so 100 = 100%). -pub fn collateral_percentage(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 23, into_int) -} - -/// The maximum number of collateral inputs allowed in the transaction. -pub fn max_collateral_inputs(self: ProtocolParametersUpdate) -> Option { - get_protocol_param(self.inner, 24, into_int) -} - -/// The various governance voting thresholds pertaining to stake pool operators. -pub fn stake_pool_operator_voting_thresholds( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 25, into_spo_voting_thresholds) -} - -/// The various governance voting thresholds pertaining to delegate representatives -/// (a.k.a DReps). -pub fn delegate_representative_voting_thresholds( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 26, into_drep_voting_thresholds) -} - -/// The minimum number of members in the constitutional committee. Any updates of the committee -/// must leave at least this number of members. -pub fn min_constitutional_committee_size( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 27, into_int) -} - -/// The maximum length of a constitutional committee member, expressed in number of epochs. -pub fn max_constitutional_committee_mandate( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 28, into_int) -} - -/// The lifetime of any governance proposal. An action that hasn't been approved beyond that -/// period is considered inactive and discarded. It is expressed in number of epochs. -pub fn governance_proposal_lifetime( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 29, into_int) -} - -/// The required deposit amount for governance proposal procedures, expressed in Lovelace. -pub fn governance_proposal_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 30, into_int) -} - -/// The required deposit amount when registering as a delegate representative, expressed in -/// Lovelace. -pub fn delegate_representative_deposit( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 31, into_int) -} - -/// The maximum number of epochs that a delegate representative can stay inactive (i.e. no -/// voting) without becoming _inactive_ and removed from thresholds calculations. -pub fn delegate_representative_max_idle_time( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 32, into_int) -} - -/// The base tier fee coefficient for reference scripts. Reference scripts gets increasingly -/// more expensives every ~24KB, the base coefficient is a multiplicating factor which grows -/// exponentially with each tier. 
-pub fn reference_scripts_tier_fee_initial_factor( - self: ProtocolParametersUpdate, -) -> Option { - get_protocol_param(self.inner, 33, into_rational) -} - -// Internals ------------------------------------------------------------------- - -type ProtocolParametersIndex = - Int - -fn get_protocol_param( - self: Pairs, - ix: ProtocolParametersIndex, - into: fn(Data) -> a, -) -> Option { - when self is { - [] -> None - [Pair(jx, param), ..tail] -> - if ix == jx { - Some(into(param)) - } else { - get_protocol_param(tail, ix, into) - } - } -} - -fn into_int(param: Data) -> Int { - expect param: Int = param - param -} - -fn into_rational(param: Data) -> Rational { - expect [numerator, denominator]: List = param - expect Some(r) = rational.new(numerator, denominator) - r -} - -fn into_execution_units(param: Data) -> ExecutionUnits { - expect [memory, cpu]: List = param - ExecutionUnits { memory, cpu } -} - -fn into_script_execution_prices(param: Data) -> ScriptExecutionPrices { - expect [memory, cpu]: List = param - let memory = into_rational(memory) - let cpu = into_rational(cpu) - ScriptExecutionPrices { memory, cpu } -} - -fn into_spo_voting_thresholds(param: Data) -> StakePoolOperatorVotingThresholds { - expect [ - motion_of_no_confidence, constitutional_committee, - constitutional_committee_under_no_confidence, hard_fork, - protocol_parameters_security_group, - ]: List = param - - StakePoolOperatorVotingThresholds { - motion_of_no_confidence: into_rational(motion_of_no_confidence), - constitutional_committee: ConstitutionalCommitteeThresholds { - default: into_rational(constitutional_committee), - under_no_confidence: into_rational( - constitutional_committee_under_no_confidence, - ), - }, - hard_fork: into_rational(hard_fork), - protocol_parameters: ProtocolParametersThresholds { - security_group: into_rational(protocol_parameters_security_group), - network_group: Void, - economic_group: Void, - technical_group: Void, - governance_group: Void, - }, - } -} - -fn into_drep_voting_thresholds( - param: Data, -) -> DelegateRepresentativeVotingThresholds { - expect [ - motion_of_no_confidence, constitutional_committee, - constitutional_committee_under_no_confidence, constitution, hard_fork, - protocol_parameters_network_group, protocol_parameters_economic_group, - protocol_parameters_technical_group, protocol_parameters_governance_group, - treasury_withdrawal, - ]: List = param - - DelegateRepresentativeVotingThresholds { - motion_of_no_confidence: into_rational(motion_of_no_confidence), - constitutional_committee: ConstitutionalCommitteeThresholds { - default: into_rational(constitutional_committee), - under_no_confidence: into_rational( - constitutional_committee_under_no_confidence, - ), - }, - constitution: into_rational(constitution), - hard_fork: into_rational(hard_fork), - protocol_parameters: ProtocolParametersThresholds { - security_group: Void, - network_group: into_rational(protocol_parameters_network_group), - economic_group: into_rational(protocol_parameters_economic_group), - technical_group: into_rational(protocol_parameters_technical_group), - governance_group: into_rational(protocol_parameters_governance_group), - }, - treasury_withdrawal: into_rational(treasury_withdrawal), - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak deleted file mode 
100644 index e723e2d5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/governance/voter.ak +++ /dev/null @@ -1,62 +0,0 @@ -use aiken/primitive/bytearray -use cardano/address.{Script} -use cardano/address/credential -use cardano/governance.{ - ConstitutionalCommitteeMember, DelegateRepresentative, StakePool, Voter, -} - -pub fn compare(left: Voter, right: Voter) -> Ordering { - when left is { - ConstitutionalCommitteeMember(left) -> - when right is { - ConstitutionalCommitteeMember(right) -> credential.compare(left, right) - _ -> Less - } - DelegateRepresentative(left) -> - when right is { - DelegateRepresentative(right) -> credential.compare(left, right) - ConstitutionalCommitteeMember(_) -> Greater - _ -> Less - } - StakePool(left) -> - when right is { - StakePool(right) -> bytearray.compare(left, right) - _ -> Greater - } - } -} - -test compare_matrix() { - let cc0 = ConstitutionalCommitteeMember(Script("0")) - let cc1 = ConstitutionalCommitteeMember(Script("1")) - - let drep0 = DelegateRepresentative(Script("0")) - let drep1 = DelegateRepresentative(Script("1")) - - let spo0 = StakePool("0") - let spo1 = StakePool("1") - - and { - (compare(cc0, cc0) == Equal)?, - (compare(cc0, cc1) == Less)?, - (compare(cc1, cc0) == Greater)?, - (compare(drep0, drep0) == Equal)?, - (compare(drep0, drep1) == Less)?, - (compare(drep1, drep0) == Greater)?, - (compare(spo0, spo0) == Equal)?, - (compare(spo0, spo1) == Less)?, - (compare(spo1, spo0) == Greater)?, - (compare(cc0, drep0) == Less)?, - (compare(cc0, drep1) == Less)?, - (compare(cc0, spo0) == Less)?, - (compare(cc0, spo1) == Less)?, - (compare(drep0, cc0) == Greater)?, - (compare(drep0, cc1) == Greater)?, - (compare(drep0, spo0) == Less)?, - (compare(drep0, spo1) == Less)?, - (compare(spo0, cc0) == Greater)?, - (compare(spo0, cc1) == Greater)?, - (compare(spo0, drep0) == Greater)?, - (compare(spo0, drep1) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak deleted file mode 100644 index ff73836a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/script_context.ak +++ /dev/null @@ -1,62 +0,0 @@ -//// This module contains utilities for manually dealing with [`ScriptContext`](#ScriptContext). This is only ever useful for writing custom `else` handlers in validators. -//// -//// > [!NOTE] -//// > Unless you know what you're doing, you should prefer using named handlers: -//// > -//// > - `mint` -//// > - `spend` -//// > - `withdraw` -//// > - `publish` -//// > - `vote` -//// > - `propose` - -use aiken/collection.{Index} -use cardano/address.{Credential} -use cardano/assets.{PolicyId} -use cardano/certificate.{Certificate} -use cardano/governance.{ProposalProcedure, Voter} -use cardano/transaction.{OutputReference, Redeemer, Transaction} - -/// A context given to a script by the Cardano ledger when being executed. -/// -/// The context contains information about the entire transaction that contains -/// the script. 
The transaction may also contain other scripts; to distinguish -/// between multiple scripts, the [`ScriptContext`](#ScriptContext) contains a -/// [`ScriptInfo`](#ScriptInfo) which indicates which script (or, for what -/// purpose) the transaction is being executed. -pub type ScriptContext { - transaction: Transaction, - redeemer: Redeemer, - info: ScriptInfo, -} - -/// Characterizes the script information. The main (and only) difference with [`ScriptPurpose`](./transaction.html#ScriptPurpose) resides in the `Spending` variant which here contains a second field `datum: Option`. -pub type ScriptInfo { - /// For scripts executed as minting/burning policies, to insert - /// or remove assets from circulation. It's parameterized by the identifier - /// of the associated policy. - Minting(PolicyId) - /// For scripts that are used as payment credentials for addresses in - /// transaction outputs. They govern the rule by which the output they - /// reference can be spent. - Spending { output: OutputReference, datum: Option } - /// For scripts that validate reward withdrawals from a reward account. - /// - /// The argument identifies the target reward account. - Withdrawing(Credential) - /// Needed when delegating to a pool using stake credentials defined as a - /// custom script. This purpose is also triggered when de-registering such - /// stake credentials. - /// - /// The Int is a 0-based index of the given `Certificate` in `certificates`. - Publishing { at: Index, certificate: Certificate } - /// Voting for a type of voter using a governance action id to vote - /// yes / no / abstain inside a transaction. - /// - /// The voter is who is doing the governance action. - Voting(Voter) - /// Used to propose a governance action. - /// - /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. - Proposing { at: Index, proposal_procedure: ProposalProcedure } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak deleted file mode 100644 index 6511a596..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction.ak +++ /dev/null @@ -1,225 +0,0 @@ -use aiken/builtin -use aiken/collection.{Index} -use aiken/collection/dict.{Dict} -use aiken/collection/list -use aiken/crypto.{ - Blake2b_256, DataHash, Hash, ScriptHash, VerificationKeyHash, blake2b_256, -} -use aiken/interval.{Interval} -use aiken/option -use cardano/address.{Address, Credential, Script, VerificationKey} -use cardano/assets.{Lovelace, PolicyId, Value} -use cardano/certificate.{Certificate} -use cardano/governance.{GovernanceActionId, ProposalProcedure, Vote, Voter} - -pub type TransactionId = - Hash - -/// Characterizes the script purpose. -pub type ScriptPurpose { - /// For scripts executed as minting/burning policies, to insert - /// or remove assets from circulation. It's parameterized by the identifier - /// of the associated policy. - Mint(PolicyId) - /// For scripts that are used as payment credentials for addresses in - /// transaction outputs. They govern the rule by which the output they - /// reference can be spent. - Spend(OutputReference) - /// For scripts that validate reward withdrawals from a reward account. - /// - /// The argument identifies the target reward account. 
- Withdraw(Credential) - /// Needed when delegating to a pool using stake credentials defined as a - /// custom script. This purpose is also triggered when de-registering such - /// stake credentials. - /// - /// The Int is a 0-based index of the given `Certificate` in `certificates`. - Publish { at: Index, certificate: Certificate } - /// Voting for a type of voter using a governance action id to vote - /// yes / no / abstain inside a transaction. - /// - /// The voter is who is doing the governance action. - Vote(Voter) - /// Used to propose a governance action. - /// - /// A 0-based index of the given `ProposalProcedure` in `proposal_procedures`. - Propose { at: Index, proposal_procedure: ProposalProcedure } -} - -/// A Cardano `Transaction`, as seen by on-chain scripts. -/// -/// Note that this is a representation of a transaction, and not the 1:1 -/// translation of the transaction as seen by the ledger. In particular, -/// on-chain scripts can't see inputs locked by bootstrap addresses, outputs -/// to bootstrap addresses or just transaction metadata. -pub type Transaction { - inputs: List, - reference_inputs: List, - outputs: List, - fee: Lovelace, - mint: Value, - certificates: List, - /// > [!IMPORTANT] - /// > Withdrawals are ordered by ascending [Credential](./credential.html#Credential). Yet, note that [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. - withdrawals: Pairs, - validity_range: ValidityRange, - extra_signatories: List, - /// > [!IMPORTANT] - /// > Redeemers are ordered by ascending [ScriptPurpose](./transaction.html#ScriptPurpose). - redeemers: Pairs, - datums: Dict, - id: TransactionId, - /// > [!IMPORTANT] - /// > Votes are ordered by ascending [Voter](./governance.html#Voter) and [GovernanceActionId](./governance.html#GovernanceActionId).
First constructor variants in a type are treated as lower indices; except for [Credential](./credential.html#Credential) where [`Script`](./credential.html#Credential) credentials are treated as **lower values** than [`VerificationKey`](./credential.html#Credential) credentials. - votes: Pairs>, - proposal_procedures: List, - current_treasury_amount: Option, - treasury_donation: Option, -} - -/// An interval of POSIX time, measured in **number of milliseconds** since 1970-01-01T00:00:00Z. -pub type ValidityRange = - Interval - -/// An `Input` made of an output reference and, the resolved value associated with that output. -pub type Input { - output_reference: OutputReference, - output: Output, -} - -/// An `OutputReference` is a unique reference to an output on-chain. The `output_index` -/// corresponds to the position in the output list of the transaction (identified by its id) -/// that produced that output -pub type OutputReference { - transaction_id: Hash, - output_index: Int, -} - -/// A transaction `Output`, with an address, a value and optional datums and script references. -pub type Output { - address: Address, - value: Value, - datum: Datum, - reference_script: Option, -} - -/// An output `Datum`. -pub type Datum { - NoDatum - /// A datum referenced by its hash digest. - DatumHash(DataHash) - /// A datum completely inlined in the output. - InlineDatum(Data) -} - -/// A type-alias for Redeemers, passed to scripts for validation. The `Data` is -/// opaque because it is user-defined and it is the script's responsibility to -/// parse it into its expected form. -pub type Redeemer = - Data - -// ## Querying - -/// Find an input by its [`OutputReference`](#OutputReference). This is typically used in -/// combination with the `Spend` [`ScriptPurpose`](#ScriptPurpose) to find a script's own -/// input. -/// -/// ```aiken -/// validator { -/// spend(datum, redeemer, my_output_reference, self) { -/// expect Some(input) = -/// self.inputs -/// |> transaction.find_input(my_output_reference) -/// } -/// } -/// ``` -pub fn find_input( - inputs: List, - output_reference: OutputReference, -) -> Option { - inputs - |> list.find(fn(input) { input.output_reference == output_reference }) -} - -/// Find a [`Datum`](#Datum) by its hash, if present. The function looks first for -/// datums in the witness set, and then for inline datums if it doesn't find any in -/// witnesses. -pub fn find_datum( - outputs: List, - datums: Dict, - datum_hash: DataHash, -) -> Option { - datums - |> dict.get(datum_hash) - |> option.or_try( - fn() { - outputs - |> list.filter_map( - fn(output) { - when output.datum is { - InlineDatum(data) -> - if blake2b_256(builtin.serialise_data(data)) == datum_hash { - Some(data) - } else { - None - } - _ -> None - } - }, - ) - |> list.head - }, - ) -} - -/// Find all outputs that are paying into the given script hash, if any. This is useful for -/// contracts running over multiple transactions. -pub fn find_script_outputs( - outputs: List, - script_hash: ScriptHash, -) -> List { - outputs - |> list.filter( - fn(output) { - when output.address.payment_credential is { - Script(addr_script_hash) -> script_hash == addr_script_hash - VerificationKey(_) -> False - } - }, - ) -} - -// ## Testing - -/// A placeholder / empty `Transaction` to serve as a base in a transaction -/// builder. This is particularly useful for constructing test transactions. 
-/// -/// Every field is empty or null, and we have in particular: -/// -/// ```aiken -/// use aiken/interval -/// -/// transaction.placeholder.id == -/// #"0000000000000000000000000000000000000000000000000000000000000000" -/// -/// transaction.placeholder.validity_range == interval.everything -/// ``` -pub const placeholder: Transaction = - Transaction { - inputs: [], - reference_inputs: [], - outputs: [], - fee: 0, - mint: assets.zero, - certificates: [], - withdrawals: [], - validity_range: interval.everything, - extra_signatories: [], - redeemers: [], - datums: dict.empty, - id: #"0000000000000000000000000000000000000000000000000000000000000000", - votes: [], - proposal_procedures: [], - current_treasury_amount: None, - treasury_donation: None, - } diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak deleted file mode 100644 index 70b7550d..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/output_reference.ak +++ /dev/null @@ -1,23 +0,0 @@ -use aiken/primitive/bytearray -use aiken/primitive/int -use cardano/transaction.{OutputReference} - -pub fn compare(left: OutputReference, right: OutputReference) -> Ordering { - when bytearray.compare(left.transaction_id, right.transaction_id) is { - Equal -> int.compare(left.output_index, right.output_index) - ordering -> ordering - } -} - -test compare_matrix() { - and { - (compare(OutputReference("", 0), OutputReference("", 0)) == Equal)?, - (compare(OutputReference("00", 42), OutputReference("00", 42)) == Equal)?, - (compare(OutputReference("00", 0), OutputReference("01", 0)) == Less)?, - (compare(OutputReference("01", 0), OutputReference("00", 0)) == Greater)?, - (compare(OutputReference("00", 42), OutputReference("01", 14)) == Less)?, - (compare(OutputReference("01", 14), OutputReference("00", 42)) == Greater)?, - (compare(OutputReference("", 42), OutputReference("", 14)) == Greater)?, - (compare(OutputReference("", 14), OutputReference("", 42)) == Less)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak deleted file mode 100644 index 4fef2cbe..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/aiken-lang-stdlib/lib/cardano/transaction/script_purpose.ak +++ /dev/null @@ -1,126 +0,0 @@ -use aiken/primitive/bytearray -use aiken/primitive/int -use cardano/address.{Script, VerificationKey} -use cardano/address/credential -use cardano/certificate.{RegisterCredential} -use cardano/governance.{NicePoll, ProposalProcedure, StakePool} -use cardano/governance/voter -use cardano/transaction.{ - Mint, OutputReference, Propose, Publish, ScriptPurpose, Spend, Vote, Withdraw, -} -use cardano/transaction/output_reference - -pub fn compare(left: ScriptPurpose, right: ScriptPurpose) -> Ordering { - when left is { - Mint(left) -> - when right is { - Mint(right) -> bytearray.compare(left, right) - _ -> Less - } - - Spend(left) -> - when right is { - Spend(right) -> output_reference.compare(left, right) - Mint(_) -> Greater - _ -> 
Less - } - - Withdraw(left) -> - when right is { - Withdraw(right) -> credential.compare(left, right) - Spend(_) | Mint(_) -> Greater - _ -> Less - } - - Publish(left, _) -> - when right is { - Publish(right, _) -> int.compare(left, right) - Spend(_) | Mint(_) | Withdraw(_) -> Greater - _ -> Less - } - - Vote(left) -> - when right is { - Vote(right) -> voter.compare(left, right) - Propose(..) -> Less - _ -> Greater - } - - Propose(left, _) -> - when right is { - Propose(right, _) -> int.compare(left, right) - _ -> Greater - } - } -} - -test compare_matrix() { - let mint0 = Mint("0") - let mint1 = Mint("1") - - let spend0 = Spend(OutputReference("", 0)) - let spend1 = Spend(OutputReference("", 1)) - - let withdraw0 = Withdraw(VerificationKey("0")) - let withdraw1 = Withdraw(VerificationKey("1")) - - let publish0 = Publish(0, RegisterCredential(Script(""), Never)) - let publish1 = Publish(1, RegisterCredential(Script(""), Never)) - - let vote0 = Vote(StakePool("0")) - let vote1 = Vote(StakePool("1")) - - let propose0 = Propose(0, ProposalProcedure(0, Script(""), NicePoll)) - let propose1 = Propose(1, ProposalProcedure(0, Script(""), NicePoll)) - - and { - (compare(mint0, mint0) == Equal)?, - (compare(mint0, mint1) == Less)?, - (compare(mint1, mint0) == Greater)?, - (compare(mint0, spend0) == Less)?, - (compare(mint0, withdraw0) == Less)?, - (compare(mint0, publish0) == Less)?, - (compare(mint0, vote0) == Less)?, - (compare(mint0, propose0) == Less)?, - (compare(spend0, spend0) == Equal)?, - (compare(spend0, spend1) == Less)?, - (compare(spend1, spend0) == Greater)?, - (compare(spend0, mint0) == Greater)?, - (compare(spend0, withdraw0) == Less)?, - (compare(spend0, publish0) == Less)?, - (compare(spend0, vote0) == Less)?, - (compare(spend0, propose0) == Less)?, - (compare(withdraw0, withdraw0) == Equal)?, - (compare(withdraw0, withdraw1) == Less)?, - (compare(withdraw1, withdraw0) == Greater)?, - (compare(withdraw0, mint0) == Greater)?, - (compare(withdraw0, spend0) == Greater)?, - (compare(withdraw0, publish0) == Less)?, - (compare(withdraw0, vote0) == Less)?, - (compare(withdraw0, propose0) == Less)?, - (compare(publish0, publish0) == Equal)?, - (compare(publish0, publish1) == Less)?, - (compare(publish1, publish0) == Greater)?, - (compare(publish0, mint0) == Greater)?, - (compare(publish0, spend0) == Greater)?, - (compare(publish0, withdraw0) == Greater)?, - (compare(publish0, vote0) == Less)?, - (compare(publish0, propose0) == Less)?, - (compare(vote0, vote0) == Equal)?, - (compare(vote0, vote1) == Less)?, - (compare(vote1, vote0) == Greater)?, - (compare(vote0, mint0) == Greater)?, - (compare(vote0, spend0) == Greater)?, - (compare(vote0, withdraw0) == Greater)?, - (compare(vote0, publish0) == Greater)?, - (compare(vote0, propose0) == Less)?, - (compare(propose0, propose0) == Equal)?, - (compare(propose0, propose1) == Less)?, - (compare(propose1, propose0) == Greater)?, - (compare(propose0, mint0) == Greater)?, - (compare(propose0, spend0) == Greater)?, - (compare(propose0, withdraw0) == Greater)?, - (compare(propose0, publish0) == Greater)?, - (compare(propose0, vote0) == Greater)?, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml deleted file mode 100644 index f0c64bc6..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/build/packages/packages.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[packages]] 
-name = "aiken-lang/stdlib" -version = "v2.2.0" -source = "github" diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak deleted file mode 100644 index a4ca9d6f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cip.ak +++ /dev/null @@ -1,73 +0,0 @@ -//// Vodka provides utility functions to build CIP onchain code -//// -//// ### Example usage -//// ```aiken -//// use cip -//// use cardano/assets.{AssetName} -//// -//// let cip68_100_asset_name = cip.cip68_100(asset_name) -//// let cip68_222_asset_name = cip.cip68_222(asset_name) -//// let cip68_333_asset_name = cip.cip68_333(asset_name) -//// let cip68_444_asset_name = cip.cip68_444(asset_name) - -use aiken/primitive/bytearray.{concat} -use cardano/assets.{AssetName} - -/// The byte prefix for CIP-68 asset - Reference Token -pub const cip68_100_prefix: ByteArray = #"000643b0" - -/// The byte prefix for CIP-68 asset - Non-Fungible Token -pub const cip68_222_prefix: ByteArray = #"000de140" - -/// The byte prefix for CIP-68 asset - Fungible Token -pub const cip68_333_prefix: ByteArray = #"0014df10" - -/// The byte prefix for CIP-68 asset - Rich-Fungible Token -pub const cip68_444_prefix: ByteArray = #"001bc280" - -pub fn drop_cip68_prefix(cip_68_asset_name: AssetName) -> AssetName { - cip_68_asset_name |> bytearray.drop(4) -} - -test test_drop_cip68_prefix() { - let cip68_100_asset_name = #"000643b012345678" - drop_cip68_prefix(cip68_100_asset_name) == #"12345678" -} - -/// Obtain the asset name for CIP-68 asset - Reference Token -/// ```aiken -/// let cip68_100_asset_name = cip68_100(asset_name) -/// ``` -pub fn cip68_100(asset_name: AssetName) -> AssetName { - concat(cip68_100_prefix, asset_name) -} - -/// Obtain the asset name for CIP-68 asset - Non-Fungible Token -/// ```aiken -/// let cip68_222_asset_name = cip68_222(asset_name) -/// ``` -pub fn cip68_222(asset_name: AssetName) -> AssetName { - concat(cip68_222_prefix, asset_name) -} - -/// Obtain the asset name for CIP-68 asset - Fungible Token -/// ```aiken -/// let cip68_333_asset_name = cip68_333(asset_name) -/// ``` -pub fn cip68_333(asset_name: AssetName) -> AssetName { - concat(cip68_333_prefix, asset_name) -} - -/// Obtain the asset name for CIP-68 asset - Rich-Fungible Token -/// ```aiken -/// let cip68_444_asset_name = cip68_444(asset_name) -/// ``` -pub fn cip68_444(asset_name: AssetName) -> AssetName { - concat(cip68_444_prefix, asset_name) -} - -/// The metadata attached with CIP-68 reference token (100) -pub type CIP68Metadata { - metadata: Pairs, - version: Int, -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak deleted file mode 100644 index f67cb2cc..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail.ak +++ /dev/null @@ -1,161 +0,0 @@ -//// Vodka cocktail provides utility functions to build Aiken onchain code -//// -//// All onchain utility functions are grouped with a naming convention of `vodka_`, -//// and all can be imported directly with `use cocktail` -//// -//// ### Cardano data types -//// - `Address` - [`use vodka_address`](./cocktail/vodka_address.html) -//// - `Value` - [`use vodka_value`](./cocktail/vodka_value.html) -//// -//// ### Transaction types -//// - `extra_signatories` - [Documentation](./cocktail/vodka_extra_signatories.html) 
-//// - `inputs` - [Documentation](./cocktail/vodka_inputs.html) -//// - `outputs` - [Documentation](./cocktail/vodka_outputs.html) -//// - `mints` - [Documentation](./cocktail/vodka_mints.html) -//// - `validity_range` - [Documentation](./cocktail/vodka_validity_range.html) -//// - `Redeemers` - [Documentation](./cocktail/vodka_redeemers.html) - -use cocktail/vodka_address -use cocktail/vodka_converter -use cocktail/vodka_extra_signatories -use cocktail/vodka_inputs -use cocktail/vodka_mints -use cocktail/vodka_outputs -use cocktail/vodka_redeemers -use cocktail/vodka_validity_range -use cocktail/vodka_value - -// Address - -/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) -pub const compare_script_address = vodka_address.compare_script_address - -/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) -pub const compare_address = vodka_address.compare_address - -/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) -pub const address_payment_key = vodka_address.address_payment_key - -/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) -pub const address_pub_key = vodka_address.address_pub_key - -/// Documentation please refer to [`vodka_address`](./cocktail/vodka_address.html) -pub const address_script_hash = vodka_address.address_script_hash - -// Converter - -/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) -pub const convert_int_to_bytes = vodka_converter.convert_int_to_bytes - -/// Documentation please refer to [`vodka_converter`](./cocktail/vodka_converter.html) -pub const get_number_digit = vodka_converter.get_number_digit - -// Extra Signatories - -/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) -pub const key_signed = vodka_extra_signatories.key_signed - -/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) -pub const one_of_keys_signed = vodka_extra_signatories.one_of_keys_signed - -/// Documentation please refer to [`vodka_extra_signatories`](./cocktail/vodka_extra_signatories.html) -pub const all_key_signed = vodka_extra_signatories.all_key_signed - -// Inputs - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const input_inline_datum = vodka_inputs.input_inline_datum - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const only_input_datum_with = vodka_inputs.only_input_datum_with - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_at = vodka_inputs.inputs_at - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_with = vodka_inputs.inputs_with - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_with_policy = vodka_inputs.inputs_with_policy - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_at_with = vodka_inputs.inputs_at_with - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_at_with_policy = vodka_inputs.inputs_at_with_policy - -/// Documentation please refer to [`vodka_inputs`](./cocktail/vodka_inputs.html) -pub const inputs_token_quantity = vodka_inputs.inputs_token_quantity - -// Mints - -/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) -pub const 
check_policy_only_burn = vodka_mints.check_policy_only_burn - -/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) -pub const only_minted_token = vodka_mints.only_minted_token - -/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) -pub const policy_only_minted_token = vodka_mints.policy_only_minted_token - -/// Documentation please refer to [`vodka_mints`](./cocktail/vodka_mints.html) -pub const token_minted = vodka_mints.token_minted - -// Outputs - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const output_inline_datum = vodka_outputs.output_inline_datum - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const outputs_at = vodka_outputs.outputs_at - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const outputs_with = vodka_outputs.outputs_with - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const outputs_with_policy = vodka_outputs.outputs_with_policy - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const outputs_at_with = vodka_outputs.outputs_at_with - -/// Documentation please refer to [`vodka_outputs`](./cocktail/vodka_outputs.html) -pub const outputs_at_with_policy = vodka_outputs.outputs_at_with_policy - -// Redeemers - -/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) -pub const redeemer_from = vodka_redeemers.redeemer_from - -/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) -pub const withdrawal_redeemer = vodka_redeemers.withdrawal_redeemer - -/// Documentation please refer to [`vodka_redeemers`](./cocktail/vodka_redeemers.html) -pub const compare_output_reference = vodka_redeemers.compare_output_reference - -// Validity Range - -/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) -pub const valid_after = vodka_validity_range.valid_after - -/// Documentation please refer to [`vodka_validity_range`](./cocktail/vodka_validity_range.html) -pub const valid_before = vodka_validity_range.valid_before - -// Value - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const value_length = vodka_value.value_length - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const get_all_value_to = vodka_value.get_all_value_to - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const get_all_value_from = vodka_value.get_all_value_from - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const value_geq = vodka_value.value_geq - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const value_policy_info = vodka_value.value_policy_info - -/// Documentation please refer to [`vodka_value`](./cocktail/vodka_value.html) -pub const value_tokens = vodka_value.value_tokens diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak deleted file mode 100644 index 5bbf8a3d..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_address.ak +++ /dev/null @@ -1,56 +0,0 @@ -use aiken/crypto.{Blake2b_224, Hash, ScriptHash, VerificationKeyHash} -use aiken/primitive/bytearray -use 
cardano/address.{Address, Script, VerificationKey} - -/// Compare two script addresses -pub fn compare_script_address(x: Address, y: Address) -> Ordering { - expect Script(x_hash) = x.payment_credential - expect Script(y_hash) = y.payment_credential - bytearray.compare(x_hash, y_hash) -} - -/// Compare two addresses -pub fn compare_address(x: Address, y: Address) -> Ordering { - let x_hash = x.payment_credential - let y_hash = y.payment_credential - when (x_hash, y_hash) is { - (Script(x_script_hash), Script(y_script_hash)) -> - bytearray.compare(x_script_hash, y_script_hash) - (VerificationKey(x_key_hash), VerificationKey(y_key_hash)) -> - bytearray.compare(x_key_hash, y_key_hash) - _ -> Equal - } -} - -/// Obtain the payment key of an address, it can be either a script hash or a verification key -/// ```aiken -/// let payment_key_hash = address_payment_key(address) -/// ``` -pub fn address_payment_key(address: Address) -> Hash { - when address.payment_credential is { - Script(hash) -> hash - VerificationKey(key_hash) -> key_hash - } -} - -/// Obtain the verification key of an address, None if it is a script address -/// ```aiken -/// expect Some(pub_key_hash) = address_pub_key(address) -/// ``` -pub fn address_pub_key(address: Address) -> Option { - when address.payment_credential is { - VerificationKey(key_hash) -> Some(key_hash) - _ -> None - } -} - -/// Obtain the script hash of an address, None if it is a verification key address -/// ```aiken -/// expect Some(script_hash) = address_pub_key(address) -/// ``` -pub fn address_script_hash(address: Address) -> Option { - when address.payment_credential is { - Script(script_hash) -> Some(script_hash) - _ -> None - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak deleted file mode 100644 index f13c84aa..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_converter.ak +++ /dev/null @@ -1,40 +0,0 @@ -use aiken/primitive/bytearray - -/// Convert an integer to a "stringify" ByteArray value -/// ```aiken -/// let int_bytes = convert_int_to_bytes(123) -/// let this_is_true = int_bytes == "123" -/// ``` -pub fn convert_int_to_bytes(i: Int) -> ByteArray { - convert_int_to_bytes_go(i, get_number_digit(i)) -} - -fn convert_int_to_bytes_go(newInt: Int, digit: Int) -> ByteArray { - if digit == 1 { - bytearray.push("", newInt + 48) - } else { - bytearray.push( - convert_int_to_bytes_go(newInt % digit, digit / 10), - newInt / digit + 48, - ) - } -} - -/// Get the number of digits in an integer -pub fn get_number_digit(i: Int) -> Int { - go_get_number_digit(i, 1) -} - -fn go_get_number_digit(newInt: Int, digit: Int) -> Int { - if newInt < 10 { - digit - } else { - go_get_number_digit(newInt / 10, digit * 10) - } -} - -test byte_conversion() { - convert_int_to_bytes(1) == "1" && convert_int_to_bytes(123) == "123" && convert_int_to_bytes( - 672912, - ) == "672912" -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak deleted file mode 100644 index 17eb3b20..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_extra_signatories.ak +++ /dev/null @@ -1,46 +0,0 @@ -use aiken/collection/list - -/// Check if a key is signed by 
any of the extra_signatories -/// ```aiken -/// let extra_signatories = ["key1", "key2", "key3"] -/// -/// let key_to_test_1 = "key2" -/// let this_is_true = key_signed(extra_signatories, key_to_test_1) -/// -/// let key_to_test_2 = "key4" -/// let this_is_false = key_signed(extra_signatories, key_to_test_2) -/// ``` -pub fn key_signed(extra_signatories: List, key: ByteArray) { - list.has(extra_signatories, key) -} - -/// Check if any of the keys are signed by the extra_signatories -/// ```aiken -/// let extra_signatories = ["key1", "key2", "key3"] -/// -/// let keys_to_test_1 = ["key4", "key2", "key5"] -/// let this_is_true = one_of_keys_signed(extra_signatories, keys_to_test_1) -/// -/// let keys_to_test_2 = ["key4", "key5"] -/// let this_is_false = one_of_keys_signed(extra_signatories, keys_to_test_2) -/// ``` -pub fn one_of_keys_signed( - extra_signatories: List, - keys: List, -) { - list.any(keys, fn(key) { key_signed(extra_signatories, key) }) -} - -/// Check if all of the keys are signed by the extra_signatories -/// ```aiken -/// let extra_signatories = ["key1", "key2", "key3"] -/// -/// let keys_to_test_1 = ["key2", "key3"] -/// let this_is_true = all_keys_signed(extra_signatories, keys_to_test_1) -/// -/// let keys_to_test_2 = ["key2", "key4"] -/// let this_is_false = all_keys_signed(extra_signatories, keys_to_test_2) -/// ``` -pub fn all_key_signed(extra_signatories: List, keys: List) { - list.all(keys, fn(key) { key_signed(extra_signatories, key) }) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak deleted file mode 100644 index 5ddaaa7f..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_inputs.ak +++ /dev/null @@ -1,123 +0,0 @@ -use aiken/collection/list -use cardano/address.{Address} -use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} -use cardano/transaction.{InlineDatum, Input} - -/// Extracts the inline datum from an input. -/// ```aiken -/// expect inline_datum: MyDatumType = input_inline_datum(input) -/// ``` -pub fn input_inline_datum(input: Input) { - expect InlineDatum(raw_datum) = input.output.datum - raw_datum -} - -/// Extract the inline datum by locating the first input in a list of inputs by Address and PolicyId. -/// ```aiken -/// expect inline_datum: MyDatumType = only_input_datum_with(inputs, policy, name) -/// ``` -pub fn only_input_datum_with( - inputs: List, - policy: PolicyId, - name: AssetName, -) { - expect Some(input) = - list.find( - inputs, - fn(input) { quantity_of(input.output.value, policy, name) == 1 }, - ) - input_inline_datum(input) -} - -/// Filters inputs by Address. -/// ```aiken -/// let filtered_inputs = inputs_at(inputs, address) -/// ``` -pub fn inputs_at(inputs: List, address: Address) -> List { - list.filter(inputs, fn(input) { input.output.address == address }) -} - -/// Filters inputs by PolicyId and AssetName. -/// ```aiken -/// let filtered_inputs = inputs_with(inputs, policy, name) -/// ``` -pub fn inputs_with( - inputs: List, - policy: PolicyId, - name: AssetName, -) -> List { - list.filter( - inputs, - fn(input) { quantity_of(input.output.value, policy, name) == 1 }, - ) -} - -/// Filters inputs by token policy. 
-/// ```aiken -/// let filtered_inputs = inputs_with_policy(inputs, policy) -/// ``` -pub fn inputs_with_policy(inputs: List, policy: PolicyId) -> List { - list.filter( - inputs, - fn(input) { - list.any(flatten(input.output.value), fn(token) { token.1st == policy }) - }, - ) -} - -/// Filters inputs by Address, PolicyId, and AssetName. -/// ```aiken -/// let filtered_inputs = inputs_at_with(inputs, address, policy, name) -/// ``` -pub fn inputs_at_with( - inputs: List, - address: Address, - policy: PolicyId, - name: AssetName, -) -> List { - list.filter( - inputs, - fn(input) { - input.output.address == address && quantity_of( - input.output.value, - policy, - name, - ) == 1 - }, - ) -} - -/// Filters inputs by Address and PolicyId. -/// ```aiken -/// let filtered_inputs = inputs_at_with_policy(inputs, address, policy) -/// ``` -pub fn inputs_at_with_policy( - inputs: List, - address: Address, - policy: PolicyId, -) -> List { - list.filter( - inputs, - fn(input) { - input.output.address == address && list.any( - flatten(input.output.value), - fn(token) { token.1st == policy }, - ) - }, - ) -} - -/// Calculate the total quantity of a token in a list of inputs. -/// ```aiken -/// let total_quantity = inputs_token_quantity(inputs, token) -/// ``` -pub fn inputs_token_quantity( - inputs: List, - token: (PolicyId, AssetName), -) -> Int { - list.map( - inputs, - fn(input) { quantity_of(input.output.value, token.1st, token.2nd) }, - ) - |> list.foldr(0, fn(n, total) { n + total }) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak deleted file mode 100644 index aa7dfe9a..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_mints.ak +++ /dev/null @@ -1,68 +0,0 @@ -use aiken/collection/list -use cardano/assets.{AssetName, PolicyId, Value, flatten} - -/// Check if a certain PolicyId is burning only if exists in the minted value. -/// ```aiken -/// let is_policy_only_burn = check_policy_only_burn(minted_value, policy) -/// ``` -pub fn check_policy_only_burn(mint: Value, policy: PolicyId) -> Bool { - let mint_value = flatten(mint) - list.all( - mint_value, - fn(x) { - if x.1st == policy { - x.3rd < 0 - } else { - True - } - }, - ) -} - -/// Check if a certain policy has only minted this token. -/// ```aiken -/// let is_policy_only_minted = check_policy_only_minted(minted_value, policy, name, quantity) -/// ``` -pub fn policy_only_minted_token( - mint: Value, - policy: PolicyId, - name: AssetName, - quantity: Int, -) { - when flatten(mint) |> list.filter(fn(asset) { asset.1st == policy }) is { - [(_, minted_asset_name, minted_quantity)] -> - minted_asset_name == name && minted_quantity == quantity - _ -> False - } -} - -/// Check if the minted value contains only one distinct asset with particular PolicyId. -/// ```aiken -/// let is_only_minted_token = only_minted_token(minted_value, policy, name, quantity) -/// ``` -pub fn only_minted_token( - mint: Value, - policy: PolicyId, - name: AssetName, - quantity: Int, -) { - when flatten(mint) is { - [(minted_policy, minted_asset_name, minted_quantity)] -> - minted_policy == policy && minted_asset_name == name && minted_quantity == quantity - _ -> False - } -} - -/// Check if the minted value contains a token with particular PolicyId, AssetName, and quantity. 
-/// ```aiken -/// let is_token_minted = token_minted(minted_value, policy, name, quantity) -/// ``` -pub fn token_minted( - mint: Value, - policy: PolicyId, - name: AssetName, - quantity: Int, -) { - flatten(mint) - |> list.any(fn(x) { x.1st == policy && x.2nd == name && x.3rd == quantity }) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak deleted file mode 100644 index d83ffaf5..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_outputs.ak +++ /dev/null @@ -1,90 +0,0 @@ -use aiken/collection/list -use cardano/address.{Address} -use cardano/assets.{AssetName, PolicyId, flatten, quantity_of} -use cardano/transaction.{InlineDatum, Output} - -/// Extracts the inline datum from an output. -/// ```aiken -/// expect inline_datum: MyDatumType = output_inline_datum(output) -/// ``` -pub fn output_inline_datum(output: Output) { - expect InlineDatum(raw_datum) = output.datum - raw_datum -} - -/// Filters outputs by Address. -/// ```aiken -/// let filtered_outputs = outputs_at(outputs, address) -/// ``` -pub fn outputs_at(outputs: List, address: Address) -> List { - list.filter(outputs, fn(output) { output.address == address }) -} - -/// Filters outputs by PolicyId and AssetName. -/// ```aiken -/// let filtered_outputs = outputs_with(outputs, policy, name) -/// ``` -pub fn outputs_with( - outputs: List, - policy: PolicyId, - name: AssetName, -) -> List { - list.filter( - outputs, - fn(output) { quantity_of(output.value, policy, name) == 1 }, - ) -} - -/// Filters outputs by token policy. -/// ```aiken -/// let filtered_outputs = outputs_with_policy(outputs, policy) -/// ``` -pub fn outputs_with_policy( - outputs: List, - policy: PolicyId, -) -> List { - list.filter( - outputs, - fn(output) { - list.any(flatten(output.value), fn(token) { token.1st == policy }) - }, - ) -} - -/// Filters outputs by Address, PolicyId, and AssetName. -/// ```aiken -/// let filtered_outputs = outputs_at_with(outputs, address, policy, name) -/// ``` -pub fn outputs_at_with( - outputs: List, - address: Address, - policy: PolicyId, - name: AssetName, -) -> List { - list.filter( - outputs, - fn(output) { - output.address == address && quantity_of(output.value, policy, name) == 1 - }, - ) -} - -/// Filters outputs by Address and PolicyId. 
-/// ```aiken -/// let filtered_outputs = outputs_at_with_policy(outputs, address, policy) -/// ``` -pub fn outputs_at_with_policy( - outputs: List, - address: Address, - policy: PolicyId, -) -> List { - list.filter( - outputs, - fn(output) { - output.address == address && list.any( - flatten(output.value), - fn(token) { token.1st == policy }, - ) - }, - ) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak deleted file mode 100644 index df3d69e9..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_redeemers.ak +++ /dev/null @@ -1,45 +0,0 @@ -use aiken/collection/pairs -use aiken/primitive/bytearray -use cardano/address.{Address, Script} -use cardano/transaction.{ - Input, OutputReference, Redeemer, ScriptPurpose, Spend, Withdraw, find_input, -} - -/// Obtain the redeemer for a given output reference and address -/// ```aiken -/// expect Some(redeemer) = redeemer_from(redeemers, inputs, output_reference, input_address) -/// ``` -pub fn redeemer_from( - redeemers: Pairs, - inputs: List, - output_reference: OutputReference, - input_address: Address, -) -> Option { - expect Some(redeemer) = redeemers |> pairs.get_first(Spend(output_reference)) - expect Some(input) = find_input(inputs, output_reference) - if input.output.address == input_address { - Some(redeemer) - } else { - None - } -} - -/// Obtain the first redeemer for a given withdrawal script hash -/// ```aiken -/// expect Some(raw_redeemer) = withdrawal_redeemer(redeemers, withdrawal_script_hash) -/// expect my_redeemer: MyRedeemerType = raw_redeemer; -/// ``` -pub fn withdrawal_redeemer( - redeemers: Pairs, - withdrawal_script_hash: ByteArray, -) -> Option { - redeemers - |> pairs.get_first(Withdraw(Script(withdrawal_script_hash))) -} - -/// Compare the output reference of two spend transactions -pub fn compare_output_reference(x, y) { - expect Spend(out_ref_x) = x - expect Spend(out_ref_y) = y - bytearray.compare(out_ref_x.transaction_id, out_ref_y.transaction_id) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak deleted file mode 100644 index 498bc600..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_validity_range.ak +++ /dev/null @@ -1,34 +0,0 @@ -use aiken/interval.{Finite, IntervalBound} -use cardano/transaction.{ValidityRange} - -/// Check if the validity range is after the required timestamp. -/// ```aiken -/// let is_valid_after = valid_after(transaction.validity_range, required_timestamp) -/// ``` -pub fn valid_after( - validity_range: ValidityRange, - required_timestamp: Int, -) -> Bool { - let IntervalBound { bound_type, is_inclusive } = validity_range.lower_bound - when (bound_type, is_inclusive) is { - (Finite(lower_bound), True) -> lower_bound > required_timestamp - (Finite(lower_bound), False) -> lower_bound >= required_timestamp - _ -> False - } -} - -/// Check if the validity range is before the required timestamp. 
-/// ```aiken -/// let is_valid_before = valid_before(transaction.validity_range, required_timestamp) -/// ``` -pub fn valid_before( - validity_range: ValidityRange, - required_timestamp: Int, -) -> Bool { - let IntervalBound { bound_type, is_inclusive } = validity_range.upper_bound - when (bound_type, is_inclusive) is { - (Finite(upper_bound), True) -> upper_bound < required_timestamp - (Finite(upper_bound), False) -> upper_bound <= required_timestamp - _ -> False - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak deleted file mode 100644 index 770ddbcc..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/cocktail/vodka_value.ak +++ /dev/null @@ -1,80 +0,0 @@ -use aiken/collection/list -use cardano/address.{Address} -use cardano/assets.{ - AssetName, PolicyId, Value, flatten, merge, quantity_of, zero, -} -use cardano/transaction.{Input, Output} - -/// Calulate the length of a value -/// ```aiken -/// let value_length = value_length(value) -/// ``` -pub fn value_length(value: Value) -> Int { - list.length(flatten(value)) -} - -/// Get the value send to a particular address in a list of outputs -/// ```aiken -/// let value_to = get_all_value_to(outputs, address) -/// ``` -pub fn get_all_value_to(outputs: List, address: Address) -> Value { - list.foldr( - outputs, - zero, - fn(output, acc_value) { - if output.address == address { - merge(acc_value, output.value) - } else { - acc_value - } - }, - ) -} - -/// Get the value coming from a particular address in a list of inputs -/// ```aiken -/// let value_from = get_all_value_from(inputs, address) -/// ``` -pub fn get_all_value_from(inputs: List, address: Address) -> Value { - list.foldr( - inputs, - zero, - fn(input, acc_value) { - if input.output.address == address { - merge(acc_value, input.output.value) - } else { - acc_value - } - }, - ) -} - -/// Check if the first value provided is greater than or equal to the second value -/// ```aiken -/// let is_geq = value_geq(supposed_greater, supposed_smaller) -/// ``` -pub fn value_geq(greater: Value, smaller: Value) -> Bool { - list.all( - flatten(smaller), - fn(token) { quantity_of(greater, token.1st, token.2nd) >= token.3rd }, - ) -} - -/// Obtain the information (i.e. flattened value) of a policy in a value -/// ```aiken -/// expect Some((policyId, assetName, quantity)) = value_policy_info(value, policy) -/// ``` -pub fn value_policy_info( - value: Value, - policy: ByteArray, -) -> Option<(ByteArray, ByteArray, Int)> { - list.find(flatten(value), fn(t) { t.1st == policy }) -} - -/// Obtain the non-lovelace information (i.e. flattened value) of a policy in a value -/// ```aiken -/// let tokens = value_tokens(value) -/// ``` -pub fn value_tokens(value: Value) -> List<(PolicyId, AssetName, Int)> { - list.filter(flatten(value), fn(t) { t.1st != "" }) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak deleted file mode 100644 index 79a8e7c7..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail.ak +++ /dev/null @@ -1,776 +0,0 @@ -//// Mocktail contains a set of functions to build transactions for testing purposes. -//// -//// To use Mocktail Tx, there are 4 steps -//// 1. 
Starts with [`mocktail_tx()`](./mocktail.html#mocktail_tx) to create a new transaction builder. -//// 2. Use tx building methods similar to MeshJS lower level APIs to build the transaction. -//// 3. Call [`complete`](./mocktail.html#complete) to complete building transaction. -//// 4. Finally, if there is any whole items to be added to the transaction, use the `add` functions. -//// -//// Mocktail is built with devex and multiple test cases compatibility in mind. -//// 1. It is pipable. -//// 2. For every tx building and adding methods, it takes first param as condition. that function will only run when this condition is `True`. -//// -//// ## Example -//// ```aiken -//// let tx: Transaction = -//// mocktail_tx() -//// |> required_signer_hash(is_signature_provided, mock_pub_key_hash(0)) -//// |> script_withdrawal(True, mock_script_hash(0), 0) -//// |> script_withdrawal(True, mock_script_hash(1), 0) -//// |> required_signer_hash(True, mock_pub_key_hash(1)) -//// |> complete() -//// |> add_reference_input(True, mock_oracle_ref_input_1()) -//// |> add_reference_input(True, mock_oracle_ref_input_2()) -//// |> add_output(True, mock_pub_key_output(mock_fee_collection_address, mock_fee)) -//// ``` - -use aiken/cbor -use aiken/collection/dict -use aiken/collection/list -use aiken/crypto.{VerificationKeyHash, blake2b_256} -use aiken/interval.{Finite, Interval} -use cardano/address.{Address, Credential, Script} -use cardano/assets.{Value, from_asset, merge, zero} -use cardano/certificate.{Certificate} -use cardano/transaction.{ - InlineDatum, Input, Output, OutputReference, Redeemer, ScriptPurpose, - Transaction, TransactionId, placeholder, -} -use mocktail/virgin_address -use mocktail/virgin_key_hash -use mocktail/virgin_output_reference -use mocktail/virgin_outputs -use mocktail/virgin_validity_range - -/// A mock transaction builder. It can be initialized with `mocktail_tx()`. -pub type MocktailTx { - tx: Transaction, - queue_input: Option, - queue_output: Option, - queue_ref_input: Option, -} - -/// Initialize a new mock transaction builder, and output a built transaction wiht [`.complete().`](./mocktail.html#complete) -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn mocktail_tx() -> MocktailTx { - MocktailTx { - tx: placeholder, - queue_input: None, - queue_output: None, - queue_ref_input: None, - } -} - -/// Tx building method - Add an input to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> tx_in(condition, tx_hash, tx_index, amount, address) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn tx_in( - mocktail_tx: MocktailTx, - condition: Bool, - tx_hash: ByteArray, - tx_index: Int, - amount: Value, - address: Address, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_input = - Some( - Input { - output_reference: OutputReference { - transaction_id: tx_hash, - output_index: tx_index, - }, - output: mock_pub_key_output(address, amount), - }, - ) - when mocktail_tx.queue_input is { - Some(input) -> - MocktailTx { - ..mocktail_tx, - queue_input: queue_input, - tx: mocktail_tx.tx |> add_input(True, input), - } - None -> MocktailTx { ..mocktail_tx, queue_input: queue_input } - } - } -} - -/// Tx building method - Add an input with inline datum to the transaction. -/// This can only be used right after [`tx_in`](./mocktail.html#tx_in). -/// This function will only run when the condition is `True`. 
-/// ```aiken -/// let tx = mocktail_tx() -/// |> tx_in(condition, tx_hash, tx_index, amount, address) -/// |> tx_in_inline_datum(condition, datum) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn tx_in_inline_datum( - mocktail_tx: MocktailTx, - condition: Bool, - datum: Data, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_input = - when mocktail_tx.queue_input is { - Some(input) -> { - let Input { output_reference, output } = input - Some( - Input { - output_reference, - output: Output { ..output, datum: InlineDatum(datum) }, - }, - ) - } - None -> - Some( - Input { - output_reference: mock_utxo_ref(0, 0), - output: mock_script_output( - mock_script_address(0, None), - zero, - InlineDatum(datum), - ), - }, - ) - } - MocktailTx { ..mocktail_tx, queue_input: queue_input } - } -} - -/// Tx building method - Add an output to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> tx_out(condition, address, amount) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn tx_out( - mocktail_tx: MocktailTx, - condition: Bool, - address: Address, - amount: Value, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_output = Some(mock_pub_key_output(address, amount)) - when mocktail_tx.queue_output is { - Some(output) -> - MocktailTx { - ..mocktail_tx, - queue_output: queue_output, - tx: mocktail_tx.tx |> add_output(True, output), - } - None -> MocktailTx { ..mocktail_tx, queue_output: queue_output } - } - } -} - -/// Tx building method - Add an output with inline datum to the transaction. -/// This can only be used right after [`tx_out`](./mocktail.html#tx_out). -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> tx_out(condition, address, amount) -/// |> tx_out_inline_datum(condition, datum) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn tx_out_inline_datum( - mocktail_tx: MocktailTx, - condition: Bool, - datum: Data, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_output = - when mocktail_tx.queue_output is { - Some(output) -> Some(Output { ..output, datum: InlineDatum(datum) }) - None -> - Some( - mock_script_output( - mock_script_address(0, None), - zero, - InlineDatum(datum), - ), - ) - } - MocktailTx { ..mocktail_tx, queue_output: queue_output } - } -} - -/// Tx building method - Add a mint to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> mint(condition, quantity, policy_id, token_name) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn mint( - mocktail_tx: MocktailTx, - condition: Bool, - quantity: Int, - policy_id: ByteArray, - token_name: ByteArray, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - MocktailTx { - ..mocktail_tx, - tx: mocktail_tx.tx - |> add_mint(True, from_asset(policy_id, token_name, quantity)), - } - } -} - -/// Tx building method - Add a reference input to the transaction. -/// This function will only run when the condition is `True`. 
-/// ```aiken -/// let tx = mocktail_tx() -/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn ref_tx_in( - mocktail_tx: MocktailTx, - condition: Bool, - tx_hash: ByteArray, - tx_index: Int, - amount: Value, - address: Address, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_ref_input = - Some( - Input { - output_reference: OutputReference { - transaction_id: tx_hash, - output_index: tx_index, - }, - output: mock_pub_key_output(address, amount), - }, - ) - when mocktail_tx.queue_ref_input is { - Some(input) -> - MocktailTx { - ..mocktail_tx, - queue_ref_input: queue_ref_input, - tx: mocktail_tx.tx |> add_reference_input(True, input), - } - None -> MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } - } - } -} - -/// Tx building method - Add an inline datum to last reference input in the transaction. -/// This can only be used right after [`ref_tx_in`](./mocktail.html#ref_tx_in). -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ref_tx_in(condition, tx_hash, tx_index, amount, address) -/// |> ref_tx_in_inline_datum(condition, datum) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn ref_tx_in_inline_datum( - mocktail_tx: MocktailTx, - condition: Bool, - datum: Data, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let queue_ref_input = - when mocktail_tx.queue_ref_input is { - Some(input) -> { - let Input { output_reference, output } = input - Some( - Input { - output_reference, - output: Output { ..output, datum: InlineDatum(datum) }, - }, - ) - } - None -> - Some( - Input { - output_reference: mock_utxo_ref(0, 0), - output: mock_script_output( - mock_script_address(0, None), - zero, - InlineDatum(datum), - ), - }, - ) - } - MocktailTx { ..mocktail_tx, queue_ref_input: queue_ref_input } - } -} - -/// Tx building method - Add a a lower bound validity range to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> valid_hereafter(condition, time) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn invalid_before( - mocktail_tx: MocktailTx, - condition: Bool, - time: Int, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let tx = mocktail_tx.tx - let upper_bound = - when tx.validity_range.upper_bound.bound_type is { - Finite(x) -> Some(x) - _ -> None - } - MocktailTx { - ..mocktail_tx, - tx: Transaction { - ..tx, - validity_range: mock_interval(Some(time), upper_bound), - }, - } - } -} - -/// Tx building method - Add a a upper bound validity range to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> valid_hereafter(condition, time) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn invalid_hereafter( - mocktail_tx: MocktailTx, - condition: Bool, - time: Int, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - let tx = mocktail_tx.tx - let lower_bound = - when tx.validity_range.lower_bound.bound_type is { - Finite(x) -> Some(x) - _ -> None - } - MocktailTx { - ..mocktail_tx, - tx: Transaction { - ..tx, - validity_range: mock_interval(lower_bound, Some(time)), - }, - } - } -} - -/// Tx building method - Add a required signer hash to the transaction. -/// This function will only run when the condition is `True`. 
-/// ```aiken -/// let tx = mocktail_tx() -/// |> required_signer_hash(condition, key) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn required_signer_hash( - mocktail_tx: MocktailTx, - condition: Bool, - key: ByteArray, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - MocktailTx { - ..mocktail_tx, - tx: mocktail_tx.tx |> add_extra_signatory(True, key), - } - } -} - -/// Tx building method - Add a script withdrawal to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> script_withdrawal(condition, script_hash, withdrawal_amount) -/// |> ...other tx building methods -/// |> complete() -/// ``` -pub fn script_withdrawal( - mocktail_tx: MocktailTx, - condition: Bool, - script_hash: ByteArray, - withdrawal_amount: Int, -) -> MocktailTx { - if !condition { - mocktail_tx - } else { - MocktailTx { - ..mocktail_tx, - tx: mocktail_tx.tx - |> add_withdrawal(True, Pair(Script(script_hash), withdrawal_amount)), - } - } -} - -/// Tx building method - Conclude the transaction building process, and return the built transaction. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -pub fn complete(mocktail_tx: MocktailTx) -> Transaction { - let tx = mocktail_tx.tx - let tx = - when mocktail_tx.queue_input is { - Some(input) -> tx |> add_input(True, input) - None -> tx - } - let tx = - when mocktail_tx.queue_output is { - Some(output) -> tx |> add_output(True, output) - None -> tx - } - let tx = - when mocktail_tx.queue_ref_input is { - Some(input) -> tx |> add_reference_input(True, input) - None -> tx - } - tx -} - -/// Tx maniputlator - Add an input to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_input(condition, input) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_input(tx: Transaction, condition: Bool, input: Input) -> Transaction { - if !condition { - tx - } else { - Transaction { ..tx, inputs: tx.inputs |> list.concat([input]) } - } -} - -/// Tx maniputlator - Add a reference input to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_reference_input(condition, input) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_reference_input( - tx: Transaction, - condition: Bool, - input: Input, -) -> Transaction { - if !condition { - tx - } else { - Transaction { - ..tx, - reference_inputs: tx.reference_inputs |> list.concat([input]), - } - } -} - -/// Tx maniputlator - Add an output to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let t = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_output(condition, output) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_output( - tx: Transaction, - condition: Bool, - output: Output, -) -> Transaction { - if !condition { - tx - } else { - Transaction { ..tx, outputs: tx.outputs |> list.concat([output]) } - } -} - -/// Tx maniputlator - Set a fee to the transaction. -/// This function will only run when the condition is `True`. 
-/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> set_fee(condition, lovelace_fee) -/// |> ...other tx maniputlator methods -/// ``` -pub fn set_fee( - tx: Transaction, - condition: Bool, - lovelace_fee: Int, -) -> Transaction { - if !condition { - tx - } else { - Transaction { ..tx, fee: lovelace_fee } - } -} - -/// Tx maniputlator - Add a mint to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_mint(condition, mint) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_mint(tx: Transaction, condition: Bool, mint: Value) -> Transaction { - if !condition { - tx - } else { - Transaction { - ..tx, - mint: tx.mint - |> merge(mint), - } - } -} - -/// Tx maniputlator - Add a certificate to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_certificate(condition, certificate) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_certificate( - tx: Transaction, - condition: Bool, - certificate: Certificate, -) -> Transaction { - if !condition { - tx - } else { - Transaction { - ..tx, - certificates: tx.certificates |> list.concat([certificate]), - } - } -} - -/// Tx maniputlator - Add a withdrawal to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_withdrawal(condition, stake_credential, amount) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_withdrawal( - tx: Transaction, - condition: Bool, - withdrawal: Pair, -) -> Transaction { - if !condition { - tx - } else { - Transaction { - ..tx, - withdrawals: tx.withdrawals |> list.concat([withdrawal]), - } - } -} - -/// Tx maniputlator - Add an extra signatory to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_extra_signatory(condition, signatory) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_extra_signatory( - tx: Transaction, - condition: Bool, - signatory: VerificationKeyHash, -) -> Transaction { - if !condition { - tx - } else { - Transaction { - ..tx, - extra_signatories: tx.extra_signatories |> list.concat([signatory]), - } - } -} - -/// Tx maniputlator - Add a redeemer to the transaction. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_redeemer(condition, redeemer) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_redeemer( - tx: Transaction, - condition: Bool, - redeemer: Pair, -) -> Transaction { - if !condition { - tx - } else { - Transaction { ..tx, redeemers: tx.redeemers |> list.concat([redeemer]) } - } -} - -/// Tx maniputlator - Add a datum to the transaction. -/// This function will only run when the condition is `True`. 
-/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> add_datum(condition, datum) -/// |> ...other tx maniputlator methods -/// ``` -pub fn add_datum(tx: Transaction, condition: Bool, datum: Data) -> Transaction { - if !condition { - tx - } else { - let datum_hash = blake2b_256(cbor.serialise(datum)) - Transaction { ..tx, datums: tx.datums |> dict.insert(datum_hash, datum) } - } -} - -/// Tx maniputlator - Set the transaction id. -/// This function will only run when the condition is `True`. -/// ```aiken -/// let tx = mocktail_tx() -/// |> ...tx building methods -/// |> complete() -/// |> set_transaction_id(condition, transaction_id) -/// |> ...other tx maniputlator methods -/// ``` -pub fn set_transaction_id( - tx: Transaction, - condition: Bool, - transaction_id: TransactionId, -) -> Transaction { - if !condition { - tx - } else { - Transaction { ..tx, id: transaction_id } - } -} - -// Address - -/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) -pub const mock_verfication_key_credential = - virgin_address.mock_verfication_key_credential - -/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) -pub const mock_pub_key_address = virgin_address.mock_pub_key_address - -/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) -pub const mock_script_credential = virgin_address.mock_script_credential - -/// Documentation please refer to [`virgin_address`](./mocktail/virgin_address.html) -pub const mock_script_address = virgin_address.mock_script_address - -// Key hash - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_key_hash = virgin_key_hash.mock_key_hash - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_policy_id = virgin_key_hash.mock_policy_id - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_pub_key_hash = virgin_key_hash.mock_pub_key_hash - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_script_hash = virgin_key_hash.mock_script_hash - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_stake_key_hash = virgin_key_hash.mock_stake_key_hash - -/// Documentation please refer to [`virgin_key_hash`](./mocktail/virgin_key_hash.html) -pub const mock_script_stake_key_hash = - virgin_key_hash.mock_script_stake_key_hash - -// Output reference - -/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) -pub const mock_tx_hash = virgin_output_reference.mock_tx_hash - -/// Documentation please refer to [`virgin_output_reference`](./mocktail/virgin_output_reference.html) -pub const mock_utxo_ref = virgin_output_reference.mock_utxo_ref - -// Outputs - -/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) -pub const mock_output = virgin_outputs.mock_output - -/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) -pub const mock_pub_key_output = virgin_outputs.mock_pub_key_output - -/// Documentation please refer to [`virgin_outputs`](./mocktail/virgin_outputs.html) -pub const mock_script_output = virgin_outputs.mock_script_output - -// Validity range - -/// Documentation please refer to [`virgin_validity_range`](./mocktail/virgin_validity_range.html) -pub const mock_interval = 
virgin_validity_range.mock_interval - -test preserve_tx_in_order() { - let tx: Transaction = - mocktail_tx() - |> tx_in(True, mock_tx_hash(0), 0, zero, mock_pub_key_address(0, None)) - |> tx_in(True, mock_tx_hash(1), 1, zero, mock_pub_key_address(1, None)) - |> complete() - expect [input1, input2] = tx.inputs - and { - input1.output_reference.transaction_id == mock_tx_hash(0), - input1.output_reference.output_index == 0, - input2.output_reference.transaction_id == mock_tx_hash(1), - input2.output_reference.output_index == 1, - } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak deleted file mode 100644 index 30a6e40b..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_address.ak +++ /dev/null @@ -1,57 +0,0 @@ -use cardano/address.{ - Address, Credential, Inline, Script, StakeCredential, VerificationKey, -} -use mocktail/virgin_key_hash.{ - mock_pub_key_hash, mock_script_hash, mock_script_stake_key_hash, - mock_stake_key_hash, -} - -/// Mock a pub key credential -/// `variation` same the same index as `mock_pub_key_hash` -pub fn mock_verfication_key_credential(variation: Int) -> Credential { - VerificationKey(mock_pub_key_hash(variation)) -} - -/// Mock a pub key address -/// `variation` same the same index as `mock_pub_key_hash` -/// `stake_credential` is optional -pub fn mock_pub_key_address( - variation: Int, - stake_credential: Option, -) -> Address { - Address { - payment_credential: mock_verfication_key_credential(variation), - stake_credential, - } -} - -/// Mock a script credential -/// `variation` same the same index as `mock_script_hash` -pub fn mock_script_credential(variation: Int) -> Credential { - Script(mock_script_hash(variation)) -} - -/// Mock a script address -/// `variation` same the same index as `mock_script_hash` -/// `stake_credential` is optional -pub fn mock_script_address( - variation: Int, - stake_credential: Option, -) -> Address { - Address { - payment_credential: mock_script_credential(variation), - stake_credential, - } -} - -/// Mock a pub key stake credential -/// `variation` same the same index as `mock_stake_key_hash` -pub fn mock_pub_key_stake_cred(variation: Int) -> StakeCredential { - Inline(VerificationKey(mock_stake_key_hash(variation))) -} - -/// Mock a script stake credential -/// `variation` same the same index as `mock_script_stake_key_hash` -pub fn mock_script_stake_cred(variation: Int) -> StakeCredential { - Inline(Script(mock_script_stake_key_hash(variation))) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak deleted file mode 100644 index 9a32dd06..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_key_hash.ak +++ /dev/null @@ -1,47 +0,0 @@ -use aiken/cbor.{serialise} -use aiken/crypto.{ScriptHash, VerificationKeyHash, blake2b_224} -use aiken/primitive/bytearray.{concat} -use cardano/assets.{PolicyId} - -pub const root_hash = - #"a2c20c77887ace1cd986193e4e75babd8993cfd56995cd5cfce609c2" - -/// Mock a key in hexadecimal format -pub fn mock_key_hash(variation: Int) -> ByteArray { - serialise(variation) |> concat(root_hash) |> blake2b_224() -} - -/// Mock a PolicyID -/// The variation is used to distinguish between 
different PolicyIDs -/// Use this but not other `mock_key_hash` functions to avoid hash collision -pub fn mock_policy_id(variation: Int) -> PolicyId { - mock_key_hash(variation) -} - -/// Mock a public key hash -/// The variation is used to distinguish between different public keys -/// Use this but not other `mock_key_hash` functions to avoid hash collision -pub fn mock_pub_key_hash(variation: Int) -> VerificationKeyHash { - mock_key_hash(variation + 1000) -} - -/// Mock a script hash -/// The variation is used to distinguish between different scripts -/// Use this but not other `mock_key_hash` functions to avoid hash collision -pub fn mock_script_hash(variation: Int) -> ScriptHash { - mock_key_hash(variation + 2000) -} - -/// Mock a stake key hash -/// The variation is used to distinguish between different stake keys -/// Use this but not other `mock_key_hash` functions to avoid hash collision -pub fn mock_stake_key_hash(variation: Int) -> VerificationKeyHash { - mock_key_hash(variation + 3000) -} - -/// Mock a script stake key hash -/// The variation is used to distinguish between different scripts -/// Use this but not other `mock_key_hash` functions to avoid hash collision -pub fn mock_script_stake_key_hash(variation: Int) -> ScriptHash { - mock_key_hash(variation + 4000) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak deleted file mode 100644 index 03ec9c02..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_output_reference.ak +++ /dev/null @@ -1,16 +0,0 @@ -use aiken/cbor.{serialise} -use aiken/crypto.{Blake2b_256, Hash, blake2b_256} -use aiken/primitive/bytearray.{concat} -use cardano/transaction.{OutputReference, Transaction} - -const root_hash = - #"5a077cbcdffb88b104f292aacb9687ce93e2191e103a30a0cc5505c18b719f98" - -pub fn mock_tx_hash(variation: Int) -> Hash { - serialise(variation) |> concat(root_hash) |> blake2b_256() -} - -/// Mock an output reference -pub fn mock_utxo_ref(variation: Int, output_index: Int) -> OutputReference { - OutputReference { transaction_id: mock_tx_hash(variation), output_index } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak deleted file mode 100644 index b75181ba..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_outputs.ak +++ /dev/null @@ -1,30 +0,0 @@ -use aiken/crypto.{ScriptHash} -use cardano/address.{Address} -use cardano/assets.{Value} -use cardano/transaction.{Datum, NoDatum, Output} - -/// Mock an output -pub fn mock_output( - address: Address, - value: Value, - datum: Datum, - reference_script: Option, -) -> Output { - Output { address, value, datum, reference_script } -} - -/// Mock an output with a public key address -/// `datum` and `reference_script` is omitted as it is seldom used in practice -pub fn mock_pub_key_output(address: Address, value: Value) -> Output { - mock_output(address, value, NoDatum, reference_script: None) -} - -/// Mock an output with a script address -/// `reference_script` is omitted as it is seldom used in practice -pub fn mock_script_output( - address: Address, - value: Value, - datum: Datum, -) -> Output { - mock_output(address, value, datum, reference_script: 
None) -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak deleted file mode 100644 index c11a249d..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/lib/mocktail/virgin_validity_range.ak +++ /dev/null @@ -1,28 +0,0 @@ -use aiken/interval.{ - Finite, Interval, IntervalBound, NegativeInfinity, PositiveInfinity, -} -use cardano/transaction.{ValidityRange} - -/// Mock a validity range with the given lower and upper bounds. -pub fn mock_interval(lower: Option, upper: Option) -> ValidityRange { - let lower_bound = - when lower is { - Some(lower_bound_number) -> - IntervalBound { - bound_type: Finite(lower_bound_number), - is_inclusive: True, - } - None -> IntervalBound { bound_type: NegativeInfinity, is_inclusive: True } - } - let upper_bound = - when upper is { - Some(upper_bound_number) -> - IntervalBound { - bound_type: Finite(upper_bound_number), - is_inclusive: True, - } - None -> IntervalBound { bound_type: PositiveInfinity, is_inclusive: True } - } - - Interval { lower_bound, upper_bound } -} diff --git a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json b/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json deleted file mode 100644 index ebc0bcfa..00000000 --- a/src/components/multisig/aiken-proxy/build/packages/sidan-lab-vodka/plutus.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "preamble": { - "title": "sidan-lab/vodka", - "description": "Aiken utils for project 'sidan-lab/vodka", - "version": "0.1.13", - "plutusVersion": "v3", - "compiler": { - "name": "Aiken", - "version": "v1.1.9+2217206" - }, - "license": "Apache-2.0" - }, - "validators": [] -} \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/lib/types.ak b/src/components/multisig/aiken-proxy/lib/types.ak deleted file mode 100644 index 3084fe32..00000000 --- a/src/components/multisig/aiken-proxy/lib/types.ak +++ /dev/null @@ -1,9 +0,0 @@ -pub type MintPolarity { - RMint - RBurn -} - -pub type ProxyRedeemer { - SpendFunds - RemoveEmptyInstance -} diff --git a/src/components/multisig/aiken-proxy/plutus.json b/src/components/multisig/aiken-proxy/plutus.json deleted file mode 100644 index ee53388c..00000000 --- a/src/components/multisig/aiken-proxy/plutus.json +++ /dev/null @@ -1,163 +0,0 @@ -{ - "preamble": { - "title": "mesh/proxy", - "description": "Aiken contracts for project 'aiken-proxy'", - "version": "0.0.0", - "plutusVersion": "v3", - "compiler": { - "name": "Aiken", - "version": "v1.1.17+c3a7fba" - }, - "license": "Apache-2.0" - }, - "validators": [ - { - "title": "auth_token/mint.auth_token.mint", - "redeemer": { - "title": "redeemer", - "schema": { - "$ref": "#/definitions/types~1MintPolarity" - } - }, - "parameters": [ - { - "title": "utxo_ref", - "schema": { - "$ref": "#/definitions/cardano~1transaction~1OutputReference" - } - } - ], - "compiledCode": 
"59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", - "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" - }, - { - "title": "auth_token/mint.auth_token.else", - "redeemer": { - "schema": {} - }, - "parameters": [ - { - "title": "utxo_ref", - "schema": { - "$ref": "#/definitions/cardano~1transaction~1OutputReference" - } - } - ], - "compiledCode": "59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", - "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" - }, - { - "title": "proxy/spend.proxy.spend", - "datum": { - "title": "_datum", - "schema": { - "$ref": "#/definitions/Data" - } - }, - "redeemer": { - "title": "redeemer", - "schema": { - "$ref": "#/definitions/types~1ProxyRedeemer" - } - }, - "parameters": [ - { - "title": "auth_token", - "schema": { - "$ref": "#/definitions/cardano~1assets~1PolicyId" - } - } - ], - "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", - "hash": 
"0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" - }, - { - "title": "proxy/spend.proxy.else", - "redeemer": { - "schema": {} - }, - "parameters": [ - { - "title": "auth_token", - "schema": { - "$ref": "#/definitions/cardano~1assets~1PolicyId" - } - } - ], - "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", - "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" - } - ], - "definitions": { - "ByteArray": { - "title": "ByteArray", - "dataType": "bytes" - }, - "Data": { - "title": "Data", - "description": "Any Plutus data." - }, - "Int": { - "dataType": "integer" - }, - "cardano/assets/PolicyId": { - "title": "PolicyId", - "dataType": "bytes" - }, - "cardano/transaction/OutputReference": { - "title": "OutputReference", - "description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", - "anyOf": [ - { - "title": "OutputReference", - "dataType": "constructor", - "index": 0, - "fields": [ - { - "title": "transaction_id", - "$ref": "#/definitions/ByteArray" - }, - { - "title": "output_index", - "$ref": "#/definitions/Int" - } - ] - } - ] - }, - "types/MintPolarity": { - "title": "MintPolarity", - "anyOf": [ - { - "title": "RMint", - "dataType": "constructor", - "index": 0, - "fields": [] - }, - { - "title": "RBurn", - "dataType": "constructor", - "index": 1, - "fields": [] - } - ] - }, - "types/ProxyRedeemer": { - "title": "ProxyRedeemer", - "anyOf": [ - { - "title": "SpendFunds", - "dataType": "constructor", - "index": 0, - "fields": [] - }, - { - "title": "RemoveEmptyInstance", - "dataType": "constructor", - "index": 1, - "fields": [] - } - ] - } - } -} \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/specs/1_auth_tokens.md b/src/components/multisig/aiken-proxy/specs/1_auth_tokens.md deleted file mode 100644 index 78afb6a1..00000000 --- a/src/components/multisig/aiken-proxy/specs/1_auth_tokens.md +++ /dev/null @@ -1,15 +0,0 @@ -# Auth Tokens - One Shot - -## Parameter - -- `utxo_ref`: UTxO to be spent at minting - -## User Action - -1. Mint - Redeemer `RMint` - - - Transaction hash as parameterized is included in input - -2. Burn - Redeemer `RBurn` - - - The current policy id only has negative minting value in transaction body. 
diff --git a/src/components/multisig/aiken-proxy/specs/2_proxy.md b/src/components/multisig/aiken-proxy/specs/2_proxy.md deleted file mode 100644 index 1f9abb9d..00000000 --- a/src/components/multisig/aiken-proxy/specs/2_proxy.md +++ /dev/null @@ -1,19 +0,0 @@ -# Specification - Crowdfund - -## Parameter - -- `auth_token`: The policy id of `AuthToken` - -## User Action - -1. SpendFunds - -2. RegisterDrep - -3. DeregisterDrep - -4. VoteasDrep - -5. RemoveEmptyInstance - - - `auth_token` from current input is burnt diff --git a/src/components/multisig/aiken-proxy/specs/_scripts.md b/src/components/multisig/aiken-proxy/specs/_scripts.md deleted file mode 100644 index c44c42f3..00000000 --- a/src/components/multisig/aiken-proxy/specs/_scripts.md +++ /dev/null @@ -1,20 +0,0 @@ -# Aiken Crowdfunding - -## 1. Auth Tokens - -The tokens are held in a native script multisig wallet and have to be included in every transaction. - - -## 2. Proxy - -The validator that represents the actual treasury / drep - -## Param dependency tree - -1. First layer - - - `auth_tokens` - `utxo_ref` - -2. Second layer - - - `proxy` - param `auth_tokens` diff --git a/src/components/multisig/aiken-proxy/specs/application_setup_doc.md b/src/components/multisig/aiken-proxy/specs/application_setup_doc.md deleted file mode 100644 index 743db0a5..00000000 --- a/src/components/multisig/aiken-proxy/specs/application_setup_doc.md +++ /dev/null @@ -1,13 +0,0 @@ -# Application Setup Documentation - -## Setup - -The are 2 steps of setting up the applications: - -1. Minting `auth_token`, one time minting policy with empty token name with a quantity decided by the user. - - - Validation: 1.1 - -2. Sending the the `auth_token` to the owner multisig - - - Validation: N/A \ No newline at end of file diff --git a/src/components/multisig/aiken-proxy/specs/user_action_doc.md b/src/components/multisig/aiken-proxy/specs/user_action_doc.md deleted file mode 100644 index dcb542e7..00000000 --- a/src/components/multisig/aiken-proxy/specs/user_action_doc.md +++ /dev/null @@ -1,6 +0,0 @@ -# User Actions Documentation - -## Multisig Users - - - diff --git a/src/components/multisig/aiken-proxy/validators/auth_token/mint.ak b/src/components/multisig/aiken-proxy/validators/auth_token/mint.ak deleted file mode 100644 index 11e14cf2..00000000 --- a/src/components/multisig/aiken-proxy/validators/auth_token/mint.ak +++ /dev/null @@ -1,27 +0,0 @@ -use aiken/collection/dict -use aiken/collection/list -use cardano/assets.{PolicyId} -use cardano/transaction.{OutputReference, Transaction} -use types.{MintPolarity, RBurn, RMint} - -validator auth_token(utxo_ref: OutputReference) { - mint(redeemer: MintPolarity, policy_id: PolicyId, self: Transaction) { - expect [Pair(_asset_name, quantity)] = - self.mint - |> assets.tokens(policy_id) - |> dict.to_pairs() - let Transaction { inputs, .. } = self - when redeemer is { - RMint -> { - let is_output_consumed = - list.any(inputs, fn(input) { input.output_reference == utxo_ref }) - is_output_consumed? 
&& quantity == 10 - } - RBurn -> quantity == -10 - } - } - - else(_) { - fail - } -} diff --git a/src/components/multisig/aiken-proxy/validators/proxy/spend.ak b/src/components/multisig/aiken-proxy/validators/proxy/spend.ak deleted file mode 100644 index 8cc68e1a..00000000 --- a/src/components/multisig/aiken-proxy/validators/proxy/spend.ak +++ /dev/null @@ -1,34 +0,0 @@ -use aiken/collection/list -use cardano/assets.{PolicyId} -use cardano/transaction.{OutputReference, Transaction} -use cocktail.{only_minted_token, outputs_with_policy} -use types.{ProxyRedeemer, RemoveEmptyInstance, SpendFunds} - -validator proxy(auth_token: PolicyId) { - spend( - _datum: Option, - redeemer: ProxyRedeemer, - _input: OutputReference, - self: Transaction, - ) { - let Transaction { mint, outputs, .. } = self - - when redeemer is { - // check if one auth token is moved in the transaction - SpendFunds -> { - // Check if any token from the auth_token policy is present in the outputs - // This means the token is being moved/transferred - let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) - // The token is considered "moved" if any auth token appears in outputs - // (meaning it's being transferred somewhere) - list.length(auth_tokens_in_outputs) > 0 - } - - RemoveEmptyInstance -> only_minted_token(mint, auth_token, "", -10)? - } - } - - else(_) { - fail - } -} diff --git a/src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak b/src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak deleted file mode 100644 index b113dd1c..00000000 --- a/src/components/multisig/aiken-proxy/validators/tests/auth_token/mint.ak +++ /dev/null @@ -1,63 +0,0 @@ -use auth_token/mint as auth_token_mint -use cardano/assets.{add, from_asset, zero} -use cardano/transaction.{Transaction, placeholder} -use mocktail.{ - complete, mint, mock_policy_id, mock_pub_key_address, mock_tx_hash, - mock_utxo_ref, mocktail_tx, tx_in, -} -use tests/utils.{mock_auth_token} -use types.{RBurn, RMint} - -test s1_mint_success_mint() { - let redeemer = RMint - let input_utxo = mock_utxo_ref(0, 1) - let policy_id = mock_auth_token - - let tx = - mocktail_tx() - |> mint(True, 1, policy_id, "") - |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) - |> complete() - - auth_token_mint.auth_token.mint(input_utxo, redeemer, policy_id, tx) -} - -test s1_mint_fail_mint_no_utxo_ref_supply() { - let redeemer = RMint - let policy_id = mock_auth_token - - let tx = - mocktail_tx() - |> mint(True, 1, policy_id, "") - |> tx_in(True, mock_tx_hash(0), 1, zero, mock_pub_key_address(0, None)) - |> complete() - !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) -} - -test s1_mint_success_burn() { - let redeemer = RBurn - let policy_id = mock_auth_token - - let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", -1) } - auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) -} - -test s1_mint_success_burn_with_other_minting() { - let redeemer = RBurn - let policy_id = mock_auth_token - - let tx = - Transaction { - ..placeholder, - mint: from_asset(policy_id, "", -1) |> add(mock_policy_id(999), "", 1), - } - auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) -} - -test s1_mint_fail_burn_with_mint() { - let redeemer = RBurn - let policy_id = mock_auth_token - - let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", 1) } - !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) -} diff 
--git a/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak b/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak deleted file mode 100644 index 05e836fc..00000000 --- a/src/components/multisig/aiken-proxy/validators/tests/proxy/spend.ak +++ /dev/null @@ -1,65 +0,0 @@ -use cardano/assets.{add, from_lovelace} -use mocktail.{ - complete, mint, mock_policy_id, mock_pub_key_address, mock_utxo_ref, - mocktail_tx, tx_out, -} -use proxy/spend as proxy_spend -use types.{RemoveEmptyInstance, SpendFunds} - -const auth_token = mock_policy_id(0) - -// SpendFunds: succeeds when any token of auth_token policy appears in outputs -test p_proxy_spend_success_spend_funds_when_auth_token_in_outputs() { - let value = from_lovelace(2_000_000) |> add(auth_token, "", 1) - let tx = - mocktail_tx() - |> tx_out(True, mock_pub_key_address(0, None), value) - |> complete() - - proxy_spend.proxy.spend(auth_token, None, SpendFunds, mock_utxo_ref(0, 0), tx) -} - -// SpendFunds: fails when no auth_token policy appears in outputs -test p_proxy_spend_fail_spend_funds_when_no_auth_token_in_outputs() { - let value = from_lovelace(2_000_000) - let tx = - mocktail_tx() - |> tx_out(True, mock_pub_key_address(1, None), value) - |> complete() - - !proxy_spend.proxy.spend( - auth_token, - None, - SpendFunds, - mock_utxo_ref(0, 0), - tx, - ) -} - -// RemoveEmptyInstance: succeeds when exact burn of -10 under auth_token policy -test p_proxy_spend_success_remove_empty_instance_with_auth_burnt() { - let tx = - mocktail_tx() - |> mint(True, -10, auth_token, "") - |> complete() - - proxy_spend.proxy.spend( - auth_token, - None, - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -// RemoveEmptyInstance: fails when no burn occurs -test p_proxy_spend_fail_remove_empty_instance_without_burn() { - let tx = mocktail_tx() |> complete() - !proxy_spend.proxy.spend( - auth_token, - None, - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} diff --git a/src/components/multisig/aiken-proxy/validators/tests/utils.ak b/src/components/multisig/aiken-proxy/validators/tests/utils.ak deleted file mode 100644 index c2ee6a99..00000000 --- a/src/components/multisig/aiken-proxy/validators/tests/utils.ak +++ /dev/null @@ -1,64 +0,0 @@ -use cardano/address.{from_script} -use mocktail.{ - mock_policy_id, mock_pub_key_hash, mock_script_hash, - mock_script_stake_key_hash, -} -use types.{CrowdfundDatum} - -pub const mock_auth_token = mock_policy_id(0) - -pub const mock_share_token = mock_policy_id(1) - -pub const mock_completion_script = mock_script_hash(0) - -pub const mock_crowdfund_spend_script_hash = mock_script_hash(1) - -pub const mock_crowdfund_stake_script_hash = mock_script_stake_key_hash(0) - -pub const mock_crowdfund_address = from_script(mock_crowdfund_spend_script_hash) - -pub const mock_fee_address = from_script("fee_address") - -pub const mock_fundraise_target = 100000000000 - -pub const mock_deadline = 1750735607 - -pub const mock_expiry_buffer = 3600 * 24 - -pub const mock_min_charge = 10 - -pub fn mock_crowdfund_datum( - current_fundraised_amount: Int, - allow_over_subscription: Bool, -) { - CrowdfundDatum { - completion_script: mock_completion_script, - share_token: mock_share_token, - crowdfund_address: mock_crowdfund_address, - fundraise_target: mock_fundraise_target, - current_fundraised_amount, - allow_over_subscription, - deadline: mock_deadline, - expiry_buffer: mock_expiry_buffer, - fee_address: mock_fee_address, - min_charge: mock_min_charge, - } -} - -pub const 
mock_current_fundraised_amount = 2000000 - -pub const mock_extra_fundraised_amount = 4000000 - -pub const mock_contribute_less_than_min_fundraised_amount = 1999999 - -pub const mock_contribute_min_fundraised_amount = 2000000 - -pub const mock_contribute_over_fundraised_amount = 100000000001 - -pub const auth_allow_over_subscription = - mock_crowdfund_datum(mock_current_fundraised_amount, True) - -pub const auth_not_allow_over_subscription = - mock_crowdfund_datum(mock_current_fundraised_amount, False) - -pub const mock_proposer_key_hash = mock_pub_key_hash(0) diff --git a/src/components/multisig/proxy/aiken-workspace/plutus.json b/src/components/multisig/proxy/aiken-workspace/plutus.json index ee53388c..7cd5c1ab 100644 --- a/src/components/multisig/proxy/aiken-workspace/plutus.json +++ b/src/components/multisig/proxy/aiken-workspace/plutus.json @@ -51,7 +51,7 @@ "datum": { "title": "_datum", "schema": { - "$ref": "#/definitions/Data" + "$ref": "#/definitions/cardano~1transaction~1Datum" } }, "redeemer": { @@ -100,10 +100,48 @@ "Int": { "dataType": "integer" }, + "aiken/crypto/DataHash": { + "title": "DataHash", + "dataType": "bytes" + }, "cardano/assets/PolicyId": { "title": "PolicyId", "dataType": "bytes" }, + "cardano/transaction/Datum": { + "title": "Datum", + "description": "An output `Datum`.", + "anyOf": [ + { + "title": "NoDatum", + "dataType": "constructor", + "index": 0, + "fields": [] + }, + { + "title": "DatumHash", + "description": "A datum referenced by its hash digest.", + "dataType": "constructor", + "index": 1, + "fields": [ + { + "$ref": "#/definitions/aiken~1crypto~1DataHash" + } + ] + }, + { + "title": "InlineDatum", + "description": "A datum completely inlined in the output.", + "dataType": "constructor", + "index": 2, + "fields": [ + { + "$ref": "#/definitions/Data" + } + ] + } + ] + }, "cardano/transaction/OutputReference": { "title": "OutputReference", "description": "An `OutputReference` is a unique reference to an output on-chain. 
The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", diff --git a/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak index 8cc68e1a..d8ea9620 100644 --- a/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak +++ b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak @@ -1,12 +1,12 @@ use aiken/collection/list use cardano/assets.{PolicyId} -use cardano/transaction.{OutputReference, Transaction} +use cardano/transaction.{Datum,OutputReference, Transaction} use cocktail.{only_minted_token, outputs_with_policy} use types.{ProxyRedeemer, RemoveEmptyInstance, SpendFunds} validator proxy(auth_token: PolicyId) { spend( - _datum: Option, + _datum: Option, redeemer: ProxyRedeemer, _input: OutputReference, self: Transaction, @@ -17,10 +17,12 @@ validator proxy(auth_token: PolicyId) { // check if one auth token is moved in the transaction SpendFunds -> { // Check if any token from the auth_token policy is present in the outputs - // This means the token is being moved/transferred + // This means the token is being moved/transferred signalising that the proxy owners are spending funds let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) // The token is considered "moved" if any auth token appears in outputs // (meaning it's being transferred somewhere) + // This is used to ensure that the proxy owners are spending funds + // and not someone else list.length(auth_tokens_in_outputs) > 0 } diff --git a/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak index b113dd1c..c02a7111 100644 --- a/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak +++ b/src/components/multisig/proxy/aiken-workspace/validators/tests/auth_token/mint.ak @@ -5,13 +5,15 @@ use mocktail.{ complete, mint, mock_policy_id, mock_pub_key_address, mock_tx_hash, mock_utxo_ref, mocktail_tx, tx_in, } -use tests/utils.{mock_auth_token} use types.{RBurn, RMint} + +const auth_token = mock_policy_id(0) + test s1_mint_success_mint() { let redeemer = RMint let input_utxo = mock_utxo_ref(0, 1) - let policy_id = mock_auth_token + let policy_id = auth_token let tx = mocktail_tx() @@ -24,7 +26,7 @@ test s1_mint_success_mint() { test s1_mint_fail_mint_no_utxo_ref_supply() { let redeemer = RMint - let policy_id = mock_auth_token + let policy_id = auth_token let tx = mocktail_tx() @@ -36,7 +38,7 @@ test s1_mint_fail_mint_no_utxo_ref_supply() { test s1_mint_success_burn() { let redeemer = RBurn - let policy_id = mock_auth_token + let policy_id = auth_token let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", -1) } auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) @@ -44,7 +46,7 @@ test s1_mint_success_burn() { test s1_mint_success_burn_with_other_minting() { let redeemer = RBurn - let policy_id = mock_auth_token + let policy_id = auth_token let tx = Transaction { @@ -56,7 +58,7 @@ test s1_mint_success_burn_with_other_minting() { test s1_mint_fail_burn_with_mint() { let redeemer = RBurn - let policy_id = mock_auth_token + let policy_id = auth_token let tx = Transaction { ..placeholder, mint: from_asset(policy_id, "", 1) } !auth_token_mint.auth_token.mint(mock_utxo_ref(0, 0), redeemer, policy_id, tx) diff --git 
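Editor's note: the comment added to `SpendFunds` above has a direct off-chain counterpart — for the check `list.length(auth_tokens_in_outputs) > 0` to pass, the transaction that unlocks a proxy UTxO must also re-output at least one token of the `auth_token` policy (normally the one just pulled in from the owner wallet). The following is a condensed, hedged sketch of that builder pattern, mirroring the `spendProxySimple` method added to `offchain.ts` further down; the function name and the idea that the caller resolves the proxy UTxO, auth-token UTxO, collateral, and script CBOR up front are assumptions for illustration only.

```typescript
import { mConStr0 } from "@meshsdk/common";
import type { MeshTxBuilder, UTxO } from "@meshsdk/core";

// Sketch only: unlock one proxy UTxO under the SpendFunds redeemer (constructor
// index 0 in the ProxyRedeemer definition) while keeping an auth token visible
// in the outputs, which is what the on-chain check requires.
async function buildSpendFundsTx(
  txBuilder: MeshTxBuilder,
  proxyScriptCbor: string,
  authTokenPolicyId: string,
  proxyUtxo: UTxO,
  authTokenUtxo: UTxO,
  collateral: UTxO,
  ownerAddress: string,
  walletUtxos: UTxO[],
): Promise<string> {
  return txBuilder
    .spendingPlutusScriptV3()
    .txIn(
      proxyUtxo.input.txHash,
      proxyUtxo.input.outputIndex,
      proxyUtxo.output.amount,
      proxyUtxo.output.address,
    )
    .txInScript(proxyScriptCbor)
    .txInRedeemerValue(mConStr0([])) // SpendFunds
    .txInDatumValue(mConStr0([])) // placeholder datum, as in spendProxySimple
    .txIn(
      authTokenUtxo.input.txHash,
      authTokenUtxo.input.outputIndex,
      authTokenUtxo.output.amount,
      authTokenUtxo.output.address,
    )
    .txInCollateral(
      collateral.input.txHash,
      collateral.input.outputIndex,
      collateral.output.amount,
      collateral.output.address,
    )
    // Re-output the auth token so outputs_with_policy(outputs, auth_token) is non-empty.
    .txOut(ownerAddress, [{ unit: authTokenPolicyId, quantity: "1" }])
    .changeAddress(ownerAddress)
    .selectUtxosFrom(walletUtxos)
    .complete();
}
```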
a/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak deleted file mode 100644 index c2ee6a99..00000000 --- a/src/components/multisig/proxy/aiken-workspace/validators/tests/utils.ak +++ /dev/null @@ -1,64 +0,0 @@ -use cardano/address.{from_script} -use mocktail.{ - mock_policy_id, mock_pub_key_hash, mock_script_hash, - mock_script_stake_key_hash, -} -use types.{CrowdfundDatum} - -pub const mock_auth_token = mock_policy_id(0) - -pub const mock_share_token = mock_policy_id(1) - -pub const mock_completion_script = mock_script_hash(0) - -pub const mock_crowdfund_spend_script_hash = mock_script_hash(1) - -pub const mock_crowdfund_stake_script_hash = mock_script_stake_key_hash(0) - -pub const mock_crowdfund_address = from_script(mock_crowdfund_spend_script_hash) - -pub const mock_fee_address = from_script("fee_address") - -pub const mock_fundraise_target = 100000000000 - -pub const mock_deadline = 1750735607 - -pub const mock_expiry_buffer = 3600 * 24 - -pub const mock_min_charge = 10 - -pub fn mock_crowdfund_datum( - current_fundraised_amount: Int, - allow_over_subscription: Bool, -) { - CrowdfundDatum { - completion_script: mock_completion_script, - share_token: mock_share_token, - crowdfund_address: mock_crowdfund_address, - fundraise_target: mock_fundraise_target, - current_fundraised_amount, - allow_over_subscription, - deadline: mock_deadline, - expiry_buffer: mock_expiry_buffer, - fee_address: mock_fee_address, - min_charge: mock_min_charge, - } -} - -pub const mock_current_fundraised_amount = 2000000 - -pub const mock_extra_fundraised_amount = 4000000 - -pub const mock_contribute_less_than_min_fundraised_amount = 1999999 - -pub const mock_contribute_min_fundraised_amount = 2000000 - -pub const mock_contribute_over_fundraised_amount = 100000000001 - -pub const auth_allow_over_subscription = - mock_crowdfund_datum(mock_current_fundraised_amount, True) - -pub const auth_not_allow_over_subscription = - mock_crowdfund_datum(mock_current_fundraised_amount, False) - -pub const mock_proposer_key_hash = mock_pub_key_hash(0) diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index 67d81ea1..8477ab66 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -1,41 +1,37 @@ import { - AssetMetadata, - conStr0, - Data, - integer, - mConStr0, - mOutputReference, - mPubKeyAddress, - stringToHex, - } from "@meshsdk/common"; - import { - deserializeAddress, - resolveScriptHash, - serializeAddressObj, - serializePlutusScript, - UTxO, - applyCborEncoding, - applyParamsToScript, - } from "@meshsdk/core"; - import { - parseDatumCbor - } from "@meshsdk/core-cst"; - - import { MeshTxInitiator, MeshTxInitiatorInput } from "./common"; - import blueprint from "./aiken-workspace/plutus.json"; - + AssetMetadata, + conStr0, + Data, + integer, + mConStr0, + mOutputReference, + mPubKeyAddress, + stringToHex, +} from "@meshsdk/common"; +import { + deserializeAddress, + resolveScriptHash, + serializeAddressObj, + serializePlutusScript, + UTxO, + applyCborEncoding, + applyParamsToScript, +} from "@meshsdk/core"; +import { parseDatumCbor } from "@meshsdk/core-cst"; +import { MeshTxInitiator, MeshTxInitiatorInput } from "./common"; +import blueprint from "./aiken-workspace/plutus.json"; /** * Mesh Plutus NFT contract class - * - * This NFT minting script enables users to mint NFTs with an automatically incremented index, which increases 
by one for each newly minted NFT. - * - * To facilitate this process, the first step is to set up a one-time minting policy by minting an oracle token. This oracle token is essential as it holds the current state and index of the NFTs, acting as a reference for the minting sequence. - * + * + * This NFT minting script enables users to mint NFTs with an automatically incremented index, which increases by one for each newly minted NFT. + * + * To facilitate this process, the first step is to set up a one-time minting policy by minting an oracle token. This oracle token is essential as it holds the current state and index of the NFTs, acting as a reference for the minting sequence. + * * With each new NFT minted, the token index within the oracle is incremented by one, ensuring a consistent and orderly progression in the numbering of the NFTs. */ -export class MeshProxyContract { +export class MeshProxyContract extends MeshTxInitiator { paramUtxo: UTxO["input"] = { outputIndex: 0, txHash: "" }; proxyAddress?: string; stakeCredential?: string | undefined; @@ -76,6 +72,7 @@ export class MeshProxyContract { paramUtxo?: UTxO["input"]; }, ) { + super(inputs); this.stakeCredential = inputs.stakeCredential; this.networkId = inputs.networkId ? inputs.networkId : 0; // Set the proxyAddress if paramUtxo is provided @@ -85,17 +82,17 @@ export class MeshProxyContract { } } -/** - * Set up a proxy address with fixed amount of 10 auth tokens, that will be sent to the owner multisig - * Moving an auth token unlocks the proxy address. - * - * @returns - Transaction hex to be signed by the owner multisig - * - * @example - * ```typescript - * const { tx, paramUtxo } = await contract.setupProxy(); - * ``` - */ + /** + * Set up a proxy address with fixed amount of 10 auth tokens, that will be sent to the owner multisig + * Moving an auth token unlocks the proxy address. + * + * @returns - Transaction hex to be signed by the owner multisig + * + * @example + * ```typescript + * const { tx, paramUtxo } = await contract.setupProxy(); + * ``` + */ setupProxy = async () => { const { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); @@ -147,88 +144,142 @@ export class MeshProxyContract { authTokenId: policyId, proxyAddress: proxyAddress, }; - }; - - /** - * Mint NFT token with an automatically incremented index, which increases by one for each newly minted NFT. - * @param assetMetadata - Asset metadata - * @returns - Transaction hex - * - * @example - * ```typescript - * const assetMetadata = { - * ...demoAssetMetadata, - * name: `Mesh Token ${oracleData.nftIndex}`, - * }; - * const tx = await contract.mintPlutusNFT(assetMetadata); - * ``` - */ - mintPlutusNFT = async (assetMetadata?: AssetMetadata) => { + spendProxySimple = async ( + outputs: { address: string; unit: string; amount: string }[], + ) => { const { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + if (utxos?.length <= 0) { throw new Error("No UTxOs found"); } - const { - nftIndex, - policyId, - lovelacePrice, - oracleUtxo, - oracleNftPolicyId, - feeCollectorAddress, - feeCollectorAddressObj, - } = await this.getOracleData(); - - const tokenName = `${this.collectionName} (${nftIndex})`; - const tokenNameHex = stringToHex(tokenName); - - const updatedOracleDatum: OracleDatum = conStr0([ - integer((nftIndex as number) + 1), - integer(lovelacePrice), - feeCollectorAddressObj, - ]); + if (this.proxyAddress === undefined) { + throw new Error("Proxy address not set. 
Please setupProxy first."); + } - const tx = this.mesh + const blockchainProvider = this.mesh.fetcher; + if (!blockchainProvider) { + throw new Error("Blockchain provider not found"); + } + + const proxyUtxos = await blockchainProvider.fetchAddressUTxOs( + this.proxyAddress, + ); + + const freeProxyUtxos = proxyUtxos[0]!; + console.log("freeProxyUtxos", freeProxyUtxos); + + const paramScriptAT = this.getAuthTokenCbor(); + const policyIdAT = resolveScriptHash(paramScriptAT, "V3"); + + const authTokenUtxos = utxos.filter((utxo) => + utxo.output.amount.some((asset) => asset.unit === policyIdAT), + ); + + console.log("authTokenUtxos", authTokenUtxos); + console.log("policyIdAT", policyIdAT); + + if (!authTokenUtxos || authTokenUtxos.length === 0) { + throw new Error("No AuthToken found at control wallet address"); + } + //ToDo check if AuthToken utxo is used in a pending transaction and blocked then use a free AuthToken + const authTokenUtxo = authTokenUtxos[0]; + if (!authTokenUtxo) { + throw new Error("No AuthToken found"); + } + const authTokenUtxoAmt = authTokenUtxo.output.amount; + if (!authTokenUtxoAmt) { + throw new Error("No AuthToken amount found"); + } + + //prepare Proxy spend + //1 Get + const txHex = await this.mesh .spendingPlutusScriptV3() .txIn( - oracleUtxo.input.txHash, - oracleUtxo.input.outputIndex, - oracleUtxo.output.amount, - oracleUtxo.output.address, + freeProxyUtxos.input.txHash, + freeProxyUtxos.input.outputIndex, + freeProxyUtxos.output.amount, + freeProxyUtxos.output.address, ) + .txInScript(this.getProxyCbor()) .txInRedeemerValue(mConStr0([])) - .txInScript(this.getOracleCbor()) - .txInInlineDatumPresent() - .txOut(this.oracleAddress, [{ unit: oracleNftPolicyId, quantity: "1" }]) - .txOutInlineDatumValue(updatedOracleDatum, "JSON") - .mintPlutusScriptV3() - .mint("1", policyId, tokenNameHex) - .mintingScript(this.getNFTCbor()); - - if (assetMetadata) { - const metadata = { [policyId]: { [tokenName]: { ...assetMetadata } } }; - tx.metadataValue(721, metadata); - } - - tx.mintRedeemerValue(mConStr0([])) - .txOut(feeCollectorAddress, [ - { unit: "lovelace", quantity: lovelacePrice.toString() }, - ]) + .txInDatumValue(mConStr0([])) // Add empty datum since script expects Option + .txIn( + authTokenUtxo.input.txHash, + authTokenUtxo.input.outputIndex, + authTokenUtxo.output.amount, + authTokenUtxo.output.address, + ) .txInCollateral( collateral.input.txHash, collateral.input.outputIndex, collateral.output.amount, collateral.output.address, ) - .changeAddress(walletAddress) - .selectUtxosFrom(utxos); + .txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]) - const txHex = await tx.complete(); - return txHex; + + for (const output of outputs) { + txHex.txOut(output.address, [ + { unit: output.unit, quantity: output.amount }, + ]); + } + + txHex.changeAddress(walletAddress) + // Only pass pubkey (KeyHash) UTxOs for coin selection + .selectUtxosFrom(utxos) + + const tx = await txHex.complete(); + console.log("tx", tx); + + return tx; }; + /** + * Fetch the balance of the proxy address + * + * @returns - Array of assets with their quantities at the proxy address + * + * @example + * ```typescript + * const balance = await contract.getProxyBalance(); + * console.log(balance); // [{ unit: "lovelace", quantity: "1000000" }, ...] + * ``` + */ + getProxyBalance = async () => { + if (this.proxyAddress === undefined) { + throw new Error("Proxy address not set. 
Please setupProxy first."); + } -} \ No newline at end of file + const blockchainProvider = this.mesh.fetcher; + if (!blockchainProvider) { + throw new Error("Blockchain provider not found"); + } + + const utxos = await blockchainProvider.fetchAddressUTxOs(this.proxyAddress); + + // Aggregate all assets from UTxOs + const balanceMap = new Map(); + + for (const utxo of utxos) { + for (const asset of utxo.output.amount) { + const currentAmount = balanceMap.get(asset.unit) || BigInt(0); + balanceMap.set(asset.unit, currentAmount + BigInt(asset.quantity)); + } + } + + // Convert back to string format for consistency + const balance = Array.from(balanceMap.entries()).map( + ([unit, quantity]) => ({ + unit, + quantity: quantity.toString(), + }), + ); + + return balance; + }; +} diff --git a/src/components/pages/wallet/info/index.tsx b/src/components/pages/wallet/info/index.tsx index 09d00ee1..6b5849ed 100644 --- a/src/components/pages/wallet/info/index.tsx +++ b/src/components/pages/wallet/info/index.tsx @@ -8,6 +8,7 @@ import { ArchiveWallet } from "./archive-wallet"; import InspectMultisigScript from "@/components/multisig/inspect-multisig-script"; import { UpgradeStakingWallet } from "./upgrade-staking-wallet"; import { RegisterWallet } from "./register-wallet"; +import ProxyControlCard from "./proxy-control"; export default function WalletInfo() { const { appWallet } = useAppWallet(); @@ -20,6 +21,7 @@ export default function WalletInfo() {
+ {(!multisigWallet || !multisigWallet.stakingEnabled()) && } {multisigWallet && multisigWallet.stakingEnabled() && } diff --git a/src/server/api/root.ts b/src/server/api/root.ts index c8fa9a5d..00a28053 100644 --- a/src/server/api/root.ts +++ b/src/server/api/root.ts @@ -4,6 +4,7 @@ import { walletRouter } from "./routers/wallets"; import { transactionRouter } from "./routers/transactions"; import { signableRouter } from "./routers/signable"; import { ballotRouter } from "./routers/ballot"; +import { proxyRouter } from "./routers/proxy"; /** @@ -17,6 +18,7 @@ export const appRouter = createTRPCRouter({ wallet: walletRouter, signable: signableRouter, ballot: ballotRouter, + proxy: proxyRouter, }); // export type definition of API diff --git a/src/server/api/routers/proxy.ts b/src/server/api/routers/proxy.ts new file mode 100644 index 00000000..d0c8c4a5 --- /dev/null +++ b/src/server/api/routers/proxy.ts @@ -0,0 +1,69 @@ +import { z } from "zod"; + +import { createTRPCRouter, publicProcedure } from "@/server/api/trpc"; + +export const proxyRouter = createTRPCRouter({ + createProxy: publicProcedure + .input( + z.object({ + walletId: z.string(), + proxyAddress: z.string(), + authTokenId: z.string(), + paramUtxo: z.string(), + description: z.string().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + return ctx.db.proxy.create({ + data: { + walletId: input.walletId, + proxyAddress: input.proxyAddress, + authTokenId: input.authTokenId, + paramUtxo: input.paramUtxo, + description: input.description, + }, + }); + }), + + getProxyByWalletId: publicProcedure + .input(z.object({ walletId: z.string() })) + .query(async ({ ctx, input }) => { + return await ctx.db.proxy.findUnique({ + where: { + walletId: input.walletId, + }, + }); + }), + + updateProxy: publicProcedure + .input( + z.object({ + walletId: z.string(), + proxyAddress: z.string().optional(), + authTokenId: z.string().optional(), + paramUtxo: z.string().optional(), + description: z.string().optional(), + isActive: z.boolean().optional(), + }), + ) + .mutation(async ({ ctx, input }) => { + const { walletId, ...updateData } = input; + return ctx.db.proxy.update({ + where: { + walletId: walletId, + }, + data: updateData, + }); + }), + + deleteProxy: publicProcedure + .input(z.object({ walletId: z.string() })) + .mutation(async ({ ctx, input }) => { + return ctx.db.proxy.delete({ + where: { + walletId: input.walletId, + }, + }); + }), +}); + From d37992f7c9fe159260bee0474937621080e5f43f Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Mon, 13 Oct 2025 10:38:10 +0200 Subject: [PATCH 09/15] s08 --- prisma/schema.prisma | 3 +- .../wallet-data-loader-wrapper.tsx | 2 + .../overall-layout/wallet-data-loader.tsx | 2 + .../multisig/proxy/ProxyControl.tsx | 750 ++++++++++++++++++ .../multisig/proxy/ProxyControlExample.tsx | 107 +++ .../multisig/proxy/ProxyOverview.tsx | 582 ++++++++++++++ src/components/multisig/proxy/ProxySetup.tsx | 323 ++++++++ src/components/multisig/proxy/ProxySpend.tsx | 249 ++++++ src/components/multisig/proxy/README.md | 150 ++++ src/components/multisig/proxy/index.ts | 4 +- src/components/multisig/proxy/offchain.ts | 31 +- src/components/pages/wallet/dapps/index.tsx | 6 + .../pages/wallet/info/proxy-control.tsx | 36 + src/server/api/routers/proxy.ts | 129 ++- 14 files changed, 2353 insertions(+), 21 deletions(-) create mode 100644 src/components/multisig/proxy/ProxyControl.tsx create mode 100644 src/components/multisig/proxy/ProxyControlExample.tsx create mode 100644 src/components/multisig/proxy/ProxyOverview.tsx 
create mode 100644 src/components/multisig/proxy/ProxySetup.tsx create mode 100644 src/components/multisig/proxy/ProxySpend.tsx create mode 100644 src/components/multisig/proxy/README.md create mode 100644 src/components/pages/wallet/info/proxy-control.tsx diff --git a/prisma/schema.prisma b/prisma/schema.prisma index cf6e4de0..e65505d9 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -97,7 +97,8 @@ model Ballot { model Proxy { id String @id @default(cuid()) - walletId String @unique + walletId String? // Optional - can be linked to wallet or user + userId String? // Optional - can be linked to user directly proxyAddress String authTokenId String paramUtxo String diff --git a/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx b/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx index 4119ed76..7034cc00 100644 --- a/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx +++ b/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx @@ -182,6 +182,8 @@ export default function WalletDataLoaderWrapper({ await getDRepInfo(); void ctx.transaction.getPendingTransactions.invalidate(); void ctx.transaction.getAllTransactions.invalidate(); + // Also refresh proxy data + void ctx.proxy.getProxiesByUserOrWallet.invalidate(); setRandomState(); setLoading(false); fetchingTransactions.current = false; diff --git a/src/components/common/overall-layout/wallet-data-loader.tsx b/src/components/common/overall-layout/wallet-data-loader.tsx index 92dc7284..b699bfe7 100644 --- a/src/components/common/overall-layout/wallet-data-loader.tsx +++ b/src/components/common/overall-layout/wallet-data-loader.tsx @@ -59,6 +59,8 @@ export default function WalletDataLoader() { await getTransactionsOnChain(); void ctx.transaction.getPendingTransactions.invalidate(); void ctx.transaction.getAllTransactions.invalidate(); + // Also refresh proxy data + void ctx.proxy.getProxiesByUserOrWallet.invalidate(); setRandomState(); setLoading(false); } diff --git a/src/components/multisig/proxy/ProxyControl.tsx b/src/components/multisig/proxy/ProxyControl.tsx new file mode 100644 index 00000000..f21eaacd --- /dev/null +++ b/src/components/multisig/proxy/ProxyControl.tsx @@ -0,0 +1,750 @@ +import React, { useState, useEffect, useCallback } from "react"; +import { useWallet } from "@meshsdk/react"; +import { MeshProxyContract } from "./offchain"; +import { useUserStore } from "@/lib/zustand/user"; +import { useSiteStore } from "@/lib/zustand/site"; +import { toast } from "@/hooks/use-toast"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import useAppWallet from "@/hooks/useAppWallet"; +import { api } from "@/utils/api"; +import ProxyOverview from "./ProxyOverview"; +import ProxySetup from "./ProxySetup"; +import ProxySpend from "./ProxySpend"; + +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Dialog, DialogContent, DialogHeader, DialogTitle } from "@/components/ui/dialog"; +import { Card, CardContent, CardHeader } from "@/components/ui/card"; +import { Button } from "@/components/ui/button"; +import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; +import { AlertCircle, ChevronDown, ChevronUp, Wallet, TrendingUp, Info } from "lucide-react"; + +interface ProxyOutput { + address: string; + unit: string; + amount: string; +} + +interface ProxySetupResult { + tx: string; + paramUtxo: { txHash: string; outputIndex: number }; + 
authTokenId: string; + proxyAddress: string; +} + +export default function ProxyControl() { + const { wallet, connected } = useWallet(); + const userAddress = useUserStore((state) => state.userAddress); + const setLoading = useSiteStore((state) => state.setLoading); + const network = useSiteStore((state) => state.network); + const { appWallet } = useAppWallet(); + const ctx = api.useUtils(); + + const { mutateAsync: createTransaction } = api.transaction.createTransaction.useMutation({ + onSuccess: () => { + void ctx.transaction.getPendingTransactions.invalidate(); + }, + }); + + const { mutateAsync: createProxy } = api.proxy.createProxy.useMutation({ + onSuccess: () => { + void ctx.proxy.getProxiesByUserOrWallet.invalidate(); + }, + }); + + const { mutateAsync: updateProxy } = api.proxy.updateProxy.useMutation({ + onSuccess: () => { + void ctx.proxy.getProxiesByUserOrWallet.invalidate(); + }, + }); + + // Get user by address for user-linked proxies + const { data: user } = api.proxy.getUserByAddress.useQuery( + { address: userAddress || "" }, + { enabled: !!userAddress && !appWallet?.id } + ); + + const { data: proxies, refetch: refetchProxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); + + // State management + const [proxyContract, setProxyContract] = useState(null); + const [proxyBalance, setProxyBalance] = useState>([]); + const [isProxySetup, setIsProxySetup] = useState(false); + const [loading, setLocalLoading] = useState(false); + const [selectedProxy, setSelectedProxy] = useState(""); + const [selectedProxyBalance, setSelectedProxyBalance] = useState>([]); + const [allProxyBalances, setAllProxyBalances] = useState>>({}); + const [tvlLoading, setTvlLoading] = useState(false); + + // Setup flow state + const [setupStep, setSetupStep] = useState(0); + const [setupData, setSetupData] = useState<{ + paramUtxo?: { txHash: string; outputIndex: number }; + authTokenId?: string; + proxyAddress?: string; + txHex?: string; + description?: string; + }>({}); + + // Tab management + + // Form states + const [setupLoading, setSetupLoading] = useState(false); + const [spendLoading, setSpendLoading] = useState(false); + const [showSetupModal, setShowSetupModal] = useState(false); + const [showSpendSection, setShowSpendSection] = useState(false); + const [isExpanded, setIsExpanded] = useState(false); + + // Spend form + const [spendOutputs, setSpendOutputs] = useState([ + { address: "", unit: "lovelace", amount: "" } + ]); + + // Initialize proxy contract + useEffect(() => { + if (connected && wallet && userAddress) { + try { + const txBuilder = getTxBuilder(network); + const contract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: wallet, + networkId: network, + }, + {} + ); + setProxyContract(contract); + } catch (error) { + console.error("Failed to initialize proxy contract:", error); + toast({ + title: "Error", + description: "Failed to initialize proxy contract", + variant: "destructive", + }); + } + } + }, [connected, wallet, userAddress, network]); + + // Check if proxy is already set up + const checkProxySetup = useCallback(async () => { + if (!proxyContract) return; + + try { + const balance = await proxyContract.getProxyBalance(); + setProxyBalance(balance); + setIsProxySetup(balance.length > 0); + } catch (error) { + // Proxy not set up yet + setIsProxySetup(false); + setProxyBalance([]); + } + }, [proxyContract]); + + // Load 
initial state + useEffect(() => { + checkProxySetup(); + }, [checkProxySetup]); + + // Step 1: Initialize proxy setup + const handleInitializeSetup = useCallback(async (description?: string) => { + if (!proxyContract || !connected) { + toast({ + title: "Error", + description: "Wallet not connected or proxy contract not initialized", + variant: "destructive", + }); + return; + } + + try { + setSetupLoading(true); + setLoading(true); + + // Reset setup data to prevent conflicts with previous attempts + setSetupData({}); + setSetupStep(0); + + // Reset proxy contract state to prevent policy ID conflicts + proxyContract.reset(); + + const result: ProxySetupResult = await proxyContract.setupProxy(); + + setSetupData({ + paramUtxo: result.paramUtxo, + authTokenId: result.authTokenId, + proxyAddress: result.proxyAddress, + txHex: result.tx, + description: description || undefined, + }); + + setSetupStep(1); + toast({ + title: "Step 1 Complete", + description: "Proxy parameters generated successfully", + variant: "default", + }); + + } catch (error) { + console.error("Initialize setup error:", error); + toast({ + title: "Error", + description: `Failed to initialize proxy setup: ${error instanceof Error ? error.message : 'Unknown error'}`, + variant: "destructive", + }); + } finally { + setSetupLoading(false); + setLoading(false); + } + }, [proxyContract, connected, setLoading]); + + // Step 2: Review and confirm setup + const handleConfirmSetup = useCallback(async () => { + if (!setupData.txHex || !setupData.proxyAddress || !setupData.authTokenId) { + toast({ + title: "Error", + description: "Setup data is incomplete", + variant: "destructive", + }); + return; + } + + try { + setSetupLoading(true); + setLoading(true); + + // Sign and submit the transaction + const signedTx = await wallet.signTx(setupData.txHex, true); + await wallet.submitTx(signedTx); + + // Store proxy information in the database + if (!appWallet?.id && !userAddress) { + throw new Error("Either wallet ID or user address is required to create proxy"); + } + + await createProxy({ + walletId: appWallet?.id || undefined, + userId: user?.id || undefined, + proxyAddress: setupData.proxyAddress, + authTokenId: setupData.authTokenId, + paramUtxo: JSON.stringify(setupData.paramUtxo), + description: setupData.description || undefined, + }); + + // Update local state + setIsProxySetup(true); + + // Refresh the proxies list + await refetchProxies(); + + setSetupStep(2); + toast({ + title: "Setup Complete!", + description: "Proxy has been successfully created and is ready to use", + variant: "default", + }); + + // Close the setup modal after successful completion + setTimeout(() => { + setShowSetupModal(false); + setSetupStep(0); + setSetupData({}); + }, 2000); // Close after 2 seconds to let user see the success message + + } catch (error) { + console.error("Confirm setup error:", error); + + // Handle specific error cases + let errorMessage = "Failed to complete proxy setup"; + if (error instanceof Error) { + if (error.message.includes("No collateral found")) { + errorMessage = "Wallet collateral not set up. Please set up collateral in your wallet settings (requires 5 ADA minimum)."; + } else if (error.message.includes("No UTxOs found")) { + errorMessage = "No UTxOs available in wallet. 
Please ensure your wallet has sufficient funds."; + } else { + errorMessage = `Failed to complete proxy setup: ${error.message}`; + } + } + + toast({ + title: "Error", + description: errorMessage, + variant: "destructive", + }); + } finally { + setSetupLoading(false); + setLoading(false); + } + }, [setupData, wallet, appWallet, user, createProxy, refetchProxies, setLoading]); + + // Reset setup flow + const handleResetSetup = useCallback(() => { + setSetupStep(0); + setSetupData({}); + }, []); + + // Navigation functions + const handleStartSetup = useCallback(() => { + setShowSetupModal(true); + }, []); + + const handleStartSpending = useCallback(() => { + if (selectedProxy) { + setShowSpendSection(true); + } + }, [selectedProxy]); + + const handleCloseSetup = useCallback(() => { + setShowSetupModal(false); + setSetupStep(0); + setSetupData({}); + }, []); + + const handleCloseSpend = useCallback(() => { + setShowSpendSection(false); + }, []); + + const handleUpdateProxy = useCallback(async (proxyId: string, description: string) => { + await updateProxy({ + id: proxyId, + description: description || undefined, + }); + }, [updateProxy]); + + // Get balance for a specific proxy + const getProxyBalance = useCallback(async (proxyAddress: string) => { + if (!proxyContract) return []; + + try { + // Create a temporary contract instance for this proxy + const tempContract = new MeshProxyContract( + { + mesh: getTxBuilder(network), + wallet: wallet, + networkId: network, + }, + {} + ); + tempContract.proxyAddress = proxyAddress; + + const balance = await tempContract.getProxyBalance(); + return balance; + } catch (error) { + console.error("Get proxy balance error:", error); + return []; + } + }, [proxyContract, network, wallet]); + + // Fetch all proxy balances for TVL calculation + const fetchAllProxyBalances = useCallback(async () => { + if (!proxies || proxies.length === 0 || !proxyContract) return; + + try { + setTvlLoading(true); + const balances: Record> = {}; + + for (const proxy of proxies) { + try { + const balance = await getProxyBalance(proxy.proxyAddress); + balances[proxy.id] = balance; + } catch (error) { + console.error(`Failed to fetch balance for proxy ${proxy.id}:`, error); + balances[proxy.id] = []; + } + } + + setAllProxyBalances(balances); + } catch (error) { + console.error("Failed to fetch proxy balances:", error); + } finally { + setTvlLoading(false); + } + }, [proxies, proxyContract, getProxyBalance]); + + // Calculate Total Value Locked (TVL) across all proxies + const calculateTVL = useCallback(() => { + if (!proxies || proxies.length === 0) { + return { totalADA: 0, totalAssets: 0, totalProxies: 0 }; + } + + let totalADA = 0; + let totalAssets = 0; + let totalProxies = proxies.length; + + // Sum up all ADA from all proxy balances + Object.values(allProxyBalances).forEach((balance) => { + balance.forEach((asset) => { + if (asset.unit === "lovelace") { + totalADA += parseFloat(asset.quantity) / 1000000; // Convert lovelace to ADA + } + totalAssets++; + }); + }); + + return { totalADA, totalAssets, totalProxies }; + }, [proxies, allProxyBalances]); + + const { totalADA, totalAssets } = calculateTVL(); + + // Fetch all proxy balances when proxies change + useEffect(() => { + if (proxies && proxies.length > 0 && proxyContract) { + fetchAllProxyBalances(); + } + }, [proxies, proxyContract, fetchAllProxyBalances]); + + // Refresh balances when component mounts or wallet changes + useEffect(() => { + if (proxies && proxies.length > 0 && proxyContract && connected) { + // Small 
delay to ensure everything is initialized + const timer = setTimeout(() => { + fetchAllProxyBalances(); + }, 1000); + return () => clearTimeout(timer); + } + }, [connected, proxyContract, fetchAllProxyBalances, proxies]); + + // Manual TVL refresh function + const refreshTVL = useCallback(async () => { + if (proxies && proxies.length > 0 && proxyContract) { + await fetchAllProxyBalances(); + } + }, [proxies, proxyContract, fetchAllProxyBalances]); + + // Global refresh function for all proxy balances + const refreshAllBalances = useCallback(async () => { + if (proxies && proxies.length > 0 && proxyContract) { + await fetchAllProxyBalances(); + } + }, [proxies, proxyContract, fetchAllProxyBalances]); + + // Spend outputs management + const handleSpendOutputsChange = useCallback((outputs: ProxyOutput[]) => { + setSpendOutputs(outputs); + }, []); + + // Handle proxy selection + const handleProxySelection = useCallback(async (proxyId: string) => { + setSelectedProxy(proxyId); + const proxy = proxies?.find(p => p.id === proxyId); + if (proxy) { + const balance = await getProxyBalance(proxy.proxyAddress); + setSelectedProxyBalance(balance); + } + }, [proxies, getProxyBalance]); + + + // Spend from proxy + const handleSpendFromProxy = useCallback(async () => { + if (!proxyContract || !connected) { + toast({ + title: "Error", + description: "Wallet not connected or proxy contract not initialized", + variant: "destructive", + }); + return; + } + + if (!selectedProxy) { + toast({ + title: "Error", + description: "Please select a proxy to spend from", + variant: "destructive", + }); + return; + } + + // Validate outputs + const validOutputs = spendOutputs.filter(output => + output.address && output.amount && parseFloat(output.amount) > 0 + ); + + if (validOutputs.length === 0) { + toast({ + title: "Error", + description: "Please provide at least one valid output", + variant: "destructive", + }); + return; + } + + try { + setSpendLoading(true); + setLoading(true); + + // Get the selected proxy + const proxy = proxies?.find(p => p.id === selectedProxy); + if (!proxy) { + throw new Error("Selected proxy not found"); + } + + // Create a contract instance for the selected proxy + const selectedProxyContract = new MeshProxyContract( + { + mesh: getTxBuilder(network), + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + } + ); + selectedProxyContract.proxyAddress = proxy.proxyAddress; + + const txHex = await selectedProxyContract.spendProxySimple(validOutputs); + + // Sign and submit the transaction + const signedTx = await wallet.signTx(txHex, true); + await wallet.submitTx(signedTx); + + toast({ + title: "Success", + description: "Proxy spend transaction submitted successfully", + variant: "default", + }); + + // Refresh balance after successful spend + await handleProxySelection(selectedProxy); + + // Close the spend modal after successful transaction + setTimeout(() => { + setShowSpendSection(false); + }, 2000); // Close after 2 seconds to let user see the success message + + } catch (error) { + console.error("Spend from proxy error:", error); + + // Handle specific error cases + let errorMessage = "Failed to spend from proxy"; + if (error instanceof Error) { + if (error.message.includes("No collateral found")) { + errorMessage = "Wallet collateral not set up. Please set up collateral in your wallet settings (requires 5 ADA minimum)."; + } else if (error.message.includes("No UTxOs found")) { + errorMessage = "No UTxOs available in wallet. 
Please ensure your wallet has sufficient funds."; + } else { + errorMessage = `Failed to spend from proxy: ${error.message}`; + } + } + + toast({ + title: "Error", + description: errorMessage, + variant: "destructive", + }); + } finally { + setSpendLoading(false); + setLoading(false); + } + }, [proxyContract, connected, spendOutputs, selectedProxy, proxies, network, wallet, setLoading, handleProxySelection]); + + + // Copy to clipboard + const copyToClipboard = (text: string) => { + navigator.clipboard.writeText(text); + toast({ + title: "Copied", + description: "Address copied to clipboard", + variant: "default", + }); + }; + + + if (!connected) { + return ( + + + + + + Please connect your wallet to use proxy control features. + + + + + ); + } + + if (loading) { + return ( + + +
+
+ +
+
+
+
+
+
+
+
+ ); + } + + + return ( +
+ {/* Single Expanding Proxy Control Card */} + + setIsExpanded(!isExpanded)} + onKeyDown={(e) => { + if (e.key === 'Enter' || e.key === ' ') { + e.preventDefault(); + setIsExpanded(!isExpanded); + } + }} + tabIndex={0} + role="button" + aria-expanded={isExpanded} + aria-label={`${isExpanded ? 'Collapse' : 'Expand'} proxy control panel`} + > +
+
+
+ +
+
+
+ Proxy Contracts +
+
+ Automated transaction management +
+
+
+
+ {/* TVL Display */} + + + +
+
+ +
+ {tvlLoading ? ( +
+ ) : ( + `${totalADA.toFixed(2)} ADA` + )} +
+ +
+
+ {proxies && proxies.length > 0 + ? `${proxies.length} ${proxies.length !== 1 ? 'proxies' : 'proxy'} • ${totalAssets} asset${totalAssets !== 1 ? 's' : ''}` + : 'Ready to set up' + } +
+
+
+ +
+
+
Total Value Locked (TVL)
+ +
+
+
Total ADA: {totalADA.toFixed(6)} ADA
+
Total Assets: {totalAssets}
+
Active Proxies: {proxies?.length || 0}
+ {tvlLoading && ( +
Updating balances...
+ )} +
+
+
+
+
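// Illustrative sketch (not part of this patch): how the TVL figure shown above is derived
// from the per-proxy balances collected by fetchAllProxyBalances/calculateTVL. Example
// values only; the real data comes from getProxyBalance().
const exampleBalances: Record<string, Array<{ unit: string; quantity: string }>> = {
  proxyA: [{ unit: "lovelace", quantity: "2500000" }],
  proxyB: [{ unit: "lovelace", quantity: "1000000" }],
};

let exampleTotalAda = 0;
let exampleAssetCount = 0;
for (const balance of Object.values(exampleBalances)) {
  for (const asset of balance) {
    if (asset.unit === "lovelace") {
      exampleTotalAda += parseFloat(asset.quantity) / 1_000_000; // lovelace -> ADA
    }
    exampleAssetCount++;
  }
}
// exampleTotalAda === 3.5 and exampleAssetCount === 2 — the numbers rendered in the card above.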
+ + {/* Expand/Collapse Button */} + +
+
+
+ + {/* Expandable Content */} + {isExpanded && ( + +
+ {/* Overview Section */} + + +
+
+ )} +
+ + {/* Spend Modal */} + + + + Spend from Proxy + + + + + + {/* Setup Modal */} + + + + Setup New Proxy + + + + +
+ ); +} diff --git a/src/components/multisig/proxy/ProxyControlExample.tsx b/src/components/multisig/proxy/ProxyControlExample.tsx new file mode 100644 index 00000000..7c110a04 --- /dev/null +++ b/src/components/multisig/proxy/ProxyControlExample.tsx @@ -0,0 +1,107 @@ +import React from "react"; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Info } from "lucide-react"; +import ProxyControl from "./ProxyControl"; + +/** + * Example page demonstrating how to use the ProxyControl component + * + * This component shows how to integrate the ProxyControl into your application + * and provides context about what the proxy system does. + */ +export default function ProxyControlExample() { + return ( +
+
+

Proxy Control System

+

+ Manage your Cardano proxy contract for automated and controlled transactions. +

+
+ + + + + What is a Proxy Contract?
+ A proxy contract allows you to create a controlled address that can be managed through auth tokens. + This enables automated transactions while maintaining security through your multisig wallet. + The proxy can hold assets and execute transactions when you have the required auth tokens. +
+
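// Illustrative sketch (not part of this patch): the lifecycle described above, expressed
// against the MeshProxyContract API from ./offchain as it is called in ProxyControl
// (setupProxy, spendProxySimple, getProxyBalance). The parameter types are minimal
// structural stand-ins for the real contract and wallet objects; the address is a placeholder.
type Asset = { unit: string; quantity: string };

async function proxyLifecycleSketch(
  contract: {
    setupProxy(): Promise<{ tx: string; proxyAddress: string; authTokenId: string }>;
    spendProxySimple(outputs: Array<{ address: string; unit: string; amount: string }>): Promise<string>;
    getProxyBalance(): Promise<Asset[]>;
  },
  wallet: {
    signTx(tx: string, partial?: boolean): Promise<string>;
    submitTx(tx: string): Promise<string>;
  },
): Promise<Asset[]> {
  // 1. Setup: mint the auth tokens and derive the proxy address.
  const setup = await contract.setupProxy();
  await wallet.submitTx(await wallet.signTx(setup.tx, true));

  // 2. Control: spend from the proxy; each spend consumes one auth token.
  const spendTx = await contract.spendProxySimple([
    { address: "addr_test1...", unit: "lovelace", amount: "1000000" }, // 1 ADA, placeholder address
  ]);
  await wallet.submitTx(await wallet.signTx(spendTx, true));

  // 3. Monitor: read the proxy's current balance.
  return contract.getProxyBalance();
}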
+ + + + How it Works + + Understanding the proxy system workflow + + + +
+
+

1. Setup

+

+ Initialize the proxy by minting 10 auth tokens. These tokens are sent to your multisig wallet. +

+
+
+

2. Control

+

+ Use auth tokens to authorize spending from the proxy address. Each spend consumes one auth token. +

+
+
+

3. Automate

+

+ The proxy can hold assets and execute transactions automatically when properly authorized. +

+
+
+
+
+ + + + + + Integration Example + + How to use the ProxyControl component in your application + + + +
+

Basic Usage

+
+{`import ProxyControl from "@/components/multisig/proxy/ProxyControl";
+
+export default function MyPage() {
+  return (
+    
+

My Proxy Management

+ +
+ ); +}`} +
+ +

Key Features

+
    +
  • Automatic wallet connection detection
  +
  • Proxy setup with auth token minting
  +
  • Real-time balance monitoring
  +
  • Multi-output spending capabilities
  +
  • Integration with multisig transaction system
  +
  • Error handling and loading states
  +
  • Responsive design for mobile and desktop
  +
+
+
+
+
+ ); +} + + + diff --git a/src/components/multisig/proxy/ProxyOverview.tsx b/src/components/multisig/proxy/ProxyOverview.tsx new file mode 100644 index 00000000..91f903e2 --- /dev/null +++ b/src/components/multisig/proxy/ProxyOverview.tsx @@ -0,0 +1,582 @@ +import React, { memo, useState, useEffect } from "react"; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; +import { + AlertCircle, + CheckCircle, + Copy, + Settings, + Send, + Plus, + Wallet, + Activity, + ArrowRight, + ChevronDown, + ChevronUp, + Calendar, + Key, + Hash, + Clock, + TrendingUp, + Edit3, + Save, + X +} from "lucide-react"; + +// ProxyCard Component +interface ProxyCardProps { + proxy: { + id: string; + proxyAddress: string; + authTokenId: string; + description: string | null; + isActive: boolean; + createdAt: Date; + }; + isSelected: boolean; + balance: Array<{ unit: string; quantity: string }>; + balanceLoading?: boolean; + onSelect: () => void; + onCopy: () => void; + onSpend: () => void; + onUpdateProxy: (proxyId: string, description: string) => Promise; + onRefreshBalance?: () => void; +} + +// Component to fetch and display proxy balance +const ProxyCardWithBalance = memo(function ProxyCardWithBalance({ + proxy, + isSelected, + onSelect, + onCopy, + onSpend, + onGetProxyBalance, + onUpdateProxy +}: Omit & { + onGetProxyBalance: (proxyAddress: string) => Promise>; + onUpdateProxy: (proxyId: string, description: string) => Promise; +}) { + const [balance, setBalance] = useState>([]); + const [balanceLoading, setBalanceLoading] = useState(false); + const [lastFetchTime, setLastFetchTime] = useState(0); + + const fetchBalance = React.useCallback(async () => { + setBalanceLoading(true); + try { + const proxyBalance = await onGetProxyBalance(proxy.proxyAddress); + setBalance(proxyBalance); + setLastFetchTime(Date.now()); + } catch (error) { + console.error("Failed to fetch proxy balance:", error); + setBalance([]); + } finally { + setBalanceLoading(false); + } + }, [proxy.proxyAddress, onGetProxyBalance]); + + // Initial fetch and refresh when proxy address changes + useEffect(() => { + fetchBalance(); + }, [fetchBalance]); + + // Refresh balance when proxy is selected (to ensure latest data) + useEffect(() => { + if (isSelected && Date.now() - lastFetchTime > 5000) { // Refresh if older than 5 seconds + fetchBalance(); + } + }, [isSelected, fetchBalance, lastFetchTime]); + + return ( + + ); +}); + +const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceLoading = false, onSelect, onCopy, onSpend, onUpdateProxy, onRefreshBalance }: ProxyCardProps) { + const [isExpanded, setIsExpanded] = React.useState(false); + const [isEditing, setIsEditing] = React.useState(false); + const [editDescription, setEditDescription] = React.useState(proxy.description || ""); + const [isUpdating, setIsUpdating] = React.useState(false); + + const formatDate = (date: Date) => { + return new Intl.DateTimeFormat('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + hour: '2-digit', + minute: '2-digit' + }).format(new Date(date)); + }; + + const copyToClipboard = (text: string, label: string) => { + 
navigator.clipboard.writeText(text); + // You could add a toast notification here + }; + + const handleSaveDescription = async () => { + if (editDescription.trim() === (proxy.description || "")) { + setIsEditing(false); + return; + } + + setIsUpdating(true); + try { + await onUpdateProxy(proxy.id, editDescription.trim()); + setIsEditing(false); + } catch (error) { + console.error("Failed to update proxy description:", error); + // Reset to original value on error + setEditDescription(proxy.description || ""); + } finally { + setIsUpdating(false); + } + }; + + const handleCancelEdit = () => { + setEditDescription(proxy.description || ""); + setIsEditing(false); + }; + + const handleStartEdit = () => { + setEditDescription(proxy.description || ""); + setIsEditing(true); + }; + + return ( + + +
+ + {proxy.proxyAddress.slice(0, 20)}... + +
+ + {proxy.isActive ? "Active" : "Inactive"} + + +
+
+ + {isEditing ? ( +
+ setEditDescription(e.target.value)} + placeholder="Enter description..." + className="h-6 text-xs" + onKeyDown={(e) => { + if (e.key === 'Enter') { + handleSaveDescription(); + } else if (e.key === 'Escape') { + handleCancelEdit(); + } + }} + autoFocus + /> + + +
+ ) : ( +
+ + {proxy.description || "Click to add description"} + + +
+ )} +
+
+ + + {/* Balance Display */} +
+
+ + Balance + {onRefreshBalance && ( + + )} +
+ + {balanceLoading ? ( +
+ ) : balance.length > 0 ? ( + + + +
+ {balance.map((asset, index) => ( +
+ + {asset.unit === "lovelace" + ? `${(parseFloat(asset.quantity) / 1000000).toFixed(2)} ADA` + : asset.quantity + } + +
+ ))} +
+
+ +
+
Proxy Balance Details
+
+ {balance.map((asset, index) => ( +
+ {asset.unit === "lovelace" ? "ADA" : asset.unit}: + + {asset.unit === "lovelace" + ? `${(parseFloat(asset.quantity) / 1000000).toFixed(6)} ADA` + : asset.quantity + } + +
+ ))} +
+ Total: {balance.length} asset{balance.length !== 1 ? 's' : ''} +
+
+
+
+
+
+ ) : ( +
No balance
+ )} +
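// Hypothetical helper (not in this patch): the balance rendering above repeats the
// lovelace-to-ADA conversion (1 ADA = 1,000,000 lovelace) with two decimals in the badge
// and six in the tooltip; a shared formatter would keep those views consistent.
const lovelaceToAda = (quantity: string): number => parseFloat(quantity) / 1_000_000;

const formatAsset = (asset: { unit: string; quantity: string }, decimals = 2): string =>
  asset.unit === "lovelace"
    ? `${lovelaceToAda(asset.quantity).toFixed(decimals)} ADA`
    : asset.quantity;

// formatAsset({ unit: "lovelace", quantity: "2500000" }) === "2.50 ADA"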
+ + {/* Expanded Details */} + {isExpanded && ( +
+ {/* Full Address */} +
+
+ + Proxy Address + +
+
+ {proxy.proxyAddress} +
+
+ + {/* Auth Token ID */} +
+
+ + Auth Token ID + +
+
+ {proxy.authTokenId} +
+
+ + {/* Creation Date */} +
+
+ + Created +
+
+ + {formatDate(proxy.createdAt)} +
+
+ + {/* Proxy ID */} +
+
+ + Proxy ID + +
+
+ {proxy.id} +
+
+
+ )} + + {/* Actions */} +
+ + + {isSelected && ( + + )} +
+
+
+ ); +}); + +interface ProxyOverviewProps { + proxies: Array<{ + id: string; + proxyAddress: string; + authTokenId: string; + description: string | null; + isActive: boolean; + createdAt: Date; + }> | undefined; + selectedProxy: string; + selectedProxyBalance: Array<{ unit: string; quantity: string }>; + proxyBalance: Array<{ unit: string; quantity: string }>; + isProxySetup: boolean; + onProxySelection: (proxyId: string) => void; + onCopyToClipboard: (text: string) => void; + onStartSetup: () => void; + onStartSpending: () => void; + onGetProxyBalance: (proxyAddress: string) => Promise>; + onUpdateProxy: (proxyId: string, description: string) => Promise; + onRefreshAllBalances?: () => void; +} + +const ProxyOverview = memo(function ProxyOverview({ + proxies, + selectedProxy, + selectedProxyBalance, + proxyBalance, + isProxySetup, + onProxySelection, + onCopyToClipboard, + onStartSetup, + onStartSpending, + onGetProxyBalance, + onUpdateProxy, + onRefreshAllBalances, +}: ProxyOverviewProps) { + return ( +
+ {/* General Info and Introduction */} +
+ + {/* Status Card - Only show when proxies are active */} + {isProxySetup && ( + + +
+
+ +
+
+

Proxy system active

+

Ready for automated transactions

+
+
+
+
+ )} +
+ + {/* Proxy Carousel */} + {proxies && proxies.length > 0 ? ( +
+
+
+ + +
+
+ {onRefreshAllBalances && ( + + )} + {proxies.length} {proxies.length !== 1 ? 'proxies' : 'proxy'} +
+
+ + {!selectedProxy && ( + + + + Select a proxy from the cards below to enable spending functionality. + + + )} + +
+ {proxies.map((proxy) => ( + onProxySelection(proxy.id)} + onCopy={() => onCopyToClipboard(proxy.proxyAddress)} + onSpend={() => onStartSpending()} + onGetProxyBalance={onGetProxyBalance} + onUpdateProxy={onUpdateProxy} + /> + ))} +
+
+ ) : ( + + + +

No Proxies Found

+

+ You haven't set up any proxy contracts yet. Create your first proxy to start managing automated transactions. +

+ +
+
+ )} + + {/* Add New Proxy */} + + +
+
+
+ +
+
+

Add New Proxy

+

Create additional proxy contracts

+
+
+ +
+
+
+
+ ); +}); + +export default ProxyOverview; diff --git a/src/components/multisig/proxy/ProxySetup.tsx b/src/components/multisig/proxy/ProxySetup.tsx new file mode 100644 index 00000000..007a04b9 --- /dev/null +++ b/src/components/multisig/proxy/ProxySetup.tsx @@ -0,0 +1,323 @@ +import React, { memo } from "react"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { + RefreshCw, + AlertCircle, + CheckCircle, + Copy, + Settings, + ChevronLeft, + Play, + Check +} from "lucide-react"; + +interface ProxySetupProps { + setupStep: number; + setupData: { + paramUtxo?: { txHash: string; outputIndex: number }; + authTokenId?: string; + proxyAddress?: string; + txHex?: string; + description?: string; + }; + setupLoading: boolean; + onInitializeSetup: (description?: string) => void; + onConfirmSetup: () => void; + onResetSetup: () => void; + onCopyToClipboard: (text: string) => void; + onCloseSetup: () => void; +} + +// Step indicator component +const StepIndicator = ({ currentStep, totalSteps }: { currentStep: number; totalSteps: number }) => { + return ( +
+ {Array.from({ length: totalSteps }, (_, index) => ( +
+
+ {index < currentStep ? : index + 1} +
+ {index < totalSteps - 1 && ( +
+ )} +
+ ))} +
+ ); +}; + +const ProxySetup = memo(function ProxySetup({ + setupStep, + setupData, + setupLoading, + onInitializeSetup, + onConfirmSetup, + onResetSetup, + onCopyToClipboard, + onCloseSetup, +}: ProxySetupProps) { + const [description, setDescription] = React.useState(""); + return ( +
+ {/* Header Section */} +
+

+ Follow the steps below to create a new proxy contract. This process will mint 10 auth tokens that you can use to control the proxy. +

+
+ + + + {/* Collateral Requirement Alert */} + + + + Collateral Required: Your wallet needs at least 5 ADA set aside as collateral for smart contract transactions. + If you encounter "No collateral found" errors, please set up collateral in your wallet settings. + + + + {/* Step 0: Introduction */} + {setupStep === 0 && ( +
+
+
+
+ +
+
+

Ready to Set Up Proxy

+

+ This process will create a new proxy contract for automated transactions +

+
+
+ +
+
+ + setDescription(e.target.value)} + placeholder="Enter a description for this proxy..." + className="w-full" + /> +

+ Add a description to help identify this proxy later +

+
+ +
+
+
+ +
+ Generate proxy parameters and addresses +
+
+
+ +
+ Create a transaction to mint 10 auth tokens +
+
+
+ +
+ Store proxy information in your wallet +
+
+
+ + + + + You'll need to sign a transaction to complete the setup. Make sure you have sufficient ADA for transaction fees. + + +
+ + +
+ )} + + {/* Step 1: Review Generated Parameters */} + {setupStep === 1 && ( +
+
+

+ + Proxy Parameters Generated +

+

+ Review the generated proxy information before proceeding: +

+
+ +
+
+ +
+ + +
+
+ +
+ +
+ + +
+
+ +
+ +
+ {setupData.paramUtxo ? + `${setupData.paramUtxo.txHash.slice(0, 20)}... (${setupData.paramUtxo.outputIndex})` : + "Not available" + } +
+
+
+ +
+ + +
+
+ )} + + {/* Step 2: Setup Complete */} + {setupStep === 2 && ( +
+
+

+ + Proxy Setup Complete! +

+

+ Your proxy has been successfully created and is ready to use. You can now spend from this proxy using the auth tokens. +

+
+ +
+ +
+
+ Address: + {setupData.proxyAddress?.slice(0, 30)}... +
+
+ Auth Tokens: + 10 tokens minted +
+
+ Status: + Active +
+
+
+ +
+ + +
+
+ )} +
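// Illustrative sketch (not part of this patch): what the parent component (ProxyControl)
// does behind onInitializeSetup and onConfirmSetup for the steps rendered above. The
// parameter types are minimal structural stand-ins for the real contract, wallet, and
// tRPC createProxy mutation.
async function runSetupStepsSketch(
  contract: {
    setupProxy(): Promise<{ tx: string; paramUtxo: unknown; authTokenId: string; proxyAddress: string }>;
  },
  wallet: {
    signTx(tx: string, partial?: boolean): Promise<string>;
    submitTx(tx: string): Promise<string>;
  },
  createProxy: (input: {
    proxyAddress: string;
    authTokenId: string;
    paramUtxo: string;
    description?: string;
  }) => Promise<unknown>,
  description?: string,
) {
  // Step 1 (Initialize): generate parameters and the unsigned auth-token minting transaction.
  const setup = await contract.setupProxy();

  // Step 2 (Confirm): sign, submit, and persist the proxy so it can be selected for spending later.
  const signed = await wallet.signTx(setup.tx, true);
  await wallet.submitTx(signed);
  await createProxy({
    proxyAddress: setup.proxyAddress,
    authTokenId: setup.authTokenId,
    paramUtxo: JSON.stringify(setup.paramUtxo),
    description,
  });
}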
+ ); +}); + +export default ProxySetup; diff --git a/src/components/multisig/proxy/ProxySpend.tsx b/src/components/multisig/proxy/ProxySpend.tsx new file mode 100644 index 00000000..4ecd8af1 --- /dev/null +++ b/src/components/multisig/proxy/ProxySpend.tsx @@ -0,0 +1,249 @@ +import React, { memo } from "react"; +import { Button } from "@/components/ui/button"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { Badge } from "@/components/ui/badge"; +import { Separator } from "@/components/ui/separator"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { + Send, + RefreshCw, + AlertCircle, + Plus, + Trash2, + Wallet, + Activity, + ArrowRight, + X +} from "lucide-react"; + +interface ProxyOutput { + address: string; + unit: string; + amount: string; +} + +interface ProxySpendProps { + proxies: Array<{ + id: string; + proxyAddress: string; + authTokenId: string; + description: string | null; + isActive: boolean; + createdAt: Date; + }> | undefined; + selectedProxy: string; + selectedProxyBalance: Array<{ unit: string; quantity: string }>; + spendOutputs: ProxyOutput[]; + spendLoading: boolean; + onProxySelection: (proxyId: string) => void; + onSpendOutputsChange: (outputs: ProxyOutput[]) => void; + onSpendFromProxy: () => void; + onCloseSpend: () => void; +} + +const ProxySpend = memo(function ProxySpend({ + proxies, + selectedProxy, + selectedProxyBalance, + spendOutputs, + spendLoading, + onProxySelection, + onSpendOutputsChange, + onSpendFromProxy, + onCloseSpend, +}: ProxySpendProps) { + // Add spend output + const addSpendOutput = () => { + const newOutputs = [...spendOutputs, { address: "", unit: "lovelace", amount: "" }]; + onSpendOutputsChange(newOutputs); + }; + + // Remove spend output + const removeSpendOutput = (index: number) => { + if (spendOutputs.length > 1) { + const newOutputs = spendOutputs.filter((_, i) => i !== index); + onSpendOutputsChange(newOutputs); + } + }; + + // Update spend output + const updateSpendOutput = (index: number, field: keyof ProxyOutput, value: string) => { + const updated = [...spendOutputs]; + if (updated[index]) { + updated[index] = { ...updated[index], [field]: value }; + onSpendOutputsChange(updated); + } + }; + + return ( +
+ {/* Description */} +

+ Create transactions to spend assets from your proxy address +

+ + {/* Collateral Requirement Alert */} + + + + Collateral Required: Your wallet needs at least 5 ADA set aside as collateral for smart contract transactions. + If you encounter "No collateral found" errors, please set up collateral in your wallet settings. + + + + {(!proxies || proxies.length === 0) && ( + + + + No proxies found. Please setup a proxy first before attempting to spend from it. + + + )} + + {proxies && proxies.length > 0 && ( +
+ {/* Proxy Selection */} +
+
+ + +
+ +
+ + {/* Selected Proxy Balance */} + {selectedProxy && selectedProxyBalance.length > 0 && ( +
+
+ + +
+
+ {selectedProxyBalance.map((asset, index) => ( +
+
+ + {asset.unit === "lovelace" ? "ADA" : asset.unit} + + + {asset.unit === "lovelace" + ? `${(parseFloat(asset.quantity) / 1000000).toFixed(6)} ADA` + : asset.quantity + } + +
+
+ ))} +
+
+ )} + + {/* Spend Outputs */} +
+
+
+ + +
+ +
+ + {spendOutputs.map((output, index) => ( +
+
+ + {spendOutputs.length > 1 && ( + + )} +
+ +
+
+ + updateSpendOutput(index, "address", e.target.value)} + placeholder="addr1..." + className="font-mono text-sm" + /> +
+ +
+ + updateSpendOutput(index, "unit", e.target.value)} + placeholder="lovelace or policyId.assetName" + className="font-mono text-sm" + /> +
+ +
+ + updateSpendOutput(index, "amount", e.target.value)} + placeholder="1000000" + type="number" + /> +
+
+
+ ))} +
+ + + + {/* Submit Button */} + +
+ )} +
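// Illustrative sketch (not part of this patch): the shape this form produces (the
// ProxyOutput interface defined above) and the validation ProxyControl applies before
// calling spendProxySimple. Amounts are in the asset's base unit (lovelace for ADA);
// the address is a placeholder.
const exampleOutputs: ProxyOutput[] = [
  { address: "addr_test1...", unit: "lovelace", amount: "2000000" }, // 2 ADA
];

const exampleValidOutputs = exampleOutputs.filter(
  (output) => output.address && output.amount && parseFloat(output.amount) > 0,
);
// Only exampleValidOutputs would be passed on via onSpendFromProxy.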
+ ); +}); + +export default ProxySpend; diff --git a/src/components/multisig/proxy/README.md b/src/components/multisig/proxy/README.md new file mode 100644 index 00000000..f8cef8c6 --- /dev/null +++ b/src/components/multisig/proxy/README.md @@ -0,0 +1,150 @@ +# Proxy Control System + +A comprehensive UI component system for managing Cardano proxy contracts with multisig wallet integration. + +## Overview + +The Proxy Control System provides a user-friendly interface for managing proxy contracts on Cardano. It allows users to: + +- Set up proxy contracts with auth token minting +- Monitor proxy balances in real-time +- Create and manage spending transactions +- Integrate with multisig wallet systems + +## Components + +### ProxyControl + +The main component that provides a complete interface for proxy management. + +**Features:** +- **Setup Tab**: Initialize proxy contracts by minting auth tokens +- **Overview Tab**: Monitor proxy status, address, and balance +- **Spend Tab**: Create multi-output spending transactions + +**Usage:** +```tsx +import { ProxyControl } from "@/components/multisig/proxy"; + +export default function MyPage() { + return ( +
+

Proxy Management

+ +
+ ); +} +``` + +### ProxyControlExample + +A demonstration page showing how to integrate and use the ProxyControl component. + +## Architecture + +### Core Classes + +#### MeshProxyContract + +Located in `offchain.ts`, this class handles all blockchain interactions: + +- **setupProxy()**: Mints 10 auth tokens and sets up the proxy address +- **spendProxySimple()**: Creates spending transactions from the proxy +- **getProxyBalance()**: Fetches current balance of the proxy address + +### Integration Points + +The system integrates with: + +1. **MeshSDK**: For wallet connections and transaction building +2. **Multisig System**: Creates transactions that require multisig approval +3. **Database**: Stores transaction data for multisig workflows +4. **Toast System**: Provides user feedback for all operations + +## Workflow + +### 1. Setup Phase +1. User connects wallet +2. Clicks "Setup Proxy" +3. System mints 10 auth tokens +4. Creates multisig transaction for approval +5. Auth tokens are sent to user's multisig wallet + +### 2. Control Phase +1. User can monitor proxy balance +2. Create spending transactions with multiple outputs +3. Each spend consumes one auth token +4. Transactions require multisig approval + +### 3. Automation Phase +1. Proxy can hold various assets +2. Automated spending when auth tokens are available +3. Full audit trail through multisig system + +## Security Features + +- **Multisig Integration**: All transactions require multisig approval +- **Auth Token System**: Controlled spending through token consumption +- **Address Validation**: Proper Cardano address format checking +- **Error Handling**: Comprehensive error states and user feedback + +## UI/UX Features + +- **Responsive Design**: Works on mobile and desktop +- **Loading States**: Visual feedback during operations +- **Error Handling**: Clear error messages and recovery options +- **Real-time Updates**: Balance and status monitoring +- **Copy Functions**: Easy address and token ID copying + +## Dependencies + +- `@meshsdk/react`: Wallet connection and transaction building +- `@meshsdk/core`: Core Cardano functionality +- `@meshsdk/common`: Common utilities and types +- React hooks for state management +- Custom UI components (shadcn/ui based) + +## Error Handling + +The system handles various error scenarios: + +- Wallet not connected +- Insufficient funds +- Invalid addresses +- Network errors +- Transaction failures + +All errors are displayed to users with actionable feedback. + +## Future Enhancements + +Potential improvements: + +1. **Batch Operations**: Multiple proxy management +2. **Advanced Spending**: Time-locked or conditional spending +3. **Analytics**: Transaction history and analytics +4. **Notifications**: Real-time proxy activity alerts +5. **API Integration**: REST API for proxy management + +## Testing + +The component should be tested with: + +- Different wallet types (Nami, Eternl, etc.) +- Various network conditions +- Edge cases (empty balances, invalid inputs) +- Mobile responsiveness +- Error scenarios + +## Contributing + +When contributing to this system: + +1. Follow existing code patterns +2. Add proper TypeScript types +3. Include error handling +4. Test with real wallets +5. 
Update documentation + + + diff --git a/src/components/multisig/proxy/index.ts b/src/components/multisig/proxy/index.ts index 90e9fd4e..e7c46daa 100644 --- a/src/components/multisig/proxy/index.ts +++ b/src/components/multisig/proxy/index.ts @@ -1 +1,3 @@ -export * from "./offchain"; +export { default as ProxyControl } from "./ProxyControl"; +export { default as ProxyControlExample } from "./ProxyControlExample"; +export { MeshProxyContract } from "./offchain"; \ No newline at end of file diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index 8477ab66..8d86844f 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -37,6 +37,13 @@ export class MeshProxyContract extends MeshTxInitiator { stakeCredential?: string | undefined; networkId: number; + // Reset method to clear state for retry + reset() { + this.paramUtxo = { outputIndex: 0, txHash: "" }; + this.proxyAddress = undefined; + this.stakeCredential = undefined; + } + getAuthTokenCbor = () => { return applyParamsToScript(blueprint.validators[0]!.compiledCode, [ mOutputReference(this.paramUtxo.txHash, this.paramUtxo.outputIndex), @@ -114,8 +121,15 @@ export class MeshProxyContract extends MeshTxInitiator { const policyId = this.getAuthTokenPolicyId(); const tokenName = ""; + console.log("policyId", policyId); + console.log("tokenName", tokenName); + console.log("walletAddress", walletAddress); + console.log("paramUtxo", paramUtxo); + console.log("collateral", collateral); + console.log("utxos", utxos); + // Try completing the transaction step by step - const tx = this.mesh + let tx = await this.mesh .txIn( paramUtxo.input.txHash, paramUtxo.input.outputIndex, @@ -126,8 +140,13 @@ export class MeshProxyContract extends MeshTxInitiator { .mint("10", policyId, tokenName) .mintingScript(this.getAuthTokenCbor()) .mintRedeemerValue(mConStr0([])) - .txOut(walletAddress, [{ unit: policyId, quantity: "10" }]) - .txInCollateral( + .txOut(proxyAddress, [{ unit: "lovelace", quantity: "1000000" }]) + + for(let i = 0; i < 10; i++) { + tx.txOut(walletAddress, [{ unit: policyId, quantity: "1" }]) + } + + tx.txInCollateral( collateral.input.txHash, collateral.input.outputIndex, collateral.output.amount, @@ -151,6 +170,9 @@ export class MeshProxyContract extends MeshTxInitiator { ) => { const { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + console.log("utxos", utxos); + console.log("collateral", collateral); + console.log("walletAddress", walletAddress); if (utxos?.length <= 0) { throw new Error("No UTxOs found"); @@ -181,6 +203,7 @@ export class MeshProxyContract extends MeshTxInitiator { console.log("authTokenUtxos", authTokenUtxos); console.log("policyIdAT", policyIdAT); + if (!authTokenUtxos || authTokenUtxos.length === 0) { throw new Error("No AuthToken found at control wallet address"); @@ -207,7 +230,7 @@ export class MeshProxyContract extends MeshTxInitiator { ) .txInScript(this.getProxyCbor()) .txInRedeemerValue(mConStr0([])) - .txInDatumValue(mConStr0([])) // Add empty datum since script expects Option + .txInInlineDatumPresent() .txIn( authTokenUtxo.input.txHash, authTokenUtxo.input.outputIndex, diff --git a/src/components/pages/wallet/dapps/index.tsx b/src/components/pages/wallet/dapps/index.tsx index 6c2ac479..2a003238 100644 --- a/src/components/pages/wallet/dapps/index.tsx +++ b/src/components/pages/wallet/dapps/index.tsx @@ -2,6 +2,7 @@ import { useEffect, useState } from "react"; import { Button } from 
"@/components/ui/button"; import { ExternalLink, Code, Database, ArrowLeft, CheckCircle, AlertTriangle, Info } from "lucide-react"; +import ProxyControl from "@/components/multisig/proxy/ProxyControl"; function DappCard({ title, description, url }: { title: string; description: string; url: string }) { const [ogImage, setOgImage] = useState(null); @@ -381,6 +382,11 @@ export default function PageDapps() {
+ {/* Proxy Control Section */} +
+ +
+ {/* dApps Grid */}
diff --git a/src/components/pages/wallet/info/proxy-control.tsx b/src/components/pages/wallet/info/proxy-control.tsx new file mode 100644 index 00000000..e1c0c240 --- /dev/null +++ b/src/components/pages/wallet/info/proxy-control.tsx @@ -0,0 +1,36 @@ +import React from "react"; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Settings, Wallet } from "lucide-react"; +import { ProxyControl } from "@/components/multisig/proxy"; + +/** + * ProxyControlCard component for the wallet info page + * + * This component wraps the ProxyControl component in a card format + * that matches the styling of other wallet info components. + */ +function ProxyControlCard() { + return ( + + + + + Proxy Control + + + Advanced + + + + Manage automated transactions through proxy contracts with auth token control + + + + + + + ); +} + +export default ProxyControlCard; diff --git a/src/server/api/routers/proxy.ts b/src/server/api/routers/proxy.ts index d0c8c4a5..4800d61d 100644 --- a/src/server/api/routers/proxy.ts +++ b/src/server/api/routers/proxy.ts @@ -1,22 +1,37 @@ import { z } from "zod"; - import { createTRPCRouter, publicProcedure } from "@/server/api/trpc"; export const proxyRouter = createTRPCRouter({ + getUserByAddress: publicProcedure + .input(z.object({ address: z.string() })) + .query(async ({ ctx, input }) => { + return ctx.db.user.findUnique({ + where: { + address: input.address, + }, + }); + }), createProxy: publicProcedure .input( z.object({ - walletId: z.string(), + walletId: z.string().optional(), + userId: z.string().optional(), proxyAddress: z.string(), authTokenId: z.string(), paramUtxo: z.string(), description: z.string().optional(), - }), + }).refine( + (data) => data.walletId || data.userId, + { + message: "Either walletId or userId must be provided", + } + ), ) .mutation(async ({ ctx, input }) => { return ctx.db.proxy.create({ data: { walletId: input.walletId, + userId: input.userId, proxyAddress: input.proxyAddress, authTokenId: input.authTokenId, paramUtxo: input.paramUtxo, @@ -25,12 +40,85 @@ export const proxyRouter = createTRPCRouter({ }); }), - getProxyByWalletId: publicProcedure + getProxiesByWallet: publicProcedure .input(z.object({ walletId: z.string() })) .query(async ({ ctx, input }) => { - return await ctx.db.proxy.findUnique({ + return ctx.db.proxy.findMany({ where: { walletId: input.walletId, + isActive: true, + }, + orderBy: { + createdAt: "desc", + }, + }); + }), + + getProxiesByUser: publicProcedure + .input(z.object({ userAddress: z.string() })) + .query(async ({ ctx, input }) => { + // First find the user by address + const user = await ctx.db.user.findUnique({ + where: { + address: input.userAddress, + }, + }); + + if (!user) { + return []; + } + + return ctx.db.proxy.findMany({ + where: { + userId: user.id, + isActive: true, + }, + orderBy: { + createdAt: "desc", + }, + }); + }), + + getProxiesByUserOrWallet: publicProcedure + .input(z.object({ + walletId: z.string().optional(), + userAddress: z.string().optional(), + })) + .query(async ({ ctx, input }) => { + const conditions: any = { + isActive: true, + }; + + if (input.walletId) { + conditions.walletId = input.walletId; + } + + if (input.userAddress) { + const user = await ctx.db.user.findUnique({ + where: { + address: input.userAddress, + }, + }); + + if (user) { + conditions.userId = user.id; + } + } + + return ctx.db.proxy.findMany({ + where: conditions, + orderBy: { + createdAt: "desc", + }, + }); + }), + 
+ getProxyById: publicProcedure + .input(z.object({ id: z.string() })) + .query(async ({ ctx, input }) => { + return ctx.db.proxy.findUnique({ + where: { + id: input.id, }, }); }), @@ -38,32 +126,43 @@ export const proxyRouter = createTRPCRouter({ updateProxy: publicProcedure .input( z.object({ - walletId: z.string(), - proxyAddress: z.string().optional(), - authTokenId: z.string().optional(), - paramUtxo: z.string().optional(), + id: z.string(), description: z.string().optional(), isActive: z.boolean().optional(), }), ) .mutation(async ({ ctx, input }) => { - const { walletId, ...updateData } = input; return ctx.db.proxy.update({ where: { - walletId: walletId, + id: input.id, + }, + data: { + description: input.description, + isActive: input.isActive, }, - data: updateData, }); }), deleteProxy: publicProcedure - .input(z.object({ walletId: z.string() })) + .input(z.object({ id: z.string() })) .mutation(async ({ ctx, input }) => { return ctx.db.proxy.delete({ where: { - walletId: input.walletId, + id: input.id, }, }); }), -}); + deactivateProxy: publicProcedure + .input(z.object({ id: z.string() })) + .mutation(async ({ ctx, input }) => { + return ctx.db.proxy.update({ + where: { + id: input.id, + }, + data: { + isActive: false, + }, + }); + }), +}); \ No newline at end of file From 311a04268cbd44cb7dfeda8041d3ef5a7edc9559 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Fri, 17 Oct 2025 21:18:47 +0200 Subject: [PATCH 10/15] mig01 --- package-lock.json | 420 +++++++++++-- package.json | 4 +- .../migration.sql | 3 + prisma/schema.prisma | 34 +- .../multisig/proxy/ProxyControl.tsx | 120 +++- src/components/multisig/proxy/ProxySetup.tsx | 3 +- src/components/multisig/proxy/offchain.ts | 238 ++++++-- .../create/ReviewNativeScript.tsx | 15 +- src/components/pages/wallet/info/index.tsx | 4 +- .../pages/wallet/info/migrate-wallet.tsx | 458 +++++++++++++- .../info/migration/FundTransferStep.tsx | 355 +++++++++++ .../info/migration/MigrationCompleteStep.tsx | 184 ++++++ .../info/migration/MigrationPreChecks.tsx | 278 +++++++++ .../info/migration/NewWalletCreationStep.tsx | 136 +++++ .../wallet/info/migration/ProxySetupStep.tsx | 210 +++++++ .../migration/useMigrationWalletFlowState.tsx | 558 ++++++++++++++++++ src/server/api/routers/proxy.ts | 31 +- src/server/api/routers/wallets.ts | 53 ++ 18 files changed, 2951 insertions(+), 153 deletions(-) create mode 100644 prisma/migrations/20251017112523_add_migration_target_wallet_id/migration.sql create mode 100644 src/components/pages/wallet/info/migration/FundTransferStep.tsx create mode 100644 src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx create mode 100644 src/components/pages/wallet/info/migration/MigrationPreChecks.tsx create mode 100644 src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx create mode 100644 src/components/pages/wallet/info/migration/ProxySetupStep.tsx create mode 100644 src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx diff --git a/package-lock.json b/package-lock.json index 0f47d62a..66a769b9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,7 +17,7 @@ "@meshsdk/core-cst": "^1.9.0-beta.19", "@meshsdk/react": "^1.9.0-beta.18", "@octokit/core": "^6.1.2", - "@prisma/client": "^6.4.1", + "@prisma/client": "^6.17.1", "@radix-ui/react-accordion": "^1.2.0", "@radix-ui/react-checkbox": "^1.1.1", "@radix-ui/react-collapsible": "^1.1.0", @@ -99,7 +99,7 @@ "postcss": "^8.4.39", "prettier": "^3.3.2", "prettier-plugin-tailwindcss": "^0.6.5", - "prisma": "^6.4.1", + 
"prisma": "^6.17.1", "tailwindcss": "^3.4.3", "ts-jest": "^29.4.4", "typescript": "^5.5.3" @@ -1060,6 +1060,7 @@ "os": [ "aix" ], + "peer": true, "engines": { "node": ">=18" } @@ -1077,6 +1078,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=18" } @@ -1094,6 +1096,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=18" } @@ -1111,6 +1114,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": ">=18" } @@ -1128,6 +1132,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=18" } @@ -1145,6 +1150,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": ">=18" } @@ -1162,6 +1168,7 @@ "os": [ "freebsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1179,6 +1186,7 @@ "os": [ "freebsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1196,6 +1204,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1213,6 +1222,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1230,6 +1240,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1247,6 +1258,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1264,6 +1276,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1281,6 +1294,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1298,6 +1312,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1315,6 +1330,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1332,6 +1348,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": ">=18" } @@ -1349,6 +1366,7 @@ "os": [ "netbsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1366,6 +1384,7 @@ "os": [ "netbsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1383,6 +1402,7 @@ "os": [ "openbsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1400,6 +1420,7 @@ "os": [ "openbsd" ], + "peer": true, "engines": { "node": ">=18" } @@ -1417,6 +1438,7 @@ "os": [ "sunos" ], + "peer": true, "engines": { "node": ">=18" } @@ -1434,6 +1456,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=18" } @@ -1451,6 +1474,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=18" } @@ -1468,6 +1492,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": ">=18" } @@ -3198,9 +3223,9 @@ } }, "node_modules/@prisma/client": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@prisma/client/-/client-6.4.1.tgz", - "integrity": "sha512-A7Mwx44+GVZVexT5e2GF/WcKkEkNNKbgr059xpr5mn+oUm2ZW1svhe+0TRNBwCdzhfIZ+q23jEgsNPvKD9u+6g==", + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/@prisma/client/-/client-6.17.1.tgz", + "integrity": "sha512-zL58jbLzYamjnNnmNA51IOZdbk5ci03KviXCuB0Tydc9btH2kDWsi1pQm2VecviRTM7jGia0OPPkgpGnT3nKvw==", "hasInstallScript": true, "license": "Apache-2.0", "engines": { @@ -3219,54 +3244,67 @@ } } }, + "node_modules/@prisma/config": { + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/@prisma/config/-/config-6.17.1.tgz", + "integrity": "sha512-fs8wY6DsvOCzuiyWVckrVs1LOcbY4LZNz8ki4uUIQ28jCCzojTGqdLhN2Jl5lDnC1yI8/gNIKpsWDM8pLhOdwA==", + "devOptional": true, + "license": "Apache-2.0", + "dependencies": { + "c12": "3.1.0", + "deepmerge-ts": "7.1.5", + "effect": "3.16.12", + "empathic": "2.0.0" + } + }, "node_modules/@prisma/debug": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-6.4.1.tgz", - "integrity": "sha512-Q9xk6yjEGIThjSD8zZegxd5tBRNHYd13GOIG0nLsanbTXATiPXCLyvlYEfvbR2ft6dlRsziQXfQGxAgv7zcMUA==", + "version": "6.17.1", 
+ "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-6.17.1.tgz", + "integrity": "sha512-Vf7Tt5Wh9XcndpbmeotuqOMLWPTjEKCsgojxXP2oxE1/xYe7PtnP76hsouG9vis6fctX+TxgmwxTuYi/+xc7dQ==", "devOptional": true, "license": "Apache-2.0" }, "node_modules/@prisma/engines": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-6.4.1.tgz", - "integrity": "sha512-KldENzMHtKYwsOSLThghOIdXOBEsfDuGSrxAZjMnimBiDKd3AE4JQ+Kv+gBD/x77WoV9xIPf25GXMWffXZ17BA==", + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-6.17.1.tgz", + "integrity": "sha512-D95Ik3GYZkqZ8lSR4EyFOJ/tR33FcYRP8kK61o+WMsyD10UfJwd7+YielflHfKwiGodcqKqoraWw8ElAgMDbPw==", "devOptional": true, "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { - "@prisma/debug": "6.4.1", - "@prisma/engines-version": "6.4.0-29.a9055b89e58b4b5bfb59600785423b1db3d0e75d", - "@prisma/fetch-engine": "6.4.1", - "@prisma/get-platform": "6.4.1" + "@prisma/debug": "6.17.1", + "@prisma/engines-version": "6.17.1-1.272a37d34178c2894197e17273bf937f25acdeac", + "@prisma/fetch-engine": "6.17.1", + "@prisma/get-platform": "6.17.1" } }, "node_modules/@prisma/engines-version": { - "version": "6.4.0-29.a9055b89e58b4b5bfb59600785423b1db3d0e75d", - "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-6.4.0-29.a9055b89e58b4b5bfb59600785423b1db3d0e75d.tgz", - "integrity": "sha512-Xq54qw55vaCGrGgIJqyDwOq0TtjZPJEWsbQAHugk99hpDf2jcEeQhUcF+yzEsSqegBaDNLA4IC8Nn34sXmkiTQ==", + "version": "6.17.1-1.272a37d34178c2894197e17273bf937f25acdeac", + "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-6.17.1-1.272a37d34178c2894197e17273bf937f25acdeac.tgz", + "integrity": "sha512-17140E3huOuD9lMdJ9+SF/juOf3WR3sTJMVyyenzqUPbuH+89nPhSWcrY+Mf7tmSs6HvaO+7S+HkELinn6bhdg==", "devOptional": true, "license": "Apache-2.0" }, "node_modules/@prisma/fetch-engine": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-6.4.1.tgz", - "integrity": "sha512-uZ5hVeTmDspx7KcaRCNoXmcReOD+84nwlO2oFvQPRQh9xiFYnnUKDz7l9bLxp8t4+25CsaNlgrgilXKSQwrIGQ==", + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-6.17.1.tgz", + "integrity": "sha512-AYZiHOs184qkDMiTeshyJCtyL4yERkjfTkJiSJdYuSfc24m94lTNL5+GFinZ6vVz+ktX4NJzHKn1zIFzGTWrWg==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@prisma/debug": "6.4.1", - "@prisma/engines-version": "6.4.0-29.a9055b89e58b4b5bfb59600785423b1db3d0e75d", - "@prisma/get-platform": "6.4.1" + "@prisma/debug": "6.17.1", + "@prisma/engines-version": "6.17.1-1.272a37d34178c2894197e17273bf937f25acdeac", + "@prisma/get-platform": "6.17.1" } }, "node_modules/@prisma/get-platform": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-6.4.1.tgz", - "integrity": "sha512-gXqZaDI5scDkBF8oza7fOD3Q3QMD0e0rBynlzDDZdTWbWmzjuW58PRZtj+jkvKje2+ZigCWkH8SsWZAsH6q1Yw==", + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-6.17.1.tgz", + "integrity": "sha512-AKEn6fsfz0r482S5KRDFlIGEaq9wLNcgalD1adL+fPcFFblIKs1sD81kY/utrHdqKuVC6E1XSRpegDK3ZLL4Qg==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@prisma/debug": "6.4.1" + "@prisma/debug": "6.17.1" } }, "node_modules/@radix-ui/number": { @@ -4649,6 +4687,13 @@ "@sinonjs/commons": "^3.0.1" } }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "devOptional": true, + "license": "MIT" + }, "node_modules/@swagger-api/apidom-ast": { "version": "1.0.0-beta.39", "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-1.0.0-beta.39.tgz", @@ -7830,6 +7875,75 @@ "node": ">= 0.8" } }, + "node_modules/c12": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/c12/-/c12-3.1.0.tgz", + "integrity": "sha512-uWoS8OU1MEIsOv8p/5a82c3H31LsWVR5qiyXVfBNOzfffjUWtPnhAb4BYI2uG2HfGmZmFjCtui5XNWaps+iFuw==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "chokidar": "^4.0.3", + "confbox": "^0.2.2", + "defu": "^6.1.4", + "dotenv": "^16.6.1", + "exsolve": "^1.0.7", + "giget": "^2.0.0", + "jiti": "^2.4.2", + "ohash": "^2.0.11", + "pathe": "^2.0.3", + "perfect-debounce": "^1.0.0", + "pkg-types": "^2.2.0", + "rc9": "^2.1.2" + }, + "peerDependencies": { + "magicast": "^0.3.5" + }, + "peerDependenciesMeta": { + "magicast": { + "optional": true + } + } + }, + "node_modules/c12/node_modules/chokidar": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz", + "integrity": "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "readdirp": "^4.0.1" + }, + "engines": { + "node": ">= 14.16.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/c12/node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "devOptional": true, + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/c12/node_modules/readdirp": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz", + "integrity": "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">= 14.18.0" + }, + "funding": { + "type": "individual", + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/cache-chunk-store": { "version": "3.2.2", "resolved": "https://registry.npmjs.org/cache-chunk-store/-/cache-chunk-store-3.2.2.tgz", @@ -8184,6 +8298,16 @@ "node": ">= 0.10" } }, + "node_modules/citty": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/citty/-/citty-0.1.6.tgz", + "integrity": "sha512-tskPPKEs8D2KPafUypv2gxwJP8h/OaJmC82QQGGDQcHvXX43xF2VDACcJVmZ0EuSxkpO9Kc4MlrA3q0+FG58AQ==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "consola": "^3.2.3" + } + }, "node_modules/cjs-module-lexer": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-2.1.0.tgz", @@ -8361,6 +8485,23 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", "license": "MIT" }, + "node_modules/confbox": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.2.2.tgz", + "integrity": "sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/consola": { + "version": "3.4.2", + "resolved": 
"https://registry.npmjs.org/consola/-/consola-3.4.2.tgz", + "integrity": "sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": "^14.18.0 || >=16.10.0" + } + }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -8985,6 +9126,16 @@ "node": ">=0.10.0" } }, + "node_modules/deepmerge-ts": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/deepmerge-ts/-/deepmerge-ts-7.1.5.tgz", + "integrity": "sha512-HOJkrhaYsweh+W+e74Yn7YStZOilkoPb6fycpwNLKzSPtruFs48nYis0zy5yJz1+ktUhHxoRDJ27RQAWLIJVJw==", + "devOptional": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/default-gateway": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", @@ -9032,6 +9183,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/defu": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/defu/-/defu-6.1.4.tgz", + "integrity": "sha512-mEQCMmwJu317oSz8CwdIOdwf3xMif1ttiM8LTufzc3g6kR+9Pe236twL8j3IYT1F7GfRgGcW6MWxzZjLIkuHIg==", + "devOptional": true, + "license": "MIT" + }, "node_modules/delaunator": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", @@ -9059,6 +9217,13 @@ "node": ">=6" } }, + "node_modules/destr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/destr/-/destr-2.0.5.tgz", + "integrity": "sha512-ugFTXCtDZunbzasqBxrK93Ik/DRYsO6S/fedkWEMKqt04xZ4csmnmwGDBAb07QWNaGMAmnTIemsYZCksjATwsA==", + "devOptional": true, + "license": "MIT" + }, "node_modules/detect-gpu": { "version": "5.0.66", "resolved": "https://registry.npmjs.org/detect-gpu/-/detect-gpu-5.0.66.tgz", @@ -9177,6 +9342,19 @@ "@types/trusted-types": "^2.0.7" } }, + "node_modules/dotenv": { + "version": "16.6.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.6.1.tgz", + "integrity": "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==", + "devOptional": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/draco3d": { "version": "1.5.7", "resolved": "https://registry.npmjs.org/draco3d/-/draco3d-1.5.7.tgz", @@ -9227,6 +9405,17 @@ "safe-buffer": "^5.0.1" } }, + "node_modules/effect": { + "version": "3.16.12", + "resolved": "https://registry.npmjs.org/effect/-/effect-3.16.12.tgz", + "integrity": "sha512-N39iBk0K71F9nb442TLbTkjl24FLUzuvx2i1I2RsEAQsdAdUTuUoW0vlfUXgkMTUOnYqKnWcFfqw4hK4Pw27hg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.0.0", + "fast-check": "^3.23.1" + } + }, "node_modules/electron-to-chromium": { "version": "1.5.223", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.223.tgz", @@ -9252,6 +9441,16 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", "license": "MIT" }, + "node_modules/empathic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/empathic/-/empathic-2.0.0.tgz", + "integrity": "sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": 
"https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -9462,9 +9661,11 @@ "version": "0.25.0", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.0.tgz", "integrity": "sha512-BXq5mqc8ltbaN34cDqWuYKyNhX8D/Z0J1xdtdQ8UcIIIyJyz+ZMKUt58tF3SrZ85jcfN/PZYhjR5uDQAYNVbuw==", - "devOptional": true, + "dev": true, "hasInstallScript": true, "license": "MIT", + "optional": true, + "peer": true, "bin": { "esbuild": "bin/esbuild" }, @@ -9503,8 +9704,10 @@ "version": "3.6.0", "resolved": "https://registry.npmjs.org/esbuild-register/-/esbuild-register-3.6.0.tgz", "integrity": "sha512-H2/S7Pm8a9CL1uhp9OvjwrBh5Pvx0H8qVOxNu8Wed9Y7qv56MPtq+GGM8RJpq6glYJn9Wspr8uw7l55uyinNeg==", - "devOptional": true, + "dev": true, "license": "MIT", + "optional": true, + "peer": true, "dependencies": { "debug": "^4.3.4" }, @@ -10198,12 +10401,59 @@ "node": "^18.14.0 || ^20.0.0 || ^22.0.0 || >=24.0.0" } }, + "node_modules/exsolve": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/exsolve/-/exsolve-1.0.7.tgz", + "integrity": "sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==", + "devOptional": true, + "license": "MIT" + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", "license": "MIT" }, + "node_modules/fast-check": { + "version": "3.23.2", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.23.2.tgz", + "integrity": "sha512-h5+1OzzfCC3Ef7VbtKdcv7zsstUQwUDlYpUTvjeUsJAssPgLn7QzbboPtL5ro04Mq0rPOsMzl7q5hIbRs2wD1A==", + "devOptional": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT", + "dependencies": { + "pure-rand": "^6.1.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/fast-check/node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "devOptional": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ], + "license": "MIT" + }, "node_modules/fast-content-type-parse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-2.0.1.tgz", @@ -10852,6 +11102,24 @@ "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" } }, + "node_modules/giget": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/giget/-/giget-2.0.0.tgz", + "integrity": "sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.0", + "defu": "^6.1.4", + "node-fetch-native": "^1.6.6", + "nypm": "^0.6.0", + "pathe": "^2.0.3" + }, + "bin": { + "giget": "dist/cli.mjs" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -15505,6 +15773,13 @@ "url": "https://opencollective.com/node-fetch" } }, + "node_modules/node-fetch-native": { + "version": "1.6.7", + "resolved": 
"https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.6.7.tgz", + "integrity": "sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==", + "devOptional": true, + "license": "MIT" + }, "node_modules/node-gyp-build": { "version": "4.8.4", "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", @@ -18631,6 +18906,26 @@ "inBundle": true, "license": "ISC" }, + "node_modules/nypm": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/nypm/-/nypm-0.6.2.tgz", + "integrity": "sha512-7eM+hpOtrKrBDCh7Ypu2lJ9Z7PNZBdi/8AT3AX8xoCj43BBVHD0hPSTEvMtkMpfs8FCqBGhxB+uToIQimA111g==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "citty": "^0.1.6", + "consola": "^3.4.2", + "pathe": "^2.0.3", + "pkg-types": "^2.3.0", + "tinyexec": "^1.0.1" + }, + "bin": { + "nypm": "dist/cli.mjs" + }, + "engines": { + "node": "^14.16.0 || >=16.10.0" + } + }, "node_modules/oauth": { "version": "0.9.15", "resolved": "https://registry.npmjs.org/oauth/-/oauth-0.9.15.tgz", @@ -18776,6 +19071,13 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/ohash": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/ohash/-/ohash-2.0.11.tgz", + "integrity": "sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==", + "devOptional": true, + "license": "MIT" + }, "node_modules/oidc-token-hash": { "version": "5.0.3", "resolved": "https://registry.npmjs.org/oidc-token-hash/-/oidc-token-hash-5.0.3.tgz", @@ -19140,6 +19442,13 @@ "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", "license": "ISC" }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "devOptional": true, + "license": "MIT" + }, "node_modules/pbkdf2": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.2.tgz", @@ -19156,6 +19465,13 @@ "node": ">=0.12" } }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "devOptional": true, + "license": "MIT" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -19267,6 +19583,18 @@ "node": ">=8" } }, + "node_modules/pkg-types": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-2.3.0.tgz", + "integrity": "sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "confbox": "^0.2.2", + "exsolve": "^1.0.7", + "pathe": "^2.0.3" + } + }, "node_modules/point-in-polygon-hao": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/point-in-polygon-hao/-/point-in-polygon-hao-1.2.4.tgz", @@ -19594,16 +19922,15 @@ "license": "MIT" }, "node_modules/prisma": { - "version": "6.4.1", - "resolved": "https://registry.npmjs.org/prisma/-/prisma-6.4.1.tgz", - "integrity": "sha512-q2uJkgXnua/jj66mk6P9bX/zgYJFI/jn4Yp0aS6SPRrjH/n6VyOV7RDe1vHD0DX8Aanx4MvgmUPPoYnR6MJnPg==", + "version": "6.17.1", + "resolved": "https://registry.npmjs.org/prisma/-/prisma-6.17.1.tgz", + "integrity": 
"sha512-ac6h0sM1Tg3zu8NInY+qhP/S9KhENVaw9n1BrGKQVFu05JT5yT5Qqqmb8tMRIE3ZXvVj4xcRA5yfrsy4X7Yy5g==", "devOptional": true, "hasInstallScript": true, "license": "Apache-2.0", "dependencies": { - "@prisma/engines": "6.4.1", - "esbuild": ">=0.12 <1", - "esbuild-register": "3.6.0" + "@prisma/config": "6.17.1", + "@prisma/engines": "6.17.1" }, "bin": { "prisma": "build/index.js" @@ -19611,9 +19938,6 @@ "engines": { "node": ">=18.18" }, - "optionalDependencies": { - "fsevents": "2.3.3" - }, "peerDependencies": { "typescript": ">=5.1.0" }, @@ -19891,6 +20215,17 @@ "node": ">=0.10.0" } }, + "node_modules/rc9": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/rc9/-/rc9-2.1.2.tgz", + "integrity": "sha512-btXCnMmRIBINM2LDZoEmOogIZU7Qe7zn4BpomSKZ/ykbLObuBdvG+mFq11DL6fjH1DRwHhrlgtYWG96bJiC7Cg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "defu": "^6.1.4", + "destr": "^2.0.3" + } + }, "node_modules/rdf-canonize": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/rdf-canonize/-/rdf-canonize-3.4.0.tgz", @@ -22314,6 +22649,13 @@ "integrity": "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw==", "license": "MIT" }, + "node_modules/tinyexec": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz", + "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==", + "devOptional": true, + "license": "MIT" + }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", diff --git a/package.json b/package.json index 948bb2be..87081fc7 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ "@meshsdk/core-cst": "^1.9.0-beta.19", "@meshsdk/react": "^1.9.0-beta.18", "@octokit/core": "^6.1.2", - "@prisma/client": "^6.4.1", + "@prisma/client": "^6.17.1", "@radix-ui/react-accordion": "^1.2.0", "@radix-ui/react-checkbox": "^1.1.1", "@radix-ui/react-collapsible": "^1.1.0", @@ -111,7 +111,7 @@ "postcss": "^8.4.39", "prettier": "^3.3.2", "prettier-plugin-tailwindcss": "^0.6.5", - "prisma": "^6.4.1", + "prisma": "^6.17.1", "tailwindcss": "^3.4.3", "ts-jest": "^29.4.4", "typescript": "^5.5.3" diff --git a/prisma/migrations/20251017112523_add_migration_target_wallet_id/migration.sql b/prisma/migrations/20251017112523_add_migration_target_wallet_id/migration.sql new file mode 100644 index 00000000..088f2d48 --- /dev/null +++ b/prisma/migrations/20251017112523_add_migration_target_wallet_id/migration.sql @@ -0,0 +1,3 @@ +-- AlterTable +ALTER TABLE "Wallet" ADD COLUMN "migrationTargetWalletId" TEXT; + diff --git a/prisma/schema.prisma b/prisma/schema.prisma index e65505d9..1d367371 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -1,6 +1,3 @@ -// This is your Prisma schema file, -// learn more about it in the docs: https://pris.ly/d/prisma-schema - generator client { provider = "prisma-client-js" } @@ -19,19 +16,20 @@ model User { } model Wallet { - id String @id @default(cuid()) - name String - description String? - signersAddresses String[] - signersStakeKeys String[] - signersDescriptions String[] - numRequiredSigners Int? - verified String[] - scriptCbor String - stakeCredentialHash String? - type String - isArchived Boolean @default(false) - clarityApiKey String? + id String @id @default(cuid()) + name String + description String? + signersAddresses String[] + signersStakeKeys String[] + signersDescriptions String[] + numRequiredSigners Int? 
+ verified String[] + scriptCbor String + stakeCredentialHash String? + type String + isArchived Boolean @default(false) + clarityApiKey String? + migrationTargetWalletId String? } model Transaction { @@ -97,8 +95,7 @@ model Ballot { model Proxy { id String @id @default(cuid()) - walletId String? // Optional - can be linked to wallet or user - userId String? // Optional - can be linked to user directly + walletId String? proxyAddress String authTokenId String paramUtxo String @@ -106,4 +103,5 @@ model Proxy { isActive Boolean @default(true) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt + userId String? } diff --git a/src/components/multisig/proxy/ProxyControl.tsx b/src/components/multisig/proxy/ProxyControl.tsx index f21eaacd..2893cd42 100644 --- a/src/components/multisig/proxy/ProxyControl.tsx +++ b/src/components/multisig/proxy/ProxyControl.tsx @@ -7,9 +7,13 @@ import { toast } from "@/hooks/use-toast"; import { getTxBuilder } from "@/utils/get-tx-builder"; import useAppWallet from "@/hooks/useAppWallet"; import { api } from "@/utils/api"; +import useTransaction from "@/hooks/useTransaction"; import ProxyOverview from "./ProxyOverview"; import ProxySetup from "./ProxySetup"; import ProxySpend from "./ProxySpend"; +import UTxOSelector from "@/components/pages/wallet/new-transaction/utxoSelector"; +import { getProvider } from "@/utils/get-provider"; +import { MeshTxBuilder, UTxO } from "@meshsdk/core"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Dialog, DialogContent, DialogHeader, DialogTitle } from "@/components/ui/dialog"; @@ -25,7 +29,7 @@ interface ProxyOutput { } interface ProxySetupResult { - tx: string; + tx: MeshTxBuilder; paramUtxo: { txHash: string; outputIndex: number }; authTokenId: string; proxyAddress: string; @@ -38,12 +42,9 @@ export default function ProxyControl() { const network = useSiteStore((state) => state.network); const { appWallet } = useAppWallet(); const ctx = api.useUtils(); + const { newTransaction } = useTransaction(); + - const { mutateAsync: createTransaction } = api.transaction.createTransaction.useMutation({ - onSuccess: () => { - void ctx.transaction.getPendingTransactions.invalidate(); - }, - }); const { mutateAsync: createProxy } = api.proxy.createProxy.useMutation({ onSuccess: () => { @@ -63,7 +64,7 @@ export default function ProxyControl() { { enabled: !!userAddress && !appWallet?.id } ); - const { data: proxies, refetch: refetchProxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + const { data: proxies, refetch: refetchProxies, isLoading: proxiesLoading, error: proxiesError } = api.proxy.getProxiesByUserOrWallet.useQuery( { walletId: appWallet?.id || undefined, userAddress: userAddress || undefined, @@ -71,6 +72,19 @@ export default function ProxyControl() { { enabled: !!(appWallet?.id || userAddress) } ); + // Debug logging for proxy loading + useEffect(() => { + console.log("Proxy loading debug:", { + appWalletId: appWallet?.id, + userAddress, + enabled: !!(appWallet?.id || userAddress), + proxiesLoading, + proxiesError, + proxiesCount: proxies?.length || 0, + proxies: proxies + }); + }, [appWallet?.id, userAddress, proxiesLoading, proxiesError, proxies]); + // State management const [proxyContract, setProxyContract] = useState(null); const [proxyBalance, setProxyBalance] = useState>([]); @@ -87,7 +101,7 @@ export default function ProxyControl() { paramUtxo?: { txHash: string; outputIndex: number }; authTokenId?: string; proxyAddress?: string; - txHex?: string; + txHex?: MeshTxBuilder; 
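+ // Note: txHex now carries the uncompleted MeshTxBuilder rather than a CBOR hex string; on confirmation it is either completed and signed directly or handed to the multisig transaction flow.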
description?: string; }>({}); @@ -105,6 +119,25 @@ { address: "", unit: "lovelace", amount: "" } ]); + // UTxO selection state (UI only). We will still pass all UTxOs from provider to contract. + const [selectedUtxos, setSelectedUtxos] = useState<UTxO[]>([]); + const [manualSelected, setManualSelected] = useState(false); + + // Helper to resolve inputs for multisig-controlled txs + const getMsInputs = useCallback(async (): Promise<{ utxos: UTxO[]; walletAddress: string }> => { + if (!appWallet?.address) { + throw new Error("Multisig wallet address not available"); + } + const provider = getProvider(network); + const utxos = await provider.fetchAddressUTxOs(appWallet.address); + if (!utxos || utxos.length === 0) { + throw new Error("No UTxOs found at multisig wallet address"); + } + console.log("utxos", utxos); + console.log("walletAddress", appWallet.address); + return { utxos, walletAddress: appWallet.address }; + }, [appWallet?.address, network]); + // Initialize proxy contract useEffect(() => { if (connected && wallet && userAddress) { @@ -116,7 +149,8 @@ wallet: wallet, networkId: network, }, - {} + {}, + appWallet?.scriptCbor || undefined, ); setProxyContract(contract); } catch (error) { @@ -172,8 +206,10 @@ // Reset proxy contract state to prevent policy ID conflicts proxyContract.reset(); - const result: ProxySetupResult = await proxyContract.setupProxy(); - + // Use multisig wallet inputs: pass all UTxOs and the multisig wallet address + const { utxos, walletAddress } = await getMsInputs(); + const result: ProxySetupResult = await proxyContract.setupProxy(utxos, walletAddress); + setSetupData({ paramUtxo: result.paramUtxo, authTokenId: result.authTokenId, @@ -217,9 +253,21 @@ setSetupLoading(true); setLoading(true); - // Sign and submit the transaction - const signedTx = await wallet.signTx(setupData.txHex, true); - await wallet.submitTx(signedTx); + // If a multisig script CBOR is set, route through the useTransaction hook to create a signable multisig transaction + if (appWallet?.scriptCbor && setupData.txHex) { + + await newTransaction({ + txBuilder: setupData.txHex, + description: setupData.description, + toastMessage: "Proxy setup transaction created", + }); + } else if (setupData.txHex) { + // Sign and submit the transaction + const signedTx = await wallet.signTx(await setupData.txHex.complete(), true); + await wallet.submitTx(signedTx); + } else { + throw new Error("No transaction to submit"); + } // Store proxy information in the database if (!appWallet?.id && !userAddress) { @@ -430,7 +478,7 @@ // Handle proxy selection const handleProxySelection = useCallback(async (proxyId: string) => { setSelectedProxy(proxyId); - const proxy = proxies?.find(p => p.id === proxyId); + const proxy = proxies?.find((p: any) => p.id === proxyId); if (proxy) { const balance = await getProxyBalance(proxy.proxyAddress); setSelectedProxyBalance(balance); @@ -477,7 +525,7 @@ setLoading(true); // Get the selected proxy - const proxy = proxies?.find(p => p.id === selectedProxy); + const proxy = proxies?.find((p: any) => p.id === selectedProxy); if (!proxy) { throw new Error("Selected proxy not found"); } @@ -491,21 +539,23 @@ }, { paramUtxo: JSON.parse(proxy.paramUtxo), - } + }, + appWallet?.scriptCbor || undefined, );
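+ // Rebuild the proxy contract from the stored paramUtxo; passing the multisig script CBOR (when present) lets the spend below be routed through the multisig signing flow instead of a direct single-wallet submit.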
selectedProxyContract.proxyAddress = proxy.proxyAddress; - const txHex = await selectedProxyContract.spendProxySimple(validOutputs); - - // Sign and submit the transaction - const signedTx = await wallet.signTx(txHex, true); - await wallet.submitTx(signedTx); - - toast({ - title: "Success", - description: "Proxy spend transaction submitted successfully", - variant: "default", - }); + // Pass multisig inputs to spend as well + const { utxos, walletAddress } = await getMsInputs(); + const txHex = await selectedProxyContract.spendProxySimple(validOutputs, utxos, walletAddress); + if (appWallet?.scriptCbor) { + await newTransaction({ + txBuilder: txHex, + description: "Proxy spend transaction", + toastMessage: "Proxy spend transaction created", + }); + } else { + // Complete, sign and submit directly when no multisig script is involved + const signedTx = await wallet.signTx(await txHex.complete(), true); + await wallet.submitTx(signedTx); + } // Refresh balance after successful spend await handleProxySelection(selectedProxy); @@ -702,6 +752,20 @@ onRefreshAllBalances={refreshAllBalances} /> + {/* UTxO Selector for visibility/control. Contract uses all UTxOs from provider. */} + {appWallet && ( +
+ { + setSelectedUtxos(utxos); + setManualSelected(manual); + }} + /> +
+ )} +
)} diff --git a/src/components/multisig/proxy/ProxySetup.tsx b/src/components/multisig/proxy/ProxySetup.tsx index 007a04b9..f99bbc22 100644 --- a/src/components/multisig/proxy/ProxySetup.tsx +++ b/src/components/multisig/proxy/ProxySetup.tsx @@ -13,6 +13,7 @@ import { Play, Check } from "lucide-react"; +import { MeshTxBuilder } from "@meshsdk/core"; interface ProxySetupProps { setupStep: number; @@ -20,7 +21,7 @@ interface ProxySetupProps { paramUtxo?: { txHash: string; outputIndex: number }; authTokenId?: string; proxyAddress?: string; - txHex?: string; + txHex?: MeshTxBuilder; description?: string; }; setupLoading: boolean; diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index 8d86844f..aaa03bc5 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -36,6 +36,7 @@ export class MeshProxyContract extends MeshTxInitiator { proxyAddress?: string; stakeCredential?: string | undefined; networkId: number; + msCbor?: string; // Multisig script cbor // Reset method to clear state for retry reset() { @@ -78,10 +79,13 @@ export class MeshProxyContract extends MeshTxInitiator { contract: { paramUtxo?: UTxO["input"]; }, + msCbor?: string, ) { super(inputs); this.stakeCredential = inputs.stakeCredential; this.networkId = inputs.networkId ? inputs.networkId : 0; + this.msCbor = msCbor; + // Set the proxyAddress if paramUtxo is provided if (contract.paramUtxo) { this.paramUtxo = contract.paramUtxo; @@ -100,15 +104,38 @@ export class MeshProxyContract extends MeshTxInitiator { * const { tx, paramUtxo } = await contract.setupProxy(); * ``` */ - setupProxy = async () => { - const { utxos, collateral, walletAddress } = - await this.getWalletInfoForTx(); + setupProxy = async ( + msUtxos?: UTxO[], + msWalletAddress?: string, + ) => { + if (this.msCbor && !msUtxos && !msWalletAddress) { + throw new Error( + "No UTxOs and wallet address for multisig script cbor found", + ); + } + + let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + + if (this.msCbor && msUtxos && msWalletAddress){ + utxos = msUtxos; + walletAddress = msWalletAddress; + } //look for, get and set a paramUtxo for minting the AuthToken - if (utxos?.length <= 0) { + if (!utxos || utxos.length <= 0) { throw new Error("No UTxOs found"); } - const paramUtxo = utxos[0]!; + const paramUtxo = utxos?.filter((utxo) => + utxo.output.amount.map( + (asset) => + asset.unit === "lovelace" && Number(asset.quantity) >= 20000000, + ).reduce((pa,ca,i,a)=>pa||ca), + )[0]; + if (!paramUtxo) { + throw new Error( + "Insufficicient balance. 
Create one utxo holding at Least 20 ADA.", + ); + } this.paramUtxo = paramUtxo.input; //Set proxyAddress depending on the paramUtxo @@ -121,41 +148,39 @@ export class MeshProxyContract extends MeshTxInitiator { const policyId = this.getAuthTokenPolicyId(); const tokenName = ""; - console.log("policyId", policyId); - console.log("tokenName", tokenName); - console.log("walletAddress", walletAddress); - console.log("paramUtxo", paramUtxo); - console.log("collateral", collateral); - console.log("utxos", utxos); - // Try completing the transaction step by step - let tx = await this.mesh - .txIn( - paramUtxo.input.txHash, - paramUtxo.input.outputIndex, - paramUtxo.output.amount, - paramUtxo.output.address, - ) - .mintPlutusScriptV3() + let tx = await this.mesh.txIn( + paramUtxo.input.txHash, + paramUtxo.input.outputIndex, + paramUtxo.output.amount, + paramUtxo.output.address, + ); + // Add the multisig script cbor if it exists + if (this.msCbor) { + tx.txInScript(this.msCbor); + } + + tx.mintPlutusScriptV3() .mint("10", policyId, tokenName) .mintingScript(this.getAuthTokenCbor()) .mintRedeemerValue(mConStr0([])) - .txOut(proxyAddress, [{ unit: "lovelace", quantity: "1000000" }]) + .txOut(proxyAddress, [{ unit: "lovelace", quantity: "1000000" }]); - for(let i = 0; i < 10; i++) { - tx.txOut(walletAddress, [{ unit: policyId, quantity: "1" }]) - } - - tx.txInCollateral( - collateral.input.txHash, - collateral.input.outputIndex, - collateral.output.amount, - collateral.output.address, - ) - .changeAddress(walletAddress) - .selectUtxosFrom(utxos); + for (let i = 0; i < 10; i++) { + tx.txOut(walletAddress, [ + { unit: policyId, quantity: "1" }, + ]); + } - const txHex = await tx.complete(); + tx.txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ).changeAddress(walletAddress); + //.selectUtxosFrom(utxos); + + const txHex = tx; return { tx: txHex, @@ -167,17 +192,41 @@ export class MeshProxyContract extends MeshTxInitiator { spendProxySimple = async ( outputs: { address: string; unit: string; amount: string }[], + msUtxos?: UTxO[], + msWalletAddress?: string, ) => { - const { utxos, collateral, walletAddress } = - await this.getWalletInfoForTx(); + + if (this.msCbor && !msUtxos && !msWalletAddress) { + throw new Error( + "No UTxOs and wallet address for multisig script cbor found", + ); + } + console.log("msCbor", this.msCbor); + console.log("msUtxos", msUtxos); + console.log("msWalletAddress", msWalletAddress); + + let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + if (this.msCbor && msUtxos && msWalletAddress){ + utxos = msUtxos; + walletAddress = msWalletAddress; + } + console.log("utxos", utxos); console.log("collateral", collateral); console.log("walletAddress", walletAddress); - if (utxos?.length <= 0) { + if (!utxos || utxos.length <= 0) { throw new Error("No UTxOs found"); } + if (!walletAddress) { + throw new Error("No wallet address found"); + } + + if (!collateral) { + throw new Error("No collateral found"); + } + if (this.proxyAddress === undefined) { throw new Error("Proxy address not set. 
Please setupProxy first."); } @@ -191,7 +240,84 @@ export class MeshProxyContract extends MeshTxInitiator { this.proxyAddress, ); - const freeProxyUtxos = proxyUtxos[0]!; + // Calculate spend requirements and ensure coverage by proxy UTxOs + const REQUIRED_FEE_BUFFER = BigInt(500_000); // 0.5 ADA buffer in lovelace + + const requiredByUnit = new Map(); + for (const out of outputs) { + const prev = requiredByUnit.get(out.unit) ?? BigInt(0); + requiredByUnit.set(out.unit, prev + BigInt(out.amount)); + } + // Add buffer to lovelace + const lovelaceNeed = (requiredByUnit.get("lovelace") ?? BigInt(0)) + REQUIRED_FEE_BUFFER; + requiredByUnit.set("lovelace", lovelaceNeed); + + const availableByUnit = new Map(); + for (const utxo of proxyUtxos) { + for (const asset of utxo.output.amount) { + const prev = availableByUnit.get(asset.unit) ?? BigInt(0); + availableByUnit.set(asset.unit, prev + BigInt(asset.quantity)); + } + } + + for (const [unit, needed] of requiredByUnit.entries()) { + const available = availableByUnit.get(unit) ?? BigInt(0); + if (available < needed) { + throw new Error(`Insufficient proxy balance for ${unit}. Needed: ${needed.toString()}, Available: ${available.toString()}`); + } + } + + // Select as few UTxOs as possible to cover required amounts + const remainingByUnit = new Map(requiredByUnit); + const candidateUtxos = [...proxyUtxos]; + const selectedUtxos: typeof proxyUtxos = []; + + const hasRemaining = () => { + for (const value of remainingByUnit.values()) { + if (value > BigInt(0)) return true; + } + return false; + }; + + const contributionScore = (utxo: typeof proxyUtxos[number]) => { + let score = BigInt(0); + for (const asset of utxo.output.amount) { + const remaining = remainingByUnit.get(asset.unit) ?? BigInt(0); + if (remaining > BigInt(0)) { + const qty = BigInt(asset.quantity); + score += qty < remaining ? qty : remaining; + } + } + return score; + }; + + while (hasRemaining()) { + let bestIdx = -1; + let bestScore = BigInt(0); + for (let i = 0; i < candidateUtxos.length; i++) { + const s = contributionScore(candidateUtxos[i]!); + if (s > bestScore) { + bestScore = s; + bestIdx = i; + } + } + if (bestIdx === -1 || bestScore === BigInt(0)) { + throw new Error("Unable to select proxy UTxOs to cover required amounts."); + } + const chosen = candidateUtxos.splice(bestIdx, 1)[0]!; + selectedUtxos.push(chosen); + // Decrease remaining by chosen utxo's amounts + for (const asset of chosen.output.amount) { + const remaining = remainingByUnit.get(asset.unit) ?? BigInt(0); + if (remaining > BigInt(0)) { + const qty = BigInt(asset.quantity); + const newRemaining = remaining - (qty < remaining ? 
qty : remaining); + remainingByUnit.set(asset.unit, newRemaining); + } + } + } + + const freeProxyUtxos = selectedUtxos; console.log("freeProxyUtxos", freeProxyUtxos); const paramScriptAT = this.getAuthTokenCbor(); @@ -203,7 +329,6 @@ export class MeshProxyContract extends MeshTxInitiator { console.log("authTokenUtxos", authTokenUtxos); console.log("policyIdAT", policyIdAT); - if (!authTokenUtxos || authTokenUtxos.length === 0) { throw new Error("No AuthToken found at control wallet address"); @@ -220,17 +345,23 @@ export class MeshProxyContract extends MeshTxInitiator { //prepare Proxy spend //1 Get - const txHex = await this.mesh + let txHex = await this.mesh; + + for ( const input of freeProxyUtxos) { + txHex .spendingPlutusScriptV3() .txIn( - freeProxyUtxos.input.txHash, - freeProxyUtxos.input.outputIndex, - freeProxyUtxos.output.amount, - freeProxyUtxos.output.address, + input.input.txHash, + input.input.outputIndex, + input.output.amount, + input.output.address, ) .txInScript(this.getProxyCbor()) .txInRedeemerValue(mConStr0([])) .txInInlineDatumPresent() + } + + txHex .txIn( authTokenUtxo.input.txHash, authTokenUtxo.input.outputIndex, @@ -243,8 +374,9 @@ export class MeshProxyContract extends MeshTxInitiator { collateral.output.amount, collateral.output.address, ) - .txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]) - + .txOut(walletAddress, [ + { unit: policyIdAT, quantity: "1" }, + ]); for (const output of outputs) { txHex.txOut(output.address, [ @@ -252,14 +384,16 @@ export class MeshProxyContract extends MeshTxInitiator { ]); } - txHex.changeAddress(walletAddress) - // Only pass pubkey (KeyHash) UTxOs for coin selection - .selectUtxosFrom(utxos) + txHex.changeAddress(this.proxyAddress); + + // Add the multisig script cbor if it exists (like in setupProxy) + if (this.msCbor) { + txHex.txInScript(this.msCbor); + } - const tx = await txHex.complete(); - console.log("tx", tx); + console.log("tx", txHex); - return tx; + return txHex; }; /** diff --git a/src/components/pages/homepage/wallets/new-wallet-flow/create/ReviewNativeScript.tsx b/src/components/pages/homepage/wallets/new-wallet-flow/create/ReviewNativeScript.tsx index a85560bf..088c10af 100644 --- a/src/components/pages/homepage/wallets/new-wallet-flow/create/ReviewNativeScript.tsx +++ b/src/components/pages/homepage/wallets/new-wallet-flow/create/ReviewNativeScript.tsx @@ -84,7 +84,20 @@ export default function ReviewNativeScript({ }, [appWallet, walletsUtxos]); if (!mWallet) return null; - const dSAddr = deserializeAddress(mWallet.getScript().address); + + let dSAddr; + try { + dSAddr = deserializeAddress(mWallet.getScript().address); + } catch (error) { + console.error("Failed to get script address:", error); + return ( +
+

+ Unable to generate script address. Please check your wallet configuration. +

+
+ ); + } const menuItems = [ { id: "basics", label: "Basics" }, diff --git a/src/components/pages/wallet/info/index.tsx b/src/components/pages/wallet/info/index.tsx index 6b5849ed..52870326 100644 --- a/src/components/pages/wallet/info/index.tsx +++ b/src/components/pages/wallet/info/index.tsx @@ -21,14 +21,14 @@ export default function WalletInfo() {
- {(!multisigWallet || !multisigWallet.stakingEnabled()) && } {multisigWallet && multisigWallet.stakingEnabled() && } + {multisigWallet && } {multisigWallet && } - +
diff --git a/src/components/pages/wallet/info/migrate-wallet.tsx b/src/components/pages/wallet/info/migrate-wallet.tsx index 9ed36bd4..4dcec498 100644 --- a/src/components/pages/wallet/info/migrate-wallet.tsx +++ b/src/components/pages/wallet/info/migrate-wallet.tsx @@ -1,14 +1,468 @@ +import React, { useState } from "react"; import CardUI from "@/components/ui/card-content"; +import { Button } from "@/components/ui/button"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { ArrowRight, AlertCircle, Loader, CheckCircle, X } from "lucide-react"; import { Wallet } from "@/types/wallet"; +import { toast } from "@/hooks/use-toast"; +import { api } from "@/utils/api"; +import { useUserStore } from "@/lib/zustand/user"; +import MigrationPreChecks from "./migration/MigrationPreChecks"; +import NewWalletCreationStep from "./migration/NewWalletCreationStep"; +import ProxySetupStep from "./migration/ProxySetupStep"; +import FundTransferStep from "./migration/FundTransferStep"; +import MigrationCompleteStep from "./migration/MigrationCompleteStep"; + +// Progress indicator component +const MigrationProgress = ({ + currentStep, + totalSteps, + onAbortMigration, + isAbortingMigration +}: { + currentStep: number; + totalSteps: number; + onAbortMigration: () => void; + isAbortingMigration: boolean; +}) => { + const steps = [ + { id: 0, title: "Pre-checks", description: "Verify wallet status" }, + { id: 1, title: "Create Wallet", description: "Configure new wallet" }, + { id: 2, title: "Proxy Setup", description: "Setup proxy (optional)" }, + { id: 3, title: "Transfer Funds", description: "Move all assets" }, + { id: 4, title: "Complete", description: "Finish migration" }, + ]; + + return ( +
+
+

Migration Progress

+
+ + Step {currentStep + 1} of {totalSteps} + + +
+
+ +
+ {/* Circles Row with Connecting Lines */} +
+ {steps.map((step, index) => ( +
+ {/* Circle */} +
+ {index < currentStep ? ( + + ) : ( + index + 1 + )} +
+ + {/* Connecting Line (except for last step) */} + {index < steps.length - 1 && ( +
+ )} +
+ ))} +
+ + {/* Text Row */} +
+ {steps.map((step, index) => ( +
+
+

+ {step.title} +

+

+ {step.description} +

+
+
+ ))} +
+
+
+ ); +}; export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { + // Migration step state: 0 = pre-checks, 1 = create wallet, 2 = proxy setup, 3 = fund transfer, 4 = complete + const [migrationStep, setMigrationStep] = useState(null); + const [newWalletId, setNewWalletId] = useState(null); + const [isStartingMigration, setIsStartingMigration] = useState(false); + const [isAbortingMigration, setIsAbortingMigration] = useState(false); + const [hasAbortedMigration, setHasAbortedMigration] = useState(false); + + // API mutations + const { mutateAsync: abortMigration } = api.wallet.abortMigration.useMutation(); + const utils = api.useUtils(); + const { userAddress } = useUserStore(); + + + // Auto-start migration if there's already a migration target + React.useEffect(() => { + const migrationTargetId = (appWallet as any).migrationTargetWalletId; + + if (migrationTargetId && migrationStep === null && !hasAbortedMigration) { + // Set the newWalletId but don't auto-start the migration steps + // Let the user explicitly click "Continue Migration" to proceed + setNewWalletId(migrationTargetId); + } + }, [(appWallet as any).migrationTargetWalletId, migrationStep, hasAbortedMigration]); + + // Reset abort flag when migration target is cleared (after successful abort) + React.useEffect(() => { + if (hasAbortedMigration && !(appWallet as any).migrationTargetWalletId) { + setHasAbortedMigration(false); + } + }, [hasAbortedMigration, (appWallet as any).migrationTargetWalletId]); + + const handleStartMigration = () => { + setIsStartingMigration(true); + // If there's already a migration target, start at step 1 (wallet creation) + // Otherwise start at step 0 (pre-checks) + const migrationTargetId = (appWallet as any).migrationTargetWalletId; + if (migrationTargetId) { + setMigrationStep(1); + setNewWalletId(migrationTargetId); + } else { + setMigrationStep(0); + } + }; + + const handlePreChecksContinue = () => { + setMigrationStep(1); + }; + + const handlePreChecksCancel = () => { + setMigrationStep(null); + setIsStartingMigration(false); + }; + + const handleNewWalletCreated = (createdWalletId: string) => { + setNewWalletId(createdWalletId); + setMigrationStep(2); + }; + + const handleNewWalletBack = () => { + setMigrationStep(0); + }; + + const handleProxySetupContinue = () => { + setMigrationStep(3); + }; + + const handleProxySetupSkip = () => { + setMigrationStep(3); + }; + + const handleProxySetupBack = () => { + setMigrationStep(1); + }; + + const handleFundTransferContinue = () => { + setMigrationStep(4); + }; + + const handleFundTransferBack = () => { + setMigrationStep(2); + }; + + const handleArchiveOldWallet = () => { + // Reset migration state + setMigrationStep(null); + setNewWalletId(null); + setIsStartingMigration(false); + + toast({ + title: "Migration Complete", + description: "Your wallet migration has been completed successfully.", + }); + }; + + const handleCancelMigration = () => { + setMigrationStep(null); + setNewWalletId(null); + setIsStartingMigration(false); + }; + + const handleAbortMigration = async () => { + // Try multiple sources for the migration target ID + const migrationTargetId = newWalletId || (appWallet as any).migrationTargetWalletId; + + // If we still don't have it, try to fetch the wallet data directly + let finalMigrationTargetId = migrationTargetId; + if (!finalMigrationTargetId && userAddress) { + try { + const freshWalletData = await utils.wallet.getWallet.fetch({ + address: userAddress, + walletId: appWallet.id, + }); + finalMigrationTargetId = 
(freshWalletData as any).migrationTargetWalletId; + } catch (error) { + console.error("Failed to fetch fresh wallet data:", error); + } + } + + if (!finalMigrationTargetId) { + toast({ + title: "Error", + description: "No migration to abort. No migration target wallet found.", + variant: "destructive", + }); + return; + } + + setIsAbortingMigration(true); + try { + await abortMigration({ + walletId: appWallet.id, + newWalletId: finalMigrationTargetId, + }); + + // Reset migration state + setMigrationStep(null); + setNewWalletId(null); + setIsStartingMigration(false); + setHasAbortedMigration(true); + + // Invalidate wallet queries to refresh the UI + await Promise.all([ + utils.wallet.getWallet.invalidate({ + address: userAddress!, + walletId: appWallet.id, + }), + utils.wallet.getUserWallets.invalidate({ + address: userAddress!, + }), + ]); + + toast({ + title: "Migration Aborted", + description: "The migration has been cancelled and the new wallet has been removed.", + }); + } catch (error) { + console.error("Failed to abort migration:", error); + toast({ + title: "Error", + description: "Failed to abort migration. Please try again.", + variant: "destructive", + }); + } finally { + setIsAbortingMigration(false); + } + }; + + // Show migration steps + if (migrationStep !== null) { + return ( +
+ {/* Progress Indicator */} + + + + + {/* Connecting Line */} +
+
+
+ + {/* Step Content */} +
+ {migrationStep === 0 && ( + + )} + + {migrationStep === 1 && ( + + )} + + {migrationStep === 2 && ( + + )} + + {migrationStep === 3 && ( + + )} + + {migrationStep === 4 && ( + + )} +
+
+ ); + } + + // Show initial migration card return ( - <>Coming soon. +
+ {(appWallet as any).migrationTargetWalletId ? ( + + + + Migration In Progress: You have an ongoing migration. Click "Continue Migration" to resume where you left off. +
+ + Debug: Migration Target ID: {(appWallet as any).migrationTargetWalletId} + +
+
+ ) : ( + + + + Migration Process: This will create a new wallet with updated signers and transfer all funds from your current wallet. + The process includes pre-checks, wallet creation, proxy setup, and fund transfer. + + + )} + +
+
+
+ 1 +
+ Pre-checks (DRep, staking, pending transactions) +
+ +
+
+ 2 +
+ Create new wallet with updated configuration +
+ +
+
+ 3 +
+ Setup proxy for the new wallet (optional) +
+ +
+
+ 4 +
+ Transfer all funds to the new wallet +
+ +
+
+ 5 +
+ Complete migration and archive old wallet +
+
+ +
+ {((appWallet as any).migrationTargetWalletId || newWalletId || migrationStep !== null) && ( + + )} + +
+
); } diff --git a/src/components/pages/wallet/info/migration/FundTransferStep.tsx b/src/components/pages/wallet/info/migration/FundTransferStep.tsx new file mode 100644 index 00000000..ec4c8e62 --- /dev/null +++ b/src/components/pages/wallet/info/migration/FundTransferStep.tsx @@ -0,0 +1,355 @@ +import React, { useState, useEffect } from "react"; +import { Button } from "@/components/ui/button"; +import CardUI from "@/components/ui/card-content"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { ArrowLeft, ArrowRight, Loader, AlertCircle, CheckCircle, Send } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import { api } from "@/utils/api"; +import { useUserStore } from "@/lib/zustand/user"; +import { useSiteStore } from "@/lib/zustand/site"; +import { useWalletsStore } from "@/lib/zustand/wallets"; +import { toast } from "@/hooks/use-toast"; +import useTransaction from "@/hooks/useTransaction"; +import { getProvider } from "@/utils/get-provider"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { getBalanceFromUtxos } from "@/utils/getBalance"; +import { numberWithCommas } from "@/utils/strings"; + +interface FundTransferStepProps { + appWallet: Wallet; + newWalletId: string; + onBack: () => void; + onContinue: () => void; +} + +export default function FundTransferStep({ + appWallet, + newWalletId, + onBack, + onContinue +}: FundTransferStepProps) { + const userAddress = useUserStore((state) => state.userAddress); + const network = useSiteStore((state) => state.network); + const walletsUtxos = useWalletsStore((state) => state.walletsUtxos); + const walletAssets = useWalletsStore((state) => state.walletAssets); + const { newTransaction } = useTransaction(); + + // State + const [newWallet, setNewWallet] = useState(null); + const [isLoadingNewWallet, setIsLoadingNewWallet] = useState(true); + const [isTransferring, setIsTransferring] = useState(false); + const [transferComplete, setTransferComplete] = useState(false); + const [transferTxId, setTransferTxId] = useState(null); + + // Get current wallet data + const currentUtxos = walletsUtxos[appWallet.id] || []; + const currentBalance = getBalanceFromUtxos(currentUtxos); + const nonAdaAssets = walletAssets?.filter((asset) => asset.unit !== "lovelace") || []; + + // Load new wallet information + const { data: newWalletData, isLoading: isLoadingNewWalletData, error: newWalletError } = api.wallet.getWallet.useQuery( + { + address: userAddress!, + walletId: newWalletId, + }, + { + enabled: !!newWalletId && !!userAddress, + retry: false, // Don't retry if wallet doesn't exist + } + ); + + useEffect(() => { + if (newWalletData) { + setNewWallet(newWalletData); + setIsLoadingNewWallet(false); + } else if (newWalletError) { + console.error("Failed to load new wallet:", newWalletError); + setIsLoadingNewWallet(false); + toast({ + title: "Error", + description: "Failed to load new wallet information. 
The new wallet may not exist yet.", + variant: "destructive", + }); + } + }, [newWalletData, newWalletError]); + + const handleTransferAllFunds = async () => { + if (!newWallet || !userAddress) return; + + setIsTransferring(true); + try { + const blockchainProvider = getProvider(network); + const utxos = await blockchainProvider.fetchAddressUTxOs(appWallet.address); + + if (utxos.length === 0) { + toast({ + title: "No Funds to Transfer", + description: "There are no funds in the current wallet to transfer.", + variant: "destructive", + }); + return; + } + + const txBuilder = getTxBuilder(network); + + // Add all UTxOs as inputs + for (const utxo of utxos) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + txBuilder.txInScript(appWallet.scriptCbor); + } + + // Set new wallet address as change address (sends everything there) + txBuilder.changeAddress(newWallet.address); + + // Create the transaction + await newTransaction({ + txBuilder, + description: "Migration: Transfer all funds to new wallet", + toastMessage: "Fund transfer transaction created successfully", + }); + + setTransferComplete(true); + toast({ + title: "Transfer Initiated", + description: "Fund transfer transaction has been created and is pending signatures.", + }); + } catch (error) { + console.error("Failed to transfer funds:", error); + toast({ + title: "Transfer Failed", + description: "Failed to create fund transfer transaction.", + variant: "destructive", + }); + } finally { + setIsTransferring(false); + } + }; + + if (isLoadingNewWalletData) { + return ( + +
+ + Loading new wallet information... +
+
+ ); + } + + if (newWalletError) { + return ( + + + + + The new wallet could not be loaded. This might happen if the wallet hasn't been created yet or if there's a connection issue. + + +
+ +
+
+ ); + } + + if (!newWallet) { + return ( + + + + + Failed to load new wallet information. Please ensure the new wallet was created successfully. + + +
+ +
+
+ ); + } + + return ( +
+ {/* Header */} + + + + + This will transfer all funds from your current wallet to the new wallet. + This action cannot be undone. + + + + + {/* Current Wallet Balance */} + +
+
+
+

ADA Balance

+

Native currency

+
+
+

+ {currentBalance ? numberWithCommas(currentBalance.toFixed(2)) : "0.00"} ₳ +

+
+
+ + {nonAdaAssets.length > 0 && ( +
+

Other Assets

+ {nonAdaAssets.map((asset, index) => ( +
+
+

{asset.unit}

+

Custom asset

+
+
+

{asset.quantity}

+
+
+ ))} +
+ )} + + {currentUtxos.length === 0 && ( +
+ +
+

No Funds

+

+ There are no funds in the current wallet to transfer. +

+
+
+ )} +
+
+ + {/* New Wallet Information */} + +
+
+

Wallet Details

+
+
+ Name: + {newWallet.name} +
+
+ Address: + {newWallet.address.slice(0, 20)}... +
+
+ Signers: + {newWallet.signersAddresses.length} +
+
+
+
+
+ + {/* Transfer Status */} + {transferComplete && ( + +
+ +
+

Transfer Initiated

+

+ The fund transfer transaction has been created and is pending signatures. + You can view it in the transactions section. +

+
+
+
+ )} + + {/* Action Buttons */} + +
+ + {!transferComplete ? ( + + ) : ( + + )} +
+
+
+ ); +} diff --git a/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx b/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx new file mode 100644 index 00000000..23b306a0 --- /dev/null +++ b/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx @@ -0,0 +1,184 @@ +import React, { useState } from "react"; +import { Button } from "@/components/ui/button"; +import CardUI from "@/components/ui/card-content"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { CheckCircle, ExternalLink, Loader, AlertCircle } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import { api } from "@/utils/api"; +import { useUserStore } from "@/lib/zustand/user"; +import { toast } from "@/hooks/use-toast"; +import { useRouter } from "next/router"; + +interface MigrationCompleteStepProps { + appWallet: Wallet; + newWalletId: string; + onBack: () => void; +} + +export default function MigrationCompleteStep({ + appWallet, + newWalletId, + onBack +}: MigrationCompleteStepProps) { + const { userAddress } = useUserStore(); + const router = useRouter(); + const [isCompleting, setIsCompleting] = useState(false); + + // Get new wallet data + const { data: newWalletData, isLoading: isLoadingNewWallet } = api.wallet.getWallet.useQuery( + { + address: userAddress!, + walletId: newWalletId, + }, + { + enabled: !!userAddress && !!newWalletId, + } + ); + + const handleCompleteMigration = async () => { + setIsCompleting(true); + try { + // Clear migration target from old wallet + await api.wallet.clearMigrationTarget.mutate({ + walletId: appWallet.id, + }); + + // Invalidate queries to refresh UI + await api.useContext().wallet.getWallet.invalidate(); + await api.useContext().wallet.getUserWallets.invalidate(); + + toast({ + title: "Migration Complete", + description: "Your wallet migration has been completed successfully!", + }); + + // Navigate to the new wallet + router.push(`/wallets/${newWalletId}/info`); + } catch (error) { + console.error("Failed to complete migration:", error); + toast({ + title: "Error", + description: "Failed to complete migration. Please try again.", + variant: "destructive", + }); + } finally { + setIsCompleting(false); + } + }; + + const handleViewNewWallet = () => { + router.push(`/wallets/${newWalletId}/info`); + }; + + if (isLoadingNewWallet) { + return ( + +
+ + Loading new wallet... +
+
+ ); + } + + return ( +
+ +
+
+ +

+ Migration Successful! +

+

+ Your wallet has been successfully migrated to a new configuration. +

+
+ +
+

What's Next?

+
    +
  • • Your new wallet is ready to use
  • +
  • • All funds have been transferred
  • +
  • • Proxy settings have been updated
  • +
  • • You can now use your new wallet for transactions
  • +
+
+ + {newWalletData && ( +
+

New Wallet Details

+
+
+ Name: {newWalletData.name} +
+
+ Address: + + {newWalletData.signersAddresses?.[0] || "N/A"} + +
+
+ Signers: {newWalletData.signersAddresses?.length || 0} +
+
+ Required Signatures: {newWalletData.numRequiredSigners || 1} +
+
+
+ )} + + + + + Important: Your old wallet configuration is still available but is no longer + the active migration target. You can continue using your new wallet for all future transactions. + + +
+ +
+ + + +
+
+
+ ); +} diff --git a/src/components/pages/wallet/info/migration/MigrationPreChecks.tsx b/src/components/pages/wallet/info/migration/MigrationPreChecks.tsx new file mode 100644 index 00000000..d794b6fc --- /dev/null +++ b/src/components/pages/wallet/info/migration/MigrationPreChecks.tsx @@ -0,0 +1,278 @@ +import React, { useState, useEffect } from "react"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { Button } from "@/components/ui/button"; +import CardUI from "@/components/ui/card-content"; +import { AlertCircle, CheckCircle, Loader, ArrowRight } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import { useWalletsStore } from "@/lib/zustand/wallets"; +import { useSiteStore } from "@/lib/zustand/site"; +import { getProvider } from "@/utils/get-provider"; +import usePendingTransactions from "@/hooks/usePendingTransactions"; +import { MultisigWallet } from "@/utils/multisigSDK"; + +interface PreCheckResult { + status: "loading" | "success" | "warning" | "error"; + message: string; + details?: string; +} + +interface MigrationPreChecksProps { + appWallet: Wallet; + onContinue: () => void; + onCancel: () => void; +} + +export default function MigrationPreChecks({ + appWallet, + onContinue, + onCancel +}: MigrationPreChecksProps) { + const network = useSiteStore((state) => state.network); + const drepInfo = useWalletsStore((state) => state.drepInfo); + const { transactions: pendingTransactions } = usePendingTransactions({ + walletId: appWallet.id, + }); + + const [drepCheck, setDrepCheck] = useState({ status: "loading", message: "Checking DRep registration..." }); + const [stakingCheck, setStakingCheck] = useState({ status: "loading", message: "Checking staking registration..." }); + const [pendingTxCheck, setPendingTxCheck] = useState({ status: "loading", message: "Checking pending transactions..." }); + + // Build multisig wallet to get stake address + const multisigWallet = React.useMemo(() => { + if (!appWallet) return null; + try { + return new MultisigWallet( + appWallet.name, + appWallet.signersAddresses.map((addr, i) => ({ + keyHash: addr, + role: 0, + name: appWallet.signersDescriptions[i] || "", + })), + appWallet.description || "", + appWallet.numRequiredSigners || 1, + network + ); + } catch (error) { + console.error("Failed to build multisig wallet:", error); + return null; + } + }, [appWallet, network]); + + // Check DRep registration + useEffect(() => { + async function checkDRepStatus() { + try { + if (drepInfo) { + setDrepCheck({ + status: drepInfo.active ? "warning" : "success", + message: drepInfo.active ? "DRep is registered" : "DRep is not registered", + details: drepInfo.active + ? "You have an active DRep registration. Consider updating your DRep registration after migration." + : "No DRep registration found." + }); + } else { + setDrepCheck({ + status: "success", + message: "DRep is not registered", + details: "No DRep registration found." + }); + } + } catch (error) { + setDrepCheck({ + status: "error", + message: "Failed to check DRep status", + details: "Could not verify DRep registration status." + }); + } + } + + checkDRepStatus(); + }, [drepInfo]); + + // Check staking registration + useEffect(() => { + async function checkStakingStatus() { + try { + if (!multisigWallet) { + setStakingCheck({ + status: "error", + message: "Could not determine stake address", + details: "Failed to build multisig wallet for staking check." 
+ }); + return; + } + + const stakeAddress = multisigWallet.getStakeAddress(); + if (!stakeAddress) { + setStakingCheck({ + status: "success", + message: "No stake address configured", + details: "This wallet does not have staking capabilities." + }); + return; + } + + const blockchainProvider = getProvider(network); + const stakingInfo = await blockchainProvider.get(`/accounts/${stakeAddress}`); + + setStakingCheck({ + status: stakingInfo.active ? "warning" : "success", + message: stakingInfo.active ? "Stake is registered" : "Stake is not registered", + details: stakingInfo.active + ? `Stake is registered to pool: ${stakingInfo.pool_id || "Unknown"}. Consider updating delegation after migration.` + : "No staking registration found." + }); + } catch (error) { + setStakingCheck({ + status: "error", + message: "Failed to check staking status", + details: "Could not verify staking registration status." + }); + } + } + + checkStakingStatus(); + }, [multisigWallet, network]); + + // Check pending transactions + useEffect(() => { + if (pendingTransactions !== undefined) { + const count = pendingTransactions.length; + setPendingTxCheck({ + status: count > 0 ? "warning" : "success", + message: count > 0 ? `${count} pending transaction(s)` : "No pending transactions", + details: count > 0 + ? "You have pending transactions that may need to be completed before migration." + : "No pending transactions found." + }); + } + }, [pendingTransactions]); + + const allChecksComplete = drepCheck.status !== "loading" && + stakingCheck.status !== "loading" && + pendingTxCheck.status !== "loading"; + + const hasWarnings = drepCheck.status === "warning" || + stakingCheck.status === "warning" || + pendingTxCheck.status === "warning"; + + const hasErrors = drepCheck.status === "error" || + stakingCheck.status === "error" || + pendingTxCheck.status === "error"; + + const getStatusIcon = (status: PreCheckResult["status"]) => { + switch (status) { + case "loading": + return ; + case "success": + return ; + case "warning": + return ; + case "error": + return ; + } + }; + + const getStatusColor = (status: PreCheckResult["status"]) => { + switch (status) { + case "success": + return "border-green-200 bg-green-50 dark:border-green-800 dark:bg-green-900/20"; + case "warning": + return "border-yellow-200 bg-yellow-50 dark:border-yellow-800 dark:bg-yellow-900/20"; + case "error": + return "border-red-200 bg-red-50 dark:border-red-800 dark:bg-red-900/20"; + default: + return "border-gray-200 bg-gray-50 dark:border-gray-700 dark:bg-gray-800"; + } + }; + + return ( + +
+ {/* DRep Check */} +
+
+ {getStatusIcon(drepCheck.status)} +
+

DRep Registration

+

{drepCheck.message}

+ {drepCheck.details && ( +

{drepCheck.details}

+ )} +
+
+
+ + {/* Staking Check */} +
+
+ {getStatusIcon(stakingCheck.status)} +
+

Staking Registration

+

{stakingCheck.message}

+ {stakingCheck.details && ( +

{stakingCheck.details}

+ )} +
+
+
+ + {/* Pending Transactions Check */} +
+
+ {getStatusIcon(pendingTxCheck.status)} +
+

Pending Transactions

+

{pendingTxCheck.message}

+ {pendingTxCheck.details && ( +

{pendingTxCheck.details}

+ )} +
+
+
+ + {/* Summary Alert */} + {allChecksComplete && (hasWarnings || hasErrors) && ( + + + + {hasErrors + ? "Some checks failed. Please resolve these issues before proceeding with migration." + : "Some warnings were found. You can proceed with migration, but consider addressing these items after migration." + } + + + )} + + {/* Action Buttons */} +
+ + +
+
+
+ ); +} diff --git a/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx b/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx new file mode 100644 index 00000000..4cf84e78 --- /dev/null +++ b/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx @@ -0,0 +1,136 @@ +import React from "react"; +import { Button } from "@/components/ui/button"; +import { ArrowLeft, ArrowRight, Loader } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import ReviewWalletInfoCard from "@/components/pages/homepage/wallets/new-wallet-flow/create/ReviewWalletInfoCard"; +import ReviewSignersCard from "@/components/pages/homepage/wallets/new-wallet-flow/create/ReviewSignersCard"; +import ReviewRequiredSignersCard from "@/components/pages/homepage/wallets/new-wallet-flow/create/ReviewRequiredSignersCard"; +import CollapsibleAdvancedSection from "@/components/pages/homepage/wallets/new-wallet-flow/create/CollapsibleAdvancedSection"; +import { useMigrationWalletFlowState } from "./useMigrationWalletFlowState"; + +interface NewWalletCreationStepProps { + appWallet: Wallet; + onBack: () => void; + onContinue: (newWalletId: string) => void; +} + +export default function NewWalletCreationStep({ + appWallet, + onBack, + onContinue +}: NewWalletCreationStepProps) { + const walletFlow = useMigrationWalletFlowState(appWallet); + + const handleCreateWallet = async () => { + await walletFlow.createMigrationWallet(); + }; + + // Watch for newWalletId changes and continue when wallet is created + React.useEffect(() => { + console.log("NewWalletCreationStep: newWalletId changed", walletFlow.newWalletId); + if (walletFlow.newWalletId) { + console.log("NewWalletCreationStep: calling onContinue with", walletFlow.newWalletId); + onContinue(walletFlow.newWalletId); + } + }, [walletFlow.newWalletId, onContinue]); + + return ( +
+ {/* Wallet Info */} + + + {/* Signers */} + + + {/* Required Signatures */} + + + {/* Advanced Section */} + + + {/* Action Section */} +
+ {/* Warning Message */} +
+ + + +

+ Important: Creation is final - signers and rules can not be changed afterwards. +

+
+ + {/* Action Buttons */} +
+ + +
+
+
+ ); +} diff --git a/src/components/pages/wallet/info/migration/ProxySetupStep.tsx b/src/components/pages/wallet/info/migration/ProxySetupStep.tsx new file mode 100644 index 00000000..efcb83ac --- /dev/null +++ b/src/components/pages/wallet/info/migration/ProxySetupStep.tsx @@ -0,0 +1,210 @@ +import React, { useState } from "react"; +import { Button } from "@/components/ui/button"; +import CardUI from "@/components/ui/card-content"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { ArrowLeft, ArrowRight, Loader, AlertCircle, CheckCircle, SkipForward } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import { api } from "@/utils/api"; +import { useUserStore } from "@/lib/zustand/user"; +import { toast } from "@/hooks/use-toast"; +import ProxySetup from "@/components/multisig/proxy/ProxySetup"; + +interface ProxySetupStepProps { + appWallet: Wallet; + newWalletId: string; + onBack: () => void; + onContinue: () => void; +} + +export default function ProxySetupStep({ + appWallet, + newWalletId, + onBack, + onContinue +}: ProxySetupStepProps) { + const { userAddress } = useUserStore(); + const [isCheckingProxies, setIsCheckingProxies] = useState(true); + const [hasExistingProxy, setHasExistingProxy] = useState(false); + const [showProxySetup, setShowProxySetup] = useState(false); + const [isCreatingProxy, setIsCreatingProxy] = useState(false); + + // Check for existing proxies + const { data: existingProxies, isLoading: isLoadingProxies } = api.proxy.getProxiesByWallet.useQuery( + { + walletId: appWallet.id, + }, + { + enabled: !!appWallet.id, + } + ); + + // Check for new wallet proxies + const { data: newWalletProxies, isLoading: isLoadingNewProxies } = api.proxy.getProxiesByWallet.useQuery( + { + walletId: newWalletId, + }, + { + enabled: !!newWalletId, + } + ); + + React.useEffect(() => { + if (!isLoadingProxies && !isLoadingNewProxies) { + setIsCheckingProxies(false); + setHasExistingProxy((existingProxies?.length || 0) > 0); + } + }, [isLoadingProxies, isLoadingNewProxies, existingProxies]); + + const handleSkipProxy = () => { + toast({ + title: "Skipped", + description: "Proxy setup skipped. You can set up a proxy later.", + }); + onContinue(); + }; + + const handleShowProxySetup = () => { + setShowProxySetup(true); + }; + + const handleProxyCreated = () => { + setIsCreatingProxy(false); + toast({ + title: "Success", + description: "Proxy created successfully!", + }); + onContinue(); + }; + + const handleProxyError = (error: string) => { + setIsCreatingProxy(false); + toast({ + title: "Error", + description: `Failed to create proxy: ${error}`, + variant: "destructive", + }); + }; + + if (isCheckingProxies) { + return ( + +
+ + Checking proxy status... +
+
+ ); + } + + if (showProxySetup) { + return ( +
+ + setShowProxySetup(false)} + /> + +
+ ); + } + + return ( +
+ +
+ {hasExistingProxy ? ( + + + + Your current wallet has an existing proxy configuration. + You can create a new proxy for the migrated wallet or skip this step. + + + ) : ( + + + + No existing proxy found. You can optionally create a proxy for your new wallet + to enable advanced features like delegation and governance participation. + + + )} + +
+

What is a Proxy?

+

+ A proxy allows you to delegate certain operations to another address while maintaining + control over your funds. This is useful for: +

+
    +
  • • Staking delegation
  • +
  • • Governance participation (DRep voting)
  • +
  • • Advanced transaction management
  • +
+
+ + {hasExistingProxy && ( +
+

Existing Proxy

+

+ Your current wallet has {existingProxies?.length || 0} active proxy configuration(s). + The proxy will need to be updated to point to your new wallet after migration. +

+
+ )} +
+ +
+ + + +
+
+
+ ); +} diff --git a/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx new file mode 100644 index 00000000..5ab38f35 --- /dev/null +++ b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx @@ -0,0 +1,558 @@ +/** + * useMigrationWalletFlowState Hook + * Adapts the existing wallet flow state for migration purposes + * Pre-populates data from the current wallet and handles migration-specific logic + */ + +import { useState, useEffect, useMemo, useCallback } from "react"; +import { resolvePaymentKeyHash, resolveStakeKeyHash } from "@meshsdk/core"; +import type { MultisigKey } from "@/utils/multisigSDK"; +import { MultisigWallet } from "@/utils/multisigSDK"; +import { paymentKeyHash } from "@/utils/multisigSDK"; + +import { api } from "@/utils/api"; +import { useUserStore } from "@/lib/zustand/user"; +import { useSiteStore } from "@/lib/zustand/site"; +import { useToast } from "@/hooks/use-toast"; +import { Wallet } from "@/types/wallet"; + +export interface MigrationWalletFlowState { + // Core wallet data + name: string; + setName: React.Dispatch>; + description: string; + setDescription: React.Dispatch>; + + // Signers management + signersAddresses: string[]; + setSignerAddresses: React.Dispatch>; + signersDescriptions: string[]; + setSignerDescriptions: React.Dispatch>; + signersStakeKeys: string[]; + setSignerStakeKeys: React.Dispatch>; + addSigner: () => void; + removeSigner: (index: number) => void; + + // Signature rules + numRequiredSigners: number; + setNumRequiredSigners: React.Dispatch>; + nativeScriptType: "all" | "any" | "atLeast"; + setNativeScriptType: React.Dispatch>; + + // Advanced options + stakeKey: string; + setStakeKey: React.Dispatch>; + removeExternalStakeAndBackfill: () => void; + + // UI state + loading: boolean; + setLoading: React.Dispatch>; + + // Computed values + multisigWallet?: MultisigWallet; + isValidForCreate: boolean; + + // Dependencies + userAddress?: string; + network: number; + toast: ReturnType['toast']; + + // Migration-specific + appWallet: Wallet; + newWalletId?: string; + + // Actions + createMigrationWallet: () => Promise; + + // Save callbacks for create page + handleSaveWalletInfo: (newName: string, newDescription: string) => void; + handleSaveSigners: (newAddresses: string[], newDescriptions: string[], newStakeKeys: string[]) => void; + handleSaveSignatureRules: (numRequired: number) => void; + handleSaveAdvanced: (newStakeKey: string, scriptType: "all" | "any" | "atLeast") => void; +} + +export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletFlowState { + const [signersAddresses, setSignerAddresses] = useState([]); + const [signersDescriptions, setSignerDescriptions] = useState([]); + const [signersStakeKeys, setSignerStakeKeys] = useState([]); + const [numRequiredSigners, setNumRequiredSigners] = useState(1); + const [name, setName] = useState(""); + const [description, setDescription] = useState(""); + const [loading, setLoading] = useState(false); + const [nativeScriptType, setNativeScriptType] = useState<"all" | "any" | "atLeast">("atLeast"); + const [stakeKey, setStakeKey] = useState(""); + const [newWalletId, setNewWalletId] = useState(); + + // Dependencies + const userAddress = useUserStore((state) => state.userAddress); + const network = useSiteStore((state) => state.network); + const { toast } = useToast(); + + // Get complete wallet data from database + const { data: walletData } = 
api.wallet.getWallet.useQuery( + { + address: userAddress!, + walletId: appWallet.id, + }, + { + enabled: !!userAddress && !!appWallet.id, + } + ); + + // Get existing new wallet data if migration is in progress + const { data: existingNewWallet } = api.wallet.getNewWallet.useQuery( + { + walletId: (appWallet as any).migrationTargetWalletId || "", + }, + { + enabled: !!(appWallet as any).migrationTargetWalletId, + } + ); + + // Initialize data from current wallet + useEffect(() => { + if (walletData) { + setName(`${walletData.name} - Migrated`); + setDescription(walletData.description || ""); + setSignerAddresses(walletData.signersAddresses || []); + setSignerDescriptions(walletData.signersDescriptions || []); + setNumRequiredSigners(walletData.numRequiredSigners || 1); + setNativeScriptType(walletData.type || "atLeast"); + setStakeKey(walletData.stakeCredentialHash || ""); + + // Filter and process stake keys + const validStakeKeys = (walletData.signersStakeKeys || []).filter((key: string) => { + // Check if it's a valid 28-byte or 32-byte hex hash + if (/^[0-9a-fA-F]{56}$/.test(key) || /^[0-9a-fA-F]{64}$/.test(key)) { + return true; + } + // Check if it's a full stake address + if (key.startsWith('stake1') || key.startsWith('stake_test1')) { + return true; + } + return false; + }); + + setSignerStakeKeys(validStakeKeys); + } + }, [walletData]); + + // Load existing new wallet data if available + useEffect(() => { + if (existingNewWallet) { + setNewWalletId(existingNewWallet.id); + setName(existingNewWallet.name); + setDescription(existingNewWallet.description || ""); + setSignerAddresses(existingNewWallet.signersAddresses || []); + setSignerDescriptions(existingNewWallet.signersDescriptions || []); + setSignerStakeKeys(existingNewWallet.signersStakeKeys || []); + setNumRequiredSigners(existingNewWallet.numRequiredSigners || 1); + setStakeKey(existingNewWallet.stakeCredentialHash || ""); + setNativeScriptType(existingNewWallet.scriptType || "atLeast"); + } + }, [existingNewWallet]); + + // MultisigWallet computation + const multisigWallet = useMemo(() => { + const keys: MultisigKey[] = []; + if (signersAddresses.length === 0) return; + + if (signersAddresses.length > 0) { + signersAddresses.forEach((addr, i) => { + if (addr) { + try { + const paymentHash = paymentKeyHash(addr); + keys.push({ + keyHash: paymentHash, + role: 0, + name: signersDescriptions[i] ?? "", + }); + } catch { + // Invalid payment address at index + } + } + }); + } + + // Only add individual signer stake keys if no external stake credential + if (!stakeKey && signersStakeKeys.length > 0) { + signersStakeKeys.forEach((stakeKey, i) => { + if (stakeKey) { + try { + const stakeKeyHash = resolveStakeKeyHash(stakeKey); + keys.push({ + keyHash: stakeKeyHash, + role: 2, + name: signersDescriptions[i] ?? 
"", + }); + } catch { + // Invalid stake address at index + } + } + }); + } + + if (keys.length === 0) return; + return new MultisigWallet( + name, + keys, + description, + numRequiredSigners, + network, + stakeKey || undefined, + nativeScriptType, + ); + }, [ + name, + description, + signersAddresses, + signersStakeKeys, + signersDescriptions, + numRequiredSigners, + network, + stakeKey, + nativeScriptType, + ]); + + // API Mutations + const { mutate: createNewWallet } = api.wallet.createNewWallet.useMutation({ + onSuccess: (data) => { + setNewWalletId(data.id); + setLoading(false); + toast({ + title: "Wallet Created", + description: "New wallet configuration created successfully", + duration: 3000, + }); + }, + onError: (e) => { + setLoading(false); + toast({ + title: "Error", + description: "Failed to create new wallet configuration", + variant: "destructive", + duration: 3000, + }); + }, + }); + + const { mutate: updateNewWallet } = api.wallet.updateNewWallet.useMutation({ + onSuccess: () => { + toast({ + title: "Saved", + description: "Changes saved successfully", + duration: 2000, + }); + }, + onError: (e) => { + toast({ + title: "Error", + description: "Failed to save changes", + variant: "destructive", + duration: 3000, + }); + }, + }); + + const { mutate: createWallet } = api.wallet.createWallet.useMutation({ + onSuccess: (data) => { + console.log("Wallet created successfully:", data); + + // Set migration target after successful wallet creation + setMigrationTarget({ + walletId: appWallet.id, + newWalletId: data.id, + }); + + setNewWalletId(data.id); + setLoading(false); + toast({ + title: "Success", + description: "New wallet created successfully!", + duration: 3000, + }); + }, + onError: (e) => { + console.error("Failed to create wallet:", e); + setLoading(false); + toast({ + title: "Error", + description: "Failed to create new wallet", + variant: "destructive", + duration: 3000, + }); + }, + }); + + const { mutate: setMigrationTarget } = api.wallet.setMigrationTarget.useMutation({ + onSuccess: () => { + // Migration target set successfully - no need for additional toast since wallet creation already shows success + }, + onError: (e) => { + console.error("Failed to set migration target:", e); + toast({ + title: "Warning", + description: "Wallet created but migration target not set. 
Please try again.", + variant: "destructive", + duration: 3000, + }); + }, + }); + + // Utility functions + function addSigner() { + setSignerAddresses([...signersAddresses, ""]); + setSignerDescriptions([...signersDescriptions, ""]); + setSignerStakeKeys([...signersStakeKeys, ""]); + } + + function removeSigner(index: number) { + const updatedAddresses = [...signersAddresses]; + updatedAddresses.splice(index, 1); + setSignerAddresses(updatedAddresses); + + const updatedDescriptions = [...signersDescriptions]; + updatedDescriptions.splice(index, 1); + setSignerDescriptions(updatedDescriptions); + + const updatedStakeKeys = [...signersStakeKeys]; + updatedStakeKeys.splice(index, 1); + setSignerStakeKeys(updatedStakeKeys); + } + + // Adjust numRequiredSigners if it exceeds the number of signers + useEffect(() => { + if (numRequiredSigners > signersAddresses.length && signersAddresses.length > 0) { + setNumRequiredSigners(signersAddresses.length); + } + }, [signersAddresses.length, numRequiredSigners]); + + // Create migration wallet + async function createMigrationWallet() { + console.log("createMigrationWallet called", { multisigWallet, name, signersAddresses }); + + if (!multisigWallet) { + toast({ + title: "Error", + description: "Invalid wallet configuration. Please check your settings.", + variant: "destructive", + }); + return; + } + + setLoading(true); + try { + const { scriptCbor } = multisigWallet.getScript(); + if (!scriptCbor) { + throw new Error("Failed to generate script CBOR"); + } + + console.log("Creating wallet with data:", { + name, + description, + signersAddresses, + signersDescriptions, + signersStakeKeys, + numRequiredSigners, + stakeCredentialHash: stakeKey || undefined, + type: nativeScriptType, + }); + + // Create the new wallet directly + createWallet({ + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + numRequiredSigners: numRequiredSigners, + scriptCbor: scriptCbor, + stakeCredentialHash: stakeKey || undefined, + type: nativeScriptType, + }); + + } catch (error) { + console.error("Failed to create wallet:", error); + setLoading(false); + toast({ + title: "Error", + description: "Failed to create new wallet. 
Please try again.", + variant: "destructive", + }); + } + } + + // Save callbacks for create page + const handleSaveWalletInfo = useCallback((newName: string, newDescription: string) => { + setName(newName); + setDescription(newDescription); + + if (newWalletId) { + updateNewWallet({ + walletId: newWalletId, + name: newName, + description: newDescription, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + numRequiredSigners: numRequiredSigners, + stakeCredentialHash: stakeKey || undefined, + scriptType: nativeScriptType, + }); + } + }, [newWalletId, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); + + const handleSaveSigners = useCallback((newAddresses: string[], newDescriptions: string[], newStakeKeys: string[]) => { + setSignerAddresses(newAddresses); + setSignerDescriptions(newDescriptions); + setSignerStakeKeys(newStakeKeys); + + if (newWalletId) { + updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: newAddresses, + signersDescriptions: newDescriptions, + signersStakeKeys: newStakeKeys, + numRequiredSigners: numRequiredSigners, + stakeCredentialHash: stakeKey || undefined, + scriptType: nativeScriptType, + }); + } + }, [newWalletId, name, description, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); + + const handleSaveSignatureRules = useCallback((numRequired: number) => { + setNumRequiredSigners(numRequired); + + if (newWalletId) { + updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + numRequiredSigners: numRequired, + stakeCredentialHash: stakeKey || undefined, + scriptType: nativeScriptType, + }); + } + }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, stakeKey, nativeScriptType, updateNewWallet]); + + const handleSaveAdvanced = useCallback((newStakeKey: string, scriptType: "all" | "any" | "atLeast") => { + setStakeKey(newStakeKey); + setNativeScriptType(scriptType); + + // If external stake credential is set, clear all signer stake keys + const updatedSignerStakeKeys = newStakeKey ? 
+ signersStakeKeys.map(() => "") : + signersStakeKeys; + + if (newStakeKey) { + setSignerStakeKeys(updatedSignerStakeKeys); + } + + if (newWalletId) { + updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: updatedSignerStakeKeys, + numRequiredSigners: numRequiredSigners, + stakeCredentialHash: newStakeKey || undefined, + scriptType: scriptType, + }); + } + }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, updateNewWallet]); + + // Remove external stake credential and try to backfill stake keys from addresses + const removeExternalStakeAndBackfill = useCallback(() => { + setStakeKey(""); + setSignerStakeKeys(signersStakeKeys); + + if (newWalletId) { + updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + numRequiredSigners: numRequiredSigners, + stakeCredentialHash: null, + scriptType: nativeScriptType, + }); + } + + toast({ + title: "External stake removed", + description: "External stake credential has been removed.", + duration: 3000, + }); + }, [signersAddresses, signersStakeKeys, newWalletId, name, description, signersDescriptions, numRequiredSigners, nativeScriptType, updateNewWallet, toast]); + + // Validation + const isValidForCreate = signersAddresses.length > 0 && + !signersAddresses.some((signer) => !signer || signer.length === 0) && + (nativeScriptType !== "atLeast" || numRequiredSigners > 0) && + name.length > 0 && + !loading; + + return { + // Core wallet data + name, + setName, + description, + setDescription, + + // Signers management + signersAddresses, + setSignerAddresses, + signersDescriptions, + setSignerDescriptions, + signersStakeKeys, + setSignerStakeKeys, + addSigner, + removeSigner, + + // Signature rules + numRequiredSigners, + setNumRequiredSigners, + nativeScriptType, + setNativeScriptType, + + // Advanced options + stakeKey, + setStakeKey, + removeExternalStakeAndBackfill, + + // UI state + loading, + setLoading, + + // Computed values + multisigWallet, + isValidForCreate, + + // Dependencies + userAddress, + network, + toast, + + // Migration-specific + appWallet, + newWalletId, + + // Actions + createMigrationWallet, + + // Save callbacks + handleSaveWalletInfo, + handleSaveSigners, + handleSaveSignatureRules, + handleSaveAdvanced, + }; +} diff --git a/src/server/api/routers/proxy.ts b/src/server/api/routers/proxy.ts index 4800d61d..6781c94e 100644 --- a/src/server/api/routers/proxy.ts +++ b/src/server/api/routers/proxy.ts @@ -85,12 +85,12 @@ export const proxyRouter = createTRPCRouter({ userAddress: z.string().optional(), })) .query(async ({ ctx, input }) => { - const conditions: any = { - isActive: true, - }; - + console.log("getProxiesByUserOrWallet called with:", input); + + const orConditions: any[] = []; + if (input.walletId) { - conditions.walletId = input.walletId; + orConditions.push({ walletId: input.walletId }); } if (input.userAddress) { @@ -101,16 +101,27 @@ export const proxyRouter = createTRPCRouter({ }); if (user) { - conditions.userId = user.id; + orConditions.push({ userId: user.id }); } } - return ctx.db.proxy.findMany({ - where: conditions, + if (orConditions.length === 0) { + console.log("No conditions found, returning empty array"); + return []; + } + + const result = await 
ctx.db.proxy.findMany({ + where: { + isActive: true, + OR: orConditions, + }, orderBy: { createdAt: "desc", }, }); + + console.log("Found proxies:", result.length, result); + return result; }), getProxyById: publicProcedure @@ -129,6 +140,8 @@ export const proxyRouter = createTRPCRouter({ id: z.string(), description: z.string().optional(), isActive: z.boolean().optional(), + walletId: z.string().optional(), + userId: z.string().optional(), }), ) .mutation(async ({ ctx, input }) => { @@ -139,6 +152,8 @@ export const proxyRouter = createTRPCRouter({ data: { description: input.description, isActive: input.isActive, + walletId: input.walletId, + userId: input.userId, }, }); }), diff --git a/src/server/api/routers/wallets.ts b/src/server/api/routers/wallets.ts index bb62f664..ad6d71d1 100644 --- a/src/server/api/routers/wallets.ts +++ b/src/server/api/routers/wallets.ts @@ -285,4 +285,57 @@ export const walletRouter = createTRPCRouter({ }, }); }), + + setMigrationTarget: publicProcedure + .input(z.object({ + walletId: z.string(), + migrationTargetWalletId: z.string() + })) + .mutation(async ({ ctx, input }) => { + return ctx.db.wallet.update({ + where: { + id: input.walletId, + }, + data: { + migrationTargetWalletId: input.migrationTargetWalletId, + }, + }); + }), + + clearMigrationTarget: publicProcedure + .input(z.object({ walletId: z.string() })) + .mutation(async ({ ctx, input }) => { + return ctx.db.wallet.update({ + where: { + id: input.walletId, + }, + data: { + migrationTargetWalletId: null, + }, + }); + }), + + abortMigration: publicProcedure + .input(z.object({ + walletId: z.string(), + newWalletId: z.string() + })) + .mutation(async ({ ctx, input }) => { + // Delete the new wallet that was created for migration + await ctx.db.newWallet.delete({ + where: { + id: input.newWalletId, + }, + }); + + // Clear the migration target reference from the original wallet + return ctx.db.wallet.update({ + where: { + id: input.walletId, + }, + data: { + migrationTargetWalletId: null, + }, + }); + }), }); From 6c54ecc3959cffa7eabd2b04fc013f1a9cbdb8c5 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Fri, 24 Oct 2025 10:39:22 +0200 Subject: [PATCH 11/15] s09 --- .../proxy/aiken-workspace/plutus.json | 88 +-------- .../aiken-workspace/validators/proxy/spend.ak | 39 +--- .../validators/tests/proxy/spend.ak | 65 ------- src/components/multisig/proxy/offchain.ts | 170 ++++++++++++------ 4 files changed, 124 insertions(+), 238 deletions(-) delete mode 100644 src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak diff --git a/src/components/multisig/proxy/aiken-workspace/plutus.json b/src/components/multisig/proxy/aiken-workspace/plutus.json index 7cd5c1ab..a3e575f5 100644 --- a/src/components/multisig/proxy/aiken-workspace/plutus.json +++ b/src/components/multisig/proxy/aiken-workspace/plutus.json @@ -46,31 +46,6 @@ "compiledCode": 
"59017b010100229800aba2aba1aba0aab9faab9eaab9dab9a488888896600264653001300800198041804800cdc3a400130080024888966002600460106ea800e2646644b300130050018acc004c030dd5003c00a2c806a2b30013370e9001000c56600260186ea801e00516403516402880504c966002601e00313259800980298059baa0078acc004c8cc004004dd6180818069baa0052259800800c528456600266ebcc044c038dd5180880080a4528c4cc008008c04800500c201e899b8700148052294100a44cdc3800a40268050dd698059807000c5900c192cc004cdc3a400460146ea8006297adef6c6089bab300e300b37540028048c8cc004004dd59807180798079807980798059baa0032259800800c5300103d87a8000899192cc004cdc8802800c56600266e3c014006266e95200033010300e0024bd7045300103d87a80004031133004004301200340306eb8c030004c03c00500d18049baa005375c601860126ea800e2c8038601000260066ea802229344d9590011", "hash": "eaff18079b75649a8cfc35b99f8a145c4fabf5c02e239822316e9321" }, - { - "title": "proxy/spend.proxy.spend", - "datum": { - "title": "_datum", - "schema": { - "$ref": "#/definitions/cardano~1transaction~1Datum" - } - }, - "redeemer": { - "title": "redeemer", - "schema": { - "$ref": "#/definitions/types~1ProxyRedeemer" - } - }, - "parameters": [ - { - "title": "auth_token", - "schema": { - "$ref": "#/definitions/cardano~1assets~1PolicyId" - } - } - ], - "compiledCode": "590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", - "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" - }, { "title": "proxy/spend.proxy.else", "redeemer": { @@ -84,8 +59,8 @@ } } ], - "compiledCode": 
"590253010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600264653001300900198049805000cc0240092225980099b8748008c020dd500144ca6002601a003300d300e00198049baa0048992cc004cdc3a400060146ea8016266e212000323300100132330010013758602000644b30010018a5eb8226644b3001323300100132330010013756602a602c60246ea8010896600200314bd7044c8cc88c8cc004004dd5980b802112cc00400620071323301b374e660366ea4014cc06cc060004cc06cc0640052f5c066006006603a004603600280c8dd7180980099801801980c001180b000a0282259800800c528456600266e3cdd7180a9bac301500100f8a51899801001180b000a020404d13301200233004004001899802002000a01c30110013012001403c44b30010018a40011337009001198010011808800a01c899912cc0040060051598009808800c4c8c96600266e3cdd71809001006456600266e3cdd71809000a44100899b87375a602460260029009c528201a8a50403460240026eb0c040006004807100e0a5032330010013756601e6020602000444b30010018a5eb82264664464660020026eacc044010896600200310038991980a9ba733015375200a6602a60240026602a602600297ae03300300330170023015001404c6eb8c034004cc00c00cc048008c04000500e2012300e300e300a375400491112cc004cdc3a400000515980098071baa008800c5900f456600266e1d20020028acc004c038dd500440062c807a2c806100c0c024dd50014590070c024004c010dd5004c52689b2b200401", - "hash": "0dc0f294df05817b4471985a0fd255fa8c9b81fadc4c630a66783943" + "compiledCode": "59011a010100229800aba2aba1aba0aab9faab9eaab9dab9a9bae0024888888896600266e212000323300100132330010013758601260146014600c6ea8c024c018dd5005912cc004006297ae0899912cc004c8cc004004c8cc004004dd59807180798059baa0042259800800c52f5c1132332232330010013756602000844b30010018801c4c8cc050dd39980a1ba90053301430110013301430120014bd7019801801980b001180a000a024375c6018002660060066022004601e0028068896600200314a115980099b8f375c601c6eb0c0380040222946266004004601e002804900c44cc02c008cc0100100062660080080028038c028004c02c005008112cc0040062900044cdc024004660040046014002803a29344d95900201", + "hash": "2baa0c6ed4aa555c15d53f6b85062d8661f380fb8d023d1b1535b035" } ], "definitions": { @@ -93,55 +68,13 @@ "title": "ByteArray", "dataType": "bytes" }, - "Data": { - "title": "Data", - "description": "Any Plutus data." - }, "Int": { "dataType": "integer" }, - "aiken/crypto/DataHash": { - "title": "DataHash", - "dataType": "bytes" - }, "cardano/assets/PolicyId": { "title": "PolicyId", "dataType": "bytes" }, - "cardano/transaction/Datum": { - "title": "Datum", - "description": "An output `Datum`.", - "anyOf": [ - { - "title": "NoDatum", - "dataType": "constructor", - "index": 0, - "fields": [] - }, - { - "title": "DatumHash", - "description": "A datum referenced by its hash digest.", - "dataType": "constructor", - "index": 1, - "fields": [ - { - "$ref": "#/definitions/aiken~1crypto~1DataHash" - } - ] - }, - { - "title": "InlineDatum", - "description": "A datum completely inlined in the output.", - "dataType": "constructor", - "index": 2, - "fields": [ - { - "$ref": "#/definitions/Data" - } - ] - } - ] - }, "cardano/transaction/OutputReference": { "title": "OutputReference", "description": "An `OutputReference` is a unique reference to an output on-chain. 
The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", @@ -179,23 +112,6 @@ "fields": [] } ] - }, - "types/ProxyRedeemer": { - "title": "ProxyRedeemer", - "anyOf": [ - { - "title": "SpendFunds", - "dataType": "constructor", - "index": 0, - "fields": [] - }, - { - "title": "RemoveEmptyInstance", - "dataType": "constructor", - "index": 1, - "fields": [] - } - ] } } } \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak index d8ea9620..f983ce93 100644 --- a/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak +++ b/src/components/multisig/proxy/aiken-workspace/validators/proxy/spend.ak @@ -1,36 +1,13 @@ use aiken/collection/list use cardano/assets.{PolicyId} -use cardano/transaction.{Datum,OutputReference, Transaction} -use cocktail.{only_minted_token, outputs_with_policy} -use types.{ProxyRedeemer, RemoveEmptyInstance, SpendFunds} +use cardano/transaction.{Transaction} +use cardano/script_context.{ScriptContext} +use cocktail.{outputs_with_policy} validator proxy(auth_token: PolicyId) { - spend( - _datum: Option, - redeemer: ProxyRedeemer, - _input: OutputReference, - self: Transaction, - ) { - let Transaction { mint, outputs, .. } = self - - when redeemer is { - // check if one auth token is moved in the transaction - SpendFunds -> { - // Check if any token from the auth_token policy is present in the outputs - // This means the token is being moved/transferred signalising that the proxy owners are spending funds - let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) - // The token is considered "moved" if any auth token appears in outputs - // (meaning it's being transferred somewhere) - // This is used to ensure that the proxy owners are spending funds - // and not someone else - list.length(auth_tokens_in_outputs) > 0 - } - - RemoveEmptyInstance -> only_minted_token(mint, auth_token, "", -10)? - } - } - - else(_) { - fail + else(ctx: ScriptContext) { + let Transaction { outputs, .. 
} = ctx.transaction + let auth_tokens_in_outputs = outputs_with_policy(outputs, auth_token) + list.length(auth_tokens_in_outputs) > 0 } -} +} \ No newline at end of file diff --git a/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak b/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak deleted file mode 100644 index 05e836fc..00000000 --- a/src/components/multisig/proxy/aiken-workspace/validators/tests/proxy/spend.ak +++ /dev/null @@ -1,65 +0,0 @@ -use cardano/assets.{add, from_lovelace} -use mocktail.{ - complete, mint, mock_policy_id, mock_pub_key_address, mock_utxo_ref, - mocktail_tx, tx_out, -} -use proxy/spend as proxy_spend -use types.{RemoveEmptyInstance, SpendFunds} - -const auth_token = mock_policy_id(0) - -// SpendFunds: succeeds when any token of auth_token policy appears in outputs -test p_proxy_spend_success_spend_funds_when_auth_token_in_outputs() { - let value = from_lovelace(2_000_000) |> add(auth_token, "", 1) - let tx = - mocktail_tx() - |> tx_out(True, mock_pub_key_address(0, None), value) - |> complete() - - proxy_spend.proxy.spend(auth_token, None, SpendFunds, mock_utxo_ref(0, 0), tx) -} - -// SpendFunds: fails when no auth_token policy appears in outputs -test p_proxy_spend_fail_spend_funds_when_no_auth_token_in_outputs() { - let value = from_lovelace(2_000_000) - let tx = - mocktail_tx() - |> tx_out(True, mock_pub_key_address(1, None), value) - |> complete() - - !proxy_spend.proxy.spend( - auth_token, - None, - SpendFunds, - mock_utxo_ref(0, 0), - tx, - ) -} - -// RemoveEmptyInstance: succeeds when exact burn of -10 under auth_token policy -test p_proxy_spend_success_remove_empty_instance_with_auth_burnt() { - let tx = - mocktail_tx() - |> mint(True, -10, auth_token, "") - |> complete() - - proxy_spend.proxy.spend( - auth_token, - None, - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} - -// RemoveEmptyInstance: fails when no burn occurs -test p_proxy_spend_fail_remove_empty_instance_without_burn() { - let tx = mocktail_tx() |> complete() - !proxy_spend.proxy.spend( - auth_token, - None, - RemoveEmptyInstance, - mock_utxo_ref(0, 0), - tx, - ) -} diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index aaa03bc5..d68c83c8 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -16,6 +16,7 @@ import { UTxO, applyCborEncoding, applyParamsToScript, + resolveScriptHashDRepId, } from "@meshsdk/core"; import { parseDatumCbor } from "@meshsdk/core-cst"; @@ -104,11 +105,8 @@ export class MeshProxyContract extends MeshTxInitiator { * const { tx, paramUtxo } = await contract.setupProxy(); * ``` */ - setupProxy = async ( - msUtxos?: UTxO[], - msWalletAddress?: string, - ) => { - if (this.msCbor && !msUtxos && !msWalletAddress) { + setupProxy = async (msUtxos?: UTxO[], msWalletAddress?: string) => { + if (this.msCbor && !msUtxos && !msWalletAddress) { throw new Error( "No UTxOs and wallet address for multisig script cbor found", ); @@ -116,7 +114,7 @@ export class MeshProxyContract extends MeshTxInitiator { let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); - if (this.msCbor && msUtxos && msWalletAddress){ + if (this.msCbor && msUtxos && msWalletAddress) { utxos = msUtxos; walletAddress = msWalletAddress; } @@ -126,10 +124,12 @@ export class MeshProxyContract extends MeshTxInitiator { throw new Error("No UTxOs found"); } const paramUtxo = utxos?.filter((utxo) => - utxo.output.amount.map( - (asset) 
=> - asset.unit === "lovelace" && Number(asset.quantity) >= 20000000, - ).reduce((pa,ca,i,a)=>pa||ca), + utxo.output.amount + .map( + (asset) => + asset.unit === "lovelace" && Number(asset.quantity) >= 20000000, + ) + .reduce((pa, ca, i, a) => pa || ca), )[0]; if (!paramUtxo) { throw new Error( @@ -167,9 +167,7 @@ export class MeshProxyContract extends MeshTxInitiator { .txOut(proxyAddress, [{ unit: "lovelace", quantity: "1000000" }]); for (let i = 0; i < 10; i++) { - tx.txOut(walletAddress, [ - { unit: policyId, quantity: "1" }, - ]); + tx.txOut(walletAddress, [{ unit: policyId, quantity: "1" }]); } tx.txInCollateral( @@ -178,7 +176,6 @@ export class MeshProxyContract extends MeshTxInitiator { collateral.output.amount, collateral.output.address, ).changeAddress(walletAddress); - //.selectUtxosFrom(utxos); const txHex = tx; @@ -195,42 +192,29 @@ export class MeshProxyContract extends MeshTxInitiator { msUtxos?: UTxO[], msWalletAddress?: string, ) => { - - if (this.msCbor && !msUtxos && !msWalletAddress) { + if (this.msCbor && !msUtxos && !msWalletAddress) { throw new Error( "No UTxOs and wallet address for multisig script cbor found", ); } - console.log("msCbor", this.msCbor); - console.log("msUtxos", msUtxos); - console.log("msWalletAddress", msWalletAddress); - let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); - if (this.msCbor && msUtxos && msWalletAddress){ + // If multisig inputs are provided, use them instead of the wallet inputs + if (this.msCbor && msUtxos && msWalletAddress) { utxos = msUtxos; walletAddress = msWalletAddress; } - - console.log("utxos", utxos); - console.log("collateral", collateral); - console.log("walletAddress", walletAddress); - if (!utxos || utxos.length <= 0) { throw new Error("No UTxOs found"); } - if (!walletAddress) { throw new Error("No wallet address found"); } - if (!collateral) { throw new Error("No collateral found"); } - if (this.proxyAddress === undefined) { throw new Error("Proxy address not set. Please setupProxy first."); } - const blockchainProvider = this.mesh.fetcher; if (!blockchainProvider) { throw new Error("Blockchain provider not found"); @@ -249,7 +233,8 @@ export class MeshProxyContract extends MeshTxInitiator { requiredByUnit.set(out.unit, prev + BigInt(out.amount)); } // Add buffer to lovelace - const lovelaceNeed = (requiredByUnit.get("lovelace") ?? BigInt(0)) + REQUIRED_FEE_BUFFER; + const lovelaceNeed = + (requiredByUnit.get("lovelace") ?? BigInt(0)) + REQUIRED_FEE_BUFFER; requiredByUnit.set("lovelace", lovelaceNeed); const availableByUnit = new Map(); @@ -263,7 +248,9 @@ export class MeshProxyContract extends MeshTxInitiator { for (const [unit, needed] of requiredByUnit.entries()) { const available = availableByUnit.get(unit) ?? BigInt(0); if (available < needed) { - throw new Error(`Insufficient proxy balance for ${unit}. Needed: ${needed.toString()}, Available: ${available.toString()}`); + throw new Error( + `Insufficient proxy balance for ${unit}. Needed: ${needed.toString()}, Available: ${available.toString()}`, + ); } } @@ -279,7 +266,7 @@ export class MeshProxyContract extends MeshTxInitiator { return false; }; - const contributionScore = (utxo: typeof proxyUtxos[number]) => { + const contributionScore = (utxo: (typeof proxyUtxos)[number]) => { let score = BigInt(0); for (const asset of utxo.output.amount) { const remaining = remainingByUnit.get(asset.unit) ?? 
BigInt(0); @@ -302,7 +289,9 @@ export class MeshProxyContract extends MeshTxInitiator { } } if (bestIdx === -1 || bestScore === BigInt(0)) { - throw new Error("Unable to select proxy UTxOs to cover required amounts."); + throw new Error( + "Unable to select proxy UTxOs to cover required amounts.", + ); } const chosen = candidateUtxos.splice(bestIdx, 1)[0]!; selectedUtxos.push(chosen); @@ -318,18 +307,12 @@ export class MeshProxyContract extends MeshTxInitiator { } const freeProxyUtxos = selectedUtxos; - console.log("freeProxyUtxos", freeProxyUtxos); - const paramScriptAT = this.getAuthTokenCbor(); const policyIdAT = resolveScriptHash(paramScriptAT, "V3"); - const authTokenUtxos = utxos.filter((utxo) => utxo.output.amount.some((asset) => asset.unit === policyIdAT), ); - console.log("authTokenUtxos", authTokenUtxos); - console.log("policyIdAT", policyIdAT); - if (!authTokenUtxos || authTokenUtxos.length === 0) { throw new Error("No AuthToken found at control wallet address"); } @@ -347,20 +330,20 @@ export class MeshProxyContract extends MeshTxInitiator { //1 Get let txHex = await this.mesh; - for ( const input of freeProxyUtxos) { + for (const input of freeProxyUtxos) { txHex - .spendingPlutusScriptV3() - .txIn( - input.input.txHash, - input.input.outputIndex, - input.output.amount, - input.output.address, - ) - .txInScript(this.getProxyCbor()) - .txInRedeemerValue(mConStr0([])) - .txInInlineDatumPresent() + .spendingPlutusScriptV3() + .txIn( + input.input.txHash, + input.input.outputIndex, + input.output.amount, + input.output.address, + ) + .txInScript(this.getProxyCbor()) + .txInInlineDatumPresent() + .txInRedeemerValue(mConStr0([])); } - + txHex .txIn( authTokenUtxo.input.txHash, @@ -374,9 +357,7 @@ export class MeshProxyContract extends MeshTxInitiator { collateral.output.amount, collateral.output.address, ) - .txOut(walletAddress, [ - { unit: policyIdAT, quantity: "1" }, - ]); + .txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]); for (const output of outputs) { txHex.txOut(output.address, [ @@ -391,8 +372,85 @@ export class MeshProxyContract extends MeshTxInitiator { txHex.txInScript(this.msCbor); } - console.log("tx", txHex); + return txHex; + }; + registerProxyDrep = async (anchorUrl: string, anchorHash: string, msUtxos?: UTxO[], msWalletAddress?: string ) => { + if (this.proxyAddress === undefined) { + throw new Error("Proxy address not set. Please setupProxy first."); + } + if (!anchorUrl || !anchorHash) { + throw new Error("Anchor URL and hash are required"); + } + if (this.msCbor && !msUtxos && !msWalletAddress) { + throw new Error( + "No UTxOs and wallet address for multisig script cbor found", + ); + } + let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + // If multisig inputs are provided, use them instead of the wallet inputs + if (this.msCbor && msUtxos && msWalletAddress) { + utxos = msUtxos; + walletAddress = msWalletAddress; + } + if (!utxos || utxos.length <= 0) { + throw new Error("No UTxOs found"); + } + if (!walletAddress) { + throw new Error("No wallet address found"); + } + if (!collateral) { + throw new Error("No collateral found"); + } + if (this.proxyAddress === undefined) { + throw new Error("Proxy address not set. 
Please setupProxy first."); + } + const blockchainProvider = this.mesh.fetcher; + if (!blockchainProvider) { + throw new Error("Blockchain provider not found"); + } + + const paramScriptAT = this.getAuthTokenCbor(); + const policyIdAT = resolveScriptHash(paramScriptAT, "V3"); + const authTokenUtxos = utxos.filter((utxo) => + utxo.output.amount.some((asset) => asset.unit === policyIdAT), + ); + + if (!authTokenUtxos || authTokenUtxos.length === 0) { + throw new Error("No AuthToken found at control wallet address"); + } + //ToDo check if AuthToken utxo is used in a pending transaction and blocked then use a free AuthToken + const authTokenUtxo = authTokenUtxos[0]; + if (!authTokenUtxo) { + throw new Error("No AuthToken found"); + } + const authTokenUtxoAmt = authTokenUtxo.output.amount; + if (!authTokenUtxoAmt) { + throw new Error("No AuthToken amount found"); + } + + const drepId = resolveScriptHashDRepId(this.getProxyCbor()); + const txHex = await this.mesh; + txHex + .txIn( + authTokenUtxo.input.txHash, + authTokenUtxo.input.outputIndex, + authTokenUtxo.output.amount, + authTokenUtxo.output.address, + ) + .txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ) + .txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]) + .drepRegistrationCertificate(drepId, { + anchorUrl: anchorUrl, + anchorDataHash: anchorHash, + }) + .certificateScript(this.getProxyCbor()) + .changeAddress(walletAddress); return txHex; }; From 7ce6ef036fe5082168c1618b0c9bc39bee759a53 Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Mon, 27 Oct 2025 10:24:25 +0100 Subject: [PATCH 12/15] s11 --- .../multisig/proxy/ProxyControl.tsx | 121 +++++- .../multisig/proxy/ProxyOverview.tsx | 91 ++++- src/components/multisig/proxy/offchain.ts | 353 ++++++++++++++++-- .../pages/wallet/governance/ballot/ballot.tsx | 151 +++++++- .../pages/wallet/governance/card-info.tsx | 136 ++++++- .../pages/wallet/governance/drep/drepForm.tsx | 14 +- .../wallet/governance/drep/registerDrep.tsx | 148 +++++++- .../pages/wallet/governance/drep/retire.tsx | 280 +++++++++++--- .../wallet/governance/drep/updateDrep.tsx | 89 ++++- .../pages/wallet/governance/index.tsx | 2 +- .../wallet/governance/proposal/addBallot.tsx | 0 .../governance/proposal/voteButtton.tsx | 134 ++++++- src/hooks/useProxy.ts | 48 +++ 13 files changed, 1450 insertions(+), 117 deletions(-) delete mode 100644 src/components/pages/wallet/governance/proposal/addBallot.tsx create mode 100644 src/hooks/useProxy.ts diff --git a/src/components/multisig/proxy/ProxyControl.tsx b/src/components/multisig/proxy/ProxyControl.tsx index 2893cd42..a892ba6b 100644 --- a/src/components/multisig/proxy/ProxyControl.tsx +++ b/src/components/multisig/proxy/ProxyControl.tsx @@ -14,13 +14,14 @@ import ProxySpend from "./ProxySpend"; import UTxOSelector from "@/components/pages/wallet/new-transaction/utxoSelector"; import { getProvider } from "@/utils/get-provider"; import { MeshTxBuilder, UTxO } from "@meshsdk/core"; +import { useProxy } from "@/hooks/useProxy"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Dialog, DialogContent, DialogHeader, DialogTitle } from "@/components/ui/dialog"; import { Card, CardContent, CardHeader } from "@/components/ui/card"; import { Button } from "@/components/ui/button"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; -import { AlertCircle, ChevronDown, ChevronUp, Wallet, TrendingUp, Info } from "lucide-react"; 
+import { AlertCircle, ChevronDown, ChevronUp, Wallet, TrendingUp, Info, UserCheck, UserX } from "lucide-react"; interface ProxyOutput { address: string; @@ -43,6 +44,7 @@ export default function ProxyControl() { const { appWallet } = useAppWallet(); const ctx = api.useUtils(); const { newTransaction } = useTransaction(); + const { isProxyEnabled, selectedProxyId, setSelectedProxy, clearSelectedProxy, toggleProxy } = useProxy(); @@ -90,10 +92,14 @@ export default function ProxyControl() { const [proxyBalance, setProxyBalance] = useState>([]); const [isProxySetup, setIsProxySetup] = useState(false); const [loading, setLocalLoading] = useState(false); - const [selectedProxy, setSelectedProxy] = useState(""); const [selectedProxyBalance, setSelectedProxyBalance] = useState>([]); const [allProxyBalances, setAllProxyBalances] = useState>>({}); const [tvlLoading, setTvlLoading] = useState(false); + + // DRep information state + const [selectedProxyDrepId, setSelectedProxyDrepId] = useState(""); + const [selectedProxyDrepStatus, setSelectedProxyDrepStatus] = useState(null); + const [drepLoading, setDrepLoading] = useState(false); // Setup flow state const [setupStep, setSetupStep] = useState(0); @@ -133,6 +139,7 @@ export default function ProxyControl() { if (!utxos || utxos.length === 0) { throw new Error("No UTxOs found at multisig wallet address"); } + console.log("utxos", utxos); console.log("walletAddress", appWallet.address); return { utxos, walletAddress: appWallet.address }; @@ -206,8 +213,8 @@ export default function ProxyControl() { // Reset proxy contract state to prevent policy ID conflicts proxyContract.reset(); - // Use multisig wallet inputs: pass all UTxOs, first >=5 ADA as collateral, and ms wallet address - const { utxos, collateral, walletAddress } = await getMsInputs(); + // Use multisig wallet inputs: pass all UTxOs and ms wallet address + const { utxos, walletAddress } = await getMsInputs(); const result: ProxySetupResult = await proxyContract.setupProxy(utxos, walletAddress); setSetupData({ @@ -341,10 +348,10 @@ export default function ProxyControl() { }, []); const handleStartSpending = useCallback(() => { - if (selectedProxy) { + if (selectedProxyId) { setShowSpendSection(true); } - }, [selectedProxy]); + }, [selectedProxyId]); const handleCloseSetup = useCallback(() => { setShowSetupModal(false); @@ -387,6 +394,40 @@ export default function ProxyControl() { } }, [proxyContract, network, wallet]); + // Get DRep information for a specific proxy + const getProxyDrepInfo = useCallback(async (proxy: any) => { + if (!proxy) return { drepId: "", status: null }; + + try { + setDrepLoading(true); + + // Create a temporary contract instance for this proxy + const tempContract = new MeshProxyContract( + { + mesh: getTxBuilder(network), + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet?.scriptCbor || undefined, + ); + tempContract.proxyAddress = proxy.proxyAddress; + + // Get DRep ID and status + const drepId = await tempContract.getDrepId(); + const status = await tempContract.getDrepStatus(); + + return { drepId, status }; + } catch (error) { + console.error("Get proxy DRep info error:", error); + return { drepId: "", status: null }; + } finally { + setDrepLoading(false); + } + }, [network, wallet, appWallet?.scriptCbor]); + // Fetch all proxy balances for TVL calculation const fetchAllProxyBalances = useCallback(async () => { if (!proxies || proxies.length === 0 || !proxyContract) return; @@ -480,10 +521,17 @@ export 
default function ProxyControl() { setSelectedProxy(proxyId); const proxy = proxies?.find((p: any) => p.id === proxyId); if (proxy) { - const balance = await getProxyBalance(proxy.proxyAddress); + // Fetch both balance and DRep information + const [balance, drepInfo] = await Promise.all([ + getProxyBalance(proxy.proxyAddress), + getProxyDrepInfo(proxy) + ]); + setSelectedProxyBalance(balance); + setSelectedProxyDrepId(drepInfo.drepId); + setSelectedProxyDrepStatus(drepInfo.status); } - }, [proxies, getProxyBalance]); + }, [proxies, getProxyBalance, getProxyDrepInfo, setSelectedProxy]); // Spend from proxy @@ -497,7 +545,7 @@ export default function ProxyControl() { return; } - if (!selectedProxy) { + if (!selectedProxyId) { toast({ title: "Error", description: "Please select a proxy to spend from", @@ -525,7 +573,7 @@ export default function ProxyControl() { setLoading(true); // Get the selected proxy - const proxy = proxies?.find((p: any) => p.id === selectedProxy); + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); if (!proxy) { throw new Error("Selected proxy not found"); } @@ -558,7 +606,7 @@ export default function ProxyControl() { } // Refresh balance after successful spend - await handleProxySelection(selectedProxy); + await handleProxySelection(selectedProxyId); // Close the spend modal after successful transaction setTimeout(() => { @@ -589,7 +637,7 @@ export default function ProxyControl() { setSpendLoading(false); setLoading(false); } - }, [proxyContract, connected, spendOutputs, selectedProxy, proxies, network, wallet, setLoading, handleProxySelection]); + }, [proxyContract, connected, spendOutputs, selectedProxyId, proxies, network, wallet, setLoading, handleProxySelection]); // Copy to clipboard @@ -670,6 +718,48 @@ export default function ProxyControl() {
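For reference, the DRep lookup in getProxyDrepInfo above rebuilds a per-proxy contract instance from the proxy row stored in the database: paramUtxo is persisted as a JSON string, so it is parsed before being handed to the constructor, and the proxy address saved at setup time is reattached afterwards. A condensed usage sketch of that pattern (all names come from the surrounding component and from offchain.ts in this same patch; nothing new is introduced):

    // Rebuild a MeshProxyContract for a proxy record loaded via tRPC
    const contract = new MeshProxyContract(
      { mesh: getTxBuilder(network), wallet: wallet, networkId: network },
      { paramUtxo: JSON.parse(proxy.paramUtxo) },   // stored as a JSON string in the DB
      appWallet?.scriptCbor,                        // optional multisig native script (msCbor)
    );
    contract.proxyAddress = proxy.proxyAddress;     // persisted when the proxy was set up
    const drepId = contract.getDrepId();            // DRep id derived from the proxy script hash
    const status = await contract.getDrepStatus();  // provider lookup; empty when not registered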
+ {/* Global Proxy Toggle */} +
+
+ { + if (!e.target.checked) { + clearSelectedProxy(); + } + toggleProxy(); + }} + className="sr-only" + /> + +
+ + + + + Global Proxy + + + +

Enable proxy functionality across the application

+
+
+
+
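The toggle above drives the global proxy state through the new useProxy hook (src/hooks/useProxy.ts is added in this commit but its body is not shown in this hunk). A minimal sketch of a store exposing the interface consumed here and in the governance components below, assuming a zustand store in line with the other stores in this codebase; the actual implementation may differ:

    import { create } from "zustand";

    // Sketch only: the real src/hooks/useProxy.ts is not part of this hunk.
    interface ProxyState {
      isProxyEnabled: boolean;   // global on/off switch rendered by the checkbox above
      selectedProxyId: string;   // proxy chosen in ProxyControl, "" when none is selected
      toggleProxy: () => void;
      setSelectedProxy: (id: string) => void;
      clearSelectedProxy: () => void;
    }

    export const useProxy = create<ProxyState>((set) => ({
      isProxyEnabled: false,
      selectedProxyId: "",
      toggleProxy: () => set((s) => ({ isProxyEnabled: !s.isProxyEnabled })),
      setSelectedProxy: (id) => set({ selectedProxyId: id }),
      clearSelectedProxy: () => set({ selectedProxyId: "" }),
    }));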
+ {/* TVL Display */} @@ -739,10 +829,13 @@ export default function ProxyControl() { {/* Overview Section */} ; proxyBalance: Array<{ unit: string; quantity: string }>; isProxySetup: boolean; + selectedProxyDrepId: string; + selectedProxyDrepStatus: any; + drepLoading: boolean; onProxySelection: (proxyId: string) => void; onCopyToClipboard: (text: string) => void; onStartSetup: () => void; @@ -461,6 +467,9 @@ const ProxyOverview = memo(function ProxyOverview({ selectedProxyBalance, proxyBalance, isProxySetup, + selectedProxyDrepId, + selectedProxyDrepStatus, + drepLoading, onProxySelection, onCopyToClipboard, onStartSetup, @@ -490,6 +499,86 @@ const ProxyOverview = memo(function ProxyOverview({ )} + + {/* DRep Information Card - Only show when a proxy is selected */} + {selectedProxy && ( + + +
+
+ +
+
+

Proxy DRep Information

+

Delegated Representative details

+
+
+ +
+ {/* DRep ID */} +
+
+ + DRep ID + {selectedProxyDrepId && ( + + )} +
+
+ {drepLoading ? ( +
+ ) : selectedProxyDrepId ? ( + selectedProxyDrepId + ) : ( + Not registered + )} +
+
+ + {/* DRep Status */} +
+
+ {selectedProxyDrepStatus?.active ? ( + + ) : ( + + )} + Status +
+
+ {drepLoading ? ( +
+ ) : selectedProxyDrepStatus ? ( +
+ + {selectedProxyDrepStatus.active ? 'Active' : 'Inactive'} + + {selectedProxyDrepStatus.amount && ( + + {Math.round(Number(selectedProxyDrepStatus.amount) / 1000000).toLocaleString()} ₳ + + )} +
+ ) : ( + Not registered + )} +
+
+
+
+
+ )}
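The card above relies on only two fields of the object returned by getDrepStatus(): active and amount (the voting power in lovelace, returned as a string). A small typing and formatting sketch covering exactly those fields; any further fields in the provider response are provider-specific and not assumed here:

    // Minimal shape consumed by the DRep information card.
    interface ProxyDrepStatus {
      active: boolean;
      amount?: string;   // voting power in lovelace
    }

    // Same conversion the card performs: lovelace -> whole ADA, locale formatted.
    function formatVotingPower(status: ProxyDrepStatus | null): string {
      if (!status?.amount) return "Not registered";
      return `${Math.round(Number(status.amount) / 1_000_000).toLocaleString()} ₳`;
    }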
{/* Proxy Carousel */} diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index d68c83c8..114e7e07 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -17,6 +17,7 @@ import { applyCborEncoding, applyParamsToScript, resolveScriptHashDRepId, + MeshTxBuilder, } from "@meshsdk/core"; import { parseDatumCbor } from "@meshsdk/core-cst"; @@ -375,12 +376,23 @@ export class MeshProxyContract extends MeshTxInitiator { return txHex; }; - registerProxyDrep = async (anchorUrl: string, anchorHash: string, msUtxos?: UTxO[], msWalletAddress?: string ) => { + manageProxyDrep = async ( + action: "register" | "deregister" | "update", + anchorUrl?: string, + anchorHash?: string, + msUtxos?: UTxO[], + msWalletAddress?: string, + ) => { if (this.proxyAddress === undefined) { throw new Error("Proxy address not set. Please setupProxy first."); } - if (!anchorUrl || !anchorHash) { - throw new Error("Anchor URL and hash are required"); + if ( + (action === "register" || action === "update") && + (!anchorUrl || !anchorHash) + ) { + throw new Error( + "Anchor URL and hash are required for register and update actions", + ); } if (this.msCbor && !msUtxos && !msWalletAddress) { throw new Error( @@ -405,6 +417,7 @@ export class MeshProxyContract extends MeshTxInitiator { if (this.proxyAddress === undefined) { throw new Error("Proxy address not set. Please setupProxy first."); } + const blockchainProvider = this.mesh.fetcher; if (!blockchainProvider) { throw new Error("Blockchain provider not found"); @@ -429,31 +442,142 @@ export class MeshProxyContract extends MeshTxInitiator { throw new Error("No AuthToken amount found"); } - const drepId = resolveScriptHashDRepId(this.getProxyCbor()); + const proxyCbor = this.getProxyCbor(); + const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); + const drepId = resolveScriptHashDRepId(proxyScriptHash); + const txHex = await this.mesh; + txHex.txIn( + authTokenUtxo.input.txHash, + authTokenUtxo.input.outputIndex, + authTokenUtxo.output.amount, + authTokenUtxo.output.address, + ); + + if (this.msCbor) { + txHex.txInScript(this.msCbor); + } + txHex.txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ); + + // add more utxo inputs until the required amount is reached, use utxos list. + // Register requires 505 ADA, deregister and update only need 2 ADA + const requiredAmount = + action === "register" ? 
BigInt(505000000) : BigInt(2000000); + let totalAmount = BigInt(0); + for (const utxo of utxos) { + if (totalAmount >= requiredAmount) { + break; + } + txHex.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + if (this.msCbor) { + txHex.txInScript(this.msCbor); + } + totalAmount += BigInt( + utxo.output.amount.find((asset: any) => asset.unit === "lovelace") + ?.quantity || "0", + ); + } + + txHex.txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]); + + // Add the appropriate certificate based on action + if (action === "register") { + txHex.drepRegistrationCertificate(drepId, { + anchorUrl: anchorUrl!, + anchorDataHash: anchorHash!, + }); + } else if (action === "deregister") { + txHex.drepDeregistrationCertificate(drepId, "500000000"); + } else if (action === "update") { + txHex.drepUpdateCertificate(drepId, { + anchorUrl: anchorUrl!, + anchorDataHash: anchorHash!, + }); + } + txHex - .txIn( - authTokenUtxo.input.txHash, - authTokenUtxo.input.outputIndex, - authTokenUtxo.output.amount, - authTokenUtxo.output.address, - ) - .txInCollateral( - collateral.input.txHash, - collateral.input.outputIndex, - collateral.output.amount, - collateral.output.address, - ) - .txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]) - .drepRegistrationCertificate(drepId, { - anchorUrl: anchorUrl, - anchorDataHash: anchorHash, - }) - .certificateScript(this.getProxyCbor()) + .certificateScript(this.getProxyCbor(), "V3") + .certificateRedeemerValue(mConStr0([])) .changeAddress(walletAddress); + return txHex; }; + /** + * Register a proxy DRep + * + * @param anchorUrl - URL for the DRep metadata + * @param anchorHash - Hash of the DRep metadata + * @param msUtxos - Optional multisig UTxOs + * @param msWalletAddress - Optional multisig wallet address + * @returns - Transaction hex for signing + */ + registerProxyDrep = async ( + anchorUrl: string, + anchorHash: string, + msUtxos?: UTxO[], + msWalletAddress?: string, + ) => { + return this.manageProxyDrep( + "register", + anchorUrl, + anchorHash, + msUtxos, + msWalletAddress, + ); + }; + + /** + * Deregister a proxy DRep + * + * @param msUtxos - Optional multisig UTxOs + * @param msWalletAddress - Optional multisig wallet address + * @returns - Transaction hex for signing + */ + deregisterProxyDrep = async (msUtxos?: UTxO[], msWalletAddress?: string) => { + return this.manageProxyDrep( + "deregister", + undefined, + undefined, + msUtxos, + msWalletAddress, + ); + }; + + /** + * Update a proxy DRep + * + * @param anchorUrl - URL for the DRep metadata + * @param anchorHash - Hash of the DRep metadata + * @param msUtxos - Optional multisig UTxOs + * @param msWalletAddress - Optional multisig wallet address + * @returns - Transaction hex for signing + */ + updateProxyDrep = async ( + anchorUrl: string, + anchorHash: string, + msUtxos?: UTxO[], + msWalletAddress?: string, + ) => { + return this.manageProxyDrep( + "update", + anchorUrl, + anchorHash, + msUtxos, + msWalletAddress, + ); + }; + /** * Fetch the balance of the proxy address * @@ -497,4 +621,189 @@ export class MeshProxyContract extends MeshTxInitiator { return balance; }; + + getDrepId = () => { + const proxyCbor = this.getProxyCbor(); + const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); + return resolveScriptHashDRepId(proxyScriptHash); + }; + + getDrepStatus = async () => { + const drepId = this.getDrepId(); + const drepStatus = await this.mesh.fetcher?.get( + `/governance/dreps/${drepId}`, + ); + return drepStatus; + }; + + /** + 
* Vote on governance proposals using proxy DRep + * @param votes Array of vote objects with proposalId, voteKind, and optional metadata + * @param msUtxos Multisig UTxOs for transaction inputs (optional) + * @param msWalletAddress Multisig wallet address (optional) + * @returns Transaction builder + */ + voteProxyDrep = async ( + votes: Array<{ + proposalId: string; + voteKind: "Yes" | "No" | "Abstain"; + metadata?: any; + }>, + msUtxos?: UTxO[], + msWalletAddress?: string, + ): Promise => { + if (!votes || votes.length === 0) { + throw new Error("No votes provided"); + } + + // Get wallet info for transaction + const walletInfo = await this.getWalletInfoForTx(); + + // Use multisig inputs if provided, otherwise use regular wallet + const utxos = msUtxos || walletInfo.utxos; + const walletAddress = msWalletAddress || walletInfo.walletAddress; + + // Always get collateral from user's regular wallet + let collateral: UTxO; + try { + const collateralInfo = await this.getWalletInfoForTx(); + const foundCollateral = collateralInfo.utxos.find((utxo: UTxO) => + utxo.output.amount.some( + (amount: any) => + amount.unit === "lovelace" && + BigInt(amount.quantity) >= BigInt(5000000), + ), + ); + if (!foundCollateral) { + throw new Error( + "No suitable collateral UTxO found in regular wallet. Please add at least 5 ADA to your regular wallet.", + ); + } + collateral = foundCollateral; + } catch (error) { + throw new Error( + "Failed to get collateral from regular wallet. Please ensure you have at least 5 ADA in your regular wallet for transaction collateral.", + ); + } + + if (!walletAddress) { + throw new Error("No wallet address found"); + } + if (!collateral) { + throw new Error("No collateral found"); + } + if (this.proxyAddress === undefined) { + throw new Error("Proxy address not set. Please setupProxy first."); + } + + const blockchainProvider = this.mesh.fetcher; + if (!blockchainProvider) { + throw new Error("Blockchain provider not found"); + } + + const paramScriptAT = this.getAuthTokenCbor(); + const policyIdAT = resolveScriptHash(paramScriptAT, "V3"); + const authTokenUtxos = utxos.filter((utxo) => + utxo.output.amount.some((asset) => asset.unit === policyIdAT), + ); + + if (!authTokenUtxos || authTokenUtxos.length === 0) { + throw new Error("No AuthToken found at control wallet address"); + } + + const authTokenUtxo = authTokenUtxos[0]; + if (!authTokenUtxo) { + throw new Error("No AuthToken found"); + } + const authTokenUtxoAmt = authTokenUtxo.output.amount; + if (!authTokenUtxoAmt) { + throw new Error("No AuthToken amount found"); + } + + const proxyCbor = this.getProxyCbor(); + const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); + const drepId = resolveScriptHashDRepId(proxyScriptHash); + + const txHex = await this.mesh; + + // 1. Add AuthToken UTxO first (following manageProxyDrep pattern) + txHex.txIn( + authTokenUtxo.input.txHash, + authTokenUtxo.input.outputIndex, + authTokenUtxo.output.amount, + authTokenUtxo.output.address, + ); + + if (this.msCbor) { + txHex.txInScript(this.msCbor); + } + + // 2. Add collateral + txHex.txInCollateral( + collateral.input.txHash, + collateral.input.outputIndex, + collateral.output.amount, + collateral.output.address, + ); + + // 3. 
Add additional UTxOs if needed (for voting fees) + const requiredAmount = BigInt(2000000); // 2 ADA for voting + let totalAmount = BigInt(0); + for (const utxo of utxos) { + if (totalAmount >= requiredAmount) { + break; + } + txHex.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + if (this.msCbor) { + txHex.txInScript(this.msCbor); + } + totalAmount += BigInt( + utxo.output.amount.find((asset: any) => asset.unit === "lovelace") + ?.quantity || "0", + ); + } + + // 4. Add output (return AuthToken) + txHex.txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]); + + console.log("votes", votes); + console.log("txHex", txHex); + + // 5. Add votes for each proposal + for (const vote of votes) { + const [txHash, certIndex] = vote.proposalId.split("#"); + if (!txHash || certIndex === undefined) { + throw new Error(`Invalid proposal ID format: ${vote.proposalId}`); + } + + txHex + .votePlutusScriptV3() + .vote( + { + type: "DRep", + drepId: drepId, + }, + { + txHash: txHash, + txIndex: parseInt(certIndex), + }, + { + voteKind: vote.voteKind, + }, + ) + .voteScript(this.getProxyCbor()) + .voteRedeemerValue("") + } + + // 6. Add certificate script and redeemer (following manageProxyDrep pattern) + txHex + .changeAddress(walletAddress); + + return txHex; + }; } diff --git a/src/components/pages/wallet/governance/ballot/ballot.tsx b/src/components/pages/wallet/governance/ballot/ballot.tsx index d0b2b997..3a983d04 100644 --- a/src/components/pages/wallet/governance/ballot/ballot.tsx +++ b/src/components/pages/wallet/governance/ballot/ballot.tsx @@ -6,6 +6,8 @@ import type { Quantity, Unit, UTxO } from "@meshsdk/core"; import { useWalletsStore } from "@/lib/zustand/wallets"; import useMultisigWallet from "@/hooks/useMultisigWallet"; import { useToast } from "@/hooks/use-toast"; +import { useWallet } from "@meshsdk/react"; +import { useUserStore } from "@/lib/zustand/user"; import React, { useState } from "react"; import CardUI from "@/components/ui/card-content"; import { @@ -19,6 +21,8 @@ import { import { Button } from "@/components/ui/button"; import { api } from "@/utils/api"; import { ToastAction } from "@/components/ui/toast"; +import { useProxy } from "@/hooks/useProxy"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; const GovAction = 1; @@ -62,6 +66,20 @@ export default function BallotCard({ const { multisigWallet } = useMultisigWallet(); const [loading, setLoading] = useState(false); + // Proxy state + const { isProxyEnabled, selectedProxyId } = useProxy(); + const { wallet } = useWallet(); + const userAddress = useUserStore((state) => state.userAddress); + + // Get proxies for proxy mode + const { data: proxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); + // CreateBallot mutation const createBallot = api.ballot.create.useMutation(); // Get ballots for wallet @@ -86,6 +104,124 @@ export default function BallotCard({ }, [getBallots.data]); + // Proxy ballot vote submission logic + async function handleSubmitProxyVote() { + if (!selectedBallot || !Array.isArray(selectedBallot.items) || selectedBallot.items.length === 0) { + toast({ + title: "No proposals in ballot", + description: "There are no proposals to vote on in this ballot.", + duration: 2000, + }); + return; + } + if (!utxos || utxos.length === 0) { + toast({ + title: "No UTxOs available", + description: 
"No UTxOs are available to build the transaction.", + duration: 2000, + }); + return; + } + if (!isProxyEnabled || !selectedProxyId) { + toast({ + title: "Proxy Error", + description: "Proxy mode not enabled or no proxy selected", + variant: "destructive", + }); + return; + } + + setLoading(true); + try { + // Get the selected proxy + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + if (!proxy) { + toast({ + title: "Proxy Error", + description: "Selected proxy not found", + variant: "destructive", + }); + return; + } + + // Create proxy contract instance + const meshTxBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: meshTxBuilder, + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet.scriptCbor, + ); + proxyContract.proxyAddress = proxy.proxyAddress; + + // Prepare votes array + const votes = selectedBallot.items.map((proposalId: string, index: number) => ({ + proposalId, + voteKind: (selectedBallot.choices?.[index] ?? "Abstain") as "Yes" | "No" | "Abstain", + })); + + // Vote using proxy + const txBuilder = await proxyContract.voteProxyDrep(votes, utxos, multisigWallet?.getScript().address); + + await newTransaction({ + txBuilder: txBuilder, + description: `Proxy Ballot Vote: ${selectedBallot.description || ""}`, + toastMessage: "Proxy ballot vote transaction has been created", + }); + + toast({ + title: "Proxy Ballot Vote Successful", + description: `Your proxy ballot vote has been recorded.`, + duration: 5000, + }); + + setAlert("Proxy ballot vote transaction successfully created!"); + await getBallots.refetch(); + onBallotChanged?.(); + } catch (error: unknown) { + if ( + error instanceof Error && + error.message.includes("User rejected transaction") + ) { + toast({ + title: "Transaction Aborted", + description: "You canceled the proxy ballot vote transaction.", + duration: 1000, + }); + } else { + toast({ + title: "Proxy Ballot Vote Failed", + description: `Error: ${error instanceof Error ? error.message : String(error)}`, + duration: 10000, + action: ( + { + navigator.clipboard.writeText(JSON.stringify(error)); + toast({ + title: "Error Copied", + description: "Error details copied to clipboard.", + duration: 5000, + }); + }} + > + Copy Error + + ), + variant: "destructive", + }); + console.error("Proxy ballot vote transaction error:", error); + } + } finally { + setLoading(false); + } + } + // Ballot vote submission logic async function handleSubmitVote() { if (!selectedBallot || !Array.isArray(selectedBallot.items) || selectedBallot.items.length === 0) { @@ -350,12 +486,23 @@ export default function BallotCard({ )} {selectedBallot && ( <> + {isProxyEnabled && !selectedProxyId && ( +
+

+ Proxy Mode Active: Select a proxy to continue

+

+ Go to the Proxy Control panel above and select a proxy to enable ballot voting. +

+
+ )} @@ -117,14 +221,14 @@ export default function CardInfo({ appWallet }: { appWallet: Wallet }) { {/* Voting Power */} - {isDRepRegistered && ( + {isDRepRegistered && currentDrepInfo && (
- {Math.round(Number(drepInfo.amount) / 1000000) + {Math.round(Number(currentDrepInfo.amount) / 1000000) .toString() .replace(/\B(?=(\d{3})+(?!\d))/g, ' ')} ₳ @@ -153,7 +257,7 @@ export default function CardInfo({ appWallet }: { appWallet: Wallet }) { Update DRep - + + {isProxyMode && ( +

+ This will create a multisig transaction for proxy DRep registration.

+ )}
); diff --git a/src/components/pages/wallet/governance/drep/registerDrep.tsx b/src/components/pages/wallet/governance/drep/registerDrep.tsx index 44557189..03cd137b 100644 --- a/src/components/pages/wallet/governance/drep/registerDrep.tsx +++ b/src/components/pages/wallet/governance/drep/registerDrep.tsx @@ -1,5 +1,5 @@ import { Plus } from "lucide-react"; -import { useState } from "react"; +import { useState, useCallback } from "react"; import useAppWallet from "@/hooks/useAppWallet"; import { useWallet } from "@meshsdk/react"; import { useUserStore } from "@/lib/zustand/user"; @@ -13,6 +13,12 @@ import { getFile, hashDrepAnchor } from "@meshsdk/core"; import type { UTxO } from "@meshsdk/core"; import router from "next/router"; import useMultisigWallet from "@/hooks/useMultisigWallet"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; +import { getProvider } from "@/utils/get-provider"; +import { Label } from "@/components/ui/label"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; +import { api } from "@/utils/api"; +import { useProxy } from "@/hooks/useProxy"; interface PutResponse { url: string; @@ -20,13 +26,23 @@ interface PutResponse { export default function RegisterDRep() { const { appWallet } = useAppWallet(); - const { connected } = useWallet(); + const { connected, wallet } = useWallet(); const userAddress = useUserStore((state) => state.userAddress); const network = useSiteStore((state) => state.network); const loading = useSiteStore((state) => state.loading); const setLoading = useSiteStore((state) => state.setLoading); const { newTransaction } = useTransaction(); const { multisigWallet } = useMultisigWallet(); + const { isProxyEnabled, selectedProxyId, setSelectedProxy } = useProxy(); + + // Get proxies for the current wallet + const { data: proxies, isLoading: proxiesLoading } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); const [manualUtxos, setManualUtxos] = useState([]); const [formState, setFormState] = useState({ @@ -42,6 +58,17 @@ export default function RegisterDRep() { identities: [""], }); + // Helper to resolve inputs for multisig controlled txs + const getMsInputs = useCallback(async (): Promise<{ utxos: UTxO[]; walletAddress: string }> => { + if (!multisigWallet?.getScript().address) { + throw new Error("Multisig wallet address not available"); + } + if (!manualUtxos || manualUtxos.length === 0) { + throw new Error("No UTxOs selected. 
Please select UTxOs from the selector."); + } + return { utxos: manualUtxos, walletAddress: multisigWallet.getScript().address }; + }, [multisigWallet?.getScript().address, manualUtxos]); + async function createAnchor(): Promise<{ anchorUrl: string; anchorHash: string; @@ -139,6 +166,58 @@ export default function RegisterDRep() { setLoading(false); } + async function registerProxyDrep(): Promise { + if (!connected || !userAddress || !multisigWallet || !appWallet) { + throw new Error("Multisig wallet not connected"); + } + + if (!selectedProxyId) { + throw new Error("Please select a proxy for registration"); + } + + setLoading(true); + try { + const { anchorUrl, anchorHash } = await createAnchor(); + + // Get multisig inputs + const { utxos, walletAddress } = await getMsInputs(); + + // Get the selected proxy + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + if (!proxy) { + throw new Error("Selected proxy not found"); + } + + // Create proxy contract instance with the selected proxy + const txBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet.scriptCbor, + ); + proxyContract.proxyAddress = proxy.proxyAddress; + + // Register DRep using proxy + const txHex = await proxyContract.registerProxyDrep(anchorUrl, anchorHash, utxos, walletAddress); + + await newTransaction({ + txBuilder: txHex, + description: "Proxy DRep registration", + toastMessage: "Proxy DRep registration transaction has been created", + }); + } catch (e) { + console.error(e); + } + router.push(`/wallets/${appWallet.id}/governance`); + setLoading(false); + } + return (
@@ -146,6 +225,68 @@ export default function RegisterDRep() { Register DRep +
+ {/* Global Proxy Status */} +
+
+ + {isProxyEnabled ? 'Proxy Mode Enabled' : 'Standard Mode'} + + + {isProxyEnabled + ? 'DRep will be registered using a proxy contract' + : 'DRep will be registered directly' + } + +
+ + {/* Proxy Configuration */} + {isProxyEnabled && ( +
+

+ This will register the DRep using a proxy contract, allowing for more flexible governance control. +

+ {proxies && proxies.length > 0 ? ( +
+ + +
+ ) : ( +
+ {proxiesLoading ? "Loading proxies..." : "No proxies available. Please create a proxy first."} +
+ )} +
+ )} + + {/* Standard Mode Info */} + {!isProxyEnabled && ( +
+

+ DRep will be registered directly to your multisig wallet. + To use proxy registration, enable proxy mode in the Proxy Control panel. +

+
+ )} +
{appWallet && ( )}
diff --git a/src/components/pages/wallet/governance/drep/retire.tsx b/src/components/pages/wallet/governance/drep/retire.tsx index 88ed5679..bfe9ca29 100644 --- a/src/components/pages/wallet/governance/drep/retire.tsx +++ b/src/components/pages/wallet/governance/drep/retire.tsx @@ -3,85 +3,267 @@ import { Wallet } from "@/types/wallet"; import { useSiteStore } from "@/lib/zustand/site"; import { getProvider } from "@/utils/get-provider"; import { getTxBuilder } from "@/utils/get-tx-builder"; -import { keepRelevant, Quantity, Unit } from "@meshsdk/core"; +import { keepRelevant, Quantity, Unit, UTxO } from "@meshsdk/core"; import { useWallet } from "@meshsdk/react"; import { useUserStore } from "@/lib/zustand/user"; import { useWalletsStore } from "@/lib/zustand/wallets"; import useTransaction from "@/hooks/useTransaction"; import useMultisigWallet from "@/hooks/useMultisigWallet"; +import { useProxy } from "@/hooks/useProxy"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; +import { api } from "@/utils/api"; +import { useCallback } from "react"; +import { useToast } from "@/hooks/use-toast"; -export default function Retire({ appWallet }: { appWallet: Wallet }) { +export default function Retire({ appWallet, manualUtxos }: { appWallet: Wallet; manualUtxos: UTxO[] }) { const network = useSiteStore((state) => state.network); - const { connected } = useWallet(); + const { wallet, connected } = useWallet(); const userAddress = useUserStore((state) => state.userAddress); const drepInfo = useWalletsStore((state) => state.drepInfo); const { newTransaction } = useTransaction(); const loading = useSiteStore((state) => state.loading); const setLoading = useSiteStore((state) => state.setLoading); const { multisigWallet } = useMultisigWallet(); + const { isProxyEnabled, selectedProxyId } = useProxy(); + const { toast } = useToast(); - async function retireDrep() { - if (!connected) throw new Error("Not connected to wallet"); - if (!userAddress) throw new Error("No user address"); - if (!multisigWallet) throw new Error("Multisig Wallet could not be built."); - setLoading(true); + // Get proxies for proxy mode + const { data: proxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); - const blockchainProvider = getProvider(network); - const utxos = await blockchainProvider.fetchAddressUTxOs(multisigWallet.getScript().address); - - const assetMap = new Map(); - assetMap.set("lovelace", "5000000"); - const selectedUtxos = keepRelevant(assetMap, utxos); - if (selectedUtxos.length === 0) throw new Error("No relevant UTxOs found"); - - const txBuilder = getTxBuilder(network); - const dRepId = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepId() : appWallet?.dRepId; - const scriptCbor = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getScript().scriptCbor : appWallet.scriptCbor; - const drepCbor = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepScript() : appWallet.scriptCbor; - const changeAddress = multisigWallet?.getKeysByRole(3) ? 
multisigWallet?.getScript().address : appWallet.address; - if (!changeAddress) { - throw new Error("Change address not found"); + // Helper function to get multisig inputs (like in register component) + const getMsInputs = useCallback(async (): Promise<{ utxos: UTxO[]; walletAddress: string }> => { + if (!multisigWallet?.getScript().address) { + throw new Error("Multisig wallet address not available"); } - if (!scriptCbor) { - throw new Error("Script not found"); + if (!manualUtxos || manualUtxos.length === 0) { + throw new Error("No UTxOs selected. Please select UTxOs from the selector."); } - if (!drepCbor) { - throw new Error("DRep script not found"); + return { utxos: manualUtxos, walletAddress: multisigWallet.getScript().address }; + }, [multisigWallet?.getScript().address, manualUtxos]); + + async function retireProxyDrep(): Promise { + if (!connected || !userAddress || !multisigWallet || !appWallet) { + toast({ + title: "Connection Error", + description: "Multisig wallet not connected", + variant: "destructive", + }); + return; } - if (!dRepId) { - throw new Error("DRep not found"); + if (!isProxyEnabled || !selectedProxyId) { + toast({ + title: "Proxy Error", + description: "Proxy mode not enabled or no proxy selected", + variant: "destructive", + }); + return; } - for (const utxo of selectedUtxos) { - txBuilder.txIn( - utxo.input.txHash, - utxo.input.outputIndex, - utxo.output.amount, - utxo.output.address, + + setLoading(true); + + try { + // Get the selected proxy + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + if (!proxy) { + toast({ + title: "Proxy Error", + description: "Selected proxy not found", + variant: "destructive", + }); + return; + } + + // Get multisig inputs + let utxos, walletAddress; + try { + const inputs = await getMsInputs(); + utxos = inputs.utxos; + walletAddress = inputs.walletAddress; + } catch (error) { + toast({ + title: "UTxO Selection Error", + description: error instanceof Error ? error.message : "Failed to get multisig inputs", + variant: "destructive", + }); + return; + } + + // Create proxy contract instance + const txBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet.scriptCbor, ); + proxyContract.proxyAddress = proxy.proxyAddress; + + // Deregister DRep using proxy + const txHex = await proxyContract.deregisterProxyDrep(utxos, walletAddress); + + await newTransaction({ + txBuilder: txHex, + description: "Proxy DRep retirement", + toastMessage: "Proxy DRep retirement transaction has been created", + }); + } catch (error) { + console.error("Proxy DRep retirement error:", error); + toast({ + title: "Proxy DRep Retirement Failed", + description: error instanceof Error ? 
error.message : "An unexpected error occurred", + variant: "destructive", + }); + } finally { + setLoading(false); + } + } + + async function retireDrep() { + if (!connected) { + toast({ + title: "Connection Error", + description: "Not connected to wallet", + variant: "destructive", + }); + return; } + if (!userAddress) { + toast({ + title: "User Error", + description: "No user address", + variant: "destructive", + }); + return; + } + if (!multisigWallet) { + toast({ + title: "Wallet Error", + description: "Multisig Wallet could not be built", + variant: "destructive", + }); + return; + } + + setLoading(true); + + try { + const blockchainProvider = getProvider(network); + const utxos = await blockchainProvider.fetchAddressUTxOs(multisigWallet.getScript().address); + + const assetMap = new Map(); + assetMap.set("lovelace", "5000000"); + const selectedUtxos = keepRelevant(assetMap, utxos); + if (selectedUtxos.length === 0) { + toast({ + title: "UTxO Error", + description: "No relevant UTxOs found", + variant: "destructive", + }); + return; + } - txBuilder - .txInScript(scriptCbor) - .changeAddress(changeAddress) - .drepDeregistrationCertificate(dRepId, "500000000") - .certificateScript(drepCbor); - - await newTransaction({ - txBuilder, - description: "DRep retirement", - toastMessage: "DRep retirement transaction has been created", - }); + const txBuilder = getTxBuilder(network); + const dRepId = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepId() : appWallet?.dRepId; + const scriptCbor = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getScript().scriptCbor : appWallet.scriptCbor; + const drepCbor = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepScript() : appWallet.scriptCbor; + const changeAddress = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getScript().address : appWallet.address; + + if (!changeAddress) { + toast({ + title: "Address Error", + description: "Change address not found", + variant: "destructive", + }); + return; + } + if (!scriptCbor) { + toast({ + title: "Script Error", + description: "Script not found", + variant: "destructive", + }); + return; + } + if (!drepCbor) { + toast({ + title: "DRep Script Error", + description: "DRep script not found", + variant: "destructive", + }); + return; + } + if (!dRepId) { + toast({ + title: "DRep Error", + description: "DRep not found", + variant: "destructive", + }); + return; + } + + for (const utxo of selectedUtxos) { + txBuilder.txIn( + utxo.input.txHash, + utxo.input.outputIndex, + utxo.output.amount, + utxo.output.address, + ); + } + + txBuilder + .txInScript(scriptCbor) + .changeAddress(changeAddress) + .drepDeregistrationCertificate(dRepId, "500000000") + .certificateScript(drepCbor); + + await newTransaction({ + txBuilder, + description: "DRep retirement", + toastMessage: "DRep retirement transaction has been created", + }); + } catch (error) { + console.error("DRep retirement error:", error); + toast({ + title: "DRep Retirement Failed", + description: error instanceof Error ? error.message : "An unexpected error occurred", + variant: "destructive", + }); + } finally { + setLoading(false); + } } return (
+ + {isProxyEnabled && !selectedProxyId && ( +
+

+ Proxy Mode Active: Select a proxy to continue

+

+ Go to the Proxy Control panel above and select a proxy to enable DRep retirement. +

+
+ )}
); } diff --git a/src/components/pages/wallet/governance/drep/updateDrep.tsx b/src/components/pages/wallet/governance/drep/updateDrep.tsx index c0145a43..2b117ef7 100644 --- a/src/components/pages/wallet/governance/drep/updateDrep.tsx +++ b/src/components/pages/wallet/governance/drep/updateDrep.tsx @@ -1,5 +1,5 @@ import { Minus } from "lucide-react"; -import { useState } from "react"; +import { useState, useCallback } from "react"; import useAppWallet from "@/hooks/useAppWallet"; import { useWallet } from "@meshsdk/react"; import { useUserStore } from "@/lib/zustand/user"; @@ -13,6 +13,10 @@ import { getFile, hashDrepAnchor } from "@meshsdk/core"; import type { UTxO } from "@meshsdk/core"; import router from "next/router"; import useMultisigWallet from "@/hooks/useMultisigWallet"; +import { useProxy } from "@/hooks/useProxy"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; +import { api } from "@/utils/api"; +import { getProvider } from "@/utils/get-provider"; interface PutResponse { url: string; @@ -20,15 +24,37 @@ interface PutResponse { export default function UpdateDRep() { const { appWallet } = useAppWallet(); - const { connected } = useWallet(); + const { wallet, connected } = useWallet(); const userAddress = useUserStore((state) => state.userAddress); const network = useSiteStore((state) => state.network); const loading = useSiteStore((state) => state.loading); const setLoading = useSiteStore((state) => state.setLoading); const { newTransaction } = useTransaction(); const { multisigWallet } = useMultisigWallet(); + const { isProxyEnabled, selectedProxyId } = useProxy(); + // UTxO selection state const [manualUtxos, setManualUtxos] = useState([]); + + // Get proxies for proxy mode + const { data: proxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); + + // Helper function to get multisig inputs (like in register component) + const getMsInputs = useCallback(async (): Promise<{ utxos: UTxO[]; walletAddress: string }> => { + if (!multisigWallet?.getScript().address) { + throw new Error("Multisig wallet address not available"); + } + if (!manualUtxos || manualUtxos.length === 0) { + throw new Error("No UTxOs selected. 
Please select UTxOs from the selector."); + } + return { utxos: manualUtxos, walletAddress: multisigWallet.getScript().address }; + }, [multisigWallet?.getScript().address, manualUtxos]); const [formState, setFormState] = useState({ givenName: "", bio: "", @@ -78,6 +104,62 @@ export default function UpdateDRep() { return { anchorUrl, anchorHash }; } + async function updateProxyDrep(): Promise { + if (!connected || !userAddress || !multisigWallet || !appWallet) { + throw new Error("Multisig wallet not connected"); + } + if (!isProxyEnabled || !selectedProxyId) { + throw new Error("Proxy mode not enabled or no proxy selected"); + } + + setLoading(true); + + try { + // Get the selected proxy + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + if (!proxy) { + throw new Error("Selected proxy not found"); + } + + // Create anchor metadata + const { anchorUrl, anchorHash } = await createAnchor(); + + // Get multisig inputs + const { utxos, walletAddress } = await getMsInputs(); + + // Create proxy contract instance + const txBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet.scriptCbor, + ); + proxyContract.proxyAddress = proxy.proxyAddress; + + // Update DRep using proxy + const txHex = await proxyContract.updateProxyDrep(anchorUrl, anchorHash, utxos, walletAddress); + + await newTransaction({ + txBuilder: txHex, + description: "Proxy DRep update", + toastMessage: "Proxy DRep update transaction has been created", + }); + + router.push(`/wallets/${appWallet.id}/governance`); + } catch (error) { + console.error("Proxy DRep update error:", error); + throw error; + } finally { + setLoading(false); + } + } + async function updateDrep(): Promise { if (!connected || !userAddress || !multisigWallet || !appWallet) throw new Error("Multisig wallet not connected"); @@ -192,8 +274,9 @@ export default function UpdateDRep() { // This function is intentionally left empty. }} loading={loading} - onSubmit={updateDrep} + onSubmit={isProxyEnabled ? updateProxyDrep : updateDrep} mode="update" + isProxyMode={isProxyEnabled} /> )}
diff --git a/src/components/pages/wallet/governance/index.tsx b/src/components/pages/wallet/governance/index.tsx index 8322fb78..caac526b 100644 --- a/src/components/pages/wallet/governance/index.tsx +++ b/src/components/pages/wallet/governance/index.tsx @@ -28,7 +28,7 @@ export default function PageGovernance() { return (
{/* Info section */} - + {/* Proposals section right under info */} state.userAddress); + + // Get proxies for proxy mode + const { data: proxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + userAddress: userAddress || undefined, + }, + { enabled: !!(appWallet?.id || userAddress) } + ); + + async function voteProxy() { + if (!isProxyEnabled || !selectedProxyId) { + toast({ + title: "Proxy Error", + description: "Proxy mode not enabled or no proxy selected", + variant: "destructive", + }); + return; + } + + setLoading(true); + try { + // Get the selected proxy + const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + if (!proxy) { + toast({ + title: "Proxy Error", + description: "Selected proxy not found", + variant: "destructive", + }); + return; + } + + // Create proxy contract instance + const txBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: wallet, + networkId: network, + }, + { + paramUtxo: JSON.parse(proxy.paramUtxo), + }, + appWallet.scriptCbor, + ); + proxyContract.proxyAddress = proxy.proxyAddress; + + // Prepare vote + const vote = { + proposalId, + voteKind: voteKind, + }; + + // Vote using proxy + const txBuilderResult = await proxyContract.voteProxyDrep([vote], utxos, multisigWallet?.getScript().address); + + await newTransaction({ + txBuilder: txBuilderResult, + description: `Proxy Vote: ${voteKind} - ${description}`, + metadataValue: metadata ? { label: "674", value: metadata } : undefined, + }); + + toast({ + title: "Proxy Vote Successful", + description: `Your proxy vote (${voteKind}) has been recorded.`, + duration: 5000, + }); + + setAlert("Proxy vote transaction successfully created!"); + } catch (error) { + if ( + error instanceof Error && + error.message.includes("User rejected transaction") + ) { + toast({ + title: "Transaction Aborted", + description: "You canceled the proxy vote transaction.", + duration: 1000, + }); + } else { + toast({ + title: "Proxy Vote Failed", + description: `Error: ${error instanceof Error ? error.message : String(error)}`, + duration: 10000, + action: ( + { + navigator.clipboard.writeText(JSON.stringify(error)); + toast({ + title: "Error Copied", + description: "Error details copied to clipboard.", + duration: 5000, + }); + }} + > + Copy Error + + ), + variant: "destructive", + }); + console.error("Proxy vote transaction error:", error); + } + } finally { + setLoading(false); + } + } + async function vote() { if (drepInfo === undefined) { setAlert("DRep not found"); @@ -269,12 +386,23 @@ export default function VoteButton({ + {isProxyEnabled && !selectedProxyId && ( +
+

+ Proxy Mode Active: Select a proxy to continue

+

+ Go to the Proxy Control panel above and select a proxy to enable voting. +

+
+ )} + {selectedBallotId && ( diff --git a/src/hooks/useProxy.ts b/src/hooks/useProxy.ts new file mode 100644 index 00000000..44107df0 --- /dev/null +++ b/src/hooks/useProxy.ts @@ -0,0 +1,48 @@ +import { create } from "zustand"; +import { persist } from "zustand/middleware"; + +interface ProxyState { + isProxyEnabled: boolean; + selectedProxyId: string; + toggleProxy: () => void; + setProxyEnabled: (enabled: boolean) => void; + setSelectedProxy: (proxyId: string) => void; + clearSelectedProxy: () => void; +} + +export const useProxyStore = create()( + persist( + (set) => ({ + isProxyEnabled: false, + selectedProxyId: "", + toggleProxy: () => set((state) => ({ isProxyEnabled: !state.isProxyEnabled })), + setProxyEnabled: (enabled: boolean) => set({ isProxyEnabled: enabled }), + setSelectedProxy: (proxyId: string) => set({ selectedProxyId: proxyId }), + clearSelectedProxy: () => set({ selectedProxyId: "" }), + }), + { + name: "proxy-settings", // unique name for localStorage key + } + ) +); + +// Hook for easy access to proxy state +export const useProxy = () => { + const { + isProxyEnabled, + selectedProxyId, + toggleProxy, + setProxyEnabled, + setSelectedProxy, + clearSelectedProxy + } = useProxyStore(); + + return { + isProxyEnabled, + selectedProxyId, + toggleProxy, + setProxyEnabled, + setSelectedProxy, + clearSelectedProxy, + }; +}; From 06548ad03afa27d77d480077d2910271398a17bc Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 28 Oct 2025 10:36:08 +0100 Subject: [PATCH 13/15] s12 --- prisma/schema.prisma | 20 + .../common/overall-layout/layout.tsx | 1 + .../wallet-data-loader-wrapper.tsx | 51 ++ .../overall-layout/proxy-data-loader.tsx | 103 ++++ .../overall-layout/wallet-data-loader.tsx | 67 +++ .../multisig/inspect-multisig-script.tsx | 53 +- .../multisig/proxy/ProxyControl.tsx | 187 ++----- .../multisig/proxy/ProxyOverview.tsx | 275 ++++----- src/components/multisig/proxy/offchain.ts | 112 +++- .../governance/ballot/ballotOverview.tsx | 1 - .../pages/wallet/governance/card-info.tsx | 447 ++++++++++----- src/components/pages/wallet/info/index.tsx | 2 +- .../pages/wallet/info/migrate-wallet.tsx | 520 ++++++++++++++---- .../info/migration/FundTransferStep.tsx | 307 ++++++++--- .../info/migration/MigrationCompleteStep.tsx | 23 +- .../info/migration/NewWalletCreationStep.tsx | 149 +++-- .../info/migration/ProxyTransferStep.tsx | 233 ++++++++ .../migration/useMigrationWalletFlowState.tsx | 337 ++++++++---- src/hooks/useProxy.ts | 48 +- src/lib/zustand/proxy.ts | 284 ++++++++++ src/server/api/root.ts | 2 + src/server/api/routers/migrations.ts | 152 +++++ src/server/api/routers/proxy.ts | 79 ++- src/server/api/routers/wallets.ts | 52 +- 24 files changed, 2664 insertions(+), 841 deletions(-) create mode 100644 src/components/common/overall-layout/proxy-data-loader.tsx delete mode 100644 src/components/pages/wallet/governance/ballot/ballotOverview.tsx create mode 100644 src/components/pages/wallet/info/migration/ProxyTransferStep.tsx create mode 100644 src/lib/zustand/proxy.ts create mode 100644 src/server/api/routers/migrations.ts diff --git a/prisma/schema.prisma b/prisma/schema.prisma index 6ef89b03..fc50e172 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -123,3 +123,23 @@ model BalanceSnapshot { isArchived Boolean snapshotDate DateTime @default(now()) } + +model Migration { + id String @id @default(cuid()) + originalWalletId String // The wallet being migrated from + newWalletId String? 
// The new wallet being created (null until created) + ownerAddress String // The user who initiated the migration + currentStep Int @default(0) // 0=pre-checks, 1=create wallet, 2=proxy setup, 3=transfer funds, 4=complete + status String @default("pending") // pending, in_progress, completed, failed, cancelled + migrationData Json? // Store any additional migration-specific data + errorMessage String? // Store error details if migration fails + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + completedAt DateTime? + + // Indexes for efficient querying + @@index([ownerAddress]) + @@index([originalWalletId]) + @@index([status]) + @@index([createdAt]) +} diff --git a/src/components/common/overall-layout/layout.tsx b/src/components/common/overall-layout/layout.tsx index a23aa539..8b78317b 100644 --- a/src/components/common/overall-layout/layout.tsx +++ b/src/components/common/overall-layout/layout.tsx @@ -296,6 +296,7 @@ export default function RootLayout({ +
state.setWalletAssetMetadata, ); + const { fetchProxyBalance, fetchProxyDrepInfo, setProxies } = useProxyActions(); const setDrepInfo = useWalletsStore((state) => state.setDrepInfo); @@ -179,6 +181,51 @@ export default function WalletDataLoaderWrapper({ } } + async function fetchProxyData() { + if (appWallet?.id && appWallet?.scriptCbor) { + try { + // Get proxies from API + const proxies = await ctx.proxy.getProxiesByUserOrWallet.fetch({ + walletId: appWallet.id, + }); + + + // First, add proxies to the store + setProxies(appWallet.id, proxies); + + // Fetch balance and DRep info for each proxy + for (const proxy of proxies) { + try { + + // Fetch balance + await fetchProxyBalance( + appWallet.id, + proxy.id, + proxy.proxyAddress, + network.toString() + ); + + // Fetch DRep info + await fetchProxyDrepInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo + ); + + } catch (error) { + console.error(`WalletDataLoaderWrapper: Error fetching data for proxy ${proxy.id}:`, error); + } + } + } catch (error) { + console.error("WalletDataLoaderWrapper: Error fetching proxy data:", error); + } + } + } + async function refreshWallet() { if (fetchingTransactions.current) return; @@ -188,6 +235,7 @@ export default function WalletDataLoaderWrapper({ await getTransactionsOnChain(); await getWalletAssets(); await getDRepInfo(); + await fetchProxyData(); // Fetch proxy data void ctx.transaction.getPendingTransactions.invalidate(); void ctx.transaction.getAllTransactions.invalidate(); // Also refresh proxy data @@ -206,6 +254,9 @@ export default function WalletDataLoaderWrapper({ if (appWallet && prevWalletIdRef.current !== appWallet.id) { refreshWallet(); prevWalletIdRef.current = appWallet.id; + } else if (appWallet) { + // If wallet exists but we already have data, still fetch proxy data + fetchProxyData(); } }, [appWallet]); diff --git a/src/components/common/overall-layout/proxy-data-loader.tsx b/src/components/common/overall-layout/proxy-data-loader.tsx new file mode 100644 index 00000000..13f65240 --- /dev/null +++ b/src/components/common/overall-layout/proxy-data-loader.tsx @@ -0,0 +1,103 @@ +import { useEffect } from "react"; +import useAppWallet from "@/hooks/useAppWallet"; +import { useProxyStore, useProxyData, useProxyActions } from "@/lib/zustand/proxy"; +import { useSiteStore } from "@/lib/zustand/site"; +import { api } from "@/utils/api"; + +export default function ProxyDataLoader() { + const { appWallet } = useAppWallet(); + const network = useSiteStore((state) => state.network); + const { proxies, loading, error } = useProxyData(appWallet?.id); + const { + setProxies, + fetchProxyBalance, + fetchProxyDrepInfo, + clearProxyData + } = useProxyActions(); + + + + // Get proxies from API + const { data: apiProxies, refetch: refetchProxies, isLoading: apiLoading } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + }, + { + enabled: !!appWallet?.id, + refetchOnWindowFocus: false, + staleTime: 30000, // 30 seconds + } + ); + + // Update store when API data changes + useEffect(() => { + if (apiProxies && appWallet?.id) { + const proxyData = apiProxies.map(proxy => ({ + id: proxy.id, + proxyAddress: proxy.proxyAddress, + authTokenId: proxy.authTokenId, + paramUtxo: proxy.paramUtxo, + description: proxy.description, + isActive: proxy.isActive, + createdAt: new Date(proxy.createdAt), + lastUpdated: Date.now(), + })); + + setProxies(appWallet.id, proxyData); + } + }, 
[apiProxies, appWallet?.id, setProxies]); + + // Fetch additional data for each proxy + useEffect(() => { + + if (proxies.length > 0 && appWallet?.id && appWallet?.scriptCbor) { + proxies.forEach(async (proxy) => { + // Only fetch if we don't have recent data (older than 5 minutes) + const isStale = !proxy.lastUpdated || (Date.now() - proxy.lastUpdated) > 5 * 60 * 1000; + + + if (isStale) { + try { + + // Fetch balance + await fetchProxyBalance(appWallet.id, proxy.id, proxy.proxyAddress, network.toString()); + + // Fetch DRep info + await fetchProxyDrepInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo + ); + + + } catch (error) { + console.error(`Error fetching data for proxy ${proxy.id}:`, error); + } + } + }); + } + }, [proxies, appWallet?.id, appWallet?.scriptCbor, network, fetchProxyBalance, fetchProxyDrepInfo]); + + // Clear proxy data when wallet changes + useEffect(() => { + return () => { + if (appWallet?.id) { + clearProxyData(appWallet.id); + } + }; + }, [appWallet?.id, clearProxyData]); + + // Expose refetch function for manual refresh + useEffect(() => { + // Store refetch function in window for global access if needed + if (typeof window !== 'undefined') { + (window as any).refetchProxyData = refetchProxies; + } + }, [refetchProxies]); + + return null; // This is a data loader component, no UI +} diff --git a/src/components/common/overall-layout/wallet-data-loader.tsx b/src/components/common/overall-layout/wallet-data-loader.tsx index b699bfe7..6ce25bdc 100644 --- a/src/components/common/overall-layout/wallet-data-loader.tsx +++ b/src/components/common/overall-layout/wallet-data-loader.tsx @@ -7,6 +7,7 @@ import { useWalletsStore } from "@/lib/zustand/wallets"; import { api } from "@/utils/api"; import { OnChainTransaction, TxInfo } from "@/types/transaction"; import { useSiteStore } from "@/lib/zustand/site"; +import { useProxyActions } from "@/lib/zustand/proxy"; export default function WalletDataLoader() { const { appWallet } = useAppWallet(); @@ -19,6 +20,7 @@ export default function WalletDataLoader() { const ctx = api.useUtils(); const network = useSiteStore((state) => state.network); const setRandomState = useSiteStore((state) => state.setRandomState); + const { fetchProxyBalance, fetchProxyDrepInfo, setProxies } = useProxyActions(); async function fetchUtxos() { if (appWallet) { @@ -53,10 +55,64 @@ export default function WalletDataLoader() { } } + async function fetchProxyData() { + if (appWallet?.id && appWallet?.scriptCbor) { + console.log("WalletDataLoader: Fetching proxy data for wallet", appWallet.id); + + try { + // Get proxies from API + const proxies = await ctx.proxy.getProxiesByUserOrWallet.fetch({ + walletId: appWallet.id, + }); + + console.log("WalletDataLoader: Found proxies", proxies); + + // First, add proxies to the store + setProxies(appWallet.id, proxies); + + // Fetch balance and DRep info for each proxy + for (const proxy of proxies) { + try { + console.log(`WalletDataLoader: Fetching data for proxy ${proxy.id}`); + + // Fetch balance + await fetchProxyBalance( + appWallet.id, + proxy.id, + proxy.proxyAddress, + network.toString() + ); + + // Fetch DRep info + await fetchProxyDrepInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo + ); + + console.log(`WalletDataLoader: Successfully fetched data for proxy ${proxy.id}`); + } catch (error) { + 
console.error(`WalletDataLoader: Error fetching data for proxy ${proxy.id}:`, error); + } + } + } catch (error) { + console.error("WalletDataLoader: Error fetching proxy data:", error); + } + } + } + async function refreshWallet() { + console.log("WalletDataLoader: refreshWallet called"); setLoading(true); await fetchUtxos(); await getTransactionsOnChain(); + console.log("WalletDataLoader: About to fetch proxy data"); + await fetchProxyData(); // Fetch proxy data + console.log("WalletDataLoader: Finished fetching proxy data"); void ctx.transaction.getPendingTransactions.invalidate(); void ctx.transaction.getAllTransactions.invalidate(); // Also refresh proxy data @@ -66,8 +122,19 @@ export default function WalletDataLoader() { } useEffect(() => { + console.log("WalletDataLoader: useEffect triggered", { + hasAppWallet: !!appWallet, + walletId: appWallet?.id, + hasUtxos: appWallet?.id ? walletsUtxos[appWallet.id] !== undefined : false + }); + if (appWallet && walletsUtxos[appWallet?.id] === undefined) { + console.log("WalletDataLoader: Calling refreshWallet"); refreshWallet(); + } else if (appWallet) { + // If wallet exists but we already have UTxOs, still fetch proxy data + console.log("WalletDataLoader: Calling fetchProxyData directly"); + fetchProxyData(); } }, [appWallet]); diff --git a/src/components/multisig/inspect-multisig-script.tsx b/src/components/multisig/inspect-multisig-script.tsx index 5d291ae2..86f2e878 100644 --- a/src/components/multisig/inspect-multisig-script.tsx +++ b/src/components/multisig/inspect-multisig-script.tsx @@ -88,28 +88,37 @@ export default function InspectMultisigScript({ } return ( - - {mWallet.stakingEnabled() && {mWallet.getScript().address}} - copyString={mWallet.getScript().address} - />} - {`${balance} ₳`}} /> - {mWallet.stakingEnabled() && ( - {mWallet.getStakeAddress()}} - copyString={mWallet.getStakeAddress()} - /> - )} -{/* add pending rewards like balance */} - {mWallet.isGovernanceEnabled() && {mWallet.getDRepId()}} - copyString={mWallet.getDRepId()} - />} +
+
+

Native Script

+
+
+
+
+ {mWallet.stakingEnabled() && {mWallet.getScript().address}} + copyString={mWallet.getScript().address} + />} + {`${balance} ₳`}} /> + {mWallet.stakingEnabled() && ( + {mWallet.getStakeAddress()}} + copyString={mWallet.getStakeAddress()} + /> + )} + {/* add pending rewards like balance */} + {mWallet.isGovernanceEnabled() && {mWallet.getDRepId()}} + copyString={mWallet.getDRepId()} + />} - - + +
+
+
+
); } diff --git a/src/components/multisig/proxy/ProxyControl.tsx b/src/components/multisig/proxy/ProxyControl.tsx index a892ba6b..84e779c3 100644 --- a/src/components/multisig/proxy/ProxyControl.tsx +++ b/src/components/multisig/proxy/ProxyControl.tsx @@ -15,6 +15,7 @@ import UTxOSelector from "@/components/pages/wallet/new-transaction/utxoSelector import { getProvider } from "@/utils/get-provider"; import { MeshTxBuilder, UTxO } from "@meshsdk/core"; import { useProxy } from "@/hooks/useProxy"; +import { useProxyData } from "@/lib/zustand/proxy"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { Dialog, DialogContent, DialogHeader, DialogTitle } from "@/components/ui/dialog"; @@ -44,62 +45,41 @@ export default function ProxyControl() { const { appWallet } = useAppWallet(); const ctx = api.useUtils(); const { newTransaction } = useTransaction(); - const { isProxyEnabled, selectedProxyId, setSelectedProxy, clearSelectedProxy, toggleProxy } = useProxy(); - + const { isProxyEnabled, selectedProxyId, setSelectedProxy, clearSelectedProxy } = useProxy(); + + // Get proxies from proxy store (includes balance and DRep info) + const { proxies: storeProxies, loading: storeLoading, error: storeError } = useProxyData(appWallet?.id); + + // Get proxies from API (for mutations) + const { data: apiProxies, refetch: refetchProxies, isLoading: apiLoading, error: apiError } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet?.id || undefined, + }, + { enabled: !!appWallet?.id } + ); + // Use store proxies if available, otherwise fall back to API proxies + const proxies = storeProxies.length > 0 ? storeProxies : (apiProxies || []); + const proxiesLoading = storeLoading || apiLoading; + const proxiesError = storeError || apiError; const { mutateAsync: createProxy } = api.proxy.createProxy.useMutation({ onSuccess: () => { - void ctx.proxy.getProxiesByUserOrWallet.invalidate(); + void refetchProxies(); }, }); const { mutateAsync: updateProxy } = api.proxy.updateProxy.useMutation({ onSuccess: () => { - void ctx.proxy.getProxiesByUserOrWallet.invalidate(); + void refetchProxies(); }, }); - // Get user by address for user-linked proxies - const { data: user } = api.proxy.getUserByAddress.useQuery( - { address: userAddress || "" }, - { enabled: !!userAddress && !appWallet?.id } - ); - - const { data: proxies, refetch: refetchProxies, isLoading: proxiesLoading, error: proxiesError } = api.proxy.getProxiesByUserOrWallet.useQuery( - { - walletId: appWallet?.id || undefined, - userAddress: userAddress || undefined, - }, - { enabled: !!(appWallet?.id || userAddress) } - ); - - // Debug logging for proxy loading - useEffect(() => { - console.log("Proxy loading debug:", { - appWalletId: appWallet?.id, - userAddress, - enabled: !!(appWallet?.id || userAddress), - proxiesLoading, - proxiesError, - proxiesCount: proxies?.length || 0, - proxies: proxies - }); - }, [appWallet?.id, userAddress, proxiesLoading, proxiesError, proxies]); - // State management const [proxyContract, setProxyContract] = useState(null); - const [proxyBalance, setProxyBalance] = useState>([]); const [isProxySetup, setIsProxySetup] = useState(false); - const [loading, setLocalLoading] = useState(false); - const [selectedProxyBalance, setSelectedProxyBalance] = useState>([]); - const [allProxyBalances, setAllProxyBalances] = useState>>({}); + const [localLoading, setLocalLoading] = useState(false); const [tvlLoading, setTvlLoading] = useState(false); - - // DRep information state - const [selectedProxyDrepId, 
setSelectedProxyDrepId] = useState(""); - const [selectedProxyDrepStatus, setSelectedProxyDrepStatus] = useState(null); - const [drepLoading, setDrepLoading] = useState(false); // Setup flow state const [setupStep, setSetupStep] = useState(0); @@ -177,12 +157,10 @@ export default function ProxyControl() { try { const balance = await proxyContract.getProxyBalance(); - setProxyBalance(balance); setIsProxySetup(balance.length > 0); } catch (error) { // Proxy not set up yet setIsProxySetup(false); - setProxyBalance([]); } }, [proxyContract]); @@ -204,7 +182,7 @@ export default function ProxyControl() { try { setSetupLoading(true); - setLoading(true); + setLocalLoading(true); // Reset setup data to prevent conflicts with previous attempts setSetupData({}); @@ -241,7 +219,7 @@ export default function ProxyControl() { }); } finally { setSetupLoading(false); - setLoading(false); + setLocalLoading(false); } }, [proxyContract, connected, setLoading]); @@ -258,7 +236,7 @@ export default function ProxyControl() { try { setSetupLoading(true); - setLoading(true); + setLocalLoading(true); // If msCbor is set, route through useTransaction hook to create a signable if (appWallet?.scriptCbor && setupData.txHex) { @@ -283,7 +261,7 @@ export default function ProxyControl() { await createProxy({ walletId: appWallet?.id || undefined, - userId: user?.id || undefined, + userId: undefined, proxyAddress: setupData.proxyAddress, authTokenId: setupData.authTokenId, paramUtxo: JSON.stringify(setupData.paramUtxo), @@ -332,9 +310,9 @@ export default function ProxyControl() { }); } finally { setSetupLoading(false); - setLoading(false); + setLocalLoading(false); } - }, [setupData, wallet, appWallet, user, createProxy, refetchProxies, setLoading]); + }, [setupData, wallet, appWallet, createProxy, refetchProxies, setLoading]); // Reset setup flow const handleResetSetup = useCallback(() => { @@ -399,8 +377,6 @@ export default function ProxyControl() { if (!proxy) return { drepId: "", status: null }; try { - setDrepLoading(true); - // Create a temporary contract instance for this proxy const tempContract = new MeshProxyContract( { @@ -424,7 +400,7 @@ export default function ProxyControl() { console.error("Get proxy DRep info error:", error); return { drepId: "", status: null }; } finally { - setDrepLoading(false); + // DRep loading handled elsewhere } }, [network, wallet, appWallet?.scriptCbor]); @@ -446,7 +422,7 @@ export default function ProxyControl() { } } - setAllProxyBalances(balances); + // Balances handled elsewhere } catch (error) { console.error("Failed to fetch proxy balances:", error); } finally { @@ -464,18 +440,21 @@ export default function ProxyControl() { let totalAssets = 0; let totalProxies = proxies.length; - // Sum up all ADA from all proxy balances - Object.values(allProxyBalances).forEach((balance) => { - balance.forEach((asset) => { - if (asset.unit === "lovelace") { - totalADA += parseFloat(asset.quantity) / 1000000; // Convert lovelace to ADA - } - totalAssets++; - }); + // Calculate TVL from store data + proxies.forEach(proxy => { + if ('balance' in proxy && proxy.balance && proxy.balance.length > 0) { + proxy.balance.forEach((asset: any) => { + if (asset.unit === 'lovelace') { + totalADA += parseInt(asset.quantity) / 1000000; // Convert lovelace to ADA + } else { + totalAssets += 1; + } + }); + } }); return { totalADA, totalAssets, totalProxies }; - }, [proxies, allProxyBalances]); + }, [proxies]); const { totalADA, totalAssets } = calculateTVL(); @@ -517,21 +496,23 @@ export default function 
ProxyControl() { }, []); // Handle proxy selection - const handleProxySelection = useCallback(async (proxyId: string) => { - setSelectedProxy(proxyId); - const proxy = proxies?.find((p: any) => p.id === proxyId); - if (proxy) { - // Fetch both balance and DRep information - const [balance, drepInfo] = await Promise.all([ - getProxyBalance(proxy.proxyAddress), - getProxyDrepInfo(proxy) - ]); - - setSelectedProxyBalance(balance); - setSelectedProxyDrepId(drepInfo.drepId); - setSelectedProxyDrepStatus(drepInfo.status); + const handleProxySelection = useCallback((proxyId: string) => { + if (selectedProxyId === proxyId) { + // If clicking the same proxy, unselect it + clearSelectedProxy(); + toast({ + title: "Proxy Unselected", + description: "Proxy mode has been disabled. Using standard DRep mode.", + }); + } else { + // Select the new proxy + setSelectedProxy(proxyId); + toast({ + title: "Proxy Selected", + description: "Proxy mode enabled for governance operations.", + }); } - }, [proxies, getProxyBalance, getProxyDrepInfo, setSelectedProxy]); + }, [selectedProxyId, setSelectedProxy, clearSelectedProxy, toast]); // Spend from proxy @@ -570,7 +551,7 @@ export default function ProxyControl() { try { setSpendLoading(true); - setLoading(true); + setLocalLoading(true); // Get the selected proxy const proxy = proxies?.find((p: any) => p.id === selectedProxyId); @@ -635,7 +616,7 @@ export default function ProxyControl() { }); } finally { setSpendLoading(false); - setLoading(false); + setLocalLoading(false); } }, [proxyContract, connected, spendOutputs, selectedProxyId, proxies, network, wallet, setLoading, handleProxySelection]); @@ -666,7 +647,7 @@ export default function ProxyControl() { ); } - if (loading) { + if (proxiesLoading) { return ( @@ -718,47 +699,6 @@ export default function ProxyControl() {
- {/* Global Proxy Toggle */} -
-
- { - if (!e.target.checked) { - clearSelectedProxy(); - } - toggleProxy(); - }} - className="sr-only" - /> - -
- - - - - Global Proxy - - - -

Enable proxy functionality across the application

-
-
-
-
{/* TVL Display */} @@ -830,19 +770,12 @@ export default function ProxyControl() { {/* UTxO Selector for visibility/control. Contract uses all UTxOs from provider. */} @@ -873,7 +806,7 @@ export default function ProxyControl() { ; + drepId?: string; + drepInfo?: any; + lastUpdated?: number; }; isSelected: boolean; - balance: Array<{ unit: string; quantity: string }>; - balanceLoading?: boolean; onSelect: () => void; onCopy: () => void; onSpend: () => void; onUpdateProxy: (proxyId: string, description: string) => Promise; onRefreshBalance?: () => void; + onCopyToClipboard: (text: string, label?: string) => void; } // Component to fetch and display proxy balance @@ -58,58 +61,46 @@ const ProxyCardWithBalance = memo(function ProxyCardWithBalance({ onSelect, onCopy, onSpend, - onGetProxyBalance, - onUpdateProxy -}: Omit & { - onGetProxyBalance: (proxyAddress: string) => Promise>; - onUpdateProxy: (proxyId: string, description: string) => Promise; -}) { - const [balance, setBalance] = useState>([]); - const [balanceLoading, setBalanceLoading] = useState(false); - const [lastFetchTime, setLastFetchTime] = useState(0); + onUpdateProxy, + onCopyToClipboard +}: ProxyCardProps) { + // Use balance and DRep data directly from proxy object + const balance = proxy.balance || []; + const drepId = proxy.drepId; + const drepInfo = proxy.drepInfo; + const lastUpdated = proxy.lastUpdated; - const fetchBalance = React.useCallback(async () => { - setBalanceLoading(true); - try { - const proxyBalance = await onGetProxyBalance(proxy.proxyAddress); - setBalance(proxyBalance); - setLastFetchTime(Date.now()); - } catch (error) { - console.error("Failed to fetch proxy balance:", error); - setBalance([]); - } finally { - setBalanceLoading(false); - } - }, [proxy.proxyAddress, onGetProxyBalance]); - // Initial fetch and refresh when proxy address changes - useEffect(() => { - fetchBalance(); - }, [fetchBalance]); - // Refresh balance when proxy is selected (to ensure latest data) - useEffect(() => { - if (isSelected && Date.now() - lastFetchTime > 5000) { // Refresh if older than 5 seconds - fetchBalance(); - } - }, [isSelected, fetchBalance, lastFetchTime]); + // No need to fetch balance - it's already in the proxy object return ( ); }); -const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceLoading = false, onSelect, onCopy, onSpend, onUpdateProxy, onRefreshBalance }: ProxyCardProps) { +const ProxyCard = memo(function ProxyCard({ + proxy, + isSelected, + onSelect, + onCopy, + onSpend, + onUpdateProxy, + onCopyToClipboard +}: ProxyCardProps) { + // Use balance and DRep data directly from proxy object + const displayBalance = proxy.balance || []; + const drepId = proxy.drepId; + const drepInfo = proxy.drepInfo; + const balanceLoading = false; // No loading state needed since data is already loaded const [isExpanded, setIsExpanded] = React.useState(false); const [isEditing, setIsEditing] = React.useState(false); const [editDescription, setEditDescription] = React.useState(proxy.description || ""); @@ -189,7 +180,7 @@ const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceL
- +
{isEditing ? (
)} - +
@@ -245,30 +236,16 @@ const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceL
Balance - {onRefreshBalance && ( - - )}
{balanceLoading ? (
- ) : balance.length > 0 ? ( + ) : displayBalance.length > 0 ? (
- {balance.map((asset, index) => ( + {displayBalance.map((asset: any, index: number) => (
{asset.unit === "lovelace" @@ -284,7 +261,7 @@ const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceL
Proxy Balance Details
- {balance.map((asset, index) => ( + {displayBalance.map((asset: any, index: number) => (
{asset.unit === "lovelace" ? "ADA" : asset.unit}: @@ -296,7 +273,7 @@ const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceL
))}
- Total: {balance.length} asset{balance.length !== 1 ? 's' : ''} + Total: {displayBalance.length} asset{displayBalance.length !== 1 ? 's' : ''}
@@ -389,13 +366,75 @@ const ProxyCard = memo(function ProxyCard({ proxy, isSelected, balance, balanceL {proxy.id}
+ + {/* DRep Information - Only show when this proxy is selected */} + {isSelected && ( +
+
+ + DRep Information +
+ +
+ {/* DRep ID */} +
+
+ + DRep ID + {drepId && ( + + )} +
+
+ {drepId ? ( + drepId + ) : ( + Not registered + )} +
+
+ + {/* DRep Status */} +
+
+ {drepInfo?.active ? ( + + ) : ( + + )} + Status +
+
+ + {drepInfo?.active ? "Active" : "Inactive"} + + {drepInfo?.amount && ( + + {Math.round(Number(drepInfo.amount) / 1000000)} ₳ + + )} +
+
+
+
+ )} )} {/* Actions */}
- )} -
-
- {drepLoading ? ( -
- ) : selectedProxyDrepId ? ( - selectedProxyDrepId - ) : ( - Not registered - )} -
- - - {/* DRep Status */} -
-
- {selectedProxyDrepStatus?.active ? ( - - ) : ( - - )} - Status -
-
- {drepLoading ? ( -
- ) : selectedProxyDrepStatus ? ( -
- - {selectedProxyDrepStatus.active ? 'Active' : 'Inactive'} - - {selectedProxyDrepStatus.amount && ( - - {Math.round(Number(selectedProxyDrepStatus.amount) / 1000000).toLocaleString()} ₳ - - )} -
- ) : ( - Not registered - )} -
-
- -
-
- )} {/* Proxy Carousel */} @@ -590,16 +545,6 @@ const ProxyOverview = memo(function ProxyOverview({
- {onRefreshAllBalances && ( - - )} {proxies.length} proxy{proxies.length !== 1 ? 'ies' : ''}
@@ -622,8 +567,8 @@ const ProxyOverview = memo(function ProxyOverview({ onSelect={() => onProxySelection(proxy.id)} onCopy={() => onCopyToClipboard(proxy.proxyAddress)} onSpend={() => onStartSpending()} - onGetProxyBalance={onGetProxyBalance} onUpdateProxy={onUpdateProxy} + onCopyToClipboard={onCopyToClipboard} /> ))} diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index 114e7e07..f8257c3b 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -33,6 +33,10 @@ import blueprint from "./aiken-workspace/plutus.json"; * * With each new NFT minted, the token index within the oracle is incremented by one, ensuring a consistent and orderly progression in the numbering of the NFTs. */ +// Cache for DRep status to avoid multiple API calls +const drepStatusCache = new Map(); +const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes + export class MeshProxyContract extends MeshTxInitiator { paramUtxo: UTxO["input"] = { outputIndex: 0, txHash: "" }; proxyAddress?: string; @@ -47,6 +51,15 @@ export class MeshProxyContract extends MeshTxInitiator { this.stakeCredential = undefined; } + // Static method to clear DRep status cache + static clearDrepStatusCache(drepId?: string) { + if (drepId) { + drepStatusCache.delete(drepId); + } else { + drepStatusCache.clear(); + } + } + getAuthTokenCbor = () => { return applyParamsToScript(blueprint.validators[0]!.compiledCode, [ mOutputReference(this.paramUtxo.txHash, this.paramUtxo.outputIndex), @@ -599,27 +612,31 @@ export class MeshProxyContract extends MeshTxInitiator { throw new Error("Blockchain provider not found"); } - const utxos = await blockchainProvider.fetchAddressUTxOs(this.proxyAddress); + try { + const utxos = await blockchainProvider.fetchAddressUTxOs(this.proxyAddress); - // Aggregate all assets from UTxOs - const balanceMap = new Map(); + // Aggregate all assets from UTxOs + const balanceMap = new Map(); - for (const utxo of utxos) { - for (const asset of utxo.output.amount) { - const currentAmount = balanceMap.get(asset.unit) || BigInt(0); - balanceMap.set(asset.unit, currentAmount + BigInt(asset.quantity)); + for (const utxo of utxos) { + for (const asset of utxo.output.amount) { + const currentAmount = balanceMap.get(asset.unit) || BigInt(0); + balanceMap.set(asset.unit, currentAmount + BigInt(asset.quantity)); + } } - } - // Convert back to string format for consistency - const balance = Array.from(balanceMap.entries()).map( - ([unit, quantity]) => ({ - unit, - quantity: quantity.toString(), - }), - ); + // Convert back to string format for consistency + const balance = Array.from(balanceMap.entries()).map( + ([unit, quantity]) => ({ + unit, + quantity: quantity.toString(), + }), + ); - return balance; + return balance; + } catch (error: any) { + throw new Error(`Failed to fetch proxy balance: ${error?.message || 'Unknown error'}`); + } }; getDrepId = () => { @@ -630,10 +647,65 @@ export class MeshProxyContract extends MeshTxInitiator { getDrepStatus = async () => { const drepId = this.getDrepId(); - const drepStatus = await this.mesh.fetcher?.get( - `/governance/dreps/${drepId}`, - ); - return drepStatus; + + // Check cache first + const cached = drepStatusCache.get(drepId); + if (cached && (Date.now() - cached.timestamp) < CACHE_DURATION) { + return cached.data; + } + + if (!this.mesh.fetcher) { + throw new Error("Blockchain provider not found"); + } + + try { + const drepStatus = await this.mesh.fetcher.get( + `/governance/dreps/${drepId}`, + ); + 
+ // Cache the successful result + drepStatusCache.set(drepId, { + data: drepStatus, + timestamp: Date.now() + }); + + return drepStatus; + } catch (error: any) { + // Parse the error if it's a stringified JSON + let parsedError = error; + if (typeof error === 'string') { + try { + parsedError = JSON.parse(error); + } catch { + // If parsing fails, use the original error + } + } + + // Handle specific error cases - check multiple possible 404 indicators + const is404 = error?.status === 404 || + error?.response?.status === 404 || + error?.data?.status_code === 404 || + parsedError?.status === 404 || + parsedError?.data?.status_code === 404 || + error?.message?.includes('404') || + error?.message?.includes('Not Found') || + error?.message?.includes('not found') || + error?.message?.includes('NOT_FOUND') || + (error?.response?.data && error.response.data.status_code === 404) || + (error?.data && error.data.status_code === 404); + + if (is404) { + // DRep not registered yet - cache null result + drepStatusCache.set(drepId, { + data: null, + timestamp: Date.now() + }); + return null; + } + + // For other errors, don't cache and re-throw + console.log(`Failed to fetch DRep status: ${error?.message || 'Unknown error'}`); + } }; /** diff --git a/src/components/pages/wallet/governance/ballot/ballotOverview.tsx b/src/components/pages/wallet/governance/ballot/ballotOverview.tsx deleted file mode 100644 index 8b137891..00000000 --- a/src/components/pages/wallet/governance/ballot/ballotOverview.tsx +++ /dev/null @@ -1 +0,0 @@ - diff --git a/src/components/pages/wallet/governance/card-info.tsx b/src/components/pages/wallet/governance/card-info.tsx index 0106b5d9..53f661c5 100644 --- a/src/components/pages/wallet/governance/card-info.tsx +++ b/src/components/pages/wallet/governance/card-info.tsx @@ -8,108 +8,169 @@ import { DropdownMenuItem, DropdownMenuTrigger, } from "@/components/ui/dropdown-menu"; -import { MoreVertical, Info, ExternalLink, Copy } from "lucide-react"; +import { MoreVertical, Info, ExternalLink, Copy, X, UserCheck, CheckCircle, Hash, Activity, TrendingUp } from "lucide-react"; import { Button } from "@/components/ui/button"; import useMultisigWallet from "@/hooks/useMultisigWallet"; import { useToast } from "@/hooks/use-toast"; import { useProxy } from "@/hooks/useProxy"; -import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; -import { getTxBuilder } from "@/utils/get-tx-builder"; import { useSiteStore } from "@/lib/zustand/site"; -import { api } from "@/utils/api"; -import { useState, useEffect } from "react"; import { UTxO } from "@meshsdk/core"; +import { useProxyData } from "@/lib/zustand/proxy"; +import { useState, useEffect } from "react"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; +import { + Tooltip, + TooltipContent, + TooltipProvider, + TooltipTrigger, +} from "@/components/ui/tooltip"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from "@/components/ui/select"; export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet; manualUtxos: UTxO[] }) { const drepInfo = useWalletsStore((state) => state.drepInfo); const { multisigWallet } = useMultisigWallet(); const { toast } = useToast(); - const { isProxyEnabled, selectedProxyId } = useProxy(); + const { isProxyEnabled, selectedProxyId, setSelectedProxy, clearSelectedProxy } = useProxy(); const network = useSiteStore((state) => state.network); + const { proxies } = 
useProxyData(appWallet?.id); // Proxy DRep state const [proxyDrepInfo, setProxyDrepInfo] = useState(null); const [proxyDrepId, setProxyDrepId] = useState(null); const [loadingProxyDrep, setLoadingProxyDrep] = useState(false); - - // Get proxies for the current wallet - const { data: proxies } = api.proxy.getProxiesByUserOrWallet.useQuery( - { - walletId: appWallet?.id || undefined, - }, - { enabled: !!(appWallet?.id && isProxyEnabled) } - ); + const [proxyDrepError, setProxyDrepError] = useState(null); + + // Get DRep info for standard mode + const currentDrepId = multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepId() : appWallet?.dRepId; + const currentDrepInfo = drepInfo; - // Fetch proxy DRep information when proxy mode is enabled + + + // Fetch proxy DRep info when proxy is enabled and selected useEffect(() => { const fetchProxyDrepInfo = async () => { - if (!isProxyEnabled || !selectedProxyId || !proxies || !appWallet) { - setProxyDrepInfo(null); - setProxyDrepId(null); - return; - } - - try { + + + if (isProxyEnabled && selectedProxyId && appWallet?.scriptCbor) { setLoadingProxyDrep(true); + setProxyDrepError(null); - // Get the selected proxy - const proxy = proxies.find((p: any) => p.id === selectedProxyId); - if (!proxy) { - setProxyDrepInfo(null); - setProxyDrepId(null); - return; - } - - // Create proxy contract instance - const txBuilder = getTxBuilder(network); - const proxyContract = new MeshProxyContract( - { - mesh: txBuilder, - wallet: undefined, // We don't need wallet for getting DRep info - networkId: network, - }, - { - paramUtxo: JSON.parse(proxy.paramUtxo), - }, - appWallet.scriptCbor, - ); - proxyContract.proxyAddress = proxy.proxyAddress; - - // Get DRep ID and status - const drepId = proxyContract.getDrepId(); - setProxyDrepId(drepId); + // Set a timeout to prevent infinite loading + const timeoutId = setTimeout(() => { + setLoadingProxyDrep(false); + setProxyDrepError("Timeout: Could not fetch proxy DRep information"); + }, 10000); // 10 second timeout try { - const drepStatus = await proxyContract.getDrepStatus(); - setProxyDrepInfo(drepStatus); + const selectedProxy = proxies.find(p => p.id === selectedProxyId); + + if (selectedProxy) { + const txBuilder = getTxBuilder(network); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: undefined, + networkId: network, + }, + { + paramUtxo: JSON.parse(selectedProxy.paramUtxo || '{}'), + }, + appWallet.scriptCbor, + ); + proxyContract.proxyAddress = selectedProxy.proxyAddress; + + // Get DRep ID + const drepId = proxyContract.getDrepId(); + + setProxyDrepId(drepId); + + // Get DRep status (now with caching and proper error handling) + const status = await proxyContract.getDrepStatus(); + setProxyDrepInfo(status); + + clearTimeout(timeoutId); + } else { + setProxyDrepError("Selected proxy not found"); + clearTimeout(timeoutId); + } } catch (error) { - console.log("DRep not registered yet or error fetching status:", error); - setProxyDrepInfo(null); + // Only log unexpected errors, not 404s which are handled in offchain + console.error("Unexpected error in fetchProxyDrepInfo:", error); + setProxyDrepError("Failed to fetch proxy DRep information"); + clearTimeout(timeoutId); + } finally { + setLoadingProxyDrep(false); } - } catch (error) { - console.error("Error fetching proxy DRep info:", error); - setProxyDrepInfo(null); + } else { setProxyDrepId(null); - } finally { - setLoadingProxyDrep(false); + setProxyDrepInfo(null); + setProxyDrepError(null); } }; - + fetchProxyDrepInfo(); - }, 
[isProxyEnabled, selectedProxyId, proxies, appWallet, network]); - - // Determine which DRep info to use - const currentDrepId = isProxyEnabled && proxyDrepId ? proxyDrepId : - (multisigWallet?.getKeysByRole(3) ? multisigWallet?.getDRepId() : appWallet?.dRepId); + }, [isProxyEnabled, selectedProxyId, appWallet?.scriptCbor, network, proxies]); + + // Use proxy DRep info if proxy is enabled, otherwise use standard DRep info + const displayDrepId = isProxyEnabled ? proxyDrepId : currentDrepId; + const displayDrepInfo = isProxyEnabled ? proxyDrepInfo : currentDrepInfo; + - const currentDrepInfo = isProxyEnabled ? proxyDrepInfo : drepInfo; - if (!currentDrepId) { - throw new Error("DRep not found"); + // Show loading or error state if no DRep ID + if (!displayDrepId) { + return ( +
+ {/* Header */} +
+
+
+ +
+
+

+ {isProxyEnabled ? "Proxy DRep Information" : "DRep Information"} +

+

+ Note: governance features are currently in alpha as Blockfrost and CIPs standards are work in progress. +

+
+
+
+ + {/* Loading or Error State */} +
+ {loadingProxyDrep ? ( +
+
+

+ Loading proxy DRep information... +

+
+ ) : proxyDrepError ? ( +
+
+
+

Error loading proxy DRep

+

{proxyDrepError}

+
+
+ ) : ( +
+
+

+ {isProxyEnabled ? "No proxy DRep information available" : "No DRep information available"} +

+
+ )} +
+
+ ); } // Check if DRep is actually registered (has info from Blockfrost) - const isDRepRegistered = currentDrepInfo?.active === true; + const isDRepRegistered = displayDrepInfo?.active === true; return (
{/* Header */} @@ -120,10 +181,10 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet

- DRep Information + {isProxyEnabled ? "Proxy DRep Information" : "DRep Information"}

- {isProxyEnabled ? "Proxy DRep Management - Note: governance features are currently in alpha" : "Note: governance features are currently in alpha as Blockfrost and CIPs standards are work in progress."} + Note: governance features are currently in alpha as Blockfrost and CIPs standards are work in progress.

@@ -139,7 +200,7 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet @@ -150,91 +211,187 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet - {/* Proxy Mode Indicator */} - {isProxyEnabled && ( -
-
-
- - Proxy Mode Active - - - Using proxy DRep for governance operations - -
-
- )} - - {/* Content */} -
- {/* DRep ID */} -
- -
+ {/* Minimal DRep & Proxy Management Card */} +
+ {/* Header */} +
+
+
+ +
+

+ {isProxyEnabled ? "Proxy DRep Management" : "DRep Information"} +

+

+ {isProxyEnabled ? "Using proxy for governance operations" : "Standard DRep governance mode"} +

+
+
- {loadingProxyDrep ? ( - Loading proxy DRep ID... - ) : ( - - {currentDrepId} - + {/* Global Proxy Toggle */} +
+ Proxy Mode: + +
+ + {proxies.length > 0 && ( +
+ + {selectedProxyId && ( + + )} +
)} -
- {/* Status */} -
- -
-
-
- - {isDRepRegistered ? "Registered" : "Not registered"} - + {/* Content */} +
+ + + {/* DRep Information - Single Row */} +
+ {/* DRep ID */} +
+
+ + DRep ID +
+
+
+ + {loadingProxyDrep ? "..." : displayDrepId} + +
+ +
+
+ + {/* DRep Status */} +
+
+ + Status +
+
+ {loadingProxyDrep ? ( +
+
+ Loading... +
+ ) : ( + + + +
+
+ +

{displayDrepInfo?.active ? 'Active' : 'Inactive'}

+
+
+
+ )} + + {loadingProxyDrep ? "Loading..." : (displayDrepInfo?.active ? "Active" : "Inactive")} + +
-
-
- {/* Voting Power */} - {isDRepRegistered && currentDrepInfo && ( -
- -
- - {Math.round(Number(currentDrepInfo.amount) / 1000000) - .toString() - .replace(/\B(?=(\d{3})+(?!\d))/g, ' ')} ₳ - + {/* Voting Power */} +
+
+ + Voting Power +
+
+ {displayDrepInfo?.amount ? `${(parseInt(displayDrepInfo.amount) / 1000000).toFixed(2)}` : + displayDrepInfo?.deposit ? `${(parseInt(displayDrepInfo.deposit) / 1000000).toFixed(2)}` : + "0.00"} ADA
+
+ {loadingProxyDrep ? "Loading..." : "Current voting power"}
- )} +
{/* Actions */} diff --git a/src/components/pages/wallet/info/index.tsx b/src/components/pages/wallet/info/index.tsx index ba664019..87624402 100644 --- a/src/components/pages/wallet/info/index.tsx +++ b/src/components/pages/wallet/info/index.tsx @@ -27,10 +27,10 @@ export default function WalletInfo() { {multisigWallet && multisigWallet.stakingEnabled() && } {multisigWallet && } + {multisigWallet && } {multisigWallet && } -
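The voting-power and balance figures above are derived from lovelace strings divided by 1,000,000 and formatted in two different styles (a rounded value with spaced thousands separators, and a toFixed(2) value). A minimal sketch of a shared formatter, using a hypothetical helper name that is not part of the repo:

// Hypothetical helper: converts a lovelace string to a display value in ADA.
// toLocaleString provides the thousands separators that the regex replace above emulates.
function formatLovelace(lovelace: string, decimals = 2): string {
  const ada = Number(lovelace) / 1_000_000;
  return ada.toLocaleString("en-US", {
    minimumFractionDigits: decimals,
    maximumFractionDigits: decimals,
  });
}

// e.g. formatLovelace("12345678900") === "12,345.68"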
diff --git a/src/components/pages/wallet/info/migrate-wallet.tsx b/src/components/pages/wallet/info/migrate-wallet.tsx index 4dcec498..5c780488 100644 --- a/src/components/pages/wallet/info/migrate-wallet.tsx +++ b/src/components/pages/wallet/info/migrate-wallet.tsx @@ -1,4 +1,4 @@ -import React, { useState } from "react"; +import React, { useState, useCallback } from "react"; import CardUI from "@/components/ui/card-content"; import { Button } from "@/components/ui/button"; import { Alert, AlertDescription } from "@/components/ui/alert"; @@ -11,6 +11,7 @@ import MigrationPreChecks from "./migration/MigrationPreChecks"; import NewWalletCreationStep from "./migration/NewWalletCreationStep"; import ProxySetupStep from "./migration/ProxySetupStep"; import FundTransferStep from "./migration/FundTransferStep"; +import ProxyTransferStep from "./migration/ProxyTransferStep"; import MigrationCompleteStep from "./migration/MigrationCompleteStep"; // Progress indicator component @@ -30,7 +31,8 @@ const MigrationProgress = ({ { id: 1, title: "Create Wallet", description: "Configure new wallet" }, { id: 2, title: "Proxy Setup", description: "Setup proxy (optional)" }, { id: 3, title: "Transfer Funds", description: "Move all assets" }, - { id: 4, title: "Complete", description: "Finish migration" }, + { id: 4, title: "Transfer Proxies", description: "Move proxy registrations" }, + { id: 5, title: "Complete", description: "Finish migration" }, ]; return ( @@ -62,14 +64,14 @@ const MigrationProgress = ({ -
- {/* Circles Row with Connecting Lines */} -
+
+ {/* Mobile Layout - Vertical Stack */} +
{steps.map((step, index) => ( -
+
{/* Circle */}
{index < currentStep ? ( - + ) : ( index + 1 )}
- {/* Connecting Line (except for last step) */} - {index < steps.length - 1 && ( -
- )} -
- ))} -
- - {/* Text Row */} -
- {steps.map((step, index) => ( -
-
-

+

{step.title} @@ -110,102 +97,348 @@ const MigrationProgress = ({ {step.description}

+ + {/* Progress indicator for mobile */} + {index < currentStep && ( +
+ )}
))}
+ + {/* Desktop Layout - Horizontal with aligned circles and text */} +
+
+ {steps.map((step, index) => ( +
+ {/* Circle and Text Container */} +
+ {/* Circle */} +
+ {index < currentStep ? ( + + ) : ( + index + 1 + )} +
+ + {/* Text Content */} +
+

+ {step.title} +

+

+ {step.description} +

+
+
+ + {/* Connecting Line (except for last step) */} + {index < steps.length - 1 && ( +
+ )} +
+ ))} +
+
); }; +// Migration step constants for better maintainability +const MIGRATION_STEPS = { + PRE_CHECKS: 0, + CREATE_WALLET: 1, + PROXY_SETUP: 2, + FUND_TRANSFER: 3, + PROXY_TRANSFER: 4, + COMPLETE: 5, +} as const; + +type MigrationStep = typeof MIGRATION_STEPS[keyof typeof MIGRATION_STEPS]; + +interface MigrationState { + step: MigrationStep | null; + newWalletId: string | null; + migrationId: string | null; + isStarting: boolean; + isAborting: boolean; + hasAborted: boolean; +} + export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { - // Migration step state: 0 = pre-checks, 1 = create wallet, 2 = proxy setup, 3 = fund transfer, 4 = complete - const [migrationStep, setMigrationStep] = useState(null); - const [newWalletId, setNewWalletId] = useState(null); - const [isStartingMigration, setIsStartingMigration] = useState(false); - const [isAbortingMigration, setIsAbortingMigration] = useState(false); - const [hasAbortedMigration, setHasAbortedMigration] = useState(false); + // Consolidated migration state + const [migrationState, setMigrationState] = useState({ + step: null, + newWalletId: null, + migrationId: null, + isStarting: false, + isAborting: false, + hasAborted: false, + }); // API mutations const { mutateAsync: abortMigration } = api.wallet.abortMigration.useMutation(); + const { mutate: createMigration } = api.migration.createMigration.useMutation(); + const { mutate: updateMigrationStep } = api.migration.updateMigrationStep.useMutation(); + const { mutate: completeMigration } = api.migration.completeMigration.useMutation(); + const { mutateAsync: cancelMigration } = api.migration.cancelMigration.useMutation(); + const { mutateAsync: deleteNewWallet } = api.wallet.deleteNewWallet.useMutation(); const utils = api.useUtils(); const { userAddress } = useUserStore(); + // Helper functions for state management + const updateMigrationState = useCallback((updates: Partial) => { + setMigrationState(prev => ({ ...prev, ...updates })); + }, []); + + const resetMigrationState = useCallback(() => { + setMigrationState({ + step: null, + newWalletId: null, + migrationId: null, + isStarting: false, + isAborting: false, + hasAborted: false, + }); + }, []); + + const resetMigrationStateWithAbortFlag = useCallback(() => { + setMigrationState({ + step: null, + newWalletId: null, + migrationId: null, + isStarting: false, + isAborting: false, + hasAborted: true, + }); + }, []); + + // Check for existing pending migrations + const { data: pendingMigrations } = api.migration.getPendingMigrations.useQuery( + { ownerAddress: userAddress! }, + { enabled: !!userAddress } + ); + + // Check if current wallet has existing proxies + const { data: existingProxies } = api.proxy.getProxiesByUserOrWallet.useQuery( + { + walletId: appWallet.id, + userAddress: userAddress! 
+ }, + { enabled: !!userAddress && !!appWallet.id } + ); + - // Auto-start migration if there's already a migration target + // Auto-resume migration if there's an existing pending migration for this wallet + React.useEffect(() => { + if (pendingMigrations && pendingMigrations.length > 0) { + const existingMigration = pendingMigrations.find( + (migration: any) => migration.originalWalletId === appWallet.id + ); + + if (existingMigration && migrationState.step === null && !migrationState.hasAborted) { + updateMigrationState({ + migrationId: existingMigration.id, + step: existingMigration.currentStep, + newWalletId: existingMigration.newWalletId || null, + }); + } + } + }, [pendingMigrations, appWallet.id, migrationState.step, migrationState.hasAborted, updateMigrationState]); + + // Auto-start migration if there's already a migration target (legacy support) React.useEffect(() => { const migrationTargetId = (appWallet as any).migrationTargetWalletId; - if (migrationTargetId && migrationStep === null && !hasAbortedMigration) { + if (migrationTargetId && migrationState.step === null && !migrationState.hasAborted && !migrationState.migrationId) { // Set the newWalletId but don't auto-start the migration steps // Let the user explicitly click "Continue Migration" to proceed - setNewWalletId(migrationTargetId); + updateMigrationState({ newWalletId: migrationTargetId }); } - }, [(appWallet as any).migrationTargetWalletId, migrationStep, hasAbortedMigration]); + }, [(appWallet as any).migrationTargetWalletId, migrationState.step, migrationState.hasAborted, migrationState.migrationId, updateMigrationState]); // Reset abort flag when migration target is cleared (after successful abort) React.useEffect(() => { - if (hasAbortedMigration && !(appWallet as any).migrationTargetWalletId) { - setHasAbortedMigration(false); + if (migrationState.hasAborted && !(appWallet as any).migrationTargetWalletId) { + updateMigrationState({ hasAborted: false }); } - }, [hasAbortedMigration, (appWallet as any).migrationTargetWalletId]); + }, [migrationState.hasAborted, (appWallet as any).migrationTargetWalletId, updateMigrationState]); const handleStartMigration = () => { - setIsStartingMigration(true); - // If there's already a migration target, start at step 1 (wallet creation) - // Otherwise start at step 0 (pre-checks) - const migrationTargetId = (appWallet as any).migrationTargetWalletId; - if (migrationTargetId) { - setMigrationStep(1); - setNewWalletId(migrationTargetId); - } else { - setMigrationStep(0); - } + // Reset any previous state before starting new migration + resetMigrationState(); + updateMigrationState({ isStarting: true }); + + // Create migration record + createMigration({ + originalWalletId: appWallet.id, + ownerAddress: userAddress!, + migrationData: { + walletName: appWallet.name, + walletDescription: appWallet.description, + startedAt: new Date().toISOString() + } + }, { + onSuccess: (migration) => { + // If there's already a migration target, start at step 1 (wallet creation) + // Otherwise start at step 0 (pre-checks) + const migrationTargetId = (appWallet as any).migrationTargetWalletId; + const startStep = migrationTargetId ? 
MIGRATION_STEPS.CREATE_WALLET : MIGRATION_STEPS.PRE_CHECKS; + + updateMigrationState({ + migrationId: migration.id, + step: startStep, + newWalletId: migrationTargetId || null, + isStarting: false, + hasAborted: false, + }); + + updateMigrationStep({ + migrationId: migration.id, + currentStep: startStep, + status: "in_progress", + newWalletId: migrationTargetId || undefined + }); + }, + onError: (error) => { + console.error("Failed to create migration:", error); + updateMigrationState({ isStarting: false }); + toast({ + title: "Error", + description: "Failed to start migration. Please try again.", + variant: "destructive", + }); + } + }); }; const handlePreChecksContinue = () => { - setMigrationStep(1); + updateMigrationState({ step: MIGRATION_STEPS.CREATE_WALLET }); + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: MIGRATION_STEPS.CREATE_WALLET, + status: "in_progress" + }); + } }; const handlePreChecksCancel = () => { - setMigrationStep(null); - setIsStartingMigration(false); + resetMigrationState(); }; const handleNewWalletCreated = (createdWalletId: string) => { - setNewWalletId(createdWalletId); - setMigrationStep(2); + console.log("handleNewWalletCreated called with:", createdWalletId); + console.log("Current migration state:", migrationState); + console.log("App wallet migration target:", (appWallet as any).migrationTargetWalletId); + + // Check if wallet has existing proxies - if yes, skip to step 3 (fund transfer) + // If no proxies, go to step 2 (proxy setup) + const nextStep = existingProxies && existingProxies.length > 0 ? MIGRATION_STEPS.FUND_TRANSFER : MIGRATION_STEPS.PROXY_SETUP; + + updateMigrationState({ + newWalletId: createdWalletId, + step: nextStep + }); + + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: nextStep, + status: "in_progress", + newWalletId: createdWalletId + }); + } }; const handleNewWalletBack = () => { - setMigrationStep(0); + updateMigrationState({ step: MIGRATION_STEPS.PRE_CHECKS }); }; const handleProxySetupContinue = () => { - setMigrationStep(3); + updateMigrationState({ step: MIGRATION_STEPS.FUND_TRANSFER }); + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: MIGRATION_STEPS.FUND_TRANSFER, + status: "in_progress" + }); + } }; const handleProxySetupSkip = () => { - setMigrationStep(3); + updateMigrationState({ step: MIGRATION_STEPS.FUND_TRANSFER }); + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: MIGRATION_STEPS.FUND_TRANSFER, + status: "in_progress" + }); + } }; const handleProxySetupBack = () => { - setMigrationStep(1); + updateMigrationState({ step: MIGRATION_STEPS.CREATE_WALLET }); }; const handleFundTransferContinue = () => { - setMigrationStep(4); + updateMigrationState({ step: MIGRATION_STEPS.PROXY_TRANSFER }); + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: MIGRATION_STEPS.PROXY_TRANSFER, + status: "in_progress" + }); + } }; const handleFundTransferBack = () => { - setMigrationStep(2); + updateMigrationState({ step: MIGRATION_STEPS.PROXY_SETUP }); + }; + + const handleProxyTransferContinue = () => { + updateMigrationState({ step: MIGRATION_STEPS.COMPLETE }); + if (migrationState.migrationId) { + updateMigrationStep({ + migrationId: migrationState.migrationId, + currentStep: MIGRATION_STEPS.COMPLETE, + 
status: "in_progress" + }); + } + }; + + const handleProxyTransferBack = () => { + updateMigrationState({ step: MIGRATION_STEPS.FUND_TRANSFER }); }; const handleArchiveOldWallet = () => { + // Complete migration + if (migrationState.migrationId) { + completeMigration({ migrationId: migrationState.migrationId }); + } + // Reset migration state - setMigrationStep(null); - setNewWalletId(null); - setIsStartingMigration(false); + resetMigrationState(); toast({ title: "Migration Complete", @@ -214,14 +447,12 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { }; const handleCancelMigration = () => { - setMigrationStep(null); - setNewWalletId(null); - setIsStartingMigration(false); + resetMigrationState(); }; const handleAbortMigration = async () => { // Try multiple sources for the migration target ID - const migrationTargetId = newWalletId || (appWallet as any).migrationTargetWalletId; + const migrationTargetId = migrationState.newWalletId || (appWallet as any).migrationTargetWalletId; // If we still don't have it, try to fetch the wallet data directly let finalMigrationTargetId = migrationTargetId; @@ -246,20 +477,37 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { return; } - setIsAbortingMigration(true); + updateMigrationState({ isAborting: true }); + console.log("Starting migration abort process..."); try { - await abortMigration({ + console.log("Aborting migration with:", { walletId: appWallet.id, newWalletId: finalMigrationTargetId, + migrationState + }); + + // Delete the wallet (NewWallet or Wallet) + await abortMigration({ + walletId: appWallet.id, + newWalletId: finalMigrationTargetId || undefined, }); - // Reset migration state - setMigrationStep(null); - setNewWalletId(null); - setIsStartingMigration(false); - setHasAbortedMigration(true); + // Delete the migration record + if (migrationState.migrationId) { + await cancelMigration({ migrationId: migrationState.migrationId }); + } + + // Also delete any NewWallet if it exists (in case the final wallet wasn't created yet) + if (migrationState.newWalletId && migrationState.newWalletId !== finalMigrationTargetId) { + try { + await deleteNewWallet({ walletId: migrationState.newWalletId }); + console.log("Deleted NewWallet:", migrationState.newWalletId); + } catch (error) { + console.log("NewWallet deletion failed (may not exist):", error); + } + } - // Invalidate wallet queries to refresh the UI + // Invalidate all relevant queries to refresh the UI await Promise.all([ utils.wallet.getWallet.invalidate({ address: userAddress!, @@ -268,12 +516,25 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { utils.wallet.getUserWallets.invalidate({ address: userAddress!, }), + utils.migration.getPendingMigrations.invalidate({ + ownerAddress: userAddress!, + }), ]); + // Show success message toast({ title: "Migration Aborted", - description: "The migration has been cancelled and the new wallet has been removed.", + description: "The migration has been cancelled and all related data has been cleaned up.", }); + + console.log("Migration abort completed successfully"); + + // Small delay to ensure UI updates are processed + setTimeout(() => { + // Reset migration state with abort flag to show success UI + resetMigrationStateWithAbortFlag(); + console.log("Migration state reset with abort flag"); + }, 100); } catch (error) { console.error("Failed to abort migration:", error); toast({ @@ -282,12 +543,12 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { variant: 
"destructive", }); } finally { - setIsAbortingMigration(false); + updateMigrationState({ isAborting: false }); } }; // Show migration steps - if (migrationStep !== null) { + if (migrationState.step !== null) { return (
{/* Progress Indicator */} @@ -297,10 +558,10 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { cardClassName="col-span-2" > @@ -311,7 +572,7 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { {/* Step Content */}
- {migrationStep === 0 && ( + {migrationState.step === MIGRATION_STEPS.PRE_CHECKS && ( )} - {migrationStep === 1 && ( + {migrationState.step === MIGRATION_STEPS.CREATE_WALLET && ( )} - {migrationStep === 2 && ( + {migrationState.step === MIGRATION_STEPS.PROXY_SETUP && (!existingProxies || existingProxies.length === 0) && ( )} - {migrationStep === 3 && ( + {/* Show message if wallet already has proxies */} + {migrationState.step === MIGRATION_STEPS.PROXY_SETUP && existingProxies && existingProxies.length > 0 && ( +
+
+
+ +
+

Proxies Already Configured

+

+ This wallet already has {existingProxies.length} {existingProxies.length === 1 ? 'proxy' : 'proxies'} configured. + You can proceed directly to fund transfer.

+
+
+ +
+
+ )} + + {migrationState.step === MIGRATION_STEPS.FUND_TRANSFER && ( )} - {migrationStep === 4 && ( + {migrationState.step === MIGRATION_STEPS.PROXY_TRANSFER && ( + + )} + + {migrationState.step === MIGRATION_STEPS.COMPLETE && ( )}
@@ -358,6 +648,38 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { ); } + // Show abort success state + if (migrationState.hasAborted) { + console.log("Rendering abort success state with migration state:", migrationState); + return ( + +
+ + + + Migration successfully aborted. All wallets, migration records, and references have been removed. You can start a new migration when ready. + + + +
+ +
+
+
+ ); + } + // Show initial migration card return (
- {(appWallet as any).migrationTargetWalletId ? ( + {(appWallet as any).migrationTargetWalletId || migrationState.newWalletId ? ( Migration In Progress: You have an ongoing migration. Click "Continue Migration" to resume where you left off.
- Debug: Migration Target ID: {(appWallet as any).migrationTargetWalletId} + Debug: Migration Target ID: {(appWallet as any).migrationTargetWalletId || migrationState.newWalletId}
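The consolidated MigrationState and the MIGRATION_STEPS map above turn the wizard into a small state machine: each handler advances a single field and mirrors the transition to the backend record. A condensed sketch of that pattern, using only names that appear in this diff:

// Sketch: advancing the wizard only touches `step`; the partial-update helper
// spreads the previous state, so ids and flags are preserved.
updateMigrationState({ step: MIGRATION_STEPS.FUND_TRANSFER });

// Mirror the same transition in the server-side migration record, as the step handlers do.
if (migrationState.migrationId) {
  updateMigrationStep({
    migrationId: migrationState.migrationId,
    currentStep: MIGRATION_STEPS.FUND_TRANSFER,
    status: "in_progress",
  });
}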
@@ -425,13 +747,13 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) {
- {((appWallet as any).migrationTargetWalletId || newWalletId || migrationStep !== null) && ( + {((appWallet as any).migrationTargetWalletId || migrationState.newWalletId || migrationState.step !== null) && (
@@ -172,7 +319,7 @@ export default function FundTransferStep({ ); } - if (!newWallet) { + if (!newWallet || !newWallet.id) { return ( - Failed to load new wallet information. Please ensure the new wallet was created successfully. + Failed to load new wallet information. Please ensure the new wallet + was created successfully.
@@ -206,8 +354,8 @@ export default function FundTransferStep({ - This will transfer all funds from your current wallet to the new wallet. - This action cannot be undone. + This will transfer all funds from your current wallet to the new + wallet. This action cannot be undone.
@@ -218,14 +366,16 @@ export default function FundTransferStep({ description="Funds to be transferred" >
-
+

ADA Balance

Native currency

- {currentBalance ? numberWithCommas(currentBalance.toFixed(2)) : "0.00"} ₳ + {currentBalance + ? numberWithCommas(Number(currentBalance)) + " ₳" + : "0.00 ₳"}

@@ -234,10 +384,15 @@ export default function FundTransferStep({

Other Assets

{nonAdaAssets.map((asset, index) => ( -
+

{asset.unit}

-

Custom asset

+

+ Custom asset +

{asset.quantity}

@@ -248,12 +403,12 @@ export default function FundTransferStep({ )} {currentUtxos.length === 0 && ( -
- +
+
-

No Funds

+

No Funds to Transfer

- There are no funds in the current wallet to transfer. + The current wallet has no funds to transfer. You can continue to the next step.

@@ -262,25 +417,35 @@ export default function FundTransferStep({ {/* New Wallet Information */} - +
-
-

Wallet Details

+
+

Wallet Details

Name: - {newWallet.name} + + {newWallet?.name || "Loading..."} +
Address: - {newWallet.address.slice(0, 20)}... + + {newWallet + ? (() => { + const address = generateNewWalletAddress(newWallet); + return address + ? `${address.slice(0, 20)}...` + : "Generating..."; + })() + : "Loading..."} +
Signers: - {newWallet.signersAddresses.length} + + {newWallet?.signersAddresses?.length || 0} +
@@ -293,13 +458,13 @@ export default function FundTransferStep({ title="Transfer Complete" description="Fund transfer has been initiated" > -
+

Transfer Initiated

- The fund transfer transaction has been created and is pending signatures. - You can view it in the transactions section. + The fund transfer transaction has been created and is pending + signatures. You can view it in the transactions section.

@@ -312,40 +477,38 @@ export default function FundTransferStep({ description="Complete the fund transfer" cardClassName="col-span-2" > -
- {!transferComplete ? ( ) : ( - )}
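The balance summary in FundTransferStep splits the wallet's UTxO contents into an ADA figure and a list of other assets, following the same Map-plus-BigInt aggregation used by the proxy balance code earlier in this patch. A self-contained sketch of that aggregation (the helper name is illustrative; the UTxO shape is the one from @meshsdk/core):

import type { UTxO } from "@meshsdk/core";

// Illustrative helper: sums asset quantities across UTxOs with BigInt to avoid
// precision loss, then separates lovelace from native assets for display.
function summarizeUtxos(utxos: UTxO[]) {
  const totals = new Map<string, bigint>();
  for (const utxo of utxos) {
    for (const asset of utxo.output.amount) {
      totals.set(asset.unit, (totals.get(asset.unit) ?? 0n) + BigInt(asset.quantity));
    }
  }
  const lovelace = totals.get("lovelace") ?? 0n;
  const nonAdaAssets = Array.from(totals.entries())
    .filter(([unit]) => unit !== "lovelace")
    .map(([unit, quantity]) => ({ unit, quantity: quantity.toString() }));
  return { adaBalance: Number(lovelace) / 1_000_000, nonAdaAssets };
}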
diff --git a/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx b/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx index 23b306a0..fc043358 100644 --- a/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx +++ b/src/components/pages/wallet/info/migration/MigrationCompleteStep.tsx @@ -24,6 +24,11 @@ export default function MigrationCompleteStep({ const router = useRouter(); const [isCompleting, setIsCompleting] = useState(false); + // Define mutations + const { mutateAsync: clearMigrationTarget } = api.wallet.clearMigrationTarget.useMutation(); + const { mutateAsync: archiveWallet } = api.wallet.archiveWallet.useMutation(); + const utils = api.useUtils(); + // Get new wallet data const { data: newWalletData, isLoading: isLoadingNewWallet } = api.wallet.getWallet.useQuery( { @@ -39,17 +44,22 @@ export default function MigrationCompleteStep({ setIsCompleting(true); try { // Clear migration target from old wallet - await api.wallet.clearMigrationTarget.mutate({ + await clearMigrationTarget({ + walletId: appWallet.id, + }); + + // Archive the old wallet + await archiveWallet({ walletId: appWallet.id, }); // Invalidate queries to refresh UI - await api.useContext().wallet.getWallet.invalidate(); - await api.useContext().wallet.getUserWallets.invalidate(); + await utils.wallet.getWallet.invalidate(); + await utils.wallet.getUserWallets.invalidate(); toast({ title: "Migration Complete", - description: "Your wallet migration has been completed successfully!", + description: "Your wallet migration has been completed successfully! The old wallet has been archived.", }); // Navigate to the new wallet @@ -109,6 +119,7 @@ export default function MigrationCompleteStep({
• Your new wallet is ready to use
• All funds have been transferred
• Proxy settings have been updated
+ • Old wallet will be archived
• You can now use your new wallet for transactions
@@ -139,8 +150,8 @@ export default function MigrationCompleteStep({ - Important: Your old wallet configuration is still available but is no longer - the active migration target. You can continue using your new wallet for all future transactions. + Important: Your old wallet will be archived after migration completion. + You can continue using your new wallet for all future transactions.
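MigrationCompleteStep now resolves its tRPC mutations through useMutation hooks and refreshes cached queries via api.useUtils() instead of calling the router object directly. A condensed sketch of that pattern with the procedure names used above:

// Inside the component body: hook-based mutations expose an awaitable mutateAsync.
const { mutateAsync: clearMigrationTarget } = api.wallet.clearMigrationTarget.useMutation();
const { mutateAsync: archiveWallet } = api.wallet.archiveWallet.useMutation();
const utils = api.useUtils();

async function finishMigration(walletId: string) {
  await clearMigrationTarget({ walletId });
  await archiveWallet({ walletId });
  // Invalidate the wallet queries so the UI refetches fresh data.
  await Promise.all([
    utils.wallet.getWallet.invalidate(),
    utils.wallet.getUserWallets.invalidate(),
  ]);
}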
    diff --git a/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx b/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx index 4cf84e78..83149bff 100644 --- a/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx +++ b/src/components/pages/wallet/info/migration/NewWalletCreationStep.tsx @@ -20,19 +20,36 @@ export default function NewWalletCreationStep({ onContinue }: NewWalletCreationStepProps) { const walletFlow = useMigrationWalletFlowState(appWallet); + const hasAttemptedTemporaryWallet = React.useRef(false); const handleCreateWallet = async () => { - await walletFlow.createMigrationWallet(); + const finalWalletId = await walletFlow.createMigrationWallet(); + + if (finalWalletId !== null) { + onContinue(finalWalletId); + } }; - // Watch for newWalletId changes and continue when wallet is created + // Create temporary wallet on mount to enable invite link React.useEffect(() => { - console.log("NewWalletCreationStep: newWalletId changed", walletFlow.newWalletId); - if (walletFlow.newWalletId) { - console.log("NewWalletCreationStep: calling onContinue with", walletFlow.newWalletId); - onContinue(walletFlow.newWalletId); + if (!walletFlow.newWalletId && + walletFlow.name && + walletFlow.signersAddresses.length > 0 && + !walletFlow.loading && + !hasAttemptedTemporaryWallet.current) { + hasAttemptedTemporaryWallet.current = true; + walletFlow.createTemporaryWallet(); } - }, [walletFlow.newWalletId, onContinue]); + }, [walletFlow.name, walletFlow.signersAddresses.length, walletFlow.newWalletId, walletFlow.loading, walletFlow.createTemporaryWallet]); + + // Don't automatically continue - let user decide when to proceed + // React.useEffect(() => { + // console.log("NewWalletCreationStep: newWalletId changed", walletFlow.newWalletId); + // if (walletFlow.newWalletId) { + // console.log("NewWalletCreationStep: calling onContinue with", walletFlow.newWalletId); + // onContinue(walletFlow.newWalletId); + // } + // }, [walletFlow.newWalletId, onContinue]); return (
    @@ -56,6 +73,8 @@ export default function NewWalletCreationStep({ setSignerDescriptions: walletFlow.setSignerDescriptions, signersStakeKeys: walletFlow.signersStakeKeys, setSignerStakeKeys: walletFlow.setSignerStakeKeys, + signersDRepKeys: walletFlow.signersDRepKeys, + setSignerDRepKeys: walletFlow.setSignerDRepKeys, addSigner: walletFlow.addSigner, removeSigner: walletFlow.removeSigner, }} @@ -89,16 +108,41 @@ export default function NewWalletCreationStep({ onSave={walletFlow.handleSaveAdvanced} /> + {/* Action Section */}
    - {/* Warning Message */} -
    - - - -

    - Important: Creation is final - signers and rules can not be changed afterwards. -

    + {/* Info Messages */} +
    + {!walletFlow.newWalletId && ( +
    + + + +

    + Step 1: Configure your wallet settings and generate an invite link to share with other signers. +

    +
    + )} + +
    + + + +

    + Important: Final wallet creation is permanent - signers and rules cannot be changed afterwards. +

    +
    + + {walletFlow.newWalletId && appWallet.migrationTargetWalletId && ( +
    + + + +

    + Final Wallet Already Created: The final wallet for this migration has already been created. You can only create one new wallet per migration. +

    +
    + )}
    {/* Action Buttons */} @@ -111,24 +155,63 @@ export default function NewWalletCreationStep({ Back - + + {!walletFlow.newWalletId ? ( + + ) : ( +
    + {/* Only show Create Final Wallet button if no final wallet has been created yet */} + {!appWallet.migrationTargetWalletId && ( + + )} + + {/* Show appropriate continue button based on whether final wallet exists */} + {appWallet.migrationTargetWalletId && ( + + )} +
    + )}
    diff --git a/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx b/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx new file mode 100644 index 00000000..1b98ab3e --- /dev/null +++ b/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx @@ -0,0 +1,233 @@ +import React, { useState, useEffect } from "react"; +import { Button } from "@/components/ui/button"; +import CardUI from "@/components/ui/card-content"; +import { Alert, AlertDescription } from "@/components/ui/alert"; +import { ArrowLeft, ArrowRight, Loader, AlertCircle, CheckCircle, Users } from "lucide-react"; +import { Wallet } from "@/types/wallet"; +import { api } from "@/utils/api"; +import { toast } from "@/hooks/use-toast"; + +interface ProxyTransferStepProps { + appWallet: Wallet; + newWalletId: string; + onBack: () => void; + onContinue: () => void; +} + +export default function ProxyTransferStep({ + appWallet, + newWalletId, + onBack, + onContinue +}: ProxyTransferStepProps) { + const [isTransferring, setIsTransferring] = useState(false); + const [transferComplete, setTransferComplete] = useState(false); + + // Get existing proxies for the current wallet + const { data: existingProxies, isLoading: isLoadingProxies } = api.proxy.getProxiesByUserOrWallet.useQuery({ + walletId: appWallet.id, + }); + + // Mutation to transfer proxies + const { mutate: transferProxies } = api.proxy.transferProxies.useMutation({ + onSuccess: () => { + setTransferComplete(true); + toast({ + title: "Proxies Transferred", + description: "All proxy registrations have been transferred to the new wallet.", + }); + // Automatically proceed to the next step after a short delay + setTimeout(() => { + onContinue(); + }, 2000); + }, + onError: (error) => { + console.error("Failed to transfer proxies:", error); + toast({ + title: "Transfer Failed", + description: "Failed to transfer proxy registrations. Please try again.", + variant: "destructive", + }); + }, + }); + + const handleTransferProxies = async () => { + if (!existingProxies || existingProxies.length === 0) return; + + setIsTransferring(true); + try { + await transferProxies({ + fromWalletId: appWallet.id, + toWalletId: newWalletId, + }); + } catch (error) { + console.error("Failed to transfer proxies:", error); + } finally { + setIsTransferring(false); + } + }; + + if (isLoadingProxies) { + return ( + +
    + + Loading proxy information... +
    +
    + ); + } + + const hasProxies = existingProxies && existingProxies.length > 0; + + return ( +
    + {/* Header */} + + + + + This will transfer all proxy registrations from your current wallet to the new wallet. + This ensures your governance participation continues seamlessly. + + + + + {/* Proxy Information */} + +
    + {hasProxies ? ( +
    + {existingProxies.map((proxy, index) => ( +
    +
    + +
    +

    Proxy {index + 1}

    +

    + {proxy.name || `Proxy ${index + 1}`} +

    +
    +
    +
    +

    Active

    +

    Will be transferred

    +
    +
    + ))} +
    + ) : ( +
    + +
    +

    No Proxies Found

    +

    + There are no proxy registrations to transfer. You can continue to the next step. +

    +
    +
    + )} +
    +
    + + {/* New Wallet Information */} + +
    +
    +

    Transfer Details

    +
    +
    + From Wallet: + {appWallet.name} +
    +
    + To Wallet: + New Migrated Wallet +
    +
    + Proxies to Transfer: + {existingProxies?.length || 0} +
    +
    +
    +
    +
    + + {/* Transfer Status */} + {transferComplete && ( + +
    + +
    +

    Proxies Transferred

    +

    + All proxy registrations have been successfully transferred to the new wallet. + You can now complete the migration. +

    +
    +
    +
    + )} + + {/* Action Buttons */} + +
    + + {!transferComplete ? ( + + ) : ( + + )} +
    +
    +
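// Note on handleTransferProxies above: `transferProxies` is the fire-and-forget
// `mutate` returned by useMutation, so the `await` does not wait for the request
// and the finally block clears `isTransferring` immediately. If the spinner should
// stay up until the call settles, the awaitable `mutateAsync` variant is the usual
// choice. A sketch under that assumption, keeping the same callbacks as above:
const { mutateAsync: transferProxiesAsync } =
  api.proxy.transferProxies.useMutation({ /* onSuccess / onError as above */ });

const handleTransferProxiesAwaited = async () => {
  if (!existingProxies?.length) return;
  setIsTransferring(true);
  try {
    await transferProxiesAsync({ fromWalletId: appWallet.id, toWalletId: newWalletId });
  } finally {
    setIsTransferring(false);
  }
};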
    + ); +} diff --git a/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx index 5ab38f35..264a031c 100644 --- a/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx +++ b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx @@ -30,6 +30,8 @@ export interface MigrationWalletFlowState { setSignerDescriptions: React.Dispatch>; signersStakeKeys: string[]; setSignerStakeKeys: React.Dispatch>; + signersDRepKeys: string[]; + setSignerDRepKeys: React.Dispatch>; addSigner: () => void; removeSigner: (index: number) => void; @@ -62,19 +64,21 @@ export interface MigrationWalletFlowState { newWalletId?: string; // Actions - createMigrationWallet: () => Promise; + createTemporaryWallet: () => Promise; + createMigrationWallet: () => Promise; // Save callbacks for create page handleSaveWalletInfo: (newName: string, newDescription: string) => void; - handleSaveSigners: (newAddresses: string[], newDescriptions: string[], newStakeKeys: string[]) => void; - handleSaveSignatureRules: (numRequired: number) => void; - handleSaveAdvanced: (newStakeKey: string, scriptType: "all" | "any" | "atLeast") => void; + handleSaveSigners: (newAddresses: string[], newDescriptions: string[], newStakeKeys: string[], newDRepKeys: string[]) => Promise; + handleSaveSignatureRules: (numRequired: number) => Promise; + handleSaveAdvanced: (newStakeKey: string, scriptType: "all" | "any" | "atLeast") => Promise; } export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletFlowState { const [signersAddresses, setSignerAddresses] = useState([]); const [signersDescriptions, setSignerDescriptions] = useState([]); const [signersStakeKeys, setSignerStakeKeys] = useState([]); + const [signersDRepKeys, setSignerDRepKeys] = useState([]); const [numRequiredSigners, setNumRequiredSigners] = useState(1); const [name, setName] = useState(""); const [description, setDescription] = useState(""); @@ -117,7 +121,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerAddresses(walletData.signersAddresses || []); setSignerDescriptions(walletData.signersDescriptions || []); setNumRequiredSigners(walletData.numRequiredSigners || 1); - setNativeScriptType(walletData.type || "atLeast"); + setNativeScriptType((walletData.type as "atLeast" | "all" | "any") || "atLeast"); setStakeKey(walletData.stakeCredentialHash || ""); // Filter and process stake keys @@ -134,6 +138,9 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF }); setSignerStakeKeys(validStakeKeys); + + // Initialize DRep keys (empty for now, can be added later) + setSignerDRepKeys((walletData as any).signersDRepKeys || []); } }, [walletData]); @@ -146,9 +153,10 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerAddresses(existingNewWallet.signersAddresses || []); setSignerDescriptions(existingNewWallet.signersDescriptions || []); setSignerStakeKeys(existingNewWallet.signersStakeKeys || []); + setSignerDRepKeys((existingNewWallet as any).signersDRepKeys || []); setNumRequiredSigners(existingNewWallet.numRequiredSigners || 1); setStakeKey(existingNewWallet.stakeCredentialHash || ""); - setNativeScriptType(existingNewWallet.scriptType || "atLeast"); + setNativeScriptType((existingNewWallet.scriptType as "atLeast" | "all" | "any") || "atLeast"); } }, [existingNewWallet]); @@ -236,7 +244,7 @@ export function 
useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF }, }); - const { mutate: updateNewWallet } = api.wallet.updateNewWallet.useMutation({ + const { mutateAsync: updateNewWallet } = api.wallet.updateNewWallet.useMutation({ onSuccess: () => { toast({ title: "Saved", @@ -254,35 +262,8 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF }, }); - const { mutate: createWallet } = api.wallet.createWallet.useMutation({ - onSuccess: (data) => { - console.log("Wallet created successfully:", data); - - // Set migration target after successful wallet creation - setMigrationTarget({ - walletId: appWallet.id, - newWalletId: data.id, - }); - - setNewWalletId(data.id); - setLoading(false); - toast({ - title: "Success", - description: "New wallet created successfully!", - duration: 3000, - }); - }, - onError: (e) => { - console.error("Failed to create wallet:", e); - setLoading(false); - toast({ - title: "Error", - description: "Failed to create new wallet", - variant: "destructive", - duration: 3000, - }); - }, - }); + const { mutate: createWallet } = api.wallet.createWallet.useMutation(); + const { mutateAsync: deleteNewWallet } = api.wallet.deleteNewWallet.useMutation(); const { mutate: setMigrationTarget } = api.wallet.setMigrationTarget.useMutation({ onSuccess: () => { @@ -304,6 +285,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerAddresses([...signersAddresses, ""]); setSignerDescriptions([...signersDescriptions, ""]); setSignerStakeKeys([...signersStakeKeys, ""]); + setSignerDRepKeys([...signersDRepKeys, ""]); } function removeSigner(index: number) { @@ -318,6 +300,10 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF const updatedStakeKeys = [...signersStakeKeys]; updatedStakeKeys.splice(index, 1); setSignerStakeKeys(updatedStakeKeys); + + const updatedDRepKeys = [...signersDRepKeys]; + updatedDRepKeys.splice(index, 1); + setSignerDRepKeys(updatedDRepKeys); } // Adjust numRequiredSigners if it exceeds the number of signers @@ -327,14 +313,17 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF } }, [signersAddresses.length, numRequiredSigners]); - // Create migration wallet - async function createMigrationWallet() { - console.log("createMigrationWallet called", { multisigWallet, name, signersAddresses }); + // Create temporary wallet for invite link (if not already created) + const createTemporaryWallet = useCallback(async () => { + if (newWalletId) { + return; // Already created + } + - if (!multisigWallet) { + if (!name || signersAddresses.length === 0) { toast({ title: "Error", - description: "Invalid wallet configuration. 
Please check your settings.", + description: "Please provide wallet name and at least one signer before creating invite link.", variant: "destructive", }); return; @@ -342,33 +331,123 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setLoading(true); try { - const { scriptCbor } = multisigWallet.getScript(); - if (!scriptCbor) { - throw new Error("Failed to generate script CBOR"); - } - - console.log("Creating wallet with data:", { - name, - description, - signersAddresses, - signersDescriptions, - signersStakeKeys, - numRequiredSigners, - stakeCredentialHash: stakeKey || undefined, - type: nativeScriptType, - }); - - // Create the new wallet directly - createWallet({ + const walletData = { name: name, description: description, signersAddresses: signersAddresses, signersDescriptions: signersDescriptions, signersStakeKeys: signersStakeKeys, + signersDRepKeys: signersDRepKeys, numRequiredSigners: numRequiredSigners, - scriptCbor: scriptCbor, + ownerAddress: userAddress || "", stakeCredentialHash: stakeKey || undefined, - type: nativeScriptType, + scriptType: nativeScriptType || undefined, + }; + + + // Create temporary wallet for invite link + createNewWallet(walletData); + } catch (error) { + console.error("Failed to create temporary wallet:", error); + setLoading(false); + toast({ + title: "Error", + description: "Failed to create temporary wallet. Please try again.", + variant: "destructive", + }); + } + }, [newWalletId, name, signersAddresses, description, signersDescriptions, signersStakeKeys, numRequiredSigners, stakeKey, nativeScriptType, createNewWallet, toast]); + + // Create final migration wallet + async function createMigrationWallet(): Promise { + + if (!multisigWallet) { + toast({ + title: "Error", + description: "Invalid wallet configuration. Please check your settings.", + variant: "destructive", + }); + return null; + } + + if (!newWalletId) { + toast({ + title: "Error", + description: "Please create the temporary wallet first to generate invite link.", + variant: "destructive", + }); + return null; + } + + // Check if final wallet has already been created + if (appWallet.migrationTargetWalletId) { + toast({ + title: "Error", + description: "Final wallet has already been created for this migration. 
You can only create one new wallet per migration.", + variant: "destructive", + }); + return null; + } + + setLoading(true); + try { + const { scriptCbor } = multisigWallet.getScript(); + if (!scriptCbor) { + throw new Error("Failed to generate script CBOR"); + } + + + // Create the final wallet using the mutation + return new Promise((resolve, reject) => { + createWallet({ + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + signersDRepKeys: signersDRepKeys, + numRequiredSigners: numRequiredSigners, + scriptCbor: scriptCbor, + stakeCredentialHash: stakeKey || undefined, + type: nativeScriptType, + }, { + onSuccess: async (data) => { + + // Set migration target after successful wallet creation + setMigrationTarget({ + walletId: appWallet.id, + migrationTargetWalletId: data.id, + }); + + // Clean up the temporary NewWallet + if (newWalletId && newWalletId !== data.id) { + try { + await deleteNewWallet({ walletId: newWalletId }); + } catch (error) { + } + } + + setNewWalletId(data.id); + setLoading(false); + toast({ + title: "Success", + description: "New wallet created successfully!", + duration: 3000, + }); + resolve(data.id); + }, + onError: (error) => { + console.error("Failed to create wallet:", error); + setLoading(false); + toast({ + title: "Error", + description: "Failed to create new wallet", + variant: "destructive", + duration: 3000, + }); + reject(error); + } + }); }); } catch (error) { @@ -379,6 +458,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF description: "Failed to create new wallet. Please try again.", variant: "destructive", }); + return null; } } @@ -395,52 +475,106 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF signersAddresses: signersAddresses, signersDescriptions: signersDescriptions, signersStakeKeys: signersStakeKeys, + signersDRepKeys: signersDRepKeys, numRequiredSigners: numRequiredSigners, stakeCredentialHash: stakeKey || undefined, - scriptType: nativeScriptType, + scriptType: nativeScriptType || undefined, }); } }, [newWalletId, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); - const handleSaveSigners = useCallback((newAddresses: string[], newDescriptions: string[], newStakeKeys: string[]) => { - setSignerAddresses(newAddresses); - setSignerDescriptions(newDescriptions); - setSignerStakeKeys(newStakeKeys); + const handleSaveSigners = useCallback(async (newAddresses: string[], newDescriptions: string[], newStakeKeys: string[], newDRepKeys: string[]) => { + // Ensure all arrays are defined and filter out undefined values + const safeAddresses = (newAddresses || []).filter(addr => addr !== undefined); + const safeDescriptions = (newDescriptions || []).filter(desc => desc !== undefined); + const safeStakeKeys = (newStakeKeys || []).filter(key => key !== undefined); + const safeDRepKeys = (newDRepKeys || []).filter(key => key !== undefined); + + // Ensure all arrays have the same length + const maxLength = Math.max(safeAddresses.length, safeDescriptions.length, safeStakeKeys.length, safeDRepKeys.length); + + const paddedAddresses = [...safeAddresses]; + const paddedDescriptions = [...safeDescriptions]; + const paddedStakeKeys = [...safeStakeKeys]; + const paddedDRepKeys = [...safeDRepKeys]; + + // Pad arrays to same length with empty strings + while (paddedAddresses.length < maxLength) 
paddedAddresses.push(""); + while (paddedDescriptions.length < maxLength) paddedDescriptions.push(""); + while (paddedStakeKeys.length < maxLength) paddedStakeKeys.push(""); + while (paddedDRepKeys.length < maxLength) paddedDRepKeys.push(""); + + setSignerAddresses(paddedAddresses); + setSignerDescriptions(paddedDescriptions); + setSignerStakeKeys(paddedStakeKeys); + setSignerDRepKeys(paddedDRepKeys); if (newWalletId) { - updateNewWallet({ + const updateData = { walletId: newWalletId, name: name, description: description, - signersAddresses: newAddresses, - signersDescriptions: newDescriptions, - signersStakeKeys: newStakeKeys, + signersAddresses: paddedAddresses, + signersDescriptions: paddedDescriptions, + signersStakeKeys: paddedStakeKeys, + signersDRepKeys: paddedDRepKeys, numRequiredSigners: numRequiredSigners, stakeCredentialHash: stakeKey || undefined, - scriptType: nativeScriptType, - }); + scriptType: nativeScriptType || undefined, + }; + + // Validate data before sending + if (!updateData.walletId || !updateData.name) { + toast({ + title: "Error", + description: "Invalid wallet data. Please try again.", + variant: "destructive", + }); + return; + } + + // Ensure all arrays contain only strings + const validatedData = { + ...updateData, + signersAddresses: paddedAddresses.map(addr => String(addr || "")), + signersDescriptions: paddedDescriptions.map(desc => String(desc || "")), + signersStakeKeys: paddedStakeKeys.map(key => String(key || "")), + signersDRepKeys: paddedDRepKeys.map(key => String(key || "")), + }; + + + try { + await updateNewWallet(validatedData); + } catch (error) { + console.error("Failed to update new wallet:", error); + } } }, [newWalletId, name, description, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); - const handleSaveSignatureRules = useCallback((numRequired: number) => { + const handleSaveSignatureRules = useCallback(async (numRequired: number) => { setNumRequiredSigners(numRequired); if (newWalletId) { - updateNewWallet({ - walletId: newWalletId, - name: name, - description: description, - signersAddresses: signersAddresses, - signersDescriptions: signersDescriptions, - signersStakeKeys: signersStakeKeys, - numRequiredSigners: numRequired, - stakeCredentialHash: stakeKey || undefined, - scriptType: nativeScriptType, - }); + try { + await updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: signersStakeKeys, + signersDRepKeys: signersDRepKeys, + numRequiredSigners: numRequired, + stakeCredentialHash: stakeKey || undefined, + scriptType: nativeScriptType || undefined, + }); + } catch (error) { + console.error("Failed to update signature rules:", error); + } } }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, stakeKey, nativeScriptType, updateNewWallet]); - const handleSaveAdvanced = useCallback((newStakeKey: string, scriptType: "all" | "any" | "atLeast") => { + const handleSaveAdvanced = useCallback(async (newStakeKey: string, scriptType: "all" | "any" | "atLeast") => { setStakeKey(newStakeKey); setNativeScriptType(scriptType); @@ -454,17 +588,22 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF } if (newWalletId) { - updateNewWallet({ - walletId: newWalletId, - name: name, - description: description, - signersAddresses: signersAddresses, - signersDescriptions: signersDescriptions, - signersStakeKeys: updatedSignerStakeKeys, - 
numRequiredSigners: numRequiredSigners, - stakeCredentialHash: newStakeKey || undefined, - scriptType: scriptType, - }); + try { + await updateNewWallet({ + walletId: newWalletId, + name: name, + description: description, + signersAddresses: signersAddresses, + signersDescriptions: signersDescriptions, + signersStakeKeys: updatedSignerStakeKeys, + signersDRepKeys: signersDRepKeys, + numRequiredSigners: numRequiredSigners, + stakeCredentialHash: newStakeKey || null, + scriptType: scriptType, + }); + } catch (error) { + console.error("Failed to update advanced settings:", error); + } } }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, updateNewWallet]); @@ -481,9 +620,10 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF signersAddresses: signersAddresses, signersDescriptions: signersDescriptions, signersStakeKeys: signersStakeKeys, + signersDRepKeys: signersDRepKeys, numRequiredSigners: numRequiredSigners, stakeCredentialHash: null, - scriptType: nativeScriptType, + scriptType: nativeScriptType || undefined, }); } @@ -515,6 +655,8 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerDescriptions, signersStakeKeys, setSignerStakeKeys, + signersDRepKeys, + setSignerDRepKeys, addSigner, removeSigner, @@ -547,6 +689,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF newWalletId, // Actions + createTemporaryWallet, createMigrationWallet, // Save callbacks diff --git a/src/hooks/useProxy.ts b/src/hooks/useProxy.ts index 44107df0..5fb0eb4a 100644 --- a/src/hooks/useProxy.ts +++ b/src/hooks/useProxy.ts @@ -1,48 +1,56 @@ import { create } from "zustand"; -import { persist } from "zustand/middleware"; +import { persist, createJSONStorage } from "zustand/middleware"; +import { zustandStorage } from "@/lib/indexeddb"; +// Simple proxy state interface interface ProxyState { - isProxyEnabled: boolean; selectedProxyId: string; - toggleProxy: () => void; - setProxyEnabled: (enabled: boolean) => void; setSelectedProxy: (proxyId: string) => void; clearSelectedProxy: () => void; } +// Create a simple proxy store export const useProxyStore = create()( persist( (set) => ({ - isProxyEnabled: false, selectedProxyId: "", - toggleProxy: () => set((state) => ({ isProxyEnabled: !state.isProxyEnabled })), - setProxyEnabled: (enabled: boolean) => set({ isProxyEnabled: enabled }), setSelectedProxy: (proxyId: string) => set({ selectedProxyId: proxyId }), clearSelectedProxy: () => set({ selectedProxyId: "" }), }), { - name: "proxy-settings", // unique name for localStorage key + name: "proxy-settings", + storage: createJSONStorage(() => zustandStorage), } ) ); -// Hook for easy access to proxy state +// Re-export from the main proxy store +export { + useProxyData, + useSelectedProxy, + useProxyActions +} from "@/lib/zustand/proxy"; + +// Convenience hook for backward compatibility export const useProxy = () => { - const { - isProxyEnabled, - selectedProxyId, - toggleProxy, - setProxyEnabled, - setSelectedProxy, - clearSelectedProxy - } = useProxyStore(); + const selectedProxyId = useProxyStore((state) => state.selectedProxyId); + const setSelectedProxy = useProxyStore((state) => state.setSelectedProxy); + const clearSelectedProxy = useProxyStore((state) => state.clearSelectedProxy); + + // Proxy is enabled when a proxy is selected + const isProxyEnabled = !!selectedProxyId; + + // Enhanced clearSelectedProxy with debugging + const 
enhancedClearSelectedProxy = () => { + console.log("useProxy: Clearing selected proxy, current:", selectedProxyId); + clearSelectedProxy(); + console.log("useProxy: Proxy cleared"); + }; return { isProxyEnabled, selectedProxyId, - toggleProxy, - setProxyEnabled, setSelectedProxy, - clearSelectedProxy, + clearSelectedProxy: enhancedClearSelectedProxy, }; }; diff --git a/src/lib/zustand/proxy.ts b/src/lib/zustand/proxy.ts new file mode 100644 index 00000000..448f4563 --- /dev/null +++ b/src/lib/zustand/proxy.ts @@ -0,0 +1,284 @@ +import { create } from "zustand"; +import { persist, createJSONStorage } from "zustand/middleware"; +import { zustandStorage } from "../indexeddb"; +import { MeshProxyContract } from "@/components/multisig/proxy/offchain"; +import { getTxBuilder } from "@/utils/get-tx-builder"; +import { useSiteStore } from "./site"; + +// Types for proxy data +export interface ProxyDrepInfo { + active: boolean; + amount: string; + deposit: string; + url?: string; + hash?: string; +} + +export interface ProxyData { + id: string; + proxyAddress: string; + authTokenId: string; + paramUtxo: string; + description: string | null; + isActive: boolean; + createdAt: Date; + balance?: Array<{ unit: string; quantity: string }>; + drepId?: string; + drepInfo?: ProxyDrepInfo; + lastUpdated?: number; +} + +interface ProxyState { + // Proxy data + proxies: { [walletId: string]: ProxyData[] }; + selectedProxyId: string; + isProxyEnabled: boolean; + + // Loading states + loading: { [walletId: string]: boolean }; + drepLoading: { [proxyId: string]: boolean }; + + // Error states + errors: { [walletId: string]: string | null }; + drepErrors: { [proxyId: string]: string | null }; + + // Actions + setProxies: (walletId: string, proxies: ProxyData[]) => void; + setSelectedProxy: (proxyId: string) => void; + setProxyEnabled: (enabled: boolean) => void; + toggleProxy: () => void; + clearSelectedProxy: () => void; + + // Loading actions + setLoading: (walletId: string, loading: boolean) => void; + setDrepLoading: (proxyId: string, loading: boolean) => void; + + // Error actions + setError: (walletId: string, error: string | null) => void; + setDrepError: (proxyId: string, error: string | null) => void; + + // Data fetching actions + fetchProxyBalance: (walletId: string, proxyId: string, proxyAddress: string, network: string) => Promise; + fetchProxyDrepInfo: (walletId: string, proxyId: string, proxyAddress: string, authTokenId: string, scriptCbor: string, network: string, paramUtxo: string) => Promise; + + // Utility actions + updateProxyData: (walletId: string, proxyId: string, updates: Partial) => void; + clearProxyData: (walletId: string) => void; +} + +export const useProxyStore = create()( + persist( + (set, get) => ({ + // Initial state + proxies: {}, + selectedProxyId: "", + isProxyEnabled: false, + loading: {}, + drepLoading: {}, + errors: {}, + drepErrors: {}, + + // Basic actions + setProxies: (walletId, proxies) => + set((state) => ({ + proxies: { ...state.proxies, [walletId]: proxies }, + loading: { ...state.loading, [walletId]: false }, + errors: { ...state.errors, [walletId]: null }, + })), + + setSelectedProxy: (proxyId) => + set({ selectedProxyId: proxyId }), + + setProxyEnabled: (enabled) => + set({ isProxyEnabled: enabled }), + + toggleProxy: () => + set((state) => ({ isProxyEnabled: !state.isProxyEnabled })), + + clearSelectedProxy: () => + set({ selectedProxyId: "" }), + + // Loading actions + setLoading: (walletId, loading) => + set((state) => ({ + loading: { ...state.loading, 
[walletId]: loading }, + })), + + setDrepLoading: (proxyId, loading) => + set((state) => ({ + drepLoading: { ...state.drepLoading, [proxyId]: loading }, + })), + + // Error actions + setError: (walletId, error) => + set((state) => ({ + errors: { ...state.errors, [walletId]: error }, + })), + + setDrepError: (proxyId, error) => + set((state) => ({ + drepErrors: { ...state.drepErrors, [proxyId]: error }, + })), + + // Fetch proxy balance + fetchProxyBalance: async (walletId, proxyId, proxyAddress, network) => { + try { + const state = get(); + const blockchainProvider = (await import("@/utils/get-provider")).getProvider(parseInt(network)); + + const balance = await blockchainProvider.fetchAddressUTxOs(proxyAddress); + const balanceData = balance.map(utxo => ({ + unit: utxo.output.amount[0]?.unit || "lovelace", + quantity: utxo.output.amount[0]?.quantity || "0", + })); + + // Update the specific proxy's balance + const currentState = get(); + const updatedProxies = currentState.proxies[walletId]?.map(proxy => + proxy.id === proxyId + ? { ...proxy, balance: balanceData, lastUpdated: Date.now() } + : proxy + ) || []; + + set((state) => ({ + proxies: { ...state.proxies, [walletId]: updatedProxies }, + })); + } catch (error) { + get().setError(walletId, `Failed to fetch balance for proxy ${proxyId}`); + } + }, + + // Fetch proxy DRep information + fetchProxyDrepInfo: async (walletId, proxyId, proxyAddress, authTokenId, scriptCbor, network, paramUtxo) => { + try { + get().setDrepLoading(proxyId, true); + get().setDrepError(proxyId, null); + + const txBuilder = getTxBuilder(parseInt(network)); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: undefined, + networkId: parseInt(network), + }, + { + paramUtxo: JSON.parse(paramUtxo || '{}'), + }, + scriptCbor, + ); + proxyContract.proxyAddress = proxyAddress; + + // Get DRep ID + const drepId = proxyContract.getDrepId(); + + // Get DRep status (now with caching and proper error handling) + const status = await proxyContract.getDrepStatus(); + const drepInfo: ProxyDrepInfo | undefined = status; + + // Update the specific proxy's DRep data + const currentState = get(); + const updatedProxies = currentState.proxies[walletId]?.map(proxy => + proxy.id === proxyId + ? { ...proxy, drepId, drepInfo, lastUpdated: Date.now() } + : proxy + ) || []; + + set((state) => ({ + proxies: { ...state.proxies, [walletId]: updatedProxies }, + drepLoading: { ...state.drepLoading, [proxyId]: false }, + drepErrors: { ...state.drepErrors, [proxyId]: null }, + })); + } catch (error) { + get().setDrepError(proxyId, `Failed to fetch DRep info for proxy ${proxyId}`); + get().setDrepLoading(proxyId, false); + } + }, + + // Update specific proxy data + updateProxyData: (walletId, proxyId, updates) => + set((state) => ({ + proxies: { + ...state.proxies, + [walletId]: state.proxies[walletId]?.map(proxy => + proxy.id === proxyId ? 
{ ...proxy, ...updates } : proxy + ) || [], + }, + })), + + // Clear all proxy data for a wallet + clearProxyData: (walletId) => + set((state) => { + const newProxies = { ...state.proxies }; + delete newProxies[walletId]; + const newLoading = { ...state.loading }; + delete newLoading[walletId]; + const newErrors = { ...state.errors }; + delete newErrors[walletId]; + + return { + proxies: newProxies, + loading: newLoading, + errors: newErrors, + }; + }), + }), + { + name: "proxy-store", + storage: createJSONStorage(() => zustandStorage), + // Only persist essential state, not loading/error states + partialize: (state) => ({ + proxies: state.proxies, + selectedProxyId: state.selectedProxyId, + isProxyEnabled: state.isProxyEnabled, + }), + } + ) +); + +// Convenience hooks for easier access +export const useProxyData = (walletId?: string) => { + const proxies = useProxyStore((state) => walletId ? state.proxies[walletId] || [] : []); + const loading = useProxyStore((state) => walletId ? state.loading[walletId] || false : false); + const error = useProxyStore((state) => walletId ? state.errors[walletId] || null : null); + + return { proxies, loading, error }; +}; + +export const useSelectedProxy = () => { + const selectedProxyId = useProxyStore((state) => state.selectedProxyId); + const isProxyEnabled = useProxyStore((state) => state.isProxyEnabled); + const proxies = useProxyStore((state) => state.proxies); + + // Find the selected proxy across all wallets + let selectedProxy: ProxyData | undefined; + for (const walletProxies of Object.values(proxies)) { + selectedProxy = walletProxies.find(proxy => proxy.id === selectedProxyId); + if (selectedProxy) break; + } + + return { selectedProxy, selectedProxyId, isProxyEnabled }; +}; + +export const useProxyActions = () => { + const setProxies = useProxyStore((state) => state.setProxies); + const setSelectedProxy = useProxyStore((state) => state.setSelectedProxy); + const setProxyEnabled = useProxyStore((state) => state.setProxyEnabled); + const toggleProxy = useProxyStore((state) => state.toggleProxy); + const clearSelectedProxy = useProxyStore((state) => state.clearSelectedProxy); + const fetchProxyBalance = useProxyStore((state) => state.fetchProxyBalance); + const fetchProxyDrepInfo = useProxyStore((state) => state.fetchProxyDrepInfo); + const updateProxyData = useProxyStore((state) => state.updateProxyData); + const clearProxyData = useProxyStore((state) => state.clearProxyData); + + return { + setProxies, + setSelectedProxy, + setProxyEnabled, + toggleProxy, + clearSelectedProxy, + fetchProxyBalance, + fetchProxyDrepInfo, + updateProxyData, + clearProxyData, + }; +}; diff --git a/src/server/api/root.ts b/src/server/api/root.ts index 00a28053..3fba00d6 100644 --- a/src/server/api/root.ts +++ b/src/server/api/root.ts @@ -5,6 +5,7 @@ import { transactionRouter } from "./routers/transactions"; import { signableRouter } from "./routers/signable"; import { ballotRouter } from "./routers/ballot"; import { proxyRouter } from "./routers/proxy"; +import { migrationRouter } from "./routers/migrations"; /** @@ -19,6 +20,7 @@ export const appRouter = createTRPCRouter({ signable: signableRouter, ballot: ballotRouter, proxy: proxyRouter, + migration: migrationRouter, }); // export type definition of API diff --git a/src/server/api/routers/migrations.ts b/src/server/api/routers/migrations.ts new file mode 100644 index 00000000..11b42971 --- /dev/null +++ b/src/server/api/routers/migrations.ts @@ -0,0 +1,152 @@ +import { z } from "zod"; +import { 
createTRPCRouter, publicProcedure } from "@/server/api/trpc"; + +export const migrationRouter = createTRPCRouter({ + // Get pending migrations for a user + getPendingMigrations: publicProcedure + .input(z.object({ ownerAddress: z.string() })) + .query(async ({ ctx, input }) => { + return ctx.db.migration.findMany({ + where: { + ownerAddress: input.ownerAddress, + status: { + in: ["pending", "in_progress"] + } + }, + orderBy: { + createdAt: "desc" + } + }); + }), + + // Get a specific migration by ID + getMigration: publicProcedure + .input(z.object({ migrationId: z.string() })) + .query(async ({ ctx, input }) => { + return ctx.db.migration.findUnique({ + where: { + id: input.migrationId + } + }); + }), + + // Create a new migration + createMigration: publicProcedure + .input(z.object({ + originalWalletId: z.string(), + ownerAddress: z.string(), + migrationData: z.any().optional() + })) + .mutation(async ({ ctx, input }) => { + return ctx.db.migration.create({ + data: { + originalWalletId: input.originalWalletId, + ownerAddress: input.ownerAddress, + migrationData: input.migrationData, + currentStep: 0, + status: "pending" + } + }); + }), + + // Update migration step + updateMigrationStep: publicProcedure + .input(z.object({ + migrationId: z.string(), + currentStep: z.number(), + status: z.enum(["pending", "in_progress", "completed", "failed", "cancelled"]).optional(), + newWalletId: z.string().optional(), + errorMessage: z.string().optional() + })) + .mutation(async ({ ctx, input }) => { + const updateData: any = { + currentStep: input.currentStep, + updatedAt: new Date() + }; + + if (input.status) { + updateData.status = input.status; + if (input.status === "completed") { + updateData.completedAt = new Date(); + } + } + + if (input.newWalletId) { + updateData.newWalletId = input.newWalletId; + } + + if (input.errorMessage) { + updateData.errorMessage = input.errorMessage; + } + + return ctx.db.migration.update({ + where: { + id: input.migrationId + }, + data: updateData + }); + }), + + // Update migration data + updateMigrationData: publicProcedure + .input(z.object({ + migrationId: z.string(), + migrationData: z.any() + })) + .mutation(async ({ ctx, input }) => { + return ctx.db.migration.update({ + where: { + id: input.migrationId + }, + data: { + migrationData: input.migrationData, + updatedAt: new Date() + } + }); + }), + + // Cancel a migration + cancelMigration: publicProcedure + .input(z.object({ migrationId: z.string() })) + .mutation(async ({ ctx, input }) => { + // Delete the migration record completely + return ctx.db.migration.delete({ + where: { + id: input.migrationId + } + }); + }), + + // Complete a migration + completeMigration: publicProcedure + .input(z.object({ migrationId: z.string() })) + .mutation(async ({ ctx, input }) => { + return ctx.db.migration.update({ + where: { + id: input.migrationId + }, + data: { + status: "completed", + completedAt: new Date(), + updatedAt: new Date() + } + }); + }), + + // Get migration by original wallet ID + getMigrationByOriginalWallet: publicProcedure + .input(z.object({ originalWalletId: z.string() })) + .query(async ({ ctx, input }) => { + return ctx.db.migration.findFirst({ + where: { + originalWalletId: input.originalWalletId, + status: { + in: ["pending", "in_progress"] + } + }, + orderBy: { + createdAt: "desc" + } + }); + }) +}); diff --git a/src/server/api/routers/proxy.ts b/src/server/api/routers/proxy.ts index 6781c94e..48b25ade 100644 --- a/src/server/api/routers/proxy.ts +++ b/src/server/api/routers/proxy.ts @@ -85,43 
+85,34 @@ export const proxyRouter = createTRPCRouter({ userAddress: z.string().optional(), })) .query(async ({ ctx, input }) => { - console.log("getProxiesByUserOrWallet called with:", input); - - const orConditions: any[] = []; - + // Prefer fetching by walletId when available if (input.walletId) { - orConditions.push({ walletId: input.walletId }); + return ctx.db.proxy.findMany({ + where: { + walletId: input.walletId, + isActive: true, + }, + orderBy: { createdAt: "desc" }, + }); } + // Fallback: fetch by user address if provided if (input.userAddress) { const user = await ctx.db.user.findUnique({ + where: { address: input.userAddress }, + }); + if (!user) return []; + return ctx.db.proxy.findMany({ where: { - address: input.userAddress, + userId: user.id, + isActive: true, }, + orderBy: { createdAt: "desc" }, }); - - if (user) { - orConditions.push({ userId: user.id }); - } } - if (orConditions.length === 0) { - console.log("No conditions found, returning empty array"); - return []; - } - - const result = await ctx.db.proxy.findMany({ - where: { - isActive: true, - OR: orConditions, - }, - orderBy: { - createdAt: "desc", - }, - }); - - console.log("Found proxies:", result.length, result); - return result; + // No criteria provided + return []; }), getProxyById: publicProcedure @@ -180,4 +171,38 @@ export const proxyRouter = createTRPCRouter({ }, }); }), + + transferProxies: publicProcedure + .input(z.object({ + fromWalletId: z.string(), + toWalletId: z.string(), + })) + .mutation(async ({ ctx, input }) => { + // Find all active proxies for the source wallet + const proxies = await ctx.db.proxy.findMany({ + where: { + walletId: input.fromWalletId, + isActive: true, + }, + }); + + if (proxies.length === 0) { + return { transferred: 0, message: "No proxies found to transfer" }; + } + + // Update all proxies to point to the new wallet + const updatePromises = proxies.map(proxy => + ctx.db.proxy.update({ + where: { id: proxy.id }, + data: { walletId: input.toWalletId }, + }) + ); + + await Promise.all(updatePromises); + + return { + transferred: proxies.length, + message: `Successfully transferred ${proxies.length} proxy${proxies.length !== 1 ? 
'ies' : ''}` + }; + }), }); \ No newline at end of file diff --git a/src/server/api/routers/wallets.ts b/src/server/api/routers/wallets.ts index 65ee200b..9d5529c2 100644 --- a/src/server/api/routers/wallets.ts +++ b/src/server/api/routers/wallets.ts @@ -328,23 +328,63 @@ export const walletRouter = createTRPCRouter({ abortMigration: publicProcedure .input(z.object({ walletId: z.string(), - newWalletId: z.string() + newWalletId: z.string().optional() })) .mutation(async ({ ctx, input }) => { - // Delete the new wallet that was created for migration - await ctx.db.newWallet.delete({ + // Try to delete the new wallet if it exists (it might be a NewWallet or Wallet) + if (input.newWalletId) { + try { + // First check if it exists in NewWallet table + const newWallet = await ctx.db.newWallet.findUnique({ + where: { id: input.newWalletId } + }); + + if (newWallet) { + await ctx.db.newWallet.delete({ + where: { id: input.newWalletId } + }); + console.log("Deleted NewWallet:", input.newWalletId); + } else { + // Check if it exists in Wallet table + const wallet = await ctx.db.wallet.findUnique({ + where: { id: input.newWalletId } + }); + + if (wallet) { + await ctx.db.wallet.delete({ + where: { id: input.newWalletId } + }); + console.log("Deleted Wallet:", input.newWalletId); + } else { + console.log("No wallet found with ID:", input.newWalletId, "- migration might be in a different state"); + } + } + } catch (error) { + console.error("Error deleting wallet during migration abort:", error); + // Continue with clearing migration target even if deletion fails + } + } + + // Clear the migration target reference from the original wallet + return ctx.db.wallet.update({ where: { - id: input.newWalletId, + id: input.walletId, + }, + data: { + migrationTargetWalletId: null, }, }); + }), - // Clear the migration target reference from the original wallet + archiveWallet: publicProcedure + .input(z.object({ walletId: z.string() })) + .mutation(async ({ ctx, input }) => { return ctx.db.wallet.update({ where: { id: input.walletId, }, data: { - migrationTargetWalletId: null, + isArchived: true, }, }); }), From d85e8fa5b26a4782f1e70514d5b7bf7bc386a85a Mon Sep 17 00:00:00 2001 From: QSchlegel Date: Tue, 28 Oct 2025 11:42:28 +0100 Subject: [PATCH 14/15] build fix --- .../overall-layout/proxy-data-loader.tsx | 60 +++++---- .../multisig/proxy/ProxyControl.tsx | 90 +++++++------- src/components/multisig/proxy/common.ts | 16 +-- src/components/multisig/proxy/offchain.ts | 117 ++++++++---------- .../pages/wallet/info/migrate-wallet.tsx | 2 +- .../wallet/info/migration/ProxySetupStep.tsx | 44 ++----- .../info/migration/ProxyTransferStep.tsx | 12 +- .../migration/useMigrationWalletFlowState.tsx | 85 +++++++------ 8 files changed, 194 insertions(+), 232 deletions(-) diff --git a/src/components/common/overall-layout/proxy-data-loader.tsx b/src/components/common/overall-layout/proxy-data-loader.tsx index 13f65240..7eed74b7 100644 --- a/src/components/common/overall-layout/proxy-data-loader.tsx +++ b/src/components/common/overall-layout/proxy-data-loader.tsx @@ -1,13 +1,13 @@ import { useEffect } from "react"; import useAppWallet from "@/hooks/useAppWallet"; -import { useProxyStore, useProxyData, useProxyActions } from "@/lib/zustand/proxy"; +import { useProxyData, useProxyActions } from "@/lib/zustand/proxy"; import { useSiteStore } from "@/lib/zustand/site"; import { api } from "@/utils/api"; export default function ProxyDataLoader() { const { appWallet } = useAppWallet(); const network = useSiteStore((state) => 
state.network); - const { proxies, loading, error } = useProxyData(appWallet?.id); + const { proxies } = useProxyData(appWallet?.id); const { setProxies, fetchProxyBalance, @@ -18,9 +18,9 @@ export default function ProxyDataLoader() { // Get proxies from API - const { data: apiProxies, refetch: refetchProxies, isLoading: apiLoading } = api.proxy.getProxiesByUserOrWallet.useQuery( + const { data: apiProxies, refetch: refetchProxies } = api.proxy.getProxiesByUserOrWallet.useQuery( { - walletId: appWallet?.id || undefined, + walletId: appWallet?.id ?? undefined, }, { enabled: !!appWallet?.id, @@ -49,36 +49,29 @@ export default function ProxyDataLoader() { // Fetch additional data for each proxy useEffect(() => { - if (proxies.length > 0 && appWallet?.id && appWallet?.scriptCbor) { - proxies.forEach(async (proxy) => { - // Only fetch if we don't have recent data (older than 5 minutes) - const isStale = !proxy.lastUpdated || (Date.now() - proxy.lastUpdated) > 5 * 60 * 1000; - - - if (isStale) { - try { - - // Fetch balance - await fetchProxyBalance(appWallet.id, proxy.id, proxy.proxyAddress, network.toString()); - - // Fetch DRep info - await fetchProxyDrepInfo( - appWallet.id, - proxy.id, - proxy.proxyAddress, - proxy.authTokenId, - appWallet.scriptCbor, - network.toString(), - proxy.paramUtxo - ); - - - } catch (error) { - console.error(`Error fetching data for proxy ${proxy.id}:`, error); + void (async () => { + for (const proxy of proxies) { + // Only fetch if we don't have recent data (older than 5 minutes) + const isStale = !proxy.lastUpdated || (Date.now() - proxy.lastUpdated) > 5 * 60 * 1000; + if (isStale) { + try { + await fetchProxyBalance(appWallet.id, proxy.id, proxy.proxyAddress, network.toString()); + await fetchProxyDrepInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo, + ); + } catch (error) { + console.error(`Error fetching data for proxy ${proxy.id}:`, error); + } } } - }); + })(); } }, [proxies, appWallet?.id, appWallet?.scriptCbor, network, fetchProxyBalance, fetchProxyDrepInfo]); @@ -95,7 +88,10 @@ export default function ProxyDataLoader() { useEffect(() => { // Store refetch function in window for global access if needed if (typeof window !== 'undefined') { - (window as any).refetchProxyData = refetchProxies; + const w = window as Window & { refetchProxyData?: () => void }; + w.refetchProxyData = () => { + void refetchProxies(); + }; } }, [refetchProxies]); diff --git a/src/components/multisig/proxy/ProxyControl.tsx b/src/components/multisig/proxy/ProxyControl.tsx index 84e779c3..c45a8b98 100644 --- a/src/components/multisig/proxy/ProxyControl.tsx +++ b/src/components/multisig/proxy/ProxyControl.tsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect, useCallback } from "react"; +import React, { useState, useEffect, useCallback, useMemo } from "react"; import { useWallet } from "@meshsdk/react"; import { MeshProxyContract } from "./offchain"; import { useUserStore } from "@/lib/zustand/user"; @@ -13,7 +13,7 @@ import ProxySetup from "./ProxySetup"; import ProxySpend from "./ProxySpend"; import UTxOSelector from "@/components/pages/wallet/new-transaction/utxoSelector"; import { getProvider } from "@/utils/get-provider"; -import { MeshTxBuilder, UTxO } from "@meshsdk/core"; +import type { MeshTxBuilder, UTxO } from "@meshsdk/core"; import { useProxy } from "@/hooks/useProxy"; import { useProxyData } from "@/lib/zustand/proxy"; @@ -22,7 +22,7 @@ import { Dialog, DialogContent, 
DialogHeader, DialogTitle } from "@/components/u import { Card, CardContent, CardHeader } from "@/components/ui/card"; import { Button } from "@/components/ui/button"; import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; -import { AlertCircle, ChevronDown, ChevronUp, Wallet, TrendingUp, Info, UserCheck, UserX } from "lucide-react"; +import { AlertCircle, ChevronDown, ChevronUp, Wallet, TrendingUp, Info } from "lucide-react"; interface ProxyOutput { address: string; @@ -43,25 +43,23 @@ export default function ProxyControl() { const setLoading = useSiteStore((state) => state.setLoading); const network = useSiteStore((state) => state.network); const { appWallet } = useAppWallet(); - const ctx = api.useUtils(); const { newTransaction } = useTransaction(); - const { isProxyEnabled, selectedProxyId, setSelectedProxy, clearSelectedProxy } = useProxy(); + const { selectedProxyId, setSelectedProxy, clearSelectedProxy } = useProxy(); // Get proxies from proxy store (includes balance and DRep info) - const { proxies: storeProxies, loading: storeLoading, error: storeError } = useProxyData(appWallet?.id); + const { proxies: storeProxies, loading: storeLoading } = useProxyData(appWallet?.id); // Get proxies from API (for mutations) - const { data: apiProxies, refetch: refetchProxies, isLoading: apiLoading, error: apiError } = api.proxy.getProxiesByUserOrWallet.useQuery( + const { data: apiProxies, refetch: refetchProxies, isLoading: apiLoading } = api.proxy.getProxiesByUserOrWallet.useQuery( { - walletId: appWallet?.id || undefined, + walletId: appWallet?.id ?? undefined, }, { enabled: !!appWallet?.id } ); // Use store proxies if available, otherwise fall back to API proxies - const proxies = storeProxies.length > 0 ? storeProxies : (apiProxies || []); + const proxies = useMemo(() => storeProxies.length > 0 ? storeProxies : (apiProxies ?? []), [storeProxies, apiProxies]); const proxiesLoading = storeLoading || apiLoading; - const proxiesError = storeError || apiError; const { mutateAsync: createProxy } = api.proxy.createProxy.useMutation({ onSuccess: () => { @@ -78,7 +76,7 @@ export default function ProxyControl() { // State management const [proxyContract, setProxyContract] = useState(null); const [isProxySetup, setIsProxySetup] = useState(false); - const [localLoading, setLocalLoading] = useState(false); + const [, setLocalLoading] = useState(false); const [tvlLoading, setTvlLoading] = useState(false); // Setup flow state @@ -106,8 +104,8 @@ export default function ProxyControl() { ]); // UTxO selection state (UI only). We will still pass all UTxOs from provider to contract. - const [selectedUtxos, setSelectedUtxos] = useState([]); - const [manualSelected, setManualSelected] = useState(false); + const [, setSelectedUtxos] = useState([]); + const [, setManualSelected] = useState(false); // Helper to resolve inputs for multisig controlled txs const getMsInputs = useCallback(async (): Promise<{ utxos: UTxO[]; walletAddress: string }> => { @@ -137,7 +135,7 @@ export default function ProxyControl() { networkId: network, }, {}, - appWallet?.scriptCbor || undefined, + appWallet?.scriptCbor ?? 
undefined, ); setProxyContract(contract); } catch (error) { @@ -149,7 +147,7 @@ export default function ProxyControl() { }); } } - }, [connected, wallet, userAddress, network]); + }, [connected, wallet, userAddress, network, appWallet?.scriptCbor]); // Check if proxy is already set up const checkProxySetup = useCallback(async () => { @@ -158,7 +156,7 @@ export default function ProxyControl() { try { const balance = await proxyContract.getProxyBalance(); setIsProxySetup(balance.length > 0); - } catch (error) { + } catch { // Proxy not set up yet setIsProxySetup(false); } @@ -166,7 +164,7 @@ export default function ProxyControl() { // Load initial state useEffect(() => { - checkProxySetup(); + void checkProxySetup(); }, [checkProxySetup]); // Step 1: Initialize proxy setup @@ -200,7 +198,7 @@ export default function ProxyControl() { authTokenId: result.authTokenId, proxyAddress: result.proxyAddress, txHex: result.tx, - description: description || undefined, + description: description ?? undefined, }); setSetupStep(1); @@ -221,7 +219,7 @@ export default function ProxyControl() { setSetupLoading(false); setLocalLoading(false); } - }, [proxyContract, connected, setLoading]); + }, [proxyContract, connected, getMsInputs, newTransaction]); // Step 2: Review and confirm setup const handleConfirmSetup = useCallback(async () => { @@ -260,12 +258,12 @@ export default function ProxyControl() { } await createProxy({ - walletId: appWallet?.id || undefined, + walletId: appWallet?.id ?? undefined, userId: undefined, proxyAddress: setupData.proxyAddress, authTokenId: setupData.authTokenId, paramUtxo: JSON.stringify(setupData.paramUtxo), - description: setupData.description || undefined, + description: setupData.description ?? undefined, }); // Update local state @@ -312,7 +310,7 @@ export default function ProxyControl() { setSetupLoading(false); setLocalLoading(false); } - }, [setupData, wallet, appWallet, createProxy, refetchProxies, setLoading]); + }, [setupData, wallet, appWallet, createProxy, refetchProxies, getMsInputs, newTransaction]); // Reset setup flow const handleResetSetup = useCallback(() => { @@ -372,8 +370,8 @@ export default function ProxyControl() { } }, [proxyContract, network, wallet]); - // Get DRep information for a specific proxy - const getProxyDrepInfo = useCallback(async (proxy: any) => { + // Get DRep information for a specific proxy (unused but kept for potential future use) + const getProxyDrepInfo = useCallback(async (proxy: { paramUtxo: string; proxyAddress: string }) => { if (!proxy) return { drepId: "", status: null }; try { @@ -385,15 +383,15 @@ export default function ProxyControl() { networkId: network, }, { - paramUtxo: JSON.parse(proxy.paramUtxo), + paramUtxo: JSON.parse(proxy.paramUtxo) as { txHash: string; outputIndex: number }, }, - appWallet?.scriptCbor || undefined, + appWallet?.scriptCbor ?? 
undefined, ); tempContract.proxyAddress = proxy.proxyAddress; // Get DRep ID and status - const drepId = await tempContract.getDrepId(); - const status = await tempContract.getDrepStatus(); + const drepId = tempContract.getDrepId(); + const status = tempContract.getDrepStatus(); return { drepId, status }; } catch (error) { @@ -438,12 +436,12 @@ export default function ProxyControl() { let totalADA = 0; let totalAssets = 0; - let totalProxies = proxies.length; + const totalProxies = proxies.length; // Calculate TVL from store data proxies.forEach(proxy => { if ('balance' in proxy && proxy.balance && proxy.balance.length > 0) { - proxy.balance.forEach((asset: any) => { + proxy.balance.forEach((asset: { unit: string; quantity: string }) => { if (asset.unit === 'lovelace') { totalADA += parseInt(asset.quantity) / 1000000; // Convert lovelace to ADA } else { @@ -461,7 +459,7 @@ export default function ProxyControl() { // Fetch all proxy balances when proxies change useEffect(() => { if (proxies && proxies.length > 0 && proxyContract) { - fetchAllProxyBalances(); + void fetchAllProxyBalances(); } }, [proxies, proxyContract, fetchAllProxyBalances]); @@ -470,7 +468,7 @@ export default function ProxyControl() { if (proxies && proxies.length > 0 && proxyContract && connected) { // Small delay to ensure everything is initialized const timer = setTimeout(() => { - fetchAllProxyBalances(); + void fetchAllProxyBalances(); }, 1000); return () => clearTimeout(timer); } @@ -483,10 +481,10 @@ export default function ProxyControl() { } }, [proxies, proxyContract, fetchAllProxyBalances]); - // Global refresh function for all proxy balances + // Global refresh function for all proxy balances (unused but kept for potential future use) const refreshAllBalances = useCallback(async () => { if (proxies && proxies.length > 0 && proxyContract) { - await fetchAllProxyBalances(); + void fetchAllProxyBalances(); } }, [proxies, proxyContract, fetchAllProxyBalances]); @@ -512,7 +510,7 @@ export default function ProxyControl() { description: "Proxy mode enabled for governance operations.", }); } - }, [selectedProxyId, setSelectedProxy, clearSelectedProxy, toast]); + }, [selectedProxyId, setSelectedProxy, clearSelectedProxy]); // Spend from proxy @@ -554,7 +552,7 @@ export default function ProxyControl() { setLocalLoading(true); // Get the selected proxy - const proxy = proxies?.find((p: any) => p.id === selectedProxyId); + const proxy = proxies?.find((p: { id: string }) => p.id === selectedProxyId); if (!proxy) { throw new Error("Selected proxy not found"); } @@ -567,9 +565,9 @@ export default function ProxyControl() { networkId: network, }, { - paramUtxo: JSON.parse(proxy.paramUtxo), + paramUtxo: JSON.parse(proxy.paramUtxo) as { txHash: string; outputIndex: number }, }, - appWallet?.scriptCbor || undefined, + appWallet?.scriptCbor ?? 
undefined, ); selectedProxyContract.proxyAddress = proxy.proxyAddress; @@ -577,8 +575,8 @@ export default function ProxyControl() { const { utxos, walletAddress } = await getMsInputs(); const txHex = await selectedProxyContract.spendProxySimple(validOutputs, utxos, walletAddress); if (appWallet?.scriptCbor) { - await newTransaction({ - txBuilder: txHex, + await newTransaction({ + txBuilder: txHex, description: "Proxy spend transaction", toastMessage: "Proxy spend transaction created", }); @@ -587,7 +585,7 @@ export default function ProxyControl() { } // Refresh balance after successful spend - await handleProxySelection(selectedProxyId); + handleProxySelection(selectedProxyId); // Close the spend modal after successful transaction setTimeout(() => { @@ -618,18 +616,18 @@ export default function ProxyControl() { setSpendLoading(false); setLocalLoading(false); } - }, [proxyContract, connected, spendOutputs, selectedProxyId, proxies, network, wallet, setLoading, handleProxySelection]); + }, [proxyContract, connected, spendOutputs, selectedProxyId, proxies, network, wallet, handleProxySelection, getMsInputs, newTransaction, appWallet?.scriptCbor]); // Copy to clipboard - const copyToClipboard = (text: string) => { + const copyToClipboard = useCallback((text: string) => { navigator.clipboard.writeText(text); toast({ title: "Copied", description: "Address copied to clipboard", variant: "default", }); - }; + }, []); if (!connected) { @@ -734,7 +732,7 @@ export default function ProxyControl() { className="h-6 w-6 p-0" onClick={(e) => { e.stopPropagation(); - refreshTVL(); + void refreshTVL(); }} disabled={tvlLoading} > @@ -744,7 +742,7 @@ export default function ProxyControl() {
    Total ADA: {totalADA.toFixed(6)} ADA
    Total Assets: {totalAssets}
    - Active Proxies: {proxies?.length || 0}
    + Active Proxies: {proxies?.length ?? 0}
    {tvlLoading && (
    Updating balances...
    )} diff --git a/src/components/multisig/proxy/common.ts b/src/components/multisig/proxy/common.ts index e4ce1fc0..f620dafd 100644 --- a/src/components/multisig/proxy/common.ts +++ b/src/components/multisig/proxy/common.ts @@ -1,11 +1,11 @@ import { - BrowserWallet, + serializePlutusScript, +} from "@meshsdk/core"; +import type { IFetcher, IWallet, LanguageVersion, MeshTxBuilder, - MeshWallet, - serializePlutusScript, UTxO, } from "@meshsdk/core"; @@ -122,7 +122,7 @@ export class MeshTxInitiator { } return utxos.filter((u) => { const lovelaceAmount = u.output.amount.find( - (a: any) => a.unit === "lovelace", + (a: { unit: string; quantity: string }) => a.unit === "lovelace", )?.quantity; return Number(lovelaceAmount) > lovelace; }); @@ -138,7 +138,7 @@ export class MeshTxInitiator { } return utxos.filter((u) => { const assetAmount = u.output.amount.find( - (a: any) => a.unit === assetHex, + (a: { unit: string; quantity: string }) => a.unit === assetHex, )?.quantity; return Number(assetAmount) >= 1; }); @@ -155,7 +155,7 @@ export class MeshTxInitiator { } return utxos.filter((u) => { const lovelaceAmount = u.output.amount.find( - (a: any) => a.unit === "lovelace", + (a: { unit: string; quantity: string }) => a.unit === "lovelace", )?.quantity; return Number(lovelaceAmount) > lovelace; }); @@ -172,7 +172,7 @@ export class MeshTxInitiator { } return utxos.filter((u) => { const assetAmount = u.output.amount.find( - (a: any) => a.unit === assetHex, + (a: { unit: string; quantity: string }) => a.unit === assetHex, )?.quantity; return Number(assetAmount) >= 1; }); @@ -209,7 +209,7 @@ export class MeshTxInitiator { this.networkId, ).address; scriptUtxo = - utxos.filter((utxo) => utxo.output.address === scriptAddr)[0] || + utxos.find((utxo) => utxo.output.address === scriptAddr) ?? utxos[0]; } diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index f8257c3b..2c361e38 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -1,27 +1,15 @@ +import { mConStr0, mOutputReference } from "@meshsdk/common"; import { - AssetMetadata, - conStr0, - Data, - integer, - mConStr0, - mOutputReference, - mPubKeyAddress, - stringToHex, -} from "@meshsdk/common"; -import { - deserializeAddress, resolveScriptHash, - serializeAddressObj, serializePlutusScript, - UTxO, - applyCborEncoding, applyParamsToScript, resolveScriptHashDRepId, - MeshTxBuilder, } from "@meshsdk/core"; -import { parseDatumCbor } from "@meshsdk/core-cst"; +import type { UTxO, MeshTxBuilder } from "@meshsdk/core"; +// import { parseDatumCbor } from "@meshsdk/core-cst"; -import { MeshTxInitiator, MeshTxInitiatorInput } from "./common"; +import { MeshTxInitiator } from "./common"; +import type { MeshTxInitiatorInput } from "./common"; import blueprint from "./aiken-workspace/plutus.json"; /** @@ -34,7 +22,7 @@ import blueprint from "./aiken-workspace/plutus.json"; * With each new NFT minted, the token index within the oracle is incremented by one, ensuring a consistent and orderly progression in the numbering of the NFTs. */ // Cache for DRep status to avoid multiple API calls -const drepStatusCache = new Map(); +const drepStatusCache = new Map(); const CACHE_DURATION = 5 * 60 * 1000; // 5 minutes export class MeshProxyContract extends MeshTxInitiator { @@ -98,7 +86,7 @@ export class MeshProxyContract extends MeshTxInitiator { ) { super(inputs); this.stakeCredential = inputs.stakeCredential; - this.networkId = inputs.networkId ? 
inputs.networkId : 0; + this.networkId = inputs.networkId ?? 0; this.msCbor = msCbor; // Set the proxyAddress if paramUtxo is provided @@ -126,7 +114,9 @@ export class MeshProxyContract extends MeshTxInitiator { ); } - let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + const walletInfo = await this.getWalletInfoForTx(); + let { utxos, walletAddress } = walletInfo; + const { collateral } = walletInfo; if (this.msCbor && msUtxos && msWalletAddress) { utxos = msUtxos; @@ -137,14 +127,11 @@ export class MeshProxyContract extends MeshTxInitiator { if (!utxos || utxos.length <= 0) { throw new Error("No UTxOs found"); } - const paramUtxo = utxos?.filter((utxo) => - utxo.output.amount - .map( - (asset) => - asset.unit === "lovelace" && Number(asset.quantity) >= 20000000, - ) - .reduce((pa, ca, i, a) => pa || ca), - )[0]; + const paramUtxo = utxos?.find((utxo) => + utxo.output.amount.some( + (asset) => asset.unit === "lovelace" && Number(asset.quantity) >= 20000000, + ), + ); if (!paramUtxo) { throw new Error( "Insufficicient balance. Create one utxo holding at Least 20 ADA.", @@ -163,7 +150,7 @@ export class MeshProxyContract extends MeshTxInitiator { const tokenName = ""; // Try completing the transaction step by step - let tx = await this.mesh.txIn( + const tx = this.mesh.txIn( paramUtxo.input.txHash, paramUtxo.input.outputIndex, paramUtxo.output.amount, @@ -211,7 +198,9 @@ export class MeshProxyContract extends MeshTxInitiator { "No UTxOs and wallet address for multisig script cbor found", ); } - let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + const walletInfo = await this.getWalletInfoForTx(); + let { utxos, walletAddress } = walletInfo; + const { collateral } = walletInfo; // If multisig inputs are provided, use them instead of the wallet inputs if (this.msCbor && msUtxos && msWalletAddress) { utxos = msUtxos; @@ -342,7 +331,7 @@ export class MeshProxyContract extends MeshTxInitiator { //prepare Proxy spend //1 Get - let txHex = await this.mesh; + const txHex = this.mesh; for (const input of freeProxyUtxos) { txHex @@ -412,7 +401,9 @@ export class MeshProxyContract extends MeshTxInitiator { "No UTxOs and wallet address for multisig script cbor found", ); } - let { utxos, collateral, walletAddress } = await this.getWalletInfoForTx(); + const walletInfo2 = await this.getWalletInfoForTx(); + let { utxos, walletAddress } = walletInfo2; + const { collateral } = walletInfo2; // If multisig inputs are provided, use them instead of the wallet inputs if (this.msCbor && msUtxos && msWalletAddress) { utxos = msUtxos; @@ -459,7 +450,7 @@ export class MeshProxyContract extends MeshTxInitiator { const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); const drepId = resolveScriptHashDRepId(proxyScriptHash); - const txHex = await this.mesh; + const txHex = this.mesh; txHex.txIn( authTokenUtxo.input.txHash, authTokenUtxo.input.outputIndex, @@ -496,8 +487,8 @@ export class MeshProxyContract extends MeshTxInitiator { txHex.txInScript(this.msCbor); } totalAmount += BigInt( - utxo.output.amount.find((asset: any) => asset.unit === "lovelace") - ?.quantity || "0", + (utxo.output.amount.find((asset: { unit: string; quantity: string }) => asset.unit === "lovelace") + ?.quantity) ?? "0", ); } @@ -620,7 +611,7 @@ export class MeshProxyContract extends MeshTxInitiator { for (const utxo of utxos) { for (const asset of utxo.output.amount) { - const currentAmount = balanceMap.get(asset.unit) || BigInt(0); + const currentAmount = balanceMap.get(asset.unit) ?? 
BigInt(0); balanceMap.set(asset.unit, currentAmount + BigInt(asset.quantity)); } } @@ -634,8 +625,9 @@ export class MeshProxyContract extends MeshTxInitiator { ); return balance; - } catch (error: any) { - throw new Error(`Failed to fetch proxy balance: ${error?.message || 'Unknown error'}`); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + throw new Error(`Failed to fetch proxy balance: ${errorMessage}`); } }; @@ -670,9 +662,9 @@ export class MeshProxyContract extends MeshTxInitiator { }); return drepStatus; - } catch (error: any) { + } catch (error: unknown) { // Parse the error if it's a stringified JSON - let parsedError = error; + let parsedError: unknown = error; if (typeof error === 'string') { try { parsedError = JSON.parse(error); @@ -682,17 +674,19 @@ export class MeshProxyContract extends MeshTxInitiator { } // Handle specific error cases - check multiple possible 404 indicators - const is404 = error?.status === 404 || - error?.response?.status === 404 || - error?.data?.status_code === 404 || - parsedError?.status === 404 || - parsedError?.data?.status_code === 404 || - error?.message?.includes('404') || - error?.message?.includes('Not Found') || - error?.message?.includes('not found') || - error?.message?.includes('NOT_FOUND') || - (error?.response?.data && error.response.data.status_code === 404) || - (error?.data && error.data.status_code === 404); + const errorObj = error as Record; + const parsedObj = parsedError as Record; + const is404 = errorObj?.status === 404 || + (errorObj?.response as Record)?.status === 404 || + (errorObj?.data as Record)?.status_code === 404 || + parsedObj?.status === 404 || + (parsedObj?.data as Record)?.status_code === 404 || + (errorObj?.message as string)?.includes('404') || + (errorObj?.message as string)?.includes('Not Found') || + (errorObj?.message as string)?.includes('not found') || + (errorObj?.message as string)?.includes('NOT_FOUND') || + ((errorObj?.response as Record)?.data as Record)?.status_code === 404 || + ((errorObj?.data as Record)?.status_code === 404); if (is404) { // DRep not registered yet - cache null result @@ -704,7 +698,8 @@ export class MeshProxyContract extends MeshTxInitiator { } // For other errors, don't cache and re-throw - console.log(`Failed to fetch DRep status: ${error?.message || 'Unknown error'}`); + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + console.log(`Failed to fetch DRep status: ${errorMessage}`); } }; @@ -719,7 +714,7 @@ export class MeshProxyContract extends MeshTxInitiator { votes: Array<{ proposalId: string; voteKind: "Yes" | "No" | "Abstain"; - metadata?: any; + metadata?: unknown; }>, msUtxos?: UTxO[], msWalletAddress?: string, @@ -732,8 +727,8 @@ export class MeshProxyContract extends MeshTxInitiator { const walletInfo = await this.getWalletInfoForTx(); // Use multisig inputs if provided, otherwise use regular wallet - const utxos = msUtxos || walletInfo.utxos; - const walletAddress = msWalletAddress || walletInfo.walletAddress; + const utxos = msUtxos ?? walletInfo.utxos; + const walletAddress = msWalletAddress ?? 
walletInfo.walletAddress; // Always get collateral from user's regular wallet let collateral: UTxO; @@ -741,7 +736,7 @@ export class MeshProxyContract extends MeshTxInitiator { const collateralInfo = await this.getWalletInfoForTx(); const foundCollateral = collateralInfo.utxos.find((utxo: UTxO) => utxo.output.amount.some( - (amount: any) => + (amount: { unit: string; quantity: string }) => amount.unit === "lovelace" && BigInt(amount.quantity) >= BigInt(5000000), ), @@ -752,7 +747,7 @@ export class MeshProxyContract extends MeshTxInitiator { ); } collateral = foundCollateral; - } catch (error) { + } catch { throw new Error( "Failed to get collateral from regular wallet. Please ensure you have at least 5 ADA in your regular wallet for transaction collateral.", ); @@ -796,7 +791,7 @@ export class MeshProxyContract extends MeshTxInitiator { const proxyScriptHash = resolveScriptHash(proxyCbor, "V3"); const drepId = resolveScriptHashDRepId(proxyScriptHash); - const txHex = await this.mesh; + const txHex = this.mesh; // 1. Add AuthToken UTxO first (following manageProxyDrep pattern) txHex.txIn( @@ -835,16 +830,14 @@ export class MeshProxyContract extends MeshTxInitiator { txHex.txInScript(this.msCbor); } totalAmount += BigInt( - utxo.output.amount.find((asset: any) => asset.unit === "lovelace") - ?.quantity || "0", + (utxo.output.amount.find((asset: { unit: string; quantity: string }) => asset.unit === "lovelace") + ?.quantity) ?? "0", ); } // 4. Add output (return AuthToken) txHex.txOut(walletAddress, [{ unit: policyIdAT, quantity: "1" }]); - console.log("votes", votes); - console.log("txHex", txHex); // 5. Add votes for each proposal for (const vote of votes) { diff --git a/src/components/pages/wallet/info/migrate-wallet.tsx b/src/components/pages/wallet/info/migrate-wallet.tsx index 5c780488..675723bc 100644 --- a/src/components/pages/wallet/info/migrate-wallet.tsx +++ b/src/components/pages/wallet/info/migrate-wallet.tsx @@ -259,7 +259,7 @@ export function MigrateWallet({ appWallet }: { appWallet: Wallet }) { if (existingMigration && migrationState.step === null && !migrationState.hasAborted) { updateMigrationState({ migrationId: existingMigration.id, - step: existingMigration.currentStep, + step: existingMigration.currentStep as MigrationStep, newWalletId: existingMigration.newWalletId || null, }); } diff --git a/src/components/pages/wallet/info/migration/ProxySetupStep.tsx b/src/components/pages/wallet/info/migration/ProxySetupStep.tsx index efcb83ac..06bcf0cf 100644 --- a/src/components/pages/wallet/info/migration/ProxySetupStep.tsx +++ b/src/components/pages/wallet/info/migration/ProxySetupStep.tsx @@ -7,7 +7,7 @@ import { Wallet } from "@/types/wallet"; import { api } from "@/utils/api"; import { useUserStore } from "@/lib/zustand/user"; import { toast } from "@/hooks/use-toast"; -import ProxySetup from "@/components/multisig/proxy/ProxySetup"; +import ProxyControl from "@/components/multisig/proxy/ProxyControl"; interface ProxySetupStepProps { appWallet: Wallet; @@ -26,7 +26,6 @@ export default function ProxySetupStep({ const [isCheckingProxies, setIsCheckingProxies] = useState(true); const [hasExistingProxy, setHasExistingProxy] = useState(false); const [showProxySetup, setShowProxySetup] = useState(false); - const [isCreatingProxy, setIsCreatingProxy] = useState(false); // Check for existing proxies const { data: existingProxies, isLoading: isLoadingProxies } = api.proxy.getProxiesByWallet.useQuery( @@ -67,23 +66,6 @@ export default function ProxySetupStep({ setShowProxySetup(true); 
}; - const handleProxyCreated = () => { - setIsCreatingProxy(false); - toast({ - title: "Success", - description: "Proxy created successfully!", - }); - onContinue(); - }; - - const handleProxyError = (error: string) => { - setIsCreatingProxy(false); - toast({ - title: "Error", - description: `Failed to create proxy: ${error}`, - variant: "destructive", - }); - }; if (isCheckingProxies) { return ( @@ -108,12 +90,7 @@ export default function ProxySetupStep({ description="Set up a proxy for your new wallet" cardClassName="col-span-2" > - setShowProxySetup(false)} - /> +
    ); @@ -188,20 +165,13 @@ export default function ProxySetupStep({
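For reference, here is a rough sketch of how the convenience hooks exported from src/lib/zustand/proxy.ts could be consumed outside of ProxyDataLoader, reusing the same five-minute staleness window. The hook name useStaleProxyRefresh and the STALE_AFTER_MS constant are illustrative assumptions, not identifiers from the codebase.

import { useEffect } from "react";
import { useProxyData, useProxyActions, useSelectedProxy } from "@/lib/zustand/proxy";

// Illustrative constant mirroring the 5-minute window used in proxy-data-loader.tsx.
const STALE_AFTER_MS = 5 * 60 * 1000;

// Hypothetical hook: refreshes balances for stale proxies and exposes the store state.
export function useStaleProxyRefresh(walletId: string, network: number) {
  const { proxies, loading, error } = useProxyData(walletId);
  const { fetchProxyBalance } = useProxyActions();
  const { selectedProxy, isProxyEnabled } = useSelectedProxy();

  useEffect(() => {
    for (const proxy of proxies) {
      // Only refetch when the cached balance is missing or older than the window.
      const isStale = !proxy.lastUpdated || Date.now() - proxy.lastUpdated > STALE_AFTER_MS;
      if (isStale) {
        void fetchProxyBalance(walletId, proxy.id, proxy.proxyAddress, network.toString());
      }
    }
  }, [proxies, walletId, network, fetchProxyBalance]);

  return { proxies, loading, error, selectedProxy, isProxyEnabled };
}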
    diff --git a/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx b/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx index 1b98ab3e..6b04df8d 100644 --- a/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx +++ b/src/components/pages/wallet/info/migration/ProxyTransferStep.tsx @@ -1,9 +1,9 @@ -import React, { useState, useEffect } from "react"; +import React, { useState } from "react"; import { Button } from "@/components/ui/button"; import CardUI from "@/components/ui/card-content"; import { Alert, AlertDescription } from "@/components/ui/alert"; import { ArrowLeft, ArrowRight, Loader, AlertCircle, CheckCircle, Users } from "lucide-react"; -import { Wallet } from "@/types/wallet"; +import type { Wallet } from "@/types/wallet"; import { api } from "@/utils/api"; import { toast } from "@/hooks/use-toast"; @@ -29,7 +29,7 @@ export default function ProxyTransferStep({ }); // Mutation to transfer proxies - const { mutate: transferProxies } = api.proxy.transferProxies.useMutation({ + const { mutateAsync: transferProxies } = api.proxy.transferProxies.useMutation({ onSuccess: () => { setTransferComplete(true); toast({ @@ -82,7 +82,7 @@ export default function ProxyTransferStep({ ); } - const hasProxies = existingProxies && existingProxies.length > 0; + const hasProxies = (existingProxies && existingProxies.length > 0) ?? false; return (
    @@ -109,14 +109,14 @@ export default function ProxyTransferStep({
    {hasProxies ? (
    - {existingProxies.map((proxy, index) => ( + {(existingProxies ?? []).map((proxy, index) => (

    Proxy {index + 1}

    - {proxy.name || `Proxy ${index + 1}`} + {proxy.description ?? `Proxy ${index + 1}`}

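For reference, a rough sketch of how the transferProxies mutation added to src/server/api/routers/proxy.ts can be driven from a migration step such as the one above. The useProxyTransfer wrapper is an illustrative assumption; the { fromWalletId, toWalletId } input and the { transferred, message } result follow the router as written.

import { useState } from "react";
import { api } from "@/utils/api";
import { toast } from "@/hooks/use-toast";

// Hypothetical wrapper hook around the transferProxies tRPC mutation.
export function useProxyTransfer(fromWalletId: string, toWalletId: string) {
  const [isTransferring, setIsTransferring] = useState(false);
  const { mutateAsync: transferProxies } = api.proxy.transferProxies.useMutation();

  const handleTransfer = async (): Promise<number> => {
    setIsTransferring(true);
    try {
      // The router moves every active proxy of fromWalletId to toWalletId and
      // resolves with { transferred, message }; it is a no-op when none exist.
      const result = await transferProxies({ fromWalletId, toWalletId });
      toast({ title: "Proxies transferred", description: result.message });
      return result.transferred;
    } catch (error) {
      console.error("Failed to transfer proxies:", error);
      toast({ title: "Error", description: "Failed to transfer proxies", variant: "destructive" });
      return 0;
    } finally {
      setIsTransferring(false);
    }
  };

  return { handleTransfer, isTransferring };
}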
    diff --git a/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx index 264a031c..5de653c4 100644 --- a/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx +++ b/src/components/pages/wallet/info/migration/useMigrationWalletFlowState.tsx @@ -5,7 +5,7 @@ */ import { useState, useEffect, useMemo, useCallback } from "react"; -import { resolvePaymentKeyHash, resolveStakeKeyHash } from "@meshsdk/core"; +import { resolveStakeKeyHash } from "@meshsdk/core"; import type { MultisigKey } from "@/utils/multisigSDK"; import { MultisigWallet } from "@/utils/multisigSDK"; import { paymentKeyHash } from "@/utils/multisigSDK"; @@ -14,7 +14,7 @@ import { api } from "@/utils/api"; import { useUserStore } from "@/lib/zustand/user"; import { useSiteStore } from "@/lib/zustand/site"; import { useToast } from "@/hooks/use-toast"; -import { Wallet } from "@/types/wallet"; +import type { Wallet } from "@/types/wallet"; export interface MigrationWalletFlowState { // Core wallet data @@ -104,12 +104,13 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF ); // Get existing new wallet data if migration is in progress + type WalletWithMigration = { migrationTargetWalletId?: string }; const { data: existingNewWallet } = api.wallet.getNewWallet.useQuery( { - walletId: (appWallet as any).migrationTargetWalletId || "", + walletId: ((appWallet as unknown as WalletWithMigration).migrationTargetWalletId) ?? "", }, { - enabled: !!(appWallet as any).migrationTargetWalletId, + enabled: Boolean((appWallet as unknown as WalletWithMigration).migrationTargetWalletId), } ); @@ -117,12 +118,12 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF useEffect(() => { if (walletData) { setName(`${walletData.name} - Migrated`); - setDescription(walletData.description || ""); - setSignerAddresses(walletData.signersAddresses || []); - setSignerDescriptions(walletData.signersDescriptions || []); - setNumRequiredSigners(walletData.numRequiredSigners || 1); - setNativeScriptType((walletData.type as "atLeast" | "all" | "any") || "atLeast"); - setStakeKey(walletData.stakeCredentialHash || ""); + setDescription(walletData.description ?? ""); + setSignerAddresses(walletData.signersAddresses ?? []); + setSignerDescriptions(walletData.signersDescriptions ?? []); + setNumRequiredSigners(walletData.numRequiredSigners ?? 1); + setNativeScriptType((walletData.type as "atLeast" | "all" | "any") ?? "atLeast"); + setStakeKey(walletData.stakeCredentialHash ?? ""); // Filter and process stake keys const validStakeKeys = (walletData.signersStakeKeys || []).filter((key: string) => { @@ -140,7 +141,11 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerStakeKeys(validStakeKeys); // Initialize DRep keys (empty for now, can be added later) - setSignerDRepKeys((walletData as any).signersDRepKeys || []); + setSignerDRepKeys( + Array.isArray((walletData as unknown as { signersDRepKeys?: string[] }).signersDRepKeys) + ? (walletData as unknown as { signersDRepKeys?: string[] }).signersDRepKeys! 
+ : [], + ); } }, [walletData]); @@ -149,14 +154,18 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF if (existingNewWallet) { setNewWalletId(existingNewWallet.id); setName(existingNewWallet.name); - setDescription(existingNewWallet.description || ""); - setSignerAddresses(existingNewWallet.signersAddresses || []); - setSignerDescriptions(existingNewWallet.signersDescriptions || []); - setSignerStakeKeys(existingNewWallet.signersStakeKeys || []); - setSignerDRepKeys((existingNewWallet as any).signersDRepKeys || []); - setNumRequiredSigners(existingNewWallet.numRequiredSigners || 1); - setStakeKey(existingNewWallet.stakeCredentialHash || ""); - setNativeScriptType((existingNewWallet.scriptType as "atLeast" | "all" | "any") || "atLeast"); + setDescription(existingNewWallet.description ?? ""); + setSignerAddresses(existingNewWallet.signersAddresses ?? []); + setSignerDescriptions(existingNewWallet.signersDescriptions ?? []); + setSignerStakeKeys(existingNewWallet.signersStakeKeys ?? []); + setSignerDRepKeys( + Array.isArray((existingNewWallet as unknown as { signersDRepKeys?: string[] }).signersDRepKeys) + ? (existingNewWallet as unknown as { signersDRepKeys?: string[] }).signersDRepKeys! + : [], + ); + setNumRequiredSigners(existingNewWallet.numRequiredSigners ?? 1); + setStakeKey(existingNewWallet.stakeCredentialHash ?? ""); + setNativeScriptType((existingNewWallet.scriptType as "atLeast" | "all" | "any") ?? "atLeast"); } }, [existingNewWallet]); @@ -233,7 +242,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF duration: 3000, }); }, - onError: (e) => { + onError: (_e) => { setLoading(false); toast({ title: "Error", @@ -252,7 +261,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF duration: 2000, }); }, - onError: (e) => { + onError: (_e) => { toast({ title: "Error", description: "Failed to save changes", @@ -266,9 +275,6 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF const { mutateAsync: deleteNewWallet } = api.wallet.deleteNewWallet.useMutation(); const { mutate: setMigrationTarget } = api.wallet.setMigrationTarget.useMutation({ - onSuccess: () => { - // Migration target set successfully - no need for additional toast since wallet creation already shows success - }, onError: (e) => { console.error("Failed to set migration target:", e); toast({ @@ -339,7 +345,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF signersStakeKeys: signersStakeKeys, signersDRepKeys: signersDRepKeys, numRequiredSigners: numRequiredSigners, - ownerAddress: userAddress || "", + ownerAddress: userAddress ?? 
"", stakeCredentialHash: stakeKey || undefined, scriptType: nativeScriptType || undefined, }; @@ -356,7 +362,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF variant: "destructive", }); } - }, [newWalletId, name, signersAddresses, description, signersDescriptions, signersStakeKeys, numRequiredSigners, stakeKey, nativeScriptType, createNewWallet, toast]); + }, [newWalletId, name, signersAddresses, description, signersDescriptions, signersStakeKeys, signersDRepKeys, numRequiredSigners, stakeKey, nativeScriptType, createNewWallet, toast, userAddress]); // Create final migration wallet async function createMigrationWallet(): Promise { @@ -380,7 +386,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF } // Check if final wallet has already been created - if (appWallet.migrationTargetWalletId) { + if ((appWallet as unknown as WalletWithMigration).migrationTargetWalletId) { toast({ title: "Error", description: "Final wallet has already been created for this migration. You can only create one new wallet per migration.", @@ -411,7 +417,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF stakeCredentialHash: stakeKey || undefined, type: nativeScriptType, }, { - onSuccess: async (data) => { + onSuccess: (data) => { // Set migration target after successful wallet creation setMigrationTarget({ @@ -421,10 +427,9 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF // Clean up the temporary NewWallet if (newWalletId && newWalletId !== data.id) { - try { - await deleteNewWallet({ walletId: newWalletId }); - } catch (error) { - } + void deleteNewWallet({ walletId: newWalletId }).catch((err) => { + console.warn("Failed to delete temporary new wallet:", err); + }); } setNewWalletId(data.id); @@ -445,7 +450,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF variant: "destructive", duration: 3000, }); - reject(error); + reject(error instanceof Error ? error : new Error(error?.message ?? 
'Unknown error')); } }); }); @@ -468,7 +473,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setDescription(newDescription); if (newWalletId) { - updateNewWallet({ + void updateNewWallet({ walletId: newWalletId, name: newName, description: newDescription, @@ -481,7 +486,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF scriptType: nativeScriptType || undefined, }); } - }, [newWalletId, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); + }, [newWalletId, signersAddresses, signersDescriptions, signersStakeKeys, signersDRepKeys, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); const handleSaveSigners = useCallback(async (newAddresses: string[], newDescriptions: string[], newStakeKeys: string[], newDRepKeys: string[]) => { // Ensure all arrays are defined and filter out undefined values @@ -549,7 +554,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF console.error("Failed to update new wallet:", error); } } - }, [newWalletId, name, description, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet]); + }, [newWalletId, name, description, numRequiredSigners, stakeKey, nativeScriptType, updateNewWallet, toast]); const handleSaveSignatureRules = useCallback(async (numRequired: number) => { setNumRequiredSigners(numRequired); @@ -572,7 +577,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF console.error("Failed to update signature rules:", error); } } - }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, stakeKey, nativeScriptType, updateNewWallet]); + }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, signersDRepKeys, stakeKey, nativeScriptType, updateNewWallet]); const handleSaveAdvanced = useCallback(async (newStakeKey: string, scriptType: "all" | "any" | "atLeast") => { setStakeKey(newStakeKey); @@ -605,7 +610,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF console.error("Failed to update advanced settings:", error); } } - }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, numRequiredSigners, updateNewWallet]); + }, [newWalletId, name, description, signersAddresses, signersDescriptions, signersStakeKeys, signersDRepKeys, numRequiredSigners, updateNewWallet]); // Remove external stake credential and try to backfill stake keys from addresses const removeExternalStakeAndBackfill = useCallback(() => { @@ -613,7 +618,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF setSignerStakeKeys(signersStakeKeys); if (newWalletId) { - updateNewWallet({ + void updateNewWallet({ walletId: newWalletId, name: name, description: description, @@ -632,7 +637,7 @@ export function useMigrationWalletFlowState(appWallet: Wallet): MigrationWalletF description: "External stake credential has been removed.", duration: 3000, }); - }, [signersAddresses, signersStakeKeys, newWalletId, name, description, signersDescriptions, numRequiredSigners, nativeScriptType, updateNewWallet, toast]); + }, [signersAddresses, signersStakeKeys, signersDRepKeys, newWalletId, name, description, signersDescriptions, numRequiredSigners, nativeScriptType, updateNewWallet, toast]); // Validation const isValidForCreate = signersAddresses.length > 0 && From ec0f0500e006444e9caf837ee7043345752f3406 Mon Sep 17 
00:00:00 2001 From: QSchlegel Date: Tue, 28 Oct 2025 12:32:44 +0100 Subject: [PATCH 15/15] votingpower --- .../wallet-data-loader-wrapper.tsx | 19 +++++- .../overall-layout/proxy-data-loader.tsx | 14 +++- .../overall-layout/wallet-data-loader.tsx | 19 +++++- .../multisig/proxy/ProxyOverview.tsx | 65 +++++++++++++++++++ src/components/multisig/proxy/offchain.ts | 61 ++++++++++++++++- .../pages/wallet/governance/card-info.tsx | 39 +++++++++-- src/lib/zustand/proxy.ts | 64 +++++++++++++++++- 7 files changed, 264 insertions(+), 17 deletions(-) diff --git a/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx b/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx index 6f743a95..81e1de5f 100644 --- a/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx +++ b/src/components/common/overall-layout/mobile-wrappers/wallet-data-loader-wrapper.tsx @@ -48,7 +48,7 @@ export default function WalletDataLoaderWrapper({ const setWalletAssetMetadata = useWalletsStore( (state) => state.setWalletAssetMetadata, ); - const { fetchProxyBalance, fetchProxyDrepInfo, setProxies } = useProxyActions(); + const { fetchProxyBalance, fetchProxyDrepInfo, fetchProxyDelegatorsInfo, setProxies } = useProxyActions(); const setDrepInfo = useWalletsStore((state) => state.setDrepInfo); @@ -205,7 +205,7 @@ export default function WalletDataLoaderWrapper({ network.toString() ); - // Fetch DRep info + // Fetch DRep info with force refresh await fetchProxyDrepInfo( appWallet.id, proxy.id, @@ -213,7 +213,20 @@ export default function WalletDataLoaderWrapper({ proxy.authTokenId, appWallet.scriptCbor, network.toString(), - proxy.paramUtxo + proxy.paramUtxo, + true // Force refresh to bypass cache + ); + + // Fetch delegators info with force refresh + await fetchProxyDelegatorsInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo, + true // Force refresh to bypass cache ); } catch (error) { diff --git a/src/components/common/overall-layout/proxy-data-loader.tsx b/src/components/common/overall-layout/proxy-data-loader.tsx index 7eed74b7..dd81308b 100644 --- a/src/components/common/overall-layout/proxy-data-loader.tsx +++ b/src/components/common/overall-layout/proxy-data-loader.tsx @@ -12,6 +12,7 @@ export default function ProxyDataLoader() { setProxies, fetchProxyBalance, fetchProxyDrepInfo, + fetchProxyDelegatorsInfo, clearProxyData } = useProxyActions(); @@ -65,6 +66,17 @@ export default function ProxyDataLoader() { appWallet.scriptCbor, network.toString(), proxy.paramUtxo, + true, + ); + await fetchProxyDelegatorsInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo, + true, ); } catch (error) { console.error(`Error fetching data for proxy ${proxy.id}:`, error); @@ -73,7 +85,7 @@ export default function ProxyDataLoader() { } })(); } - }, [proxies, appWallet?.id, appWallet?.scriptCbor, network, fetchProxyBalance, fetchProxyDrepInfo]); + }, [proxies, appWallet?.id, appWallet?.scriptCbor, network, fetchProxyBalance, fetchProxyDrepInfo, fetchProxyDelegatorsInfo]); // Clear proxy data when wallet changes useEffect(() => { diff --git a/src/components/common/overall-layout/wallet-data-loader.tsx b/src/components/common/overall-layout/wallet-data-loader.tsx index 6ce25bdc..af28bb11 100644 --- a/src/components/common/overall-layout/wallet-data-loader.tsx +++ 
b/src/components/common/overall-layout/wallet-data-loader.tsx @@ -20,7 +20,7 @@ export default function WalletDataLoader() { const ctx = api.useUtils(); const network = useSiteStore((state) => state.network); const setRandomState = useSiteStore((state) => state.setRandomState); - const { fetchProxyBalance, fetchProxyDrepInfo, setProxies } = useProxyActions(); + const { fetchProxyBalance, fetchProxyDrepInfo, fetchProxyDelegatorsInfo, setProxies } = useProxyActions(); async function fetchUtxos() { if (appWallet) { @@ -83,7 +83,7 @@ export default function WalletDataLoader() { network.toString() ); - // Fetch DRep info + // Fetch DRep info with force refresh await fetchProxyDrepInfo( appWallet.id, proxy.id, @@ -91,7 +91,20 @@ export default function WalletDataLoader() { proxy.authTokenId, appWallet.scriptCbor, network.toString(), - proxy.paramUtxo + proxy.paramUtxo, + true // Force refresh to bypass cache + ); + + // Fetch delegators info with force refresh + await fetchProxyDelegatorsInfo( + appWallet.id, + proxy.id, + proxy.proxyAddress, + proxy.authTokenId, + appWallet.scriptCbor, + network.toString(), + proxy.paramUtxo, + true // Force refresh to bypass cache ); console.log(`WalletDataLoader: Successfully fetched data for proxy ${proxy.id}`); diff --git a/src/components/multisig/proxy/ProxyOverview.tsx b/src/components/multisig/proxy/ProxyOverview.tsx index dd371532..50412117 100644 --- a/src/components/multisig/proxy/ProxyOverview.tsx +++ b/src/components/multisig/proxy/ProxyOverview.tsx @@ -43,6 +43,12 @@ interface ProxyCardProps { balance?: Array<{ unit: string; quantity: string }>; drepId?: string; drepInfo?: any; + delegatorsInfo?: { + delegators: Array<{ address: string; amount: string }>; + totalDelegation: string; + totalDelegationADA: number; + count: number; + }; lastUpdated?: number; }; isSelected: boolean; @@ -100,6 +106,7 @@ const ProxyCard = memo(function ProxyCard({ const displayBalance = proxy.balance || []; const drepId = proxy.drepId; const drepInfo = proxy.drepInfo; + const delegatorsInfo = proxy.delegatorsInfo; const balanceLoading = false; // No loading state needed since data is already loaded const [isExpanded, setIsExpanded] = React.useState(false); const [isEditing, setIsEditing] = React.useState(false); @@ -426,6 +433,64 @@ const ProxyCard = memo(function ProxyCard({
+
+            {/* Delegators Information */}
+            {delegatorsInfo && delegatorsInfo.count > 0 && (
+              <div>
+                <span>Delegations</span>
+
+                {/* Total Delegation */}
+                <div>
+                  <span>Total Delegation</span>
+                  <div>
+                    {delegatorsInfo.totalDelegationADA.toLocaleString(undefined, {
+                      minimumFractionDigits: 2,
+                      maximumFractionDigits: 6
+                    })} ₳
+                  </div>
+                  <div>
+                    {delegatorsInfo.count} delegator{delegatorsInfo.count !== 1 ? 's' : ''}
+                  </div>
+                </div>
+
+                {/* Top Delegators */}
+                {delegatorsInfo.delegators.length > 0 && (
+                  <div>
+                    <span>Top Delegators</span>
+                    {delegatorsInfo.delegators.slice(0, 5).map((delegator, index) => (
+                      <div key={index}>
+                        <span>
+                          {delegator.address.slice(0, 20)}...
+                        </span>
+                        <span>
+                          {(Number(delegator.amount) / 1000000).toFixed(2)} ₳
+                        </span>
+                      </div>
+                    ))}
+                    {delegatorsInfo.delegators.length > 5 && (
+                      <div>
+                        +{delegatorsInfo.delegators.length - 5} more
+                      </div>
+                    )}
+                  </div>
+                )}
+              </div>
+            )}
    )}
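The figures shown in this panel come from the `getDrepDelegators` helper added in the offchain.ts hunk below. As a minimal standalone sketch of that aggregation, assuming the `DelegatorInfo` shape this patch adds to `src/lib/zustand/proxy.ts` (the `summarizeDelegators` name and the descending sort for the "top" entries are illustrative additions, not part of the patch):

```ts
interface DelegatorInfo {
  address: string;
  amount: string; // lovelace, as a decimal string from the delegators endpoint
}

// Sum lovelace amounts with BigInt to avoid Number precision loss,
// then convert once to ADA for display purposes only.
function summarizeDelegators(delegators: DelegatorInfo[]) {
  const totalLovelace = delegators.reduce(
    (sum, d) => sum + BigInt(d.amount),
    BigInt(0),
  );
  const top = [...delegators]
    .sort((a, b) => (BigInt(b.amount) > BigInt(a.amount) ? 1 : -1))
    .slice(0, 5);
  return {
    totalDelegation: totalLovelace.toString(),
    totalDelegationADA: Number(totalLovelace) / 1_000_000,
    count: delegators.length,
    top,
  };
}
```

The card above then formats `totalDelegationADA` with `toLocaleString` and truncates each delegator address for display.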
    diff --git a/src/components/multisig/proxy/offchain.ts b/src/components/multisig/proxy/offchain.ts index 2c361e38..d0f5ebff 100644 --- a/src/components/multisig/proxy/offchain.ts +++ b/src/components/multisig/proxy/offchain.ts @@ -637,12 +637,12 @@ export class MeshProxyContract extends MeshTxInitiator { return resolveScriptHashDRepId(proxyScriptHash); }; - getDrepStatus = async () => { + getDrepStatus = async (forceRefresh = false) => { const drepId = this.getDrepId(); // Check cache first const cached = drepStatusCache.get(drepId); - if (cached && (Date.now() - cached.timestamp) < CACHE_DURATION) { + if (!forceRefresh && cached && (Date.now() - cached.timestamp) < CACHE_DURATION) { return cached.data; } @@ -703,6 +703,63 @@ export class MeshProxyContract extends MeshTxInitiator { } }; + /** + * Get DRep delegators and their delegation amounts + * @param forceRefresh Whether to bypass cache + * @returns Array of delegators with addresses and amounts, plus total delegation + */ + getDrepDelegators = async (forceRefresh = false) => { + const drepId = this.getDrepId(); + + // Check cache first + const cacheKey = `${drepId}_delegators`; + const cached = drepStatusCache.get(cacheKey); + if (!forceRefresh && cached && (Date.now() - cached.timestamp) < CACHE_DURATION) { + return cached.data; + } + + if (!this.mesh.fetcher) { + throw new Error("Blockchain provider not found"); + } + + try { + const delegators = await this.mesh.fetcher.get( + `/governance/dreps/${drepId}/delegators?count=100&page=1&order=asc`, + ); + + // Calculate total delegation amount + const totalDelegation = delegators.reduce((sum: bigint, delegator: { amount: string }) => { + return sum + BigInt(delegator.amount); + }, BigInt(0)); + + const result = { + delegators, + totalDelegation: totalDelegation.toString(), + totalDelegationADA: Number(totalDelegation) / 1000000, // Convert to ADA + count: delegators.length + }; + + // Cache the successful result + drepStatusCache.set(cacheKey, { + data: result, + timestamp: Date.now() + }); + + return result; + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + console.log(`Failed to fetch DRep delegators: ${errorMessage}`); + + // Return empty result for errors + return { + delegators: [], + totalDelegation: "0", + totalDelegationADA: 0, + count: 0 + }; + } + }; + /** * Vote on governance proposals using proxy DRep * @param votes Array of vote objects with proposalId, voteKind, and optional metadata diff --git a/src/components/pages/wallet/governance/card-info.tsx b/src/components/pages/wallet/governance/card-info.tsx index 53f661c5..37546326 100644 --- a/src/components/pages/wallet/governance/card-info.tsx +++ b/src/components/pages/wallet/governance/card-info.tsx @@ -40,6 +40,12 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet const [proxyDrepId, setProxyDrepId] = useState(null); const [loadingProxyDrep, setLoadingProxyDrep] = useState(false); const [proxyDrepError, setProxyDrepError] = useState(null); + const [proxyDelegatorsInfo, setProxyDelegatorsInfo] = useState<{ + delegators: Array<{ address: string; amount: string }>; + totalDelegation: string; + totalDelegationADA: number; + count: number; + } | null>(null); // Get DRep info for standard mode const currentDrepId = multisigWallet?.getKeysByRole(3) ? 
multisigWallet?.getDRepId() : appWallet?.dRepId; @@ -86,8 +92,22 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet setProxyDrepId(drepId); // Get DRep status (now with caching and proper error handling) - const status = await proxyContract.getDrepStatus(); + const status = await proxyContract.getDrepStatus(true); setProxyDrepInfo(status); + + // Get DRep delegators (force refresh on manual view) + try { + const delegators = await proxyContract.getDrepDelegators(true); + setProxyDelegatorsInfo(delegators as { + delegators: Array<{ address: string; amount: string }>; + totalDelegation: string; + totalDelegationADA: number; + count: number; + }); + } catch { + // ignore, leave as null + setProxyDelegatorsInfo(null); + } clearTimeout(timeoutId); } else { @@ -106,6 +126,7 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet setProxyDrepId(null); setProxyDrepInfo(null); setProxyDrepError(null); + setProxyDelegatorsInfo(null); } }; @@ -382,12 +403,20 @@ export default function CardInfo({ appWallet, manualUtxos }: { appWallet: Wallet Voting Power
    - {displayDrepInfo?.deposit ? `${(parseInt(displayDrepInfo.deposit) / 1000000).toFixed(2)}` : - displayDrepInfo?.amount ? `${(parseInt(displayDrepInfo.amount) / 1000000).toFixed(2)}` : - "0.00"} ADA + {proxyDelegatorsInfo?.totalDelegationADA !== undefined + ? proxyDelegatorsInfo.totalDelegationADA.toLocaleString(undefined, { minimumFractionDigits: 2, maximumFractionDigits: 6 }) + : displayDrepInfo?.deposit + ? (parseInt(displayDrepInfo.deposit) / 1000000).toFixed(2) + : displayDrepInfo?.amount + ? (parseInt(displayDrepInfo.amount) / 1000000).toFixed(2) + : "0.00"} ADA
    - {loadingProxyDrep ? "Loading..." : "Deposit amount"} + {loadingProxyDrep + ? "Loading..." + : proxyDelegatorsInfo + ? `${proxyDelegatorsInfo.count} delegator${proxyDelegatorsInfo.count !== 1 ? 's' : ''}` + : "Deposit amount"}
    diff --git a/src/lib/zustand/proxy.ts b/src/lib/zustand/proxy.ts index 448f4563..68c360e8 100644 --- a/src/lib/zustand/proxy.ts +++ b/src/lib/zustand/proxy.ts @@ -14,6 +14,18 @@ export interface ProxyDrepInfo { hash?: string; } +export interface DelegatorInfo { + address: string; + amount: string; +} + +export interface ProxyDelegatorsInfo { + delegators: DelegatorInfo[]; + totalDelegation: string; + totalDelegationADA: number; + count: number; +} + export interface ProxyData { id: string; proxyAddress: string; @@ -25,6 +37,7 @@ export interface ProxyData { balance?: Array<{ unit: string; quantity: string }>; drepId?: string; drepInfo?: ProxyDrepInfo; + delegatorsInfo?: ProxyDelegatorsInfo; lastUpdated?: number; } @@ -59,7 +72,8 @@ interface ProxyState { // Data fetching actions fetchProxyBalance: (walletId: string, proxyId: string, proxyAddress: string, network: string) => Promise; - fetchProxyDrepInfo: (walletId: string, proxyId: string, proxyAddress: string, authTokenId: string, scriptCbor: string, network: string, paramUtxo: string) => Promise; + fetchProxyDrepInfo: (walletId: string, proxyId: string, proxyAddress: string, authTokenId: string, scriptCbor: string, network: string, paramUtxo: string, forceRefresh?: boolean) => Promise; + fetchProxyDelegatorsInfo: (walletId: string, proxyId: string, proxyAddress: string, authTokenId: string, scriptCbor: string, network: string, paramUtxo: string, forceRefresh?: boolean) => Promise; // Utility actions updateProxyData: (walletId: string, proxyId: string, updates: Partial) => void; @@ -149,7 +163,7 @@ export const useProxyStore = create()( }, // Fetch proxy DRep information - fetchProxyDrepInfo: async (walletId, proxyId, proxyAddress, authTokenId, scriptCbor, network, paramUtxo) => { + fetchProxyDrepInfo: async (walletId, proxyId, proxyAddress, authTokenId, scriptCbor, network, paramUtxo, forceRefresh = false) => { try { get().setDrepLoading(proxyId, true); get().setDrepError(proxyId, null); @@ -172,7 +186,7 @@ export const useProxyStore = create()( const drepId = proxyContract.getDrepId(); // Get DRep status (now with caching and proper error handling) - const status = await proxyContract.getDrepStatus(); + const status = await proxyContract.getDrepStatus(forceRefresh); const drepInfo: ProxyDrepInfo | undefined = status; // Update the specific proxy's DRep data @@ -193,6 +207,48 @@ export const useProxyStore = create()( get().setDrepLoading(proxyId, false); } }, + + // Fetch proxy delegators information + fetchProxyDelegatorsInfo: async (walletId, proxyId, proxyAddress, authTokenId, scriptCbor, network, paramUtxo, forceRefresh = false) => { + try { + get().setDrepLoading(proxyId, true); + get().setDrepError(proxyId, null); + + const txBuilder = getTxBuilder(parseInt(network)); + const proxyContract = new MeshProxyContract( + { + mesh: txBuilder, + wallet: undefined, + networkId: parseInt(network), + }, + { + paramUtxo: JSON.parse(paramUtxo || '{}'), + }, + scriptCbor, + ); + proxyContract.proxyAddress = proxyAddress; + + // Get delegators info + const delegatorsInfo = await proxyContract.getDrepDelegators(forceRefresh) as ProxyDelegatorsInfo; + + // Update the specific proxy's delegators data + const currentState = get(); + const updatedProxies = currentState.proxies[walletId]?.map(proxy => + proxy.id === proxyId + ? 
{ ...proxy, delegatorsInfo, lastUpdated: Date.now() } + : proxy + ) || []; + + set((state) => ({ + proxies: { ...state.proxies, [walletId]: updatedProxies }, + drepLoading: { ...state.drepLoading, [proxyId]: false }, + drepErrors: { ...state.drepErrors, [proxyId]: null }, + })); + } catch (error) { + get().setDrepError(proxyId, `Failed to fetch delegators info for proxy ${proxyId}`); + get().setDrepLoading(proxyId, false); + } + }, // Update specific proxy data updateProxyData: (walletId, proxyId, updates) => @@ -267,6 +323,7 @@ export const useProxyActions = () => { const clearSelectedProxy = useProxyStore((state) => state.clearSelectedProxy); const fetchProxyBalance = useProxyStore((state) => state.fetchProxyBalance); const fetchProxyDrepInfo = useProxyStore((state) => state.fetchProxyDrepInfo); + const fetchProxyDelegatorsInfo = useProxyStore((state) => state.fetchProxyDelegatorsInfo); const updateProxyData = useProxyStore((state) => state.updateProxyData); const clearProxyData = useProxyStore((state) => state.clearProxyData); @@ -278,6 +335,7 @@ export const useProxyActions = () => { clearSelectedProxy, fetchProxyBalance, fetchProxyDrepInfo, + fetchProxyDelegatorsInfo, updateProxyData, clearProxyData, };
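The loaders earlier in this patch call the new store action immediately after `fetchProxyDrepInfo`. A condensed sketch of that call pattern, assuming `useProxyActions` is imported via the `@/lib/zustand/...` alias used elsewhere in the repo and that the wallet/proxy objects are shaped as in the loaders (the `refreshProxyGovernanceData` wrapper is illustrative only):

```ts
import { useProxyActions } from "@/lib/zustand/proxy";

type ProxyLike = {
  id: string;
  proxyAddress: string;
  authTokenId: string;
  paramUtxo: string;
};

// Fetch DRep status and delegators back to back, bypassing the in-memory cache.
async function refreshProxyGovernanceData(
  actions: ReturnType<typeof useProxyActions>,
  appWallet: { id: string; scriptCbor: string },
  proxy: ProxyLike,
  network: number,
) {
  await actions.fetchProxyDrepInfo(
    appWallet.id,
    proxy.id,
    proxy.proxyAddress,
    proxy.authTokenId,
    appWallet.scriptCbor,
    network.toString(),
    proxy.paramUtxo,
    true, // forceRefresh
  );
  await actions.fetchProxyDelegatorsInfo(
    appWallet.id,
    proxy.id,
    proxy.proxyAddress,
    proxy.authTokenId,
    appWallet.scriptCbor,
    network.toString(),
    proxy.paramUtxo,
    true, // forceRefresh
  );
}
```

Errors are handled inside the store actions themselves, which record them via `setDrepError(proxyId, ...)` instead of throwing, so callers do not need their own try/catch around these two awaits.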