diff --git a/aztec-up/bootstrap.sh b/aztec-up/bootstrap.sh index 4a74b689269b..50eb80443394 100755 --- a/aztec-up/bootstrap.sh +++ b/aztec-up/bootstrap.sh @@ -104,7 +104,7 @@ EOF } function test_cmds { - for test in amm_flow bridge_and_claim basic_install counter_contract; do + for test in amm_flow bridge_and_claim basic_install counter_contract default_scaffold; do echo "$hash:TIMEOUT=15m aztec-up/scripts/run_test.sh $test" done } diff --git a/aztec-up/test/counter_contract.sh b/aztec-up/test/counter_contract.sh index 28f337ec3256..c95bed113ee9 100755 --- a/aztec-up/test/counter_contract.sh +++ b/aztec-up/test/counter_contract.sh @@ -4,28 +4,38 @@ set -euo pipefail export LOG_LEVEL=silent # Execute commands as per: https://docs.aztec.network/tutorials/codealong/contract_tutorials/counter_contract -aztec new counter_contract -if [ ! -f counter_contract/Nargo.toml ] || [ ! -f counter_contract/src/main.nr ]; then - echo "Failed to create contract." +aztec new counter + +# Verify workspace structure +if [ ! -f counter/Nargo.toml ]; then + echo "Failed to create workspace Nargo.toml." + exit 1 +fi +if [ ! -f counter/counter_contract/Nargo.toml ] || [ ! -f counter/counter_contract/src/main.nr ]; then + echo "Failed to create contract crate." + exit 1 +fi +if [ ! -f counter/counter_test/Nargo.toml ] || [ ! -f counter/counter_test/src/lib.nr ]; then + echo "Failed to create test crate." exit 1 fi -# Check counter_contract dir is owned by aztec-dev. -if [ "$(stat -c %U counter_contract)" != "ubuntu" ]; then - echo "counter_contract dir is not owned by ubuntu." +# Check counter dir is owned by ubuntu. +if [ "$(stat -c %U counter)" != "ubuntu" ]; then + echo "counter dir is not owned by ubuntu." exit 1 fi -# "Write" our contract. -cp -Rf ./aztec-packages/noir-projects/noir-contracts/contracts/test/counter_contract . -cd counter_contract -sed -i 's|\.\./\.\./\.\./\.\./|/home/ubuntu/aztec-packages/noir-projects/|g' Nargo.toml +# "Write" our contract over the scaffold. 
+cp -Rf ./aztec-packages/noir-projects/noir-contracts/contracts/test/counter/* counter/ +cd counter +sed -i 's|\.\./\.\./\.\./\.\./\.\./|/home/ubuntu/aztec-packages/noir-projects/|g' counter_contract/Nargo.toml counter_test/Nargo.toml # Compile the contract. aztec compile # Codegen -aztec codegen -o src/artifacts target -if [ ! -d src/artifacts ]; then +aztec codegen -o counter_contract/src/artifacts target +if [ ! -d counter_contract/src/artifacts ]; then echo "Failed to codegen TypeScript." exit 1 fi diff --git a/aztec-up/test/default_scaffold.sh b/aztec-up/test/default_scaffold.sh new file mode 100755 index 000000000000..9eb08df644fd --- /dev/null +++ b/aztec-up/test/default_scaffold.sh @@ -0,0 +1,66 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Tests that the default scaffold generated by `aztec new` compiles and passes its tests without any modifications. +# Also tests that a second contract can be added to the workspace with `aztec new`. + +export LOG_LEVEL=silent + +aztec new my_workspace + +# Verify workspace structure with named crate directories. +if [ ! -f my_workspace/Nargo.toml ]; then + echo "Failed to create workspace Nargo.toml." + exit 1 +fi +if [ ! -f my_workspace/my_workspace_contract/Nargo.toml ] || [ ! -f my_workspace/my_workspace_contract/src/main.nr ]; then + echo "Failed to create contract crate." + exit 1 +fi +if [ ! -f my_workspace/my_workspace_test/Nargo.toml ] || [ ! -f my_workspace/my_workspace_test/src/lib.nr ]; then + echo "Failed to create test crate." + exit 1 +fi + +cd my_workspace + +# This is unfortunate as it makes the test worse but in CI setting the aztec version is 0.0.1 which doesn't exist as +# a remote git tag, so we need to rewrite dependencies to use local aztec-nr. +sed -i 's|aztec = .*git.*AztecProtocol/aztec-nr.*|aztec = { path="/home/ubuntu/aztec-packages/noir-projects/aztec-nr/aztec" }|' \ + my_workspace_contract/Nargo.toml my_workspace_test/Nargo.toml + +# Compile the default scaffold contract. 
+aztec compile + +# Run the default scaffold tests. +aztec test + +# --- Test adding a second contract to the workspace --- +aztec new token + +# Verify token crates were created. +if [ ! -f token_contract/Nargo.toml ] || [ ! -f token_contract/src/main.nr ]; then + echo "Failed to create token contract crate." + exit 1 +fi +if [ ! -f token_test/Nargo.toml ] || [ ! -f token_test/src/lib.nr ]; then + echo "Failed to create token test crate." + exit 1 +fi + +# Verify workspace Nargo.toml contains all four members. +if ! grep -q '"my_workspace_contract"' Nargo.toml || \ + ! grep -q '"my_workspace_test"' Nargo.toml || \ + ! grep -q '"token_contract"' Nargo.toml || \ + ! grep -q '"token_test"' Nargo.toml; then + echo "Workspace Nargo.toml does not contain all expected members." + exit 1 +fi + +# Rewrite aztec deps for token crates too. +sed -i 's|aztec = .*git.*AztecProtocol/aztec-nr.*|aztec = { path="/home/ubuntu/aztec-packages/noir-projects/aztec-nr/aztec" }|' \ + token_contract/Nargo.toml token_test/Nargo.toml + +# Compile and test the full workspace (both contracts). +aztec compile +aztec test diff --git a/barretenberg/cpp/src/barretenberg/api/aztec_process.cpp b/barretenberg/cpp/src/barretenberg/api/aztec_process.cpp index 6663060f993c..671f72dfd9a7 100644 --- a/barretenberg/cpp/src/barretenberg/api/aztec_process.cpp +++ b/barretenberg/cpp/src/barretenberg/api/aztec_process.cpp @@ -258,6 +258,21 @@ bool process_aztec_artifact(const std::string& input_path, const std::string& ou return true; } + // Strip __aztec_nr_internals__ prefix from function names. + // The #[aztec] macro generates wrapper functions with this prefix; we strip it so + // the exported ABI exposes the original developer-written names. 
+    const std::string internal_prefix = "__aztec_nr_internals__";
+    for (auto& function : artifact_json["functions"]) {
+        auto& name = function["name"];
+        if (name.is_string()) {
+            std::string fn_name = name.get<std::string>();
+            if (fn_name.size() >= internal_prefix.size() &&
+                fn_name.compare(0, internal_prefix.size(), internal_prefix) == 0) {
+                name = fn_name.substr(internal_prefix.size());
+            }
+        }
+    }
+
     // Filter to private constrained functions
     std::vector private_functions;
     for (auto& function : artifact_json["functions"]) {
@@ -266,14 +281,13 @@ bool process_aztec_artifact(const std::string& input_path, const std::string& ou
         }
     }
 
-    if (private_functions.empty()) {
+    if (!private_functions.empty()) {
+        // Generate VKs
+        generate_vks_for_functions(cache_dir, private_functions, force);
+    } else {
         info("No private constrained functions found");
-        return true;
     }
 
-    // Generate VKs
-    generate_vks_for_functions(cache_dir, private_functions, force);
-
     // Write updated JSON back to file
     std::ofstream out_file(output_path);
     out_file << artifact_json.dump(2) << std::endl;
diff --git a/boxes/init/.gitignore b/boxes/init/.gitignore
new file mode 100644
index 000000000000..292dee672e8b
--- /dev/null
+++ b/boxes/init/.gitignore
@@ -0,0 +1,2 @@
+target/
+codegenCache.json
diff --git a/boxes/init/Nargo.toml b/boxes/init/Nargo.toml
index 14bcaec85ac4..3ae352f74220 100644
--- a/boxes/init/Nargo.toml
+++ b/boxes/init/Nargo.toml
@@ -1,6 +1,2 @@
-[package]
-name = "init"
-type = "contract"
-
-[dependencies]
-aztec = { path = "../../noir-projects/aztec-nr/aztec" }
+[workspace]
+members = ["contract", "test"]
diff --git a/boxes/init/README.md b/boxes/init/README.md
new file mode 100644
index 000000000000..5f0fe47a795f
--- /dev/null
+++ b/boxes/init/README.md
@@ -0,0 +1,27 @@
+# init
+
+An Aztec Noir contract project.
+
+## Compile
+
+```bash
+aztec compile
+```
+
+This compiles the contract in `contract/` and outputs artifacts to `target/`. 
+ +## Test + +```bash +aztec test +``` + +This runs the tests in `test/`. + +## Generate TypeScript bindings + +```bash +aztec codegen target -o src/artifacts +``` + +This generates TypeScript contract artifacts from the compiled output in `target/` into `src/artifacts/`. diff --git a/boxes/init/contract/Nargo.toml b/boxes/init/contract/Nargo.toml new file mode 100644 index 000000000000..48e749065754 --- /dev/null +++ b/boxes/init/contract/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "init" +type = "contract" + +[dependencies] +aztec = { path = "../../../noir-projects/aztec-nr/aztec" } diff --git a/boxes/init/contract/src/main.nr b/boxes/init/contract/src/main.nr new file mode 100644 index 000000000000..fedcf9a88eea --- /dev/null +++ b/boxes/init/contract/src/main.nr @@ -0,0 +1,10 @@ +use aztec::macros::aztec; + +#[aztec] +pub contract Main { + use aztec::macros::functions::{external, initializer}; + + #[initializer] + #[external("private")] + fn constructor() {} +} diff --git a/boxes/init/src/main.nr b/boxes/init/src/main.nr deleted file mode 100644 index 302aec7a8469..000000000000 --- a/boxes/init/src/main.nr +++ /dev/null @@ -1,9 +0,0 @@ - -use aztec::macros::aztec; - -#[aztec] -contract Main { - #[external("private")] - #[initializer] - fn constructor() { } -} diff --git a/boxes/init/test/Nargo.toml b/boxes/init/test/Nargo.toml new file mode 100644 index 000000000000..38d2b87c645f --- /dev/null +++ b/boxes/init/test/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "init_test" +type = "lib" + +[dependencies] +aztec = { path = "../../../noir-projects/aztec-nr/aztec" } +init = { path = "../contract" } diff --git a/boxes/init/test/src/lib.nr b/boxes/init/test/src/lib.nr new file mode 100644 index 000000000000..7b5a395d4fd4 --- /dev/null +++ b/boxes/init/test/src/lib.nr @@ -0,0 +1,17 @@ +use aztec::test::helpers::test_environment::TestEnvironment; +use init::Main; + +#[test] +unconstrained fn test_constructor() { + let mut env = TestEnvironment::new(); + let deployer 
= env.create_light_account(); + + // Deploy the contract with the default constructor: + let contract_address = env.deploy("@init/Main").with_private_initializer( + deployer, + Main::interface().constructor(), + ); + + // Deploy without an initializer: + let contract_address = env.deploy("@init/Main").without_initializer(); +} diff --git a/docs/bootstrap.sh b/docs/bootstrap.sh index ad7a3dd3f6ae..88bbfaeb45ad 100755 --- a/docs/bootstrap.sh +++ b/docs/bootstrap.sh @@ -4,7 +4,6 @@ source $(git rev-parse --show-toplevel)/ci3/source_bootstrap repo_root=$(git rev-parse --show-toplevel) export BB=${BB:-$repo_root/barretenberg/cpp/build/bin/bb} export NARGO=${NARGO:-$repo_root/noir/noir-repo/target/release/nargo} -export TRANSPILER=${TRANSPILER:-$repo_root/avm-transpiler/target/release/avm-transpiler} export BB_HASH=${BB_HASH:-$($repo_root/barretenberg/cpp/bootstrap.sh hash)} # We search the docs/*.md files to find included code, and use those as our rebuild dependencies. diff --git a/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_local_network.md b/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_local_network.md index d1ccbd24873b..a9715151ab20 100644 --- a/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_local_network.md +++ b/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_local_network.md @@ -8,7 +8,7 @@ tags: [local_network, testnet] import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -Get started on your local environment using a local network. +Get started on your local environment using a local network. If you'd rather deploy to a live network, read the [getting started on testnet guide](./getting_started_on_testnet.md). The local network is a local development Aztec network running fully on your machine, and interacting with a development Ethereum node. You can develop and deploy on it just like on a testnet or mainnet (when the time comes). 
The local network makes it faster and easier to develop and test your Aztec applications. @@ -252,4 +252,5 @@ Simulation result: 25n Want to build something cool on Aztec? - Check out the [Token Contract Tutorial](./docs/tutorials/contract_tutorials/token_contract.md) for a beginner tutorial, or jump into more advanced ones +- Ready for a live network? Try [deploying on testnet](./getting_started_on_testnet.md) - Start on your own thing and check out the How To Guides to help you! diff --git a/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_testnet.md b/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_testnet.md new file mode 100644 index 000000000000..bd7938826ff2 --- /dev/null +++ b/docs/developer_versioned_docs/version-v4.1.0-rc.2/getting_started_on_testnet.md @@ -0,0 +1,207 @@ +--- +title: Getting Started on Testnet +sidebar_position: 1 +tags: [testnet] +description: Deploy contracts and send transactions on the Aztec testnet using the CLI wallet and the Sponsored FPC for fee payment. +--- + +import { General } from '@site/src/components/Snippets/general_snippets'; + +This guide walks you through deploying your first contract on the Aztec testnet. You will install the CLI tools, create an account using the Sponsored FPC (so you don't need to bridge Fee Juice yourself), and deploy and interact with a contract. + +## Testnet vs Local Network + +| Feature | Local Network | Testnet | +|---------|-------------|---------| +| **Environment** | Local machine | Decentralized network on Sepolia | +| **Fees** | Free (test accounts prefunded) | Sponsored FPC available | +| **Block times** | Instant | ~36 seconds | +| **Proving** | Optional | Required | +| **Accounts** | Test accounts pre-deployed | Must create and deploy your own | + +:::info +If you want to develop and iterate quickly, start with the [local network guide](./getting_started_on_local_network.md). 
The local network has instant blocks and no proving, making it faster for development. +::: + +## Prerequisites + +- + +## Install the Aztec toolchain + +Install the testnet version of the Aztec CLI: + +```bash +VERSION=4.2.0-aztecnr-rc.2 bash -i <(curl -sL https://install.aztec.network/4.2.0-aztecnr-rc.2) +``` + +:::warning +Testnet is version-dependent. It is currently running version `4.2.0-aztecnr-rc.2`. Maintain version consistency when interacting with the testnet to avoid errors. +::: + +This installs: + +- **aztec** - Compiles and tests Aztec contracts, launches infrastructure, and provides utility commands +- **aztec-up** - Version manager for the Aztec toolchain (`aztec-up install`, `aztec-up use`, `aztec-up list`) +- **aztec-wallet** - CLI tool for interacting with the Aztec network + +## Getting started on testnet + +### Step 1: Set up your environment + +Set the required environment variables: + +```bash +export NODE_URL=https://rpc.testnet.aztec-labs.com +export SPONSORED_FPC_ADDRESS=0x254082b62f9108d044b8998f212bb145619d91bfcd049461d74babb840181257 +``` + +### Step 2: Register the Sponsored FPC + +The Sponsored FPC (Fee Payment Contract) pays transaction fees on your behalf, so you don't need to bridge Fee Juice from L1. Register it in your wallet: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias sponsoredfpc \ + $SPONSORED_FPC_ADDRESS SponsoredFPC \ + --salt 0 +``` + +### Step 3: Create and deploy an account + +Unlike the local network, testnet has no pre-deployed accounts. Create and deploy your own: + +```bash +aztec-wallet create-account \ + --node-url $NODE_URL \ + --alias my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS +``` + +:::note +The first transaction will take longer as it downloads proving keys. If you see `Timeout awaiting isMined`, the transaction is still processing — this is normal on testnet. 
+::: + +### Step 4: Deploy a contract + +Deploy a token contract as an example: + +```bash +aztec-wallet deploy \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --alias token \ + TokenContract \ + --args accounts:my-wallet Token TOK 18 +``` + +This deploys the `TokenContract` with: +- `admin`: your wallet address +- `name`: Token +- `symbol`: TOK +- `decimals`: 18 + +You can check the transaction status on [Aztecscan](https://testnet.aztecscan.xyz). + +### Step 5: Interact with your contract + +Mint some tokens: + +```bash +aztec-wallet send mint_to_public \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --contract-address token \ + --args accounts:my-wallet 100 +``` + +Check your balance: + +```bash +aztec-wallet simulate balance_of_public \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --contract-address token \ + --args accounts:my-wallet +``` + +This should print: + +``` +Simulation result: 100n +``` + +Move tokens to private state: + +```bash +aztec-wallet send transfer_to_private \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --contract-address token \ + --args accounts:my-wallet 25 +``` + +Check your private balance: + +```bash +aztec-wallet simulate balance_of_private \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --contract-address token \ + --args accounts:my-wallet +``` + +This should print: + +``` +Simulation result: 25n +``` + +## Viewing transactions on the block explorer + +You can view your transactions, contracts, and account on the testnet block explorers: + +- [Aztecscan](https://testnet.aztecscan.xyz) +- [Aztec Explorer](https://aztecexplorer.xyz/?network=testnet) + +Search by transaction hash, contract address, or account address to see details and status. 
+ +## Registering existing contracts + +To interact with a contract deployed by someone else, you need to register it in your local PXE first: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias mycontract \ + +``` + +For example, to register a `TokenContract` deployed by someone else: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias external-token \ + 0x1234...abcd TokenContract +``` + +After registration, you can interact with it using `aztec-wallet send` and `aztec-wallet simulate` as shown above. + +## Paying fees without the Sponsored FPC + +The Sponsored FPC is convenient for getting started, but you can also pay fees directly by bridging Fee Juice from Ethereum Sepolia. See [Paying Fees](./docs/aztec-js/how_to_pay_fees.md#bridge-fee-juice-from-l1) for details on bridging and other fee payment methods. + +## Testnet information + +For complete testnet technical details including contract addresses and network configuration, see the [Networks page](/networks#testnet). 
+ +## Next steps + +- Check out the [Tutorials](./docs/tutorials/contract_tutorials/counter_contract.md) for building more complex contracts +- Learn about [paying fees](./docs/aztec-js/how_to_pay_fees.md) with different methods +- Explore [Aztec Playground](https://play.aztec.network/) for an interactive development experience diff --git a/docs/developer_versioned_sidebars/version-v4.1.0-rc.2-sidebars.json b/docs/developer_versioned_sidebars/version-v4.1.0-rc.2-sidebars.json index d1a66f8279f4..75eaf90f9f30 100644 --- a/docs/developer_versioned_sidebars/version-v4.1.0-rc.2-sidebars.json +++ b/docs/developer_versioned_sidebars/version-v4.1.0-rc.2-sidebars.json @@ -13,6 +13,10 @@ "type": "doc", "id": "getting_started_on_local_network" }, + { + "type": "doc", + "id": "getting_started_on_testnet" + }, { "type": "doc", "id": "ai_tooling" diff --git a/docs/docs-developers/docs/aztec-nr/api.mdx b/docs/docs-developers/docs/aztec-nr/api.mdx index 59cc025240d4..55dfaef610f3 100644 --- a/docs/docs-developers/docs/aztec-nr/api.mdx +++ b/docs/docs-developers/docs/aztec-nr/api.mdx @@ -10,11 +10,13 @@ import { useActiveVersion } from "@docusaurus/plugin-content-docs/client"; export const useApiVersion = () => { const version = useActiveVersion("developer"); - const versionName = version?.name || "current"; - // Map Docusaurus version to API docs folder - if (versionName === "current") return "next"; - if (versionName.includes("rc") || versionName.includes("testnet")) return "testnet"; - return versionName; + if (!version || version.name === "current") return "next"; + // Map Docusaurus version to API docs folder using version label + // Labels are set explicitly in docusaurus.config.js (e.g., "Mainnet (...)") + const label = version.label || ""; + if (label.startsWith("Alpha")) return "mainnet"; + if (label.startsWith("Testnet")) return "testnet"; + return version.name; }; export const ModuleLink = ({ path, children }) => { diff --git 
a/docs/docs-developers/docs/aztec-nr/framework-description/dependencies.md b/docs/docs-developers/docs/aztec-nr/framework-description/dependencies.md index 62f1a7d80b05..488979261b2b 100644 --- a/docs/docs-developers/docs/aztec-nr/framework-description/dependencies.md +++ b/docs/docs-developers/docs/aztec-nr/framework-description/dependencies.md @@ -18,25 +18,17 @@ aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="#include_aztec_ ### Aztec (required) ```toml -aztec = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/aztec-nr/aztec" } +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="#include_aztec_version", directory="aztec" } ``` The core Aztec library required for every Aztec.nr smart contract. -### Protocol Types - -```toml -protocol = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/noir-protocol-circuits/crates/types"} -``` - -Contains types used in the Aztec protocol (addresses, constants, hashes, etc.). - ## Note Types ### Address Note ```toml -address_note = { git="https://github.com/AztecProtocol/aztec-packages/", tag="#include_aztec_version", directory="noir-projects/aztec-nr/address-note" } +address_note = { git="https://github.com/AztecProtocol/aztec-nr/", tag="#include_aztec_version", directory="address-note" } ``` Provides `AddressNote`, a note type for storing `AztecAddress` values. @@ -76,3 +68,24 @@ compressed_string = { git="https://github.com/AztecProtocol/aztec-nr/", tag="#in ``` Provides `CompressedString` and `FieldCompressedString` utilities for working with compressed string data. + +## Updating your aztec dependencies + +When `aztec compile` warns that your aztec dependency tag does not match the CLI version, update +the `tag` field in every Aztec.nr entry in your `Nargo.toml` to match the CLI version you are +running. 
+ +For example, if your CLI is `v#include_aztec_version`, change: + +```toml +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v", directory="aztec" } +``` + +to: + +```toml +aztec = { git="https://github.com/AztecProtocol/aztec-nr/", tag="v#include_aztec_version", directory="aztec" } +``` + +Repeat for every other Aztec.nr dependency in your `Nargo.toml` (e.g. `address_note`, +`balance_set`, etc.). You can check your current CLI version with `aztec --version`. diff --git a/docs/docs-developers/docs/cli/aztec_cli_reference.md b/docs/docs-developers/docs/cli/aztec_cli_reference.md index a854c95e836a..a3cfe9d98346 100644 --- a/docs/docs-developers/docs/cli/aztec_cli_reference.md +++ b/docs/docs-developers/docs/cli/aztec_cli_reference.md @@ -111,10 +111,11 @@ aztec [options] [command] - `get-logs [options]` - Gets all the public logs from an intersection of all the filter params. - `get-node-info [options]` - Gets the information of an Aztec node from a PXE or directly from an Aztec node. - `help [command]` - display help for command -- `init [folder] [options]` - creates a new Aztec Noir project. +- `init [options]` - creates a new Noir workspace in the current directory - `inspect-contract ` - Shows list of external callable functions for a contract +- `lsp` - starts the Nargo Language Server Protocol server - `migrate-ha-db` - Run validator-ha-signer database migrations -- `new [options]` - creates a new Aztec Noir project in a new directory. +- `new [options]` - creates a new Noir workspace (or adds a contract to an existing workspace) - `parse-parameter-struct [options] ` - Helper for parsing an encoded string into a contract's parameter struct. - `preload-crs` - Preload the points data needed for proving and verifying - `profile` - Profile compiled Aztec artifacts. 
diff --git a/docs/docs-developers/docs/resources/migration_notes.md b/docs/docs-developers/docs/resources/migration_notes.md index fd40830d061d..e531a2764e93 100644 --- a/docs/docs-developers/docs/resources/migration_notes.md +++ b/docs/docs-developers/docs/resources/migration_notes.md @@ -9,6 +9,33 @@ Aztec is in active development. Each version may introduce breaking changes that ## TBD +### [Aztec.nr] `emit_private_log_unsafe` / `emit_raw_note_log_unsafe` are deprecated + +`emit_private_log_unsafe` and `emit_raw_note_log_unsafe` are deprecated and will be removed in a future release. Migrate to the new `emit_private_log_vec_unsafe` / `emit_raw_note_log_vec_unsafe` functions, which take a `BoundedVec` instead of the `(log: [Field; PRIVATE_LOG_CIPHERTEXT_LEN], length: u32)` pair. + +```diff +- context.emit_private_log_unsafe(tag, log, length); ++ context.emit_private_log_vec_unsafe(tag, bounded_vec_log); +- context.emit_raw_note_log_unsafe(tag, log, length, note_hash_counter); ++ context.emit_raw_note_log_vec_unsafe(tag, bounded_vec_log, note_hash_counter); +``` + +If you were manually padding an array and passing a shorter length, you can now create a `BoundedVec` from just the meaningful fields: + +```diff +- let padded = payload.concat([0; PRIVATE_LOG_CIPHERTEXT_LEN - 2]); +- context.emit_private_log_unsafe(tag, padded, 2); ++ let log = BoundedVec::from_array(payload); ++ context.emit_private_log_vec_unsafe(tag, log); +``` + +If you were passing the full array, wrap it with `BoundedVec::from_array`: + +```diff +- context.emit_private_log_unsafe(tag, ciphertext, ciphertext.len()); ++ context.emit_private_log_vec_unsafe(tag, BoundedVec::from_array(ciphertext)); +``` + ### [aztec-nr] Nullifier membership witness oracle returns split types `get_nullifier_membership_witness` and `get_low_nullifier_membership_witness` now return `(NullifierLeafPreimage, MembershipWitness)` instead of the bundled `NullifierMembershipWitness` struct (which has been removed). 
@@ -90,6 +117,75 @@ The zero address (`AztecAddress::zero()`) is always allowed regardless of the sc **Impact**: Contracts that access capsules scoped to addresses not included in the transaction's authorized scopes will now fail at runtime. Ensure the correct scopes are passed when executing transactions. +### `aztec new` and `aztec init` now create a 2-crate workspace + +`aztec new` and `aztec init` now create a workspace with two crates instead of a single contract crate: + +- A `contract` crate (type = "contract") for your smart contract code +- A `test` crate (type = "lib") for Noir tests, which depends on the contract crate + +The new project structure looks like: + +``` +my_project/ +├── Nargo.toml # [workspace] members = ["contract", "test"] +├── contract/ +│ ├── src/main.nr +│ └── Nargo.toml # type = "contract" +└── test/ + ├── src/lib.nr + └── Nargo.toml # type = "lib" +``` + +**What changed:** + +- The `--contract` and `--lib` flags have been removed from `aztec new` and `aztec init`. These commands now always create a contract workspace. +- Contract code is now at `contract/src/main.nr` instead of `src/main.nr`. +- The `Nargo.toml` in the project root is now a workspace file. Contract dependencies go in `contract/Nargo.toml`. +- Tests should be written in the separate `test` crate (`test/src/lib.nr`) and import the contract by package name (e.g., `use my_contract::MyContract;`) instead of using `crate::`. + +### `aztec new` crate directories are now named after the contract + +`aztec new` and `aztec init` now name the generated crate directories after the contract instead of using generic `contract/` and `test/` names. 
For example, `aztec new counter` now creates: + +``` +counter/ +├── Nargo.toml # [workspace] members = ["counter_contract", "counter_test"] +├── counter_contract/ +│ ├── src/main.nr +│ └── Nargo.toml # type = "contract" +└── counter_test/ + ├── src/lib.nr + └── Nargo.toml # type = "lib" +``` + +This enables adding multiple contracts to a single workspace. Running `aztec new ` inside an existing workspace (a directory with a `Nargo.toml` containing `[workspace]`) now adds a new `_contract` and `_test` crate pair to the workspace instead of creating a new directory. + +**What changed:** + +- Crate directories are now `_contract/` and `_test/` instead of `contract/` and `test/`. +- Contract code is now at `_contract/src/main.nr` instead of `contract/src/main.nr`. +- Contract dependencies go in `_contract/Nargo.toml` instead of `contract/Nargo.toml`. +- Tests import the contract by its new crate name (e.g., `use counter_contract::Main;` instead of `use counter::Main;`). + +### [CLI] `--name` flag removed from `aztec new` and `aztec init` + +The `--name` flag has been removed from both `aztec new` and `aztec init`. For `aztec new`, the positional argument now serves as both the contract name and the directory name. For `aztec init`, the directory name is always used as the contract name. + +**Migration:** + +```diff +- aztec new my_project --name counter ++ aztec new counter +``` + +```diff +- aztec init --name counter ++ aztec init +``` + +**Impact**: If you were using `--name` to set a contract name different from the directory name, rename your directory or use `aztec new` with the desired contract name directly. 
+ ## 4.2.0-aztecnr-rc.2 ### Custom token FPCs removed from default public setup allowlist @@ -534,47 +630,6 @@ If you implement the `Wallet` interface (or extend `BaseWallet`), the `sendTx()` } ``` -### `aztec new` crate directories are now named after the contract - -`aztec new` and `aztec init` now name the generated crate directories after the contract instead of using generic `contract/` and `test/` names. For example, `aztec new counter` now creates: - -``` -counter/ -├── Nargo.toml # [workspace] members = ["counter_contract", "counter_test"] -├── counter_contract/ -│ ├── src/main.nr -│ └── Nargo.toml # type = "contract" -└── counter_test/ - ├── src/lib.nr - └── Nargo.toml # type = "lib" -``` - -This enables adding multiple contracts to a single workspace. Running `aztec new ` inside an existing workspace (a directory with a `Nargo.toml` containing `[workspace]`) now adds a new `_contract` and `_test` crate pair to the workspace instead of creating a new directory. - -**What changed:** - -- Crate directories are now `_contract/` and `_test/` instead of `contract/` and `test/`. -- Contract code is now at `_contract/src/main.nr` instead of `contract/src/main.nr`. -- Contract dependencies go in `_contract/Nargo.toml` instead of `contract/Nargo.toml`. -- Tests import the contract by its new crate name (e.g., `use counter_contract::Main;` instead of `use counter::Main;`). - -### [CLI] `--name` flag removed from `aztec new` and `aztec init` - -The `--name` flag has been removed from both `aztec new` and `aztec init`. For `aztec new`, the positional argument now serves as both the contract name and the directory name. For `aztec init`, the directory name is always used as the contract name. 
- -**Migration:** - -```diff -- aztec new my_project --name counter -+ aztec new counter -``` - -```diff -- aztec init --name counter -+ aztec init -``` - -**Impact**: If you were using `--name` to set a contract name different from the directory name, rename your directory or use `aztec new` with the desired contract name directly. ### [Aztec.js] Removed `SingleKeyAccountContract` The `SchnorrSingleKeyAccount` contract and its TypeScript wrapper `SingleKeyAccountContract` have been removed. This contract was insecure: it used `ivpk_m` (incoming viewing public key) as its Schnorr signing key, meaning anyone who received a user's viewing key could sign transactions on their behalf. @@ -590,32 +645,6 @@ The `SchnorrSingleKeyAccount` contract and its TypeScript wrapper `SingleKeyAcco **Impact**: If you were using `@aztec/accounts/single_key`, switch to `@aztec/accounts/schnorr` which uses separate keys for encryption and authentication. -### `aztec new` and `aztec init` now create a 2-crate workspace - -`aztec new` and `aztec init` now create a workspace with two crates instead of a single contract crate: - -- A `contract` crate (type = "contract") for your smart contract code -- A `test` crate (type = "lib") for Noir tests, which depends on the contract crate - -The new project structure looks like: - -``` -my_project/ -├── Nargo.toml # [workspace] members = ["contract", "test"] -├── contract/ -│ ├── src/main.nr -│ └── Nargo.toml # type = "contract" -└── test/ - ├── src/lib.nr - └── Nargo.toml # type = "lib" -``` - -**What changed:** - -- The `--contract` and `--lib` flags have been removed from `aztec new` and `aztec init`. These commands now always create a contract workspace. -- Contract code is now at `contract/src/main.nr` instead of `src/main.nr`. -- The `Nargo.toml` in the project root is now a workspace file. Contract dependencies go in `contract/Nargo.toml`. 
-- Tests should be written in the separate `test` crate (`test/src/lib.nr`) and import the contract by package name (e.g., `use my_contract::MyContract;`) instead of using `crate::`. ### Scope enforcement for private state access (TXE and PXE) Scope enforcement is now active across both TXE (test environment) and PXE (client). Previously, private execution could implicitly access any account's keys and notes. Now, only the caller (`from`) address is in scope by default, and accessing another address's private state requires explicitly granting scope. diff --git a/docs/docs-developers/docs/tutorials/contract_tutorials/counter_contract.md b/docs/docs-developers/docs/tutorials/contract_tutorials/counter_contract.md index 77e26bf0afa9..39fe64bd857f 100644 --- a/docs/docs-developers/docs/tutorials/contract_tutorials/counter_contract.md +++ b/docs/docs-developers/docs/tutorials/contract_tutorials/counter_contract.md @@ -22,7 +22,7 @@ This tutorial is compatible with the Aztec version `#include_aztec_version`. Ins Run this to create a new contract project: ```bash -aztec new --contract counter +aztec new counter ``` Your structure should look like this: @@ -30,14 +30,20 @@ Your structure should look like this: ```tree . |-counter -| |-src -| | |-main.nr -| |-Nargo.toml +| |-Nargo.toml <-- workspace root +| |-counter_contract +| | |-src +| | | |-main.nr +| | |-Nargo.toml <-- contract package config +| |-counter_test +| | |-src +| | | |-lib.nr +| | |-Nargo.toml <-- test package config ``` -The `aztec new` command creates a contract project with `Nargo.toml` and `src/main.nr`. The file `src/main.nr` will soon turn into our smart contract! +The `aztec new` command creates a workspace with two crates: a `counter_contract` crate for your smart contract code and a `counter_test` crate for Noir tests. The file `counter_contract/src/main.nr` will soon turn into our smart contract! 
-Add the following dependency to `Nargo.toml` under the existing `aztec` dependency: +Add the following dependency to `counter_contract/Nargo.toml` under the existing `aztec` dependency: ```toml [dependencies] @@ -47,7 +53,7 @@ balance_set = { git="https://github.com/AztecProtocol/aztec-nr/", tag="#include_ ## Define the functions -Go to `main.nr`, and replace the boilerplate code with this contract initialization: +Go to `counter_contract/src/main.nr`, and replace the boilerplate code with this contract initialization: ```rust #include_code setup /docs/examples/contracts/counter_contract/src/main.nr raw diff --git a/docs/docs-developers/docs/tutorials/contract_tutorials/recursive_verification.md b/docs/docs-developers/docs/tutorials/contract_tutorials/recursive_verification.md index 4d9be3222241..f340dcc28eb9 100644 --- a/docs/docs-developers/docs/tutorials/contract_tutorials/recursive_verification.md +++ b/docs/docs-developers/docs/tutorials/contract_tutorials/recursive_verification.md @@ -205,24 +205,30 @@ The contract demonstrates several important patterns: ### Create the Contract Project -Use `aztec init` to generate the contract project structure: +Use `aztec new` to generate the contract project structure: ```bash -aztec init --contract contract +aztec new contract --name ValueNotEqual ``` -This creates: +This creates a workspace with two crates: ```tree contract/ -├── src/ -│ └── main.nr # Contract code -└── Nargo.toml # Contract configuration +├── Nargo.toml # Workspace root +├── contract/ +│ ├── src/ +│ │ └── main.nr # Contract code +│ └── Nargo.toml # Contract configuration +└── test/ + ├── src/ + │ └── lib.nr # Test code + └── Nargo.toml # Test configuration ``` ### Contract Configuration -Update `contract/Nargo.toml` with the required dependencies: +Update `contract/contract/Nargo.toml` with the required dependencies: ```toml [package] @@ -235,7 +241,7 @@ aztec = { git = "https://github.com/AztecProtocol/aztec-nr/", tag = "#include_az 
bb_proof_verification = { git = "https://github.com/AztecProtocol/aztec-packages/", tag = "#include_aztec_version", directory = "barretenberg/noir/bb_proof_verification" } ``` -**Key differences from the circuit's Nargo.toml**: +**Key differences from the circuit's Nargo.toml** (in `contract/contract/Nargo.toml`): - `type = "contract"` (not `"bin"`) - Depends on `aztec` for Aztec-specific features @@ -243,7 +249,7 @@ bb_proof_verification = { git = "https://github.com/AztecProtocol/aztec-packages ### Contract Structure -Replace the contents of `contract/src/main.nr` with: +Replace the contents of `contract/contract/src/main.nr` with: #include_code full_contract /docs/examples/contracts/recursive_verification_contract/src/main.nr rust @@ -375,7 +381,7 @@ Create the following files in your project root directory. "name": "recursive-verification-tutorial", "type": "module", "scripts": { - "ccc": "cd contract && aztec compile && aztec codegen target -o ../artifacts", + "ccc": "cd contract && aztec compile && aztec codegen target -o contract/artifacts", "data": "tsx scripts/generate_data.ts", "recursion": "tsx index.ts" }, @@ -446,7 +452,7 @@ yarn ccc This generates: - `contract/target/ValueNotEqual.json` - Contract artifact (bytecode, ABI, etc.) 
-- `artifacts/ValueNotEqual.ts` - TypeScript class for deploying and interacting with the contract +- `contract/contract/artifacts/ValueNotEqual.ts` - TypeScript class for deploying and interacting with the contract ### Proof Generation Script diff --git a/docs/docs-developers/docs/tutorials/contract_tutorials/token_contract.md b/docs/docs-developers/docs/tutorials/contract_tutorials/token_contract.md index 251ffffe8a75..a408634df7cc 100644 --- a/docs/docs-developers/docs/tutorials/contract_tutorials/token_contract.md +++ b/docs/docs-developers/docs/tutorials/contract_tutorials/token_contract.md @@ -49,7 +49,7 @@ yarn add @aztec/aztec.js@#include_aztec_version @aztec/accounts@#include_aztec_v ## Contract structure -The `aztec new` command created a contract project with `Nargo.toml` and `src/main.nr`. Let's replace the boilerplate in `src/main.nr` with a simple starting point: +The `aztec init` command created a workspace with two crates: a `bob_token_contract` crate for your smart contract code and a `bob_token_test` crate for Noir tests. In `bob_token_contract/src/main.nr` we even have a proto-contract. Let's replace it with a simple starting point: ```rust #include_code start /docs/examples/contracts/bob_token_contract/src/main.nr raw @@ -59,7 +59,7 @@ The `aztec new` command created a contract project with `Nargo.toml` and `src/ma The `#[aztec]` macro transforms our contract code to work with Aztec's privacy protocol. -Replace the contents of `Nargo.toml` with the following: +Let's make sure the Aztec.nr library is listed in our dependencies in `bob_token_contract/Nargo.toml`: ```toml [package] @@ -253,7 +253,7 @@ In this case, all that the network sees (including Giggle) is just "something ha ### Updating Storage for Privacy -For something like balances, you can use a simple library called `easy_private_state` which abstracts away a custom private Note. 
A Note is at the core of how private state works in Aztec and you can read about it [here](../../foundational-topics/state_management.md). For now, let's add it by replacing the `[dependencies]` section in `Nargo.toml`: +For something like balances, you can use a simple library called `easy_private_state` which abstracts away a custom private Note. A Note is at the core of how private state works in Aztec and you can read about it [here](../../foundational-topics/state_management.md). For now, let's just import the library in `bob_token_contract/Nargo.toml`: ```toml [dependencies] diff --git a/docs/docs-developers/docs/tutorials/js_tutorials/token_bridge.md b/docs/docs-developers/docs/tutorials/js_tutorials/token_bridge.md index 38a88723df02..fe4df0b37ba2 100644 --- a/docs/docs-developers/docs/tutorials/js_tutorials/token_bridge.md +++ b/docs/docs-developers/docs/tutorials/js_tutorials/token_bridge.md @@ -86,25 +86,20 @@ aztec new contracts/aztec/nft cd contracts/aztec/nft ``` +This creates a workspace with two crates: an `nft_contract` crate for the smart contract code and an `nft_test` crate for Noir tests. The `aztec` dependency is already configured in `nft_contract/Nargo.toml`. + :::tip Noir Language Server If you're using VS Code, install the [Noir Language Support extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir) for syntax highlighting, error checking, and code completion while writing Noir contracts. ::: -Open `Nargo.toml` and make sure `aztec` is a dependency: - -```toml -[dependencies] -aztec = { git = "https://github.com/AztecProtocol/aztec-nr", tag = "#include_aztec_version", directory = "aztec" } -``` - ### Create the NFT Note -First, let's create a custom note type for private NFT ownership. In the `src/` directory, create a new file called `nft.nr`: +First, let's create a custom note type for private NFT ownership. 
In the `nft_contract/src/` directory, create a new file called `nft.nr`: ```bash -touch src/nft.nr +touch nft_contract/src/nft.nr ``` In this file, you're going to create a **private note** that represents NFT ownership. This is a struct with macros that indicate it is a note that can be compared and packed: @@ -121,7 +116,7 @@ Notes are powerful concepts. Learn more about how to use them in the [state mana ### Define Storage -Back in `main.nr`, you can now build the contract storage. You need: +Back in `nft_contract/src/main.nr`, you can now build the contract storage. You need: - **admin**: Who controls the contract (set once, never changes) - **minter**: The bridge address (set once by admin) @@ -130,7 +125,7 @@ Back in `main.nr`, you can now build the contract storage. You need: One interesting aspect of this storage configuration is the use of `DelayedPublicMutable`, which allows private functions to read and use public state. You're using it to publicly track which NFTs are already minted while keeping their owners private. Read more about `DelayedPublicMutable` in [the storage guide](../../aztec-nr/framework-description/state_variables.md). -Write the storage struct and a simple [initializer](../../foundational-topics/contract_creation.md#initialization) to set the admin in the `main.nr` file: +Write the storage struct and a simple [initializer](../../foundational-topics/contract_creation.md#initialization) to set the admin in the `nft_contract/src/main.nr` file: @@ -218,12 +213,12 @@ aztec new nft_bridge cd nft_bridge ``` -And again, add the `aztec-nr` dependency to `Nargo.toml`. We also need to add the `NFTPunk` contract we just wrote above: +Now add the `NFTPunk` contract dependency to `nft_bridge_contract/Nargo.toml`. 
The `aztec` dependency is already there: ```toml [dependencies] aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag = "#include_aztec_version", directory = "aztec" } -NFTPunk = { path = "../nft" } +NFTPunk = { path = "../../nft/nft_contract" } ``` ### Understanding Bridges @@ -237,7 +232,7 @@ This means having knowledge about the L2 NFT contract, and the bridge on the L1 ### Bridge Storage -Clean up `main.nr` which is just a placeholder, and let's write the storage struct and the constructor. We'll use `PublicImmutable` since these values never change: +Clean up `nft_bridge_contract/src/main.nr` which is just a placeholder, and let's write the storage struct and the constructor. We'll use `PublicImmutable` since these values never change: diff --git a/docs/docs-developers/getting_started_on_local_network.md b/docs/docs-developers/getting_started_on_local_network.md index 04a0840f22a0..d72d34dd0360 100644 --- a/docs/docs-developers/getting_started_on_local_network.md +++ b/docs/docs-developers/getting_started_on_local_network.md @@ -8,7 +8,7 @@ tags: [local_network, testnet] import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -Get started on your local environment using a local network. +Get started on your local environment using a local network. If you'd rather deploy to a live network, read the [getting started on testnet guide](./getting_started_on_testnet.md). The local network is a local development Aztec network running fully on your machine, and interacting with a development Ethereum node. You can develop and deploy on it just like on a testnet or mainnet (when the time comes). The local network makes it faster and easier to develop and test your Aztec applications. @@ -257,4 +257,5 @@ Simulation result: 25n Want to build something cool on Aztec? - Check out the [Token Contract Tutorial](./docs/tutorials/contract_tutorials/token_contract.md) for a beginner tutorial, or jump into more advanced ones +- Ready for a live network? 
Try [deploying on testnet](./getting_started_on_testnet.md) - Start on your own thing and check out the How To Guides to help you! diff --git a/docs/docs-developers/getting_started_on_testnet.md b/docs/docs-developers/getting_started_on_testnet.md new file mode 100644 index 000000000000..1e670c372a8e --- /dev/null +++ b/docs/docs-developers/getting_started_on_testnet.md @@ -0,0 +1,207 @@ +--- +title: Getting Started on Testnet +sidebar_position: 1 +tags: [testnet] +description: Deploy contracts and send transactions on the Aztec testnet using the CLI wallet and the Sponsored FPC for fee payment. +--- + +import { General } from '@site/src/components/Snippets/general_snippets'; + +This guide walks you through deploying your first contract on the Aztec testnet. You will install the CLI tools, create an account using the Sponsored FPC (so you don't need to bridge Fee Juice yourself), and deploy and interact with a contract. + +## Testnet vs Local Network + +| Feature | Local Network | Testnet | +|---------|-------------|---------| +| **Environment** | Local machine | Decentralized network on Sepolia | +| **Fees** | Free (test accounts prefunded) | Sponsored FPC available | +| **Block times** | Instant | ~36 seconds | +| **Proving** | Optional | Required | +| **Accounts** | Test accounts pre-deployed | Must create and deploy your own | + +:::info +If you want to develop and iterate quickly, start with the [local network guide](./getting_started_on_local_network.md). The local network has instant blocks and no proving, making it faster for development. +::: + +## Prerequisites + +- + +## Install the Aztec toolchain + +Install the testnet version of the Aztec CLI: + +```bash +VERSION=#include_testnet_version bash -i <(curl -sL https://install.aztec.network/#include_testnet_version) +``` + +:::warning +Testnet is version-dependent. It is currently running version `#include_testnet_version`. Maintain version consistency when interacting with the testnet to avoid errors. 
+::: + +This installs: + +- **aztec** - Compiles and tests Aztec contracts, launches infrastructure, and provides utility commands +- **aztec-up** - Version manager for the Aztec toolchain (`aztec-up install`, `aztec-up use`, `aztec-up list`) +- **aztec-wallet** - CLI tool for interacting with the Aztec network + +## Getting started on testnet + +### Step 1: Set up your environment + +Set the required environment variables: + +```bash +export NODE_URL=https://rpc.testnet.aztec-labs.com +export SPONSORED_FPC_ADDRESS=0x254082b62f9108d044b8998f212bb145619d91bfcd049461d74babb840181257 +``` + +### Step 2: Register the Sponsored FPC + +The Sponsored FPC (Fee Payment Contract) pays transaction fees on your behalf, so you don't need to bridge Fee Juice from L1. Register it in your wallet: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias sponsoredfpc \ + $SPONSORED_FPC_ADDRESS SponsoredFPC \ + --salt 0 +``` + +### Step 3: Create and deploy an account + +Unlike the local network, testnet has no pre-deployed accounts. Create and deploy your own: + +```bash +aztec-wallet create-account \ + --node-url $NODE_URL \ + --alias my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS +``` + +:::note +The first transaction will take longer as it downloads proving keys. If you see `Timeout awaiting isMined`, the transaction is still processing — this is normal on testnet. +::: + +### Step 4: Deploy a contract + +Deploy a token contract as an example: + +```bash +aztec-wallet deploy \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --alias token \ + TokenContract \ + --args accounts:my-wallet Token TOK 18 +``` + +This deploys the `TokenContract` with: +- `admin`: your wallet address +- `name`: Token +- `symbol`: TOK +- `decimals`: 18 + +You can check the transaction status on [Aztecscan](https://testnet.aztecscan.xyz). 
+ +### Step 5: Interact with your contract + +Mint some tokens: + +```bash +aztec-wallet send mint_to_public \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --contract-address token \ + --args accounts:my-wallet 100 +``` + +Check your balance: + +```bash +aztec-wallet simulate balance_of_public \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --contract-address token \ + --args accounts:my-wallet +``` + +This should print: + +``` +Simulation result: 100n +``` + +Move tokens to private state: + +```bash +aztec-wallet send transfer_to_private \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --payment method=fpc-sponsored,fpc=$SPONSORED_FPC_ADDRESS \ + --contract-address token \ + --args accounts:my-wallet 25 +``` + +Check your private balance: + +```bash +aztec-wallet simulate balance_of_private \ + --node-url $NODE_URL \ + --from accounts:my-wallet \ + --contract-address token \ + --args accounts:my-wallet +``` + +This should print: + +``` +Simulation result: 25n +``` + +## Viewing transactions on the block explorer + +You can view your transactions, contracts, and account on the testnet block explorers: + +- [Aztecscan](https://testnet.aztecscan.xyz) +- [Aztec Explorer](https://aztecexplorer.xyz/?network=testnet) + +Search by transaction hash, contract address, or account address to see details and status. + +## Registering existing contracts + +To interact with a contract deployed by someone else, you need to register it in your local PXE first: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias mycontract \ + <contract-address> <ContractArtifact> +``` + +For example, to register a `TokenContract` deployed by someone else: + +```bash +aztec-wallet register-contract \ + --node-url $NODE_URL \ + --alias external-token \ + 0x1234...abcd TokenContract +``` + +After registration, you can interact with it using `aztec-wallet send` and `aztec-wallet simulate` as shown above.
+ +## Paying fees without the Sponsored FPC + +The Sponsored FPC is convenient for getting started, but you can also pay fees directly by bridging Fee Juice from Ethereum Sepolia. See [Paying Fees](./docs/aztec-js/how_to_pay_fees.md#bridge-fee-juice-from-l1) for details on bridging and other fee payment methods. + +## Testnet information + +For complete testnet technical details including contract addresses and network configuration, see the [Networks page](/networks#testnet). + +## Next steps + +- Check out the [Tutorials](./docs/tutorials/contract_tutorials/counter_contract.md) for building more complex contracts +- Learn about [paying fees](./docs/aztec-js/how_to_pay_fees.md) with different methods +- Explore [Aztec Playground](https://play.aztec.network/) for an interactive development experience diff --git a/docs/docs-operate/reference/changelog/v4.md b/docs/docs-operate/reference/changelog/v4.md index adba87dd0bcc..a7f3b5a9179e 100644 --- a/docs/docs-operate/reference/changelog/v4.md +++ b/docs/docs-operate/reference/changelog/v4.md @@ -1,85 +1,325 @@ --- -title: v4.0.0 (from v3.0.0) -description: TODO +title: v4.x (Upgrade from Ignition) +description: Breaking changes and migration guide for upgrading from Ignition (v2.x) to Alpha (v4.x). --- ## Overview -**Migration difficulty**: TODO +**Migration difficulty**: High ## Breaking changes -### StakingAssetHandler refactored to simple token faucet +### Node.js upgraded to v24 -The `StakingAssetHandler` L1 contract has been significantly simplified. It no longer handles validator registration directly - instead it functions as a simple STK token faucet with ZKPassport sybil resistance. +Node.js minimum version changed from v22 to v24.12.0. 
-**v3.0.0:** -```solidity -// Single call registered validator -stakingAssetHandler.addValidator(attester, merkleProof, zkPassportParams, publicKeyG1, publicKeyG2, signature); +### Bot fee padding configuration renamed + +The bot configuration for fee padding has been renamed from "base fee" to "min fee". + +**v3.x:** + +```bash +--bot.baseFeePadding ($BOT_BASE_FEE_PADDING) +``` + +**v4.0.0:** + +```bash +--bot.minFeePadding ($BOT_MIN_FEE_PADDING) +``` + +**Migration**: Update your configuration to use the new flag name and environment variable. + +### L2Tips API restructured with checkpoint information + +The `getL2Tips()` RPC endpoint now returns a restructured response with additional checkpoint tracking. + +**v3.x response:** + +```json +{ + "latest": { "number": 100, "hash": "0x..." }, + "proven": { "number": 98, "hash": "0x..." }, + "finalized": { "number": 95, "hash": "0x..." } +} +``` + +**v4.0.0 response:** + +```json +{ + "proposed": { "number": 100, "hash": "0x..." }, + "checkpointed": { + "block": { "number": 99, "hash": "0x..." }, + "checkpoint": { "number": 10, "hash": "0x..." } + }, + "proven": { + "block": { "number": 98, "hash": "0x..." }, + "checkpoint": { "number": 9, "hash": "0x..." } + }, + "finalized": { + "block": { "number": 95, "hash": "0x..." }, + "checkpoint": { "number": 8, "hash": "0x..." } + } +} +``` + +**Migration**: + +- Replace `tips.latest` with `tips.proposed` +- For `checkpointed`, `proven`, and `finalized` tips, access block info via `.block` (e.g., `tips.proven.block.number`) + +### Block gas limits reworked + +The byte-based block size limit has been removed and replaced with field-based blob limits and automatic gas budget computation from L1 rollup limits. 
+ +**Removed:** + +```bash +--maxBlockSizeInBytes ($SEQ_MAX_BLOCK_SIZE_IN_BYTES) +``` + +**Changed to optional (now auto-computed from L1 if not set):** + +```bash +--maxL2BlockGas ($SEQ_MAX_L2_BLOCK_GAS) +--maxDABlockGas ($SEQ_MAX_DA_BLOCK_GAS) ``` +**New (proposer):** + +```bash +--perBlockAllocationMultiplier ($SEQ_PER_BLOCK_ALLOCATION_MULTIPLIER) +--maxTxsPerCheckpoint ($SEQ_MAX_TX_PER_CHECKPOINT) +``` + +**New (validator):** + +```bash +--validateMaxL2BlockGas ($VALIDATOR_MAX_L2_BLOCK_GAS) +--validateMaxDABlockGas ($VALIDATOR_MAX_DA_BLOCK_GAS) +--validateMaxTxsPerBlock ($VALIDATOR_MAX_TX_PER_BLOCK) +--validateMaxTxsPerCheckpoint ($VALIDATOR_MAX_TX_PER_CHECKPOINT) +``` + +**Migration**: Remove `SEQ_MAX_BLOCK_SIZE_IN_BYTES` from your configuration. Per-block L2 and DA gas budgets are now derived automatically as `(checkpointLimit / maxBlocks) * multiplier`, where the multiplier defaults to 2. You can still override `SEQ_MAX_L2_BLOCK_GAS` and `SEQ_MAX_DA_BLOCK_GAS` explicitly, but they will be capped at the checkpoint-level limits. Validators can now set independent per-block and per-checkpoint limits via the `VALIDATOR_` env vars; when not set, only checkpoint-level protocol limits are enforced. + +### Setup phase allow list requires function selectors + +The transaction setup phase allow list now enforces function selectors, restricting which specific functions can run during setup on whitelisted contracts. Previously, any public function on a whitelisted contract or class was permitted. + +The semantics of the environment variable `TX_PUBLIC_SETUP_ALLOWLIST` have changed: + +**v3.x:** + +```bash +--txPublicSetupAllowList ($TX_PUBLIC_SETUP_ALLOWLIST) +``` + +The variable fully **replaced** the hardcoded defaults. Format allowed entries without selectors: `I:address`, `C:classId`. 
+ **v4.0.0:** -```solidity -// Step 1: Claim STK tokens from faucet -stakingAssetHandler.claim(zkPassportParams); -// Step 2: Approve rollup to spend tokens -stakingAsset.approve(rollupAddress, amount); +```bash +--txPublicSetupAllowListExtend ($TX_PUBLIC_SETUP_ALLOWLIST) +``` + +The variable now **extends** the hardcoded defaults (which are always present). Selectors are now mandatory. An optional flags segment can be appended for additional validation: + +``` +I:address:selector[:flags] +C:classId:selector[:flags] +``` + +Where `flags` is a `+`-separated list of: +- `os` — `onlySelf`: only allow calls where msg_sender == contract address +- `rn` — `rejectNullMsgSender`: reject calls with a null msg_sender +- `cl=N` — `calldataLength`: enforce exact calldata length of N fields + +Example: `C:0xabc:0x1234:os+cl=4` + +**Migration**: If you were using `TX_PUBLIC_SETUP_ALLOWLIST`, ensure all entries include function selectors. Note the variable now adds to defaults rather than replacing them. If you were not setting this variable, no action is needed — the hardcoded defaults now include the correct selectors automatically. + +### Token removed from default setup allowlist + +Token class-based entries (`_increase_public_balance` and `transfer_in_public`) have been removed from the default public setup allowlist. FPC-based fee payments using custom tokens no longer work out of the box. + +This change was made because Token class IDs change with aztec-nr releases, making the allowlist impossible to keep up to date with new library releases. In addition, `transfer_in_public` requires complex additional logic to be built into the node to prevent mass transaction invalidation attacks. **FPC-based fee payment with custom tokens won't work on mainnet alpha**. + +**Migration**: Node operators who need FPC support must manually add Token entries via `TX_PUBLIC_SETUP_ALLOWLIST`. 
Example: -// Step 3: Deposit into rollup (user chooses their own withdrawer) -rollup.deposit(attester, withdrawer, publicKeyG1, publicKeyG2, signature, moveWithLatestRollup); +```bash +TX_PUBLIC_SETUP_ALLOWLIST="C:<token_class_id>:<increase_public_balance_selector>:os+cl=3,C:<token_class_id>:<transfer_in_public_selector>:cl=5" +``` + +Replace `<token_class_id>` with the deployed Token contract class ID and `<increase_public_balance_selector>`/`<transfer_in_public_selector>` with the respective function selectors. Keep in mind that this will only work on local network setups, since even if you as an operator add these entries, other nodes will not have them and will not pick up these transactions. + +### Sequencer environment variable renames + +Several sequencer environment variables have been renamed: + +| Old variable | New variable | +|---|---| +| `SEQ_TX_POLLING_INTERVAL_MS` | `SEQ_POLLING_INTERVAL_MS` | +| `SEQ_MAX_L1_TX_INCLUSION_TIME_INTO_SLOT` | `SEQ_L1_PUBLISHING_TIME_ALLOWANCE_IN_SLOT` | +| `SEQ_MAX_TX_PER_BLOCK` | `SEQ_MAX_TX_PER_CHECKPOINT` | +| `SEQ_MAX_BLOCK_SIZE_IN_BYTES` | Removed (see [Block gas limits reworked](#block-gas-limits-reworked)) | + +**Migration**: Search your configuration for the old variable names and replace them. The node will not recognize the old names. + +### Double signing slashing + +New slashable offenses have been introduced for duplicate proposals and duplicate attestations. Penalty amounts are currently set to 0, but the detection infrastructure is active.
+ +If you run redundant sequencer nodes, you **must** enable high-availability signing with PostgreSQL to prevent accidental double signing: + +```bash +VALIDATOR_HA_SIGNING_ENABLED=true +VALIDATOR_HA_DATABASE_URL=postgresql://<user>:<password>@<host>:<port>/<database> +VALIDATOR_HA_NODE_ID=<unique-node-id> +``` + +Run the database migration before starting your nodes: + +```bash +aztec migrate-ha-db up --database-url <database-url> ``` -**Removed functions:** -- `addValidator()` - replaced by `claim()` + direct rollup deposit -- `reenterExitedValidator()` -- `setValidatorsToFlush()`, `setMintInterval()`, `setDepositsPerMint()`, `setWithdrawer()` -- `setSkipMerkleCheck()`, `setDepositMerkleRoot()` +**Migration**: If you run a single node, no action is required. If you run redundant nodes for high availability, configure HA signing immediately. See the [High Availability Sequencers](../../operators/setup/high-availability.md) guide for details. + +### Blob-only data publication + +Transaction data is now published entirely via EIP-4844 blobs. The calldata fallback has been removed. + +Your consensus client (e.g., Lighthouse, Prysm) must run as a **supernode** or **semi-supernode** to make blobs available for retrieval. Standard pruning configurations will not retain blobs long enough. -**New functions:** -- `claim(ProofVerificationParams)` - claim STK tokens with ZKPassport proof -- `setFaucetAmount(uint256)` - owner sets claim amount -- `resetNullifier(bytes32)` - owner can reset a nullifier +You should also configure blob file stores for redundancy: -**CLI changes:** ```bash -# v3.0.0 -aztec add-l1-validator --merkle-proof +BLOB_FILE_STORE_URLS=<comma-separated-urls> +BLOB_FILE_STORE_UPLOAD_URL=<upload-url> +BLOB_ARCHIVE_API_URL=<archive-api-url> +``` + +**Migration**: Ensure your consensus client is configured as a supernode. If you previously relied on calldata for data availability, switch to blob-based retrieval. See the [Blob Storage](../../operators/setup/blob_storage) guide for configuration details.
+ +### Withdrawal delay increase + +The governance execution delay has increased from 7 days to 30 days. This extends the time required for staker withdrawals from approximately 15 days to approximately 38 days. -# v4.0.0 -aztec add-l1-validator --withdrawer-address
+**Migration**: No configuration changes needed. Be aware that withdrawal processing will take longer after the upgrade. + +### Prover architecture change + +The prover now runs as a node subsystem rather than a separate standalone process. Start it alongside your node using the `--prover-node` flag: + +```bash +aztec start --node --prover-node ``` -**Migration**: Users must now call `claim()` to get STK tokens, then deposit into the rollup themselves. The `--merkle-proof` CLI flag is removed; add `--withdrawer-address` instead. +**Migration**: If you were running the prover as a separate process, update your deployment to run it as part of the node with `--prover-node`. ## Removed features ## New features -### P2P clock tolerance for slot validation +### Initial ETH per fee asset configuration + +A new environment variable `AZTEC_INITIAL_ETH_PER_FEE_ASSET` has been added to configure the initial exchange rate between ETH and the fee asset (AZTEC) at contract deployment. This value uses 1e12 precision. + +**Default**: `10000000` (0.00001 ETH per AZTEC) + +**Configuration:** -Added a 500ms tolerance window for P2P messages from the previous slot, following Ethereum's `MAXIMUM_GOSSIP_CLOCK_DISPARITY` approach. This prevents peers from being penalized for valid messages that arrive slightly after the slot boundary due to network latency. +```bash +--initialEthPerFeeAsset ($AZTEC_INITIAL_ETH_PER_FEE_ASSET) +``` + +This replaces the previous hardcoded default and allows network operators to set the starting price point for the fee asset. + +### `reloadKeystore` admin RPC endpoint + +Node operators can now update validator attester keys, coinbase, and fee recipient without restarting the node by calling the new `reloadKeystore` admin RPC endpoint. 
+ +What is updated on reload: + +- Validator attester keys (add, remove, or replace) +- Coinbase and fee recipient per validator +- Publisher-to-validator mapping + +What is NOT updated (requires restart): -The tolerance is hardcoded at 500ms (matching Ethereum's current value) and can be made configurable via environment variables in the future if needed. +- L1 publisher signers +- Prover keys +- HA signer connections -**Impact**: Improved network stability with no action required from node operators. +New validators must use a publisher key already initialized at startup. Reload is rejected with a clear error if validation fails. -### Transaction collection config renamed +### Admin API key authentication -The environment variable for selecting the missing transactions collector implementation has been renamed: +The admin JSON-RPC endpoint now supports auto-generated API key authentication. + +**Behavior:** + +- A cryptographically secure API key is auto-generated at first startup and displayed once via stdout +- Only the SHA-256 hash is persisted to `/admin/api_key_hash` +- The key is reused across restarts when `--data-directory` is set +- Supports both `x-api-key` and `Authorization: Bearer ` headers +- Health check endpoint (`GET /status`) is excluded from auth (for k8s probes) + +**Configuration:** -**v3.0.0:** ```bash ---tx-collection-proposal-tx-collector-type ($TX_COLLECTION_PROPOSAL_TX_COLLECTOR_TYPE) +--admin-api-key-hash ($AZTEC_ADMIN_API_KEY_HASH) # Use a pre-generated SHA-256 key hash +--disable-admin-api-key ($AZTEC_DISABLE_ADMIN_API_KEY) # Disable auth entirely +--reset-admin-api-key ($AZTEC_RESET_ADMIN_API_KEY) # Force key regeneration ``` -**v4.0.0:** +**Helm charts**: Admin API key auth is disabled by default (`disableAdminApiKey: true`). Set to `false` in production values to enable. + +**Migration**: No action required — auth is opt-out. To enable, ensure `--disable-admin-api-key` is not set and note the key printed at startup. 
+ +### Transaction pool error codes for RPC callers + +Transaction submission via RPC now returns structured rejection codes when a transaction is rejected by the mempool: + +- `LOW_PRIORITY_FEE` — tx priority fee is too low +- `INSUFFICIENT_FEE_PAYER_BALANCE` — fee payer doesn't have enough balance +- `NULLIFIER_CONFLICT` — conflicting nullifier already in pool + +**Impact**: Improved developer experience — callers can now programmatically handle specific rejection reasons. + +### RPC transaction replacement price bump + +Transactions submitted via RPC that clash on nullifiers with existing pool transactions must now pay at least `P2P_RPC_PRICE_BUMP_PERCENTAGE`% more (default: 10%) in priority fee to replace them. The same bump applies when the pool is full and the incoming tx needs to evict the lowest-priority tx. P2P gossip behavior is unchanged. + +**Configuration:** + ```bash ---tx-collection-missing-txs-collector-type ($TX_COLLECTION_MISSING_TXS_COLLECTOR_TYPE) +P2P_RPC_PRICE_BUMP_PERCENTAGE=10 # default: 10 (percent) ``` -**Migration**: Update your configuration to use the new environment variable name. The default value (`new`) remains unchanged. +Set to `0` to disable the percentage-based bump (still requires strictly higher fee). + +### Validator-specific block limits + +Validators can now enforce per-block and per-checkpoint limits independently from the sequencer (proposer) limits. This allows operators to accept proposals that exceed their own proposer settings, or to reject proposals that are too large even if the proposer's limits allow them. + +**Configuration:** + +```bash +VALIDATOR_MAX_L2_BLOCK_GAS= # Max L2 gas per block for validation +VALIDATOR_MAX_DA_BLOCK_GAS= # Max DA gas per block for validation +VALIDATOR_MAX_TX_PER_BLOCK= # Max txs per block for validation +VALIDATOR_MAX_TX_PER_CHECKPOINT= # Max txs per checkpoint for validation +``` + +When not set, no per-block limit is enforced for that dimension — only checkpoint-level protocol limits apply. 
These do not fall back to the `SEQ_` values. + +### Setup allow list extendable via network config + +The setup phase allow list can now be extended via the network configuration JSON (`txPublicSetupAllowListExtend` field). This allows network operators to distribute additional allowed setup functions to all nodes without requiring code changes. The local environment variable takes precedence over the network-json value. ## Changed defaults diff --git a/docs/docs-words.txt b/docs/docs-words.txt index 03cddcaaebdb..9a6c9e1abcd5 100644 --- a/docs/docs-words.txt +++ b/docs/docs-words.txt @@ -365,6 +365,7 @@ valuenote vecs versionˮ viewability +vnext visualisation visualise visualised diff --git a/docs/examples/bootstrap.sh b/docs/examples/bootstrap.sh index c7626aeaf390..21ad15605f24 100755 --- a/docs/examples/bootstrap.sh +++ b/docs/examples/bootstrap.sh @@ -6,8 +6,6 @@ REPO_ROOT=$(git rev-parse --show-toplevel) export BB=${BB:-"$REPO_ROOT/barretenberg/cpp/build/bin/bb"} export NARGO=${NARGO:-"$REPO_ROOT/noir/noir-repo/target/release/nargo"} -export TRANSPILER=${TRANSPILER:-"$REPO_ROOT/avm-transpiler/target/release/avm-transpiler"} -export STRIP_AZTEC_NR_PREFIX=${STRIP_AZTEC_NR_PREFIX:-"$REPO_ROOT/noir-projects/noir-contracts/scripts/strip_aztec_nr_prefix.sh"} export BB_HASH=${BB_HASH:-$("$REPO_ROOT/barretenberg/cpp/bootstrap.sh" hash)} export NOIR_HASH=${NOIR_HASH:-$("$REPO_ROOT/noir/bootstrap.sh" hash)} diff --git a/docs/netlify.toml b/docs/netlify.toml index 0f1ff6c83380..995dc8eaf80a 100644 --- a/docs/netlify.toml +++ b/docs/netlify.toml @@ -334,11 +334,11 @@ # FaceID wallet redirect (page removed) [[redirects]] from = "/developers/tutorials/codealong/faceid_wallet" - to = "/developers/docs/tutorials" + to = "/developers/docs/tutorials/contract_tutorials/counter_contract" [[redirects]] from = "/developers/docs/tutorials/faceid_wallet" - to = "/developers/docs/tutorials" + to = "/developers/docs/tutorials/contract_tutorials/counter_contract" # Legacy network paths 
- SPECIFIC PATHS MUST COME BEFORE WILDCARDS # (Netlify processes redirects top-to-bottom, first match wins) @@ -594,7 +594,7 @@ [[redirects]] from = "/developers/docs/guides/local_env/advanced/faceid_wallet" - to = "/developers/docs/tutorials/faceid_wallet" + to = "/developers/docs/tutorials/contract_tutorials/counter_contract" [[redirects]] from = "/developers/docs/guides/local_env/*" diff --git a/docs/network_versioned_docs/version-v4.1.2/operators/reference/changelog/v4.md b/docs/network_versioned_docs/version-v4.1.2/operators/reference/changelog/v4.md index 26a67ed75c82..a12bee3d00b6 100644 --- a/docs/network_versioned_docs/version-v4.1.2/operators/reference/changelog/v4.md +++ b/docs/network_versioned_docs/version-v4.1.2/operators/reference/changelog/v4.md @@ -186,7 +186,7 @@ Run the database migration before starting your nodes: aztec migrate-ha-db up --database-url ``` -**Migration**: If you run a single node, no action is required. If you run redundant nodes for high availability, configure HA signing immediately. See the [High Availability Sequencers](../../setup/high_availability_sequencers) guide for details. +**Migration**: If you run a single node, no action is required. If you run redundant nodes for high availability, configure HA signing immediately. See the [High Availability Sequencers](../../setup/high-availability.md) guide for details. 
### Blob-only data publication diff --git a/docs/scripts/validate_redirect_targets.sh b/docs/scripts/validate_redirect_targets.sh index dfe5c648aeb0..9a9fbe784c87 100755 --- a/docs/scripts/validate_redirect_targets.sh +++ b/docs/scripts/validate_redirect_targets.sh @@ -291,8 +291,11 @@ while IFS= read -r to_path; do continue fi + # Strip fragment identifiers (#anchor) before validation + check_path="${to_path%%#*}" + # Validate the path - if check_docs_path "$to_path"; then + if check_docs_path "$check_path"; then VALIDATED_COUNT=$((VALIDATED_COUNT + 1)) else INVALID_PATHS="${INVALID_PATHS} - ${to_path}\n" diff --git a/docs/sidebars-developer.js b/docs/sidebars-developer.js index e6b15b880d35..35d4dc0a53b6 100644 --- a/docs/sidebars-developer.js +++ b/docs/sidebars-developer.js @@ -17,6 +17,10 @@ const sidebars = { type: "doc", id: "getting_started_on_local_network", }, + { + type: "doc", + id: "getting_started_on_testnet", + }, { type: "doc", id: "ai_tooling", diff --git a/noir-projects/aztec-nr/aztec/src/context/private_context.nr b/noir-projects/aztec-nr/aztec/src/context/private_context.nr index a4416cc95249..967023c310a5 100644 --- a/noir-projects/aztec-nr/aztec/src/context/private_context.nr +++ b/noir-projects/aztec-nr/aztec/src/context/private_context.nr @@ -557,11 +557,20 @@ impl PrivateContext { /// counter. /// pub fn end_setup(&mut self) { - // Incrementing the side effect counter when ending setup ensures non ambiguity for the counter where we change - // phases. + // We bump the counter twice: once so that `min_revertible_side_effect_counter` sits strictly above any + // non-revertible side effect counter (including queries made via `in_revertible_phase` before this call), and + // once more so that the next revertible side effect counter is strictly greater than + // `min_revertible_side_effect_counter`. 
This ensures `min_revertible_side_effect_counter` occupies a gap that + // no side effect takes, which the kernel relies on when validating the phase split. self.side_effect_counter += 1; - aztecnr_trace_log_format!("Ending setup at counter {0}")([self.side_effect_counter as Field]); - self.min_revertible_side_effect_counter = self.next_counter(); + self.min_revertible_side_effect_counter = self.side_effect_counter; + self.side_effect_counter += 1; + + aztecnr_trace_log_format!( + "Ending setup, minimum revertible side effect counter is {0}", + )( + [self.min_revertible_side_effect_counter as Field], + ); notify_revertible_phase_start(self.min_revertible_side_effect_counter); } @@ -900,6 +909,8 @@ impl PrivateContext { /// happen to share a raw tag value become indistinguishable. Prefer the higher-level APIs /// ([`crate::messages::message_delivery::MessageDelivery`] for messages, `self.emit(event)` for events) which /// handle tagging automatically. + // TODO(F-555): remove this function in favor of `emit_private_log_vec_unsafe` + #[deprecated("use `emit_private_log_vec_unsafe` instead")] pub fn emit_private_log_unsafe(&mut self, tag: Field, log: [Field; PRIVATE_LOG_CIPHERTEXT_LEN], length: u32) { let counter = self.next_counter(); let full_log = [tag].concat(log); @@ -907,6 +918,15 @@ impl PrivateContext { .count(counter)); } + /// `BoundedVec`-based variant of [`emit_private_log_unsafe`](PrivateContext::emit_private_log_unsafe). + /// + /// See [`emit_private_log_unsafe`](PrivateContext::emit_private_log_unsafe) for the full description of private + /// log semantics. + // TODO(F-555): once `emit_private_log_unsafe` is removed, rename this function to drop the `_vec` suffix. + pub fn emit_private_log_vec_unsafe(&mut self, tag: Field, log: BoundedVec) { + self.emit_raw_note_log_vec_unsafe(tag, log, 0); + } + // TODO: rename. /// Emits a private log that is explicitly tied to a newly-emitted note_hash, to convey to the kernel: "this log /// relates to this note". 
@@ -929,6 +949,8 @@ impl PrivateContext { /// ## Safety /// /// Same as [`PrivateContext::emit_private_log_unsafe`]: the `tag` should be domain-separated. + // TODO(F-555): remove this function in favor of `emit_raw_note_log_vec_unsafe` + #[deprecated("use `emit_raw_note_log_vec_unsafe` instead")] pub fn emit_raw_note_log_unsafe( &mut self, tag: Field, @@ -942,6 +964,23 @@ impl PrivateContext { self.private_logs.push(private_log.count(counter)); } + /// `BoundedVec`-based variant of [`emit_raw_note_log_unsafe`](PrivateContext::emit_raw_note_log_unsafe). + /// + /// See [`emit_raw_note_log_unsafe`](PrivateContext::emit_raw_note_log_unsafe) for the full description of + /// note-tied private log semantics. + // TODO(F-555): once `emit_raw_note_log_unsafe` is removed, rename this function to drop the `_vec` suffix. + pub fn emit_raw_note_log_vec_unsafe( + &mut self, + tag: Field, + log: BoundedVec, + note_hash_counter: u32, + ) { + let counter = self.next_counter(); + let full_log = [tag].concat(log.storage()); + let private_log = PrivateLogData { log: PrivateLog::new(full_log, log.len() + 1), note_hash_counter }; + self.private_logs.push(private_log.count(counter)); + } + /// Emits large data blobs. /// /// This reuses the Contract Class Log channel to emit blobs of up to [`CONTRACT_CLASS_LOG_SIZE_IN_FIELDS`]. 
@@ -1154,16 +1193,12 @@ impl PrivateContext { }, ); - // TODO (fees) figure out why this crashes the prover and enable it we need this in order to pay fees inside - // child call contexts assert( - // (item.public_inputs.min_revertible_side_effect_counter == 0 as u32) - // | (item.public_inputs.min_revertible_side_effect_counter - // > self.min_revertible_side_effect_counter) - // ); if item.public_inputs.min_revertible_side_effect_counter - // > self.min_revertible_side_effect_counter { self.min_revertible_side_effect_counter = - // item.public_inputs.min_revertible_side_effect_counter; } - self.side_effect_counter = end_side_effect_counter + 1; // TODO: call `next_counter` - // instead, for consistency + // The kernel circuits ensure that end_side_effect_counter is greater than start_side_effect_counter, and that + // all side effects emitted in the child call have counters within the range [start_side_effect_counter, + // end_side_effect_counter]. Therefore, we only need to ensure that the next side effect from the current call + // starts after the end side effect from the child call. + self.side_effect_counter = end_side_effect_counter + 1; + ReturnsHash::new(returns_hash) } diff --git a/noir-projects/aztec-nr/aztec/src/messages/encoding.nr b/noir-projects/aztec-nr/aztec/src/messages/encoding.nr index 322d5dd78103..045c15bee1a1 100644 --- a/noir-projects/aztec-nr/aztec/src/messages/encoding.nr +++ b/noir-projects/aztec-nr/aztec/src/messages/encoding.nr @@ -19,12 +19,13 @@ pub(crate) global AES128_PKCS7_EXPANSION_IN_BYTES: u32 = 16; pub global EPH_PK_X_SIZE_IN_FIELDS: u32 = 1; // (15 - 1) * 31 - 16 - 16 = 402. Note: We multiply by 31 because ciphertext bytes are stored in fields using -// bytes_to_fields, which packs 31 bytes per field (since a Field is ~254 bits and can safely store 31 whole bytes). +// encode_bytes_as_fields, which packs 31 bytes per field (since a Field is ~254 bits and can safely store 31 whole +// bytes). 
pub(crate) global MESSAGE_PLAINTEXT_SIZE_IN_BYTES: u32 = (MESSAGE_CIPHERTEXT_LEN - EPH_PK_X_SIZE_IN_FIELDS) * 31 - HEADER_CIPHERTEXT_SIZE_IN_BYTES - AES128_PKCS7_EXPANSION_IN_BYTES; -// The plaintext bytes represent Field values that were originally serialized using fields_to_bytes, which converts -// each Field to 32 bytes. To convert the plaintext bytes back to fields, we divide by 32. 402 / 32 = 12 +// The plaintext bytes represent Field values that were originally serialized using encode_fields_as_bytes, which +// converts each Field to 32 bytes. To convert the plaintext bytes back to fields, we divide by 32. 402 / 32 = 12 pub global MESSAGE_PLAINTEXT_LEN: u32 = MESSAGE_PLAINTEXT_SIZE_IN_BYTES / 32; pub global MESSAGE_EXPANDED_METADATA_LEN: u32 = 1; diff --git a/noir-projects/aztec-nr/aztec/src/messages/encryption/aes128.nr b/noir-projects/aztec-nr/aztec/src/messages/encryption/aes128.nr index f5327b8484f9..46f72ba6d4ba 100644 --- a/noir-projects/aztec-nr/aztec/src/messages/encryption/aes128.nr +++ b/noir-projects/aztec-nr/aztec/src/messages/encryption/aes128.nr @@ -23,8 +23,8 @@ use crate::{ utils::{ array, conversion::{ - bytes_to_fields::{bytes_from_fields, bytes_to_fields}, - fields_to_bytes::{fields_to_bytes, try_fields_from_bytes}, + bytes_as_fields::{decode_bytes_from_fields, encode_bytes_as_fields}, + fields_as_bytes::{encode_fields_as_bytes, try_decode_fields_from_bytes}, }, point::point_from_x_coord_and_sign, }, @@ -231,7 +231,7 @@ impl MessageEncryption for AES128 { // AES 128 operates on bytes, not fields, so we need to convert the fields to bytes. (This process is then // reversed when processing the message in `process_message_ciphertext`) - let plaintext_bytes = fields_to_bytes(plaintext); + let plaintext_bytes = encode_fields_as_bytes(plaintext); // Derive ECDH shared secret with recipient using a fresh ephemeral keypair. 
let (eph_sk, eph_pk) = generate_positive_ephemeral_key_pair(); @@ -335,7 +335,7 @@ impl MessageEncryption for AES128 { assert(offset == message_bytes.len(), "unexpected encrypted message length"); // Pack message bytes into fields (31 bytes per field) and prepend eph_pk.x. - let message_bytes_as_fields = bytes_to_fields(message_bytes); + let message_bytes_as_fields = encode_bytes_as_fields(message_bytes); let mut ciphertext: [Field; MESSAGE_CIPHERTEXT_LEN] = [0; MESSAGE_CIPHERTEXT_LEN]; @@ -386,7 +386,7 @@ impl MessageEncryption for AES128 { // later, so we can simply set it to 0 unmasked.unwrap_or(0) }); - let ciphertext_without_eph_pk_x = bytes_from_fields(unmasked_fields); + let ciphertext_without_eph_pk_x = decode_bytes_from_fields(unmasked_fields); // Derive symmetric keys: let pairs = derive_aes_symmetric_key_and_iv_from_shared_secret::<2>(s_app); @@ -419,7 +419,7 @@ impl MessageEncryption for AES128 { .and_then(|ciphertext| try_aes128_decrypt(ciphertext, body_iv, body_sym_key)) // Convert bytes back to fields (32 bytes per field). Returns None if the actual bytes are // not valid. - .and_then(|plaintext_bytes| try_fields_from_bytes(plaintext_bytes)) + .and_then(|plaintext_bytes| try_decode_fields_from_bytes(plaintext_bytes)) }) }) } diff --git a/noir-projects/aztec-nr/aztec/src/messages/message_delivery.nr b/noir-projects/aztec-nr/aztec/src/messages/message_delivery.nr index 8b4a489ffe50..867b56dcbfad 100644 --- a/noir-projects/aztec-nr/aztec/src/messages/message_delivery.nr +++ b/noir-projects/aztec-nr/aztec/src/messages/message_delivery.nr @@ -192,6 +192,11 @@ pub global MessageDelivery: MessageDeliveryEnum = /// be created, there is no point in delivering the message. /// /// `delivery_mode` must be one of [`MessageDeliveryEnum`]. +/// +/// ## Privacy +/// +/// The emitted log always has the same length regardless of `MESSAGE_PLAINTEXT_LEN`, because all message ciphertexts +/// also have the same length. 
This prevents accidental privacy leakage via the log length. pub fn do_private_message_delivery( context: &mut PrivateContext, encode_into_message_plaintext: fn[Env]() -> [Field; MESSAGE_PLAINTEXT_LEN], @@ -211,6 +216,7 @@ pub fn do_private_message_delivery( let _constrained_tagging = delivery_mode == MessageDelivery.ONCHAIN_CONSTRAINED; let contract_address = context.this_address(); + let ciphertext = remove_constraints_if( !constrained_encryption, || AES128::encrypt(encode_into_message_plaintext(), recipient, contract_address), @@ -231,15 +237,13 @@ pub fn do_private_message_delivery( // we're emitting a note or non-note message. assert_constant(maybe_note_hash_counter.is_some()); + let log = BoundedVec::from_array(ciphertext); if maybe_note_hash_counter.is_some() { // We associate the log with the note's side effect counter, so that if the note ends up being squashed in // the current transaction, the log will be removed as well. - // - // Note that the log always has the same length regardless of `MESSAGE_PLAINTEXT_LEN`, because all message - // ciphertexts also have the same length. This prevents accidental privacy leakage via the log length. 
- context.emit_raw_note_log_unsafe(log_tag, ciphertext, ciphertext.len(), maybe_note_hash_counter.unwrap()); + context.emit_raw_note_log_vec_unsafe(log_tag, log, maybe_note_hash_counter.unwrap()); } else { - context.emit_private_log_unsafe(log_tag, ciphertext, ciphertext.len()); + context.emit_private_log_vec_unsafe(log_tag, log); } } } diff --git a/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr b/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr index 57a54b5f7f35..b850be1dd3bd 100644 --- a/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr +++ b/noir-projects/aztec-nr/aztec/src/oracle/auth_witness.nr @@ -5,3 +5,37 @@ unconstrained fn get_auth_witness_oracle(_message_hash: Field) -> [F pub unconstrained fn get_auth_witness(message_hash: Field) -> [Field; N] { get_auth_witness_oracle(message_hash) } + +/// Fetches an auth witness and casts each field to a byte. +/// +/// Each field is range-checked to `[0, 256)` before casting to prevent silent truncation (e.g. a field value of +/// `b + 256` would truncate to the same byte as `b`). 
+pub unconstrained fn get_auth_witness_as_bytes(message_hash: Field) -> [u8; N] { + let witness = get_auth_witness::(message_hash); + let mut result: [u8; N] = [0; N]; + for i in 0..N { + assert(witness[i].lt(256), "auth witness field is not a single byte"); + result[i] = witness[i] as u8; + } + result +} + +mod test { + use super::get_auth_witness_as_bytes; + use std::test::OracleMock; + + #[test] + unconstrained fn get_auth_witness_as_bytes_casts_valid_witness() { + let witness: [Field; 3] = [0, 127, 255]; + let _ = OracleMock::mock("aztec_utl_getAuthWitness").returns(witness); + let bytes: [u8; 3] = get_auth_witness_as_bytes(0); + assert_eq(bytes, [0, 127, 255]); + } + + #[test(should_fail_with = "auth witness field is not a single byte")] + unconstrained fn get_auth_witness_as_bytes_rejects_field_above_byte_range() { + let witness: [Field; 1] = [256]; + let _ = OracleMock::mock("aztec_utl_getAuthWitness").returns(witness); + let _: [u8; 1] = get_auth_witness_as_bytes(0); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/conversion.nr b/noir-projects/aztec-nr/aztec/src/utils/conversion.nr index 1fd1fc0d8b61..ed3ba893d5cd 100644 --- a/noir-projects/aztec-nr/aztec/src/utils/conversion.nr +++ b/noir-projects/aztec-nr/aztec/src/utils/conversion.nr @@ -1,20 +1,24 @@ -// The following functions were removed as they were unused: -// - le_bytes_31_to_fields -// - fields_to_be_bytes_31 -// - fields_to_le_bytes_31 -// - byte_to_bits -// - get_random_bits -// - get_chunks_of_random_bits -// - pad_31_byte_fields_with_random_bits -// - le_bytes_to_padded_fields -// - be_bytes_to_padded_fields -// -// If you need any of these functions, please check the PR linked below: -// https://github.com/AztecProtocol/aztec-packages/pull/12581 +pub mod bytes_as_fields; +pub mod fields_as_bytes; -pub mod bytes_to_fields; -pub mod fields_to_bytes; +// Deprecated wrappers. TODO(F-371): remove these once external contracts have migrated. 
-// Convenience re-exports -pub use bytes_to_fields::{bytes_from_fields, bytes_to_fields}; -pub use fields_to_bytes::{fields_from_bytes, fields_to_bytes}; +#[deprecated("use decode_bytes_from_fields from bytes_as_fields")] +pub fn bytes_from_fields(fields: BoundedVec) -> BoundedVec { + bytes_as_fields::decode_bytes_from_fields(fields) +} + +#[deprecated("use encode_bytes_as_fields from bytes_as_fields")] +pub fn bytes_to_fields(bytes: [u8; N]) -> [Field; N / 31] { + bytes_as_fields::encode_bytes_as_fields(bytes) +} + +#[deprecated("use decode_fields_from_bytes from fields_as_bytes")] +pub fn fields_from_bytes(bytes: BoundedVec) -> BoundedVec { + fields_as_bytes::decode_fields_from_bytes(bytes) +} + +#[deprecated("use encode_fields_as_bytes from fields_as_bytes")] +pub fn fields_to_bytes(fields: [Field; N]) -> [u8; 32 * N] { + fields_as_bytes::encode_fields_as_bytes(fields) +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_as_fields.nr b/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_as_fields.nr new file mode 100644 index 000000000000..0c9cb7c9747f --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_as_fields.nr @@ -0,0 +1,75 @@ +use std::static_assert; + +/// Encodes an array of bytes as fields. +/// +/// Use +/// [`decode_bytes_from_fields`](crate::utils::conversion::bytes_as_fields::decode_bytes_from_fields) to recover +/// the original bytes. +/// +/// The `bytes` array length must be a multiple of 31. If padding is added, it will need to be manually removed +/// after decoding. +/// +/// ## Encoding +/// +/// Each 31-byte chunk is interpreted as a big-endian integer and stored in a `Field`. For input `[1, 10, 3, ..., 0]` +/// (31 bytes), the resulting `Field` is `1 * 256^30 + 10 * 256^29 + 3 * 256^28 + ... + 0`. 
+pub fn encode_bytes_as_fields(bytes: [u8; N]) -> [Field; N / 31] { + static_assert(N % 31 == 0, "N must be a multiple of 31"); + + let mut fields = [0; N / 31]; + for i in 0..N / 31 { + let mut field = 0; + for j in 0..31 { + field = field * 256 + bytes[i * 31 + j] as Field; + } + fields[i] = field; + } + + fields +} + +/// Decodes fields back into bytes. +/// +/// Inverse of +/// [`encode_bytes_as_fields`](crate::utils::conversion::bytes_as_fields::encode_bytes_as_fields). +/// Each input `Field` must fit in 248 bits; `Field::to_be_bytes::<31>()` fails the proof otherwise. +pub fn decode_bytes_from_fields(fields: BoundedVec) -> BoundedVec { + let mut bytes = BoundedVec::new(); + for i in 0..fields.len() { + let chunk: [u8; 31] = fields.get(i).to_be_bytes(); + for j in 0..31 { + bytes.push(chunk[j]); + } + } + bytes +} + +mod tests { + use crate::utils::array::subarray; + use super::{decode_bytes_from_fields, encode_bytes_as_fields}; + + #[test] + unconstrained fn round_trips_bytes(input: [u8; 93]) { + let fields = encode_bytes_as_fields(input); + + // In production the fields fly through the system and arrive as a BoundedVec on the other end. + let fields_bvec = BoundedVec::<_, 6>::from_array(fields); + let bytes_back = decode_bytes_from_fields(fields_bvec); + + assert_eq(bytes_back.len(), input.len()); + assert_eq(subarray(bytes_back.storage(), 0), input); + } + + #[test(should_fail_with = "N must be a multiple of 31")] + unconstrained fn encode_rejects_length_not_multiple_of_31() { + let _fields = encode_bytes_as_fields([0; 32]); + } + + #[test(should_fail_with = "Field failed to decompose into specified 31 limbs")] + unconstrained fn decode_rejects_oversized_field() { + // `Field::to_be_bytes::<31>()` fails the proof when a field has any bit above position 247 set. 
+ let oversized: Field = (1 as Field) * 2.pow_32(249); + let input = BoundedVec::<_, 1>::from_array([oversized]); + let _bytes = decode_bytes_from_fields(input); + } +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_to_fields.nr b/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_to_fields.nr deleted file mode 100644 index ce218fc02fdf..000000000000 --- a/noir-projects/aztec-nr/aztec/src/utils/conversion/bytes_to_fields.nr +++ /dev/null @@ -1,88 +0,0 @@ -use std::static_assert; - -// These functions are used to facilitate the conversion of log ciphertext between byte and field representations. -// -// `bytes_to_fields` uses fixed-size arrays since encryption contexts have compile-time size information. -// `bytes_from_fields` uses BoundedVec for flexibility in unconstrained contexts where sizes are dynamic. -// -// Together they provide bidirectional conversion between bytes and fields when processing encrypted logs. - -/// Converts the input bytes into an array of fields. A Field is ~254 bits meaning that each field can store 31 whole -/// bytes. Use `bytes_from_fields` to obtain the original bytes array. -/// -/// The input bytes are chunked into chunks of 31 bytes. Each 31-byte chunk is viewed as big-endian, and is converted -/// into a Field. For example, [1, 10, 3, ..., 0] (31 bytes) is encoded as [1 * 256^30 + 10 * 256^29 + 3 * 256^28 + ... 
-/// + 0] Note: N must be a multiple of 31 bytes -pub fn bytes_to_fields(bytes: [u8; N]) -> [Field; N / 31] { - // Assert that N is a multiple of 31 - static_assert(N % 31 == 0, "N must be a multiple of 31"); - - let mut fields = [0; N / 31]; - - // Since N is a multiple of 31, we can simply process all chunks fully - for i in 0..N / 31 { - let mut field = 0; - for j in 0..31 { - // Shift the existing value left by 8 bits and add the new byte - field = field * 256 + bytes[i * 31 + j] as Field; - } - fields[i] = field; - } - - fields -} - -/// Converts an input BoundedVec of fields into a BoundedVec of bytes in big-endian order. Arbitrary Field arrays are -/// not allowed: this is assumed to be an array obtained via `bytes_to_fields`, i.e. one that actually represents -/// bytes. To convert a Field array into bytes, use `fields_to_bytes`. -/// -/// Each input field must contain at most 31 bytes (this is constrained to be so). Each field is converted into 31 -/// big-endian bytes, and the resulting 31-byte chunks are concatenated back together in the order of the original -/// fields. -pub fn bytes_from_fields(fields: BoundedVec) -> BoundedVec { - let mut bytes = BoundedVec::new(); - - for i in 0..fields.len() { - let field = fields.get(i); - - // We expect that the field contains at most 31 bytes of information. - field.assert_max_bit_size::<248>(); - - // Now we can safely convert the field to 31 bytes. - let field_as_bytes: [u8; 31] = field.to_be_bytes(); - - for j in 0..31 { - bytes.push(field_as_bytes[j]); - } - } - - bytes -} - -mod tests { - use crate::utils::array::subarray; - use super::{bytes_from_fields, bytes_to_fields}; - - #[test] - unconstrained fn random_bytes_to_fields_and_back(input: [u8; 93]) { - let fields = bytes_to_fields(input); - - // At this point in production, the log flies through the system and we get a BoundedVec on the other end. 
So - // we need to convert the field array to a BoundedVec to be able to feed it to the `bytes_from_fields` - // function. - let fields_as_bounded_vec = BoundedVec::<_, 6>::from_array(fields); - - let bytes_back = bytes_from_fields(fields_as_bounded_vec); - - // Compare the original input with the round-tripped result - assert_eq(bytes_back.len(), input.len()); - assert_eq(subarray(bytes_back.storage(), 0), input); - } - - #[test(should_fail_with = "N must be a multiple of 31")] - unconstrained fn bytes_to_fields_input_length_not_multiple_of_31() { - // Try to convert 32 bytes (not a multiple of 31) to fields - let _fields = bytes_to_fields([0; 32]); - } - -} diff --git a/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_as_bytes.nr b/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_as_bytes.nr new file mode 100644 index 000000000000..d0a07d5446ce --- /dev/null +++ b/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_as_bytes.nr @@ -0,0 +1,172 @@ +/// Encodes an array of fields as bytes. +/// +/// Losslessly preserves any field value; use +/// [`try_decode_fields_from_bytes`](crate::utils::conversion::fields_as_bytes::try_decode_fields_from_bytes) to +/// recover the original fields. +/// +/// ## Encoding +/// +/// Each field is written as 32 big-endian bytes and the chunks are concatenated. The field array `[5, 42]` becomes: +/// +/// ```text +/// [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5, // First field (32 bytes) +/// 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42] // Second field (32 bytes) +/// ``` +/// +/// ## Privacy +/// +/// The BN254 modulus is `< 2^254`, so every 32-byte chunk has its top bit at zero and the next bit biased. The output +/// is therefore distinguishable from uniform random bytes; take this into account when feeding it into anything that +/// assumes uniform randomness (e.g. ciphertexts meant to look random). 
+pub fn encode_fields_as_bytes(fields: [Field; N]) -> [u8; 32 * N] { + let mut bytes = [0; 32 * N]; + for i in 0..N { + let chunk: [u8; 32] = fields[i].to_be_bytes(); + for j in 0..32 { + bytes[i * 32 + j] = chunk[j]; + } + } + bytes +} + +/// Decodes bytes back into fields. +/// +/// Panics if the input length is not a multiple of 32 or if any chunk exceeds the BN254 field modulus. See +/// [`try_decode_fields_from_bytes`](crate::utils::conversion::fields_as_bytes::try_decode_fields_from_bytes) +/// for a non-panicking variant. +pub fn decode_fields_from_bytes(bytes: BoundedVec) -> BoundedVec { + assert(bytes.len() % 32 == 0, "Input length must be a multiple of 32"); + try_decode_fields_from_bytes(bytes).expect(f"Value does not fit in field") +} + +/// Decodes bytes back into fields, returning None on failure. +/// +/// Inverse of +/// [`encode_fields_as_bytes`](crate::utils::conversion::fields_as_bytes::encode_fields_as_bytes). +/// Returns `Option::none()` if the input length is not a multiple of 32, or if any 32-byte chunk is `>=` the BN254 +/// field modulus. +pub fn try_decode_fields_from_bytes(bytes: BoundedVec) -> Option> { + if bytes.len() % 32 == 0 { + let num_chunks = bytes.len() / 32; + let mut fields: BoundedVec = BoundedVec::new(); + for i in 0..num_chunks { + let maybe_field = try_decode_field_from_bytes(bytes, i * 32); + if maybe_field.is_some() { + fields.push(maybe_field.unwrap()); + } + } + if fields.len() == num_chunks { + Option::some(fields) + } else { + Option::none() + } + } else { + Option::none() + } +} + +fn try_decode_field_from_bytes(bytes: BoundedVec, offset: u32) -> Option { + // Field arithmetic silently wraps values >= the modulus, so we compare each chunk against the modulus + // byte-by-byte (big-endian) while building `field`. cmp: 0 = equal so far, 1 = less than modulus, 2 = exceeds. 
+ let p = std::field::modulus_be_bytes(); + let mut field = 0; + let mut cmp: u8 = 0; + for j in 0..32 { + let byte = bytes.get(offset + j); + field = field * 256 + byte as Field; + if cmp == 0 { + if byte < p[j] { + cmp = 1; + } else if byte > p[j] { + cmp = 2; + } + } + } + + if cmp == 1 { + Option::some(field) + } else { + Option::none() + } +} + +mod tests { + use crate::utils::array::subarray; + use super::{decode_fields_from_bytes, encode_fields_as_bytes, try_decode_fields_from_bytes}; + + #[test] + unconstrained fn round_trips_fields(input: [Field; 3]) { + let bytes = encode_fields_as_bytes(input); + + // In production the bytes fly through the system and arrive as a BoundedVec on the other end. 113 is an + // arbitrary max length larger than the input length of 96. + let bytes_bvec = BoundedVec::<_, 113>::from_array(bytes); + let fields_back = try_decode_fields_from_bytes(bytes_bvec).unwrap(); + + assert_eq(fields_back.len(), input.len()); + assert_eq(subarray(fields_back.storage(), 0), input); + } + + #[test] + unconstrained fn try_decode_returns_none_on_length_not_multiple_of_32() { + let input = BoundedVec::<_, 64>::from_parts([0 as u8; 64], 33); + assert(try_decode_fields_from_bytes(input).is_none()); + } + + #[test] + unconstrained fn try_decode_accepts_max_field() { + // -1 in field arithmetic wraps to `modulus - 1`, the largest valid field value. + let max_field_as_bytes: [u8; 32] = (-1).to_be_bytes(); + let input = BoundedVec::<_, 32>::from_array(max_field_as_bytes); + + let fields = try_decode_fields_from_bytes(input).unwrap(); + + assert_eq(fields.get(0), -1); + } + + // Verifies the overflow check: take the max allowed value, bump a random byte, feed it in. 
+ #[test] + unconstrained fn try_decode_returns_none_on_chunk_above_modulus(random_value: u8) { + let index_of_byte_to_bump = random_value % 32; + let max_field_value_as_bytes: [u8; 32] = (-1).to_be_bytes(); + let byte_to_bump = max_field_value_as_bytes[index_of_byte_to_bump as u32]; + + // Skip if the selected byte is already 255. Acceptable under fuzz testing. + if byte_to_bump != 255 { + let mut input = BoundedVec::<_, 32>::from_array(max_field_value_as_bytes); + input.set(index_of_byte_to_bump as u32, byte_to_bump + 1); + + assert(try_decode_fields_from_bytes(input).is_none()); + } + } + + #[test] + unconstrained fn try_decode_returns_none_on_chunk_equal_to_modulus() { + // The field modulus itself is not a valid field value (it wraps to 0). + let p: [u8; 32] = std::field::modulus_be_bytes().as_array(); + let input = BoundedVec::::from_array(p); + assert(try_decode_fields_from_bytes(input).is_none()); + } + + #[test(should_fail_with = "Input length must be a multiple of 32")] + unconstrained fn decode_asserts_length_multiple_of_32() { + let input = BoundedVec::<_, 143>::from_array([ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, + 30, 31, 32, 33, + ]); + let _fields = decode_fields_from_bytes(input); + } + + #[test(should_fail_with = "Value does not fit in field")] + unconstrained fn decode_panics_on_chunk_above_modulus(random_value: u8) { + let index_of_byte_to_bump = random_value % 32; + let max_field_value_as_bytes: [u8; 32] = (-1).to_be_bytes(); + let byte_to_bump = max_field_value_as_bytes[index_of_byte_to_bump as u32]; + + if byte_to_bump != 255 { + let mut input = BoundedVec::<_, 32>::from_array(max_field_value_as_bytes); + input.set(index_of_byte_to_bump as u32, byte_to_bump + 1); + let _fields = decode_fields_from_bytes(input); + } + } +} diff --git a/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_to_bytes.nr b/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_to_bytes.nr 
deleted file mode 100644 index c48093bb9514..000000000000 --- a/noir-projects/aztec-nr/aztec/src/utils/conversion/fields_to_bytes.nr +++ /dev/null @@ -1,189 +0,0 @@ -// These functions are used to facilitate the conversion of log plaintext represented as fields into bytes and back. -// -// `fields_to_bytes` uses fixed-size arrays since encryption contexts have compile-time size information. -// `fields_from_bytes` uses BoundedVec for flexibility in unconstrained contexts where sizes are dynamic. -// -// Together they provide bidirectional conversion between fields and bytes. - -/// Converts an input array of fields into a single array of bytes. Use `fields_from_bytes` to obtain the original -/// field array. Each field is converted to a 32-byte big-endian array. -/// -/// For example, if you have a field array [123, 456], it will be converted to a 64-byte array: -/// [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,123, // First field (32 bytes) -/// 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,200] // Second field (32 bytes) -/// -/// Since a field is ~254 bits, you'll end up with a subtle 2-bit "gap" at the big end, every 32 bytes. Be careful that -/// such a gap doesn't leak information! This could happen if you for example expected the output to be -/// indistinguishable from random bytes. -pub fn fields_to_bytes(fields: [Field; N]) -> [u8; 32 * N] { - let mut bytes = [0; 32 * N]; - - for i in 0..N { - let field_as_bytes: [u8; 32] = fields[i].to_be_bytes(); - - for j in 0..32 { - bytes[i * 32 + j] = field_as_bytes[j]; - } - } - - bytes -} - -/// Converts an input BoundedVec of bytes into a BoundedVec of fields. Arbitrary byte arrays are not allowed: this is -/// assumed to be an array obtained via `fields_to_bytes`, i.e. one that actually represents fields. To convert a byte -/// array into Fields, use `bytes_to_fields`. -/// -/// The input bytes are chunked into chunks of 32 bytes. 
Each 32-byte chunk is viewed as big-endian, and is converted -/// into a Field. For example, [1, 10, 3, ..., 0] (32 bytes) is encoded as [1 * 256^31 + 10 * 256^30 + 3 * 256^29 + ... -/// + 0] Note 1: N must be a multiple of 32 bytes Note 2: The max value check code was taken from -/// std::field::to_be_bytes function. -pub fn fields_from_bytes(bytes: BoundedVec) -> BoundedVec { - // Assert that input length is a multiple of 32 - assert(bytes.len() % 32 == 0, "Input length must be a multiple of 32"); - - try_fields_from_bytes(bytes).expect(f"Value does not fit in field") -} - -/// Converts a single 32-byte big-endian chunk (starting at `offset`) into a Field, returning -/// `Option::none()` if the chunk's value is >= the field modulus. -fn try_field_from_be_bytes(bytes: BoundedVec, offset: u32) -> Option { - let p = std::field::modulus_be_bytes(); - let mut field = 0; - // Field arithmetic silently wraps values >= the modulus, so we compare each 32-byte chunk against - // the modulus byte-by-byte (big-endian, most significant first). The first byte that differs - // determines the result: if our byte is smaller we're valid, if larger we've overflowed. - // cmp tracks the result: 0 = equal so far, 1 = less than modulus, 2 = exceeds modulus. - let mut cmp: u8 = 0; - for j in 0..32 { - let byte = bytes.get(offset + j); - field = field * 256 + byte as Field; - - if cmp == 0 { - if byte < p[j] { - cmp = 1; - } else if byte > p[j] { - cmp = 2; - } - } - } - if cmp == 1 { - Option::some(field) - } else { - Option::none() - } -} - -/// Non-panicking version of `fields_from_bytes`. Returns `Option::none()` if the input -/// length is not a multiple of 32 or if any 32-byte chunk exceeds the field modulus. 
-pub(crate) fn try_fields_from_bytes(bytes: BoundedVec) -> Option> { - if bytes.len() % 32 == 0 { - let mut fields = BoundedVec::new(); - let num_chunks = bytes.len() / 32; - for i in 0..num_chunks { - let field = try_field_from_be_bytes(bytes, i * 32); - if field.is_some() { - fields.push(field.unwrap()); - } - } - if fields.len() as u32 == num_chunks { - Option::some(fields) - } else { - Option::none() - } - } else { - Option::none() - } -} - -mod tests { - use crate::utils::array::subarray; - use super::{fields_from_bytes, fields_to_bytes, try_fields_from_bytes}; - - #[test] - unconstrained fn random_fields_to_bytes_and_back(input: [Field; 3]) { - // Convert to bytes - let bytes = fields_to_bytes(input); - - // At this point in production, the log flies through the system and we get a BoundedVec on the other end. So - // we need to convert the field array to a BoundedVec to be able to feed it to the `fields_from_bytes` - // function. 113 is an arbitrary max length that is larger than the input length of 96. 
- let bytes_as_bounded_vec = BoundedVec::<_, 113>::from_array(bytes); - - // Convert back to fields - let fields_back = fields_from_bytes(bytes_as_bounded_vec); - - // Compare the original input with the round-tripped result - assert_eq(fields_back.len(), input.len()); - assert_eq(subarray(fields_back.storage(), 0), input); - } - - #[test(should_fail_with = "Input length must be a multiple of 32")] - unconstrained fn to_fields_assert() { - // 143 is an arbitrary max length that is larger than 33 - let input = BoundedVec::<_, 143>::from_array([ - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, - 30, 31, 32, 33, - ]); - - // This should fail since 33 is not a multiple of 32 - let _fields = fields_from_bytes(input); - } - - #[test] - unconstrained fn fields_from_bytes_max_value() { - let max_field_as_bytes: [u8; 32] = (-1).to_be_bytes(); - let input = BoundedVec::<_, 32>::from_array(max_field_as_bytes); - - let fields = fields_from_bytes(input); - - // The result should be a largest value storable in a field (-1 since we are modulo-ing) - assert_eq(fields.get(0), -1); - } - - // In this test we verify that overflow check works by taking the max allowed value, bumping a random byte and then - // feeding it to `fields_from_bytes` as input. - #[test(should_fail_with = "Value does not fit in field")] - unconstrained fn fields_from_bytes_overflow(random_value: u8) { - let index_of_byte_to_bump = random_value % 32; - - // Obtain the byte representation of the maximum field value - let max_field_value_as_bytes: [u8; 32] = (-1).to_be_bytes(); - - let byte_to_bump = max_field_value_as_bytes[index_of_byte_to_bump as u32]; - - // Skip test execution if the selected byte is already at maximum value (255). This is acceptable since we are - // using fuzz testing to generate many test cases. 
- if byte_to_bump != 255 { - let mut input = BoundedVec::<_, 32>::from_array(max_field_value_as_bytes); - - // Increment the selected byte to exceed the field's maximum value - input.set(index_of_byte_to_bump as u32, byte_to_bump + 1); - - // Attempt the conversion, which should fail due to the value exceeding the field's capacity - let _fields = fields_from_bytes(input); - } - } - - #[test] - unconstrained fn try_fields_from_bytes_returns_none_on_unaligned_length() { - let input = BoundedVec::<_, 64>::from_parts([0 as u8; 64], 33); - assert(try_fields_from_bytes(input).is_none()); - } - - #[test] - unconstrained fn try_fields_from_bytes_returns_none_on_field_modulus() { - // The field modulus itself is not a valid field value (it wraps to 0). - let p: [u8; 32] = std::field::modulus_be_bytes().as_array(); - let input = BoundedVec::::from_array(p); - assert(try_fields_from_bytes(input).is_none()); - } - - #[test] - unconstrained fn try_fields_from_bytes_round_trips(input: [Field; 3]) { - let bytes = BoundedVec::<_, 113>::from_array(fields_to_bytes(input)); - let fields = try_fields_from_bytes(bytes).unwrap(); - - assert_eq(fields.len(), input.len()); - assert_eq(subarray(fields.storage(), 0), input); - } -} diff --git a/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr b/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr index 0740acfa462b..744fd145dccf 100644 --- a/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr +++ b/noir-projects/aztec-nr/compressed-string/src/compressed_string.nr @@ -13,7 +13,7 @@ pub struct CompressedString { impl CompressedString { // TODO: if we move this into the utils of aztecnr (as suggested by #15968), we can adopt its existing - // `fields_from_bytes` conversion function, instead of this duplicated logic here. + // `encode_bytes_as_fields` conversion function, instead of this duplicated logic here. 
pub fn from_string(input_string: str) -> Self { let mut fields = [0; N]; let byts = input_string.as_bytes(); @@ -36,7 +36,7 @@ impl CompressedString { } // TODO: if we move this into the utils of aztecnr (as suggested by #15968), we can adopt its existing - // `bytes_from_fields` conversion function, instead of this duplicated logic here. + // `decode_bytes_from_fields` conversion function, instead of this duplicated logic here. pub fn to_bytes(self) -> [u8; M] { let mut result = [0; M]; let mut w_index = 0 as u32; diff --git a/noir-projects/aztec-nr/uint-note/src/uint_note.nr b/noir-projects/aztec-nr/uint-note/src/uint_note.nr index 3049c25fbba0..749404d2d4cc 100644 --- a/noir-projects/aztec-nr/uint-note/src/uint_note.nr +++ b/noir-projects/aztec-nr/uint-note/src/uint_note.nr @@ -10,10 +10,7 @@ use aztec::{ oracle::random::random, protocol::{ address::AztecAddress, - constants::{ - DOM_SEP__NOTE_COMPLETION_LOG_TAG, DOM_SEP__NOTE_HASH, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT, - PRIVATE_LOG_CIPHERTEXT_LEN, - }, + constants::{DOM_SEP__NOTE_COMPLETION_LOG_TAG, DOM_SEP__NOTE_HASH, DOM_SEP__PARTIAL_NOTE_VALIDITY_COMMITMENT}, hash::{compute_log_tag, compute_siloed_nullifier, poseidon2_hash_with_separator}, traits::{Deserialize, FromField, Hash, Packable, Serialize, ToField}, }, @@ -171,8 +168,6 @@ pub struct PartialUintNote { } // docs:end:partial_uint_note_def -global NOTE_COMPLETION_PAYLOAD_LENGTH: u32 = 2; - impl PartialUintNote { /// Completes the partial note, creating a new note that can be used like any other UintNote. pub fn complete(self, context: PublicContext, completer: AztecAddress, storage_slot: Field, value: u128) { @@ -228,8 +223,8 @@ impl PartialUintNote { // only done in private to hide the preimage of the hash that is inserted, but completed partial notes are // inserted in public as the public values are provided and the note hash computed. 
let log_tag = compute_log_tag(self.commitment, DOM_SEP__NOTE_COMPLETION_LOG_TAG); - let padded_payload = self.compute_note_completion_payload_padded_for_private_log(storage_slot, value); - context.emit_private_log_unsafe(log_tag, padded_payload, NOTE_COMPLETION_PAYLOAD_LENGTH); + let payload = BoundedVec::from_array([storage_slot, value.to_field()]); + context.emit_private_log_vec_unsafe(log_tag, payload); context.push_note_hash(self.compute_complete_note_hash(storage_slot, value)); } @@ -245,15 +240,6 @@ impl PartialUintNote { ) } - fn compute_note_completion_payload_padded_for_private_log( - _self: Self, - storage_slot: Field, - value: u128, - ) -> [Field; PRIVATE_LOG_CIPHERTEXT_LEN] { - let payload = [storage_slot, value.to_field()]; - payload.concat([0; PRIVATE_LOG_CIPHERTEXT_LEN - NOTE_COMPLETION_PAYLOAD_LENGTH]) - } - // docs:start:compute_complete_note_hash fn compute_complete_note_hash(self, storage_slot: Field, value: u128) -> Field { // Here we finalize the note hash by including the (public) storage slot and value into the partial note diff --git a/noir-projects/noir-contracts/Nargo.toml b/noir-projects/noir-contracts/Nargo.toml index 585057b7cfac..aebfcfdf6dd8 100644 --- a/noir-projects/noir-contracts/Nargo.toml +++ b/noir-projects/noir-contracts/Nargo.toml @@ -43,7 +43,7 @@ members = [ "contracts/test/avm_test_contract", "contracts/test/benchmarking_contract", "contracts/test/child_contract", - "contracts/test/counter_contract", + "contracts/test/counter/counter_contract", "contracts/test/custom_message_contract", "contracts/test/ephemeral_child_contract", "contracts/test/ephemeral_parent_contract", diff --git a/noir-projects/noir-contracts/bootstrap.sh b/noir-projects/noir-contracts/bootstrap.sh index 0f568bf70f2a..175af921b50f 100755 --- a/noir-projects/noir-contracts/bootstrap.sh +++ b/noir-projects/noir-contracts/bootstrap.sh @@ -1,5 +1,4 @@ #!/usr/bin/env bash -# TODO: THIS SCRIPT SHOULD NOW BE ABLE TO REPLACE TRANSPILATION AND VK GENERATION WITH 
'bb aztec_process'. # # Some notes if you have to work on this script. # - First of all, I'm sorry (edit: not sorry). It's a beautiful script but it's no fun to debug. I got carried away. @@ -30,77 +29,12 @@ export PLATFORM_TAG=any export BB=${BB:-../../barretenberg/cpp/build/bin/bb} export NARGO=${NARGO:-../../noir/noir-repo/target/release/nargo} -export TRANSPILER=${TRANSPILER:-../../avm-transpiler/target/release/avm-transpiler} -export STRIP_AZTEC_NR_PREFIX=${STRIP_AZTEC_NR_PREFIX:-./scripts/strip_aztec_nr_prefix.sh} export BB_HASH=${BB_HASH:-$(../../barretenberg/cpp/bootstrap.sh hash)} export NOIR_HASH=${NOIR_HASH:-$(../../noir/bootstrap.sh hash)} -export tmp_dir=./target/tmp - -# Remove our tmp dir from last run. -# Note: This can use BASH 'trap' for better cleanliness, but the script has been hitting edge-cases so is (temporarily?) simplified. -rm -rf $tmp_dir -mkdir -p $tmp_dir - # Set common flags for parallel. export PARALLEL_FLAGS="-j${PARALLELISM:-16} --halt now,fail=1 --memsuspend $(memsuspend_limit)" -# This computes a vk and adds it to the input function json if it's private, else returns same input. -# stdin has the function json. -# stdout receives the function json with the vk added (if private). -# The function is exported and called by a sub-shell in parallel, so we must "set -eu" etc.. -# If debugging, a set -x at the start can help. -function process_function { - set -euo pipefail - local func name bytecode_b64 hash vk - - contract_hash=$1 - # Read the function json. - func="$(cat)" - name=$(echo "$func" | jq -r '.name') - echo_stderr "Processing function: $name..." - - # Check if the function is neither public nor unconstrained. - # TODO: Why do we need to gen keys for functions that are not marked private? - # We allow the jq call to error (set +e) because it returns an error code if the result is false. - # We then differentiate between a real error, and the result being false. 
- set +e - make_vk=$(echo "$func" | jq -e '(.custom_attributes | index("public") == null) and (.is_unconstrained == false)') - if [ $? -ne 0 ] && [ "$make_vk" != "false" ]; then - echo_stderr "Failed to check function $name is neither public nor unconstrained." - exit 1 - fi - set -e - - if [ "$make_vk" == "true" ]; then - # It's a private function. - # Build hash, check if in cache. - # If it's in the cache it's extracted to $tmp_dir/$hash - bytecode_b64=$(echo "$func" | jq -r '.bytecode') - hash=$((echo "$BB_HASH"; echo "$bytecode_b64") | sha256sum | tr -d ' -') - - if ! cache_download vk-$contract_hash-$hash.tar.gz >&2; then - # It's not in the cache. Generate the vk file and upload it to the cache. - echo_stderr "Generating vk for function: $name..." - - local outdir=$(mktemp -d -p $tmp_dir) - echo "$bytecode_b64" | base64 -d | gunzip | $BB write_vk --scheme chonk -b - -o $outdir -v - mv $outdir/vk $tmp_dir/$contract_hash/$hash - - cache_upload vk-$contract_hash-$hash.tar.gz $tmp_dir/$contract_hash/$hash - fi - - # Return (echo) json containing the base64 encoded verification key. - vk=$(cat $tmp_dir/$contract_hash/$hash | base64 -w 0) - echo "$func" | jq -c --arg vk "$vk" '. + {verification_key: $vk}' - else - echo_stderr "Function $name is neither public nor unconstrained, skipping." - # Not a private function. Return the original function json. - echo "$func" - fi -} -export -f process_function - # Compute hash for a given contract. # $1 is the contract name, $2 is the folder name (e.g. "contracts" or "examples") function get_contract_hash { @@ -159,42 +93,26 @@ function get_contract_path { } export -f get_contract_path -# This compiles a noir contract, transpile's public functions, and generates vk's for private functions. +# This compiles a noir contract, transpiles public functions, strips internal prefixes, +# and generates verification keys for private functions via 'bb aztec_process'. # $1 is the input package name, $2 is the folder name (e.g. 
"contracts" or "examples") -# On exit it's fully processed json artifact is in the target dir. +# On exit its fully processed json artifact is in the target dir. # The function is exported and called by a sub-shell in parallel, so we must "set -eu" etc.. function compile { set -euo pipefail - local contract_name contract_hash local contract_path=$(get_contract_path "$1" "$2") - local contract=${contract_path##*/} + local contract=$(grep -oP '(?<=^name = ")[^"]+' "$2/$contract_path/Nargo.toml") # Calculate filename because nargo... - contract_name=$(cat $2/$contract_path/src/main.nr | awk '/^contract / { print $2 } /^pub contract / { print $3 }') + local contract_name=$(cat $2/$contract_path/src/main.nr | awk '/^contract / { print $2 } /^pub contract / { print $3 }') local filename="$contract-$contract_name.json" local json_path="./target/$filename" - contract_hash=$(get_contract_hash $1 $2) + local contract_hash=$(get_contract_hash $1 $2) if ! cache_download contract-$contract_hash.tar.gz; then - $NARGO compile --package $contract --inliner-aggressiveness 0 --deny-warnings - $TRANSPILER $json_path $json_path - $STRIP_AZTEC_NR_PREFIX $json_path + $NARGO compile --package $contract --inliner-aggressiveness 0 --deny-warnings + $BB aztec_process -i $json_path cache_upload contract-$contract_hash.tar.gz $json_path fi - - # We segregate equivalent vk's created by process_function. This was done to narrow down potential edge cases with identical VKs - # reading from cache at the same time. Create this folder up-front. - mkdir -p $tmp_dir/$contract_hash - - # Pipe each contract function, one per line (jq -c), into parallel calls of process_function. - # The returned jsons from process_function are converted back to a json array in the second jq -s call. - # When slurping (-s) in the last jq, we get an array of two elements: - # .[0] is the original json (at $json_path) - # .[1] is the updated functions on stdin (-) - # * merges their fields. 
- jq -c '.functions[]' $json_path | \ - parallel $PARALLEL_FLAGS --keep-order -N1 --block 8M --pipe process_function $contract_hash | \ - jq -s '{functions: .}' | jq -s '.[0] * {functions: .[1].functions}' $json_path - > $tmp_dir/$filename - mv $tmp_dir/$filename $json_path } export -f compile @@ -211,7 +129,7 @@ function build { if [ "$#" -eq 0 ]; then rm -rf target - mkdir -p $tmp_dir + mkdir -p target local contracts=$(grep -oP "(?<=$folder_name/)[^\"]+" Nargo.toml) # If pinned contracts exist, extract them and skip their compilation. diff --git a/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr index 443cbca8a0f5..4d950b4cf661 100644 --- a/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/ecdsa_k_account_contract/src/main.nr @@ -12,7 +12,10 @@ pub contract EcdsaKAccount { storage::storage, }, messages::message_delivery::MessageDelivery, - oracle::{auth_witness::get_auth_witness, notes::{get_sender_for_tags, set_sender_for_tags}}, + oracle::{ + auth_witness::get_auth_witness_as_bytes, + notes::{get_sender_for_tags, set_sender_for_tags}, + }, state_vars::SinglePrivateImmutable, }; @@ -81,14 +84,9 @@ pub contract EcdsaKAccount { let storage = Storage::init(context); let public_key = storage.signing_public_key.get_note(); - // Load auth witness // Safety: The witness is only used as a "magical value" that makes the signature verification below pass. // Hence it's safe. 
- let witness: [Field; 64] = unsafe { get_auth_witness(outer_hash) }; - let mut signature: [u8; 64] = [0; 64]; - for i in 0..64 { - signature[i] = witness[i] as u8; - } + let signature: [u8; 64] = unsafe { get_auth_witness_as_bytes(outer_hash) }; // Verify payload signature using Ethereum's signing scheme // Note that noir expects the hash of the message/challenge as input to the ECDSA verification. diff --git a/noir-projects/noir-contracts/contracts/account/ecdsa_r_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/ecdsa_r_account_contract/src/main.nr index 610b3d4eaddc..69279ff5c129 100644 --- a/noir-projects/noir-contracts/contracts/account/ecdsa_r_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/ecdsa_r_account_contract/src/main.nr @@ -11,7 +11,10 @@ pub contract EcdsaRAccount { storage::storage, }, messages::message_delivery::MessageDelivery, - oracle::{auth_witness::get_auth_witness, notes::{get_sender_for_tags, set_sender_for_tags}}, + oracle::{ + auth_witness::get_auth_witness_as_bytes, + notes::{get_sender_for_tags, set_sender_for_tags}, + }, state_vars::SinglePrivateImmutable, }; @@ -79,14 +82,9 @@ pub contract EcdsaRAccount { let storage = Storage::init(context); let public_key = storage.signing_public_key.get_note(); - // Load auth witness // Safety: The witness is only used as a "magical value" that makes the signature verification below pass. // Hence it's safe. - let witness: [Field; 64] = unsafe { get_auth_witness(outer_hash) }; - let mut signature: [u8; 64] = [0; 64]; - for i in 0..64 { - signature[i] = witness[i] as u8; - } + let signature: [u8; 64] = unsafe { get_auth_witness_as_bytes(outer_hash) }; // Verify payload signature using Ethereum's signing scheme // Note that noir expects the hash of the message/challenge as input to the ECDSA verification. 
diff --git a/noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr index 34fd2a160d9f..ff55d41771a5 100644 --- a/noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/schnorr_account_contract/src/main.nr @@ -20,7 +20,7 @@ pub contract SchnorrAccount { }, messages::message_delivery::MessageDelivery, oracle::{ - auth_witness::get_auth_witness, + auth_witness::get_auth_witness_as_bytes, get_nullifier_membership_witness::get_low_nullifier_membership_witness, notes::{get_sender_for_tags, set_sender_for_tags}, }, @@ -95,14 +95,9 @@ pub contract SchnorrAccount { let storage = Storage::init(context); let public_key = storage.signing_public_key.get_note(); - // Load auth witness // Safety: The witness is only used as a "magical value" that makes the signature verification below pass. // Hence it's safe. 
- let witness: [Field; 64] = unsafe { get_auth_witness(outer_hash) }; - let mut signature: [u8; 64] = [0; 64]; - for i in 0..64 { - signature[i] = witness[i] as u8; - } + let signature: [u8; 64] = unsafe { get_auth_witness_as_bytes(outer_hash) }; let pub_key = std::embedded_curve_ops::EmbeddedCurvePoint { x: public_key.x, y: public_key.y }; @@ -128,11 +123,7 @@ pub contract SchnorrAccount { inner_hash, ); - let witness: [Field; 64] = get_auth_witness(message_hash); - let mut signature: [u8; 64] = [0; 64]; - for i in 0..64 { - signature[i] = witness[i] as u8; - } + let signature: [u8; 64] = get_auth_witness_as_bytes(message_hash); let pub_key = std::embedded_curve_ops::EmbeddedCurvePoint { x: public_key.x, y: public_key.y }; let valid_in_private = diff --git a/noir-projects/noir-contracts/contracts/account/schnorr_hardcoded_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/schnorr_hardcoded_account_contract/src/main.nr index ac0d6e83f6f9..6657da79aeb1 100644 --- a/noir-projects/noir-contracts/contracts/account/schnorr_hardcoded_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/schnorr_hardcoded_account_contract/src/main.nr @@ -7,7 +7,7 @@ pub contract SchnorrHardcodedAccount { authwit::{account::AccountActions, entrypoint::app::AppPayload}, context::PrivateContext, macros::functions::{allow_phase_change, external, view}, - oracle::{auth_witness::get_auth_witness, notes::set_sender_for_tags}, + oracle::{auth_witness::get_auth_witness_as_bytes, notes::set_sender_for_tags}, }; use std::embedded_curve_ops::EmbeddedCurvePoint; @@ -38,15 +38,9 @@ pub contract SchnorrHardcodedAccount { #[contract_library_method] fn is_valid_impl(_context: &mut PrivateContext, outer_hash: Field) -> bool { - // Load auth witness and format as an u8 array - // Safety: The witness is only used as a "magical value" that makes the signature verification below pass. // Hence it's safe. 
- let witness: [Field; 64] = unsafe { get_auth_witness(outer_hash) }; - let mut signature: [u8; 64] = [0; 64]; - for i in 0..64 { - signature[i] = witness[i] as u8; - } + let signature: [u8; 64] = unsafe { get_auth_witness_as_bytes(outer_hash) }; // Verify signature using hardcoded public key schnorr::verify_signature( diff --git a/noir-projects/noir-contracts/contracts/account/simulated_ecdsa_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/simulated_ecdsa_account_contract/src/main.nr index b27a1d6f5a0e..805b6d429e0a 100644 --- a/noir-projects/noir-contracts/contracts/account/simulated_ecdsa_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/simulated_ecdsa_account_contract/src/main.nr @@ -39,10 +39,9 @@ pub contract SimulatedEcdsaAccount { // and is therefore never nullified, so in practice the log will never be squashed. We // pass the note hash counter anyway for correctness. let dummy_log: [Field; MESSAGE_CIPHERTEXT_LEN] = [seed + 2; MESSAGE_CIPHERTEXT_LEN]; - self.context.emit_raw_note_log_unsafe( + self.context.emit_raw_note_log_vec_unsafe( seed + 3, - dummy_log, - MESSAGE_CIPHERTEXT_LEN, + BoundedVec::from_array(dummy_log), self.context.side_effect_counter, ); } diff --git a/noir-projects/noir-contracts/contracts/account/simulated_schnorr_account_contract/src/main.nr b/noir-projects/noir-contracts/contracts/account/simulated_schnorr_account_contract/src/main.nr index 4ac523076a6e..29853d73d2dd 100644 --- a/noir-projects/noir-contracts/contracts/account/simulated_schnorr_account_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/account/simulated_schnorr_account_contract/src/main.nr @@ -39,10 +39,9 @@ pub contract SimulatedSchnorrAccount { // and is therefore never nullified, so in practice the log will never be squashed. We // pass the note hash counter anyway for correctness. 
let dummy_log: [Field; MESSAGE_CIPHERTEXT_LEN] = [seed + 2; MESSAGE_CIPHERTEXT_LEN]; - self.context.emit_raw_note_log_unsafe( + self.context.emit_raw_note_log_vec_unsafe( seed + 3, - dummy_log, - MESSAGE_CIPHERTEXT_LEN, + BoundedVec::from_array(dummy_log), self.context.side_effect_counter, ); } diff --git a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr index 42ad349a3889..fd79a97d926b 100644 --- a/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/benchmarking_contract/src/main.nr @@ -110,11 +110,11 @@ pub contract Benchmarking { let random_seed = unsafe { random() }; for i in 0..MAX_PRIVATE_LOGS_PER_CALL { - let mut log = [0; PRIVATE_LOG_CIPHERTEXT_LEN]; + let mut log: BoundedVec = BoundedVec::new(); for j in 0..PRIVATE_LOG_CIPHERTEXT_LEN { - log[j] = random_seed + (i * MAX_PRIVATE_LOGS_PER_CALL + j) as Field; + log.push(random_seed + (i * MAX_PRIVATE_LOGS_PER_CALL + j) as Field); } - self.context.emit_private_log_unsafe(0, log, PRIVATE_LOG_CIPHERTEXT_LEN); + self.context.emit_private_log_vec_unsafe(0, log); } } diff --git a/noir-projects/noir-contracts/contracts/test/counter/Nargo.toml b/noir-projects/noir-contracts/contracts/test/counter/Nargo.toml new file mode 100644 index 000000000000..8bec3f6a6701 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/test/counter/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["counter_contract", "counter_test"] diff --git a/noir-projects/noir-contracts/contracts/test/counter_contract/Nargo.toml b/noir-projects/noir-contracts/contracts/test/counter/counter_contract/Nargo.toml similarity index 50% rename from noir-projects/noir-contracts/contracts/test/counter_contract/Nargo.toml rename to noir-projects/noir-contracts/contracts/test/counter/counter_contract/Nargo.toml index 09b54cf10ec5..9b2c58e42173 100644 --- 
a/noir-projects/noir-contracts/contracts/test/counter_contract/Nargo.toml +++ b/noir-projects/noir-contracts/contracts/test/counter/counter_contract/Nargo.toml @@ -5,5 +5,5 @@ compiler_version = ">=0.25.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } -balance_set = { path = "../../../../aztec-nr/balance-set" } +aztec = { path = "../../../../../aztec-nr/aztec" } +balance_set = { path = "../../../../../aztec-nr/balance-set" } diff --git a/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/counter/counter_contract/src/main.nr similarity index 51% rename from noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr rename to noir-projects/noir-contracts/contracts/test/counter/counter_contract/src/main.nr index 6f67a87424fc..87260f157ad1 100644 --- a/noir-projects/noir-contracts/contracts/test/counter_contract/src/main.nr +++ b/noir-projects/noir-contracts/contracts/test/counter/counter_contract/src/main.nr @@ -82,74 +82,4 @@ pub contract Counter { self.call(Counter::at(other_counter).increment(owner)); } - - mod test { - use crate::Counter; - use aztec::{ - protocol::address::AztecAddress, test::helpers::test_environment::TestEnvironment, - }; - - pub unconstrained fn setup( - initial_value: u128, - ) -> (TestEnvironment, AztecAddress, AztecAddress) { - // Setup env, generate keys - let mut env = TestEnvironment::new(); - let owner = env.create_light_account(); - - // Deploy contract and initialize - let initializer = Counter::interface().initialize(initial_value as u64, owner); - let contract_address = - env.deploy("Counter").with_private_initializer(owner, initializer); - (env, contract_address, owner) - } - - #[test] - unconstrained fn test_increment() { - let initial_value = 5; - let (env, contract_address, owner) = setup(initial_value); - - // Read the stored value in the note - let initial_counter = - 
env.execute_utility(Counter::at(contract_address).get_counter(owner)); - assert( - initial_counter == initial_value, - f"Expected {initial_value} but got {initial_counter}", - ); - - // Increment the counter - env.call_private(owner, Counter::at(contract_address).increment(owner)); - - let incremented_counter = - env.execute_utility(Counter::at(contract_address).get_counter(owner)); - let expected_current_value = initial_value + 1; - assert( - expected_current_value == incremented_counter, - f"Expected {expected_current_value} but got {incremented_counter}", - ); - } - - #[test] - unconstrained fn extended_incrementing_and_decrementing() { - let initial_value = 5; - let (env, contract_address, owner) = setup(initial_value); - - // Checking that the note was discovered from private logs - let initial_note_value = - env.execute_utility(Counter::at(contract_address).get_counter(owner)); - assert(initial_note_value == initial_value); - - env.call_private(owner, Counter::at(contract_address).increment_twice(owner)); - - assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); - - let _ = env.call_private( - owner, - Counter::at(contract_address).increment_and_decrement(owner), - ); - assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); - - env.call_private(owner, Counter::at(contract_address).decrement(owner)); - assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 6); - } - } } diff --git a/noir-projects/noir-contracts/contracts/test/counter/counter_test/Nargo.toml b/noir-projects/noir-contracts/contracts/test/counter/counter_test/Nargo.toml new file mode 100644 index 000000000000..b587b4b2e560 --- /dev/null +++ b/noir-projects/noir-contracts/contracts/test/counter/counter_test/Nargo.toml @@ -0,0 +1,10 @@ +[package] +name = "counter_contract_test" +authors = [""] +compiler_version = ">=0.25.0" +type = "lib" + +[dependencies] +aztec = { path = "../../../../../aztec-nr/aztec" } 
+balance_set = { path = "../../../../../aztec-nr/balance-set" } +counter_contract = { path = "../counter_contract" } diff --git a/noir-projects/noir-contracts/contracts/test/counter/counter_test/src/lib.nr b/noir-projects/noir-contracts/contracts/test/counter/counter_test/src/lib.nr new file mode 100644 index 000000000000..2b1e84c12c3f --- /dev/null +++ b/noir-projects/noir-contracts/contracts/test/counter/counter_test/src/lib.nr @@ -0,0 +1,54 @@ +use aztec::{protocol::address::AztecAddress, test::helpers::test_environment::TestEnvironment}; +use counter_contract::Counter; + +pub unconstrained fn setup(initial_value: u128) -> (TestEnvironment, AztecAddress, AztecAddress) { + // Setup env, generate keys + let mut env = TestEnvironment::new(); + let owner = env.create_light_account(); + + // Deploy contract and initialize + let initializer = Counter::interface().initialize(initial_value as u64, owner); + let contract_address = + env.deploy("@counter_contract/Counter").with_private_initializer(owner, initializer); + (env, contract_address, owner) +} + +#[test] +unconstrained fn test_increment() { + let initial_value = 5; + let (mut env, contract_address, owner) = setup(initial_value); + + // Read the stored value in the note + let initial_counter = env.execute_utility(Counter::at(contract_address).get_counter(owner)); + assert(initial_counter == initial_value, f"Expected {initial_value} but got {initial_counter}"); + + // Increment the counter + env.call_private(owner, Counter::at(contract_address).increment(owner)); + + let incremented_counter = env.execute_utility(Counter::at(contract_address).get_counter(owner)); + let expected_current_value = initial_value + 1; + assert( + expected_current_value == incremented_counter, + f"Expected {expected_current_value} but got {incremented_counter}", + ); +} + +#[test] +unconstrained fn extended_incrementing_and_decrementing() { + let initial_value = 5; + let (env, contract_address, owner) = setup(initial_value); + + // 
Checking that the note was discovered from private logs + let initial_note_value = env.execute_utility(Counter::at(contract_address).get_counter(owner)); + assert(initial_note_value == initial_value); + + env.call_private(owner, Counter::at(contract_address).increment_twice(owner)); + + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); + + let _ = env.call_private(owner, Counter::at(contract_address).increment_and_decrement(owner)); + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 7); + + env.call_private(owner, Counter::at(contract_address).decrement(owner)); + assert_eq(env.execute_utility(Counter::at(contract_address).get_counter(owner)), 6); +} diff --git a/noir-projects/noir-contracts/contracts/test/counter_contract/src/test.nr b/noir-projects/noir-contracts/contracts/test/counter_contract/src/test.nr deleted file mode 100644 index 71f6bdde08e3..000000000000 --- a/noir-projects/noir-contracts/contracts/test/counter_contract/src/test.nr +++ /dev/null @@ -1,16 +0,0 @@ -use crate::Counter; -use aztec::{ - oracle::notes::set_sender_for_tags, protocol::address::AztecAddress, - test::helpers::test_environment::TestEnvironment, -}; - -pub unconstrained fn setup(initial_value: Field) -> (TestEnvironment, AztecAddress, AztecAddress) { - // Setup env, generate keys - let mut env = TestEnvironment::new(); - let owner = env.create_light_account(); - - // Deploy contract and initialize - let initializer = Counter::interface().initialize(initial_value as u64, owner); - let contract_address = env.deploy("Counter").with_private_initializer(owner, initializer); - (env, contract_address, owner) -} diff --git a/noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr b/noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr index bc952c1943b0..f230a400ec57 100644 --- a/noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr +++ 
b/noir-projects/noir-contracts/contracts/test/test_contract/src/main.nr @@ -31,7 +31,7 @@ pub contract Test { protocol::{ abis::function_selector::FunctionSelector, address::{AztecAddress, EthAddress}, - constants::{PRIVATE_LOG_CIPHERTEXT_LEN, MAX_PUBLIC_LOG_SIZE_IN_FIELDS}, + constants::MAX_PUBLIC_LOG_SIZE_IN_FIELDS, traits::{Hash, Packable, Serialize}, }, // Event related @@ -347,8 +347,8 @@ pub contract Test { self.call_self.emit_array_as_encrypted_log(tag, [0, 0, 0, 0, 0], owner, false); // Emit a log with non-encrypted content for testing purpose. - let leaky_log = event.serialize().concat([0; PRIVATE_LOG_CIPHERTEXT_LEN - 5]); - self.context.emit_private_log_unsafe(tag, leaky_log, 5); + let leaky_log = BoundedVec::from_array(event.serialize()); + self.context.emit_private_log_vec_unsafe(tag, leaky_log); } } diff --git a/noir-projects/noir-contracts/scripts/bootstrap_just_one_contract.sh b/noir-projects/noir-contracts/scripts/bootstrap_just_one_contract.sh index 2fad30ebac67..6681d0139a72 100755 --- a/noir-projects/noir-contracts/scripts/bootstrap_just_one_contract.sh +++ b/noir-projects/noir-contracts/scripts/bootstrap_just_one_contract.sh @@ -28,11 +28,7 @@ echo "Compiling contract..." NARGO=${NARGO:-../../../noir/noir-repo/target/release/nargo} $NARGO compile --silence-warnings --inliner-aggressiveness 0 --package $CONTRACT_PACKAGE_NAME -# Strip __aztec_nr_internals__ prefix from function names in the ABI. -echo "Stripping aztec nr prefix..." -./strip_aztec_nr_prefix.sh "../target/$JSON_NAME.json" - -# Transpile public functions and generate VKs for private functions. +# Transpile public functions, strip internal prefixes, and generate VKs for private functions. echo "Processing contract artifact..." 
BB=${BB:-../../../barretenberg/cpp/build/bin/bb} "$BB" aztec_process -i "../target/$JSON_NAME.json" -o "../target/$JSON_NAME.json" -f diff --git a/noir-projects/noir-contracts/scripts/strip_aztec_nr_prefix.sh b/noir-projects/noir-contracts/scripts/strip_aztec_nr_prefix.sh deleted file mode 100755 index e0466361421d..000000000000 --- a/noir-projects/noir-contracts/scripts/strip_aztec_nr_prefix.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# This script strips the `__aztec_nr_internals__` prefix from function names in the exported contract ABI JSON. -# -# Background: -# The #[aztec] macro generates new functions prefixed with `__aztec_nr_internals__` from the original external contract -# functions (see aztec.nr and internals_functions_generation/mod.nr). The original functions are then modified to be -# uncallable (replaced with static_assert(false, ...)) to prevent developers from inadvertently calling them directly -# instead of performing proper contract calls. -# -# Why this script is needed: -# During compilation, the transformed functions with the `__aztec_nr_internals__` prefix are what actually get -# compiled into circuits. However, in the exported ABI JSON that external tools and developers use, we want to -# expose the original function names without the internal prefix. This makes the ABI cleaner and matches what -# developers originally wrote in their contracts. 
- -json_path=$1 -temp_file="${json_path}.tmp" - -jq '.functions |= map(.name |= sub("^__aztec_nr_internals__"; ""))' "$json_path" > "$temp_file" -mv "$temp_file" "$json_path" diff --git a/noir-projects/scripts/test_aztec_process.sh b/noir-projects/scripts/test_aztec_process.sh index 83a04c3fbba1..da4aecb4df36 100755 --- a/noir-projects/scripts/test_aztec_process.sh +++ b/noir-projects/scripts/test_aztec_process.sh @@ -93,4 +93,14 @@ fi echo "✓ Force regeneration works" +# Test 4: Verify prefix stripping +echo "Test 4: Prefix stripping" +prefix_count=$(jq '[.functions[] | select(.name | startswith("__aztec_nr_internals__"))] | length' "$tmp_output") +if [ "$prefix_count" -ne 0 ]; then + echo "Error: Found $prefix_count functions with __aztec_nr_internals__ prefix after processing" + exit 1 +fi + +echo "✓ Prefix stripping works (no functions with internal prefix)" + echo "All bb aztec_process tests passed!" diff --git a/noir/bootstrap.sh b/noir/bootstrap.sh index 91515a2005e4..a57176b7921c 100755 --- a/noir/bootstrap.sh +++ b/noir/bootstrap.sh @@ -94,11 +94,33 @@ function build { echo_header "noir build" if semver check $REF_NAME; then + # REF_NAME matches semver meaning we are doing a release + git -C noir-repo fetch --tags if ! git -C noir-repo describe --tags --exact-match HEAD &>/dev/null; then echo_stderr "We're building a release but the noir-repo HEAD is not an official release." exit 1 fi + + # Check that the noir release has nargo binaries available for download. Without this check, we could push an aztec + # release that errors out with a 404/gzip error on install (the install scripts invoke noirup which would fail). + local noir_tag=$(git -C noir-repo describe --tags --exact-match HEAD) + echo "Checking noir release $noir_tag for nargo binary assets..." 
+ local asset_count + asset_count=$(gh release view "$noir_tag" \ + --repo noir-lang/noir \ + --json assets \ + --jq '[.assets[] | select(.name | test("^nargo-"))] | length') || { + echo_stderr "Error: Failed to query noir-lang/noir release '$noir_tag'. Does the release exist?" + exit 1 + } + if [ "$asset_count" -eq 0 ]; then + echo_stderr "Error: Noir release '$noir_tag' exists but has no nargo binary assets." + echo_stderr "Users will get 404 errors when trying to install nargo via noirup." + echo_stderr "Ensure the noir release pipeline has finished uploading binaries before releasing aztec-packages." + exit 1 + fi + echo "Found $asset_count nargo binary asset(s) in noir release $noir_tag." fi denoise "retry install_deps" diff --git a/yarn-project/aztec/package.json b/yarn-project/aztec/package.json index 8ea062dcafd2..df0af1e38d73 100644 --- a/yarn-project/aztec/package.json +++ b/yarn-project/aztec/package.json @@ -61,6 +61,7 @@ "@aztec/validator-ha-signer": "workspace:^", "@aztec/wallets": "workspace:^", "@aztec/world-state": "workspace:^", + "@iarna/toml": "^2.2.5", "@types/chalk": "^2.2.0", "abitype": "^0.8.11", "chalk": "^5.3.0", diff --git a/yarn-project/aztec/scripts/add_crate.sh b/yarn-project/aztec/scripts/add_crate.sh new file mode 100755 index 000000000000..fea8a4b66e42 --- /dev/null +++ b/yarn-project/aztec/scripts/add_crate.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Creates a contract+test crate pair and adds them to an existing workspace. +# Usage: add_crate.sh +# Must be called from a workspace root that already has Nargo.toml with [workspace]. 
+ +crate_name=$1 + +if [ -z "$crate_name" ]; then + echo "Error: crate name is required" + exit 1 +fi + +if [[ "$crate_name" == *"/"* ]] || [[ "$crate_name" == *"\\"* ]]; then + echo "Error: crate name must not contain path separators" + exit 1 +fi + +contract_dir="${crate_name}_contract" +test_dir="${crate_name}_test" + +if [ -d "$contract_dir" ]; then + echo "Error: directory '$contract_dir' already exists" + exit 1 +fi +if [ -d "$test_dir" ]; then + echo "Error: directory '$test_dir' already exists" + exit 1 +fi + +# Get the actual aztec version for the git tag. +AZTEC_VERSION=$(jq -r '.version' $(dirname $0)/../package.json) + +# Create contract crate +mkdir -p "$contract_dir/src" +cat > "$contract_dir/Nargo.toml" << CEOF +[package] +name = "${crate_name}_contract" +type = "contract" + +[dependencies] +aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag="v${AZTEC_VERSION}", directory="aztec" } +CEOF + +cat > "$contract_dir/src/main.nr" << 'EOF' +use aztec::macros::aztec; + +#[aztec] +pub contract Main { + use aztec::macros::functions::{external, initializer}; + + #[initializer] + #[external("private")] + fn constructor() {} +} +EOF + +# Create test crate +mkdir -p "$test_dir/src" +cat > "$test_dir/Nargo.toml" << TEOF +[package] +name = "${crate_name}_test" +type = "lib" + +[dependencies] +aztec = { git="https://github.com/AztecProtocol/aztec-nr", tag="v${AZTEC_VERSION}", directory="aztec" } +${crate_name}_contract = { path = "../${contract_dir}" } +TEOF + +cat > "$test_dir/src/lib.nr" << 'NOIR' +use aztec::test::helpers::test_environment::TestEnvironment; +use __CRATE_NAME___contract::Main; + +#[test] +unconstrained fn test_constructor() { + let mut env = TestEnvironment::new(); + let deployer = env.create_light_account(); + + // Deploy the contract with the default constructor: + let contract_address = env.deploy("@__CRATE_NAME___contract/Main").with_private_initializer( + deployer, + Main::interface().constructor(), + ); + + // Deploy without an 
initializer: + let contract_address = env.deploy("@__CRATE_NAME___contract/Main").without_initializer(); +} +NOIR + +sed -i "s/__CRATE_NAME__/${crate_name}/g" "$test_dir/src/lib.nr" + +# Add members to workspace Nargo.toml +if grep -q 'members\s*=\s*\[\s*\]' Nargo.toml; then + # Empty array: members = [] + sed -i "s|members\s*=\s*\[\s*\]|members = [\"${contract_dir}\", \"${test_dir}\"]|" Nargo.toml +else + # Non-empty array: add before closing ] + sed -i "s|\(members\s*=\s*\[.*\)\]|\1, \"${contract_dir}\", \"${test_dir}\"]|" Nargo.toml +fi + +echo "Created crates '${contract_dir}' and '${test_dir}'" diff --git a/yarn-project/aztec/scripts/aztec.sh b/yarn-project/aztec/scripts/aztec.sh index 3b905090d772..3db3dbecebe8 100755 --- a/yarn-project/aztec/scripts/aztec.sh +++ b/yarn-project/aztec/scripts/aztec.sh @@ -21,7 +21,10 @@ function aztec { case $cmd in test) - export LOG_LEVEL="${LOG_LEVEL:-"error;trace:contract"}" + # Attempt to compile, no-op if there are no changes + node --no-warnings "$script_dir/../dest/bin/index.js" compile + + export LOG_LEVEL="${LOG_LEVEL:-"error;trace:contract_log"}" aztec start --txe --port 8081 & server_pid=$! 
trap 'kill $server_pid &>/dev/null || true' EXIT diff --git a/yarn-project/aztec/scripts/init.sh b/yarn-project/aztec/scripts/init.sh index fa66617ff6ed..e0641a9c17fa 100755 --- a/yarn-project/aztec/scripts/init.sh +++ b/yarn-project/aztec/scripts/init.sh @@ -1,35 +1,39 @@ #!/usr/bin/env bash set -euo pipefail -NARGO=${NARGO:-nargo} script_path=$(realpath $(dirname "$0")) -for arg in "$@"; do - if [ "$arg" == "--help" ] || [ "$arg" == "-h" ]; then - cat << 'EOF' +# Parse arguments +while [[ $# -gt 0 ]]; do + case $1 in + --help|-h) + cat << 'EOF' Aztec Init - Create a new Aztec Noir project in the current directory -Usage: aztec init [OPTIONS] +Usage: aztec init Options: - --name Name of the package [default: current directory name] - --lib Use a library template -h, --help Print help -This command creates a new Aztec Noir project in the current directory using nargo -and automatically adds the Aztec.nr dependency to your Nargo.toml file. +This command creates a new Aztec Noir project in the current directory with +a workspace containing a contract crate and a test crate, and automatically +adds the Aztec.nr dependency to both. +If a workspace already exists in the current directory, use +'aztec new ' instead to add another contract. EOF - exit 0 - fi - if [ "$arg" == "--lib" ]; then - is_contract=0 - fi + exit 0 + ;; + *) + echo "Error: unexpected argument '$1'" + echo "Usage: aztec init" + echo "Run 'aztec init --help' for more information" + exit 1 + ;; + esac done -echo "Initializing Noir project..." -$NARGO init "$@" +package_name="$(basename $(pwd))" -if [ "${is_contract:-1}" -eq 1 ]; then - $script_path/setup_project.sh -fi +echo "Initializing Aztec contract project..." 
+$script_path/setup_workspace.sh "$package_name" diff --git a/yarn-project/aztec/scripts/new.sh b/yarn-project/aztec/scripts/new.sh index 6901a235bfe8..9fb73277811a 100755 --- a/yarn-project/aztec/scripts/new.sh +++ b/yarn-project/aztec/scripts/new.sh @@ -1,59 +1,83 @@ #!/usr/bin/env bash set -euo pipefail -NARGO=${NARGO:-nargo} script_path=$(realpath $(dirname "$0")) -type_arg="--contract" +project_path="" while [[ $# -gt 0 ]]; do case $1 in --help|-h) cat << 'EOF' -Aztec New - Create a new Aztec Noir project in a new directory +Aztec New - Create a new Aztec Noir project or add a contract to an existing workspace -Usage: aztec new [OPTIONS] +Usage: aztec new Arguments: - The path to save the new project + The name for the new contract (also used as the directory name when + creating a new workspace) Options: - --name Name of the package [default: package directory name] - --lib Create a library template instead of a contract -h, --help Print help -This command creates a new Aztec Noir project using nargo and automatically -adds the Aztec.nr dependency to your Nargo.toml file. +When run outside an existing workspace: + Creates a new directory with a workspace containing a contract crate and a + test crate, and automatically adds the Aztec.nr dependency to both. + +When run inside an existing workspace (Nargo.toml with [workspace] exists): + Adds a new contract crate and test crate to the existing workspace. 
EOF exit 0 ;; - --lib) - type_arg="--lib" - shift - ;; - --name) - name_arg="--name $2" - shift 2 + -*) + echo "Error: unknown option '$1'" + echo "Usage: aztec new " + echo "Run 'aztec new --help' for more information" + exit 1 ;; *) + if [ -n "$project_path" ]; then + echo "Error: unexpected argument '$1'" + echo "Usage: aztec new " + echo "Run 'aztec new --help' for more information" + exit 1 + fi project_path=$1 shift - break ;; esac done if [ -z "$project_path" ]; then - echo "Error: PATH argument is required" - echo "Usage: aztec new [OPTIONS] " + echo "Error: NAME argument is required" + echo "Usage: aztec new " echo "Run 'aztec new --help' for more information" exit 1 fi -echo "Creating new Noir project at $project_path..." -$NARGO new $type_arg ${name_arg:-} $project_path +package_name="$(basename $project_path)" + +# Validate that the name contains only valid Noir identifier characters +if ! [[ "$package_name" =~ ^[a-zA-Z][a-zA-Z0-9_]*$ ]]; then + echo "Error: '$package_name' is not a valid contract name" + echo "Name must start with a letter and contain only letters, digits, and underscores" + exit 1 +fi + +# Check if we're inside an existing workspace +if [ -f "Nargo.toml" ] && grep -q '\[workspace\]' Nargo.toml; then + # Add crate pair to existing workspace + echo "Adding contract '$package_name' to existing workspace..." + $script_path/add_crate.sh "$package_name" +else + # Create new workspace + if [ -d "$project_path" ] && [ "$(ls -A $project_path 2>/dev/null)" ]; then + echo "Error: $project_path already exists and is not empty" + exit 1 + fi -if [ "$type_arg" == "--contract" ]; then - cd $project_path - $script_path/setup_project.sh + echo "Creating new Aztec contract project at $project_path..." 
+ mkdir -p "$project_path" + cd "$project_path" + $script_path/setup_workspace.sh "$package_name" fi diff --git a/yarn-project/aztec/scripts/setup_project.sh b/yarn-project/aztec/scripts/setup_project.sh deleted file mode 100755 index 1797a5625491..000000000000 --- a/yarn-project/aztec/scripts/setup_project.sh +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Get the actual aztec version for the git tag. -AZTEC_VERSION=$(jq -r '.version' $(dirname $0)/../package.json) -NARGO_TOML_PATH="Nargo.toml" -MAIN_NR_PATH="src/main.nr" - -if [ ! -f "$NARGO_TOML_PATH" ]; then - >&2 echo "Warning: Could not find Nargo.toml at $NARGO_TOML_PATH to add aztec dependency" - exit 1 -fi - -if [ ! -f "$MAIN_NR_PATH" ]; then - >&2 echo "Warning: Could not find main.nr at $MAIN_NR_PATH" - exit 1 -fi - -# Add aztec dependency to Nargo.toml -echo "" >> "$NARGO_TOML_PATH" -echo "aztec = { git=\"https://github.com/AztecProtocol/aztec-nr\", tag=\"v${AZTEC_VERSION}\", directory=\"aztec\" }" >> "$NARGO_TOML_PATH" -echo "Added aztec dependency (v${AZTEC_VERSION}) to Nargo.toml" - -# Replace the contents of main.nr with the Aztec contract template -cat > "$MAIN_NR_PATH" << 'EOF' -use aztec::macros::aztec; - -#[aztec] -contract Main {} -EOF -echo "Created main.nr with Aztec contract template" diff --git a/yarn-project/aztec/scripts/setup_workspace.sh b/yarn-project/aztec/scripts/setup_workspace.sh new file mode 100755 index 000000000000..e08f1665136a --- /dev/null +++ b/yarn-project/aztec/scripts/setup_workspace.sh @@ -0,0 +1,68 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Creates an Aztec contract workspace with a contract crate and a test crate. +# Usage: setup_workspace.sh +# Must be called from the workspace root directory. 
+ +package_name=$1 +script_path=$(realpath $(dirname "$0")) + +if [ -z "$package_name" ]; then + echo "Error: package name is required" + exit 1 +fi + +if [ -f "Nargo.toml" ]; then + echo "Error: Nargo.toml already exists in the current directory." + echo "To add another contract crate to this workspace, use 'aztec new ' instead." + exit 1 +fi + +# Create workspace root Nargo.toml with empty members (add_crate.sh will populate) +cat > Nargo.toml << 'EOF' +[workspace] +members = [] +EOF + +# Create the first crate pair +$script_path/add_crate.sh "$package_name" + +# Create README +cat > README.md << REOF +# ${package_name} + +An Aztec Noir contract project. + +## Compile + +\`\`\`bash +aztec compile +\`\`\` + +This compiles all contract crates and outputs artifacts to \`target/\`. + +## Test + +\`\`\`bash +aztec test +\`\`\` + +This runs all tests in the workspace. + +## Generate TypeScript bindings + +\`\`\`bash +aztec codegen target -o src/artifacts +\`\`\` + +This generates TypeScript contract artifacts from the compiled output in \`target/\` into \`src/artifacts/\`. 
+REOF + +# Create .gitignore +cat > .gitignore << 'GEOF' +target/ +codegenCache.json +GEOF + +echo "Created Aztec contract workspace with crates '${package_name}_contract' and '${package_name}_test'" diff --git a/yarn-project/aztec/src/cli/aztec_start_action.ts b/yarn-project/aztec/src/cli/aztec_start_action.ts index 7a3c3331a4a4..7b03c921e83e 100644 --- a/yarn-project/aztec/src/cli/aztec_start_action.ts +++ b/yarn-project/aztec/src/cli/aztec_start_action.ts @@ -27,8 +27,7 @@ export async function aztecStart(options: any, userLog: LogFn, debugLogger: Logg let config: ChainConfig | undefined = undefined; if (options.localNetwork) { - const localNetwork = extractNamespacedOptions(options, 'local-network'); - localNetwork.testAccounts = true; + const localNetwork = extractNamespacedOptions(options, 'localNetwork'); userLog(`${splash}\n${github}\n\n`); userLog(`Setting up Aztec local network ${packageVersion ?? 'unknown'}, please stand by...`); diff --git a/yarn-project/aztec/src/cli/aztec_start_options.test.ts b/yarn-project/aztec/src/cli/aztec_start_options.test.ts index 057e459467bc..befe8bedf91a 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.test.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.test.ts @@ -94,6 +94,21 @@ describe('aztec_start_options commander integration', () => { expect(typeof opts.port).toBe('number'); }); + it('respects TEST_ACCOUNTS env var for local network', () => { + process.env.TEST_ACCOUNTS = 'false'; + const cmd = buildCommandWith(['LOCAL_NETWORK']); + cmd.parse(['node', 'cli']); + const opts = cmd.opts(); + expect(opts['localNetwork.testAccounts']).toBe(false); + }); + + it('defaults testAccounts to true for local network', () => { + const cmd = buildCommandWith(['LOCAL_NETWORK']); + cmd.parse(['node', 'cli']); + const opts = cmd.opts(); + expect(opts['localNetwork.testAccounts']).toBe(true); + }); + it('parses optional boolean flag values', () => { const cmd = buildCommandWith(['P2P SUBSYSTEM']); diff --git 
a/yarn-project/aztec/src/cli/aztec_start_options.ts b/yarn-project/aztec/src/cli/aztec_start_options.ts index 1b477f8d4d9e..c4ebb4db196a 100644 --- a/yarn-project/aztec/src/cli/aztec_start_options.ts +++ b/yarn-project/aztec/src/cli/aztec_start_options.ts @@ -125,6 +125,12 @@ export const aztecStartOptions: { [key: string]: AztecStartOption[] } = { defaultValue: DefaultMnemonic, env: 'MNEMONIC', }, + { + flag: '--local-network.testAccounts', + description: 'Deploy test accounts on local network start', + env: 'TEST_ACCOUNTS', + ...booleanConfigHelper(true), + }, ], API: [ { diff --git a/yarn-project/aztec/src/cli/cli.ts b/yarn-project/aztec/src/cli/cli.ts index 1c79cf24f294..f1c8269ef0e3 100644 --- a/yarn-project/aztec/src/cli/cli.ts +++ b/yarn-project/aztec/src/cli/cli.ts @@ -37,9 +37,9 @@ export function injectAztecCommands(program: Command, userLog: LogFn, debugLogge ` Additional commands: - init [folder] [options] creates a new Aztec Noir project. - new [options] creates a new Aztec Noir project in a new directory. - test [options] starts a TXE and runs "nargo test" using it as the oracle resolver. + init creates a new Aztec Noir workspace in the current directory. + new creates a new Aztec Noir workspace in its own directory (or creates a new contract-test crates pair and adds it to the current workspace if run in workspace). + test [options] starts a TXE and runs "nargo test" using it as the oracle resolver. `, ); } diff --git a/yarn-project/aztec/src/cli/cmds/compile.test.ts b/yarn-project/aztec/src/cli/cmds/compile.test.ts index 8a0af802b005..4943b6b268e5 100644 --- a/yarn-project/aztec/src/cli/cmds/compile.test.ts +++ b/yarn-project/aztec/src/cli/cmds/compile.test.ts @@ -80,3 +80,22 @@ function runCodegen() { throw new Error(`codegen failed:\n${e.stderr?.toString() ?? e.message}`); } } + +// Validates that aztec compile warns when contract crates contain tests. We want tests to be in a separate `lib` crate. 
+describe('aztec compile warns about tests in contract crates', () => { + const CONTRACT_WITH_TESTS_WORKSPACE = join(PACKAGE_ROOT, 'test/contract-with-tests'); + const CONTRACT_WITH_TESTS_TARGET = join(CONTRACT_WITH_TESTS_WORKSPACE, 'target'); + + afterAll(() => { + rmSync(CONTRACT_WITH_TESTS_TARGET, { recursive: true, force: true }); + }); + + it('warns when a contract crate contains tests', () => { + const result = execFileSync('node', [CLI, 'compile'], { + cwd: CONTRACT_WITH_TESTS_WORKSPACE, + stdio: 'pipe', + encoding: 'utf-8', + }); + expect(result).toMatch(/Found tests in contract crate/); + }, 120_000); +}); diff --git a/yarn-project/aztec/src/cli/cmds/compile.ts b/yarn-project/aztec/src/cli/cmds/compile.ts index e4fe46e2d0e8..5fc2259b2305 100644 --- a/yarn-project/aztec/src/cli/cmds/compile.ts +++ b/yarn-project/aztec/src/cli/cmds/compile.ts @@ -3,10 +3,13 @@ import type { LogFn } from '@aztec/foundation/log'; import { execFileSync } from 'child_process'; import type { Command } from 'commander'; -import { readFile, writeFile } from 'fs/promises'; +import { readFile } from 'fs/promises'; +import { join } from 'path'; import { readArtifactFiles } from './utils/artifacts.js'; +import { needsRecompile } from './utils/needs_recompile.js'; import { run } from './utils/spawn.js'; +import { warnIfAztecVersionMismatch } from './utils/warn_if_aztec_version_mismatch.js'; /** Returns paths to contract artifacts in the target directory. */ async function collectContractArtifacts(): Promise { @@ -22,35 +25,129 @@ async function collectContractArtifacts(): Promise { return files.filter(f => Array.isArray(f.content.functions)).map(f => f.filePath); } -/** Strips the `__aztec_nr_internals__` prefix from function names in contract artifacts. 
*/ -async function stripInternalPrefixes(artifactPaths: string[]): Promise { - for (const path of artifactPaths) { - const artifact = JSON.parse(await readFile(path, 'utf-8')); - for (const fn of artifact.functions) { - if (typeof fn.name === 'string') { - fn.name = fn.name.replace(/^__aztec_nr_internals__/, ''); +/** Returns the set of package names that are contract crates in the current workspace. */ +async function getContractPackageNames(): Promise> { + const contractNames = new Set(); + + let rootToml: string; + try { + rootToml = await readFile('Nargo.toml', 'utf-8'); + } catch { + return contractNames; + } + + const membersMatch = rootToml.match(/members\s*=\s*\[([^\]]*)\]/); + if (membersMatch) { + const members = membersMatch[1] + .split(',') + .map(m => m.trim().replace(/^"|"$/g, '')) + .filter(m => m.length > 0); + + for (const member of members) { + try { + const memberToml = await readFile(join(member, 'Nargo.toml'), 'utf-8'); + if (/type\s*=\s*"contract"/.test(memberToml)) { + const nameMatch = memberToml.match(/name\s*=\s*"([^"]+)"/); + if (nameMatch) { + contractNames.add(nameMatch[1]); + } + } + } catch { + // Member directory might not exist or have no Nargo.toml; skip. } } - await writeFile(path, JSON.stringify(artifact, null, 2) + '\n'); + } else { + // Single-crate project (no workspace): check if the root Nargo.toml itself is a contract. + if (/type\s*=\s*"contract"/.test(rootToml)) { + const nameMatch = rootToml.match(/name\s*=\s*"([^"]+)"/); + if (nameMatch) { + contractNames.add(nameMatch[1]); + } + } + } + + return contractNames; +} + +/** Checks that no tests exist in contract crates and fails with a helpful message if they do. 
*/ +async function checkNoTestsInContracts(nargo: string, log: LogFn): Promise { + const contractPackages = await getContractPackageNames(); + if (contractPackages.size === 0) { + return; + } + + let output: string; + try { + // We list tests for all the crates in the workspace + output = execFileSync(nargo, ['test', '--list-tests', '--silence-warnings'], { + encoding: 'utf-8', + stdio: ['pipe', 'pipe', 'inherit'], + }); + } catch { + // If listing tests fails (e.g. test crate has compile errors), skip the check. + return; + } + + // The output of the `nargo test --list-tests` command is as follows: + // ``` + // crate_name_1 test_name_1 + // crate_name_2 test_name_2 + // ... + // crate_name_n test_name_n + // ``` + // + // We parse the individual lines and then we check if any contract crate appeared in the parsed output. + const lines = output + .trim() + .split('\n') + .filter(line => line.length > 0); + const testsInContracts: { packageName: string; testName: string }[] = []; + + for (const line of lines) { + const spaceIndex = line.indexOf(' '); + if (spaceIndex === -1) { + continue; + } + const packageName = line.substring(0, spaceIndex); + const testName = line.substring(spaceIndex + 1); + if (contractPackages.has(packageName)) { + testsInContracts.push({ packageName, testName }); + } + } + + if (testsInContracts.length > 0) { + const details = testsInContracts.map(t => ` ${t.packageName}::${t.testName}`).join('\n'); + log( + `WARNING: Found tests in contract crate(s):\n${details}\n\n` + + `Tests should be in a dedicated test crate, not in the contract crate.\n` + + `Learn more: https://docs.aztec.network/errors/1`, + ); } } /** Compiles Aztec Noir contracts and postprocesses artifacts. */ async function compileAztecContract(nargoArgs: string[], log: LogFn): Promise { + await warnIfAztecVersionMismatch(log); + + if (!(await needsRecompile())) { + log('No source changes detected, skipping compilation.'); + return; + } + const nargo = process.env.NARGO ?? 
'nargo'; const bb = process.env.BB ?? findBbBinary() ?? 'bb'; await run(nargo, ['compile', ...nargoArgs]); + // Ensure contract crates contain no tests (tests belong in the test crate). + await checkNoTestsInContracts(nargo, log); + const artifacts = await collectContractArtifacts(); if (artifacts.length > 0) { log('Postprocessing contracts...'); const bbArgs = artifacts.flatMap(a => ['-i', a]); await run(bb, ['aztec_process', ...bbArgs]); - - // TODO: This should be part of bb aztec_process! - await stripInternalPrefixes(artifacts); } log('Compilation complete!'); diff --git a/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.test.ts b/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.test.ts new file mode 100644 index 000000000000..76e7d81d5dfa --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.test.ts @@ -0,0 +1,251 @@ +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { mkdir, rm, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join, resolve } from 'path'; + +import { collectCrateDirs, nargoGitDepPath } from './collect_crate_dirs.js'; +import { run } from './spawn.js'; + +/** Create a directory (recursively). */ +async function mkdirp(dir: string) { + await mkdir(dir, { recursive: true }); +} + +/** Create a directory with a minimal `[package]` Nargo.toml inside it. */ +async function makePackage(dir: string, name: string, type = 'lib', deps?: Record): Promise { + await mkdirp(dir); + let toml = `[package]\nname = "${name}"\ntype = "${type}"\n`; + if (deps) { + toml += `\n[dependencies]\n`; + for (const [depName, depValue] of Object.entries(deps)) { + toml += `${depName} = ${depValue}\n`; + } + } + await writeFile(join(dir, 'Nargo.toml'), toml); +} + +/** + * Creates a local git repository with a Nargo.toml committed and tagged. Returns the repo path, + * which can be used as a `file://` git URL in tests. 
+ */ +async function makeGitRepo(dir: string, tag: string, pkgName: string): Promise { + await makePackage(dir, pkgName); + await run('git', ['-C', dir, 'init']); + await run('git', ['-C', dir, 'config', 'user.email', 'test@test.com']); + await run('git', ['-C', dir, 'config', 'user.name', 'Test']); + await run('git', ['-C', dir, 'add', '.']); + await run('git', ['-C', dir, 'commit', '-m', 'init', '--no-gpg-sign']); + await run('git', ['-C', dir, 'tag', tag]); +} + +describe('collectCrateDirs', () => { + let tempDir: string; + let originalHome: string | undefined; + + beforeEach(async () => { + tempDir = join(tmpdir(), `collect-crate-dirs-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await mkdirp(tempDir); + // Redirect $HOME so nargoGitDepPath writes into our temp tree instead of the real ~/.nargo + originalHome = process.env.HOME; + process.env.HOME = tempDir; + }); + + afterEach(async () => { + process.env.HOME = originalHome; + await rm(tempDir, { recursive: true, force: true }); + }); + + // ── Path / workspace traversal ─────────────────────────────────────────── + + it('returns only the start crate when there are no dependencies', async () => { + const projectDir = join(tempDir, 'project'); + await makePackage(projectDir, 'project', 'contract'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toEqual([resolve(projectDir)]); + }); + + it('follows path-based dependencies', async () => { + const projectDir = join(tempDir, 'project'); + const libDir = join(tempDir, 'lib'); + + await makePackage(projectDir, 'project', 'contract', { lib: '{ path = "../lib" }' }); + await makePackage(libDir, 'lib'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toHaveLength(2); + expect(result).toContain(resolve(projectDir)); + expect(result).toContain(resolve(libDir)); + }); + + it('visits all members of a workspace root', async () => { + const workspaceDir = join(tempDir, 'workspace'); + const crateA = 
join(workspaceDir, 'a'); + const crateB = join(workspaceDir, 'b'); + + await mkdirp(workspaceDir); + await writeFile(join(workspaceDir, 'Nargo.toml'), `[workspace]\nmembers = ["a", "b"]\n`); + await makePackage(crateA, 'a', 'contract'); + await makePackage(crateB, 'b', 'lib'); + + const result = await collectCrateDirs(workspaceDir); + + expect(result).toHaveLength(3); + expect(result).toContain(resolve(workspaceDir)); + expect(result).toContain(resolve(crateA)); + expect(result).toContain(resolve(crateB)); + }); + + it('visits a shared dependency only once', async () => { + const projectDir = join(tempDir, 'project'); + const crateA = join(tempDir, 'a'); + const crateB = join(tempDir, 'b'); + const shared = join(tempDir, 'shared'); + + await makePackage(projectDir, 'project', 'contract', { a: '{ path = "../a" }', b: '{ path = "../b" }' }); + await makePackage(crateA, 'a', 'lib', { shared: '{ path = "../shared" }' }); + await makePackage(crateB, 'b', 'lib', { shared: '{ path = "../shared" }' }); + await makePackage(shared, 'shared'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toHaveLength(4); // project, a, b, shared + expect(result.filter(d => d === resolve(shared))).toHaveLength(1); + }); + + it('does not infinite-loop on circular path dependencies', async () => { + const crateA = join(tempDir, 'a'); + const crateB = join(tempDir, 'b'); + + await makePackage(crateA, 'a', 'lib', { b: '{ path = "../b" }' }); + await makePackage(crateB, 'b', 'lib', { a: '{ path = "../a" }' }); + + const result = await collectCrateDirs(crateA); + + expect(result).toHaveLength(2); + expect(result).toContain(resolve(crateA)); + expect(result).toContain(resolve(crateB)); + }); + + it('throws when Nargo.toml is missing from the start directory', async () => { + const projectDir = join(tempDir, 'project'); + await mkdirp(projectDir); // directory exists but no Nargo.toml + + await expect(collectCrateDirs(projectDir)).rejects.toThrow('Nargo.toml not found'); + 
}); + + it('throws when a path dependency resolves to a file instead of a directory', async () => { + const projectDir = join(tempDir, 'project'); + await writeFile(join(tempDir, 'not_a_dir'), 'I am a file'); + await makePackage(projectDir, 'project', 'contract', { bad: '{ path = "../not_a_dir" }' }); + + await expect(collectCrateDirs(projectDir)).rejects.toThrow('not a directory'); + }); + + // ── skipGitDeps ────────────────────────────────────────────────────────── + + it('ignores git-based dependencies when skipGitDeps is true', async () => { + const projectDir = join(tempDir, 'project'); + await makePackage(projectDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-packages", tag = "v1.0" }', + }); + + // Should return only the project dir without attempting to resolve or clone the git dep + const result = await collectCrateDirs(projectDir, { skipGitDeps: true }); + + expect(result).toEqual([resolve(projectDir)]); + }); + + // ── Git deps — already cached ($HOME redirected, no real clone) ────────── + + it('includes a cached git dependency without cloning', async () => { + const projectDir = join(tempDir, 'project'); + // Pre-create the nargo cache entry that nargoGitDepPath would compute — simulates nargo having + // already fetched this dep (existsSync check passes so no clone is attempted). 
+ const cacheDir = nargoGitDepPath('https://github.com/AztecProtocol/aztec-packages', 'v1.0'); + await makePackage(projectDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-packages", tag = "v1.0" }', + }); + await makePackage(cacheDir, 'aztec-packages'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toContain(resolve(cacheDir)); + }); + + it('visits the subdirectory crate when the git dep has a directory field', async () => { + // Models the real aztec-nr pattern: + // aztec = { git = "…/aztec-packages", tag = "v0.82.0", directory = "noir-projects/aztec-nr/aztec" } + // nargo clones the whole repo to the cache root; only the subdir is the actual crate. + const projectDir = join(tempDir, 'project'); + const cacheRoot = nargoGitDepPath('https://github.com/AztecProtocol/aztec-packages', 'v0.82.0'); + const crateDir = join(cacheRoot, 'noir-projects', 'aztec-nr', 'aztec'); + + await makePackage(projectDir, 'project', 'contract', { + aztec: + '{ git = "https://github.com/AztecProtocol/aztec-packages", tag = "v0.82.0", directory = "noir-projects/aztec-nr/aztec" }', + }); + // makePackage(crateDir) creates all parents including cacheRoot via mkdirp + await makePackage(crateDir, 'aztec'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toContain(resolve(crateDir)); + // The cache root itself is never visited as a crate (only the subdir is) + expect(result).not.toContain(resolve(cacheRoot)); + }); + + it('follows path deps declared inside a cached git dependency', async () => { + // This test scenario could occur as the git based dep could point to a path based dep in the repository that got + // cloned. 
+ + const projectDir = join(tempDir, 'project'); + const cacheDir = nargoGitDepPath('https://github.com/SomeOrg/some-repo', 'v2.0'); + const transitiveDep = join(cacheDir, 'lib'); + + await makePackage(projectDir, 'project', 'contract', { + some_dep: '{ git = "https://github.com/SomeOrg/some-repo", tag = "v2.0" }', // eslint-disable-line camelcase + }); + // Cached dep itself has a path dependency + await makePackage(cacheDir, 'some_dep', 'lib', { lib: '{ path = "lib" }' }); + await makePackage(transitiveDep, 'lib'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toContain(resolve(cacheDir)); + expect(result).toContain(resolve(transitiveDep)); + }); + + // ── Git deps — fetch behavior (real local git repos) ─────────────────── + + it('clones a git dependency when it is not in the cache', async () => { + // Create a real local git repo tagged "v2.0" to serve as the remote. + // This exercises the actual clone path without any network access. + const repoDir = join(tempDir, 'upstream-repo'); + await makeGitRepo(repoDir, 'v2.0', 'my-dep'); + + const projectDir = join(tempDir, 'project'); + await makePackage(projectDir, 'project', 'contract', { + my_dep: `{ git = "file://${repoDir}", tag = "v2.0" }`, // eslint-disable-line camelcase + }); + + const expectedCachePath = nargoGitDepPath(`file://${repoDir}`, 'v2.0'); + + const result = await collectCrateDirs(projectDir); + + expect(result).toContain(resolve(expectedCachePath)); + }); + + it('throws a helpful error when the git clone fails', async () => { + const projectDir = join(tempDir, 'project'); + await makePackage(projectDir, 'project', 'contract', { + bad: '{ git = "file:///nonexistent/repo/that/does/not/exist", tag = "v1.0" }', + }); + + const err: Error = await collectCrateDirs(projectDir).catch(e => e); + expect(err.message).toContain('Failed to fetch git dependency'); + expect(err.message).toContain('nargo check'); + }); +}); diff --git 
a/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.ts b/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.ts new file mode 100644 index 000000000000..f98dffcc864c --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/collect_crate_dirs.ts @@ -0,0 +1,118 @@ +import TOML from '@iarna/toml'; +import { existsSync } from 'fs'; +import { mkdir, readFile, stat } from 'fs/promises'; +import { homedir } from 'os'; +import { dirname, join, resolve } from 'path'; + +import { run } from './spawn.js'; + +/** + * Recursively collects crate directories starting from startCrateDir by following dependencies declared in Nargo.toml + * files. + * + * When `skipGitDeps` is false (default), git-based deps are followed and fetched into the nargo cache + * (`$HOME/nargo///`) if not already present. + * + * When `skipGitDeps` is true, git-based deps are ignored entirely. + */ +export async function collectCrateDirs(startCrateDir: string, opts?: { skipGitDeps?: boolean }): Promise { + const { skipGitDeps = false } = opts ?? {}; + const visited = new Set(); + + async function visit(crateDir: string): Promise { + const absDir = resolve(crateDir); + if (visited.has(absDir)) { + return; + } + visited.add(absDir); + + const tomlPath = join(absDir, 'Nargo.toml'); + const content = await readFile(tomlPath, 'utf-8').catch(() => { + throw new Error(`Incorrectly defined dependency. Nargo.toml not found in ${absDir}`); + }); + + const parsed = TOML.parse(content) as Record; + const members = (parsed.workspace as Record)?.members as string[] | undefined; + + // A Nargo.toml is either a workspace root (has workspace.members) or a single crate (has dependencies). + if (Array.isArray(members)) { + // The crate is a workspace root and has members defined so we visit the members + for (const member of members) { + await visit(resolve(absDir, member)); + } + } else { + // Single crate — follow its deps + const deps = (parsed.dependencies as Record) ?? 
{}; + for (const dep of Object.values(deps)) { + if (!dep || typeof dep !== 'object') { + continue; + } + if (typeof dep.path === 'string') { + // Dependency contains "path" hence it's a local dependency. We just check it's a real directory and then we + // recursively search through it + const depPath = resolve(absDir, dep.path); + const s = await stat(depPath); + if (!s.isDirectory()) { + throw new Error( + `Dependency path "${dep.path}" in ${tomlPath} resolves to ${depPath} which is not a directory`, + ); + } + await visit(depPath); + } else if (!skipGitDeps && typeof dep.git === 'string' && typeof dep.tag === 'string') { + // Dependency contains "git" hence it's a git dependency. We ensure it has been fetched and fetch it if + // it's not the case and then we recursively search through it. + await fetchAndVisit(dep.git, dep.tag, dep.directory); + } + } + } + } + + async function fetchAndVisit(gitUrl: string, tag: string, directory?: string): Promise { + // `directory` is set when the dep lives in a subdirectory of a repository, e.g.: + // aztec = { git = "https://github.com/AztecProtocol/aztec-packages", tag = "v0.82.0", + // directory = "noir-projects/aztec-nr/aztec" } + // In that case nargo clones the whole repo and the crate root is /. + const cachePath = nargoGitDepPath(gitUrl, tag); + const crateDir = directory ? join(cachePath, directory) : cachePath; + await ensureGitDepCached(gitUrl, tag, cachePath); + await visit(crateDir); + } + + await visit(startCrateDir); + return [...visited]; +} + +/** + * Computes the local nargo cache path for a git dependency, mirroring nargo's own `git_dep_location` function. + * Path format: `$HOME/nargo///` + * e.g. 
`~/nargo/github.com/AztecProtocol/aztec-packages/v0.82.0` + * + * Source: noir/noir-repo/tooling/nargo_toml/src/git.rs + */ +export function nargoGitDepPath(gitUrl: string, tag: string): string { + const url = new URL(gitUrl); + const domain = url.hostname; + const repoPath = url.pathname.replace(/^\//, ''); + return join(process.env.HOME ?? homedir(), 'nargo', domain, repoPath, tag); +} + +/** + * Ensures a git dep is present in the nargo cache, cloning it if it isn't. Mirrors nargo's `clone_git_repo`. + * If cloning fails (e.g. no network), throws with a message suggesting `nargo check` to prime the cache. + * + * Source: noir/noir-repo/tooling/nargo_toml/src/git.rs + */ +async function ensureGitDepCached(gitUrl: string, tag: string, cachePath: string): Promise { + if (existsSync(cachePath)) { + return; + } + await mkdir(dirname(cachePath), { recursive: true }); + try { + await run('git', ['-c', 'advice.detachedHead=false', 'clone', '--depth', '1', '--branch', tag, gitUrl, cachePath]); + } catch (err: any) { + throw new Error( + `Failed to fetch git dependency ${gitUrl}@${tag}: ${err?.message ?? err}.\n` + + `Try running \`nargo check\` first to prime the dependency cache.`, + ); + } +} diff --git a/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts new file mode 100644 index 000000000000..9f2755898be6 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.test.ts @@ -0,0 +1,278 @@ +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { mkdir, rm, utimes, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { needsRecompile } from './needs_recompile.js'; + +/** Create a file (if needed) and set its timestamp to the given value (seconds since epoch). 
*/ +async function touch(filePath: string, timeSec: number) { + // we apply the 'a' flag to mimic the behavior of touch command that does not change contents of a file if it already + // exist + await writeFile(filePath, '', { flag: 'a' }); + await utimes(filePath, timeSec, timeSec); +} + +/** Create a directory (recursively). */ +async function mkdirp(dir: string) { + await mkdir(dir, { recursive: true }); +} + +describe('needsRecompile', () => { + let tempDir: string; + let originalCwd: string; + + beforeEach(async () => { + originalCwd = process.cwd(); + // Create a unique temp directory and chdir into it so needsRecompile() + // resolves its relative paths ('target', '.') against our test fixtures. + tempDir = join(tmpdir(), `needs-recompile-test-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await mkdirp(tempDir); + process.chdir(tempDir); + }); + + afterEach(async () => { + process.chdir(originalCwd); + await rm(tempDir, { recursive: true, force: true }); + }); + + it('returns true when target directory does not exist', async () => { + // No target/ at all — always needs recompile. + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when target directory is empty (no .json artifacts)', async () => { + await mkdirp('target'); + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when target has only non-json files', async () => { + await mkdirp('target'); + await touch(join('target', 'something.txt'), 1000); + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + expect(await needsRecompile()).toBe(true); + }); + + it('returns false when artifacts are newer than all sources', async () => { + // Source files at t=1000, artifact at t=2000. 
+ await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(false); + }); + + it('returns true when a .nr source file is newer than the newest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('target', 'artifact.json'), 2000); + await touch(join('src', 'main.nr'), 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('returns true when Nargo.toml is newer than the newest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 3000, 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('follows path-based dependencies and detects newer sources in them', async () => { + // Main project depends on a local library via path. + const libDir = join(tempDir, 'lib', 'my_dep'); + await mkdirp(join(libDir, 'src')); + await mkdirp('src'); + await mkdirp('target'); + + // Main project Nargo.toml with a path dependency. + const mainToml = `[package] +name = "test" +type = "contract" + +[dependencies] +my_dep = { path = "lib/my_dep" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + // Dependency Nargo.toml + await writeFile(join(libDir, 'Nargo.toml'), '[package]\nname = "my_dep"\ntype = "lib"\n'); + await utimes(join(libDir, 'Nargo.toml'), 1000, 1000); + + // Source files — all old. 
+ await touch(join('src', 'main.nr'), 1000); + await touch(join(libDir, 'src', 'lib.nr'), 1000); + + // Artifact is newer than all sources. + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(false); + + // Now update a source file in the dependency. + await utimes(join(libDir, 'src', 'lib.nr'), 3000, 3000); + + expect(await needsRecompile()).toBe(true); + }); + + it('ignores git-based dependencies (no path field)', async () => { + await mkdirp('src'); + await mkdirp('target'); + + // Nargo.toml with a git dependency only. + const toml = `[package] +name = "test" +type = "contract" + +[dependencies] +aztec = { git = "https://github.com/example/repo", tag = "v1.0" } +`; + await writeFile('Nargo.toml', toml); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Should return false and not error out because of invalid links — git deps are not searched through since they + // are fixed to a tag in Nargo.toml (and if Nargo.toml got modified we would detect it). + expect(await needsRecompile()).toBe(false); + }); + + it('skips target/ directories when scanning for source files', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Place a newer .nr file inside a nested target/ directory. + // This should be ignored. 
+ await mkdirp(join('src', 'target')); + await touch(join('src', 'target', 'cached.nr'), 5000); + + expect(await needsRecompile()).toBe(false); + }); + + it('compares against the oldest artifact when multiple exist', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 2500); + // Two artifacts: one old, one very new. + await touch(join('target', 'old_artifact.json'), 2000); + await touch(join('target', 'new_artifact.json'), 3000); + + // Source (2500) is newer than the oldest artifact (2000), so recompile. + expect(await needsRecompile()).toBe(true); + }); + + it('returns false when all sources are older than the oldest artifact', async () => { + await mkdirp('src'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'old_artifact.json'), 2000); + await touch(join('target', 'new_artifact.json'), 3000); + + // Source (1000) < oldest artifact (2000), no recompile. + expect(await needsRecompile()).toBe(false); + }); + + it('handles deeply nested .nr source files', async () => { + await mkdirp('src/nested/deep'); + await mkdirp('target'); + + await writeFile('Nargo.toml', '[package]\nname = "test"\ntype = "contract"\n'); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'nested', 'deep', 'module.nr'), 3000); + await touch(join('target', 'artifact.json'), 2000); + + expect(await needsRecompile()).toBe(true); + }); + + it('throws when a path dependency resolves to a file instead of a directory', async () => { + await mkdirp('src'); + await mkdirp('target'); + + // Create a file where the dependency path points. 
+ await writeFile('not_a_dir', 'I am a file'); + + const mainToml = `[package] +name = "test" +type = "contract" + +[dependencies] +bad_dep = { path = "not_a_dir" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + await expect(needsRecompile()).rejects.toThrow('which is not a directory'); + }); + + it('does not follow circular path dependencies', async () => { + // Two projects that depend on each other via path. + const libDir = join(tempDir, 'lib'); + await mkdirp(join(libDir, 'src')); + await mkdirp('src'); + await mkdirp('target'); + + const mainToml = `[package] +name = "main" +type = "contract" + +[dependencies] +lib = { path = "lib" } +`; + await writeFile('Nargo.toml', mainToml); + await utimes('Nargo.toml', 1000, 1000); + + // lib depends back on the main project. + const libToml = `[package] +name = "lib" +type = "lib" + +[dependencies] +main = { path = ".." } +`; + await writeFile(join(libDir, 'Nargo.toml'), libToml); + await utimes(join(libDir, 'Nargo.toml'), 1000, 1000); + + await touch(join('src', 'main.nr'), 1000); + await touch(join(libDir, 'src', 'lib.nr'), 1000); + await touch(join('target', 'artifact.json'), 2000); + + // Should not infinite-loop; should return false since all sources are old. 
+ expect(await needsRecompile()).toBe(false); + }); +}); diff --git a/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts new file mode 100644 index 000000000000..1c0c177d5a02 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/needs_recompile.ts @@ -0,0 +1,98 @@ +import { readdir, stat } from 'fs/promises'; +import { join } from 'path'; + +import { collectCrateDirs } from './collect_crate_dirs.js'; + +/** + * Returns true if recompilation is needed: either no artifacts exist in target/ or any .nr or Nargo.toml source file + * (including path-based dependencies) is newer than the oldest artifact. We compare against the oldest artifact so + * that a source change between the oldest and newest compilation (e.g. in a multi-contract workspace) still triggers + * a recompile. + * + * Note: The above implies that if there is a random json file in the target dir we would be always recompiling. + */ +export async function needsRecompile(): Promise { + const oldestArtifactMs = await getOldestArtifactModificationTime('target'); + if (oldestArtifactMs === undefined) { + return true; + } + + // Git deps are pinned to a specific tag in Nargo.toml and nargo always fetches an exact tag, so their contents never + // change without Nargo.toml itself changing — and Nargo.toml is already tracked as a source file. Hence we can + // safely ignore checking source files of git deps. + const crateDirs = await collectCrateDirs('.', { skipGitDeps: true }); + return hasNewerSourceFile(crateDirs, oldestArtifactMs); +} + +/** + * Returns the last modification time (timestamp in ms) of the oldest .json artifact in targetDir, or undefined if + * none exist. 
+ */ +async function getOldestArtifactModificationTime(targetDir: string): Promise { + let entries: string[]; + try { + entries = (await readdir(targetDir)).filter(f => f.endsWith('.json')); + } catch (err: any) { + if (err?.code === 'ENOENT') { + return undefined; + } + throw err; + } + + if (entries.length === 0) { + return undefined; + } + + let oldest = Infinity; + for (const entry of entries) { + const s = await stat(join(targetDir, entry)); + if (s.mtimeMs < oldest) { + oldest = s.mtimeMs; + } + } + return oldest; +} + +/** + * Walks crate dirs looking for .nr and Nargo.toml files newer than thresholdMs. Short-circuits on the first match. + */ +async function hasNewerSourceFile(crateDirs: string[], thresholdMs: number): Promise { + // Returns true if it find a new file than thresholdMs, false otherwise + async function walkForNewer(dir: string): Promise { + let entries; + try { + entries = await readdir(dir, { withFileTypes: true }); + } catch { + return false; + } + + // We iterate over the entries in the dir + for (const entry of entries) { + const fullPath = join(dir, entry.name); + if (entry.isDirectory()) { + // If the entry is a dir and it's not called `target` we recursively enter it + if (entry.name === 'target') { + continue; + } + if (await walkForNewer(fullPath)) { + return true; + } + } else if (entry.name === 'Nargo.toml' || entry.name.endsWith('.nr')) { + // The entry is a Nargo.toml file or *.nr file so we check the timestamp + const s = await stat(fullPath); + if (s.mtimeMs > thresholdMs) { + return true; + } + } + } + return false; + } + + // We search through the crate dirs + for (const dir of crateDirs) { + if (await walkForNewer(dir)) { + return true; + } + } + return false; +} diff --git a/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.test.ts b/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.test.ts new file mode 100644 index 000000000000..544e201bfc0d --- /dev/null +++ 
b/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.test.ts @@ -0,0 +1,159 @@ +import { afterEach, beforeEach, describe, expect, it } from '@jest/globals'; +import { mkdir, rm, writeFile } from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { warnIfAztecVersionMismatch } from './warn_if_aztec_version_mismatch.js'; + +/** Create a directory (recursively). */ +async function mkdirp(dir: string) { + await mkdir(dir, { recursive: true }); +} + +/** Create a directory with a minimal `[package]` Nargo.toml inside it. */ +async function makePackage(dir: string, name: string, type = 'lib', deps?: Record<string, string>): Promise<void> { + await mkdirp(dir); + let toml = `[package]\nname = "${name}"\ntype = "${type}"\n`; + if (deps) { + toml += `\n[dependencies]\n`; + for (const [depName, depValue] of Object.entries(deps)) { + toml += `${depName} = ${depValue}\n`; + } + } + await writeFile(join(dir, 'Nargo.toml'), toml); +} + +describe('warnIfAztecVersionMismatch', () => { + let tempDir: string; + let originalCwd: string; + let logMessages: string[]; + const log = (msg: string) => { + logMessages.push(msg); + }; + + beforeEach(async () => { + originalCwd = process.cwd(); + tempDir = join(tmpdir(), `version-match-${Date.now()}-${Math.random().toString(36).slice(2)}`); + await mkdirp(tempDir); + process.chdir(tempDir); + logMessages = []; + }); + + afterEach(async () => { + process.chdir(originalCwd); + await rm(tempDir, { recursive: true, force: true }); + }); + + it('does not warn when the aztec dependency tag matches the CLI version', async () => { + await makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "aztec" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages.filter(m => m.includes('WARNING'))).toHaveLength(0); + }); + + it('warns when the aztec dependency tag does not match the CLI version', async () => { + await 
makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v0.99.0", directory = "aztec" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages).toHaveLength(1); + expect(logMessages[0]).toContain('WARNING'); + expect(logMessages[0]).toContain('v0.99.0'); + expect(logMessages[0]).toContain('v1.0.0'); + }); + + it('warns when a non-aztec aztec-nr dependency tag does not match the CLI version', async () => { + await makePackage(tempDir, 'project', 'contract', { + // eslint-disable-next-line camelcase + uint_note: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v0.99.0", directory = "uint-note" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages).toHaveLength(1); + expect(logMessages[0]).toContain('WARNING'); + expect(logMessages[0]).toContain('uint_note'); + expect(logMessages[0]).toContain('v0.99.0'); + expect(logMessages[0]).toContain('v1.0.0'); + }); + + it('warns about a sibling aztec-nr dependency even when the aztec dependency matches', async () => { + await makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "aztec" }', + // eslint-disable-next-line camelcase + uint_note: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v0.99.0", directory = "uint-note" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages).toHaveLength(1); + expect(logMessages[0]).toContain('WARNING'); + expect(logMessages[0]).toContain('uint_note'); + expect(logMessages[0]).not.toMatch(/—\s*aztec\s*\(/); + }); + + it('does not warn when multiple aztec-nr dependencies all match the CLI version', async () => { + await makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "aztec" }', + // eslint-disable-next-line camelcase + uint_note: '{ git = 
"https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "uint-note" }', + // eslint-disable-next-line camelcase + compressed_string: + '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "compressed-string" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages.filter(m => m.includes('WARNING'))).toHaveLength(0); + }); + + it('does not warn for unrelated third-party git dependencies', async () => { + await makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr", tag = "v1.0.0", directory = "aztec" }', + // eslint-disable-next-line camelcase + noir_string_search: '{ git = "https://github.com/noir-lang/noir_string_search", tag = "v0.1.0" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages.filter(m => m.includes('WARNING'))).toHaveLength(0); + }); + + it('normalizes trailing slashes and .git suffixes in the aztec-nr git URL', async () => { + await makePackage(tempDir, 'project', 'contract', { + aztec: '{ git = "https://github.com/AztecProtocol/aztec-nr.git", tag = "v1.0.0", directory = "aztec" }', + // eslint-disable-next-line camelcase + uint_note: '{ git = "https://github.com/AztecProtocol/aztec-nr/", tag = "v1.0.0", directory = "uint-note" }', + }); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages.filter(m => m.includes('WARNING'))).toHaveLength(0); + }); + + it('warns when the CLI version is not available', async () => { + await makePackage(tempDir, 'project', 'contract'); + + await warnIfAztecVersionMismatch(log, ''); + + expect(logMessages).toHaveLength(1); + expect(logMessages[0]).toContain('CLI version not found'); + }); + + it('does not warn when the project has no aztec dependency', async () => { + const libDir = join(tempDir, 'lib'); + await makePackage(tempDir, 'project', 'contract', { + some_other_lib: '{ path = "lib" }', // eslint-disable-line camelcase + }); + await 
makePackage(libDir, 'lib'); + + await warnIfAztecVersionMismatch(log, '1.0.0'); + + expect(logMessages.filter(m => m.includes('WARNING'))).toHaveLength(0); + }); +}); diff --git a/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.ts b/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.ts new file mode 100644 index 000000000000..3cedfa39a660 --- /dev/null +++ b/yarn-project/aztec/src/cli/cmds/utils/warn_if_aztec_version_mismatch.ts @@ -0,0 +1,77 @@ +import type { LogFn } from '@aztec/foundation/log'; +import { getPackageVersion } from '@aztec/stdlib/update-checker'; + +import TOML from '@iarna/toml'; +import { readFile } from 'fs/promises'; +import { join } from 'path'; + +import { collectCrateDirs } from './collect_crate_dirs.js'; + +/** Returns true if the given git URL points to the AztecProtocol/aztec-nr repository. */ +function isAztecNrGitUrl(gitUrl: string): boolean { + let url: URL; + try { + url = new URL(gitUrl); + } catch { + return false; + } + if (url.hostname !== 'github.com') { + return false; + } + const repoPath = url.pathname + .replace(/^\//, '') + .replace(/\.git$/, '') + .replace(/\/$/, ''); + return repoPath === 'AztecProtocol/aztec-nr'; +} + +/** Warns if any aztec-nr git dependency in a crate's Nargo.toml has a tag that doesn't match the CLI version. */ +export async function warnIfAztecVersionMismatch(log: LogFn, cliVersion?: string): Promise<void> { + const version = cliVersion ?? getPackageVersion(); + if (!version) { + log(`WARNING: aztec CLI version not found. 
Skipping dependency compatibility check.`); + return; + } + + const expectedTag = `v${version}`; + const mismatches: { file: string; depName: string; tag: string }[] = []; + + const crateDirs = await collectCrateDirs('.', { skipGitDeps: true }); + + for (const dir of crateDirs) { + const tomlPath = join(dir, 'Nargo.toml'); + let content: string; + try { + content = await readFile(tomlPath, 'utf-8'); + } catch { + continue; + } + + const parsed = TOML.parse(content) as Record<string, any>; + const deps = (parsed.dependencies as Record<string, any>) ?? {}; + + for (const [depName, dep] of Object.entries(deps)) { + // Skip non-object deps (e.g. malformed entries) and anything that isn't a tagged git dep. + if (!dep || typeof dep !== 'object' || typeof dep.git !== 'string' || typeof dep.tag !== 'string') { + continue; + } + // Only flag deps that are sourced from the aztec-nr repo. + if (!isAztecNrGitUrl(dep.git)) { + continue; + } + if (dep.tag !== expectedTag) { + mismatches.push({ file: tomlPath, depName, tag: dep.tag }); + } + } + } + + if (mismatches.length > 0) { + const details = mismatches.map(m => ` ${m.file} — ${m.depName} (${m.tag})`).join('\n'); + log( + `WARNING: Aztec dependency version mismatch detected.\n` + + `The following aztec-nr dependencies do not match the CLI version (${expectedTag}):\n` + + `${details}\n\n` + + `See https://docs.aztec.network/errors/9 for how to update your dependencies.`, + ); + } +} diff --git a/yarn-project/aztec/src/testing/anvil_test_watcher.ts b/yarn-project/aztec/src/testing/anvil_test_watcher.ts index 4eabc6583949..2eb87b017aae 100644 --- a/yarn-project/aztec/src/testing/anvil_test_watcher.ts +++ b/yarn-project/aztec/src/testing/anvil_test_watcher.ts @@ -136,8 +136,15 @@ export class AnvilTestWatcher { this.logger.warn(`L1 is ahead of wall time. Syncing wall time to L1 time`); this.dateProvider.setTime(l1Time); } else if (l1Time + Number(this.l2SlotDuration) * 1000 < wallTime) { - this.logger.warn(`L1 is more than 1 L2 slot behind wall time. 
Warping to wall time`); - await this.cheatcodes.warp(Math.ceil(wallTime / 1000)); + // Warp L1 to the slot boundary at-or-before wall time. Rounding to a slot boundary (rather than + // `ceil(wallTime / 1000)`) keeps this loop's target aligned with `warpTimeIfNeeded`'s + // `nextSlotTimestamp` target, avoiding a race where the two loops pick timestamps a fraction of + // a second apart and one of them is then rejected by anvil as non-monotonic. + const wallSec = Math.floor(wallTime / 1000); + const targetSlot = await this.rollup.read.getSlotAt([BigInt(wallSec)]); + const targetTimestamp = Number(await this.rollup.read.getTimestampForSlot([targetSlot])); + this.logger.warn(`L1 is more than 1 L2 slot behind wall time. Warping to slot ${targetSlot} boundary`); + await this.warpToTimestamp(targetTimestamp); } } @@ -151,8 +158,9 @@ export class AnvilTestWatcher { if (BigInt(currentSlot) === checkpointLog.slotNumber) { // The current slot has been filled, we should jump to the next slot. - await this.warpToTimestamp(nextSlotTimestamp); - this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`); + if (await this.warpToTimestamp(nextSlotTimestamp)) { + this.logger.info(`Slot ${currentSlot} was filled, jumped to next slot`); + } return; } @@ -180,9 +188,10 @@ export class AnvilTestWatcher { } if (realNow - this.unfilledSlotFirstSeen.realTime > 2000) { - await this.warpToTimestamp(nextSlotTimestamp); + if (await this.warpToTimestamp(nextSlotTimestamp)) { + this.logger.info(`Slot ${currentSlot} was missed with pending txs, jumped to next slot`); + } this.unfilledSlotFirstSeen = undefined; - this.logger.info(`Slot ${currentSlot} was missed with pending txs, jumped to next slot`); } return; @@ -192,19 +201,33 @@ export class AnvilTestWatcher { // Fallback: warp when the dateProvider time has passed the next slot timestamp. const currentTimestamp = this.dateProvider?.now() ?? 
Date.now(); if (currentTimestamp > nextSlotTimestamp * 1000) { - await this.warpToTimestamp(nextSlotTimestamp); - this.logger.info(`Slot ${currentSlot} was missed, jumped to next slot`); + if (await this.warpToTimestamp(nextSlotTimestamp)) { + this.logger.info(`Slot ${currentSlot} was missed, jumped to next slot`); + } } } catch { this.logger.error('mineIfSlotFilled failed'); } } - private async warpToTimestamp(timestamp: number) { + /** + * Warps L1 to `timestamp`, unless L1 is already at or past it. Returns true when a warp actually + * happened, false when skipped or on error. Callers use the return value to gate success logs. + */ + private async warpToTimestamp(timestamp: number): Promise { try { + // Anvil rejects evm_setNextBlockTimestamp values <= the current block's timestamp. The two + // watcher loops can race and pick targets a fraction of a second apart; skip here rather than + // letting the second one error out noisily. + const lastTimestamp = await this.cheatcodes.lastBlockTimestamp(); + if (timestamp <= lastTimestamp) { + return false; + } await this.cheatcodes.warp(timestamp, { resetBlockInterval: true }); + return true; } catch (e) { this.logger.error(`Failed to warp to timestamp ${timestamp}: ${e}`); + return false; } } } diff --git a/yarn-project/aztec/test/contract-with-tests/.gitignore b/yarn-project/aztec/test/contract-with-tests/.gitignore new file mode 100644 index 000000000000..2f7896d1d136 --- /dev/null +++ b/yarn-project/aztec/test/contract-with-tests/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/yarn-project/aztec/test/contract-with-tests/Nargo.toml b/yarn-project/aztec/test/contract-with-tests/Nargo.toml new file mode 100644 index 000000000000..699d1075abf8 --- /dev/null +++ b/yarn-project/aztec/test/contract-with-tests/Nargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = ["test_contract"] diff --git a/yarn-project/aztec/test/contract-with-tests/test_contract/Nargo.toml 
b/yarn-project/aztec/test/contract-with-tests/test_contract/Nargo.toml new file mode 100644 index 000000000000..27fe3c597d6e --- /dev/null +++ b/yarn-project/aztec/test/contract-with-tests/test_contract/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "test_contract" +type = "contract" + +[dependencies] diff --git a/yarn-project/aztec/test/contract-with-tests/test_contract/src/main.nr b/yarn-project/aztec/test/contract-with-tests/test_contract/src/main.nr new file mode 100644 index 000000000000..44161dfa9728 --- /dev/null +++ b/yarn-project/aztec/test/contract-with-tests/test_contract/src/main.nr @@ -0,0 +1,10 @@ +contract TestContract { + fn dummy() -> pub Field { + 0 + } +} + +#[test] +fn test_should_not_be_in_contract() { + assert(1 == 1); +} diff --git a/yarn-project/end-to-end/scripts/forward-compat/docker-compose.yml b/yarn-project/end-to-end/scripts/forward-compat/docker-compose.yml new file mode 100644 index 000000000000..ab4067c0cdf3 --- /dev/null +++ b/yarn-project/end-to-end/scripts/forward-compat/docker-compose.yml @@ -0,0 +1,60 @@ +# Forward-compatibility test infrastructure. +# +# Runs an OLD release of the Aztec stack (node + wallet) alongside NEW tests compiled from the current tree. The test +# suite sends new contract artifacts to the old wallet/PXE/simulator over RPC, verifying that old runtime code can +# still parse, simulate, and prove contracts compiled with the latest `aztec` command. 
+# +# Usage: +# OLD_STACK_VERSION=0.4.2 docker compose -f docker-compose.yml up --abort-on-container-exit +# +services: + fork: + image: aztecprotocol/build:3.0 + cpus: 1 + cpuset: ${CPU_LIST:-} + mem_limit: 2G + entrypoint: 'anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337' + + old-stack: + image: aztecprotocol/end-to-end:${OLD_STACK_VERSION:?set OLD_STACK_VERSION} + cpus: 4 + cpuset: ${CPU_LIST:-} + mem_limit: 8G + stop_grace_period: 60s + entrypoint: + - /bin/bash + - -c + - | + node --no-warnings /usr/src/yarn-project/end-to-end/dest/forward-compatibility/wallet_service.js + environment: + LOG_LEVEL: ${LOG_LEVEL:-verbose} + ETHEREUM_HOSTS: http://fork:8545 + L1_CHAIN_ID: 31337 + TEST_ACCOUNTS: 'true' + NODE_PORT: 8080 + WALLET_PORT: 8081 + FORCE_COLOR: ${FORCE_COLOR:-1} + depends_on: + - fork + + end-to-end: + extends: + file: ../docker-compose.yml + service: end-to-end + environment: + AZTEC_NODE_URL: http://old-stack:8080 + REMOTE_WALLET_URL: http://old-stack:8081 + # Override entrypoint to wait for old-stack wallet service instead of local-network. + entrypoint: > + bash -c ' + while ! nc -z old-stack 8081; do sleep 1; done; + setsid ./scripts/test_simple.sh $${TEST:-./src/forward-compatibility/} & + pid=$$! 
+ pgid=$$(($$(ps -o pgid= -p $$pid))) + trap "kill -SIGTERM -$$pgid" SIGTERM + while kill -0 -$$pgid 2>/dev/null; do sleep 0.1; done + wait $$pid + ' + depends_on: + - old-stack + - fork diff --git a/yarn-project/end-to-end/src/e2e_amm.test.ts b/yarn-project/end-to-end/src/e2e_amm.test.ts index b92c5e9dc683..1df2829bdc0a 100644 --- a/yarn-project/end-to-end/src/e2e_amm.test.ts +++ b/yarn-project/end-to-end/src/e2e_amm.test.ts @@ -12,6 +12,8 @@ import type { TestWallet } from './test-wallet/test_wallet.js'; const TIMEOUT = 120_000; +// TODO(F-560): Consider whether it makes sense to drop this +// https://linear.app/aztec-labs/issue/F-560/add-more-tests-to-forward-compatibility-testing describe('AMM', () => { jest.setTimeout(TIMEOUT); diff --git a/yarn-project/end-to-end/src/forward-compatibility/e2e_amm.test.ts b/yarn-project/end-to-end/src/forward-compatibility/e2e_amm.test.ts new file mode 100644 index 000000000000..d5c86371874b --- /dev/null +++ b/yarn-project/end-to-end/src/forward-compatibility/e2e_amm.test.ts @@ -0,0 +1,395 @@ +/** + * Forward-compatibility variant of the AMM e2e test. + * + * Connects to a remote wallet (old release) over JSON-RPC and deploys contracts compiled with the current Noir + * version. Exercises old loadContractArtifact, class-ID computation, ACIR simulator, entrypoint encoding, and wallet + * RPC deserialization against new artifacts. + * + * Uses only the standard {@link Wallet} interface (no TestWallet). Requires 4 pre-funded accounts from the wallet + * service. 
+ * + * ## How to run + * + * Terminal 1 — start an Anvil L1 fork: + * anvil --silent -p 8545 --host 0.0.0.0 --chain-id 31337 + * + * Terminal 2 — start the wallet service (from yarn-project/): + * ETHEREUM_HOSTS=http://localhost:8545 L1_CHAIN_ID=31337 TEST_ACCOUNTS=true \ + * node --no-warnings ./end-to-end/dest/forward-compatibility/wallet_service.js + * + * Terminal 3 — run this test (from yarn-project/): + * REMOTE_WALLET_URL=http://localhost:8081 \ + * yarn workspace @aztec/end-to-end test:e2e src/forward-compatibility/e2e_amm.test.ts + */ +import { AztecAddress } from '@aztec/aztec.js/addresses'; +import { Fr } from '@aztec/aztec.js/fields'; +import type { Logger } from '@aztec/aztec.js/log'; +import { createLogger } from '@aztec/aztec.js/log'; +import type { Wallet } from '@aztec/aztec.js/wallet'; +import { AMMContract } from '@aztec/noir-contracts.js/AMM'; +import type { TokenContract } from '@aztec/noir-contracts.js/Token'; + +import { jest } from '@jest/globals'; + +import { deployToken, mintTokensToPrivate } from '../fixtures/token_utils.js'; +import { createWalletClient } from './wallet_rpc_client.js'; + +const TIMEOUT = 300_000; + +const { REMOTE_WALLET_URL = 'http://localhost:8081' } = process.env; + +describe('forward-compatibility: AMM', () => { + jest.setTimeout(TIMEOUT); + + let logger: Logger; + + let wallet: Wallet; + + let adminAddress: AztecAddress; + let liquidityProviderAddress: AztecAddress; + let otherLiquidityProviderAddress: AztecAddress; + let swapperAddress: AztecAddress; + + let token0: TokenContract; + let token1: TokenContract; + let liquidityToken: TokenContract; + + let amm: AMMContract; + + const INITIAL_AMM_TOTAL_SUPPLY = 100000n; + + // We need a large token amount so that the swap fee (0.3%) is observable. 
+ const INITIAL_TOKEN_BALANCE = 1_000_000_000n; + + beforeAll(async () => { + logger = createLogger('e2e:forward-compatibility:amm'); + + wallet = createWalletClient(REMOTE_WALLET_URL); + + const accounts = (await wallet.getAccounts()).map(a => a.item); + expect(accounts.length).toBeGreaterThanOrEqual(4); + [adminAddress, liquidityProviderAddress, otherLiquidityProviderAddress, swapperAddress] = accounts; + + ({ contract: token0 } = await deployToken(wallet, adminAddress, 0n, logger)); + ({ contract: token1 } = await deployToken(wallet, adminAddress, 0n, logger)); + ({ contract: liquidityToken } = await deployToken(wallet, adminAddress, 0n, logger)); + + ({ contract: amm } = await AMMContract.deploy(wallet, token0.address, token1.address, liquidityToken.address).send({ + from: adminAddress, + })); + + // TODO(#9480): consider deploying the token by some factory when the AMM is deployed, and making the AMM be the + // minter there. + await liquidityToken.methods.set_minter(amm.address, true).send({ from: adminAddress }); + + // We mint the tokens to both liquidity providers and the swapper + await mintTokensToPrivate(token0, adminAddress, liquidityProviderAddress, INITIAL_TOKEN_BALANCE); + await mintTokensToPrivate(token1, adminAddress, liquidityProviderAddress, INITIAL_TOKEN_BALANCE); + + await mintTokensToPrivate(token0, adminAddress, otherLiquidityProviderAddress, INITIAL_TOKEN_BALANCE); + await mintTokensToPrivate(token1, adminAddress, otherLiquidityProviderAddress, INITIAL_TOKEN_BALANCE); + + // Note that the swapper only holds token0, not token1 + await mintTokensToPrivate(token0, adminAddress, swapperAddress, INITIAL_TOKEN_BALANCE); + }); + + describe('full flow', () => { + // This is an integration test in which we perform an entire run of the happy path. Thorough unit testing is not + // included. 
+ + type Balance = { + token0: bigint; + token1: bigint; + }; + + async function getAmmBalances(): Promise<Balance> { + return { + token0: (await token0.methods.balance_of_public(amm.address).simulate({ from: adminAddress })).result, + token1: (await token1.methods.balance_of_public(amm.address).simulate({ from: adminAddress })).result, + }; + } + + async function getWalletBalances(lp: AztecAddress): Promise<Balance> { + return { + token0: (await token0.methods.balance_of_private(lp).simulate({ from: lp })).result, + token1: (await token1.methods.balance_of_private(lp).simulate({ from: lp })).result, + }; + } + + function assertBalancesDelta(before: Balance, after: Balance, delta: Balance) { + expect(after.token0 - before.token0).toEqual(delta.token0); + expect(after.token1 - before.token1).toEqual(delta.token1); + } + + it('add initial liquidity', async () => { + const ammBalancesBefore = await getAmmBalances(); + const lpBalancesBefore = await getWalletBalances(liquidityProviderAddress); + + const amount0Max = lpBalancesBefore.token0; + const amount0Min = lpBalancesBefore.token0 / 2n; + const amount1Max = lpBalancesBefore.token1; + const amount1Min = lpBalancesBefore.token1 / 2n; + + // First we need to add authwits such that the AMM can transfer the tokens from the liquidity provider. These + // authwits are for the full amount, since the AMM will first transfer that to itself, and later refund any + // excess during public execution. 
+ const nonceForAuthwits = Fr.random(); + const token0Authwit = await wallet.createAuthWit(liquidityProviderAddress, { + caller: amm.address, + call: await token0.methods + .transfer_to_public_and_prepare_private_balance_increase( + liquidityProviderAddress, + amm.address, + amount0Max, + nonceForAuthwits, + ) + .getFunctionCall(), + }); + const token1Authwit = await wallet.createAuthWit(liquidityProviderAddress, { + caller: amm.address, + call: await token1.methods + .transfer_to_public_and_prepare_private_balance_increase( + liquidityProviderAddress, + amm.address, + amount1Max, + nonceForAuthwits, + ) + .getFunctionCall(), + }); + + const addLiquidityInteraction = amm.methods + .add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits) + .with({ authWitnesses: [token0Authwit, token1Authwit] }); + await addLiquidityInteraction.send({ from: liquidityProviderAddress }); + + const ammBalancesAfter = await getAmmBalances(); + const lpBalancesAfter = await getWalletBalances(liquidityProviderAddress); + + // Since the LP was the first one to enter the pool, the maximum amounts of tokens should have been deposited as + // there is no prior token ratio to follow. + assertBalancesDelta(ammBalancesBefore, ammBalancesAfter, { token0: amount0Max, token1: amount1Max }); + assertBalancesDelta(lpBalancesBefore, lpBalancesAfter, { token0: -amount0Max, token1: -amount1Max }); + + // Liquidity tokens should also be minted for the liquidity provider, as well as locked at the zero address. 
+ const expectedLiquidityTokens = (INITIAL_AMM_TOTAL_SUPPLY * 99n) / 100n; + expect( + ( + await liquidityToken.methods + .balance_of_private(liquidityProviderAddress) + .simulate({ from: liquidityProviderAddress }) + ).result, + ).toEqual(expectedLiquidityTokens); + expect((await liquidityToken.methods.total_supply().simulate({ from: adminAddress })).result).toEqual( + INITIAL_AMM_TOTAL_SUPPLY, + ); + }); + + it('add liquidity from another lp', async () => { + // This is the same as when we add liquidity for the first time, but we'll be going through a different code path + // since total supply for the liquidity token is non-zero + + const ammBalancesBefore = await getAmmBalances(); + const lpBalancesBefore = await getWalletBalances(otherLiquidityProviderAddress); + + const liquidityTokenSupplyBefore = (await liquidityToken.methods.total_supply().simulate({ from: adminAddress })) + .result; + + // The pool currently has the same number of tokens for token0 and token1, since that is the ratio the first + // liquidity provider used. Our maximum values have a different ratio (6:5 instead of 1:1), so we will end up + // adding the maximum amount that does result in the correct ratio (i.e. using amount1Max and a 1:1 ratio). + const amount0Max = (lpBalancesBefore.token0 * 6n) / 10n; + const amount0Min = (lpBalancesBefore.token0 * 4n) / 10n; + const amount1Max = (lpBalancesBefore.token1 * 5n) / 10n; + const amount1Min = (lpBalancesBefore.token1 * 4n) / 10n; + + const expectedAmount0 = amount1Max; + const expectedAmount1 = amount1Max; + + // We again add authwits such that the AMM can transfer the tokens from the liquidity provider. These authwits are + // for the full amount, since the AMM will first transfer that to itself, and later refund any excess during + // public execution. We expect for there to be excess since our maximum amounts do not have the same balance ratio + // as the pool currently holds. 
+ const nonceForAuthwits = Fr.random(); + const token1Authwit = await wallet.createAuthWit(otherLiquidityProviderAddress, { + caller: amm.address, + call: await token0.methods + .transfer_to_public_and_prepare_private_balance_increase( + otherLiquidityProviderAddress, + amm.address, + amount0Max, + nonceForAuthwits, + ) + .getFunctionCall(), + }); + const token2Authwit = await wallet.createAuthWit(otherLiquidityProviderAddress, { + caller: amm.address, + call: await token1.methods + .transfer_to_public_and_prepare_private_balance_increase( + otherLiquidityProviderAddress, + amm.address, + amount1Max, + nonceForAuthwits, + ) + .getFunctionCall(), + }); + + await amm.methods + .add_liquidity(amount0Max, amount1Max, amount0Min, amount1Min, nonceForAuthwits) + .send({ from: otherLiquidityProviderAddress, authWitnesses: [token1Authwit, token2Authwit] }); + + const ammBalancesAfter = await getAmmBalances(); + const lpBalancesAfter = await getWalletBalances(otherLiquidityProviderAddress); + + assertBalancesDelta(ammBalancesBefore, ammBalancesAfter, { token0: expectedAmount0, token1: expectedAmount1 }); + assertBalancesDelta(lpBalancesBefore, lpBalancesAfter, { token0: -expectedAmount0, token1: -expectedAmount1 }); + + // The liquidity token supply should have grown with the same proportion as the pool balances + const expectedTotalSupply = + (liquidityTokenSupplyBefore * (ammBalancesBefore.token0 + expectedAmount0)) / ammBalancesBefore.token0; + const expectedLiquidityTokens = expectedTotalSupply - INITIAL_AMM_TOTAL_SUPPLY; + + expect((await liquidityToken.methods.total_supply().simulate({ from: adminAddress })).result).toEqual( + expectedTotalSupply, + ); + expect( + ( + await liquidityToken.methods + .balance_of_private(otherLiquidityProviderAddress) + .simulate({ from: otherLiquidityProviderAddress }) + ).result, + ).toEqual(expectedLiquidityTokens); + }); + + it('swap exact tokens in', async () => { + const swapperBalancesBefore = await 
getWalletBalances(swapperAddress); + const ammBalancesBefore = await getAmmBalances(); + + // The token in will be token0 + const amountIn = swapperBalancesBefore.token0 / 10n; + + // Swaps also transfer tokens into the AMM, so we provide an authwit for the full amount in. + const nonceForAuthwits = Fr.random(); + const swapAuthwit = await wallet.createAuthWit(swapperAddress, { + caller: amm.address, + call: await token0.methods + .transfer_to_public(swapperAddress, amm.address, amountIn, nonceForAuthwits) + .getFunctionCall(), + }); + + // We compute the expected amount out and set it as the minimum. In a real-life scenario we'd choose a slightly + // lower value to account for slippage, but since we're the only actor interacting with the AMM we can afford to + // just pass the exact value. Of course any lower value would also suffice. + const amountOutMin = ( + await amm.methods + .get_amount_out_for_exact_in(ammBalancesBefore.token0, ammBalancesBefore.token1, amountIn) + .simulate({ from: swapperAddress }) + ).result; + + const swapExactTokensInteraction = amm.methods + .swap_exact_tokens_for_tokens(token0.address, token1.address, amountIn, amountOutMin, nonceForAuthwits) + .with({ authWitnesses: [swapAuthwit] }); + await swapExactTokensInteraction.send({ from: swapperAddress }); + + // We know exactly how many tokens we're supposed to get because we know nobody else interacted with the AMM + // before we did. + const swapperBalancesAfter = await getWalletBalances(swapperAddress); + assertBalancesDelta(swapperBalancesBefore, swapperBalancesAfter, { token0: -amountIn, token1: amountOutMin }); + }); + + it('swap exact tokens out', async () => { + const swapperBalancesBefore = await getWalletBalances(swapperAddress); + const ammBalancesBefore = await getAmmBalances(); + + // We want to undo the previous swap (except for the fees, which we can't recover), so we try to send the full + // token1 balance (since the swapper held no token1 tokens prior to the swap). 
However, we're using the method + // that receives an exact amount of tokens *out*, not in, so we can't quite specify this. What we do instead is + // query the contract for how much token0 we'd get if we sent our entire token1 balance, and then request exactly + // that amount. This would fail in a real-life scenario since we'd need to account for slippage, but we can do it + // in this test environment since there's nobody else interacting with the AMM. + const amountOut = ( + await amm.methods + .get_amount_out_for_exact_in(ammBalancesBefore.token1, ammBalancesBefore.token0, swapperBalancesBefore.token1) + .simulate({ from: swapperAddress }) + ).result; + const amountInMax = swapperBalancesBefore.token1; + + // Swaps also transfer tokens into the AMM, so we provide an authwit for the full amount in (any change will be + // later returned, though in this case there won't be any). + const nonceForAuthwits = Fr.random(); + const swapAuthwit = await wallet.createAuthWit(swapperAddress, { + caller: amm.address, + call: await token1.methods + .transfer_to_public_and_prepare_private_balance_increase( + swapperAddress, + amm.address, + amountInMax, + nonceForAuthwits, + ) + .getFunctionCall(), + }); + + await amm.methods + .swap_tokens_for_exact_tokens(token1.address, token0.address, amountOut, amountInMax, nonceForAuthwits) + .send({ from: swapperAddress, authWitnesses: [swapAuthwit] }); + + // Because nobody else interacted with the AMM, we know the amount in will be the maximum (i.e. the value the + // contract returned as what we'd need to send in order to get the amount out we requested). + const swapperBalancesAfter = await getWalletBalances(swapperAddress); + assertBalancesDelta(swapperBalancesBefore, swapperBalancesAfter, { token0: amountOut, token1: -amountInMax }); + + // We can also check that the swapper ends up with fewer tokens than they started with, since they had to pay + // swap fees during both swaps. 
+ expect(swapperBalancesAfter.token0).toBeLessThan(INITIAL_TOKEN_BALANCE); + }); + + it('remove liquidity', async () => { + // We now withdraw all of the tokens of one of the liquidity providers by burning their entire liquidity token + // balance. + const liquidityTokenBalance = ( + await liquidityToken.methods + .balance_of_private(otherLiquidityProviderAddress) + .simulate({ from: otherLiquidityProviderAddress }) + ).result; + + // Because private burning requires first transferring the tokens into the AMM, we again need to provide an + // authwit. + const nonceForAuthwits = Fr.random(); + const liquidityAuthwit = await wallet.createAuthWit(otherLiquidityProviderAddress, { + caller: amm.address, + call: await liquidityToken.methods + .transfer_to_public(otherLiquidityProviderAddress, amm.address, liquidityTokenBalance, nonceForAuthwits) + .getFunctionCall(), + }); + + // We don't bother setting the minimum amounts, since we know nobody else is interacting with the AMM. In a + // real-life scenario we'd need to choose sensible amounts to avoid losing value due to slippage. + const amount0Min = 1n; + const amount1Min = 1n; + + await amm.methods + .remove_liquidity(liquidityTokenBalance, amount0Min, amount1Min, nonceForAuthwits) + .send({ from: otherLiquidityProviderAddress, authWitnesses: [liquidityAuthwit] }); + + // The liquidity provider should have no remaining liquidity tokens, and should have recovered the value they + // originally deposited. + expect( + ( + await liquidityToken.methods + .balance_of_private(otherLiquidityProviderAddress) + .simulate({ from: otherLiquidityProviderAddress }) + ).result, + ).toEqual(0n); + + // We now assert that the liquidity provider ended up with more tokens than they began with. These extra tokens + // come from the swap fees paid during each of the swaps. While swap fees are always collected on the token in, + // the net fees will all be accrued on token0 due to how the swaps were orchestrated. 
This can be intuited by the + // fact that the swapper held no token1 initially, so it'd be impossible for them to cause an increase in the + // AMM's token1 balance. + // We perform this test using the second liquidity provider, since the first one did lose some percentage of the + // value of their deposit during setup when liquidity was locked by minting tokens for the zero address. + const lpBalancesAfter = await getWalletBalances(otherLiquidityProviderAddress); + expect(lpBalancesAfter.token0).toBeGreaterThan(INITIAL_TOKEN_BALANCE); + expect(lpBalancesAfter.token1).toEqual(INITIAL_TOKEN_BALANCE); + }); + }); +}); diff --git a/yarn-project/end-to-end/src/forward-compatibility/wallet_rpc_client.ts b/yarn-project/end-to-end/src/forward-compatibility/wallet_rpc_client.ts new file mode 100644 index 000000000000..74ba5a793e0b --- /dev/null +++ b/yarn-project/end-to-end/src/forward-compatibility/wallet_rpc_client.ts @@ -0,0 +1,14 @@ +import type { Wallet } from '@aztec/aztec.js/wallet'; +import { WalletSchema } from '@aztec/aztec.js/wallet'; +import { createSafeJsonRpcClient, makeFetch } from '@aztec/foundation/json-rpc/client'; + +/** + * Creates a JSON-RPC client that connects to a remote wallet service. + * The returned object implements the {@link Wallet} interface, proxying all calls over HTTP to the specified URL. 
+ */ +export function createWalletClient(url: string): Wallet { + return createSafeJsonRpcClient(url, WalletSchema, { + namespaceMethods: 'wallet', + fetch: makeFetch([1, 2, 3], false), + }); +} diff --git a/yarn-project/end-to-end/src/forward-compatibility/wallet_service.ts b/yarn-project/end-to-end/src/forward-compatibility/wallet_service.ts new file mode 100644 index 000000000000..674ef390f4db --- /dev/null +++ b/yarn-project/end-to-end/src/forward-compatibility/wallet_service.ts @@ -0,0 +1,104 @@ +#!/usr/bin/env -S node --no-warnings +/** + * Standalone entrypoint that spins up a local Aztec network (L1 + node) and exposes a {@link NodeEmbeddedWallet} over + * JSON-RPC. + * + * Intended for forward-compatibility testing: an **old** release image runs this script so that **new** tests can send + * new artifacts to old runtime code (loadContractArtifact, ACIR simulator, class-ID computation, entrypoint encoding, + * etc.). + */ +import { getSchnorrAccountContractAddress } from '@aztec/accounts/schnorr'; +import { getInitialTestAccountsData } from '@aztec/accounts/testing'; +import { createLocalNetwork } from '@aztec/aztec'; +import { Fr } from '@aztec/aztec.js/fields'; +import { WalletSchema } from '@aztec/aztec.js/wallet'; +import { GrumpkinScalar } from '@aztec/foundation/curves/grumpkin'; +import { createNamespacedSafeJsonRpcServer, startHttpRpcServer } from '@aztec/foundation/json-rpc/server'; +import { createLogger } from '@aztec/foundation/log'; +import { AztecNodeApiSchema } from '@aztec/stdlib/interfaces/client'; +import { EmbeddedWallet } from '@aztec/wallets/embedded'; + +const logger = createLogger('wallet-service'); + +const { ETHEREUM_HOSTS = 'http://localhost:8545', NODE_PORT = '8080', WALLET_PORT = '8081' } = process.env; + +async function main() { + const l1RpcUrls = ETHEREUM_HOSTS.split(',').map(url => url.trim()); + + // Some tests (e.g. AMM) need 4 accounts but only 3 are funded via genesis. 
Generate deterministic keys for a 4th + // account so we can compute its address before network startup and include it in genesis funding. We cannot do this + // in the test because the Wallet interface does not expose account creation functionality (only TestWallet exposes that + // but that's not used in forward compatibility testing). + const extraAccountSecret = Fr.fromHexString('0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'); + const extraAccountSalt = Fr.ZERO; + const extraAccountSigningKey = GrumpkinScalar.random(); + const extraAccountAddress = await getSchnorrAccountContractAddress( + extraAccountSecret, + extraAccountSalt, + extraAccountSigningKey, + ); + + logger.info('Starting wallet service...', { l1RpcUrls }); + + // createLocalNetwork deploys L1 contracts, starts the node, and optionally deploys funded test accounts (when + // TEST_ACCOUNTS=true via env). We are not proving anything, just like when the local network is started by + // the `aztecStart` function. The extra account address is passed via prefundAddresses so it gets fee juice at genesis. + const { node, stop: stopNetwork } = await createLocalNetwork( + { l1RpcUrls, realProofs: false, prefundAddresses: [extraAccountAddress.toString()] }, + logger.info, + ); + + // Create an ephemeral embedded wallet backed by the local node. + const wallet = await EmbeddedWallet.create(node, { ephemeral: true }); + + // Re-register the initial test accounts so they are available via wallet.getAccounts(). createLocalNetwork deploys + // them onchain but uses a temporary wallet that is then stopped. + // + // We use the non-lazy import path (@aztec/accounts/testing, not /lazy) to avoid the dynamic JSON import that is + // incompatible with Node.js import attribute enforcement. 
+ const testAccountsData = await getInitialTestAccountsData(); + const accounts = await Promise.all( + testAccountsData.map(({ secret, salt, signingKey }) => wallet.createSchnorrAccount(secret, salt, signingKey)), + ); + + // Register and deploy the 4th account. + const extraAccount = await wallet.createSchnorrAccount(extraAccountSecret, extraAccountSalt, extraAccountSigningKey); + const deployMethod = await extraAccount.getDeployMethod(); + await deployMethod.send({ from: accounts[0].address }); + + logger.info('Embedded wallet created', { + accounts: [...accounts, extraAccount].map(a => a.address.toString()), + }); + + // Contract artifacts are large, so allow generous body sizes for RPC requests. + const rpcOptions = { maxBodySizeBytes: '50mb' }; + + // Serve node RPC + const nodeRpcServer = createNamespacedSafeJsonRpcServer({ node: [node, AztecNodeApiSchema] }, rpcOptions); + const nodeHttpServer = await startHttpRpcServer(nodeRpcServer, { port: NODE_PORT }); + logger.info(`Node JSON-RPC server listening on port ${nodeHttpServer.port}`); + + // Serve wallet RPC + const walletRpcServer = createNamespacedSafeJsonRpcServer({ wallet: [wallet, WalletSchema] }, rpcOptions); + const walletHttpServer = await startHttpRpcServer(walletRpcServer, { port: WALLET_PORT }); + logger.info(`Wallet JSON-RPC server listening on port ${walletHttpServer.port}`); + + const shutdown = async () => { + logger.info('Shutting down...'); + nodeHttpServer.close(); + walletHttpServer.close(); + await wallet.stop(); + await stopNetwork(); + process.exit(0); + }; + + // eslint-disable-next-line @typescript-eslint/no-misused-promises + process.once('SIGINT', shutdown); + // eslint-disable-next-line @typescript-eslint/no-misused-promises + process.once('SIGTERM', shutdown); +} + +main().catch(err => { + logger.error('Wallet service failed to start', err); + process.exit(1); +}); diff --git a/yarn-project/key-store/src/key_store.ts b/yarn-project/key-store/src/key_store.ts index 
73016fc43a5f..86cab0cc9752 100644 --- a/yarn-project/key-store/src/key_store.ts +++ b/yarn-project/key-store/src/key_store.ts @@ -116,44 +116,46 @@ export class KeyStore { * @param contractAddress - The contract address to silo the secret key in the key validation request with. * @returns The key validation request. */ - public async getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { - const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkMHash); + public getKeyValidationRequest(pkMHash: Fr, contractAddress: AztecAddress): Promise { + return this.#db.transactionAsync(async () => { + const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkMHash); + + // Now we find the master public key for the account + const pkMBuffer = await this.#keys.getAsync(`${account.toString()}-${keyPrefix}pk_m`); + if (!pkMBuffer) { + throw new Error( + `Could not find ${keyPrefix}pk_m for account ${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, + ); + } - // Now we find the master public key for the account - const pkMBuffer = await this.#keys.getAsync(`${account.toString()}-${keyPrefix}pk_m`); - if (!pkMBuffer) { - throw new Error( - `Could not find ${keyPrefix}pk_m for account ${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, - ); - } + const pkM = Point.fromBuffer(pkMBuffer); - const pkM = Point.fromBuffer(pkMBuffer); - const computedPkMHash = await pkM.hash(); - if (!computedPkMHash.equals(pkMHash)) { - throw new Error(`Could not find ${keyPrefix}pkM for ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`); - } + // Now we find the secret key for the public key + const skStorageSuffix = secretKeyStorageSuffix(keyPrefix); + const skMBuffer = await this.#keys.getAsync(`${account.toString()}-${skStorageSuffix}`); + if (!skMBuffer) { + throw new Error( + `Could not find ${skStorageSuffix} for account 
${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, + ); + } - // Now we find the secret key for the public key - const skStorageSuffix = secretKeyStorageSuffix(keyPrefix); - const skMBuffer = await this.#keys.getAsync(`${account.toString()}-${skStorageSuffix}`); - if (!skMBuffer) { - throw new Error( - `Could not find ${skStorageSuffix} for account ${account.toString()} whose address was successfully obtained with ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`, - ); - } + const skM = GrumpkinScalar.fromBuffer(skMBuffer); - const skM = GrumpkinScalar.fromBuffer(skMBuffer); + // The remaining awaits are non-DB computations. They are safe because no further IDB operations follow them. + const computedPkMHash = await pkM.hash(); + if (!computedPkMHash.equals(pkMHash)) { + throw new Error(`Could not find ${keyPrefix}pkM for ${keyPrefix}pk_m_hash ${pkMHash.toString()}.`); + } - // We sanity check that it's possible to derive the public key from the secret key - const derivedPkM = await derivePublicKeyFromSecretKey(skM); - if (!derivedPkM.equals(pkM)) { - throw new Error(`Could not derive ${keyPrefix}pkM from ${keyPrefix}skM.`); - } + const derivedPkM = await derivePublicKeyFromSecretKey(skM); + if (!derivedPkM.equals(pkM)) { + throw new Error(`Could not derive ${keyPrefix}pkM from ${keyPrefix}skM.`); + } - // At last we silo the secret key and return the key validation request - const skApp = await computeAppSecretKey(skM, contractAddress, keyPrefix!); + const skApp = await computeAppSecretKey(skM, contractAddress, keyPrefix!); - return new KeyValidationRequest(pkM, skApp); + return new KeyValidationRequest(pkM, skApp); + }); } /** @@ -265,24 +267,30 @@ export class KeyStore { * @returns A Promise that resolves to sk_m. * @dev Used when feeding the sk_m to the kernel circuit for keys verification. 
*/ - public async getMasterSecretKey(pkM: PublicKey): Promise { - const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkM); + public getMasterSecretKey(pkM: PublicKey): Promise { + return this.#db.transactionAsync(async () => { + const [keyPrefix, account] = await this.getKeyPrefixAndAccount(pkM); + + const skStorageSuffix = secretKeyStorageSuffix(keyPrefix); + const secretKeyBuffer = await this.#keys.getAsync(`${account.toString()}-${skStorageSuffix}`); + if (!secretKeyBuffer) { + throw new Error( + `Could not find ${skStorageSuffix} for ${keyPrefix}pk_m ${pkM.toString()}. This should not happen.`, + ); + } - const skStorageSuffix = secretKeyStorageSuffix(keyPrefix); - const secretKeyBuffer = await this.#keys.getAsync(`${account.toString()}-${skStorageSuffix}`); - if (!secretKeyBuffer) { - throw new Error( - `Could not find ${skStorageSuffix} for ${keyPrefix}pk_m ${pkM.toString()}. This should not happen.`, - ); - } + const skM = GrumpkinScalar.fromBuffer(secretKeyBuffer); - const skM = GrumpkinScalar.fromBuffer(secretKeyBuffer); - const derivedpkM = await derivePublicKeyFromSecretKey(skM); - if (!derivedpkM.equals(pkM)) { - throw new Error(`Could not find ${skStorageSuffix} for ${keyPrefix}pkM ${pkM.toString()} in secret keys buffer.`); - } + // Non-DB computation — safe because no further IDB operations follow. + const derivedpkM = await derivePublicKeyFromSecretKey(skM); + if (!derivedpkM.equals(pkM)) { + throw new Error( + `Could not find ${skStorageSuffix} for ${keyPrefix}pkM ${pkM.toString()} in secret keys buffer.`, + ); + } - return Promise.resolve(skM); + return skM; + }); } /** @@ -291,15 +299,17 @@ export class KeyStore { * @param pkMHash - The master public key hash to look for. * @returns True if the account has a key with the given hash. 
*/ - public async accountHasKey(account: AztecAddress, pkMHash: Fr): Promise { - const pkMHashBuffer = serializeToBuffer(pkMHash); - for (const prefix of KEY_PREFIXES) { - const stored = await this.#keys.getAsync(`${account.toString()}-${prefix}pk_m_hash`); - if (stored && Buffer.from(stored).equals(pkMHashBuffer)) { - return true; + public accountHasKey(account: AztecAddress, pkMHash: Fr): Promise { + return this.#db.transactionAsync(async () => { + const pkMHashBuffer = serializeToBuffer(pkMHash); + for (const prefix of KEY_PREFIXES) { + const stored = await this.#keys.getAsync(`${account.toString()}-${prefix}pk_m_hash`); + if (stored && Buffer.from(stored).equals(pkMHashBuffer)) { + return true; + } } - } - return false; + return false; + }); } /** diff --git a/yarn-project/noir-contracts.js/scripts/generate-types.sh b/yarn-project/noir-contracts.js/scripts/generate-types.sh index 04ecfe03a107..ce75af14440b 100755 --- a/yarn-project/noir-contracts.js/scripts/generate-types.sh +++ b/yarn-project/noir-contracts.js/scripts/generate-types.sh @@ -12,7 +12,7 @@ fi mkdir -p $OUT_DIR # Extract contract names from Nargo.toml, excluding test contracts -CONTRACTS=$(grep "contracts/" ../../noir-projects/noir-contracts/Nargo.toml | grep -v "contracts/test/" | sed 's/.*contracts\/[^/]*\/\([^"]*\)_contract.*/\1/') +CONTRACTS=$(grep "contracts/" ../../noir-projects/noir-contracts/Nargo.toml | grep -v "contracts/test/" | sed 's/.*\/\([^/"]*\)_contract.*/\1/') # Check for .json files existence if ! 
ls ../../noir-projects/noir-contracts/target/*.json >/dev/null 2>&1; then diff --git a/yarn-project/noir-test-contracts.js/scripts/generate-types.sh b/yarn-project/noir-test-contracts.js/scripts/generate-types.sh index 53ad93b684a9..6890206f7e11 100755 --- a/yarn-project/noir-test-contracts.js/scripts/generate-types.sh +++ b/yarn-project/noir-test-contracts.js/scripts/generate-types.sh @@ -12,7 +12,7 @@ fi mkdir -p $OUT_DIR # Extract test contract names from Nargo.toml -TEST_CONTRACTS=$(grep "contracts/test/" ../../noir-projects/noir-contracts/Nargo.toml | sed 's/.*contracts\/test\/\([^"]*\)_contract.*/\1/') +TEST_CONTRACTS=$(grep "contracts/test/" ../../noir-projects/noir-contracts/Nargo.toml | sed 's/.*\/\([^/"]*\)_contract.*/\1/') # Check for .json files existence if ! ls ../../noir-projects/noir-contracts/target/*.json >/dev/null 2>&1; then diff --git a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts index d7f77efab6e5..c052adf3e163 100644 --- a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts +++ b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.test.ts @@ -116,6 +116,43 @@ describe('BlockSynchronizer', () => { expect(rollback).toHaveBeenCalledWith(3, 4); }); + describe('stop', () => { + it('resolves immediately when no sync is in progress', async () => { + await synchronizer.stop(); + expect(blockStream.stop).toHaveBeenCalled(); + }); + + it('waits for in-progress sync to complete', async () => { + let resolveSync!: () => void; + const syncBlocker = new Promise(resolve => { + resolveSync = resolve; + }); + blockStream.sync.mockReturnValue(syncBlocker); + aztecNode.getBlockHeader.mockResolvedValue((await L2Block.random(BlockNumber(0))).header); + + // Start a sync (don't await) + const syncPromise = synchronizer.sync(); + + // stop() should not resolve until the sync finishes + let stopped = false; + const stopPromise = 
synchronizer.stop().then(() => { + stopped = true; + }); + + // Give the event loop a tick + await new Promise(resolve => setTimeout(resolve, 10)); + expect(stopped).toBe(false); + + // Release the sync + resolveSync(); + await syncPromise; + await stopPromise; + + expect(stopped).toBe(true); + expect(blockStream.stop).toHaveBeenCalled(); + }); + }); + describe('syncChainTip config', () => { it('updates anchor on blocks-added when syncChainTip is proposed (default)', async () => { synchronizer = createSynchronizer({ syncChainTip: 'proposed' }); diff --git a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts index 2ea826e439aa..ea2f889d529e 100644 --- a/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts +++ b/yarn-project/pxe/src/block_synchronizer/block_synchronizer.ts @@ -1,5 +1,6 @@ import { BlockNumber } from '@aztec/foundation/branded-types'; import { type Logger, type LoggerBindings, createLogger } from '@aztec/foundation/log'; +import { SerialQueue } from '@aztec/foundation/queue'; import type { AztecAsyncKVStore } from '@aztec/kv-store'; import type { L2TipsKVStore } from '@aztec/kv-store/stores'; import { BlockHash, L2BlockStream, type L2BlockStreamEvent, type L2BlockStreamEventHandler } from '@aztec/stdlib/block'; @@ -20,6 +21,7 @@ import type { PrivateEventStore } from '../storage/private_event_store/private_e export class BlockSynchronizer implements L2BlockStreamEventHandler { private log: Logger; private isSyncing: Promise | undefined; + private readonly eventQueue = new SerialQueue(); protected readonly blockStream: L2BlockStream; constructor( @@ -35,6 +37,7 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { ) { this.log = createLogger('pxe:block_synchronizer', bindings); this.blockStream = this.createBlockStream(config); + this.eventQueue.start(); } protected createBlockStream(config: Partial): L2BlockStream { @@ -52,8 +55,12 @@ export class 
BlockSynchronizer implements L2BlockStreamEventHandler { ); } - /** Handle events emitted by the block stream. */ - public async handleBlockStreamEvent(event: L2BlockStreamEvent): Promise { + /** Handle events emitted by the block stream. Serialized to prevent concurrent mutations to anchor state. */ + public handleBlockStreamEvent(event: L2BlockStreamEvent): Promise { + return this.eventQueue.put(() => this.doHandleBlockStreamEvent(event)); + } + + private async doHandleBlockStreamEvent(event: L2BlockStreamEvent): Promise { await this.l2TipsStore.handleBlockStreamEvent(event); switch (event.type) { @@ -167,6 +174,13 @@ export class BlockSynchronizer implements L2BlockStreamEventHandler { } } + /** Stops the block synchronizer, waiting for any in-progress sync and queued events to complete. */ + public async stop() { + await this.isSyncing; + await this.blockStream.stop(); + await this.eventQueue.end(); + } + private async doSync() { let currentHeader; diff --git a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts index ec2793416b7e..2db7b6038255 100644 --- a/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts +++ b/yarn-project/pxe/src/contract_function_simulator/contract_function_simulator.ts @@ -208,7 +208,7 @@ export class ContractFunctionSimulator { } if (request.origin !== contractAddress) { - this.log.warn( + throw new Error( `Request origin does not match contract address in simulation. 
Request origin: ${request.origin}, contract address: ${contractAddress}`, ); } diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts index 95da879d711c..eaefd93aea47 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution.test.ts @@ -520,6 +520,40 @@ describe('Private Execution test suite', () => { }); }); + it('throws when request origin does not match contract address', async () => { + const contractAddress = await mockContractInstance(TestContractArtifact); + const differentAddress = await AztecAddress.random(); + contracts[differentAddress.toString()] = TestContractArtifact; + + const functionArtifact = getFunctionArtifactByName(TestContractArtifact, 'emit_array_as_encrypted_log'); + const selector = await FunctionSelector.fromNameAndParameters(functionArtifact.name, functionArtifact.parameters); + const hashedArguments = await HashedValues.fromArgs( + encodeArguments(functionArtifact, [Fr.ZERO, times(5, () => Fr.random()), owner, false]), + ); + + const txRequest = TxExecutionRequest.from({ + origin: differentAddress, + firstCallArgsHash: hashedArguments.hash, + functionSelector: selector, + txContext: TxContext.from(txContextFields), + argsOfCalls: [hashedArguments], + authWitnesses: [], + capsules: [], + salt: Fr.random(), + }); + + await expect( + acirSimulator.run(txRequest, { + contractAddress, + selector, + anchorBlockHeader, + senderForTags, + jobId: TEST_JOB_ID, + scopes: [owner], + }), + ).rejects.toThrow('Request origin does not match contract address'); + }); + describe('stateful test contract', () => { let contractAddress: AztecAddress; const mockFirstNullifier = new Fr(1111); diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts 
b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts index 62d52433018f..4899899afb56 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/private_execution_oracle.ts @@ -81,7 +81,7 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP private readonly taggingIndexCache: ExecutionTaggingIndexCache; private readonly senderTaggingStore: SenderTaggingStore; private totalPublicCalldataCount: number; - protected sideEffectCounter: number; + private readonly initialSideEffectCounter: number; private senderForTags?: AztecAddress; private readonly simulator?: CircuitSimulator; @@ -100,13 +100,18 @@ export class PrivateExecutionOracle extends UtilityExecutionOracle implements IP this.taggingIndexCache = args.taggingIndexCache; this.senderTaggingStore = args.senderTaggingStore; this.totalPublicCalldataCount = args.totalPublicCalldataCount ?? 0; - this.sideEffectCounter = args.sideEffectCounter ?? 0; + this.initialSideEffectCounter = args.sideEffectCounter ?? 
0; this.senderForTags = args.senderForTags; this.simulator = args.simulator; } public getPrivateContextInputs(): PrivateContextInputs { - return new PrivateContextInputs(this.callContext, this.anchorBlockHeader, this.txContext, this.sideEffectCounter); + return new PrivateContextInputs( + this.callContext, + this.anchorBlockHeader, + this.txContext, + this.initialSideEffectCounter, + ); } // We still need this function until we can get user-defined ordering of structs for fn arguments diff --git a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts index a50e1e4f01c9..450903a01f5f 100644 --- a/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts +++ b/yarn-project/pxe/src/contract_function_simulator/oracle/utility_execution_oracle.ts @@ -333,10 +333,9 @@ export class UtilityExecutionOracle implements IMiscOracle, IUtilityExecutionOra } /** - * Returns an auth witness for the given message hash. Checks on the list of transient witnesses - * for this transaction first, and falls back to the local database if not found. + * Returns an auth witness for the given message hash from the list of transient witnesses for this transaction. * @param messageHash - Hash of the message to authenticate. - * @returns Authentication witness for the requested message hash. + * @returns Authentication witness for the requested message hash, or undefined if not found. 
*/ public getAuthWitness(messageHash: Fr): Promise { return Promise.resolve(this.authWitnesses.find(w => w.requestHash.equals(messageHash))?.witness); diff --git a/yarn-project/pxe/src/contract_function_simulator/pick_notes.test.ts b/yarn-project/pxe/src/contract_function_simulator/pick_notes.test.ts index 2d75773ab5e9..90731bb38725 100644 --- a/yarn-project/pxe/src/contract_function_simulator/pick_notes.test.ts +++ b/yarn-project/pxe/src/contract_function_simulator/pick_notes.test.ts @@ -361,4 +361,58 @@ describe('getNotes', () => { [7n, 6n, 3n], ]); }); + + it('throws a clear error for an invalid comparator value', () => { + const notes = [createNote([1n])]; + const options = { + selects: [ + { + selector: { index: 0, offset: 0, length: 32 }, + value: new Fr(1n), + comparator: 99 as Comparator, + }, + ], + }; + + expect(() => pickNotes(notes, options)).toThrow('Invalid comparator value: 99'); + }); + + it('throws when selector.index is out of bounds', () => { + const notes = [createNote([1n, 2n])]; + const options = { + selects: [ + { + selector: { index: 5, offset: 0, length: 32 }, + value: new Fr(1n), + comparator: Comparator.EQ, + }, + ], + }; + + expect(() => pickNotes(notes, options)).toThrow(/index 5 out of bounds/); + }); + + it('throws when selector.index is out of bounds in a sort', () => { + const notes = [createNote([1n]), createNote([2n])]; + const options = { + sorts: [{ selector: { index: 3, offset: 0, length: 32 }, order: SortOrder.ASC }], + }; + + expect(() => pickNotes(notes, options)).toThrow(/index 3 out of bounds/); + }); + + it('throws when selector.offset + selector.length exceeds Fr buffer size', () => { + const notes = [createNote([1n])]; + const options = { + selects: [ + { + selector: { index: 0, offset: 30, length: 5 }, + value: new Fr(0n), + comparator: Comparator.EQ, + }, + ], + }; + + expect(() => pickNotes(notes, options)).toThrow(/exceeds Fr buffer size/); + }); }); diff --git 
a/yarn-project/pxe/src/contract_function_simulator/pick_notes.ts b/yarn-project/pxe/src/contract_function_simulator/pick_notes.ts index 21d181ca6fc4..82c4d82386a3 100644 --- a/yarn-project/pxe/src/contract_function_simulator/pick_notes.ts +++ b/yarn-project/pxe/src/contract_function_simulator/pick_notes.ts @@ -85,6 +85,14 @@ interface ContainsNote { } const selectPropertyFromPackedNoteContent = (noteData: Fr[], selector: PropertySelector): Fr => { + if (selector.index >= noteData.length) { + throw new Error(`Property selector index ${selector.index} out of bounds for note with ${noteData.length} fields`); + } + if (selector.offset + selector.length > Fr.SIZE_IN_BYTES) { + throw new Error( + `Property selector range (offset=${selector.offset}, length=${selector.length}) exceeds Fr buffer size of ${Fr.SIZE_IN_BYTES} bytes`, + ); + } const noteValueBuffer = noteData[selector.index].toBuffer(); // Noir's PropertySelector counts offset from the LSB (last byte of the big-endian buffer), // so offset=0,length=Fr.SIZE_IN_BYTES reads the entire field, and offset=0,length=1 reads the last byte. 
@@ -110,7 +118,11 @@ const selectNotes = (noteDatas: T[], selects: Select[]): [Comparator.GTE]: () => !noteValueFr.lt(value), }; - return comparatorSelector[comparator](); + const fn = comparatorSelector[comparator]; + if (!fn) { + throw new Error(`Invalid comparator value: ${comparator}`); + } + return fn(); }), ); diff --git a/yarn-project/pxe/src/events/private_event_filter_validator.test.ts b/yarn-project/pxe/src/events/private_event_filter_validator.test.ts index f98afd3d7638..9a1a4f543c4f 100644 --- a/yarn-project/pxe/src/events/private_event_filter_validator.test.ts +++ b/yarn-project/pxe/src/events/private_event_filter_validator.test.ts @@ -93,6 +93,38 @@ describe('PrivateEventFilterValidator', () => { ).toThrow(/toBlock must be strictly greater than fromBlock/); }); + it('caps toBlock to last synced block + 1 when it exceeds the synced range', () => { + const dataProviderFilter = validator.validate({ + contractAddress, + scopes: [scope], + fromBlock: INITIAL_L2_BLOCK_NUM, + toBlock: BlockNumber(lastKnownBlockNumber + 100), + }); + expect(dataProviderFilter).toEqual({ + contractAddress, + scopes: [scope], + fromBlock: INITIAL_L2_BLOCK_NUM, + toBlock: BlockNumber(lastKnownBlockNumber + 1), + }); + }); + + it('leaves filter unchanged when fromBlock is past the synced range', () => { + const fromBlock = BlockNumber(lastKnownBlockNumber + 5); + const toBlock = BlockNumber(lastKnownBlockNumber + 10); + const dataProviderFilter = validator.validate({ + contractAddress, + scopes: [scope], + fromBlock, + toBlock, + }); + expect(dataProviderFilter).toEqual({ + contractAddress, + scopes: [scope], + fromBlock, + toBlock, + }); + }); + it('preserves txHash', () => { let dataProviderFilter = validator.validate({ contractAddress, diff --git a/yarn-project/pxe/src/events/private_event_filter_validator.ts b/yarn-project/pxe/src/events/private_event_filter_validator.ts index b7805675f83e..feb1bfd8be33 100644 --- 
a/yarn-project/pxe/src/events/private_event_filter_validator.ts +++ b/yarn-project/pxe/src/events/private_event_filter_validator.ts @@ -1,11 +1,14 @@ import type { PrivateEventFilter } from '@aztec/aztec.js/wallet'; import { INITIAL_L2_BLOCK_NUM } from '@aztec/constants'; import { BlockNumber } from '@aztec/foundation/branded-types'; +import { createLogger } from '@aztec/foundation/log'; import type { PrivateEventStoreFilter } from '../storage/private_event_store/private_event_store.js'; export class PrivateEventFilterValidator { - constructor(private lastBlock: BlockNumber) {} + private readonly log = createLogger('pxe:private_event_filter_validator'); + + constructor(private readonly lastBlock: BlockNumber) {} validate(filter: PrivateEventFilter): PrivateEventStoreFilter { let { fromBlock, toBlock } = filter; @@ -35,6 +38,23 @@ export class PrivateEventFilterValidator { throw new Error('toBlock must be strictly greater than fromBlock'); } + // Cap the requested range to the synced block range. Without this, callers that pass a large + // toBlock (e.g. Number.MAX_SAFE_INTEGER as a "give me everything" idiom) would silently receive + // only the events that happen to be synced and believe they have complete coverage. + // We warn + cap rather than throw so callers don't need to query the last synced block before + // every request (which would also be unreliable, as the block can advance between the two calls). + const syncedUpperBound = BlockNumber(this.lastBlock + 1); + if (fromBlock >= syncedUpperBound) { + this.log.warn( + `Requested fromBlock ${fromBlock} is past last synced block ${this.lastBlock}; no events will be returned until PXE syncs further.`, + ); + } else if (toBlock > syncedUpperBound) { + this.log.warn( + `Requested toBlock ${toBlock} exceeds last synced block ${this.lastBlock}; capping to ${syncedUpperBound}. 
Retry once PXE is further synced for complete coverage.`, + ); + toBlock = syncedUpperBound; + } + return { contractAddress: filter.contractAddress, scopes: filter.scopes, diff --git a/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts index a30455750679..0274f596de86 100644 --- a/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.test.ts @@ -292,6 +292,25 @@ describe('Private Kernel Sequencer', () => { expect(proofCreator.simulateTail).toHaveBeenCalledTimes(1); }); + it('rounds the expiration timestamp down before passing it to the tail circuit', async () => { + // Raw offset 7265s (1h + 1805s) should round down to the 1-hour bucket. + const rawOffset = 7265n; + const expectedRoundedOffset = 7200n; + + const customOutput = simulateProofOutput(); + customOutput.publicInputs.expirationTimestamp = blockTimestamp + rawOffset; + proofCreator.simulateInit.mockResolvedValue(customOutput); + proofCreator.simulateReset.mockResolvedValue(customOutput); + + dependencies = { a: [] }; + const executionResult = createExecutionResult('a'); + await prove(executionResult); + + expect(proofCreator.simulateTail).toHaveBeenCalledTimes(1); + const tailInputs = proofCreator.simulateTail.mock.calls[0][0]; + expect(tailInputs.expirationTimestampUpperBound).toBe(blockTimestamp + expectedRoundedOffset); + }); + it('runs two consecutive inner resets when first reset output still overflows', async () => { // Set up: init output has MAX note hash read requests and key validation requests. 
proofCreator.simulateInit.mockResolvedValue( diff --git a/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts index d4652a36dce5..295f29752a17 100644 --- a/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_execution_prover.ts @@ -33,6 +33,7 @@ import { } from '@aztec/stdlib/tx'; import { VerificationKeyAsFields, VerificationKeyData, VkData } from '@aztec/stdlib/vks'; +import { computeTxExpirationTimestamp } from './hints/compute_tx_expiration_timestamp.js'; import { PrivateKernelResetPrivateInputsBuilder } from './hints/private_kernel_reset_private_inputs_builder.js'; import type { PrivateKernelOracle } from './private_kernel_oracle.js'; @@ -267,15 +268,9 @@ export class PrivateKernelExecutionProver { // TODO: Enable padding once we better understand the final amounts to pad to. const paddedSideEffectAmounts = PaddedSideEffectAmounts.empty(); - // Use the aggregated expirationTimestamp set throughout the tx execution. - // TODO: Call `computeTxExpirationTimestamp` to round the value down and reduce precision, improving privacy. - const expirationTimestampUpperBound = previousKernelData.publicInputs.expirationTimestamp; - const anchorBlockTimestamp = previousKernelData.publicInputs.constants.anchorBlockHeader.globalVariables.timestamp; - if (expirationTimestampUpperBound <= anchorBlockTimestamp) { - throw new Error( - `Include-by timestamp must be greater than the anchor block timestamp. Anchor block timestamp: ${anchorBlockTimestamp}. Include-by timestamp: ${expirationTimestampUpperBound}.`, - ); - } + // Round the aggregated expirationTimestamp down to reduce precision and avoid leaking which private + // functions were called via their exact expiration offsets. 
+ const expirationTimestampUpperBound = computeTxExpirationTimestamp(previousKernelData.publicInputs); const privateInputs = new PrivateKernelTailCircuitPrivateInputs( previousKernelData, diff --git a/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts b/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts index 6bbf8983af94..52e3ceb10327 100644 --- a/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts +++ b/yarn-project/pxe/src/private_kernel/private_kernel_oracle.ts @@ -7,13 +7,13 @@ import { getVKIndex, getVKSiblingPath } from '@aztec/noir-protocol-circuits-type import { ProtocolContractAddress } from '@aztec/protocol-contracts'; import type { FunctionSelector } from '@aztec/stdlib/abi'; import type { AztecAddress } from '@aztec/stdlib/aztec-address'; -import { BlockHash } from '@aztec/stdlib/block'; import { type ContractInstanceWithAddress, computeSaltedInitializationHash } from '@aztec/stdlib/contract'; import { DelayedPublicMutableValues, DelayedPublicMutableValuesWithHash } from '@aztec/stdlib/delayed-public-mutable'; import { computePublicDataTreeLeafSlot } from '@aztec/stdlib/hash'; import type { AztecNode } from '@aztec/stdlib/interfaces/client'; import { UpdatedClassIdHints } from '@aztec/stdlib/kernel'; import type { NullifierMembershipWitness } from '@aztec/stdlib/trees'; +import type { BlockHeader } from '@aztec/stdlib/tx'; import type { VerificationKeyAsFields } from '@aztec/stdlib/vks'; import type { ContractStore } from '../storage/contract_store/contract_store.js'; @@ -26,7 +26,7 @@ export class PrivateKernelOracle { private contractStore: ContractStore, private keyStore: KeyStore, private node: AztecNode, - private blockHash: BlockHash, + private blockHeader: BlockHeader, ) {} /** Retrieves the preimage of a contract address from the registered contract instances db. */ @@ -80,22 +80,20 @@ export class PrivateKernelOracle { } /** Returns a membership witness with the sibling path and leaf index in our note hash tree. 
*/ - getNoteHashMembershipWitness(noteHash: Fr): Promise | undefined> { - return this.node.getNoteHashMembershipWitness(this.blockHash, noteHash); + async getNoteHashMembershipWitness( + noteHash: Fr, + ): Promise | undefined> { + return this.node.getNoteHashMembershipWitness(await this.blockHeader.hash(), noteHash); } /** Returns a membership witness with the sibling path and leaf index in our nullifier indexed merkle tree. */ - getNullifierMembershipWitness(nullifier: Fr): Promise { - return this.node.getNullifierMembershipWitness(this.blockHash, nullifier); + async getNullifierMembershipWitness(nullifier: Fr): Promise { + return this.node.getNullifierMembershipWitness(await this.blockHeader.hash(), nullifier); } /** Returns the root of our note hash merkle tree. */ - async getNoteHashTreeRoot(): Promise { - const header = await this.node.getBlockHeader(this.blockHash); - if (!header) { - throw new Error(`No block header found for block hash ${this.blockHash}`); - } - return header.state.partial.noteHashTree.root; + getNoteHashTreeRoot(): Fr { + return this.blockHeader.state.partial.noteHashTree.root; } /** @@ -126,14 +124,16 @@ export class PrivateKernelOracle { ProtocolContractAddress.ContractInstanceRegistry, delayedPublicMutableHashSlot, ); - const updatedClassIdWitness = await this.node.getPublicDataWitness(this.blockHash, hashLeafSlot); + const blockHash = await this.blockHeader.hash(); + + const updatedClassIdWitness = await this.node.getPublicDataWitness(blockHash, hashLeafSlot); if (!updatedClassIdWitness) { throw new Error(`No public data tree witness found for ${hashLeafSlot}`); } const readStorage = (storageSlot: Fr) => - this.node.getPublicStorageAt(this.blockHash, ProtocolContractAddress.ContractInstanceRegistry, storageSlot); + this.node.getPublicStorageAt(blockHash, ProtocolContractAddress.ContractInstanceRegistry, storageSlot); const delayedPublicMutableValues = await DelayedPublicMutableValues.readFromTree( delayedPublicMutableSlot, readStorage, 
diff --git a/yarn-project/pxe/src/pxe.ts b/yarn-project/pxe/src/pxe.ts index 3ab2f8fa74ff..74f495940b99 100644 --- a/yarn-project/pxe/src/pxe.ts +++ b/yarn-project/pxe/src/pxe.ts @@ -484,8 +484,7 @@ export class PXE { config: PrivateKernelExecutionProverConfig, ): Promise> { const anchorBlockHeader = await this.anchorBlockStore.getBlockHeader(); - const anchorBlockHash = await anchorBlockHeader.hash(); - const kernelOracle = new PrivateKernelOracle(this.contractStore, this.keyStore, this.node, anchorBlockHash); + const kernelOracle = new PrivateKernelOracle(this.contractStore, this.keyStore, this.node, anchorBlockHeader); const kernelTraceProver = new PrivateKernelExecutionProver( kernelOracle, proofCreator, @@ -579,8 +578,8 @@ export class PXE { if (wasAdded) { this.log.info(`Added sender:\n ${sender.toString()}`); // Wipe the entire sync cache: the new sender's tagged logs could contain notes/events for any contract, so - // all contracts must re-sync to discover them. - this.contractSyncService.wipe(); + // all contracts must re-sync to discover them. Queued to avoid wiping while a job is in flight. + await this.#putInJobQueue(() => Promise.resolve(this.contractSyncService.wipe())); } else { this.log.info(`Sender:\n "${sender.toString()}"\n already registered.`); } @@ -1176,7 +1175,8 @@ export class PXE { /** * Stops the PXE's job queue. 
*/ - public stop(): Promise { - return this.jobQueue.end(); + public async stop(): Promise { + await this.jobQueue.end(); + await this.blockStateSynchronizer.stop(); } } diff --git a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts index 8df1f6dbad36..265f09e555d6 100644 --- a/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts +++ b/yarn-project/pxe/src/storage/anchor_block_store/anchor_block_store.ts @@ -23,7 +23,7 @@ export class AnchorBlockStore { } async getBlockHeader(): Promise { - const headerBuffer = await this.#synchronizedHeader.getAsync(); + const headerBuffer = await this.#store.transactionAsync(() => this.#synchronizedHeader.getAsync()); if (!headerBuffer) { throw new Error(`Trying to get block header with a not-yet-synchronized PXE - this should never happen`); } diff --git a/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts b/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts index cf977a60d977..480f4e14e6ea 100644 --- a/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts +++ b/yarn-project/pxe/src/storage/capsule_store/capsule_store.ts @@ -148,7 +148,17 @@ export class CapsuleStore implements StagedStore { * @param slot - The slot in the database to read. * @returns The stored data or `null` if no data is stored under the slot. */ - async getCapsule(contractAddress: AztecAddress, slot: Fr, jobId: string, scope: AztecAddress): Promise { + getCapsule(contractAddress: AztecAddress, slot: Fr, jobId: string, scope: AztecAddress): Promise { + return this.#store.transactionAsync(() => this.#getCapsuleInternal(contractAddress, slot, jobId, scope)); + } + + /** Same as getCapsule but without its own transaction, for use inside an existing transactionAsync. 
*/ + async #getCapsuleInternal( + contractAddress: AztecAddress, + slot: Fr, + jobId: string, + scope: AztecAddress, + ): Promise { const dataBuffer = await this.#getFromStage(jobId, dbSlotToKey(contractAddress, slot, scope)); if (!dataBuffer) { this.logger.trace(`Data not found for contract ${contractAddress.toString()} and slot ${slot.toString()}`); @@ -240,7 +250,7 @@ export class CapsuleStore implements StagedStore { // and not using a transaction here would heavily impact performance. return this.#store.transactionAsync(async () => { // Load current length, defaulting to 0 if not found - const lengthData = await this.getCapsule(contractAddress, baseSlot, jobId, scope); + const lengthData = await this.#getCapsuleInternal(contractAddress, baseSlot, jobId, scope); const currentLength = lengthData ? lengthData[0].toNumber() : 0; // Store each capsule at consecutive slots after baseSlot + 1 + currentLength @@ -263,14 +273,14 @@ export class CapsuleStore implements StagedStore { // of jobs: different calls running concurrently on the same contract may cause trouble. return this.#store.transactionAsync(async () => { // Load length, defaulting to 0 if not found - const maybeLength = await this.getCapsule(contractAddress, baseSlot, jobId, scope); + const maybeLength = await this.#getCapsuleInternal(contractAddress, baseSlot, jobId, scope); const length = maybeLength ? 
maybeLength[0].toBigInt() : 0n; const values: Fr[][] = []; // Read each capsule at consecutive slots after baseSlot for (let i = 0; i < length; i++) { - const currentValue = await this.getCapsule(contractAddress, arraySlot(baseSlot, i), jobId, scope); + const currentValue = await this.#getCapsuleInternal(contractAddress, arraySlot(baseSlot, i), jobId, scope); if (currentValue == undefined) { throw new Error( `Expected non-empty value at capsule array in base slot ${baseSlot} at index ${i} for contract ${contractAddress}`, @@ -295,7 +305,7 @@ export class CapsuleStore implements StagedStore { // of jobs: different calls running concurrently on the same contract may cause trouble. return this.#store.transactionAsync(async () => { // Load current length, defaulting to 0 if not found - const maybeLength = await this.getCapsule(contractAddress, baseSlot, jobId, scope); + const maybeLength = await this.#getCapsuleInternal(contractAddress, baseSlot, jobId, scope); const originalLength = maybeLength ? 
maybeLength[0].toNumber() : 0; // Set the new length diff --git a/yarn-project/pxe/src/storage/contract_store/contract_store.ts b/yarn-project/pxe/src/storage/contract_store/contract_store.ts index 5b7f36a88c07..3c9cccccb864 100644 --- a/yarn-project/pxe/src/storage/contract_store/contract_store.ts +++ b/yarn-project/pxe/src/storage/contract_store/contract_store.ts @@ -168,12 +168,14 @@ export class ContractStore { } async addContractInstance(contract: ContractInstanceWithAddress): Promise { - this.#contractClassIdMap.set(contract.address.toString(), contract.currentContractClassId); + await this.#store.transactionAsync(async () => { + await this.#contractInstances.set( + contract.address.toString(), + new SerializableContractInstance(contract).toBuffer(), + ); + }); - await this.#contractInstances.set( - contract.address.toString(), - new SerializableContractInstance(contract).toBuffer(), - ); + this.#contractClassIdMap.set(contract.address.toString(), contract.currentContractClassId); } // Private getters @@ -246,7 +248,7 @@ export class ContractStore { contractClassId: Fr, ): Promise<(ContractClassWithId & ContractClassIdPreimage) | undefined> { const key = contractClassId.toString(); - const buf = await this.#contractClassData.getAsync(key); + const buf = await this.#store.transactionAsync(() => this.#contractClassData.getAsync(key)); if (!buf) { return undefined; } diff --git a/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts b/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts index cfdcea692655..9bbdb98729f3 100644 --- a/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts +++ b/yarn-project/pxe/src/storage/private_event_store/private_event_store.test.ts @@ -669,7 +669,7 @@ describe('PrivateEventStore', () => { txIndexInBlock: 0, eventIndexInTx: 0, }, - 'test', + 'before-rollback', ); await privateEventStore.commit('before-rollback'); @@ -747,6 +747,33 @@ 
describe('PrivateEventStore', () => { expect(events.length).toBe(1); expect(events[0].packedEvent).toEqual(msgContent1); }); + + it('throws when rollback is called while jobs are running', async () => { + await privateEventStore.storePrivateEventLog( + eventSelector, + randomness, + msgContent1, + Fr.random(), + { + contractAddress, + scope, + txHash: TxHash.random(), + l2BlockNumber: BlockNumber(100), + l2BlockHash, + txIndexInBlock: 0, + eventIndexInTx: 0, + }, + 'uncommitted-job', + ); + + await expect(privateEventStore.rollback(0, 10)).rejects.toThrow( + 'PXE private event store rollback is not allowed while jobs are running', + ); + + await privateEventStore.discardStaged('uncommitted-job'); + + await expect(privateEventStore.rollback(0, 10)).resolves.not.toThrow(); + }); }); describe('staging', () => { diff --git a/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts b/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts index 804178b882af..3518a7205abb 100644 --- a/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts +++ b/yarn-project/pxe/src/storage/private_event_store/private_event_store.ts @@ -234,6 +234,10 @@ export class PrivateEventStore implements StagedStore { * IMPORTANT: This method must be called within a transaction to ensure atomicity. */ public async rollback(blockNumber: number, synchedBlockNumber: number): Promise { + if (this.#eventsForJob.size > 0) { + throw new Error('PXE private event store rollback is not allowed while jobs are running'); + } + // First pass: collect all event IDs for all blocks, starting reads during iteration to keep tx alive. 
const eventsByBlock: Map }[]> = new Map(); diff --git a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts index 1216b02724cd..feed4773fa54 100644 --- a/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts +++ b/yarn-project/pxe/src/tagging/recipient_sync/load_private_logs_for_sender_recipient_pair.ts @@ -113,7 +113,9 @@ export async function loadPrivateLogsForSenderRecipientPair( if (highestAgedIndex !== undefined && highestAgedIndex > highestFinalizedIndex) { // This is just a sanity check as this should never happen. - throw new Error('Highest aged index lower than highest finalized index invariant violated'); + throw new Error( + `Highest aged index (${highestAgedIndex}) must not exceed highest finalized index (${highestFinalizedIndex})`, + ); } await taggingStore.updateHighestFinalizedIndex(secret, highestFinalizedIndex, jobId); diff --git a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts index bf7422b9fb2f..70f5faf4b291 100644 --- a/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts +++ b/yarn-project/wallet-sdk/src/base-wallet/base_wallet.ts @@ -43,6 +43,7 @@ import type { AuthWitness } from '@aztec/stdlib/auth-witness'; import { AztecAddress } from '@aztec/stdlib/aztec-address'; import { type ContractInstanceWithAddress, + type NodeInfo, computePartialAddress, getContractClassFromArtifact, } from '@aztec/stdlib/contract'; @@ -107,6 +108,9 @@ export type CompleteFeeOptionsConfig = { export abstract class BaseWallet implements Wallet { protected minFeePadding = 0.5; protected cancellableTransactions = false; + // A wallet is instantiated for a particular chain, so chain info never changes during its lifetime. + // We cache it here because getChainInfo is called frequently (every tx simulation, send, auth wit, etc.). 
+ private nodeInfoPromise: Promise | undefined; // Protected because we want to force wallets to instantiate their own PXE. protected constructor( @@ -138,7 +142,10 @@ export abstract class BaseWallet implements Wallet { } async getChainInfo(): Promise { - const { l1ChainId, rollupVersion } = await this.aztecNode.getNodeInfo(); + if (!this.nodeInfoPromise) { + this.nodeInfoPromise = this.aztecNode.getNodeInfo(); + } + const { l1ChainId, rollupVersion } = await this.nodeInfoPromise; return { chainId: new Fr(l1ChainId), version: new Fr(rollupVersion) }; } diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index d6268a7bcc68..31ddde564a7a 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -910,6 +910,7 @@ __metadata: "@aztec/validator-ha-signer": "workspace:^" "@aztec/wallets": "workspace:^" "@aztec/world-state": "workspace:^" + "@iarna/toml": "npm:^2.2.5" "@jest/globals": "npm:^30.0.0" "@types/chalk": "npm:^2.2.0" "@types/jest": "npm:^30.0.0"