diff --git a/.github/release-please/manifest.json b/.github/release-please/manifest.json index 0ba598bb0516..c64c1f4ecc4f 100644 --- a/.github/release-please/manifest.json +++ b/.github/release-please/manifest.json @@ -1,5 +1,5 @@ { - "core": "26.1.0", - "prover": "17.1.1", + "core": "26.2.1", + "prover": "18.0.0", "zkstack_cli": "0.1.2" } diff --git a/.github/workflows/ci-core-reusable.yml b/.github/workflows/ci-core-reusable.yml index 6be2a05e52e2..3a399cc8f738 100644 --- a/.github/workflows/ci-core-reusable.yml +++ b/.github/workflows/ci-core-reusable.yml @@ -72,7 +72,7 @@ jobs: ci_run zkstack dev contracts - name: Download compilers for contract verifier tests - run: ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era + run: ci_run zkstack contract-verifier init --zksolc-version=v1.5.10 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era - name: Rust unit tests run: | @@ -431,7 +431,7 @@ jobs: - name: Initialize Contract verifier run: | - ci_run zkstack contract-verifier init --zksolc-version=v1.5.3 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era + ci_run zkstack contract-verifier init --zksolc-version=v1.5.10 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only --chain era ci_run zkstack contract-verifier run --chain era &> ${{ env.SERVER_LOGS_DIR }}/contract-verifier-rollup.log & ci_run zkstack contract-verifier wait --chain era --verbose @@ -456,7 +456,7 @@ jobs: - name: Set up attester committee for the consensus chain run: | ci_run zkstack consensus wait-for-registry --ignore-prerequisites --verbose --chain consensus - ci_run zkstack consensus set-attester-committee --chain consensus --from-genesis &> ${{ 
env.INTEGRATION_TESTS_LOGS_DIR }}/consensus.log + ci_run zkstack consensus set-attester-committee --chain consensus --ignore-prerequisites --verbose --from-genesis &> ${{ env.INTEGRATION_TESTS_LOGS_DIR }}/consensus.log - name: Run integration tests run: | diff --git a/core/CHANGELOG.md b/core/CHANGELOG.md index 256c83058ee2..42ddd01a6775 100644 --- a/core/CHANGELOG.md +++ b/core/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [26.2.1](https://github.com/matter-labs/zksync-era/compare/core-v26.2.0...core-v26.2.1) (2025-01-28) + + +### Bug Fixes + +* add . to readme ([#3538](https://github.com/matter-labs/zksync-era/issues/3538)) ([512dd45](https://github.com/matter-labs/zksync-era/commit/512dd459307e57762dd4cc2c78ff4151634b6941)) + +## [26.2.0](https://github.com/matter-labs/zksync-era/compare/core-v26.1.0...core-v26.2.0) (2025-01-24) + + +### Features + +* Compressor optimizations ([#3476](https://github.com/matter-labs/zksync-era/issues/3476)) ([3e931be](https://github.com/matter-labs/zksync-era/commit/3e931be6bddaacbd7d029c537db03a3c191fdc21)) + + +### Bug Fixes + +* **en:** better defaults, i.e. 
the same as used by main node ([#3521](https://github.com/matter-labs/zksync-era/issues/3521)) ([2b5fe98](https://github.com/matter-labs/zksync-era/commit/2b5fe983acf78f73fb6e90a6a7d041e8aef1c595)) +* **en:** Fix race condition in EN storage initialization ([#3515](https://github.com/matter-labs/zksync-era/issues/3515)) ([c916797](https://github.com/matter-labs/zksync-era/commit/c916797d49d636c9e642264786d4124ebd338ec3)) +* JSON proof serialization ([#3514](https://github.com/matter-labs/zksync-era/issues/3514)) ([516e521](https://github.com/matter-labs/zksync-era/commit/516e5210ed70b25a15a68a58c8065331aab542e0)) + ## [26.1.0](https://github.com/matter-labs/zksync-era/compare/core-v26.0.0...core-v26.1.0) (2025-01-21) diff --git a/core/Cargo.lock b/core/Cargo.lock index 1b251bec741d..e8943b2a369a 100644 --- a/core/Cargo.lock +++ b/core/Cargo.lock @@ -1233,7 +1233,7 @@ checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" [[package]] name = "block_reverter" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.23", @@ -2233,7 +2233,7 @@ dependencies = [ [[package]] name = "custom_genesis_export" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -3485,7 +3485,7 @@ dependencies = [ [[package]] name = "genesis_generator" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.23", @@ -5156,7 +5156,7 @@ checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" [[package]] name = "loadnext" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -5343,7 +5343,7 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "merkle_tree_consistency_checker" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ 
"anyhow", "clap 4.5.23", @@ -7984,7 +7984,7 @@ dependencies = [ [[package]] name = "selector_generator" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.23", @@ -8644,7 +8644,7 @@ dependencies = [ [[package]] name = "snapshots_creator" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "futures 0.3.31", @@ -9393,7 +9393,7 @@ dependencies = [ [[package]] name = "system-constants-generator" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "codegen", "once_cell", @@ -10373,7 +10373,7 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "verified_sources_fetcher" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "serde_json", @@ -10430,7 +10430,7 @@ dependencies = [ [[package]] name = "vm-benchmark" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "assert_matches", "criterion", @@ -11293,7 +11293,7 @@ dependencies = [ [[package]] name = "zksync_base_token_adjuster" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11315,7 +11315,7 @@ dependencies = [ [[package]] name = "zksync_basic_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -11360,7 +11360,7 @@ dependencies = [ [[package]] name = "zksync_block_reverter" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11384,7 +11384,7 @@ dependencies = [ [[package]] name = "zksync_circuit_breaker" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11398,7 +11398,7 @@ dependencies = [ [[package]] name = "zksync_commitment_generator" -version = 
"26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "circuit_encodings", @@ -11446,7 +11446,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -11619,7 +11619,7 @@ dependencies = [ [[package]] name = "zksync_consistency_checker" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11644,7 +11644,7 @@ dependencies = [ [[package]] name = "zksync_contract_verification_server" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "axum 0.7.9", @@ -11663,7 +11663,7 @@ dependencies = [ [[package]] name = "zksync_contract_verifier" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.23", @@ -11680,7 +11680,7 @@ dependencies = [ [[package]] name = "zksync_contract_verifier_lib" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11712,7 +11712,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "bincode", "envy", @@ -11726,7 +11726,7 @@ dependencies = [ [[package]] name = "zksync_core_leftovers" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "ctrlc", @@ -11740,7 +11740,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "blake2 0.10.6", @@ -11768,7 +11768,7 @@ dependencies = [ [[package]] name = "zksync_da_client" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11777,7 +11777,7 @@ dependencies = [ [[package]] name = 
"zksync_da_clients" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -11821,7 +11821,7 @@ dependencies = [ [[package]] name = "zksync_da_dispatcher" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -11838,7 +11838,7 @@ dependencies = [ [[package]] name = "zksync_dal" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bigdecimal", @@ -11848,6 +11848,7 @@ dependencies = [ "itertools 0.10.5", "prost 0.12.6", "rand 0.8.5", + "rayon", "serde", "serde_json", "sqlx", @@ -11874,7 +11875,7 @@ dependencies = [ [[package]] name = "zksync_db_connection" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11892,7 +11893,7 @@ dependencies = [ [[package]] name = "zksync_env_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "envy", @@ -11904,7 +11905,7 @@ dependencies = [ [[package]] name = "zksync_eth_client" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "assert_matches", "async-trait", @@ -11926,7 +11927,7 @@ dependencies = [ [[package]] name = "zksync_eth_sender" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -11956,7 +11957,7 @@ dependencies = [ [[package]] name = "zksync_eth_signer" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "async-trait", "rlp", @@ -11967,7 +11968,7 @@ dependencies = [ [[package]] name = "zksync_eth_watch" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-recursion", @@ -11994,7 +11995,7 @@ dependencies = [ [[package]] name = "zksync_external_node" -version = "26.1.0-non-semver-compat" +version = 
"26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12048,7 +12049,7 @@ dependencies = [ [[package]] name = "zksync_external_price_api" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12069,7 +12070,7 @@ dependencies = [ [[package]] name = "zksync_external_proof_integration_api" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12115,7 +12116,7 @@ dependencies = [ [[package]] name = "zksync_health_check" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "assert_matches", "async-trait", @@ -12130,7 +12131,7 @@ dependencies = [ [[package]] name = "zksync_house_keeper" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12162,7 +12163,7 @@ dependencies = [ [[package]] name = "zksync_l1_contract_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "circuit_definitions", @@ -12183,7 +12184,7 @@ dependencies = [ [[package]] name = "zksync_logs_bloom_backfill" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "tokio", @@ -12195,7 +12196,7 @@ dependencies = [ [[package]] name = "zksync_mempool" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "tracing", "zksync_types", @@ -12203,7 +12204,7 @@ dependencies = [ [[package]] name = "zksync_merkle_tree" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12232,7 +12233,7 @@ dependencies = [ [[package]] name = "zksync_metadata_calculator" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12266,7 +12267,7 @@ dependencies = [ [[package]] name = 
"zksync_mini_merkle_tree" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "criterion", "once_cell", @@ -12276,7 +12277,7 @@ dependencies = [ [[package]] name = "zksync_multivm" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12308,7 +12309,7 @@ dependencies = [ [[package]] name = "zksync_node_api_server" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12362,7 +12363,7 @@ dependencies = [ [[package]] name = "zksync_node_consensus" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12406,7 +12407,7 @@ dependencies = [ [[package]] name = "zksync_node_db_pruner" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12428,7 +12429,7 @@ dependencies = [ [[package]] name = "zksync_node_fee_model" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12447,7 +12448,7 @@ dependencies = [ [[package]] name = "zksync_node_framework" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12510,7 +12511,7 @@ dependencies = [ [[package]] name = "zksync_node_framework_derive" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "proc-macro2 1.0.92", "quote 1.0.37", @@ -12519,7 +12520,7 @@ dependencies = [ [[package]] name = "zksync_node_genesis" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -12540,7 +12541,7 @@ dependencies = [ [[package]] name = "zksync_node_storage_init" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12562,7 
+12563,7 @@ dependencies = [ [[package]] name = "zksync_node_sync" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12597,7 +12598,7 @@ dependencies = [ [[package]] name = "zksync_node_test_utils" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "zksync_contracts", "zksync_dal", @@ -12609,7 +12610,7 @@ dependencies = [ [[package]] name = "zksync_object_store" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12647,7 +12648,7 @@ dependencies = [ [[package]] name = "zksync_proof_data_handler" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "axum 0.7.9", @@ -12709,7 +12710,7 @@ dependencies = [ [[package]] name = "zksync_protobuf_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "hex", @@ -12729,7 +12730,7 @@ dependencies = [ [[package]] name = "zksync_prover_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "bincode", "chrono", @@ -12749,7 +12750,7 @@ dependencies = [ [[package]] name = "zksync_queued_job_processor" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12761,7 +12762,7 @@ dependencies = [ [[package]] name = "zksync_reorg_detector" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12782,7 +12783,7 @@ dependencies = [ [[package]] name = "zksync_server" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "clap 4.5.23", @@ -12812,7 +12813,7 @@ dependencies = [ [[package]] name = "zksync_shared_metrics" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ 
"rustc_version 0.4.1", "serde", @@ -12824,7 +12825,7 @@ dependencies = [ [[package]] name = "zksync_snapshots_applier" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12863,7 +12864,7 @@ dependencies = [ [[package]] name = "zksync_state" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12888,7 +12889,7 @@ dependencies = [ [[package]] name = "zksync_state_keeper" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -12928,7 +12929,7 @@ dependencies = [ [[package]] name = "zksync_storage" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "num_cpus", "once_cell", @@ -12941,7 +12942,7 @@ dependencies = [ [[package]] name = "zksync_system_constants" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -12949,7 +12950,7 @@ dependencies = [ [[package]] name = "zksync_tee_prover" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -12974,7 +12975,7 @@ dependencies = [ [[package]] name = "zksync_tee_verifier" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bincode", @@ -12992,7 +12993,7 @@ dependencies = [ [[package]] name = "zksync_test_contracts" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "ethabi", "foundry-compilers", @@ -13008,7 +13009,7 @@ dependencies = [ [[package]] name = "zksync_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13017,6 +13018,7 @@ dependencies = [ "bincode", "blake2 0.10.6", "chrono", + "ciborium", "derive_more 1.0.0", "hex", "itertools 0.10.5", @@ -13043,7 
+13045,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13059,7 +13061,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -13104,7 +13106,7 @@ dependencies = [ [[package]] name = "zksync_vm_executor" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13122,7 +13124,7 @@ dependencies = [ [[package]] name = "zksync_vm_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13140,7 +13142,7 @@ dependencies = [ [[package]] name = "zksync_vm_runner" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", @@ -13174,7 +13176,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "assert_matches", diff --git a/core/Cargo.toml b/core/Cargo.toml index 80e9ac035283..e636e1079f13 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -91,7 +91,7 @@ inherits = "release" debug = true [workspace.package] -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" edition = "2021" authors = ["The Matter Labs Team "] homepage = "https://zksync.io/" @@ -170,6 +170,7 @@ serde = "1" serde_json = "1" serde_with = "1" serde_yaml = "0.9" +ciborium = "0.2" sha2 = "0.10.8" sha3 = "0.10.8" sqlx = "0.8.1" @@ -231,7 +232,7 @@ tokio-stream = "0.1.16" circuit_encodings = "=0.150.20" circuit_sequencer_api = "=0.150.20" circuit_definitions = "=0.150.20" -crypto_codegen = { package = "zksync_solidity_vk_codegen",version = "=0.30.13" } +crypto_codegen = { package = "zksync_solidity_vk_codegen", version = "=0.30.13" 
} kzg = { package = "zksync_kzg", version = "=0.150.20" } zk_evm = { version = "=0.133.0" } zk_evm_1_3_1 = { package = "zk_evm", version = "0.131.0-rc.2" } @@ -240,7 +241,7 @@ zk_evm_1_4_0 = { package = "zk_evm", version = "0.140" } zk_evm_1_4_1 = { package = "zk_evm", version = "0.141" } zk_evm_1_5_0 = { package = "zk_evm", version = "=0.150.20" } fflonk = "=0.30.13" -bellman = {package = "zksync_bellman", version = "=0.30.13"} +bellman = { package = "zksync_bellman", version = "=0.30.13" } # New VM; pinned to a specific commit because of instability zksync_vm2 = { git = "https://github.com/matter-labs/vm2.git", rev = "457d8a7eea9093af9440662e33e598c13ba41633" } @@ -258,70 +259,70 @@ zksync_protobuf = "=0.8.0" zksync_protobuf_build = "=0.8.0" # "Local" dependencies -zksync_multivm = { version = "=26.1.0-non-semver-compat", path = "lib/multivm" } -zksync_vlog = { version = "=26.1.0-non-semver-compat", path = "lib/vlog" } -zksync_vm_interface = { version = "=26.1.0-non-semver-compat", path = "lib/vm_interface" } -zksync_vm_executor = { version = "=26.1.0-non-semver-compat", path = "lib/vm_executor" } -zksync_basic_types = { version = "=26.1.0-non-semver-compat", path = "lib/basic_types" } -zksync_circuit_breaker = { version = "=26.1.0-non-semver-compat", path = "lib/circuit_breaker" } -zksync_config = { version = "=26.1.0-non-semver-compat", path = "lib/config" } -zksync_contract_verifier_lib = { version = "=26.1.0-non-semver-compat", path = "lib/contract_verifier" } -zksync_contracts = { version = "=26.1.0-non-semver-compat", path = "lib/contracts" } -zksync_core_leftovers = { version = "=26.1.0-non-semver-compat", path = "lib/zksync_core_leftovers" } -zksync_dal = { version = "=26.1.0-non-semver-compat", path = "lib/dal" } -zksync_db_connection = { version = "=26.1.0-non-semver-compat", path = "lib/db_connection" } -zksync_env_config = { version = "=26.1.0-non-semver-compat", path = "lib/env_config" } -zksync_eth_client = { version = "=26.1.0-non-semver-compat", 
path = "lib/eth_client" } -zksync_da_client = { version = "=26.1.0-non-semver-compat", path = "lib/da_client" } -zksync_eth_signer = { version = "=26.1.0-non-semver-compat", path = "lib/eth_signer" } -zksync_health_check = { version = "=26.1.0-non-semver-compat", path = "lib/health_check" } -zksync_l1_contract_interface = { version = "=26.1.0-non-semver-compat", path = "lib/l1_contract_interface" } -zksync_mempool = { version = "=26.1.0-non-semver-compat", path = "lib/mempool" } -zksync_merkle_tree = { version = "=26.1.0-non-semver-compat", path = "lib/merkle_tree" } +zksync_multivm = { version = "26.2.1-non-semver-compat", path = "lib/multivm" } +zksync_vlog = { version = "26.2.1-non-semver-compat", path = "lib/vlog" } +zksync_vm_interface = { version = "26.2.1-non-semver-compat", path = "lib/vm_interface" } +zksync_vm_executor = { version = "26.2.1-non-semver-compat", path = "lib/vm_executor" } +zksync_basic_types = { version = "26.2.1-non-semver-compat", path = "lib/basic_types" } +zksync_circuit_breaker = { version = "26.2.1-non-semver-compat", path = "lib/circuit_breaker" } +zksync_config = { version = "26.2.1-non-semver-compat", path = "lib/config" } +zksync_contract_verifier_lib = { version = "26.2.1-non-semver-compat", path = "lib/contract_verifier" } +zksync_contracts = { version = "26.2.1-non-semver-compat", path = "lib/contracts" } +zksync_core_leftovers = { version = "26.2.1-non-semver-compat", path = "lib/zksync_core_leftovers" } +zksync_dal = { version = "26.2.1-non-semver-compat", path = "lib/dal" } +zksync_db_connection = { version = "26.2.1-non-semver-compat", path = "lib/db_connection" } +zksync_env_config = { version = "26.2.1-non-semver-compat", path = "lib/env_config" } +zksync_eth_client = { version = "26.2.1-non-semver-compat", path = "lib/eth_client" } +zksync_da_client = { version = "26.2.1-non-semver-compat", path = "lib/da_client" } +zksync_eth_signer = { version = "26.2.1-non-semver-compat", path = "lib/eth_signer" } +zksync_health_check 
= { version = "26.2.1-non-semver-compat", path = "lib/health_check" } +zksync_l1_contract_interface = { version = "26.2.1-non-semver-compat", path = "lib/l1_contract_interface" } +zksync_mempool = { version = "26.2.1-non-semver-compat", path = "lib/mempool" } +zksync_merkle_tree = { version = "26.2.1-non-semver-compat", path = "lib/merkle_tree" } zksync_bin_metadata = { version = "=26.1.0-non-semver-compat", path = "lib/bin_metadata" } -zksync_mini_merkle_tree = { version = "=26.1.0-non-semver-compat", path = "lib/mini_merkle_tree" } -zksync_object_store = { version = "=26.1.0-non-semver-compat", path = "lib/object_store" } -zksync_protobuf_config = { version = "=26.1.0-non-semver-compat", path = "lib/protobuf_config" } -zksync_prover_interface = { version = "=26.1.0-non-semver-compat", path = "lib/prover_interface" } -zksync_queued_job_processor = { version = "=26.1.0-non-semver-compat", path = "lib/queued_job_processor" } -zksync_snapshots_applier = { version = "=26.1.0-non-semver-compat", path = "lib/snapshots_applier" } -zksync_state = { version = "=26.1.0-non-semver-compat", path = "lib/state" } -zksync_storage = { version = "=26.1.0-non-semver-compat", path = "lib/storage" } -zksync_system_constants = { version = "=26.1.0-non-semver-compat", path = "lib/constants" } -zksync_tee_verifier = { version = "=26.1.0-non-semver-compat", path = "lib/tee_verifier" } -zksync_test_contracts = { version = "=26.1.0-non-semver-compat", path = "lib/test_contracts" } -zksync_types = { version = "=26.1.0-non-semver-compat", path = "lib/types" } -zksync_utils = { version = "=26.1.0-non-semver-compat", path = "lib/utils" } -zksync_web3_decl = { version = "=26.1.0-non-semver-compat", path = "lib/web3_decl" } -zksync_crypto_primitives = { version = "=26.1.0-non-semver-compat", path = "lib/crypto_primitives" } -zksync_external_price_api = { version = "=26.1.0-non-semver-compat", path = "lib/external_price_api" } +zksync_mini_merkle_tree = { version = "26.2.1-non-semver-compat", 
path = "lib/mini_merkle_tree" } +zksync_object_store = { version = "26.2.1-non-semver-compat", path = "lib/object_store" } +zksync_protobuf_config = { version = "26.2.1-non-semver-compat", path = "lib/protobuf_config" } +zksync_prover_interface = { version = "26.2.1-non-semver-compat", path = "lib/prover_interface" } +zksync_queued_job_processor = { version = "26.2.1-non-semver-compat", path = "lib/queued_job_processor" } +zksync_snapshots_applier = { version = "26.2.1-non-semver-compat", path = "lib/snapshots_applier" } +zksync_state = { version = "26.2.1-non-semver-compat", path = "lib/state" } +zksync_storage = { version = "26.2.1-non-semver-compat", path = "lib/storage" } +zksync_system_constants = { version = "26.2.1-non-semver-compat", path = "lib/constants" } +zksync_tee_verifier = { version = "26.2.1-non-semver-compat", path = "lib/tee_verifier" } +zksync_test_contracts = { version = "26.2.1-non-semver-compat", path = "lib/test_contracts" } +zksync_types = { version = "26.2.1-non-semver-compat", path = "lib/types" } +zksync_utils = { version = "26.2.1-non-semver-compat", path = "lib/utils" } +zksync_web3_decl = { version = "26.2.1-non-semver-compat", path = "lib/web3_decl" } +zksync_crypto_primitives = { version = "26.2.1-non-semver-compat", path = "lib/crypto_primitives" } +zksync_external_price_api = { version = "26.2.1-non-semver-compat", path = "lib/external_price_api" } # Framework and components -zksync_node_framework = { version = "=26.1.0-non-semver-compat", path = "node/node_framework" } -zksync_node_framework_derive = { version = "=26.1.0-non-semver-compat", path = "lib/node_framework_derive" } -zksync_eth_watch = { version = "=26.1.0-non-semver-compat", path = "node/eth_watch" } -zksync_shared_metrics = { version = "=26.1.0-non-semver-compat", path = "node/shared_metrics" } -zksync_proof_data_handler = { version = "=26.1.0-non-semver-compat", path = "node/proof_data_handler" } -zksync_block_reverter = { version = "=26.1.0-non-semver-compat", path 
= "node/block_reverter" } -zksync_commitment_generator = { version = "=26.1.0-non-semver-compat", path = "node/commitment_generator" } -zksync_house_keeper = { version = "=26.1.0-non-semver-compat", path = "node/house_keeper" } -zksync_node_genesis = { version = "=26.1.0-non-semver-compat", path = "node/genesis" } -zksync_da_dispatcher = { version = "=26.1.0-non-semver-compat", path = "node/da_dispatcher" } -zksync_da_clients = { version = "=26.1.0-non-semver-compat", path = "node/da_clients" } -zksync_eth_sender = { version = "=26.1.0-non-semver-compat", path = "node/eth_sender" } -zksync_node_db_pruner = { version = "=26.1.0-non-semver-compat", path = "node/db_pruner" } -zksync_node_fee_model = { version = "=26.1.0-non-semver-compat", path = "node/fee_model" } -zksync_vm_runner = { version = "=26.1.0-non-semver-compat", path = "node/vm_runner" } -zksync_external_proof_integration_api = { version = "=26.1.0-non-semver-compat", path = "node/external_proof_integration_api" } -zksync_node_test_utils = { version = "=26.1.0-non-semver-compat", path = "node/test_utils" } -zksync_state_keeper = { version = "=26.1.0-non-semver-compat", path = "node/state_keeper" } -zksync_reorg_detector = { version = "=26.1.0-non-semver-compat", path = "node/reorg_detector" } -zksync_consistency_checker = { version = "=26.1.0-non-semver-compat", path = "node/consistency_checker" } -zksync_metadata_calculator = { version = "=26.1.0-non-semver-compat", path = "node/metadata_calculator" } -zksync_node_sync = { version = "=26.1.0-non-semver-compat", path = "node/node_sync" } -zksync_node_storage_init = { version = "=26.1.0-non-semver-compat", path = "node/node_storage_init" } -zksync_node_consensus = { version = "=26.1.0-non-semver-compat", path = "node/consensus" } -zksync_contract_verification_server = { version = "=26.1.0-non-semver-compat", path = "node/contract_verification_server" } -zksync_node_api_server = { version = "=26.1.0-non-semver-compat", path = "node/api_server" } 
-zksync_base_token_adjuster = { version = "=26.1.0-non-semver-compat", path = "node/base_token_adjuster" } -zksync_logs_bloom_backfill = { version = "=26.1.0-non-semver-compat", path = "node/logs_bloom_backfill" } +zksync_node_framework = { version = "26.2.1-non-semver-compat", path = "node/node_framework" } +zksync_node_framework_derive = { version = "26.2.1-non-semver-compat", path = "lib/node_framework_derive" } +zksync_eth_watch = { version = "26.2.1-non-semver-compat", path = "node/eth_watch" } +zksync_shared_metrics = { version = "26.2.1-non-semver-compat", path = "node/shared_metrics" } +zksync_proof_data_handler = { version = "26.2.1-non-semver-compat", path = "node/proof_data_handler" } +zksync_block_reverter = { version = "26.2.1-non-semver-compat", path = "node/block_reverter" } +zksync_commitment_generator = { version = "26.2.1-non-semver-compat", path = "node/commitment_generator" } +zksync_house_keeper = { version = "26.2.1-non-semver-compat", path = "node/house_keeper" } +zksync_node_genesis = { version = "26.2.1-non-semver-compat", path = "node/genesis" } +zksync_da_dispatcher = { version = "26.2.1-non-semver-compat", path = "node/da_dispatcher" } +zksync_da_clients = { version = "26.2.1-non-semver-compat", path = "node/da_clients" } +zksync_eth_sender = { version = "26.2.1-non-semver-compat", path = "node/eth_sender" } +zksync_node_db_pruner = { version = "26.2.1-non-semver-compat", path = "node/db_pruner" } +zksync_node_fee_model = { version = "26.2.1-non-semver-compat", path = "node/fee_model" } +zksync_vm_runner = { version = "26.2.1-non-semver-compat", path = "node/vm_runner" } +zksync_external_proof_integration_api = { version = "26.2.1-non-semver-compat", path = "node/external_proof_integration_api" } +zksync_node_test_utils = { version = "26.2.1-non-semver-compat", path = "node/test_utils" } +zksync_state_keeper = { version = "26.2.1-non-semver-compat", path = "node/state_keeper" } +zksync_reorg_detector = { version = 
"26.2.1-non-semver-compat", path = "node/reorg_detector" } +zksync_consistency_checker = { version = "26.2.1-non-semver-compat", path = "node/consistency_checker" } +zksync_metadata_calculator = { version = "26.2.1-non-semver-compat", path = "node/metadata_calculator" } +zksync_node_sync = { version = "26.2.1-non-semver-compat", path = "node/node_sync" } +zksync_node_storage_init = { version = "26.2.1-non-semver-compat", path = "node/node_storage_init" } +zksync_node_consensus = { version = "26.2.1-non-semver-compat", path = "node/consensus" } +zksync_contract_verification_server = { version = "26.2.1-non-semver-compat", path = "node/contract_verification_server" } +zksync_node_api_server = { version = "26.2.1-non-semver-compat", path = "node/api_server" } +zksync_base_token_adjuster = { version = "26.2.1-non-semver-compat", path = "node/base_token_adjuster" } +zksync_logs_bloom_backfill = { version = "26.2.1-non-semver-compat", path = "node/logs_bloom_backfill" } diff --git a/core/bin/contract-verifier/src/main.rs b/core/bin/contract-verifier/src/main.rs index ab86c147977d..93f23816c67d 100644 --- a/core/bin/contract-verifier/src/main.rs +++ b/core/bin/contract-verifier/src/main.rs @@ -6,7 +6,7 @@ use tokio::sync::watch; use zksync_config::configs::PrometheusConfig; use zksync_contract_verifier_lib::ContractVerifier; use zksync_core_leftovers::temp_config_store::{load_database_secrets, load_general_config}; -use zksync_dal::{ConnectionPool, Core}; +use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_queued_job_processor::JobProcessor; use zksync_utils::wait_for_tasks::ManagedTasks; use zksync_vlog::prometheus::PrometheusExporterConfig; @@ -25,6 +25,32 @@ struct Opt { secrets_path: Option, } +async fn perform_storage_migration(pool: &ConnectionPool) -> anyhow::Result<()> { + const BATCH_SIZE: usize = 1000; + + // Make it possible to override just in case. 
+ let batch_size = std::env::var("CONTRACT_VERIFIER_MIGRATION_BATCH_SIZE") + .ok() + .and_then(|v| v.parse().ok()) + .unwrap_or(BATCH_SIZE); + + let mut storage = pool.connection().await?; + let migration_performed = storage + .contract_verification_dal() + .is_verification_info_migration_performed() + .await?; + if !migration_performed { + tracing::info!(batch_size = %batch_size, "Running the storage migration for the contract verifier table"); + storage + .contract_verification_dal() + .perform_verification_info_migration(batch_size) + .await?; + } else { + tracing::info!("Storage migration is not needed"); + } + Ok(()) +} + #[tokio::main] async fn main() -> anyhow::Result<()> { let opt = Opt::parse(); @@ -51,6 +77,8 @@ async fn main() -> anyhow::Result<()> { .build() .await?; + perform_storage_migration(&pool).await?; + let (stop_sender, stop_receiver) = watch::channel(false); let contract_verifier = ContractVerifier::new(verifier_config.compilation_timeout(), pool) .await diff --git a/core/bin/external_node/README.md b/core/bin/external_node/README.md index 335ceed7b719..658dfe157068 100644 --- a/core/bin/external_node/README.md +++ b/core/bin/external_node/README.md @@ -6,7 +6,7 @@ Note: this README is under construction. ## Local development -This section describes how to run the external node locally +This section describes how to run the external node locally. 
### Configuration diff --git a/core/bin/external_node/src/config/mod.rs b/core/bin/external_node/src/config/mod.rs index 3f1727c619b4..9274edf9d1ab 100644 --- a/core/bin/external_node/src/config/mod.rs +++ b/core/bin/external_node/src/config/mod.rs @@ -760,7 +760,7 @@ impl OptionalENConfig { } const fn default_req_entities_limit() -> usize { - 1_024 + 10_000 } const fn default_max_tx_size_bytes() -> usize { diff --git a/core/bin/verified_sources_fetcher/src/main.rs b/core/bin/verified_sources_fetcher/src/main.rs index 981eebf4a706..5ddf65fd580d 100644 --- a/core/bin/verified_sources_fetcher/src/main.rs +++ b/core/bin/verified_sources_fetcher/src/main.rs @@ -3,7 +3,7 @@ use std::io::Write; use zksync_config::configs::DatabaseSecrets; use zksync_dal::{ConnectionPool, Core, CoreDal}; use zksync_env_config::FromEnv; -use zksync_types::contract_verification_api::SourceCodeData; +use zksync_types::contract_verification::api::SourceCodeData; #[tokio::main] async fn main() { diff --git a/core/lib/basic_types/src/bytecode.rs b/core/lib/basic_types/src/bytecode.rs index 12b4df69a6c6..a96c5b2b10cd 100644 --- a/core/lib/basic_types/src/bytecode.rs +++ b/core/lib/basic_types/src/bytecode.rs @@ -168,6 +168,14 @@ impl BytecodeMarker { } } +/// Removes padding from the bytecode, if necessary. +pub fn trim_bytecode(bytecode_hash: BytecodeHash, raw: &[u8]) -> anyhow::Result<&[u8]> { + match bytecode_hash.marker() { + BytecodeMarker::EraVm => Ok(raw), + BytecodeMarker::Evm => trim_padded_evm_bytecode(bytecode_hash, raw), + } +} + /// Removes padding from an EVM bytecode, returning the original EVM bytecode. 
pub fn trim_padded_evm_bytecode(bytecode_hash: BytecodeHash, raw: &[u8]) -> anyhow::Result<&[u8]> { if bytecode_hash.marker() != BytecodeMarker::Evm { diff --git a/core/lib/contract_verifier/src/compilers/mod.rs b/core/lib/contract_verifier/src/compilers/mod.rs index c82a6575ee4c..cbaf9d2225bc 100644 --- a/core/lib/contract_verifier/src/compilers/mod.rs +++ b/core/lib/contract_verifier/src/compilers/mod.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use anyhow::Context as _; use serde::{Deserialize, Serialize}; -use zksync_types::contract_verification_api::CompilationArtifacts; +use zksync_types::contract_verification::api::CompilationArtifacts; pub(crate) use self::{ solc::{Solc, SolcInput}, diff --git a/core/lib/contract_verifier/src/compilers/solc.rs b/core/lib/contract_verifier/src/compilers/solc.rs index 10adcad3542e..4224e2a3dbcc 100644 --- a/core/lib/contract_verifier/src/compilers/solc.rs +++ b/core/lib/contract_verifier/src/compilers/solc.rs @@ -3,7 +3,7 @@ use std::{collections::HashMap, path::PathBuf, process::Stdio}; use anyhow::Context; use tokio::io::AsyncWriteExt; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{ +use zksync_types::contract_verification::api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; diff --git a/core/lib/contract_verifier/src/compilers/vyper.rs b/core/lib/contract_verifier/src/compilers/vyper.rs index 59b950f9f17f..46034a62e0a3 100644 --- a/core/lib/contract_verifier/src/compilers/vyper.rs +++ b/core/lib/contract_verifier/src/compilers/vyper.rs @@ -3,7 +3,7 @@ use std::{collections::HashMap, mem, path::PathBuf, process::Stdio}; use anyhow::Context; use tokio::io::AsyncWriteExt; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{ +use zksync_types::contract_verification::api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; diff --git a/core/lib/contract_verifier/src/compilers/zksolc.rs 
b/core/lib/contract_verifier/src/compilers/zksolc.rs index ff435e96aeb6..1b20f6d5d242 100644 --- a/core/lib/contract_verifier/src/compilers/zksolc.rs +++ b/core/lib/contract_verifier/src/compilers/zksolc.rs @@ -6,7 +6,7 @@ use semver::Version; use serde::{Deserialize, Serialize}; use tokio::io::AsyncWriteExt; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::{ +use zksync_types::contract_verification::api::{ CompilationArtifacts, SourceCodeData, VerificationIncomingRequest, }; @@ -65,6 +65,7 @@ pub(crate) struct Optimizer { /// Whether the optimizer is enabled. pub enabled: bool, /// The optimization mode string. + #[serde(skip_serializing_if = "Option::is_none")] pub mode: Option, } @@ -144,12 +145,24 @@ impl ZkSolc { fn parse_single_file_yul_output( output: &str, ) -> Result { - let re = Regex::new(r"Contract `.*` bytecode: 0x([\da-f]+)").unwrap(); - let cap = re - .captures(output) - .context("Yul output doesn't match regex")?; + let cap = if output.contains("Binary:\n") { + // Format of the new output + // ======= /tmp/input.yul:Empty ======= + // Binary: + // 00000001002 <..> + let re = Regex::new(r"Binary:\n([\da-f]+)").unwrap(); + re.captures(output) + .with_context(|| format!("Yul output doesn't match regex. Output: {output}"))? + } else { + // Old compiler versions + let re_old = Regex::new(r"Contract `.*` bytecode: 0x([\da-f]+)").unwrap(); + re_old + .captures(output) + .with_context(|| format!("Yul output doesn't match regex. Output: {output}"))? 
+ }; let bytecode_str = cap.get(1).context("no matches in Yul output")?.as_str(); let bytecode = hex::decode(bytecode_str).context("invalid Yul output bytecode")?; + Ok(CompilationArtifacts { bytecode, deployed_bytecode: None, @@ -255,6 +268,9 @@ impl Compiler for ZkSolc { .context("cannot create temporary Yul file")?; file.write_all(source_code.as_bytes()) .context("failed writing Yul file")?; + + // TODO: `zksolc` support standard JSON for `yul` since 1.5.0, so we don't have + // to parse `--bin` output. let child = command .arg(file.path().to_str().unwrap()) .arg("--optimization") diff --git a/core/lib/contract_verifier/src/compilers/zkvyper.rs b/core/lib/contract_verifier/src/compilers/zkvyper.rs index 4f7c10214f8a..4056736547f6 100644 --- a/core/lib/contract_verifier/src/compilers/zkvyper.rs +++ b/core/lib/contract_verifier/src/compilers/zkvyper.rs @@ -3,7 +3,7 @@ use std::{ffi::OsString, path, path::Path, process::Stdio}; use anyhow::Context as _; use tokio::{fs, io::AsyncWriteExt}; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::CompilationArtifacts; +use zksync_types::contract_verification::api::CompilationArtifacts; use super::VyperInput; use crate::{ diff --git a/core/lib/contract_verifier/src/lib.rs b/core/lib/contract_verifier/src/lib.rs index 43da4127b809..864c7b747d43 100644 --- a/core/lib/contract_verifier/src/lib.rs +++ b/core/lib/contract_verifier/src/lib.rs @@ -15,9 +15,12 @@ use zksync_dal::{contract_verification_dal::DeployedContractData, ConnectionPool use zksync_queued_job_processor::{async_trait, JobProcessor}; use zksync_types::{ bytecode::{trim_padded_evm_bytecode, BytecodeHash, BytecodeMarker}, - contract_verification_api::{ - self as api, CompilationArtifacts, VerificationIncomingRequest, VerificationInfo, - VerificationRequest, + contract_verification::{ + api::{ + self as api, CompilationArtifacts, VerificationIncomingRequest, VerificationInfo, + VerificationProblem, VerificationRequest, + }, 
+ contract_identifier::{ContractIdentifier, Match}, }, Address, CONTRACT_DEPLOYER_ADDRESS, }; @@ -224,7 +227,7 @@ impl ContractVerifier { async fn verify( &self, mut request: VerificationRequest, - ) -> Result { + ) -> Result<(VerificationInfo, ContractIdentifier), ContractVerifierError> { // Bytecode should be present because it is checked when accepting request. let mut storage = self .connection_pool @@ -245,6 +248,8 @@ impl ContractVerifier { let bytecode_marker = BytecodeMarker::new(deployed_contract.bytecode_hash) .context("unknown bytecode kind")?; let artifacts = self.compile(request.req.clone(), bytecode_marker).await?; + let identifier = + ContractIdentifier::from_bytecode(bytecode_marker, artifacts.deployed_bytecode()); let constructor_args = match bytecode_marker { BytecodeMarker::EraVm => self .decode_era_vm_constructor_args(&deployed_contract, request.req.contract_address)?, @@ -265,14 +270,28 @@ impl ContractVerifier { .context("invalid stored EVM bytecode")?, }; - if artifacts.deployed_bytecode() != deployed_bytecode { - tracing::info!( - request_id = request.id, - deployed = hex::encode(deployed_bytecode), - compiled = hex::encode(artifacts.deployed_bytecode()), - "Deployed (runtime) bytecode mismatch", - ); - return Err(ContractVerifierError::BytecodeMismatch); + let mut verification_problems = Vec::new(); + + match identifier.matches(deployed_bytecode) { + Match::Full => {} + Match::Partial => { + tracing::trace!( + request_id = request.id, + deployed = hex::encode(deployed_bytecode), + compiled = hex::encode(artifacts.deployed_bytecode()), + "Partial bytecode match", + ); + verification_problems.push(VerificationProblem::IncorrectMetadata); + } + Match::None => { + tracing::trace!( + request_id = request.id, + deployed = hex::encode(deployed_bytecode), + compiled = hex::encode(artifacts.deployed_bytecode()), + "Deployed (runtime) bytecode mismatch", + ); + return Err(ContractVerifierError::BytecodeMismatch); + } } match constructor_args { @@ 
-284,6 +303,11 @@ impl ContractVerifier { hex::encode(&args), hex::encode(provided_constructor_args) ); + // We could, in theory, accept this contract and mark it as partially verified, + // but in during verification it is always possible to reconstruct the + // constructor arguments, so there is no reason for that. + // Mismatching constructor arguments are only needed for "similar bytecodes" + // (e.g. displayed contract as verified without a direct verification request). return Err(ContractVerifierError::IncorrectConstructorArguments); } } @@ -294,11 +318,13 @@ impl ContractVerifier { let verified_at = Utc::now(); tracing::trace!(%verified_at, "verified request"); - Ok(VerificationInfo { + let info = VerificationInfo { request, artifacts, verified_at, - }) + verification_problems, + }; + Ok((info, identifier)) } async fn compile_zksolc( @@ -544,17 +570,21 @@ impl ContractVerifier { async fn process_result( &self, request_id: usize, - verification_result: Result, + verification_result: Result<(VerificationInfo, ContractIdentifier), ContractVerifierError>, ) -> anyhow::Result<()> { let mut storage = self .connection_pool .connection_tagged("contract_verifier") .await?; match verification_result { - Ok(info) => { + Ok((info, identifier)) => { storage .contract_verification_dal() - .save_verification_info(info) + .save_verification_info( + info, + identifier.bytecode_keccak256, + identifier.bytecode_without_metadata_keccak256, + ) .await?; tracing::info!("Successfully processed request with id = {request_id}"); } diff --git a/core/lib/contract_verifier/src/resolver/mod.rs b/core/lib/contract_verifier/src/resolver/mod.rs index a9d2bcf9049d..b2bd659408d3 100644 --- a/core/lib/contract_verifier/src/resolver/mod.rs +++ b/core/lib/contract_verifier/src/resolver/mod.rs @@ -8,7 +8,7 @@ use std::{ use anyhow::Context as _; use tokio::fs; use zksync_queued_job_processor::async_trait; -use zksync_types::contract_verification_api::CompilationArtifacts; +use 
zksync_types::contract_verification::api::CompilationArtifacts; pub(crate) use self::{env::EnvCompilerResolver, github::GitHubCompilerResolver}; use crate::{ diff --git a/core/lib/contract_verifier/src/tests/mod.rs b/core/lib/contract_verifier/src/tests/mod.rs index 2ffb51ceb30a..31cec03e138e 100644 --- a/core/lib/contract_verifier/src/tests/mod.rs +++ b/core/lib/contract_verifier/src/tests/mod.rs @@ -9,7 +9,7 @@ use zksync_node_test_utils::{create_l1_batch, create_l2_block}; use zksync_types::{ address_to_h256, bytecode::{pad_evm_bytecode, BytecodeHash}, - contract_verification_api::{CompilerVersions, SourceCodeData, VerificationIncomingRequest}, + contract_verification::api::{CompilerVersions, SourceCodeData, VerificationIncomingRequest}, get_code_key, get_known_code_key, l2::L2Tx, tx::IncludedTxLocation, @@ -435,7 +435,7 @@ async fn contract_verifier_basics(contract: TestContract) { let (_stop_sender, stop_receiver) = watch::channel(false); verifier.run(stop_receiver, Some(1)).await.unwrap(); - assert_request_success(&mut storage, request_id, address, &expected_bytecode).await; + assert_request_success(&mut storage, request_id, address, &expected_bytecode, &[]).await; } async fn assert_request_success( @@ -443,6 +443,7 @@ async fn assert_request_success( request_id: usize, address: Address, expected_bytecode: &[u8], + verification_problems: &[VerificationProblem], ) -> VerificationInfo { let status = storage .contract_verification_dal() @@ -465,6 +466,11 @@ async fn assert_request_success( without_internal_types(verification_info.artifacts.abi.clone()), without_internal_types(counter_contract_abi()) ); + assert_eq!( + &verification_info.verification_problems, + verification_problems + ); + verification_info } @@ -541,7 +547,7 @@ async fn verifying_evm_bytecode(contract: TestContract) { let (_stop_sender, stop_receiver) = watch::channel(false); verifier.run(stop_receiver, Some(1)).await.unwrap(); - assert_request_success(&mut storage, request_id, address, 
&creation_bytecode).await; + assert_request_success(&mut storage, request_id, address, &creation_bytecode, &[]).await; } #[tokio::test] @@ -708,10 +714,12 @@ async fn creation_bytecode_mismatch() { .await .unwrap(); - let mock_resolver = MockCompilerResolver::solc(move |_| CompilationArtifacts { - bytecode: vec![4; 20], // differs from `creation_bytecode` - deployed_bytecode: Some(deployed_bytecode.clone()), - abi: counter_contract_abi(), + let mock_resolver = MockCompilerResolver::solc(move |_| { + CompilationArtifacts { + bytecode: vec![4; 20], // differs from `creation_bytecode` + deployed_bytecode: Some(deployed_bytecode.clone()), + abi: counter_contract_abi(), + } }); let verifier = ContractVerifier::with_resolver( Duration::from_secs(60), diff --git a/core/lib/contract_verifier/src/tests/real.rs b/core/lib/contract_verifier/src/tests/real.rs index ba7615528e15..d82edf7b0020 100644 --- a/core/lib/contract_verifier/src/tests/real.rs +++ b/core/lib/contract_verifier/src/tests/real.rs @@ -1,11 +1,18 @@ //! Tests using real compiler toolchains. Should be prepared by calling `zkstack contract-verifier init` //! with at least one `solc` and `zksolc` version. If there are no compilers, the tests will be ignored //! unless the `RUN_CONTRACT_VERIFICATION_TEST` env var is set to `true`, in which case the tests will fail. +//! +//! You can install the compilers to run these tests with the following command: +//! ``` +//! zkstack contract-verifier init --zksolc-version=v1.5.10 --zkvyper-version=v1.5.4 --solc-version=0.8.26 --vyper-version=v0.3.10 --era-vm-solc-version=0.8.26-1.0.1 --only +//! 
``` use std::{env, sync::Arc, time::Duration}; use assert_matches::assert_matches; -use zksync_types::bytecode::validate_bytecode; +use zksync_types::{ + bytecode::validate_bytecode, contract_verification::contract_identifier::DetectedMetadata, +}; use super::*; @@ -19,38 +26,72 @@ impl Toolchain { const ALL: [Self; 2] = [Self::Solidity, Self::Vyper]; } +// The tests may expect specific compiler versions (e.g. contracts won't compile with Vyper 0.4.0), +// so we hardcode versions. +const ZKSOLC_VERSION: &str = "v1.5.10"; +const ERA_VM_SOLC_VERSION: &str = "0.8.26-1.0.1"; +const SOLC_VERSION: &str = "0.8.26"; +const VYPER_VERSION: &str = "v0.3.10"; +const ZKVYPER_VERSION: &str = "v1.5.4"; + #[derive(Debug, Clone)] struct TestCompilerVersions { solc: String, + eravm_solc: String, zksolc: String, vyper: String, zkvyper: String, } impl TestCompilerVersions { - fn new(versions: SupportedCompilerVersions) -> Option { - let solc = versions - .solc - .into_iter() - .find(|ver| !ver.starts_with("zkVM"))?; - Some(Self { - solc, - zksolc: versions.zksolc.into_iter().next()?, - vyper: versions.vyper.into_iter().next()?, - zkvyper: versions.zkvyper.into_iter().next()?, + fn new(versions: SupportedCompilerVersions) -> anyhow::Result { + // Stored compilers for our fork are prefixed with `zkVM-`. + let eravm_solc = format!("zkVM-{ERA_VM_SOLC_VERSION}"); + // Stored compilers for vyper do not have `v` prefix. 
+ let vyper = VYPER_VERSION.strip_prefix("v").unwrap().to_owned(); + anyhow::ensure!( + versions.solc.contains(SOLC_VERSION), + "Expected solc version {SOLC_VERSION} to be installed, but it is not" + ); + anyhow::ensure!( + versions.solc.contains(&eravm_solc), + "Expected era-vm solc version {ERA_VM_SOLC_VERSION} to be installed, but it is not" + ); + anyhow::ensure!( + versions.zksolc.contains(ZKSOLC_VERSION), + "Expected zksolc version {ZKSOLC_VERSION} to be installed, but it is not" + ); + anyhow::ensure!( + versions.vyper.contains(&vyper), + "Expected vyper version {VYPER_VERSION} to be installed, but it is not" + ); + anyhow::ensure!( + versions.zkvyper.contains(ZKVYPER_VERSION), + "Expected zkvyper version {ZKVYPER_VERSION} to be installed, but it is not" + ); + + Ok(Self { + solc: SOLC_VERSION.to_owned(), + eravm_solc, + zksolc: ZKSOLC_VERSION.to_owned(), + vyper, + zkvyper: ZKVYPER_VERSION.to_owned(), }) } fn zksolc(self) -> ZkCompilerVersions { ZkCompilerVersions { - base: self.solc, + base: self.eravm_solc, zk: self.zksolc, } } fn solc_for_api(self, bytecode_kind: BytecodeMarker) -> CompilerVersions { CompilerVersions::Solc { - compiler_solc_version: self.solc, + compiler_solc_version: match bytecode_kind { + BytecodeMarker::Evm => self.solc, + BytecodeMarker::EraVm => self.eravm_solc, + }, compiler_zksolc_version: match bytecode_kind { BytecodeMarker::Evm => None, BytecodeMarker::EraVm => Some(self.zksolc), @@ -76,32 +117,39 @@ impl TestCompilerVersions { } } -async fn checked_env_resolver() -> Option<(EnvCompilerResolver, TestCompilerVersions)> { +async fn checked_env_resolver() -> anyhow::Result<(EnvCompilerResolver, TestCompilerVersions)> { let compiler_resolver = EnvCompilerResolver::default(); - let supported_compilers = compiler_resolver.supported_versions().await.ok()?; - Some(( + let supported_compilers = compiler_resolver.supported_versions().await?; + Ok(( compiler_resolver, TestCompilerVersions::new(supported_compilers)?, )) } -fn 
assert_no_compilers_expected() { +fn assert_no_compilers_expected(err: anyhow::Error) { + let error_message = format!( + "Expected pre-installed compilers since `RUN_CONTRACT_VERIFICATION_TEST=true`, but at least one compiler is not installed.\n \ + Detail: {}\n\n \ + Use the following command to install compilers:\n \ + zkstack contract-verifier init --zksolc-version={} --zkvyper-version={} --solc-version={} --vyper-version={} --era-vm-solc-version={} --only", + err, ZKSOLC_VERSION, ZKVYPER_VERSION, SOLC_VERSION, VYPER_VERSION, ERA_VM_SOLC_VERSION + ); + assert_ne!( env::var("RUN_CONTRACT_VERIFICATION_TEST").ok().as_deref(), Some("true"), - "Expected pre-installed compilers since `RUN_CONTRACT_VERIFICATION_TEST=true`, but they are not installed. \ - Use `zkstack contract-verifier init` to install compilers" + "{error_message}" ); - println!("No compilers found, skipping the test"); + println!("At least one compiler is not found, skipping the test"); } /// Simplifies initializing real compiler resolver in tests. macro_rules! 
real_resolver { () => { match checked_env_resolver().await { - Some(resolver_and_versions) => resolver_and_versions, - None => { - assert_no_compilers_expected(); + Ok(resolver_and_versions) => resolver_and_versions, + Err(err) => { + assert_no_compilers_expected(err); return; } } @@ -254,10 +302,16 @@ async fn compiling_yul_with_zksolc() { let req = test_yul_request(supported_compilers.solc_for_api(BytecodeMarker::EraVm)); let input = ZkSolc::build_input(req).unwrap(); let output = compiler.compile(input).await.unwrap(); + let identifier = + ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, output.deployed_bytecode()); assert!(!output.bytecode.is_empty()); assert!(output.deployed_bytecode.is_none()); assert_eq!(output.abi, serde_json::json!([])); + assert_matches!( + identifier.detected_metadata, + Some(DetectedMetadata::Keccak256) + ); } #[tokio::test] @@ -272,10 +326,17 @@ async fn compiling_standalone_yul() { }); let input = Solc::build_input(req).unwrap(); let output = compiler.compile(input).await.unwrap(); + let identifier = + ContractIdentifier::from_bytecode(BytecodeMarker::Evm, output.deployed_bytecode()); assert!(!output.bytecode.is_empty()); assert_ne!(output.deployed_bytecode.unwrap(), output.bytecode); assert_eq!(output.abi, serde_json::json!([])); + assert_matches!( + identifier.detected_metadata, + None, + "No metadata for compiler yul for EVM" + ); } fn test_vyper_request( @@ -322,9 +383,15 @@ async fn using_real_zkvyper(specify_contract_file: bool) { ); let input = VyperInput::new(req).unwrap(); let output = compiler.compile(input).await.unwrap(); + let identifier = + ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, output.deployed_bytecode()); validate_bytecode(&output.bytecode).unwrap(); assert_eq!(output.abi, without_internal_types(counter_contract_abi())); + assert_matches!( + identifier.detected_metadata, + Some(DetectedMetadata::Keccak256) + ); } #[test_casing(2, [false, true])] @@ -347,9 +414,13 @@ async fn 
using_standalone_vyper(specify_contract_file: bool) { ); let input = VyperInput::new(req).unwrap(); let output = compiler.compile(input).await.unwrap(); + let identifier = + ContractIdentifier::from_bytecode(BytecodeMarker::Evm, output.deployed_bytecode()); assert!(output.deployed_bytecode.is_some()); assert_eq!(output.abi, without_internal_types(counter_contract_abi())); + // Vyper does not provide metadata for bytecode. + assert_matches!(identifier.detected_metadata, None); } #[tokio::test] @@ -367,9 +438,13 @@ async fn using_standalone_vyper_without_optimization() { req.optimization_used = false; let input = VyperInput::new(req).unwrap(); let output = compiler.compile(input).await.unwrap(); + let identifier = + ContractIdentifier::from_bytecode(BytecodeMarker::Evm, output.deployed_bytecode()); assert!(output.deployed_bytecode.is_some()); assert_eq!(output.abi, without_internal_types(counter_contract_abi())); + // Vyper does not provide metadata for bytecode. + assert_matches!(identifier.detected_metadata, None); } #[tokio::test] @@ -469,6 +544,30 @@ async fn using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker, toolchai compiler.compile(input).await.unwrap() } }; + let identifier = ContractIdentifier::from_bytecode(bytecode_kind, output.deployed_bytecode()); + + match (bytecode_kind, toolchain) { + (BytecodeMarker::Evm, Toolchain::Vyper) => { + assert!( + identifier.detected_metadata.is_none(), + "No metadata for EVM Vyper" + ); + } + (BytecodeMarker::Evm, Toolchain::Solidity) => { + assert_matches!( + identifier.detected_metadata, + Some(DetectedMetadata::Cbor), + "Cbor metadata for EVM Solidity by default" + ); + } + (BytecodeMarker::EraVm, _) => { + assert_matches!( + identifier.detected_metadata, + Some(DetectedMetadata::Keccak256), + "Keccak256 metadata for EraVM by default" + ); + } + } let pool = ConnectionPool::test_pool().await; let mut storage = pool.connection().await.unwrap(); @@ -505,7 +604,165 @@ async fn 
using_real_compiler_in_verifier(bytecode_kind: BytecodeMarker, toolchai let (_stop_sender, stop_receiver) = watch::channel(false); verifier.run(stop_receiver, Some(1)).await.unwrap(); - assert_request_success(&mut storage, request_id, address, &output.bytecode).await; + assert_request_success(&mut storage, request_id, address, &output.bytecode, &[]).await; +} + +#[test_casing(2, [false, true])] +#[tokio::test] +async fn using_zksolc_partial_match(use_cbor: bool) { + let (compiler_resolver, supported_compilers) = real_resolver!(); + + let mut req: VerificationIncomingRequest = VerificationIncomingRequest { + compiler_versions: supported_compilers + .clone() + .solc_for_api(BytecodeMarker::EraVm), + ..test_request(Address::repeat_byte(1), COUNTER_CONTRACT) + }; + let hash_type = if use_cbor { "ipfs" } else { "keccak256" }; + // We need to manually construct the input, since `SolSingleFile` doesn't let us specify metadata hash type. + // Note: prior to 1.5.7 field was named `bytecodeHash`. 
+ req.source_code_data = SourceCodeData::StandardJsonInput( + serde_json::json!({ + "language": "Solidity", + "sources": { + "Counter.sol": { + "content": COUNTER_CONTRACT, + }, + }, + "settings": { + "outputSelection": { + "*": { + "": [ "abi" ], + "*": [ "abi" ] + } + }, + "isSystem": false, + "forceEvmla": false, + "metadata": { + "hashType": hash_type + }, + "optimizer": { + "enabled": true + } + } + }) + .as_object() + .unwrap() + .clone(), + ); + let contract_name = req.contract_name.clone(); + let address = Address::repeat_byte(1); + let compiler = compiler_resolver + .resolve_zksolc(&supported_compilers.clone().zksolc()) + .await + .unwrap(); + let input_for_request = ZkSolc::build_input(req.clone()).unwrap(); + + let output_for_request = compiler.compile(input_for_request).await.unwrap(); + let identifier_for_request = ContractIdentifier::from_bytecode( + BytecodeMarker::EraVm, + output_for_request.deployed_bytecode(), + ); + + // Now prepare data for contract verification storage (with different metadata). + let compiler = compiler_resolver + .resolve_zksolc(&supported_compilers.zksolc()) + .await + .unwrap(); + let mut input_for_storage = ZkSolc::build_input(req.clone()).unwrap(); + // Change the source file name. + if let ZkSolcInput::StandardJson { + input, file_name, .. 
+ } = &mut input_for_storage + { + let source = input + .sources + .remove(&format!("{contract_name}.sol")) + .unwrap(); + let new_file_name = "random_name.sol".to_owned(); + input.sources.insert(new_file_name.clone(), source); + *file_name = new_file_name; + if use_cbor { + input.settings.other.as_object_mut().unwrap().insert( + "metadata".to_string(), + serde_json::json!({ "hashType": "ipfs"}), + ); + } + } else { + panic!("unexpected input: {input_for_storage:?}"); + } + + let output_for_storage = compiler.compile(input_for_storage).await.unwrap(); + let identifier_for_storage = ContractIdentifier::from_bytecode( + BytecodeMarker::EraVm, + output_for_storage.deployed_bytecode(), + ); + + assert_eq!( + identifier_for_request.matches(output_for_storage.deployed_bytecode()), + Match::Partial, + "must be a partial match (1)" + ); + assert_eq!( + identifier_for_storage.matches(output_for_request.deployed_bytecode()), + Match::Partial, + "must be a partial match (2)" + ); + if use_cbor { + assert_matches!( + identifier_for_request.detected_metadata, + Some(DetectedMetadata::Cbor) + ); + assert_matches!( + identifier_for_storage.detected_metadata, + Some(DetectedMetadata::Cbor) + ); + } else { + assert_matches!( + identifier_for_request.detected_metadata, + Some(DetectedMetadata::Keccak256) + ); + assert_matches!( + identifier_for_storage.detected_metadata, + Some(DetectedMetadata::Keccak256) + ); + } + + let pool = ConnectionPool::test_pool().await; + let mut storage = pool.connection().await.unwrap(); + prepare_storage(&mut storage).await; + mock_deployment( + &mut storage, + address, + output_for_storage.bytecode.clone(), + &[], + ) + .await; + let request_id = storage + .contract_verification_dal() + .add_contract_verification_request(&req) + .await + .unwrap(); + + let verifier = ContractVerifier::with_resolver( + Duration::from_secs(60), + pool.clone(), + Arc::new(compiler_resolver), + ) + .await + .unwrap(); + + let (_stop_sender, stop_receiver) = 
watch::channel(false); + verifier.run(stop_receiver, Some(1)).await.unwrap(); + + assert_request_success( + &mut storage, + request_id, + address, + &output_for_request.bytecode, + &[VerificationProblem::IncorrectMetadata], + ) + .await; } #[test_casing(2, BYTECODE_KINDS)] diff --git a/core/lib/da_client/src/lib.rs b/core/lib/da_client/src/lib.rs index 7e4a2643a259..dce9c0fa8d1b 100644 --- a/core/lib/da_client/src/lib.rs +++ b/core/lib/da_client/src/lib.rs @@ -23,6 +23,8 @@ pub trait DataAvailabilityClient: Sync + Send + fmt::Debug { /// Returns the maximum size of the blob (in bytes) that can be dispatched. None means no limit. fn blob_size_limit(&self) -> Option; + + async fn balance(&self) -> Result; } impl Clone for Box { diff --git a/core/lib/dal/.sqlx/query-1823e1ac602ce4ba1db06543af9cb2685cda1ae7ecca83062ede7320c3b4a427.json b/core/lib/dal/.sqlx/query-1823e1ac602ce4ba1db06543af9cb2685cda1ae7ecca83062ede7320c3b4a427.json deleted file mode 100644 index 1e20a9151b98..000000000000 --- a/core/lib/dal/.sqlx/query-1823e1ac602ce4ba1db06543af9cb2685cda1ae7ecca83062ede7320c3b4a427.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n contracts_verification_info (address, verification_info)\n VALUES\n ($1, $2)\n ON CONFLICT (address) DO\n UPDATE\n SET\n verification_info = $2\n ", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Bytea", - "Jsonb" - ] - }, - "nullable": [] - }, - "hash": "1823e1ac602ce4ba1db06543af9cb2685cda1ae7ecca83062ede7320c3b4a427" -} diff --git a/core/lib/dal/.sqlx/query-2a2083fd04ebd006eb0aa4e0e5f62f3339768a85aaff9a509901e9f42b09097b.json b/core/lib/dal/.sqlx/query-2a2083fd04ebd006eb0aa4e0e5f62f3339768a85aaff9a509901e9f42b09097b.json deleted file mode 100644 index a713616d582c..000000000000 --- a/core/lib/dal/.sqlx/query-2a2083fd04ebd006eb0aa4e0e5f62f3339768a85aaff9a509901e9f42b09097b.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n number,\n 
pubdata_input\n FROM\n l1_batches\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND data_availability.blob_id IS NULL\n AND pubdata_input IS NOT NULL\n ORDER BY\n number\n LIMIT\n $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "number", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "pubdata_input", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false, - true - ] - }, - "hash": "2a2083fd04ebd006eb0aa4e0e5f62f3339768a85aaff9a509901e9f42b09097b" -} diff --git a/core/lib/dal/.sqlx/query-2d0c2e9ec4187641baef8a33229bffc78d92adb3c1e3ca60b12163e38c67047e.json b/core/lib/dal/.sqlx/query-2d0c2e9ec4187641baef8a33229bffc78d92adb3c1e3ca60b12163e38c67047e.json deleted file mode 100644 index f61f39e3b0b0..000000000000 --- a/core/lib/dal/.sqlx/query-2d0c2e9ec4187641baef8a33229bffc78d92adb3c1e3ca60b12163e38c67047e.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n COUNT(*) AS \"count!\"\n FROM\n contracts_verification_info\n WHERE\n address = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "count!", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - null - ] - }, - "hash": "2d0c2e9ec4187641baef8a33229bffc78d92adb3c1e3ca60b12163e38c67047e" -} diff --git a/core/lib/dal/.sqlx/query-349d41c8ce192e82152e9d254c23ed5f1e6eac5b71232c784abd4d4cd8677805.json b/core/lib/dal/.sqlx/query-349d41c8ce192e82152e9d254c23ed5f1e6eac5b71232c784abd4d4cd8677805.json new file mode 100644 index 000000000000..2de71e3d1ab5 --- /dev/null +++ b/core/lib/dal/.sqlx/query-349d41c8ce192e82152e9d254c23ed5f1e6eac5b71232c784abd4d4cd8677805.json @@ -0,0 +1,17 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n contract_verification_info_v2 (\n initial_contract_addr,\n bytecode_keccak256,\n 
bytecode_without_metadata_keccak256,\n verification_info\n )\n VALUES\n ($1, $2, $3, $4)\n ON CONFLICT (initial_contract_addr) DO\n UPDATE\n SET\n bytecode_keccak256 = $2,\n bytecode_without_metadata_keccak256 = $3,\n verification_info = $4\n ", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Bytea", + "Jsonb" + ] + }, + "nullable": [] + }, + "hash": "349d41c8ce192e82152e9d254c23ed5f1e6eac5b71232c784abd4d4cd8677805" +} diff --git a/core/lib/dal/.sqlx/query-668cf72b78c6071340143ba9498046b7820e39b63ed4f98bcaa3b3f305cbe576.json b/core/lib/dal/.sqlx/query-668cf72b78c6071340143ba9498046b7820e39b63ed4f98bcaa3b3f305cbe576.json new file mode 100644 index 000000000000..9dffecdc4c19 --- /dev/null +++ b/core/lib/dal/.sqlx/query-668cf72b78c6071340143ba9498046b7820e39b63ed4f98bcaa3b3f305cbe576.json @@ -0,0 +1,26 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n (SELECT COUNT(*) FROM contracts_verification_info) AS count_v1,\n (SELECT COUNT(*) FROM contract_verification_info_v2) AS count_v2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count_v1", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "count_v2", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null, + null + ] + }, + "hash": "668cf72b78c6071340143ba9498046b7820e39b63ed4f98bcaa3b3f305cbe576" +} diff --git a/core/lib/dal/.sqlx/query-6cb50a8fbe1341ba7ea496bb0f2072dcee6e6f8439e6b43eebd6df5563a4d0b9.json b/core/lib/dal/.sqlx/query-6cb50a8fbe1341ba7ea496bb0f2072dcee6e6f8439e6b43eebd6df5563a4d0b9.json new file mode 100644 index 000000000000..a78dcf480064 --- /dev/null +++ b/core/lib/dal/.sqlx/query-6cb50a8fbe1341ba7ea496bb0f2072dcee6e6f8439e6b43eebd6df5563a4d0b9.json @@ -0,0 +1,35 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n verification_info,\n bytecode_keccak256,\n bytecode_without_metadata_keccak256\n FROM\n contract_verification_info_v2\n WHERE\n bytecode_keccak256 = $1\n OR\n (\n 
bytecode_without_metadata_keccak256 IS NOT null\n AND bytecode_without_metadata_keccak256 = $2\n )\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "verification_info", + "type_info": "Jsonb" + }, + { + "ordinal": 1, + "name": "bytecode_keccak256", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "bytecode_without_metadata_keccak256", + "type_info": "Bytea" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea" + ] + }, + "nullable": [ + false, + false, + false + ] + }, + "hash": "6cb50a8fbe1341ba7ea496bb0f2072dcee6e6f8439e6b43eebd6df5563a4d0b9" +} diff --git a/core/lib/dal/.sqlx/query-a331b209eafd82595ad75e24135989ff52100e6a93537c35961e62268d7cd26e.json b/core/lib/dal/.sqlx/query-a331b209eafd82595ad75e24135989ff52100e6a93537c35961e62268d7cd26e.json new file mode 100644 index 000000000000..7d2121d708f3 --- /dev/null +++ b/core/lib/dal/.sqlx/query-a331b209eafd82595ad75e24135989ff52100e6a93537c35961e62268d7cd26e.json @@ -0,0 +1,29 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n address,\n verification_info::text AS verification_info\n FROM\n contracts_verification_info\n WHERE address > $1\n ORDER BY\n address\n LIMIT $2\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "address", + "type_info": "Bytea" + }, + { + "ordinal": 1, + "name": "verification_info", + "type_info": "Text" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Int8" + ] + }, + "nullable": [ + false, + null + ] + }, + "hash": "a331b209eafd82595ad75e24135989ff52100e6a93537c35961e62268d7cd26e" +} diff --git a/core/lib/dal/.sqlx/query-ca2c63a0e25406eec4e92a7cfda80a651e28abb3dd3582211ceb6b2bb8009258.json b/core/lib/dal/.sqlx/query-ca2c63a0e25406eec4e92a7cfda80a651e28abb3dd3582211ceb6b2bb8009258.json new file mode 100644 index 000000000000..2708c5b78701 --- /dev/null +++ b/core/lib/dal/.sqlx/query-ca2c63a0e25406eec4e92a7cfda80a651e28abb3dd3582211ceb6b2bb8009258.json @@ -0,0 +1,20 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n COUNT(*)\n 
FROM\n contract_verification_info_v2 v2\n JOIN contracts_verification_info v1 ON initial_contract_addr = address\n WHERE v1.verification_info::text != v2.verification_info::text\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "count", + "type_info": "Int8" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null + ] + }, + "hash": "ca2c63a0e25406eec4e92a7cfda80a651e28abb3dd3582211ceb6b2bb8009258" +} diff --git a/core/lib/dal/.sqlx/query-daa2ad4ebde17808e059aa6bcf148743ffed494316750be3b0ffb10b2fc09e93.json b/core/lib/dal/.sqlx/query-daa2ad4ebde17808e059aa6bcf148743ffed494316750be3b0ffb10b2fc09e93.json new file mode 100644 index 000000000000..fd279a570be9 --- /dev/null +++ b/core/lib/dal/.sqlx/query-daa2ad4ebde17808e059aa6bcf148743ffed494316750be3b0ffb10b2fc09e93.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n verification_info\n FROM\n contract_verification_info_v2\n WHERE\n initial_contract_addr = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "verification_info", + "type_info": "Jsonb" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + false + ] + }, + "hash": "daa2ad4ebde17808e059aa6bcf148743ffed494316750be3b0ffb10b2fc09e93" +} diff --git a/core/lib/dal/.sqlx/query-f2eeb448a856b9e57bcc2a724791fb0ee6299fddc9f89cf70c5b69c7182f0a54.json b/core/lib/dal/.sqlx/query-f2eeb448a856b9e57bcc2a724791fb0ee6299fddc9f89cf70c5b69c7182f0a54.json new file mode 100644 index 000000000000..bba0056a5ffe --- /dev/null +++ b/core/lib/dal/.sqlx/query-f2eeb448a856b9e57bcc2a724791fb0ee6299fddc9f89cf70c5b69c7182f0a54.json @@ -0,0 +1,34 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n number,\n pubdata_input,\n sealed_at\n FROM\n l1_batches\n LEFT JOIN\n data_availability\n ON data_availability.l1_batch_number = l1_batches.number\n WHERE\n eth_commit_tx_id IS NULL\n AND number != 0\n AND data_availability.blob_id IS NULL\n AND pubdata_input IS NOT NULL\n AND sealed_at IS NOT NULL\n 
ORDER BY\n number\n LIMIT\n $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "number", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "pubdata_input", + "type_info": "Bytea" + }, + { + "ordinal": 2, + "name": "sealed_at", + "type_info": "Timestamp" + } + ], + "parameters": { + "Left": [ + "Int8" + ] + }, + "nullable": [ + false, + true, + true + ] + }, + "hash": "f2eeb448a856b9e57bcc2a724791fb0ee6299fddc9f89cf70c5b69c7182f0a54" +} diff --git a/core/lib/dal/Cargo.toml b/core/lib/dal/Cargo.toml index 4b093dd181bb..80cd955ff298 100644 --- a/core/lib/dal/Cargo.toml +++ b/core/lib/dal/Cargo.toml @@ -53,6 +53,7 @@ hex.workspace = true strum = { workspace = true, features = ["derive"] } tracing.workspace = true chrono = { workspace = true, features = ["serde"] } +rayon.workspace = true [dev-dependencies] zksync_test_contracts.workspace = true diff --git a/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.down.sql b/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.down.sql new file mode 100644 index 000000000000..03c94eab5f61 --- /dev/null +++ b/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.down.sql @@ -0,0 +1,3 @@ +DROP INDEX IF EXISTS contract_verification_info_v2_bytecode_keccak256_idx; +DROP INDEX IF EXISTS contract_verification_info_v2_bytecode_without_metadata_keccak256_idx; +DROP TABLE IF EXISTS contract_verification_info_v2; diff --git a/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.up.sql b/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.up.sql new file mode 100644 index 000000000000..c7eee9063221 --- /dev/null +++ b/core/lib/dal/migrations/20250122102800_contract-verifier-new-schema.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE IF NOT EXISTS contract_verification_info_v2 ( + initial_contract_addr BYTEA NOT NULL PRIMARY KEY, + bytecode_keccak256 BYTEA NOT NULL, + bytecode_without_metadata_keccak256 BYTEA NOT NULL, + verification_info JSONB NOT 
NULL, + + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP +); + +-- Add hash indexes for hash columns +CREATE INDEX IF NOT EXISTS contract_verification_info_v2_bytecode_keccak256_idx ON contract_verification_info_v2 (bytecode_keccak256); +CREATE INDEX IF NOT EXISTS contract_verification_info_v2_bytecode_without_metadata_keccak256_idx ON contract_verification_info_v2 (bytecode_without_metadata_keccak256); diff --git a/core/lib/dal/src/contract_verification_dal.rs b/core/lib/dal/src/contract_verification_dal.rs index 57bea5392cf8..9125f972c55b 100644 --- a/core/lib/dal/src/contract_verification_dal.rs +++ b/core/lib/dal/src/contract_verification_dal.rs @@ -5,13 +5,20 @@ use std::{ time::Duration, }; +use rayon::prelude::*; use sqlx::postgres::types::PgInterval; -use zksync_db_connection::{error::SqlxContext, instrument::InstrumentExt}; +use zksync_db_connection::{ + error::SqlxContext, + instrument::{CopyStatement, InstrumentExt}, +}; use zksync_types::{ address_to_h256, - contract_verification_api::{ - VerificationIncomingRequest, VerificationInfo, VerificationRequest, - VerificationRequestStatus, + contract_verification::{ + api::{ + VerificationIncomingRequest, VerificationInfo, VerificationRequest, + VerificationRequestStatus, + }, + contract_identifier::ContractIdentifier, }, web3, Address, CONTRACT_DEPLOYER_ADDRESS, H256, }; @@ -188,6 +195,8 @@ impl ContractVerificationDal<'_, '_> { pub async fn save_verification_info( &mut self, verification_info: VerificationInfo, + bytecode_keccak256: H256, + bytecode_without_metadata_keccak256: H256, ) -> DalResult<()> { let mut transaction = self.storage.start_transaction().await?; let id = verification_info.request.id; @@ -216,15 +225,24 @@ impl ContractVerificationDal<'_, '_> { sqlx::query!( r#" INSERT INTO - contracts_verification_info (address, verification_info) + contract_verification_info_v2 ( + initial_contract_addr, + bytecode_keccak256, + 
bytecode_without_metadata_keccak256, + verification_info + ) VALUES - ($1, $2) - ON CONFLICT (address) DO + ($1, $2, $3, $4) + ON CONFLICT (initial_contract_addr) DO UPDATE SET - verification_info = $2 + bytecode_keccak256 = $2, + bytecode_without_metadata_keccak256 = $3, + verification_info = $4 "#, address.as_bytes(), + bytecode_keccak256.as_bytes(), + bytecode_without_metadata_keccak256.as_bytes(), &verification_info_json ) .instrument("save_verification_info#insert") @@ -376,27 +394,6 @@ impl ContractVerificationDal<'_, '_> { .await } - /// Returns true if the contract has a stored contracts_verification_info. - pub async fn is_contract_verified(&mut self, address: Address) -> DalResult { - let count = sqlx::query!( - r#" - SELECT - COUNT(*) AS "count!" - FROM - contracts_verification_info - WHERE - address = $1 - "#, - address.as_bytes() - ) - .instrument("is_contract_verified") - .with_arg("address", &address) - .fetch_one(self.storage) - .await? - .count; - Ok(count > 0) - } - async fn get_compiler_versions(&mut self, compiler: Compiler) -> DalResult> { let compiler = format!("{compiler}"); let versions: Vec<_> = sqlx::query!( @@ -537,6 +534,29 @@ impl ContractVerificationDal<'_, '_> { pub async fn get_contract_verification_info( &mut self, address: Address, + ) -> anyhow::Result> { + // Do everything in a read-only transaction for a consistent view. + let mut transaction = self + .storage + .transaction_builder()? + .set_readonly() + .build() + .await?; + + let mut dal = ContractVerificationDal { + storage: &mut transaction, + }; + let info = if dal.is_verification_info_migration_performed().await? { + dal.get_contract_verification_info_v2(address).await? + } else { + dal.get_contract_verification_info_v1(address).await? + }; + Ok(info) + } + + async fn get_contract_verification_info_v1( + &mut self, + address: Address, ) -> DalResult> { Ok(sqlx::query!( r#" @@ -560,6 +580,236 @@ impl ContractVerificationDal<'_, '_> { .await? 
.flatten()) } + + async fn get_contract_verification_info_v2( + &mut self, + address: Address, + ) -> anyhow::Result> { + Ok(sqlx::query!( + r#" + SELECT + verification_info + FROM + contract_verification_info_v2 + WHERE + initial_contract_addr = $1 + "#, + address.as_bytes(), + ) + .try_map(|row| { + serde_json::from_value(row.verification_info).decode_column("verification_info") + }) + .instrument("get_contract_verification_info_v2") + .with_arg("address", &address) + .fetch_optional(self.storage) + .await? + .flatten()) + } + + pub async fn get_partial_match_verification_info( + &mut self, + bytecode_keccak256: H256, + bytecode_without_metadata_keccak256: H256, + ) -> DalResult> { + sqlx::query!( + r#" + SELECT + verification_info, + bytecode_keccak256, + bytecode_without_metadata_keccak256 + FROM + contract_verification_info_v2 + WHERE + bytecode_keccak256 = $1 + OR + ( + bytecode_without_metadata_keccak256 IS NOT null + AND bytecode_without_metadata_keccak256 = $2 + ) + "#, + bytecode_keccak256.as_bytes(), + bytecode_without_metadata_keccak256.as_bytes() + ) + .try_map(|row| { + let info = serde_json::from_value::(row.verification_info) + .decode_column("verification_info")?; + let bytecode_keccak256 = H256::from_slice(&row.bytecode_keccak256); + let bytecode_without_metadata_keccak256 = + H256::from_slice(&row.bytecode_without_metadata_keccak256); + Ok(( + info, + bytecode_keccak256, + bytecode_without_metadata_keccak256, + )) + }) + .instrument("get_partial_match_verification_info") + .with_arg("bytecode_keccak256", &bytecode_keccak256) + .with_arg( + "bytecode_without_metadata_keccak256", + &bytecode_without_metadata_keccak256, + ) + .fetch_optional(self.storage) + .await + } + + /// Checks if migration from `contracts_verification_info` to `contract_verification_info_v2` is performed + /// by checking if the latter has more or equal number of rows. 
+ pub async fn is_verification_info_migration_performed(&mut self) -> DalResult { + let row = sqlx::query!( + r#" + SELECT + (SELECT COUNT(*) FROM contracts_verification_info) AS count_v1, + (SELECT COUNT(*) FROM contract_verification_info_v2) AS count_v2 + "#, + ) + .instrument("is_verification_info_migration_performed") + .fetch_one(self.storage) + .await?; + + Ok(row.count_v2 >= row.count_v1) + } + + pub async fn perform_verification_info_migration( + &mut self, + batch_size: usize, + ) -> anyhow::Result<()> { + // We use a long-running transaction, since the migration is one-time and during it + // no writes are expected to the tables, so locked rows are not a problem. + let mut transaction = self.storage.start_transaction().await?; + + // Offset is a number of already migrated contracts. + let mut offset = 0usize; + let mut cursor = vec![]; + loop { + let cursor_str = format!("0x{}", hex::encode(&cursor)); + + // Fetch JSON as text to avoid roundtrip through `serde_json::Value`, as it's super slow. + let (addresses, verification_infos): (Vec>, Vec) = sqlx::query!( + r#" + SELECT + address, + verification_info::text AS verification_info + FROM + contracts_verification_info + WHERE address > $1 + ORDER BY + address + LIMIT $2 + "#, + &cursor, + batch_size as i64, + ) + .instrument("perform_verification_info_migration#select") + .with_arg("cursor", &cursor_str) + .with_arg("batch_size", &batch_size) + .fetch_all(&mut transaction) + .await? 
+ .into_iter() + .filter_map(|row| row.verification_info.map(|info| (row.address, info))) + .collect(); + + if addresses.is_empty() { + tracing::info!("No more contracts to process"); + break; + } + + tracing::info!( + "Processing {} contracts (processed: {offset}); cursor {cursor_str}", + addresses.len() + ); + + let ids: Vec = (0..addresses.len()) + .into_par_iter() + .map(|idx| { + let address = &addresses[idx]; + let info_json = &verification_infos[idx]; + let verification_info = serde_json::from_str::(info_json) + .unwrap_or_else(|err| { + panic!( + "Malformed data in DB, address {}, data: {info_json}, error: {err}", + hex::encode(address) + ); + }); + ContractIdentifier::from_bytecode( + verification_info.bytecode_marker(), + verification_info.artifacts.deployed_bytecode(), + ) + }) + .collect(); + + let now = chrono::Utc::now().naive_utc().to_string(); + let mut buffer = String::new(); + for idx in 0..addresses.len() { + let address = hex::encode(&addresses[idx]); + let bytecode_keccak256 = hex::encode(ids[idx].bytecode_keccak256); + let bytecode_without_metadata_keccak256 = + hex::encode(ids[idx].bytecode_without_metadata_keccak256); + let verification_info = verification_infos[idx].replace('"', r#""""#); + + let row = format!( + r#"\\x{initial_contract_addr},\\x{bytecode_keccak256},\\x{bytecode_without_metadata_keccak256},"{verification_info}",{created_at},{updated_at}"#, + initial_contract_addr = address, + bytecode_keccak256 = bytecode_keccak256, + bytecode_without_metadata_keccak256 = bytecode_without_metadata_keccak256, + verification_info = verification_info, + created_at = now, + updated_at = now + ); + buffer.push_str(&row); + buffer.push('\n'); + } + + let contracts_len = addresses.len(); + let copy = CopyStatement::new( + "COPY contract_verification_info_v2( + initial_contract_addr, + bytecode_keccak256, + bytecode_without_metadata_keccak256, + verification_info, + created_at, + updated_at + ) FROM STDIN (FORMAT CSV, NULL 'null', DELIMITER ',')", 
+ ) + .instrument("perform_verification_info_migration#copy") + .with_arg("cursor", &cursor_str) + .with_arg("contracts.len", &contracts_len) + .start(&mut transaction) + .await?; + + copy.send(buffer.as_bytes()).await?; + + offset += batch_size; + cursor = addresses.last().unwrap().clone(); + } + + // Sanity check. + tracing::info!("All the rows are migrated, verifying the migration"); + let count_unequal = sqlx::query!( + r#" + SELECT + COUNT(*) + FROM + contract_verification_info_v2 v2 + JOIN contracts_verification_info v1 ON initial_contract_addr = address + WHERE v1.verification_info::text != v2.verification_info::text + "#, + ) + .instrument("is_verification_info_migration_performed") + .fetch_one(&mut transaction) + .await? + .count + .unwrap(); + if count_unequal > 0 { + anyhow::bail!( + "Migration failed: {} rows have different data in the new table", + count_unequal + ); + } + + tracing::info!("Migration is successful, committing the transaction"); + transaction.commit().await?; + Ok(()) + } } #[cfg(test)] @@ -568,7 +818,7 @@ mod tests { use zksync_types::{ bytecode::BytecodeHash, - contract_verification_api::{CompilerVersions, SourceCodeData}, + contract_verification::api::{CompilerVersions, SourceCodeData}, tx::IncludedTxLocation, Execute, L1BatchNumber, L2BlockNumber, ProtocolVersion, }; diff --git a/core/lib/dal/src/data_availability_dal.rs b/core/lib/dal/src/data_availability_dal.rs index 41dd7efe2732..8503cc21f283 100644 --- a/core/lib/dal/src/data_availability_dal.rs +++ b/core/lib/dal/src/data_availability_dal.rs @@ -184,7 +184,8 @@ impl DataAvailabilityDal<'_, '_> { r#" SELECT number, - pubdata_input + pubdata_input, + sealed_at FROM l1_batches LEFT JOIN @@ -195,6 +196,7 @@ impl DataAvailabilityDal<'_, '_> { AND number != 0 AND data_availability.blob_id IS NULL AND pubdata_input IS NOT NULL + AND sealed_at IS NOT NULL ORDER BY number LIMIT @@ -213,6 +215,7 @@ impl DataAvailabilityDal<'_, '_> { // `unwrap` is safe here because we have a `WHERE` 
clause that filters out `NULL` values pubdata: row.pubdata_input.unwrap(), l1_batch_number: L1BatchNumber(row.number as u32), + sealed_at: row.sealed_at.unwrap().and_utc(), }) .collect()) } diff --git a/core/lib/dal/src/models/storage_data_availability.rs b/core/lib/dal/src/models/storage_data_availability.rs index 2a1b39845e69..cfdbde5ac3a8 100644 --- a/core/lib/dal/src/models/storage_data_availability.rs +++ b/core/lib/dal/src/models/storage_data_availability.rs @@ -1,4 +1,4 @@ -use chrono::NaiveDateTime; +use chrono::{DateTime, NaiveDateTime, Utc}; use zksync_types::{pubdata_da::DataAvailabilityBlob, L1BatchNumber}; /// Represents a blob in the data availability layer. @@ -26,4 +26,5 @@ impl From for DataAvailabilityBlob { pub struct L1BatchDA { pub pubdata: Vec, pub l1_batch_number: L1BatchNumber, + pub sealed_at: DateTime, } diff --git a/core/lib/dal/src/models/storage_verification_request.rs b/core/lib/dal/src/models/storage_verification_request.rs index ae4718e41290..1ea70ed38129 100644 --- a/core/lib/dal/src/models/storage_verification_request.rs +++ b/core/lib/dal/src/models/storage_verification_request.rs @@ -1,5 +1,5 @@ use zksync_types::{ - contract_verification_api::{ + contract_verification::api::{ CompilerType, CompilerVersions, SourceCodeData, VerificationIncomingRequest, VerificationRequest, }, diff --git a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs index 9dac6480dc57..7e16382861c5 100644 --- a/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs +++ b/core/lib/multivm/src/versions/vm_latest/old_vm/history_recorder.rs @@ -820,8 +820,12 @@ impl HistoryRecorder { self.apply_historic_record(StorageHistoryRecord { key, value }, timestamp) } - pub(crate) fn drain_inner(&mut self) -> Vec<(StorageKey, U256)> { - self.inner.inner.drain().collect() + pub(crate) fn clone_vec(&mut self) -> Vec<(StorageKey, U256)> { + self.inner + .inner + .iter() + 
.map(|(key, value)| (*key, *value)) + .collect() } } diff --git a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs index 242cdc6a2239..5c8fe1d2a97f 100644 --- a/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs +++ b/core/lib/multivm/src/versions/vm_latest/oracles/storage.rs @@ -576,7 +576,7 @@ impl VmStorageOracle for StorageOracle { // Note that while the history is preserved, the inner parts are fully cleared out. // TODO(X): potentially optimize this function by allowing rollbacks only at the bounds of transactions. - let current_active_keys = self.transient_storage.drain_inner(); + let current_active_keys = self.transient_storage.clone_vec(); for (key, current_value) in current_active_keys { self.write_transient_storage_value(ReducedTstoreLogQuery { // We currently only support rollup shard id diff --git a/core/lib/types/Cargo.toml b/core/lib/types/Cargo.toml index 6af0e39d14f0..f4eeebfce038 100644 --- a/core/lib/types/Cargo.toml +++ b/core/lib/types/Cargo.toml @@ -29,6 +29,7 @@ rlp.workspace = true serde.workspace = true serde_json.workspace = true serde_with = { workspace = true, features = ["hex"] } +ciborium.workspace = true bigdecimal.workspace = true strum = { workspace = true, features = ["derive"] } thiserror.workspace = true diff --git a/core/lib/types/src/contract_verification_api.rs b/core/lib/types/src/contract_verification/api.rs similarity index 92% rename from core/lib/types/src/contract_verification_api.rs rename to core/lib/types/src/contract_verification/api.rs index cca5ae5a83a0..594596070d9f 100644 --- a/core/lib/types/src/contract_verification_api.rs +++ b/core/lib/types/src/contract_verification/api.rs @@ -5,6 +5,7 @@ use serde::{ de::{Deserializer, Error, MapAccess, Unexpected, Visitor}, Deserialize, Serialize, }; +use zksync_basic_types::bytecode::BytecodeMarker; pub use crate::Execute as ExecuteData; use crate::{web3::Bytes, Address}; @@ -232,12 +233,37 @@ 
impl CompilationArtifacts { } } +/// Non-critical issues detected during verification. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] +#[serde(rename_all = "camelCase")] +pub enum VerificationProblem { + /// The bytecode is correct, but metadata hash is different. + IncorrectMetadata, +} + #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct VerificationInfo { pub request: VerificationRequest, pub artifacts: CompilationArtifacts, pub verified_at: DateTime, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub verification_problems: Vec, +} + +impl VerificationInfo { + pub fn is_perfect_match(&self) -> bool { + self.verification_problems.is_empty() + } + + pub fn bytecode_marker(&self) -> BytecodeMarker { + // Deployed bytecode is only present for EVM contracts. + if self.artifacts.deployed_bytecode.is_some() { + BytecodeMarker::Evm + } else { + BytecodeMarker::EraVm + } + } } #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/core/lib/types/src/contract_verification/contract_identifier.rs b/core/lib/types/src/contract_verification/contract_identifier.rs new file mode 100644 index 000000000000..354ddeaa0e29 --- /dev/null +++ b/core/lib/types/src/contract_verification/contract_identifier.rs @@ -0,0 +1,396 @@ +use serde::{Deserialize, Serialize}; + +use crate::{bytecode::BytecodeMarker, web3::keccak256, H256}; + +/// An identifier of the contract bytecode. +/// This identifier can be used to detect different contracts that share the same sources, +/// even if they differ in bytecode verbatim (e.g. if the contract metadata is different). +/// +/// Identifier depends on the marker of the bytecode of the contract. +/// This might be important, since the metadata can be different for EVM and EraVM, +/// e.g. `zksolc` [supports][zksolc_keccak] keccak256 hash of the metadata as an alternative to CBOR. 
+/// +/// [zksolc_keccak]: https://matter-labs.github.io/era-compiler-solidity/latest/02-command-line-interface.html#--metadata-hash +// Note: there are missing opportunities here, e.g. Etherscan is able to detect the contracts +// that differ in creation bytecode and/or constructor arguments (for partial match). This is +// less relevant for ZKsync, since there is no concept of creation bytecode there; although +// this may become needed if we will extend the EVM support. +#[derive(Debug, Clone, Copy)] +pub struct ContractIdentifier { + /// Marker of the bytecode of the contract. + pub bytecode_marker: BytecodeMarker, + /// keccak256 hash of the full contract bytecode. + /// Can be used as an identifier of precise contract compilation. + pub bytecode_keccak256: H256, + /// keccak256 hash of the contract bytecode without metadata (e.g. with either + /// CBOR or keccak256 metadata hash being stripped). + /// If no metadata is detected, equal to `bytecode_keccak256`. + pub bytecode_without_metadata_keccak256: H256, + /// Kind of detected metadata. + pub detected_metadata: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum Match { + /// Contracts are identical. + Full, + /// Metadata is different. + Partial, + /// No match. + None, +} + +/// Metadata detected in the contract bytecode. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum DetectedMetadata { + /// keccak256 metadata (only for EraVM) + Keccak256, + /// CBOR metadata + Cbor, +} + +/// Possible values for the metadata hashes structure. +/// Details can be found here: https://docs.soliditylang.org/en/latest/metadata.html +/// +/// We're not really interested in the values here, we just want to make sure that we +/// can deserialize the metadata. 
+#[derive(Debug, Clone, Serialize, Deserialize, Default)] +struct CborMetadata { + #[serde(skip_serializing_if = "Option::is_none")] + ipfs: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + bzzr1: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + bzzr0: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + experimental: Option, + #[serde(skip_serializing_if = "Option::is_none")] + solc: Option>, +} + +impl ContractIdentifier { + pub fn from_bytecode(bytecode_marker: BytecodeMarker, bytecode: &[u8]) -> Self { + // Calculate the hash for bytecode with metadata. + let bytecode_keccak256 = H256(keccak256(bytecode)); + + // Try to detect metadata. + // CBOR takes precedence (since keccak doesn't have direct markers, so it's partially a + // fallback). + let (detected_metadata, bytecode_without_metadata_keccak256) = + if let Some(hash) = Self::detect_cbor_metadata(bytecode_marker, bytecode) { + (Some(DetectedMetadata::Cbor), hash) + } else if let Some(hash) = Self::detect_keccak_metadata(bytecode_marker, bytecode) { + (Some(DetectedMetadata::Keccak256), hash) + } else { + // Fallback + (None, bytecode_keccak256) + }; + + Self { + bytecode_marker, + bytecode_keccak256, + bytecode_without_metadata_keccak256, + detected_metadata, + } + } + + /// Will try to detect keccak256 metadata hash (only for EraVM) + fn detect_keccak_metadata(bytecode_marker: BytecodeMarker, bytecode: &[u8]) -> Option { + // For EraVM, the one option for metadata hash is keccak256 hash of the metadata. + if bytecode_marker == BytecodeMarker::EraVm { + // For metadata, we might have padding: it takes either 32 or 64 bytes depending + // on whether the amount of words in the contract is odd, so we need to check + // if there is padding. + let bytecode_without_metadata = Self::strip_padding(bytecode, 32)?; + let hash = H256(keccak256(bytecode_without_metadata)); + Some(hash) + } else { + None + } + } + + /// Will try to detect CBOR metadata. 
+ fn detect_cbor_metadata(bytecode_marker: BytecodeMarker, bytecode: &[u8]) -> Option { + let length = bytecode.len(); + + // Last two bytes is the length of the metadata in big endian. + if length < 2 { + return None; + } + let metadata_length = + u16::from_be_bytes([bytecode[length - 2], bytecode[length - 1]]) as usize; + // Including size + let full_metadata_length = metadata_length + 2; + + // Get slice for the metadata. + if length < full_metadata_length { + return None; + } + let raw_metadata = &bytecode[length - full_metadata_length..length - 2]; + // Try decoding. We are not interested in the actual value. + let _metadata: CborMetadata = match ciborium::from_reader(raw_metadata) { + Ok(metadata) => metadata, + Err(_) => return None, + }; + + // Strip metadata and calculate hash. + let bytecode_without_metadata = match bytecode_marker { + BytecodeMarker::Evm => { + // On EVM, there is no padding. + &bytecode[..length - full_metadata_length] + } + BytecodeMarker::EraVm => { + // On EraVM, there is padding: + // 1. We must align the metadata length to 32 bytes. + // 2. We may need to add 32 bytes of padding. + let aligned_metadata_length = metadata_length.div_ceil(32) * 32; + Self::strip_padding(bytecode, aligned_metadata_length)? + } + }; + let hash = H256(keccak256(bytecode_without_metadata)); + Some(hash) + } + + /// Adds one word to the metadata length and check if it's a padding word. + /// If it is, strips the padding. + /// Returns `None` if `metadata_length` + padding won't fit into the bytecode. + fn strip_padding(bytecode: &[u8], metadata_length: usize) -> Option<&[u8]> { + const PADDING_WORD: [u8; 32] = [0u8; 32]; + + let length = bytecode.len(); + let metadata_with_padding_length = metadata_length + 32; + if length < metadata_with_padding_length { + return None; + } + if bytecode[length - metadata_with_padding_length..length - metadata_length] == PADDING_WORD + { + // Padding was added, strip it. 
+ Some(&bytecode[..length - metadata_with_padding_length]) + } else { + // Padding wasn't added, strip metadata only. + Some(&bytecode[..length - metadata_length]) + } + } + + /// Checks the kind of match between identifier and other bytecode. + pub fn matches(&self, other: &[u8]) -> Match { + let other_identifier = Self::from_bytecode(self.bytecode_marker, other); + + if self.bytecode_keccak256 == other_identifier.bytecode_keccak256 { + return Match::Full; + } + + // Check if metadata is different. + // Note that here we do not handle "complex" cases, e.g. lack of metadata in one contract + // and presence in another, or different kinds of metadata. This is OK: partial + // match is needed mostly when you cannot reproduce the original metadata, but one always + // can submit the contract with the same metadata kind. + if self.bytecode_without_metadata_keccak256 + == other_identifier.bytecode_without_metadata_keccak256 + { + return Match::Partial; + } + + Match::None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn eravm_cbor_without_padding() { + // Sample contract with no methods, compiled from the root of monorepo with: + // ./etc/zksolc-bin/v1.5.8/zksolc --solc ./etc/solc-bin/zkVM-0.8.28-1.0.1/solc --metadata-hash ipfs --codegen yul test.sol --bin + // (Use `zkstack contract-verifier init` to download compilers) + let data = 
hex::decode("0000008003000039000000400030043f0000000100200190000000110000c13d0000000900100198000000190000613d000000000101043b0000000a011001970000000b0010009c000000190000c13d0000000001000416000000000001004b000000190000c13d000000000100041a000000800010043f0000000c010000410000001c0001042e0000000001000416000000000001004b000000190000c13d00000020010000390000010000100443000001200000044300000008010000410000001c0001042e00000000010000190000001d000104300000001b000004320000001c0001042e0000001d0001043000000000000000000000000000000000000000020000000000000000000000000000004000000100000000000000000000000000000000000000000000000000fffffffc000000000000000000000000ffffffff000000000000000000000000000000000000000000000000000000006d4ce63c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000008000000000000000000000000000000000000000000000000000000000a16469706673582212208acf048570dcc1c3ff41bf8f20376049a42ae8a471f2b2ae8c14d8b356d86d79002a").unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + let full_metadata_len = 64; // (CBOR metadata + len bytes) + let bytecode_without_metadata_keccak256 = + H256(keccak256(&data[..data.len() - full_metadata_len])); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, + Some(DetectedMetadata::Cbor), + "Incorrect detected metadata" + ); + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_without_metadata_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn eravm_cbor_with_padding() { + // Same as `eravm_cbor_without_padding` but now bytecode has padding. 
+ let data = hex::decode("00000001002001900000000c0000613d0000008001000039000000400010043f0000000001000416000000000001004b0000000c0000c13d00000020010000390000010000100443000001200000044300000005010000410000000f0001042e000000000100001900000010000104300000000e000004320000000f0001042e0000001000010430000000000000000000000000000000000000000000000000000000020000000000000000000000000000004000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a1646970667358221220d5be4da510b089bb58fa6c65f0a387eef966bcf48671a24fb2b1bc7190842978002a").unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + let full_metadata_len = 64 + 32; // (CBOR metadata + len bytes + padding) + let bytecode_without_metadata_keccak256 = + H256(keccak256(&data[..data.len() - full_metadata_len])); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, + Some(DetectedMetadata::Cbor), + "Incorrect detected metadata" + ); + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_without_metadata_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn eravm_keccak_without_padding() { + // Sample contract with no methods, compiled from the root of monorepo with: + // ./etc/zksolc-bin/v1.5.8/zksolc --solc ./etc/solc-bin/zkVM-0.8.28-1.0.1/solc --metadata-hash keccak256 --codegen yul test.sol --bin + // (Use `zkstack contract-verifier init` to download compilers) + let data = 
hex::decode("00000001002001900000000c0000613d0000008001000039000000400010043f0000000001000416000000000001004b0000000c0000c13d00000020010000390000010000100443000001200000044300000005010000410000000f0001042e000000000100001900000010000104300000000e000004320000000f0001042e000000100001043000000000000000000000000000000000000000000000000000000002000000000000000000000000000000400000010000000000000000000a00e4a5f19bb139176aa501024c7032404c065bc0012897fefd9ebc7e9a7677").unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + let full_metadata_len = 32; // (keccak only) + let bytecode_without_metadata_keccak256 = + H256(keccak256(&data[..data.len() - full_metadata_len])); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, + Some(DetectedMetadata::Keccak256), + "Incorrect detected metadata" + ); + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_without_metadata_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn eravm_keccak_with_padding() { + // Same as `eravm_keccak_without_padding`, but now bytecode has padding. 
+ let data = hex::decode("0000008003000039000000400030043f0000000100200190000000110000c13d0000000900100198000000190000613d000000000101043b0000000a011001970000000b0010009c000000190000c13d0000000001000416000000000001004b000000190000c13d000000000100041a000000800010043f0000000c010000410000001c0001042e0000000001000416000000000001004b000000190000c13d00000020010000390000010000100443000001200000044300000008010000410000001c0001042e00000000010000190000001d000104300000001b000004320000001c0001042e0000001d0001043000000000000000000000000000000000000000020000000000000000000000000000004000000100000000000000000000000000000000000000000000000000fffffffc000000000000000000000000ffffffff000000000000000000000000000000000000000000000000000000006d4ce63c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000080000000000000000000000000000000000000000000000000000000000000000000000000000000009b1f0a6172ae84051eca37db231c0fa6249349f4ddaf86a87474a587c19d946d").unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + let full_metadata_len = 64; // (keccak + padding) + let bytecode_without_metadata_keccak256 = + H256(keccak256(&data[..data.len() - full_metadata_len])); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, + Some(DetectedMetadata::Keccak256), + "Incorrect detected metadata" + ); + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_without_metadata_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn eravm_too_short_bytecode() { + // Random short bytecode + let data = hex::decode("0000008003000039000000400030043f0000000100200190000000110000c13d") + .unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::EraVm, &data); + assert_eq!( + 
identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, None, + "Incorrect detected metadata" + ); + // When no metadata is detected, `bytecode_without_metadata_keccak256` is equal to + // `bytecode_keccak256`. + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn evm_none() { + // Sample contract with no methods, compiled from the root of monorepo with: + // ./etc/solc-bin/0.8.28/solc test.sol --bin --no-cbor-metadata + // (Use `zkstack contract-verifier init` to download compilers) + let data = hex::decode("6080604052348015600e575f5ffd5b50607980601a5f395ff3fe6080604052348015600e575f5ffd5b50600436106026575f3560e01c80636d4ce63c14602a575b5f5ffd5b60306044565b604051603b91906062565b60405180910390f35b5f5f54905090565b5f819050919050565b605c81604c565b82525050565b5f60208201905060735f8301846055565b9291505056").unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::Evm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, None, + "Incorrect detected metadata" + ); + // When no metadata is detected, `bytecode_without_metadata_keccak256` is equal to + // `bytecode_keccak256`. 
+ assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_keccak256, + "Incorrect bytecode without metadata hash" + ); + } + + #[test] + fn evm_cbor() { + // ./etc/solc-bin/0.8.28/solc test.sol --bin --metadata-hash ipfs + let ipfs_bytecode = "6080604052348015600e575f5ffd5b5060af80601a5f395ff3fe6080604052348015600e575f5ffd5b50600436106026575f3560e01c80636d4ce63c14602a575b5f5ffd5b60306044565b604051603b91906062565b60405180910390f35b5f5f54905090565b5f819050919050565b605c81604c565b82525050565b5f60208201905060735f8301846055565b9291505056fea2646970667358221220bca846db362b62d2eb9891565b12433410e0f6a634657d2c7d1e7469447e8ab564736f6c634300081c0033"; + // ./etc/solc-bin/0.8.28/solc test.sol --bin --metadata-hash none + // Note that cbor will still be included but will only have solc version. + let none_bytecode = "6080604052348015600e575f5ffd5b50608680601a5f395ff3fe6080604052348015600e575f5ffd5b50600436106026575f3560e01c80636d4ce63c14602a575b5f5ffd5b60306044565b604051603b91906062565b60405180910390f35b5f5f54905090565b5f819050919050565b605c81604c565b82525050565b5f60208201905060735f8301846055565b9291505056fea164736f6c634300081c000a"; + // ./etc/solc-bin/0.8.28/solc test.sol --bin --metadata-hash swarm + let swarm_bytecode = "6080604052348015600e575f5ffd5b5060ae80601a5f395ff3fe6080604052348015600e575f5ffd5b50600436106026575f3560e01c80636d4ce63c14602a575b5f5ffd5b60306044565b604051603b91906062565b60405180910390f35b5f5f54905090565b5f819050919050565b605c81604c565b82525050565b5f60208201905060735f8301846055565b9291505056fea265627a7a72315820c0def30c57166e97d6a58290213f3b0d1f83532e7a0371c8e2b6dba826bae46164736f6c634300081c0032"; + + // Different variations of the same contract, compiled with different metadata options. + // Tuples of (label, bytecode, size of metadata (including length)). 
+ // Size of metadata can be found using https://playground.sourcify.dev/ + let test_vector = [ + ("ipfs", ipfs_bytecode, 51usize + 2), + ("none", none_bytecode, 10 + 2), + ("swarm", swarm_bytecode, 50 + 2), + ]; + + for (label, bytecode, full_metadata_len) in test_vector { + let data = hex::decode(bytecode).unwrap(); + let bytecode_keccak256 = H256(keccak256(&data)); + let bytecode_without_metadata_keccak256 = + H256(keccak256(&data[..data.len() - full_metadata_len])); + + let identifier = ContractIdentifier::from_bytecode(BytecodeMarker::Evm, &data); + assert_eq!( + identifier.bytecode_keccak256, bytecode_keccak256, + "{label}: Incorrect bytecode hash" + ); + assert_eq!( + identifier.detected_metadata, + Some(DetectedMetadata::Cbor), + "{label}: Incorrect detected metadata" + ); + assert_eq!( + identifier.bytecode_without_metadata_keccak256, bytecode_without_metadata_keccak256, + "{label}: Incorrect bytecode without metadata hash" + ); + } + } +} diff --git a/core/lib/types/src/contract_verification/mod.rs b/core/lib/types/src/contract_verification/mod.rs new file mode 100644 index 000000000000..7f8686941edb --- /dev/null +++ b/core/lib/types/src/contract_verification/mod.rs @@ -0,0 +1,2 @@ +pub mod api; +pub mod contract_identifier; diff --git a/core/lib/types/src/lib.rs b/core/lib/types/src/lib.rs index 8ec98ec0571e..606de9b9e798 100644 --- a/core/lib/types/src/lib.rs +++ b/core/lib/types/src/lib.rs @@ -32,7 +32,7 @@ pub mod aggregated_operations; pub mod blob; pub mod block; pub mod commitment; -pub mod contract_verification_api; +pub mod contract_verification; pub mod debug_flat_call; pub mod fee; pub mod fee_model; diff --git a/core/node/api_server/src/execution_sandbox/mod.rs b/core/node/api_server/src/execution_sandbox/mod.rs index bcba200f5ebc..78bc85de9f22 100644 --- a/core/node/api_server/src/execution_sandbox/mod.rs +++ b/core/node/api_server/src/execution_sandbox/mod.rs @@ -274,7 +274,7 @@ impl BlockStartInfo { #[derive(Debug, thiserror::Error)] pub 
enum BlockArgsError { - #[error("Block is pruned; first retained block is {0}")] + #[error("Block is not available, either it was pruned or the node was started from a snapshot created later than this block; first retained block is {0}")] Pruned(L2BlockNumber), #[error("Block is missing, but can appear in the future")] Missing, diff --git a/core/node/api_server/src/web3/namespaces/debug.rs b/core/node/api_server/src/web3/namespaces/debug.rs index 180de6b273e5..a70367a858fb 100644 --- a/core/node/api_server/src/web3/namespaces/debug.rs +++ b/core/node/api_server/src/web3/namespaces/debug.rs @@ -174,6 +174,11 @@ impl DebugNamespace { } let mut connection = self.state.acquire_connection().await?; + self.state + .start_info + .ensure_not_pruned(block_id, &mut connection) + .await?; + let block_number = self.state.resolve_block(&mut connection, block_id).await?; // let block_hash = block_hash self.state. self.current_method() @@ -252,6 +257,11 @@ impl DebugNamespace { let options = options.unwrap_or_default(); let mut connection = self.state.acquire_connection().await?; + self.state + .start_info + .ensure_not_pruned(block_id, &mut connection) + .await?; + let block_args = self .state .resolve_block_args(&mut connection, block_id) diff --git a/core/node/contract_verification_server/src/api_impl.rs b/core/node/contract_verification_server/src/api_impl.rs index b0336fd284b6..3e21c2f2cba8 100644 --- a/core/node/contract_verification_server/src/api_impl.rs +++ b/core/node/contract_verification_server/src/api_impl.rs @@ -7,11 +7,15 @@ use axum::{ response::{IntoResponse, Response}, Json, }; -use zksync_dal::{CoreDal, DalError}; +use zksync_dal::{contract_verification_dal::ContractVerificationDal, CoreDal, DalError}; use zksync_types::{ - bytecode::BytecodeMarker, - contract_verification_api::{ - CompilerVersions, VerificationIncomingRequest, VerificationInfo, VerificationRequestStatus, + bytecode::{trim_bytecode, BytecodeHash, BytecodeMarker}, + contract_verification::{ + 
api::{ + CompilerVersions, VerificationIncomingRequest, VerificationInfo, VerificationProblem, + VerificationRequestStatus, + }, + contract_identifier::ContractIdentifier, }, Address, }; @@ -220,15 +224,73 @@ impl RestApi { address: Path
, ) -> ApiResult { let method_latency = METRICS.call[&"contract_verification_info"].start(); - let info = self_ + let mut conn = self_ .replica_connection_pool .connection_tagged("api") - .await? - .contract_verification_dal() - .get_contract_verification_info(*address) - .await? - .ok_or(ApiError::VerificationInfoNotFound)?; + .await?; + let mut dal = conn.contract_verification_dal(); + + let info = if let Some(info) = dal.get_contract_verification_info(*address).await? { + info + } else if let Some(partial_match) = + get_partial_match_verification_info(&mut dal, *address).await? + { + partial_match + } else { + return Err(ApiError::VerificationInfoNotFound); + }; method_latency.observe(); Ok(Json(info)) } } + +/// Tries to do a lookup for partial match verification info. +/// Should be called only if a perfect match is not found. +async fn get_partial_match_verification_info( + dal: &mut ContractVerificationDal<'_, '_>, + address: Address, +) -> anyhow::Result> { + let Some(deployed_contract) = dal.get_contract_info_for_verification(address).await? else { + return Ok(None); + }; + + let bytecode_hash = + BytecodeHash::try_from(deployed_contract.bytecode_hash).context("Invalid bytecode hash")?; + let deployed_bytecode = trim_bytecode(bytecode_hash, &deployed_contract.bytecode) + .context("Invalid deployed bytecode")?; + + let identifier = ContractIdentifier::from_bytecode(bytecode_hash.marker(), deployed_bytecode); + let Some((mut info, fetched_keccak256, fetched_keccak256_without_metadata)) = dal + .get_partial_match_verification_info( + identifier.bytecode_keccak256, + identifier.bytecode_without_metadata_keccak256, + ) + .await? 
+ else { + return Ok(None); + }; + + if identifier.bytecode_keccak256 != fetched_keccak256 { + // Sanity check + let has_metadata = identifier.detected_metadata.is_some(); + let hashes_without_metadata_match = + identifier.bytecode_without_metadata_keccak256 == fetched_keccak256_without_metadata; + + if !has_metadata || !hashes_without_metadata_match { + tracing::error!( + contract_address = ?address, + identifier = ?identifier, + fetched_keccak256 = ?fetched_keccak256, + fetched_keccak256_without_metadata = ?fetched_keccak256_without_metadata, + info = ?info, + "Bogus verification info fetched for contract", + ); + anyhow::bail!("Internal error: bogus verification info detected"); + } + + // Mark the contract as partial match (regardless of other issues). + info.verification_problems = vec![VerificationProblem::IncorrectMetadata]; + } + + Ok(Some(info)) +} diff --git a/core/node/contract_verification_server/src/cache.rs b/core/node/contract_verification_server/src/cache.rs index c8e367515287..f7ba10c2bf92 100644 --- a/core/node/contract_verification_server/src/cache.rs +++ b/core/node/contract_verification_server/src/cache.rs @@ -5,7 +5,7 @@ use std::{ use tokio::sync::RwLock; use zksync_dal::{Connection, ConnectionPool, Core, CoreDal, DalError}; -use zksync_types::contract_verification_api::CompilerVersions; +use zksync_types::contract_verification::api::CompilerVersions; /// Compiler versions supported by the contract verifier. 
#[derive(Debug, Clone)] diff --git a/core/node/contract_verification_server/src/tests.rs b/core/node/contract_verification_server/src/tests.rs index 88b14db68733..3eb27056b960 100644 --- a/core/node/contract_verification_server/src/tests.rs +++ b/core/node/contract_verification_server/src/tests.rs @@ -13,7 +13,7 @@ use zksync_dal::{Connection, Core, CoreDal}; use zksync_node_test_utils::create_l2_block; use zksync_types::{ bytecode::{BytecodeHash, BytecodeMarker}, - contract_verification_api::CompilerVersions, + contract_verification::api::CompilerVersions, get_code_key, Address, L2BlockNumber, ProtocolVersion, StorageLog, }; diff --git a/core/node/da_clients/src/avail/client.rs b/core/node/da_clients/src/avail/client.rs index 411a0354d632..d4831ec81ce2 100644 --- a/core/node/da_clients/src/avail/client.rs +++ b/core/node/da_clients/src/avail/client.rs @@ -245,4 +245,27 @@ impl DataAvailabilityClient for AvailClient { fn blob_size_limit(&self) -> Option { Some(RawAvailClient::MAX_BLOB_SIZE) } + + async fn balance(&self) -> Result { + match self.sdk_client.as_ref() { + AvailClientMode::Default(client) => { + let AvailClientConfig::FullClient(default_config) = &self.config.config else { + unreachable!(); // validated in protobuf config + }; + + let ws_client = WsClientBuilder::default() + .build(default_config.api_node_url.clone().as_str()) + .await + .map_err(to_non_retriable_da_error)?; + + Ok(client + .balance(&ws_client) + .await + .map_err(to_non_retriable_da_error)?) 
+ } + AvailClientMode::GasRelay(_) => { + Ok(0) // TODO: implement balance for gas relay (PE-304) + } + } + } } diff --git a/core/node/da_clients/src/avail/sdk.rs b/core/node/da_clients/src/avail/sdk.rs index 8f28e797dc9a..bf6bdcb13ff5 100644 --- a/core/node/da_clients/src/avail/sdk.rs +++ b/core/node/da_clients/src/avail/sdk.rs @@ -3,6 +3,7 @@ use std::{fmt::Debug, sync::Arc, time}; +use anyhow::Context; use backon::{ConstantBuilder, Retryable}; use bytes::Bytes; use jsonrpsee::{ @@ -22,7 +23,6 @@ use crate::utils::to_non_retriable_da_error; const PROTOCOL_VERSION: u8 = 4; -/// An implementation of the `DataAvailabilityClient` trait that interacts with the Avail network. #[derive(Debug, Clone)] pub(crate) struct RawAvailClient { app_id: u32, @@ -344,6 +344,23 @@ impl RawAvailClient { Ok(tx_id) } + + /// Returns the balance of the address controlled by the `keypair` + pub async fn balance(&self, client: &Client) -> anyhow::Result { + let address = to_addr(self.keypair.clone()); + let resp: serde_json::Value = client + .request("state_getStorage", rpc_params![address]) + .await + .context("Error calling state_getStorage RPC")?; + + let balance = resp + .as_str() + .ok_or_else(|| anyhow::anyhow!("Invalid balance"))?; + + balance + .parse() + .context("Unable to parse the account balance") + } } fn blake2(data: Vec) -> [u8; N] { diff --git a/core/node/da_clients/src/celestia/client.rs b/core/node/da_clients/src/celestia/client.rs index df0735d4e1e4..9dc91ed141f5 100644 --- a/core/node/da_clients/src/celestia/client.rs +++ b/core/node/da_clients/src/celestia/client.rs @@ -97,6 +97,13 @@ impl DataAvailabilityClient for CelestiaClient { fn blob_size_limit(&self) -> Option { Some(1973786) // almost 2MB } + + async fn balance(&self) -> Result { + self.client + .balance() + .await + .map_err(to_non_retriable_da_error) + } } impl Debug for CelestiaClient { diff --git a/core/node/da_clients/src/celestia/generated/cosmos.bank.v1beta1.rs 
b/core/node/da_clients/src/celestia/generated/cosmos.bank.v1beta1.rs new file mode 100644 index 000000000000..3eb8c536c915 --- /dev/null +++ b/core/node/da_clients/src/celestia/generated/cosmos.bank.v1beta1.rs @@ -0,0 +1,1121 @@ +// This file is @generated by prost-build. +/// Params defines the parameters for the bank module. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Params { + #[prost(message, repeated, tag = "1")] + pub send_enabled: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub default_send_enabled: bool, +} +impl ::prost::Name for Params { + const NAME: &'static str = "Params"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Params".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Params".into() + } +} +/// SendEnabled maps coin denom to a send_enabled status (whether a denom is +/// sendable). +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SendEnabled { + #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub enabled: bool, +} +impl ::prost::Name for SendEnabled { + const NAME: &'static str = "SendEnabled"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.SendEnabled".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.SendEnabled".into() + } +} +/// Input models transaction input. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Input { + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub coins: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for Input { + const NAME: &'static str = "Input"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Input".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Input".into() + } +} +/// Output models transaction outputs. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Output { + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub coins: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for Output { + const NAME: &'static str = "Output"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Output".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Output".into() + } +} +/// Supply represents a struct that passively keeps track of the total supply +/// amounts in the network. +/// This message is deprecated now that supply is indexed by denom. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Supply { + #[prost(message, repeated, tag = "1")] + pub total: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for Supply { + const NAME: &'static str = "Supply"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Supply".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Supply".into() + } +} +/// DenomUnit represents a struct that describes a given +/// denomination unit of the basic token. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DenomUnit { + /// denom represents the string name of the given denom unit (e.g uatom). + #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, + /// exponent represents power of 10 exponent that one must + /// raise the base_denom to in order to equal the given DenomUnit's denom + /// 1 denom = 10^exponent base_denom + /// (e.g. with a base_denom of uatom, one can create a DenomUnit of 'atom' with + /// exponent = 6, thus: 1 atom = 10^6 uatom). + #[prost(uint32, tag = "2")] + pub exponent: u32, + /// aliases is a list of string aliases for the given denom + #[prost(string, repeated, tag = "3")] + pub aliases: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, +} +impl ::prost::Name for DenomUnit { + const NAME: &'static str = "DenomUnit"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.DenomUnit".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.DenomUnit".into() + } +} +/// Metadata represents a struct that describes +/// a basic token. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Metadata { + #[prost(string, tag = "1")] + pub description: ::prost::alloc::string::String, + /// denom_units represents the list of DenomUnit's for a given coin + #[prost(message, repeated, tag = "2")] + pub denom_units: ::prost::alloc::vec::Vec, + /// base represents the base denom (should be the DenomUnit with exponent = 0). + #[prost(string, tag = "3")] + pub base: ::prost::alloc::string::String, + /// display indicates the suggested denom that should be + /// displayed in clients. 
+ #[prost(string, tag = "4")] + pub display: ::prost::alloc::string::String, + /// name defines the name of the token (eg: Cosmos Atom) + /// + /// Since: cosmos-sdk 0.43 + #[prost(string, tag = "5")] + pub name: ::prost::alloc::string::String, + /// symbol is the token symbol usually shown on exchanges (eg: ATOM). This can + /// be the same as the display. + /// + /// Since: cosmos-sdk 0.43 + #[prost(string, tag = "6")] + pub symbol: ::prost::alloc::string::String, + /// URI to a document (on or off-chain) that contains additional information. Optional. + /// + /// Since: cosmos-sdk 0.46 + #[prost(string, tag = "7")] + pub uri: ::prost::alloc::string::String, + /// URIHash is a sha256 hash of a document pointed by URI. It's used to verify that + /// the document didn't change. Optional. + /// + /// Since: cosmos-sdk 0.46 + #[prost(string, tag = "8")] + pub uri_hash: ::prost::alloc::string::String, +} +impl ::prost::Name for Metadata { + const NAME: &'static str = "Metadata"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Metadata".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Metadata".into() + } +} +/// GenesisState defines the bank module's genesis state. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GenesisState { + /// params defines all the paramaters of the module. + #[prost(message, optional, tag = "1")] + pub params: ::core::option::Option, + /// balances is an array containing the balances of all the accounts. + #[prost(message, repeated, tag = "2")] + pub balances: ::prost::alloc::vec::Vec, + /// supply represents the total supply. If it is left empty, then supply will be calculated based on the provided + /// balances. Otherwise, it will be used to validate that the sum of the balances equals this amount. 
+ #[prost(message, repeated, tag = "3")] + pub supply: ::prost::alloc::vec::Vec, + /// denom_metadata defines the metadata of the differents coins. + #[prost(message, repeated, tag = "4")] + pub denom_metadata: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for GenesisState { + const NAME: &'static str = "GenesisState"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.GenesisState".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.GenesisState".into() + } +} +/// Balance defines an account address and balance pair used in the bank module's +/// genesis state. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Balance { + /// address is the address of the balance holder. + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// coins defines the different coins this balance holds. + #[prost(message, repeated, tag = "2")] + pub coins: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for Balance { + const NAME: &'static str = "Balance"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.Balance".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.Balance".into() + } +} +/// QueryBalanceRequest is the request type for the Query/Balance RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryBalanceRequest { + /// address is the address to query balances for. + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// denom is the coin denom to query balances for. 
+ #[prost(string, tag = "2")] + pub denom: ::prost::alloc::string::String, +} +impl ::prost::Name for QueryBalanceRequest { + const NAME: &'static str = "QueryBalanceRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryBalanceRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryBalanceRequest".into() + } +} +/// QueryBalanceResponse is the response type for the Query/Balance RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryBalanceResponse { + /// balance is the balance of the coin. + #[prost(message, optional, tag = "1")] + pub balance: ::core::option::Option, +} +impl ::prost::Name for QueryBalanceResponse { + const NAME: &'static str = "QueryBalanceResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryBalanceResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryBalanceResponse".into() + } +} +/// QueryBalanceRequest is the request type for the Query/AllBalances RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllBalancesRequest { + /// address is the address to query balances for. + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// pagination defines an optional pagination for the request. 
+ #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageRequest, + >, +} +impl ::prost::Name for QueryAllBalancesRequest { + const NAME: &'static str = "QueryAllBalancesRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryAllBalancesRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryAllBalancesRequest".into() + } +} +/// QueryAllBalancesResponse is the response type for the Query/AllBalances RPC +/// method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryAllBalancesResponse { + /// balances is the balances of all the coins. + #[prost(message, repeated, tag = "1")] + pub balances: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageResponse, + >, +} +impl ::prost::Name for QueryAllBalancesResponse { + const NAME: &'static str = "QueryAllBalancesResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryAllBalancesResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryAllBalancesResponse".into() + } +} +/// QuerySpendableBalancesRequest defines the gRPC request structure for querying +/// an account's spendable balances. +/// +/// Since: cosmos-sdk 0.46 +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QuerySpendableBalancesRequest { + /// address is the address to query spendable balances for. + #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// pagination defines an optional pagination for the request. 
+ #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageRequest, + >, +} +impl ::prost::Name for QuerySpendableBalancesRequest { + const NAME: &'static str = "QuerySpendableBalancesRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QuerySpendableBalancesRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QuerySpendableBalancesRequest".into() + } +} +/// QuerySpendableBalancesResponse defines the gRPC response structure for querying +/// an account's spendable balances. +/// +/// Since: cosmos-sdk 0.46 +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QuerySpendableBalancesResponse { + /// balances is the spendable balances of all the coins. + #[prost(message, repeated, tag = "1")] + pub balances: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageResponse, + >, +} +impl ::prost::Name for QuerySpendableBalancesResponse { + const NAME: &'static str = "QuerySpendableBalancesResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QuerySpendableBalancesResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QuerySpendableBalancesResponse".into() + } +} +/// QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC +/// method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTotalSupplyRequest { + /// pagination defines an optional pagination for the request. 
+ /// + /// Since: cosmos-sdk 0.43 + #[prost(message, optional, tag = "1")] + pub pagination: ::core::option::Option< + super::super::base::query::PageRequest, + >, +} +impl ::prost::Name for QueryTotalSupplyRequest { + const NAME: &'static str = "QueryTotalSupplyRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryTotalSupplyRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryTotalSupplyRequest".into() + } +} +/// QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC +/// method +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryTotalSupplyResponse { + /// supply is the supply of the coins + #[prost(message, repeated, tag = "1")] + pub supply: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + /// + /// Since: cosmos-sdk 0.43 + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageResponse, + >, +} +impl ::prost::Name for QueryTotalSupplyResponse { + const NAME: &'static str = "QueryTotalSupplyResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryTotalSupplyResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryTotalSupplyResponse".into() + } +} +/// QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QuerySupplyOfRequest { + /// denom is the coin denom to query balances for. 
+ #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, +} +impl ::prost::Name for QuerySupplyOfRequest { + const NAME: &'static str = "QuerySupplyOfRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QuerySupplyOfRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QuerySupplyOfRequest".into() + } +} +/// QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QuerySupplyOfResponse { + /// amount is the supply of the coin. + #[prost(message, optional, tag = "1")] + pub amount: ::core::option::Option, +} +impl ::prost::Name for QuerySupplyOfResponse { + const NAME: &'static str = "QuerySupplyOfResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QuerySupplyOfResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QuerySupplyOfResponse".into() + } +} +/// QueryParamsRequest defines the request type for querying x/bank parameters. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct QueryParamsRequest {} +impl ::prost::Name for QueryParamsRequest { + const NAME: &'static str = "QueryParamsRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryParamsRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryParamsRequest".into() + } +} +/// QueryParamsResponse defines the response type for querying x/bank parameters. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryParamsResponse { + #[prost(message, optional, tag = "1")] + pub params: ::core::option::Option, +} +impl ::prost::Name for QueryParamsResponse { + const NAME: &'static str = "QueryParamsResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryParamsResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryParamsResponse".into() + } +} +/// QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomsMetadataRequest { + /// pagination defines an optional pagination for the request. + #[prost(message, optional, tag = "1")] + pub pagination: ::core::option::Option< + super::super::base::query::PageRequest, + >, +} +impl ::prost::Name for QueryDenomsMetadataRequest { + const NAME: &'static str = "QueryDenomsMetadataRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomsMetadataRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomsMetadataRequest".into() + } +} +/// QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC +/// method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomsMetadataResponse { + /// metadata provides the client information for all the registered tokens. + #[prost(message, repeated, tag = "1")] + pub metadatas: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. 
+ #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageResponse, + >, +} +impl ::prost::Name for QueryDenomsMetadataResponse { + const NAME: &'static str = "QueryDenomsMetadataResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomsMetadataResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomsMetadataResponse".into() + } +} +/// QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomMetadataRequest { + /// denom is the coin denom to query the metadata for. + #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, +} +impl ::prost::Name for QueryDenomMetadataRequest { + const NAME: &'static str = "QueryDenomMetadataRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomMetadataRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomMetadataRequest".into() + } +} +/// QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC +/// method. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomMetadataResponse { + /// metadata describes and provides all the client information for the requested token. 
+ #[prost(message, optional, tag = "1")] + pub metadata: ::core::option::Option, +} +impl ::prost::Name for QueryDenomMetadataResponse { + const NAME: &'static str = "QueryDenomMetadataResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomMetadataResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomMetadataResponse".into() + } +} +/// QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, +/// which queries for a paginated set of all account holders of a particular +/// denomination. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomOwnersRequest { + /// denom defines the coin denomination to query all account holders for. + #[prost(string, tag = "1")] + pub denom: ::prost::alloc::string::String, + /// pagination defines an optional pagination for the request. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageRequest, + >, +} +impl ::prost::Name for QueryDenomOwnersRequest { + const NAME: &'static str = "QueryDenomOwnersRequest"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomOwnersRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomOwnersRequest".into() + } +} +/// DenomOwner defines structure representing an account that owns or holds a +/// particular denominated token. It contains the account address and account +/// balance of the denominated token. +/// +/// Since: cosmos-sdk 0.46 +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DenomOwner { + /// address defines the address that owns a particular denomination. 
+ #[prost(string, tag = "1")] + pub address: ::prost::alloc::string::String, + /// balance is the balance of the denominated coin for an account. + #[prost(message, optional, tag = "2")] + pub balance: ::core::option::Option, +} +impl ::prost::Name for DenomOwner { + const NAME: &'static str = "DenomOwner"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.DenomOwner".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.DenomOwner".into() + } +} +/// QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. +/// +/// Since: cosmos-sdk 0.46 +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct QueryDenomOwnersResponse { + #[prost(message, repeated, tag = "1")] + pub denom_owners: ::prost::alloc::vec::Vec, + /// pagination defines the pagination in the response. + #[prost(message, optional, tag = "2")] + pub pagination: ::core::option::Option< + super::super::base::query::PageResponse, + >, +} +impl ::prost::Name for QueryDenomOwnersResponse { + const NAME: &'static str = "QueryDenomOwnersResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.QueryDenomOwnersResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.QueryDenomOwnersResponse".into() + } +} +/// Generated client implementations. +pub mod query_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// Query defines the gRPC querier service. 
+ #[derive(Debug, Clone)] + pub struct QueryClient { + inner: tonic::client::Grpc, + } + impl QueryClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + QueryClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// Balance queries the balance of a single coin for a single account. 
+ pub async fn balance( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/Balance", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "Balance")); + self.inner.unary(req, path, codec).await + } + /// AllBalances queries the balance of all coins for a single account. + pub async fn all_balances( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/AllBalances", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "AllBalances")); + self.inner.unary(req, path, codec).await + } + /// SpendableBalances queries the spenable balance of all coins for a single + /// account. 
+ /// + /// Since: cosmos-sdk 0.46 + pub async fn spendable_balances( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/SpendableBalances", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("cosmos.bank.v1beta1.Query", "SpendableBalances"), + ); + self.inner.unary(req, path, codec).await + } + /// TotalSupply queries the total supply of all coins. + pub async fn total_supply( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/TotalSupply", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "TotalSupply")); + self.inner.unary(req, path, codec).await + } + /// SupplyOf queries the supply of a single coin. 
+ pub async fn supply_of( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/SupplyOf", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "SupplyOf")); + self.inner.unary(req, path, codec).await + } + /// Params queries the parameters of x/bank module. + pub async fn params( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/Params", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "Params")); + self.inner.unary(req, path, codec).await + } + /// DenomsMetadata queries the client metadata of a given coin denomination. 
+ pub async fn denom_metadata( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/DenomMetadata", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "DenomMetadata")); + self.inner.unary(req, path, codec).await + } + /// DenomsMetadata queries the client metadata for all registered coin + /// denominations. + pub async fn denoms_metadata( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/DenomsMetadata", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "DenomsMetadata")); + self.inner.unary(req, path, codec).await + } + /// DenomOwners queries for all account addresses that own a particular token + /// denomination. 
+ /// + /// Since: cosmos-sdk 0.46 + pub async fn denom_owners( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Query/DenomOwners", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Query", "DenomOwners")); + self.inner.unary(req, path, codec).await + } + } +} +/// MsgSend represents a message to send coins from one account to another. +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgSend { + #[prost(string, tag = "1")] + pub from_address: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub to_address: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub amount: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for MsgSend { + const NAME: &'static str = "MsgSend"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.MsgSend".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.MsgSend".into() + } +} +/// MsgSendResponse defines the Msg/Send response type. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgSendResponse {} +impl ::prost::Name for MsgSendResponse { + const NAME: &'static str = "MsgSendResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.MsgSendResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.MsgSendResponse".into() + } +} +/// MsgMultiSend represents an arbitrary multi-in, multi-out send message. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MsgMultiSend { + #[prost(message, repeated, tag = "1")] + pub inputs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub outputs: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for MsgMultiSend { + const NAME: &'static str = "MsgMultiSend"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.MsgMultiSend".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.MsgMultiSend".into() + } +} +/// MsgMultiSendResponse defines the Msg/MultiSend response type. +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct MsgMultiSendResponse {} +impl ::prost::Name for MsgMultiSendResponse { + const NAME: &'static str = "MsgMultiSendResponse"; + const PACKAGE: &'static str = "cosmos.bank.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.bank.v1beta1.MsgMultiSendResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.bank.v1beta1.MsgMultiSendResponse".into() + } +} +/// Generated client implementations. +pub mod msg_client { + #![allow( + unused_variables, + dead_code, + missing_docs, + clippy::wildcard_imports, + clippy::let_unit_value, + )] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + /// Msg defines the bank Msg service. 
+ #[derive(Debug, Clone)] + pub struct MsgClient { + inner: tonic::client::Grpc, + } + impl MsgClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + std::marker::Send + 'static, + ::Error: Into + std::marker::Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> MsgClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + std::marker::Send + std::marker::Sync, + { + MsgClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// Send defines a method for sending coins from one account to another account. 
+ pub async fn send( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Msg/Send", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Msg", "Send")); + self.inner.unary(req, path, codec).await + } + /// MultiSend defines a method for sending coins from some accounts to other accounts. + pub async fn multi_send( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::unknown( + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/cosmos.bank.v1beta1.Msg/MultiSend", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("cosmos.bank.v1beta1.Msg", "MultiSend")); + self.inner.unary(req, path, codec).await + } + } +} diff --git a/core/node/da_clients/src/celestia/generated/cosmos.base.query.v1beta1.rs b/core/node/da_clients/src/celestia/generated/cosmos.base.query.v1beta1.rs new file mode 100644 index 000000000000..b236f3026d3f --- /dev/null +++ b/core/node/da_clients/src/celestia/generated/cosmos.base.query.v1beta1.rs @@ -0,0 +1,77 @@ +// This file is @generated by prost-build. +/// PageRequest is to be embedded in gRPC request messages for efficient +/// pagination. 
Ex: +/// +/// message SomeRequest { +/// Foo some_parameter = 1; +/// PageRequest pagination = 2; +/// } +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PageRequest { + /// key is a value returned in PageResponse.next_key to begin + /// querying the next page most efficiently. Only one of offset or key + /// should be set. + #[prost(bytes = "vec", tag = "1")] + pub key: ::prost::alloc::vec::Vec, + /// offset is a numeric offset that can be used when key is unavailable. + /// It is less efficient than using key. Only one of offset or key should + /// be set. + #[prost(uint64, tag = "2")] + pub offset: u64, + /// limit is the total number of results to be returned in the result page. + /// If left empty it will default to a value to be set by each app. + #[prost(uint64, tag = "3")] + pub limit: u64, + /// count_total is set to true to indicate that the result set should include + /// a count of the total number of items available for pagination in UIs. + /// count_total is only respected when offset is used. It is ignored when key + /// is set. + #[prost(bool, tag = "4")] + pub count_total: bool, + /// reverse is set to true if results are to be returned in the descending order. + /// + /// Since: cosmos-sdk 0.43 + #[prost(bool, tag = "5")] + pub reverse: bool, +} +impl ::prost::Name for PageRequest { + const NAME: &'static str = "PageRequest"; + const PACKAGE: &'static str = "cosmos.base.query.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.base.query.v1beta1.PageRequest".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.base.query.v1beta1.PageRequest".into() + } +} +/// PageResponse is to be embedded in gRPC response messages where the +/// corresponding request message has used PageRequest. 
+/// +/// message SomeResponse { +/// repeated Bar results = 1; +/// PageResponse page = 2; +/// } +#[derive(::serde::Deserialize, ::serde::Serialize)] +#[serde(default)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PageResponse { + /// next_key is the key to be passed to PageRequest.key to + /// query the next page most efficiently. It will be empty if + /// there are no more results. + #[prost(bytes = "vec", tag = "1")] + pub next_key: ::prost::alloc::vec::Vec, + /// total is total number of results available if PageRequest.count_total + /// was set, its value is undefined otherwise + #[prost(uint64, tag = "2")] + pub total: u64, +} +impl ::prost::Name for PageResponse { + const NAME: &'static str = "PageResponse"; + const PACKAGE: &'static str = "cosmos.base.query.v1beta1"; + fn full_name() -> ::prost::alloc::string::String { + "cosmos.base.query.v1beta1.PageResponse".into() + } + fn type_url() -> ::prost::alloc::string::String { + "/cosmos.base.query.v1beta1.PageResponse".into() + } +} diff --git a/core/node/da_clients/src/celestia/mod.rs b/core/node/da_clients/src/celestia/mod.rs index ce648531f282..1baa52466cf2 100644 --- a/core/node/da_clients/src/celestia/mod.rs +++ b/core/node/da_clients/src/celestia/mod.rs @@ -24,6 +24,16 @@ pub mod cosmos { pub mod v1beta1 { include!("generated/cosmos.base.v1beta1.rs"); } + + pub mod query { + include!("generated/cosmos.base.query.v1beta1.rs"); + } + } + + pub mod bank { + pub mod v1beta1 { + include!("generated/cosmos.bank.v1beta1.rs"); + } } pub mod tx { diff --git a/core/node/da_clients/src/celestia/sdk.rs b/core/node/da_clients/src/celestia/sdk.rs index 5fd9aea79f07..11f10d823f31 100644 --- a/core/node/da_clients/src/celestia/sdk.rs +++ b/core/node/da_clients/src/celestia/sdk.rs @@ -20,6 +20,7 @@ use super::{ query_client::QueryClient as AuthQueryClient, BaseAccount, QueryAccountRequest, QueryParamsRequest as QueryAuthParamsRequest, }, + bank::v1beta1::{query_client::QueryClient as BankQueryClient, 
QueryAllBalancesRequest}, base::{ node::{ service_client::ServiceClient as MinGasPriceClient, @@ -377,6 +378,37 @@ impl RawCelestiaClient { tracing::debug!(tx_hash = %tx_response.txhash, height, "transaction succeeded"); Ok(Some(height)) } + + pub async fn balance(&self) -> anyhow::Result { + let mut auth_query_client = BankQueryClient::new(self.grpc_channel.clone()); + let resp = auth_query_client + .all_balances(QueryAllBalancesRequest { + address: self.address.clone(), + pagination: None, + }) + .await?; + + let micro_tia_balance = resp + .into_inner() + .balances + .into_iter() + .find(|coin| coin.denom == UNITS_SUFFIX) + .map_or_else( + || { + Err(anyhow::anyhow!( + "no balance found for address: {}", + self.address + )) + }, + |coin| { + coin.amount + .parse::() + .map_err(|e| anyhow::anyhow!("failed to parse balance: {}", e)) + }, + )?; + + Ok(micro_tia_balance) + } } /// Returns a `BlobTx` for the given signed tx and blobs. diff --git a/core/node/da_clients/src/eigen/client.rs b/core/node/da_clients/src/eigen/client.rs index d977620526aa..c7404344e7df 100644 --- a/core/node/da_clients/src/eigen/client.rs +++ b/core/node/da_clients/src/eigen/client.rs @@ -62,4 +62,8 @@ impl DataAvailabilityClient for EigenClient { fn blob_size_limit(&self) -> Option { Some(1920 * 1024) // 2mb - 128kb as a buffer } + + async fn balance(&self) -> Result { + Ok(0) // TODO fetch from API when payments are enabled in Eigen (PE-305) + } } diff --git a/core/node/da_clients/src/no_da.rs b/core/node/da_clients/src/no_da.rs index db0557510ed2..ecfa78ba44de 100644 --- a/core/node/da_clients/src/no_da.rs +++ b/core/node/da_clients/src/no_da.rs @@ -25,4 +25,8 @@ impl DataAvailabilityClient for NoDAClient { fn blob_size_limit(&self) -> Option { None } + + async fn balance(&self) -> Result { + Ok(0) + } } diff --git a/core/node/da_clients/src/object_store.rs b/core/node/da_clients/src/object_store.rs index 55764e8260e0..8c652e1e2341 100644 --- a/core/node/da_clients/src/object_store.rs +++ 
b/core/node/da_clients/src/object_store.rs @@ -87,6 +87,10 @@ impl DataAvailabilityClient for ObjectStoreDAClient { fn blob_size_limit(&self) -> Option { None } + + async fn balance(&self) -> Result { + Ok(0) + } } /// Used as a wrapper for the pubdata to be stored in the GCS. diff --git a/core/node/da_dispatcher/src/da_dispatcher.rs b/core/node/da_dispatcher/src/da_dispatcher.rs index f59a30b362ee..3bdb5406c92a 100644 --- a/core/node/da_dispatcher/src/da_dispatcher.rs +++ b/core/node/da_dispatcher/src/da_dispatcher.rs @@ -103,7 +103,7 @@ impl DataAvailabilityDispatcher { .await?; drop(conn); - for batch in batches { + for batch in &batches { let dispatch_latency = METRICS.blob_dispatch_latency.start(); let dispatch_response = retry(self.config.max_retries(), batch.l1_batch_number, || { self.client @@ -119,14 +119,14 @@ impl DataAvailabilityDispatcher { })?; let dispatch_latency_duration = dispatch_latency.observe(); - let sent_at = Utc::now().naive_utc(); + let sent_at = Utc::now(); let mut conn = self.pool.connection_tagged("da_dispatcher").await?; conn.data_availability_dal() .insert_l1_batch_da( batch.l1_batch_number, dispatch_response.blob_id.as_str(), - sent_at, + sent_at.naive_utc(), ) .await?; drop(conn); @@ -135,6 +135,12 @@ impl DataAvailabilityDispatcher { .last_dispatched_l1_batch .set(batch.l1_batch_number.0 as usize); METRICS.blob_size.observe(batch.pubdata.len()); + METRICS.sealed_to_dispatched_lag.observe( + sent_at + .signed_duration_since(batch.sealed_at) + .to_std() + .context("sent_at has to be higher than sealed_at")?, + ); tracing::info!( "Dispatched a DA for batch_number: {}, pubdata_size: {}, dispatch_latency: {dispatch_latency_duration:?}", batch.l1_batch_number, @@ -142,6 +148,27 @@ impl DataAvailabilityDispatcher { ); } + // We don't need to report this metric every iteration, only once when the balance is changed + if !batches.is_empty() { + let client_arc = Arc::new(self.client.clone_boxed()); + + tokio::spawn(async move { + let balance 
= client_arc + .balance() + .await + .with_context(|| "Unable to retrieve DA operator balance"); + + match balance { + Ok(balance) => { + METRICS.operator_balance.set(balance); + } + Err(err) => { + tracing::error!("{err}") + } + } + }); + } + Ok(()) } diff --git a/core/node/da_dispatcher/src/metrics.rs b/core/node/da_dispatcher/src/metrics.rs index 67ac5ed68222..2e167f2083b3 100644 --- a/core/node/da_dispatcher/src/metrics.rs +++ b/core/node/da_dispatcher/src/metrics.rs @@ -4,12 +4,12 @@ use vise::{Buckets, Gauge, Histogram, Metrics, Unit}; /// Buckets for `blob_dispatch_latency` (from 0.1 to 120 seconds). const DISPATCH_LATENCIES: Buckets = - Buckets::values(&[0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 30.0, 60.0, 120.0]); + Buckets::values(&[0.1, 0.5, 1.0, 2.0, 5.0, 10.0, 30.0, 60.0, 120.0, 240.0]); #[derive(Debug, Metrics)] #[metrics(prefix = "server_da_dispatcher")] pub(super) struct DataAvailabilityDispatcherMetrics { - /// Latency of the dispatch of the blob. + /// Latency of the dispatch of the blob. Only the communication with DA layer. #[metrics(buckets = DISPATCH_LATENCIES, unit = Unit::Seconds)] pub blob_dispatch_latency: Histogram, /// The duration between the moment when the blob is dispatched and the moment when it is included. @@ -19,7 +19,6 @@ pub(super) struct DataAvailabilityDispatcherMetrics { /// Buckets are bytes ranging from 1 KB to 16 MB, which has to satisfy all blob size values. #[metrics(buckets = Buckets::exponential(1_024.0..=16.0 * 1_024.0 * 1_024.0, 2.0), unit = Unit::Bytes)] pub blob_size: Histogram, - /// Number of transactions resent by the DA dispatcher. #[metrics(buckets = Buckets::linear(0.0..=10.0, 1.0))] pub dispatch_call_retries: Histogram, @@ -27,6 +26,12 @@ pub(super) struct DataAvailabilityDispatcherMetrics { pub last_dispatched_l1_batch: Gauge, /// Last L1 batch that has its inclusion finalized by DA layer. pub last_included_l1_batch: Gauge, + /// The delay between the moment batch was sealed and the moment it was dispatched. 
Includes + /// both communication with DA layer and time it spends in the queue on the `da_dispatcher` side. + #[metrics(buckets = DISPATCH_LATENCIES, unit = Unit::Seconds)] + pub sealed_to_dispatched_lag: Histogram, + /// The balance of the operator wallet on DA network. + pub operator_balance: Gauge, } #[vise::register] diff --git a/core/node/eth_watch/src/lib.rs b/core/node/eth_watch/src/lib.rs index f866c8e627c5..d5590159046c 100644 --- a/core/node/eth_watch/src/lib.rs +++ b/core/node/eth_watch/src/lib.rs @@ -150,11 +150,13 @@ impl EthWatch { _ = timer.tick() => { /* continue iterations */ } _ = stop_receiver.changed() => break, } - METRICS.eth_poll.inc(); let mut storage = pool.connection_tagged("eth_watch").await?; match self.loop_iteration(&mut storage).await { - Ok(()) => { /* everything went fine */ } + Ok(()) => { + /* everything went fine */ + METRICS.eth_poll.inc(); + } Err(EventProcessorError::Internal(err)) => { tracing::error!("Internal error processing new blocks: {err:?}"); return Err(err); diff --git a/core/node/node_storage_init/src/lib.rs b/core/node/node_storage_init/src/lib.rs index a8b72b769a18..f876fa446c4b 100644 --- a/core/node/node_storage_init/src/lib.rs +++ b/core/node/node_storage_init/src/lib.rs @@ -118,7 +118,11 @@ impl NodeStorageInitializer { recovery.initialize_storage(stop_receiver.clone()).await?; } else { anyhow::bail!( - "Snapshot recovery should be performed, but the strategy is not provided" + "Snapshot recovery should be performed, but the strategy is not provided. \ + In most of the cases this error means that the node was first started \ + with snapshots recovery enabled, but then it was disabled. 
\ + To get rid of this error and have the node sync from genesis \ + please clear the Node's database" ); } } diff --git a/core/tests/ts-integration/hardhat.config.ts b/core/tests/ts-integration/hardhat.config.ts index a96a83ca3ee3..20f3ecd4f4f7 100644 --- a/core/tests/ts-integration/hardhat.config.ts +++ b/core/tests/ts-integration/hardhat.config.ts @@ -4,7 +4,7 @@ import '@matterlabs/hardhat-zksync-vyper'; export default { zksolc: { - version: '1.5.3', + version: '1.5.10', compilerSource: 'binary', settings: { enableEraVMExtensions: true diff --git a/core/tests/ts-integration/scripts/compile-yul.ts b/core/tests/ts-integration/scripts/compile-yul.ts index 876caacdfab3..868f7d10ae6f 100644 --- a/core/tests/ts-integration/scripts/compile-yul.ts +++ b/core/tests/ts-integration/scripts/compile-yul.ts @@ -7,7 +7,7 @@ import { getZksolcUrl, saltFromUrl } from '@matterlabs/hardhat-zksync-solc'; import { getCompilersDir } from 'hardhat/internal/util/global-dir'; import path from 'path'; -const COMPILER_VERSION = '1.5.3'; +const COMPILER_VERSION = '1.5.10'; const IS_COMPILER_PRE_RELEASE = false; async function compilerLocation(): Promise { diff --git a/core/tests/ts-integration/tests/api/contract-verification.test.ts b/core/tests/ts-integration/tests/api/contract-verification.test.ts index 8f8830ce7516..21657bec9950 100644 --- a/core/tests/ts-integration/tests/api/contract-verification.test.ts +++ b/core/tests/ts-integration/tests/api/contract-verification.test.ts @@ -10,7 +10,7 @@ import { NodeMode } from '../../src/types'; // Regular expression to match ISO dates. 
const DATE_REGEX = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?/; -const ZKSOLC_VERSION = 'v1.5.3'; +const ZKSOLC_VERSION = 'v1.5.10'; const SOLC_VERSION = '0.8.26'; const ZK_VM_SOLC_VERSION = 'zkVM-0.8.26-1.0.1'; diff --git a/core/tests/ts-integration/tests/api/debug.test.ts b/core/tests/ts-integration/tests/api/debug.test.ts index 2af18c8438b8..8cde65ac2555 100644 --- a/core/tests/ts-integration/tests/api/debug.test.ts +++ b/core/tests/ts-integration/tests/api/debug.test.ts @@ -29,7 +29,7 @@ describe('Debug methods', () => { test('Should not fail for infinity recursion', async () => { const bytecodePath = `${ testMaster.environment().pathToHome - }/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/zkasm/deep_stak.zkasm.zbin`; + }/core/tests/ts-integration/contracts/zkasm/artifacts/deep_stak.zkasm/deep_stak.zkasm/deep_stak.zkasm.zbin`; const bytecode = fs.readFileSync(bytecodePath, 'utf-8'); const contractFactory = new zksync.ContractFactory([], bytecode, testMaster.mainAccount()); diff --git a/docs/src/guides/external-node/01_intro.md b/docs/src/guides/external-node/01_intro.md index b5842e160b6c..2218ea3f153b 100644 --- a/docs/src/guides/external-node/01_intro.md +++ b/docs/src/guides/external-node/01_intro.md @@ -129,6 +129,11 @@ calls and transactions. This namespace is disabled by default and can be configured via setting `EN_API_NAMESPACES` as described in the [example config](prepared_configs/mainnet-config.env). +> [!NOTE] +> +> The traces will only start being generated for blocks synced after the debug namespace is enabled, they will not be +> backfilled! 
The only way to get traces for historical blocks is to fully re-sync the node + Available methods: | Method | Notes | diff --git a/docs/src/guides/external-node/02_configuration.md b/docs/src/guides/external-node/02_configuration.md index 90da7c1eea79..e570b1e9a1a3 100644 --- a/docs/src/guides/external-node/02_configuration.md +++ b/docs/src/guides/external-node/02_configuration.md @@ -52,6 +52,15 @@ There are variables that allow you to fine-tune the limits of the RPC servers, s entries or the limit for the accepted transaction size. Provided files contain sane defaults that are recommended for use, but these can be edited, e.g. to make the Node more/less restrictive. +**Some common api limits config:**\ +`EN_MAX_RESPONSE_BODY_SIZE_MB` (default 10 i.e. 10MB) controls max size of a single response. Hitting the limit will +result in errors similar to:\ +`Response is too big (...)` + +`EN_REQ_ENTITIES_LIMIT` (default 10000) controls max possible limit of entities to be requested at once. Hitting the +limit will result in errors similar to:\ +`Query returned more than 10000 results (...)` + ## JSON-RPC API namespaces There are 7 total supported API namespaces: `eth`, `net`, `web3`, `debug` - standard ones; `zks` - rollup-specific one; diff --git a/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml b/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml index 5ee9de187bf0..30e0ca8b91a5 100644 --- a/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml +++ b/docs/src/guides/external-node/docker-compose-examples/mainnet-external-node-docker-compose.yml @@ -52,7 +52,7 @@ services: # Generation of consensus secrets. # The secrets are generated iff the secrets file doesn't already exist. 
generate-secrets: - image: "matterlabs/external-node:2.0-v25.1.0" + image: "matterlabs/external-node:2.0-v26.2.1" entrypoint: [ "/configs/generate_secrets.sh", @@ -61,7 +61,7 @@ services: volumes: - ./configs:/configs external-node: - image: "matterlabs/external-node:2.0-v25.1.0" + image: "matterlabs/external-node:2.0-v26.2.1" entrypoint: [ "/usr/bin/entrypoint.sh", diff --git a/docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml b/docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml index 42c5861b79d8..aaa185df0984 100644 --- a/docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml +++ b/docs/src/guides/external-node/docker-compose-examples/testnet-external-node-docker-compose.yml @@ -52,7 +52,7 @@ services: # Generation of consensus secrets. # The secrets are generated iff the secrets file doesn't already exist. generate-secrets: - image: "matterlabs/external-node:2.0-v24.16.0" + image: "matterlabs/external-node:2.0-v26.2.1" entrypoint: [ "/configs/generate_secrets.sh", @@ -61,7 +61,7 @@ services: volumes: - ./configs:/configs external-node: - image: "matterlabs/external-node:2.0-v24.16.0" + image: "matterlabs/external-node:2.0-v26.2.1" entrypoint: [ "/usr/bin/entrypoint.sh", diff --git a/etc/nix/README.md b/etc/nix/README.md index a7cce422e6e6..cd0339c5d224 100644 --- a/etc/nix/README.md +++ b/etc/nix/README.md @@ -1,8 +1,7 @@ # Declarative and Reproducible builds with Nix This directory contains the nix build recipes for various components of this project. Most importantly it is used to -reproducible build `zksync_tee_prover` reproducibly and create a container containing all what is needed to run it on an -SGX machine. +reproducibly build `zksync_tee_prover` and create a container containing all what is needed to run it on an SGX machine. 
## Prerequisites @@ -32,34 +31,34 @@ or on nixos in `/etc/nixos/configuration.nix` add the following lines: Build various components of this project with `nix`. -### Build as the CI would +### Build as a CI would ```shell -nix run github:nixos/nixpkgs/nixos-23.11#nixci +nix run github:nixos/nixpkgs/nixos-24.11#nixci -- build -- --no-sandbox ``` ### Build individual parts ```shell -nix build .#zksync +nix build .#tee_prover +nix build .#container-tee-prover-dcap +nix build .#container-tee-prover-azure ``` -or +or `zksync`, which requires an internet connection while building (not reproducible) ```shell -nix build .#zksync.contract_verifier -nix build .#zksync.external_node -nix build .#zksync.server -nix build .#zksync.snapshots_creator -nix build .#zksync.block_reverter +nix build --no-sandbox .#zksync ``` or ```shell -nix build .#tee_prover -nix build .#container-tee-prover-dcap -nix build .#container-tee-prover-azure +nix build --no-sandbox .#zksync.contract_verifier +nix build --no-sandbox .#zksync.external_node +nix build --no-sandbox .#zksync.server +nix build --no-sandbox .#zksync.snapshots_creator +nix build --no-sandbox .#zksync.block_reverter ``` ## Develop @@ -79,6 +78,23 @@ EOF $ direnv allow ``` +### Full development stack + +If you also want `zkstack` and `foundry` you want to use: + +```shell +nix develop --no-sandbox .#devShellAll +``` + +optionally create `.envrc` for `direnv` to automatically load the environment when entering the main directory: + +```shell +$ cat < .envrc +use flake .#devShellAll --no-sandbox +EOF +$ direnv allow +``` + ### Format for commit ```shell diff --git a/etc/nix/container-tee-prover-azure.nix b/etc/nix/container-tee-prover-azure.nix new file mode 100644 index 000000000000..75462dd20de7 --- /dev/null +++ b/etc/nix/container-tee-prover-azure.nix @@ -0,0 +1,64 @@ +{ lib +, pkgs +, teepot +, tee_prover +, container-name ? "zksync-tee-prover-azure" +, isAzure ? true +, tag ? null +, ... 
+}: +let + name = container-name; + entrypoint = "${teepot.teepot.tee_key_preexec}/bin/tee-key-preexec"; +in +pkgs.lib.tee.sgxGramineContainer + { + inherit name; + inherit tag; + + packages = [ teepot.teepot.tee_key_preexec tee_prover ]; + inherit entrypoint; + inherit isAzure; + + manifest = { + loader = { + argv = [ + entrypoint + "--env-prefix" + "TEE_PROVER_" + "--" + "${tee_prover}/bin/zksync_tee_prover" + ]; + + log_level = "error"; + + env = { + TEE_PROVER_API_URL.passthrough = true; + TEE_PROVER_MAX_RETRIES.passthrough = true; + TEE_PROVER_INITIAL_RETRY_BACKOFF_SEC.passthrough = true; + TEE_PROVER_RETRY_BACKOFF_MULTIPLIER.passthrough = true; + TEE_PROVER_MAX_BACKOFF_SEC.passthrough = true; + API_PROMETHEUS_LISTENER_PORT.passthrough = true; + API_PROMETHEUS_PUSHGATEWAY_URL.passthrough = true; + API_PROMETHEUS_PUSH_INTERVAL_MS.passthrough = true; + + ### DEBUG ### + RUST_BACKTRACE = "1"; + RUST_LOG = "warning,zksync_tee_prover=debug"; + }; + }; + + sgx = { + edmm_enable = false; + enclave_size = "8G"; + max_threads = 128; + }; + }; + } // { + meta = { + description = "SGX on Azure container for the ZKsync TEE prover"; + homepage = "https://github.com/matter-labs/zksync-era/tree/main/core/bin/zksync_tee_prover"; + platforms = [ "x86_64-linux" ]; + license = [ lib.licenses.asl20 lib.licenses.mit ]; + }; +} diff --git a/etc/nix/container-tee-prover-dcap.nix b/etc/nix/container-tee-prover-dcap.nix new file mode 100644 index 000000000000..2da135c8432b --- /dev/null +++ b/etc/nix/container-tee-prover-dcap.nix @@ -0,0 +1,17 @@ +{ lib +, container-tee-prover-azure +, ... 
+}: container-tee-prover-azure.overrideAttrs + { + isAzure = false; + container-name = "zksync-tee-prover-dcap"; + } + // { + meta = { + description = "SGX DCAP container for the ZKsync TEE prover"; + homepage = "https://github.com/matter-labs/zksync-era/tree/main/core/bin/zksync_tee_prover"; + platforms = [ "x86_64-linux" ]; + license = [ lib.licenses.asl20 lib.licenses.mit ]; + }; +} + diff --git a/etc/nix/container-tee_prover.nix b/etc/nix/container-tee_prover.nix deleted file mode 100644 index cb8ebfb51549..000000000000 --- a/etc/nix/container-tee_prover.nix +++ /dev/null @@ -1,55 +0,0 @@ -{ pkgs -, nixsgxLib -, teepot -, tee_prover -, container-name -, isAzure ? true -, tag ? null -}: -let - name = container-name; - entrypoint = "${teepot.teepot.tee_key_preexec}/bin/tee-key-preexec"; -in -nixsgxLib.mkSGXContainer { - inherit name; - inherit tag; - - packages = [ teepot.teepot.tee_key_preexec tee_prover ]; - inherit entrypoint; - inherit isAzure; - - manifest = { - loader = { - argv = [ - entrypoint - "--env-prefix" - "TEE_PROVER_" - "--" - "${tee_prover}/bin/zksync_tee_prover" - ]; - - log_level = "error"; - - env = { - TEE_PROVER_API_URL.passthrough = true; - TEE_PROVER_MAX_RETRIES.passthrough = true; - TEE_PROVER_INITIAL_RETRY_BACKOFF_SEC.passthrough = true; - TEE_PROVER_RETRY_BACKOFF_MULTIPLIER.passthrough = true; - TEE_PROVER_MAX_BACKOFF_SEC.passthrough = true; - API_PROMETHEUS_LISTENER_PORT.passthrough = true; - API_PROMETHEUS_PUSHGATEWAY_URL.passthrough = true; - API_PROMETHEUS_PUSH_INTERVAL_MS.passthrough = true; - - ### DEBUG ### - RUST_BACKTRACE = "1"; - RUST_LOG = "warning,zksync_tee_prover=debug"; - }; - }; - - sgx = { - edmm_enable = false; - enclave_size = "8G"; - max_threads = 128; - }; - }; -} diff --git a/etc/nix/devshell.nix b/etc/nix/devShell.nix similarity index 54% rename from etc/nix/devshell.nix rename to etc/nix/devShell.nix index 046cd210d162..a63f6c656c2f 100644 --- a/etc/nix/devshell.nix +++ b/etc/nix/devShell.nix @@ -1,22 +1,21 @@ { 
pkgs -, zksync -, commonArgs +, tee_prover +, coreCommonArgs +, inputs +, ... }: -pkgs.mkShell { - inputsFrom = [ zksync ]; +let + toolchain = pkgs.rust-bin.fromRustupToolchainFile (inputs.src + "/rust-toolchain"); - packages = with pkgs; [ - docker-compose - nodejs - yarn - axel - postgresql - python3 - solc - sqlx-cli - ]; + toolchain_with_src = (toolchain.override { + extensions = [ "rustfmt" "clippy" "rust-src" ]; + }); +in +pkgs.mkShell { + inputsFrom = [ tee_prover ]; + packages = [ ]; - inherit (commonArgs) env hardeningEnable; + inherit (coreCommonArgs) env hardeningEnable; shellHook = '' export ZKSYNC_HOME=$PWD @@ -32,6 +31,7 @@ pkgs.mkShell { fi ''; + RUST_SRC_PATH = "${toolchain_with_src}/lib/rustlib/src/rust/library"; ZK_NIX_LD_LIBRARY_PATH = pkgs.lib.makeLibraryPath [ ]; } diff --git a/etc/nix/devShellAll.nix b/etc/nix/devShellAll.nix new file mode 100644 index 000000000000..ed043c30b2eb --- /dev/null +++ b/etc/nix/devShellAll.nix @@ -0,0 +1,28 @@ +{ pkgs +, zksync +, zkstack +, devShell +, foundry-zksync +, ... +}: +let + newshell = (pkgs.mkShell { + inputsFrom = [ zksync zkstack ]; + + packages = with pkgs; [ + docker-compose + nodejs + yarn + axel + postgresql + python3 + solc + sqlx-cli + zkstack + foundry-zksync + nodePackages.prettier + ]; + }); +in +devShell.overrideAttrs + (old: { inherit (newshell) buildInputs nativeBuildInputs; }) diff --git a/etc/nix/foundry-zksync.nix b/etc/nix/foundry-zksync.nix new file mode 100644 index 000000000000..765b019fac3d --- /dev/null +++ b/etc/nix/foundry-zksync.nix @@ -0,0 +1,63 @@ +{ pkgs +, lib +, fetchFromGitHub +, inputs +, ... 
+}: +let + src = fetchFromGitHub { + owner = "matter-labs"; + repo = "foundry-zksync"; + tag = "0.2.0-zksync.0.0.2"; + hash = "sha256-SEIpt/kQSuYbR0w/DFeXXLMlT7YLP6T7tDDuRioUWlA="; + }; + + toolchain = pkgs.rust-bin.fromRustupToolchainFile "${src}/rust-toolchain"; + + craneLib = (inputs.crane.mkLib pkgs).overrideToolchain toolchain; + + rustPlatform = pkgs.makeRustPlatform { + cargo = toolchain; + rustc = toolchain; + }; +in +craneLib.buildPackage { + # Some crates download stuff from the network while compiling!!!! + # Allows derivation to access network + # + # Users of this package must set options to indicate that the sandbox conditions can be relaxed for this package. + # These are: + # - When used in a flake, set the flake's config with this line: nixConfig.sandbox = false; + # - From the command line with nix , add one of these options: + # - --option sandbox false + # - --no-sandbox + __noChroot = true; + + pname = src.repo; + version = src.tag; + inherit src; + + doCheck = false; + + nativeBuildInputs = with pkgs;[ + pkg-config + rustPlatform.bindgenHook + ] ++ lib.optionals stdenv.hostPlatform.isDarwin [ darwin.DarwinTools ]; + + buildInputs = with pkgs;[ + libusb1.dev + libclang.dev + openssl.dev + lz4.dev + bzip2.dev + rocksdb_8_3 + snappy.dev + ] ++ lib.optionals stdenv.hostPlatform.isDarwin [ darwin.apple_sdk.frameworks.AppKit ]; + + env = { + OPENSSL_NO_VENDOR = "1"; + ROCKSDB_LIB_DIR = "${pkgs.rocksdb_8_3.out}/lib"; + ROCKSDB_INCLUDE_DIR = "${pkgs.rocksdb_8_3.out}/include"; + SNAPPY_LIB_DIR = "${pkgs.snappy.out}/lib"; + }; +} diff --git a/etc/nix/tee_prover.nix b/etc/nix/tee_prover.nix index 5811297ce854..315eaf9e169d 100644 --- a/etc/nix/tee_prover.nix +++ b/etc/nix/tee_prover.nix @@ -1,16 +1,17 @@ { craneLib -, commonArgs +, coreCommonArgs +, ... 
}: let pname = "zksync_tee_prover"; cargoExtraArgs = "--locked -p zksync_tee_prover"; in -craneLib.buildPackage (commonArgs // { +craneLib.buildPackage (coreCommonArgs // { inherit pname; version = (builtins.fromTOML (builtins.readFile ../../core/Cargo.toml)).workspace.package.version; inherit cargoExtraArgs; - cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { + cargoArtifacts = craneLib.buildDepsOnly (coreCommonArgs // { inherit pname; inherit cargoExtraArgs; }); @@ -18,4 +19,13 @@ craneLib.buildPackage (commonArgs // { postInstall = '' strip $out/bin/zksync_tee_prover ''; + + # zksync-protobuf has store paths + postPatch = '' + mkdir -p "$TMPDIR/nix-vendor" + cp -Lr "$cargoVendorDir" -T "$TMPDIR/nix-vendor" + sed -i "s|$cargoVendorDir|$TMPDIR/nix-vendor/|g" "$TMPDIR/nix-vendor/config.toml" + chmod -R +w "$TMPDIR/nix-vendor" + cargoVendorDir="$TMPDIR/nix-vendor" + ''; }) diff --git a/etc/nix/zkstack.nix b/etc/nix/zkstack.nix new file mode 100644 index 000000000000..0ec960da3a13 --- /dev/null +++ b/etc/nix/zkstack.nix @@ -0,0 +1,50 @@ +{ craneLib +, zkstackArgs +, ... +}: +craneLib.buildPackage (zkstackArgs // { + # Some crates download stuff from the network while compiling!!!! + # Allows derivation to access the network + # + # Users of this package must set options to indicate that the sandbox conditions can be relaxed for this package. 
+ # These are: + # - In the appropriate nix.conf file (depends on multi vs single user nix installation), add the line: sandbox = relaxed + # - When used in a flake, set the flake's config with this line: nixConfig.sandbox = "relaxed"; + # - Same as above, but disabling the sandbox completely: nixConfig.sandbox = false; + # - From the command line with nix , add one of these options: + # - --option sandbox relaxed + # - --option sandbox false + # - --no-sandbox + # - --relaxed-sandbox + __noChroot = true; + cargoToml = "${zkstackArgs.src}/zkstack_cli/Cargo.toml"; + cargoLock = "${zkstackArgs.src}/zkstack_cli/Cargo.lock"; + + pname = "zkstack"; + + cargoArtifacts = craneLib.buildDepsOnly (zkstackArgs // { + pname = "zkstack-workspace"; + cargoToml = "${zkstackArgs.src}/zkstack_cli/Cargo.toml"; + cargoLock = "${zkstackArgs.src}/zkstack_cli/Cargo.lock"; + postUnpack = '' + cd $sourceRoot/zkstack_cli + sourceRoot="." + ''; + }); + + version = (builtins.fromTOML (builtins.readFile "${zkstackArgs.src}/zkstack_cli/Cargo.toml")).workspace.package.version; + + postUnpack = '' + cd $sourceRoot/zkstack_cli + sourceRoot="." + ''; + + # zksync-protobuf has store paths + postPatch = '' + mkdir -p "$TMPDIR/nix-vendor" + cp -Lr "$cargoVendorDir" -T "$TMPDIR/nix-vendor" + sed -i "s|$cargoVendorDir|$TMPDIR/nix-vendor/|g" "$TMPDIR/nix-vendor/config.toml" + chmod -R +w "$TMPDIR/nix-vendor" + cargoVendorDir="$TMPDIR/nix-vendor" + ''; +}) diff --git a/etc/nix/zksync.nix b/etc/nix/zksync.nix index 16d452c01bfd..cb002d1b918d 100644 --- a/etc/nix/zksync.nix +++ b/etc/nix/zksync.nix @@ -1,14 +1,32 @@ { craneLib -, commonArgs +, coreCommonArgs +, zkstack +, foundry-zksync +, ... 
}: -craneLib.buildPackage (commonArgs // { +let + cargoExtraArgs = "--locked --bin zksync_server --bin zksync_contract_verifier --bin zksync_external_node --bin snapshots_creator --bin block_reverter --bin merkle_tree_consistency_checker"; +in +craneLib.buildPackage (coreCommonArgs // { + # Some crates download stuff from the network while compiling!!!! + # Allows derivation to access network + # + # Users of this package must set options to indicate that the sandbox conditions can be relaxed for this package. + # These are: + # - When used in a flake, set the flake's config with this line: nixConfig.sandbox = false; + # - From the command line with nix , add one of these options: + # - --option sandbox false + # - --no-sandbox + __noChroot = true; + pname = "zksync"; - version = (builtins.fromTOML (builtins.readFile ../../core/Cargo.toml)).workspace.package.version; - cargoExtraArgs = "--all"; + version = (builtins.fromTOML (builtins.readFile (coreCommonArgs.src + "/Cargo.toml"))).workspace.package.version; + inherit cargoExtraArgs; - cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { - pname = "zksync-era-workspace"; - }); + buildInputs = coreCommonArgs.buildInputs ++ [ + zkstack + foundry-zksync + ]; outputs = [ "out" @@ -39,4 +57,15 @@ craneLib.buildPackage (commonArgs // { mkdir -p $server/nix-support echo "block_reverter" >> $server/nix-support/propagated-user-env-packages ''; -}) + + # zksync-protobuf has store paths + postPatch = '' + mkdir -p "$TMPDIR/nix-vendor" + cp -Lr "$cargoVendorDir" -T "$TMPDIR/nix-vendor" + sed -i "s|$cargoVendorDir|$TMPDIR/nix-vendor/|g" "$TMPDIR/nix-vendor/config.toml" + chmod -R +w "$TMPDIR/nix-vendor" + cargoVendorDir="$TMPDIR/nix-vendor" + ''; +} +) + diff --git a/flake.lock b/flake.lock index e1905f2a1f65..43bcf9c2b42a 100644 --- a/flake.lock +++ b/flake.lock @@ -1,17 +1,12 @@ { "nodes": { "crane": { - "inputs": { - "nixpkgs": [ - "nixpkgs" - ] - }, "locked": { - "lastModified": 1722960479, - "narHash": 
"sha256-NhCkJJQhD5GUib8zN9JrmYGMwt4lCRp6ZVNzIiYCl0Y=", + "lastModified": 1737250794, + "narHash": "sha256-bdIPhvsAKyYQzqAIeay4kOxTHGwLGkhM+IlBIsmMYFI=", "owner": "ipetkov", "repo": "crane", - "rev": "4c6c77920b8d44cd6660c1621dea6b3fc4b4c4f4", + "rev": "c5b7075f4a6d523fe8204618aa9754e56478c0e0", "type": "github" }, "original": { @@ -21,23 +16,17 @@ } }, "crane_2": { - "inputs": { - "nixpkgs": [ - "teepot-flake", - "nixsgx-flake", - "nixpkgs" - ] - }, "locked": { - "lastModified": 1716156051, - "narHash": "sha256-TjUX7WWRcrhuUxDHsR8pDR2N7jitqZehgCVSy3kBeS8=", + "lastModified": 1731974531, + "narHash": "sha256-z7hiGBWsbWwSnu5UMmYyfHEehlSmfB8sCA8iH4nmxm8=", "owner": "ipetkov", "repo": "crane", - "rev": "7443df1c478947bf96a2e699209f53b2db26209d", + "rev": "8ff9c457d60951bdd37a05ae903423de7ff55c6e", "type": "github" }, "original": { "owner": "ipetkov", + "ref": "8ff9c457d60951bdd37a05ae903423de7ff55c6e", "repo": "crane", "type": "github" } @@ -95,11 +84,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -148,7 +137,7 @@ }, "flake-utils-plus_3": { "inputs": { - "flake-utils": "flake-utils_6" + "flake-utils": "flake-utils_5" }, "locked": { "lastModified": 1715533576, @@ -205,24 +194,6 @@ "inputs": { "systems": "systems_4" }, - "locked": { - "lastModified": 1705309234, - "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "flake-utils_5": { - "inputs": { - "systems": "systems_5" - 
}, "locked": { "lastModified": 1710146030, "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", @@ -237,9 +208,9 @@ "type": "github" } }, - "flake-utils_6": { + "flake-utils_5": { "inputs": { - "systems": "systems_6" + "systems": "systems_5" }, "locked": { "lastModified": 1694529238, @@ -257,43 +228,43 @@ }, "nixpkgs": { "locked": { - "lastModified": 1722869614, - "narHash": "sha256-7ojM1KSk3mzutD7SkrdSflHXEujPvW1u7QuqWoTLXQU=", + "lastModified": 1737404927, + "narHash": "sha256-e1WgPJpIYbOuokjgylcsuoEUCB4Jl2rQXa2LUD6XAG8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "883180e6550c1723395a3a342f830bfc5c371f6b", + "rev": "ae584d90cbd0396a422289ee3efb1f1c9d141dc3", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-24.05", + "ref": "nixos-24.11", "repo": "nixpkgs", "type": "github" } }, "nixpkgs_2": { "locked": { - "lastModified": 1719707984, - "narHash": "sha256-RoxIr/fbndtuKqulGvNCcuzC6KdAib85Q8gXnjzA1dw=", + "lastModified": 1733550349, + "narHash": "sha256-NcGumB4Lr6KSDq+nIqXtNA8QwAQKDSZT7N9OTGWbTrs=", "owner": "nixos", "repo": "nixpkgs", - "rev": "7dca15289a1c2990efbe4680f0923ce14139b042", + "rev": "e2605d0744c2417b09f8bf850dfca42fcf537d34", "type": "github" }, "original": { "owner": "nixos", - "ref": "nixos-24.05", + "ref": "nixos-24.11", "repo": "nixpkgs", "type": "github" } }, "nixpkgs_3": { "locked": { - "lastModified": 1718428119, - "narHash": "sha256-WdWDpNaq6u1IPtxtYHHWpl5BmabtpmLnMAx0RdJ/vo8=", + "lastModified": 1736320768, + "narHash": "sha256-nIYdTAiKIGnFNugbomgBJR+Xv5F1ZQU+HfaBqJKroC0=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "e6cea36f83499eb4e9cd184c8a8e823296b50ad5", + "rev": "4bc9c909d9ac828a039f288cf872d16d38185db8", "type": "github" }, "original": { @@ -305,16 +276,16 @@ }, "nixpkgs_4": { "locked": { - "lastModified": 1719707984, - "narHash": "sha256-RoxIr/fbndtuKqulGvNCcuzC6KdAib85Q8gXnjzA1dw=", + "lastModified": 1733550349, + "narHash": "sha256-NcGumB4Lr6KSDq+nIqXtNA8QwAQKDSZT7N9OTGWbTrs=", "owner": 
"nixos", "repo": "nixpkgs", - "rev": "7dca15289a1c2990efbe4680f0923ce14139b042", + "rev": "e2605d0744c2417b09f8bf850dfca42fcf537d34", "type": "github" }, "original": { "owner": "nixos", - "ref": "nixos-24.05", + "ref": "nixos-24.11", "repo": "nixpkgs", "type": "github" } @@ -341,11 +312,11 @@ "snowfall-lib": "snowfall-lib" }, "locked": { - "lastModified": 1721741092, - "narHash": "sha256-ghFoP5gZpc1i4I4PiVCH00QNZ6s6ipGUcA0P1TsSSC8=", + "lastModified": 1733824290, + "narHash": "sha256-8MKgW3pFW+IxsM/iGfHio91Gym89gh9uNQ3JO4+D8QY=", "owner": "matter-labs", "repo": "nixsgx", - "rev": "be2c19592d0d5601184c52c07ab6d88dec07ffd6", + "rev": "788ff5233053a52421c9f8fa626a936785dda511", "type": "github" }, "original": { @@ -360,11 +331,11 @@ "snowfall-lib": "snowfall-lib_2" }, "locked": { - "lastModified": 1723120465, - "narHash": "sha256-sWu5lKy71hHnSwydhwzG2XgSehjvLfK2iuUtNimvGkg=", + "lastModified": 1733824290, + "narHash": "sha256-8MKgW3pFW+IxsM/iGfHio91Gym89gh9uNQ3JO4+D8QY=", "owner": "matter-labs", "repo": "nixsgx", - "rev": "b080c32f2aa8b3d4b4bc4356a8a513279b6f82ab", + "rev": "788ff5233053a52421c9f8fa626a936785dda511", "type": "github" }, "original": { @@ -379,11 +350,11 @@ "snowfall-lib": "snowfall-lib_3" }, "locked": { - "lastModified": 1717758565, - "narHash": "sha256-yscuZ3ixjwTkqS6ew5cB3Uvy9e807szRlMoPSyQuRJM=", + "lastModified": 1719403531, + "narHash": "sha256-JYqPdAB393YZIndGs5om7EsLUha3fpLckb9RKjKN7Fg=", "owner": "matter-labs", "repo": "nixsgx", - "rev": "49a1ae79d92ccb6ed7cabfe5c5042b1399e3cd3e", + "rev": "3a272950fa21601f31e8ca8b4e4897975069a00a", "type": "github" }, "original": { @@ -407,11 +378,11 @@ "nixpkgs": "nixpkgs_3" }, "locked": { - "lastModified": 1722997267, - "narHash": "sha256-8Pncp8IKd0f0N711CRrCGTC4iLfBE+/5kaMqyWxnYic=", + "lastModified": 1737512878, + "narHash": "sha256-dgF6htdmfNnZzVInifks6npnCAyVsIHWSpWNs10RSW0=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "d720bf3cebac38c2426d77ee2e59943012854cb8", + "rev": 
"06b8ed0eee289fe94c66f1202ced9a6a2c59a14c", "type": "github" }, "original": { @@ -422,7 +393,6 @@ }, "rust-overlay_2": { "inputs": { - "flake-utils": "flake-utils_4", "nixpkgs": [ "teepot-flake", "nixsgx-flake", @@ -430,11 +400,11 @@ ] }, "locked": { - "lastModified": 1717985971, - "narHash": "sha256-24h/qKp0aeI+Ew13WdRF521kY24PYa5HOvw0mlrABjk=", + "lastModified": 1734661750, + "narHash": "sha256-BI58NBdimxu1lnpOrG9XxBz7Cwqy+qIf99zunWofX5w=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "abfe5b3126b1b7e9e4daafc1c6478d17f0b584e7", + "rev": "7d3d910d5fd575e6e8c5600d83d54e5c47273bfe", "type": "github" }, "original": { @@ -462,6 +432,7 @@ }, "original": { "owner": "snowfallorg", + "ref": "c6238c83de101729c5de3a29586ba166a9a65622", "repo": "lib", "type": "github" } @@ -486,6 +457,7 @@ }, "original": { "owner": "snowfallorg", + "ref": "c6238c83de101729c5de3a29586ba166a9a65622", "repo": "lib", "type": "github" } @@ -590,21 +562,6 @@ "type": "github" } }, - "systems_6": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, "teepot-flake": { "inputs": { "crane": "crane_2", @@ -623,11 +580,11 @@ "vault-auth-tee-flake": "vault-auth-tee-flake" }, "locked": { - "lastModified": 1725354393, - "narHash": "sha256-RSiDY3sr0hdlydO3cYtidjVx+OlqIsmcnvsSDSGQPF0=", + "lastModified": 1737116236, + "narHash": "sha256-Bk52s9ENa6zbdaog6YZXMA720K6E+IRDqwYMi5NOWa0=", "owner": "matter-labs", "repo": "teepot", - "rev": "2c21d0161e43dc7a786787c89b84ecd6e8857106", + "rev": "e2c31919c92c5aa803fa3ce75824bea421bb3480", "type": "github" }, "original": { @@ -638,7 +595,7 @@ }, "vault-auth-tee-flake": { "inputs": { - "flake-utils": "flake-utils_5", + "flake-utils": "flake-utils_4", "nixpkgs": [ "teepot-flake", "nixsgx-flake", 
@@ -647,11 +604,11 @@ "nixsgx-flake": "nixsgx-flake_3" }, "locked": { - "lastModified": 1718012107, - "narHash": "sha256-uKiUBaEOj9f3NCn6oTw5VqoZJxsTXSoAn2IWVB/LSS0=", + "lastModified": 1719832445, + "narHash": "sha256-Dnueq3A1sf8zT+bY6CcuaxPvX4AK7B6Sveqb8YfoY8o=", "owner": "matter-labs", "repo": "vault-auth-tee", - "rev": "b10204436bc2fbad74c5716bd265fad74acc197c", + "rev": "2b53a4387fc8ecfb7826acd93d4895e7e810677d", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 630d719aa4df..2e1e4577ef43 100644 --- a/flake.nix +++ b/flake.nix @@ -12,20 +12,16 @@ }; inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.05"; + nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11"; teepot-flake.url = "github:matter-labs/teepot"; nixsgx-flake.url = "github:matter-labs/nixsgx"; flake-utils.url = "github:numtide/flake-utils"; rust-overlay.url = "github:oxalica/rust-overlay"; - crane = { - url = "github:ipetkov/crane?tag=v0.17.3"; - inputs.nixpkgs.follows = "nixpkgs"; - }; + crane.url = "github:ipetkov/crane?tag=v0.20.0"; }; - outputs = { self, nixpkgs, teepot-flake, nixsgx-flake, flake-utils, rust-overlay, crane }: + outputs = { self, nixpkgs, teepot-flake, nixsgx-flake, flake-utils, rust-overlay, crane } @ inputs: let - officialRelease = false; hardeningEnable = [ "fortify3" "pie" "relro" ]; out = system: @@ -46,11 +42,16 @@ packages = { # to ease potential cross-compilation, the overlay is used - inherit (appliedOverlay.zksync-era) zksync tee_prover container-tee-prover-azure container-tee-prover-dcap; + inherit (appliedOverlay.zksync-era) zksync tee_prover zkstack foundry-zksync; default = appliedOverlay.zksync-era.tee_prover; - }; + } // (pkgs.lib.optionalAttrs (pkgs.stdenv.hostPlatform.isx86_64 && pkgs.stdenv.hostPlatform.isLinux) { + inherit (appliedOverlay.zksync-era) container-tee-prover-azure container-tee-prover-dcap; + }); - devShells.default = appliedOverlay.zksync-era.devShell; + devShells = { + inherit (appliedOverlay.zksync-era) devShell 
devShellAll; + default = appliedOverlay.zksync-era.devShell; + }; }; in flake-utils.lib.eachDefaultSystem out // { @@ -59,16 +60,16 @@ let pkgs = final; - rustVersion = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain; + toolchain = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain; rustPlatform = pkgs.makeRustPlatform { - cargo = rustVersion; - rustc = rustVersion; + cargo = toolchain; + rustc = toolchain; }; - craneLib = (crane.mkLib pkgs).overrideToolchain rustVersion; + craneLib = (crane.mkLib pkgs).overrideToolchain toolchain; - commonArgs = { + coreCommonArgs = { nativeBuildInputs = with pkgs;[ pkg-config rustPlatform.bindgenHook @@ -80,15 +81,25 @@ snappy.dev lz4.dev bzip2.dev - rocksdb + rocksdb_8_3 snappy.dev ]; - src = ./core/.; + src = with pkgs.lib.fileset; let root = ./core/.; in toSource { + inherit root; + fileset = unions [ + # Default files from crane (Rust and cargo files) + (craneLib.fileset.commonCargoSources root) + # proto files and friends + (fileFilter (file: file.hasExt "proto" || file.hasExt "js" || file.hasExt "ts" || file.hasExt "map" || file.hasExt "json") root) + (maybeMissing ./core/lib/dal/.) 
+ ]; + }; env = { OPENSSL_NO_VENDOR = "1"; - ROCKSDB_LIB_DIR = "${pkgs.rocksdb.out}/lib"; + ROCKSDB_LIB_DIR = "${pkgs.rocksdb_8_3.out}/lib"; + ROCKSDB_INCLUDE_DIR = "${pkgs.rocksdb_8_3.out}/include"; SNAPPY_LIB_DIR = "${pkgs.snappy.out}/lib"; NIX_OUTPATH_USED_AS_RANDOM_SEED = "aaaaaaaaaa"; }; @@ -97,35 +108,32 @@ strictDeps = true; inherit hardeningEnable; }; - in - { - zksync-era = rec { - devShell = pkgs.callPackage ./etc/nix/devshell.nix { - inherit zksync; - inherit commonArgs; - }; - zksync = pkgs.callPackage ./etc/nix/zksync.nix { - inherit craneLib; - inherit commonArgs; - }; - - tee_prover = pkgs.callPackage ./etc/nix/tee_prover.nix { - inherit craneLib; - inherit commonArgs; - }; - - container-tee-prover-azure = pkgs.callPackage ./etc/nix/container-tee_prover.nix { - inherit tee_prover; - isAzure = true; - container-name = "zksync-tee-prover-azure"; - }; - container-tee-prover-dcap = pkgs.callPackage ./etc/nix/container-tee_prover.nix { - inherit tee_prover; - isAzure = false; - container-name = "zksync-tee-prover-dcap"; + zkstackArgs = coreCommonArgs // { + src = with pkgs.lib.fileset; let root = ./.; in toSource { + inherit root; + fileset = unions [ + # Default files from crane (Rust and cargo files) + (craneLib.fileset.commonCargoSources root) + # proto files and friends + (fileFilter (file: file.hasExt "proto" || file.hasExt "js" || file.hasExt "ts" || file.hasExt "map" || file.hasExt "json") ./.) + (maybeMissing ./core/lib/dal/.) 
+ ]; }; }; + in + { + zksync-era = pkgs.lib.makeScope pkgs.newScope ( + self: pkgs.lib.filesystem.packagesFromDirectoryRecursive { + callPackage = package: params: self.callPackage package (params // { + inherit craneLib; + inherit coreCommonArgs; + inherit zkstackArgs; + inputs = inputs // { src = ./.; }; + }); + directory = ./etc/nix; + } + ); }; }; } diff --git a/prover/CHANGELOG.md b/prover/CHANGELOG.md index e99c20193eb5..68a02884328b 100644 --- a/prover/CHANGELOG.md +++ b/prover/CHANGELOG.md @@ -1,5 +1,29 @@ # Changelog +## [18.0.0](https://github.com/matter-labs/zksync-era/compare/prover-v17.1.1...prover-v18.0.0) (2025-01-27) + + +### âš  BREAKING CHANGES + +* **contracts:** gateway integration ([#1934](https://github.com/matter-labs/zksync-era/issues/1934)) + +### Features + +* Compressor optimizations ([#3476](https://github.com/matter-labs/zksync-era/issues/3476)) ([3e931be](https://github.com/matter-labs/zksync-era/commit/3e931be6bddaacbd7d029c537db03a3c191fdc21)) +* **consensus:** Added view_timeout to consensus config ([#3383](https://github.com/matter-labs/zksync-era/issues/3383)) ([fc02a8f](https://github.com/matter-labs/zksync-era/commit/fc02a8f1c9f0bffb438fb27769d6dced3ce14cd9)) +* **consensus:** Update consensus dependencies ([#3339](https://github.com/matter-labs/zksync-era/issues/3339)) ([aa9575f](https://github.com/matter-labs/zksync-era/commit/aa9575fccbbc941f416d597256442afa974efd0a)) +* **contracts:** gateway integration ([#1934](https://github.com/matter-labs/zksync-era/issues/1934)) ([f06cb79](https://github.com/matter-labs/zksync-era/commit/f06cb79883bf320f50089099e0abeb95eaace470)) +* **eth-watch:** Change protocol upgrade schema ([#3435](https://github.com/matter-labs/zksync-era/issues/3435)) ([2c778fd](https://github.com/matter-labs/zksync-era/commit/2c778fdd3fcd1e774bcb945f14a640ccf4227a2f)) +* FFLONK support for compressor ([#3359](https://github.com/matter-labs/zksync-era/issues/3359)) 
([1a297be](https://github.com/matter-labs/zksync-era/commit/1a297bedd226c56fc2ba02dc54d79129a271a1eb)) +* Support stable compiler for VM (and some other crates) ([#3248](https://github.com/matter-labs/zksync-era/issues/3248)) ([cbee99d](https://github.com/matter-labs/zksync-era/commit/cbee99d8661b38aa6b49784c3934b8070a743fb4)) + + +### Bug Fixes + +* added missing quote in prover query ([#3347](https://github.com/matter-labs/zksync-era/issues/3347)) ([668ca51](https://github.com/matter-labs/zksync-era/commit/668ca51f5d52646e64b19b973acec05daa1c6f09)) +* Compressor setup data ([#3526](https://github.com/matter-labs/zksync-era/issues/3526)) ([62aea8b](https://github.com/matter-labs/zksync-era/commit/62aea8b4dcd986587de2cf17979e1042307d6b3e)) +* **prover:** Create reqwest client only once ([#3324](https://github.com/matter-labs/zksync-era/issues/3324)) ([40f8123](https://github.com/matter-labs/zksync-era/commit/40f8123a67970efbba3519f7954f807958a76cff)) + ## [17.1.1](https://github.com/matter-labs/zksync-era/compare/prover-v17.1.0...prover-v17.1.1) (2024-11-26) diff --git a/prover/Cargo.lock b/prover/Cargo.lock index 19223f360857..bc52f1ab2905 100644 --- a/prover/Cargo.lock +++ b/prover/Cargo.lock @@ -803,6 +803,33 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + 
[[package]] name = "circuit_definitions" version = "0.150.20" @@ -2370,6 +2397,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "handlebars" version = "3.5.5" @@ -4706,7 +4743,7 @@ dependencies = [ [[package]] name = "prover_cli" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "assert_cmd", @@ -4739,7 +4776,7 @@ dependencies = [ [[package]] name = "prover_version" -version = "17.1.1" +version = "18.0.0" dependencies = [ "zksync_prover_fri_types", ] @@ -8070,7 +8107,7 @@ dependencies = [ [[package]] name = "zksync_basic_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -8114,7 +8151,7 @@ dependencies = [ [[package]] name = "zksync_circuit_prover" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8144,7 +8181,7 @@ dependencies = [ [[package]] name = "zksync_circuit_prover_service" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8183,7 +8220,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -8274,7 +8311,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "envy", "hex", @@ -8287,7 +8324,7 @@ dependencies = [ [[package]] name = "zksync_core_leftovers" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "ctrlc", @@ -8301,7 +8338,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ 
"anyhow", "blake2 0.10.6", @@ -8329,7 +8366,7 @@ dependencies = [ [[package]] name = "zksync_dal" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "bigdecimal", @@ -8339,6 +8376,7 @@ dependencies = [ "itertools 0.10.5", "prost 0.12.6", "rand 0.8.5", + "rayon", "serde", "serde_json", "sqlx", @@ -8364,7 +8402,7 @@ dependencies = [ [[package]] name = "zksync_db_connection" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "rand 0.8.5", @@ -8380,7 +8418,7 @@ dependencies = [ [[package]] name = "zksync_env_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "envy", @@ -8391,7 +8429,7 @@ dependencies = [ [[package]] name = "zksync_eth_client" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "async-trait", "jsonrpsee", @@ -8408,7 +8446,7 @@ dependencies = [ [[package]] name = "zksync_eth_signer" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "async-trait", "rlp", @@ -8464,7 +8502,7 @@ dependencies = [ [[package]] name = "zksync_l1_contract_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "circuit_definitions", @@ -8481,7 +8519,7 @@ dependencies = [ [[package]] name = "zksync_mini_merkle_tree" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -8490,7 +8528,7 @@ dependencies = [ [[package]] name = "zksync_multivm" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "circuit_sequencer_api", @@ -8516,7 +8554,7 @@ dependencies = [ [[package]] name = "zksync_object_store" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -8552,7 +8590,7 @@ dependencies = [ 
[[package]] name = "zksync_proof_fri_compressor" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8627,7 +8665,7 @@ dependencies = [ [[package]] name = "zksync_protobuf_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "hex", @@ -8647,7 +8685,7 @@ dependencies = [ [[package]] name = "zksync_prover_autoscaler" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8684,7 +8722,7 @@ dependencies = [ [[package]] name = "zksync_prover_dal" -version = "17.1.1" +version = "18.0.0" dependencies = [ "sqlx", "strum", @@ -8694,7 +8732,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8728,7 +8766,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_gateway" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8754,7 +8792,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_types" -version = "17.1.1" +version = "18.0.0" dependencies = [ "circuit_definitions", "serde", @@ -8764,7 +8802,7 @@ dependencies = [ [[package]] name = "zksync_prover_fri_utils" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "regex", @@ -8782,7 +8820,7 @@ dependencies = [ [[package]] name = "zksync_prover_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "chrono", "circuit_definitions", @@ -8799,7 +8837,7 @@ dependencies = [ [[package]] name = "zksync_prover_job_monitor" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8821,7 +8859,7 @@ dependencies = [ [[package]] name = "zksync_prover_job_processor" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -8836,7 +8874,7 @@ dependencies = [ [[package]] name = "zksync_prover_keystore" -version = "17.1.1" +version = "18.0.0" dependencies = [ 
"anyhow", "bincode", @@ -8864,7 +8902,7 @@ dependencies = [ [[package]] name = "zksync_queued_job_processor" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -8893,7 +8931,7 @@ dependencies = [ [[package]] name = "zksync_system_constants" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -8901,13 +8939,14 @@ dependencies = [ [[package]] name = "zksync_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", "bigdecimal", "blake2 0.10.6", "chrono", + "ciborium", "derive_more", "hex", "itertools 0.10.5", @@ -8933,7 +8972,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "futures 0.3.31", @@ -8948,7 +8987,7 @@ dependencies = [ [[package]] name = "zksync_vk_setup_data_generator_server_fri" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "bincode", @@ -8972,7 +9011,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -9017,7 +9056,7 @@ dependencies = [ [[package]] name = "zksync_vm_interface" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -9033,7 +9072,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", @@ -9054,7 +9093,7 @@ dependencies = [ [[package]] name = "zksync_witness_generator" -version = "17.1.1" +version = "18.0.0" dependencies = [ "anyhow", "async-trait", @@ -9092,7 +9131,7 @@ dependencies = [ [[package]] name = "zksync_witness_vector_generator" -version = "17.1.1" +version = "18.0.0" dependencies = [ 
"anyhow", "async-trait", diff --git a/prover/Cargo.toml b/prover/Cargo.toml index 4e2483cbf0c0..7144b3821215 100644 --- a/prover/Cargo.toml +++ b/prover/Cargo.toml @@ -4,7 +4,7 @@ members = ["crates/bin/*", "crates/lib/*"] resolver = "2" [workspace.package] -version = "17.1.1" # x-release-please-version +version = "18.0.0" # x-release-please-version edition = "2021" authors = ["The Matter Labs Team "] homepage = "https://zksync.io/" @@ -100,14 +100,14 @@ zksync_core_leftovers = { path = "../core/lib/zksync_core_leftovers" } zksync_protobuf_config = { path = "../core/lib/protobuf_config" } # Prover workspace dependencies -zksync_prover_dal = { version = "17.1.1", path = "crates/lib/prover_dal" } -zksync_prover_fri_types = { version = "17.1.1", path = "crates/lib/prover_fri_types" } -zksync_prover_fri_utils = { version = "17.1.1", path = "crates/lib/prover_fri_utils" } -zksync_prover_keystore = { version = "17.1.1", path = "crates/lib/keystore" } -zksync_vk_setup_data_generator_server_fri = { version = "17.1.1", path = "crates/bin/vk_setup_data_generator_server_fri" } -zksync_prover_job_processor = { version = "17.1.1", path = "crates/lib/prover_job_processor" } -zksync_circuit_prover_service = { version = "17.1.1", path = "crates/lib/circuit_prover_service" } -zksync_prover_job_monitor = { version = "17.1.1", path = "crates/bin/prover_job_monitor" } +zksync_prover_dal = { version = "18.0.0", path = "crates/lib/prover_dal" } +zksync_prover_fri_types = { version = "18.0.0", path = "crates/lib/prover_fri_types" } +zksync_prover_fri_utils = { version = "18.0.0", path = "crates/lib/prover_fri_utils" } +zksync_prover_keystore = { version = "18.0.0", path = "crates/lib/keystore" } +zksync_vk_setup_data_generator_server_fri = { version = "18.0.0", path = "crates/bin/vk_setup_data_generator_server_fri" } +zksync_prover_job_processor = { version = "18.0.0", path = "crates/lib/prover_job_processor" } +zksync_circuit_prover_service = { version = "18.0.0", path = 
"crates/lib/circuit_prover_service" } +zksync_prover_job_monitor = { version = "18.0.0", path = "crates/bin/prover_job_monitor" } # for `perf` profiling [profile.perf] diff --git a/prover/crates/bin/witness_generator/src/main.rs b/prover/crates/bin/witness_generator/src/main.rs index 8b28ecf3cdd6..b9006341d03c 100644 --- a/prover/crates/bin/witness_generator/src/main.rs +++ b/prover/crates/bin/witness_generator/src/main.rs @@ -104,7 +104,6 @@ async fn main() -> anyhow::Result<()> { let _observability_guard = observability_config.install()?; let started_at = Instant::now(); - let use_push_gateway = opt.batch_size.is_some(); let prover_config = general_config.prover_config.context("prover config")?; let object_store_config = ProverObjectStoreConfig( @@ -121,16 +120,27 @@ async fn main() -> anyhow::Result<()> { let keystore = Keystore::locate().with_setup_path(Some(prover_config.setup_data_path.clone().into())); - let prometheus_config = general_config.prometheus_config.clone(); + let prometheus_config = general_config + .prometheus_config + .context("missing prometheus config")?; - // If the prometheus listener port is not set in the witness generator config, use the one from the prometheus config. - let prometheus_listener_port = if let Some(port) = config.prometheus_listener_port { - port + let prometheus_exporter_config = if prometheus_config.pushgateway_url.is_some() { + let url = prometheus_config + .gateway_endpoint() + .context("missing prometheus gateway endpoint")?; + tracing::info!("Using Prometheus push gateway: {}", url); + PrometheusExporterConfig::push(url, prometheus_config.push_interval()) } else { - prometheus_config - .clone() - .context("prometheus config")? 
- .listener_port + let prometheus_listener_port = if let Some(port) = config.prometheus_listener_port { + port + } else { + prometheus_config.listener_port + }; + tracing::info!( + "Using Prometheus pull on port: {}", + prometheus_listener_port + ); + PrometheusExporterConfig::pull(prometheus_listener_port) }; let connection_pool = ConnectionPool::::singleton(database_secrets.prover_url()?) @@ -166,20 +176,7 @@ async fn main() -> anyhow::Result<()> { } }; - let prometheus_config = if use_push_gateway { - let prometheus_config = prometheus_config - .clone() - .context("prometheus config needed when use_push_gateway enabled")?; - PrometheusExporterConfig::push( - prometheus_config - .gateway_endpoint() - .context("gateway_endpoint needed when use_push_gateway enabled")?, - prometheus_config.push_interval(), - ) - } else { - PrometheusExporterConfig::pull(prometheus_listener_port as u16) - }; - let prometheus_task = prometheus_config.run(stop_receiver.clone()); + let prometheus_task = prometheus_exporter_config.run(stop_receiver.clone()); let mut tasks = Vec::new(); tasks.push(tokio::spawn(prometheus_task)); diff --git a/prover/crates/bin/witness_generator/src/rounds/mod.rs b/prover/crates/bin/witness_generator/src/rounds/mod.rs index 6fd72c968693..6da6f5bb393d 100644 --- a/prover/crates/bin/witness_generator/src/rounds/mod.rs +++ b/prover/crates/bin/witness_generator/src/rounds/mod.rs @@ -173,6 +173,9 @@ where artifacts, ) .await?; + + tracing::info!("Saved {:?} to database for job {:?}", R::ROUND, job_id); + Ok(()) } diff --git a/prover/crates/lib/circuit_prover_service/src/lib.rs b/prover/crates/lib/circuit_prover_service/src/lib.rs index 19e7bb1f41ee..59132e17c1bc 100644 --- a/prover/crates/lib/circuit_prover_service/src/lib.rs +++ b/prover/crates/lib/circuit_prover_service/src/lib.rs @@ -2,8 +2,8 @@ // Crypto code uses generic const exprs, allocator_api is needed to use global allocators #![feature(generic_const_exprs, allocator_api)] -mod gpu_circuit_prover; 
+pub mod gpu_circuit_prover; pub mod job_runner; mod metrics; -mod types; -mod witness_vector_generator; +pub mod types; +pub mod witness_vector_generator; diff --git a/prover/crates/lib/circuit_prover_service/src/types/circuit.rs b/prover/crates/lib/circuit_prover_service/src/types/circuit.rs index 264daba63b7d..7185c9abbcd2 100644 --- a/prover/crates/lib/circuit_prover_service/src/types/circuit.rs +++ b/prover/crates/lib/circuit_prover_service/src/types/circuit.rs @@ -134,7 +134,7 @@ impl Circuit { /// Synthesize vector for a given circuit. /// Expects finalization hints to match circuit. - pub(crate) fn synthesize_vector( + pub fn synthesize_vector( &self, finalization_hints: Arc, ) -> anyhow::Result> { diff --git a/prover/data/historical_data/0.25.0/commitments.json b/prover/data/historical_data/0.25.0/commitments.json new file mode 100644 index 000000000000..086609a5822b --- /dev/null +++ b/prover/data/historical_data/0.25.0/commitments.json @@ -0,0 +1,6 @@ +{ + "leaf": "0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6", + "node": "0xf520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8", + "scheduler": "0xe6ba9d6b042440c480fa1c7182be32387db6e90281e82f37398d3f98f63f098a", + "snark_wrapper": "0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2" +} \ No newline at end of file diff --git a/prover/data/historical_data/0.25.0/snark_verification_scheduler_key.json b/prover/data/historical_data/0.25.0/snark_verification_scheduler_key.json new file mode 100644 index 000000000000..acb7e3fe8969 --- /dev/null +++ b/prover/data/historical_data/0.25.0/snark_verification_scheduler_key.json @@ -0,0 +1,399 @@ +{ + "n": 16777215, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14543631136906534221, + 11532161447842416044, + 11114175029926010938, + 1228896787564295039 + ], + "y": [ + 13293602262342424489, + 8897930584356943159, + 13256028170406220369, + 3214939367598363288 
+ ], + "infinity": false + }, + { + "x": [ + 11488992528554025682, + 12016824828223971094, + 11942004360057333370, + 316831626296641307 + ], + "y": [ + 304673622018339856, + 7139037552557818730, + 12475560967982555143, + 1055588351918295250 + ], + "infinity": false + }, + { + "x": [ + 2274984630539920017, + 5398167177582250136, + 16440396753384808945, + 1037682586893548769 + ], + "y": [ + 10168660308952593373, + 16526369642614237721, + 569062739734175056, + 155645558476901406 + ], + "infinity": false + }, + { + "x": [ + 14005362797509427677, + 2662603874351919260, + 14261489165672308143, + 1470528288349794782 + ], + "y": [ + 11144229651170108862, + 11439490264313454962, + 114993091474760680, + 1037267173208738614 + ], + "infinity": false + }, + { + "x": [ + 10726125240955612787, + 1916320162213728495, + 1058608086768277905, + 1651114031905829493 + ], + "y": [ + 13237242732587628574, + 4774776044666137690, + 14401013098807103799, + 2514139699916115771 + ], + "infinity": false + }, + { + "x": [ + 14434760601334248377, + 5316938318287831815, + 6221098547630910324, + 980422841280734466 + ], + "y": [ + 9201886393750447942, + 3840149540273146267, + 18179910191622136829, + 1563809864380914603 + ], + "infinity": false + }, + { + "x": [ + 9586697317366528906, + 2325800863365957883, + 1243781259615311278, + 3048012003267036960 + ], + "y": [ + 612821620743617231, + 1510385666449513894, + 9368337288452385056, + 2949736812933507034 + ], + "infinity": false + }, + { + "x": [ + 11830690209042008764, + 11761396005838073769, + 18271188400274886574, + 2896734446482773484 + ], + "y": [ + 1890606551566554401, + 10220931290312275762, + 3256711195869515344, + 2466626485328709457 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10865727529243127085, + 4083978853392244827, + 14303622309482785753, + 2263042021033673595 + ], + "y": [ + 3019601017411802529, + 880444282195426618, + 9998743525359587628, + 2891421025832200233 + ], + "infinity": false + }, + { + 
"x": [ + 5208608554346323426, + 8575970970223832576, + 2966209169082345602, + 239576408267301488 + ], + "y": [ + 17715084817752316452, + 2726293100894160682, + 17920596859559317135, + 3485576345363305439 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 14761045450946573029, + 17157644513453531531, + 2555518804134782053, + 1415819224310783987 + ], + "y": [ + 17265629196749977462, + 4128711855633066822, + 8435602817910411328, + 1408116296902303196 + ], + "infinity": false + }, + { + "x": [ + 3307267823832528482, + 2406249680085831639, + 9091964031261402109, + 2846274000290842933 + ], + "y": [ + 17374905554931807856, + 6690578002079222163, + 11809376320193686210, + 2676076649992974574 + ], + "infinity": false + }, + { + "x": [ + 3159118708748226574, + 5508845413629697013, + 13350869305506486049, + 689297560178790472 + ], + "y": [ + 15696011303896469684, + 12551611148155235140, + 14438660833518031207, + 425021756161657108 + ], + "infinity": false + }, + { + "x": [ + 18349397811516917436, + 4473982696343317918, + 13070312540813307819, + 2109468484629113245 + ], + "y": [ + 13254534552549721008, + 17388411854346636521, + 17875890960520499518, + 1062184221180884481 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 1787472, + "lookup_selector_commitment": { + "x": [ + 9324906502432882695, + 14977861238256290580, + 12538013124354067293, + 3408438202312564138 + ], + "y": [ + 14942105932194201701, + 12210090881357612547, + 14774705021036784261, + 2531694948512337448 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 + ], + "y": [ + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 + ], + "infinity": false + }, + { + "x": [ + 12822112641313049260, + 3646552465186399021, + 10324071010773924047, + 2209084192380614662 + ], + "y": [ + 11045141628975531869, + 12589678537679955590, 
+ 3065046617868727674, + 2099447669854151830 + ], + "infinity": false + }, + { + "x": [ + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 + ], + "y": [ + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 + ], + "infinity": false + }, + { + "x": [ + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 + ], + "y": [ + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 1732877442096985191, + 7537030715658833452, + 14073502080301311448, + 2178792007727681099 + ], + "y": [ + 8513095304113652904, + 6581396660744182779, + 13939755637576387431, + 2477157044961106453 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git 
a/prover/data/historical_data/0.26.0/commitments.json b/prover/data/historical_data/0.26.0/commitments.json new file mode 100644 index 000000000000..086609a5822b --- /dev/null +++ b/prover/data/historical_data/0.26.0/commitments.json @@ -0,0 +1,6 @@ +{ + "leaf": "0xf9664f4324c1400fa5c3822d667f30e873f53f1b8033180cd15fe41c1e2355c6", + "node": "0xf520cd5b37e74e19fdb369c8d676a04dce8a19457497ac6686d2bb95d94109c8", + "scheduler": "0xe6ba9d6b042440c480fa1c7182be32387db6e90281e82f37398d3f98f63f098a", + "snark_wrapper": "0x14f97b81e54b35fe673d8708cc1a19e1ea5b5e348e12d31e39824ed4f42bbca2" +} \ No newline at end of file diff --git a/prover/data/historical_data/0.26.0/snark_verification_scheduler_key.json b/prover/data/historical_data/0.26.0/snark_verification_scheduler_key.json new file mode 100644 index 000000000000..acb7e3fe8969 --- /dev/null +++ b/prover/data/historical_data/0.26.0/snark_verification_scheduler_key.json @@ -0,0 +1,399 @@ +{ + "n": 16777215, + "num_inputs": 1, + "state_width": 4, + "num_witness_polys": 0, + "gate_setup_commitments": [ + { + "x": [ + 14543631136906534221, + 11532161447842416044, + 11114175029926010938, + 1228896787564295039 + ], + "y": [ + 13293602262342424489, + 8897930584356943159, + 13256028170406220369, + 3214939367598363288 + ], + "infinity": false + }, + { + "x": [ + 11488992528554025682, + 12016824828223971094, + 11942004360057333370, + 316831626296641307 + ], + "y": [ + 304673622018339856, + 7139037552557818730, + 12475560967982555143, + 1055588351918295250 + ], + "infinity": false + }, + { + "x": [ + 2274984630539920017, + 5398167177582250136, + 16440396753384808945, + 1037682586893548769 + ], + "y": [ + 10168660308952593373, + 16526369642614237721, + 569062739734175056, + 155645558476901406 + ], + "infinity": false + }, + { + "x": [ + 14005362797509427677, + 2662603874351919260, + 14261489165672308143, + 1470528288349794782 + ], + "y": [ + 11144229651170108862, + 11439490264313454962, + 114993091474760680, + 1037267173208738614 + ], 
+ "infinity": false + }, + { + "x": [ + 10726125240955612787, + 1916320162213728495, + 1058608086768277905, + 1651114031905829493 + ], + "y": [ + 13237242732587628574, + 4774776044666137690, + 14401013098807103799, + 2514139699916115771 + ], + "infinity": false + }, + { + "x": [ + 14434760601334248377, + 5316938318287831815, + 6221098547630910324, + 980422841280734466 + ], + "y": [ + 9201886393750447942, + 3840149540273146267, + 18179910191622136829, + 1563809864380914603 + ], + "infinity": false + }, + { + "x": [ + 9586697317366528906, + 2325800863365957883, + 1243781259615311278, + 3048012003267036960 + ], + "y": [ + 612821620743617231, + 1510385666449513894, + 9368337288452385056, + 2949736812933507034 + ], + "infinity": false + }, + { + "x": [ + 11830690209042008764, + 11761396005838073769, + 18271188400274886574, + 2896734446482773484 + ], + "y": [ + 1890606551566554401, + 10220931290312275762, + 3256711195869515344, + 2466626485328709457 + ], + "infinity": false + } + ], + "gate_selectors_commitments": [ + { + "x": [ + 10865727529243127085, + 4083978853392244827, + 14303622309482785753, + 2263042021033673595 + ], + "y": [ + 3019601017411802529, + 880444282195426618, + 9998743525359587628, + 2891421025832200233 + ], + "infinity": false + }, + { + "x": [ + 5208608554346323426, + 8575970970223832576, + 2966209169082345602, + 239576408267301488 + ], + "y": [ + 17715084817752316452, + 2726293100894160682, + 17920596859559317135, + 3485576345363305439 + ], + "infinity": false + } + ], + "permutation_commitments": [ + { + "x": [ + 14761045450946573029, + 17157644513453531531, + 2555518804134782053, + 1415819224310783987 + ], + "y": [ + 17265629196749977462, + 4128711855633066822, + 8435602817910411328, + 1408116296902303196 + ], + "infinity": false + }, + { + "x": [ + 3307267823832528482, + 2406249680085831639, + 9091964031261402109, + 2846274000290842933 + ], + "y": [ + 17374905554931807856, + 6690578002079222163, + 11809376320193686210, + 2676076649992974574 + ], 
+ "infinity": false + }, + { + "x": [ + 3159118708748226574, + 5508845413629697013, + 13350869305506486049, + 689297560178790472 + ], + "y": [ + 15696011303896469684, + 12551611148155235140, + 14438660833518031207, + 425021756161657108 + ], + "infinity": false + }, + { + "x": [ + 18349397811516917436, + 4473982696343317918, + 13070312540813307819, + 2109468484629113245 + ], + "y": [ + 13254534552549721008, + 17388411854346636521, + 17875890960520499518, + 1062184221180884481 + ], + "infinity": false + } + ], + "total_lookup_entries_length": 1787472, + "lookup_selector_commitment": { + "x": [ + 9324906502432882695, + 14977861238256290580, + 12538013124354067293, + 3408438202312564138 + ], + "y": [ + 14942105932194201701, + 12210090881357612547, + 14774705021036784261, + 2531694948512337448 + ], + "infinity": false + }, + "lookup_tables_commitments": [ + { + "x": [ + 10873859091125335643, + 3906092213625635374, + 17046157606087980048, + 3193402705223440293 + ], + "y": [ + 10158946293873382504, + 2171386304067884865, + 6918663094168980658, + 350601565475975409 + ], + "infinity": false + }, + { + "x": [ + 12822112641313049260, + 3646552465186399021, + 10324071010773924047, + 2209084192380614662 + ], + "y": [ + 11045141628975531869, + 12589678537679955590, + 3065046617868727674, + 2099447669854151830 + ], + "infinity": false + }, + { + "x": [ + 11395032673621937545, + 3000063650268118516, + 7857619430005721792, + 805706808484810738 + ], + "y": [ + 6817063666434679427, + 1646386051225388537, + 4677946977082722827, + 1369650305976868514 + ], + "infinity": false + }, + { + "x": [ + 2885179371868476351, + 159944842081142878, + 6092294387055034894, + 213843603626505240 + ], + "y": [ + 11868113133779277990, + 8509646480531194854, + 14088068011597639414, + 707070630614027545 + ], + "infinity": false + } + ], + "lookup_table_type_commitment": { + "x": [ + 1732877442096985191, + 7537030715658833452, + 14073502080301311448, + 2178792007727681099 + ], + "y": [ + 
8513095304113652904, + 6581396660744182779, + 13939755637576387431, + 2477157044961106453 + ], + "infinity": false + }, + "non_residues": [ + [ + 5, + 0, + 0, + 0 + ], + [ + 7, + 0, + 0, + 0 + ], + [ + 10, + 0, + 0, + 0 + ] + ], + "g2_elements": [ + { + "x": { + "c0": [ + 5106727233969649389, + 7440829307424791261, + 4785637993704342649, + 1729627375292849782 + ], + "c1": [ + 10945020018377822914, + 17413811393473931026, + 8241798111626485029, + 1841571559660931130 + ] + }, + "y": { + "c0": [ + 5541340697920699818, + 16416156555105522555, + 5380518976772849807, + 1353435754470862315 + ], + "c1": [ + 6173549831154472795, + 13567992399387660019, + 17050234209342075797, + 650358724130500725 + ] + }, + "infinity": false + }, + { + "x": { + "c0": [ + 9089143573911733168, + 11482283522806384523, + 13585589533905622862, + 79029415676722370 + ], + "c1": [ + 5692040832573735873, + 16884514497384809355, + 16717166481813659368, + 2742131088506155463 + ] + }, + "y": { + "c0": [ + 9604638503594647125, + 1289961608472612514, + 6217038149984805214, + 2521661352385209130 + ], + "c1": [ + 17168069778630926308, + 11309277837895768996, + 15154989611154567813, + 359271377050603491 + ] + }, + "infinity": false + } + ] +} \ No newline at end of file diff --git a/prover/data/historical_data/README.md b/prover/data/historical_data/README.md index 22df8acd3384..8d368e249fa1 100644 --- a/prover/data/historical_data/README.md +++ b/prover/data/historical_data/README.md @@ -3,12 +3,17 @@ This directory contains historical verification keys and hashes. The name of the subdirectory should match the protocol version. 
-- 18 - boojum - 1.4.0 -- 19 - boojum fix -- 20 - fee model - 1.4.1 -- 21 - blobs - 1.4.2 -- 22 - fix - 1.4.2 -- 23 - 16 blobs + AA hashes + shared bridge - 1.5.0 -- 24 - 23 + fixes +| Version | Description | Circuit version | Other | +| ------- | ------------------------------------ | --------------- | ------------------ | +| 18 | boojum - 1.4.0 | 1.4.0 | | +| 19 | boojum fix | | | +| 20 | fee model - 1.4.1 | 1.4.1 | | +| 21 | blobs - 1.4.2 | 1.4.2 | | +| 22 | fix - 1.4.2 | | | +| 23 | 16 blobs + AA hashes + shared bridge | 1.5.0 | | +| 24.0 | 23 + fixes | | | +| 24.1 | fixes | | | +| 25.0 | protocol defence | | no circuit changes | +| 26.0 | gateway & bridges | | no circuit changes | And from version 24, we switched to semver (so 0.24.0, 0.24.1 etc). diff --git a/zkstack_cli/Cargo.lock b/zkstack_cli/Cargo.lock index bbdb50159076..58bd4ce4f1ee 100644 --- a/zkstack_cli/Cargo.lock +++ b/zkstack_cli/Cargo.lock @@ -628,6 +628,33 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + [[package]] name = "cipher" version = "0.4.4" @@ -2143,6 +2170,16 @@ dependencies = [ "tracing", ] +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" 
+dependencies = [ + "cfg-if", + "crunchy", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -7084,8 +7121,6 @@ dependencies = [ "zksync_contracts", "zksync_eth_client", "zksync_protobuf", - "zksync_protobuf_build", - "zksync_protobuf_config", "zksync_system_constants", "zksync_types", "zksync_web3_decl", @@ -7133,6 +7168,7 @@ dependencies = [ "serde_yaml", "strum", "thiserror", + "tokio", "url", "xshell", "zkstack_cli_common", @@ -7140,7 +7176,6 @@ dependencies = [ "zksync_basic_types", "zksync_config", "zksync_protobuf", - "zksync_protobuf_config", "zksync_system_constants", ] @@ -7165,7 +7200,7 @@ dependencies = [ [[package]] name = "zksync_basic_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -7205,7 +7240,7 @@ dependencies = [ [[package]] name = "zksync_config" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "rand", @@ -7274,7 +7309,7 @@ dependencies = [ [[package]] name = "zksync_contracts" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "envy", "hex", @@ -7287,7 +7322,7 @@ dependencies = [ [[package]] name = "zksync_crypto_primitives" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "blake2", @@ -7303,7 +7338,7 @@ dependencies = [ [[package]] name = "zksync_eth_client" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "async-trait", "jsonrpsee", @@ -7320,7 +7355,7 @@ dependencies = [ [[package]] name = "zksync_eth_signer" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "async-trait", "rlp", @@ -7331,7 +7366,7 @@ dependencies = [ [[package]] name = "zksync_mini_merkle_tree" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -7376,29 +7411,9 @@ dependencies = [ 
"syn 2.0.89", ] -[[package]] -name = "zksync_protobuf_config" -version = "26.1.0-non-semver-compat" -dependencies = [ - "anyhow", - "hex", - "prost 0.12.6", - "rand", - "secrecy", - "serde_json", - "serde_yaml", - "tracing", - "zksync_basic_types", - "zksync_concurrency", - "zksync_config", - "zksync_protobuf", - "zksync_protobuf_build", - "zksync_types", -] - [[package]] name = "zksync_system_constants" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "once_cell", "zksync_basic_types", @@ -7406,13 +7421,14 @@ dependencies = [ [[package]] name = "zksync_types" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", "bigdecimal", "blake2", "chrono", + "ciborium", "derive_more", "hex", "itertools 0.10.5", @@ -7438,7 +7454,7 @@ dependencies = [ [[package]] name = "zksync_utils" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "futures", @@ -7453,7 +7469,7 @@ dependencies = [ [[package]] name = "zksync_vlog" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "chrono", @@ -7478,7 +7494,7 @@ dependencies = [ [[package]] name = "zksync_web3_decl" -version = "26.1.0-non-semver-compat" +version = "26.2.1-non-semver-compat" dependencies = [ "anyhow", "async-trait", diff --git a/zkstack_cli/Cargo.toml b/zkstack_cli/Cargo.toml index cd5e84fc9eb9..5ef8338dca09 100644 --- a/zkstack_cli/Cargo.toml +++ b/zkstack_cli/Cargo.toml @@ -28,7 +28,6 @@ zkstack_cli_git_version_macro = { version = "0.1.2", path = "crates/git_version_ # ZkSync deps zksync_config = { path = "../core/lib/config" } -zksync_protobuf_config = { path = "../core/lib/protobuf_config" } zksync_basic_types = { path = "../core/lib/basic_types" } zksync_system_constants = { path = "../core/lib/constants" } zksync_types = { path = "../core/lib/types" } diff --git a/zkstack_cli/crates/config/Cargo.toml 
b/zkstack_cli/crates/config/Cargo.toml index 0926f2522cb2..c3d4afdce2c8 100644 --- a/zkstack_cli/crates/config/Cargo.toml +++ b/zkstack_cli/crates/config/Cargo.toml @@ -13,7 +13,6 @@ keywords.workspace = true [dependencies] anyhow.workspace = true clap.workspace = true -zkstack_cli_common.workspace = true ethers.workspace = true rand.workspace = true serde.workspace = true @@ -21,12 +20,13 @@ serde_json.workspace = true serde_yaml.workspace = true strum.workspace = true thiserror.workspace = true -zkstack_cli_types.workspace = true +tokio.workspace = true url.workspace = true xshell.workspace = true -zksync_protobuf_config.workspace = true zksync_protobuf.workspace = true zksync_config.workspace = true zksync_basic_types.workspace = true zksync_system_constants.workspace = true +zkstack_cli_common.workspace = true +zkstack_cli_types.workspace = true diff --git a/zkstack_cli/crates/config/src/chain.rs b/zkstack_cli/crates/config/src/chain.rs index 19f275d78f80..b411b1f882a3 100644 --- a/zkstack_cli/crates/config/src/chain.rs +++ b/zkstack_cli/crates/config/src/chain.rs @@ -7,22 +7,20 @@ use serde::{Deserialize, Serialize, Serializer}; use xshell::Shell; use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode, L1Network, ProverMode, WalletCreation}; use zksync_basic_types::L2ChainId; -use zksync_config::{ - configs::{gateway::GatewayChainConfig, GatewayConfig}, - DAClientConfig::{Avail, NoDA}, -}; +use zksync_config::configs::{gateway::GatewayChainConfig, GatewayConfig}; use crate::{ consts::{ - CONFIG_NAME, CONTRACTS_FILE, EN_CONFIG_FILE, GENERAL_FILE, GENESIS_FILE, + CONFIG_NAME, CONTRACTS_FILE, EN_CONFIG_FILE, GATEWAY_FILE, GENERAL_FILE, GENESIS_FILE, L1_CONTRACTS_FOUNDRY, SECRETS_FILE, WALLETS_FILE, }, create_localhost_wallets, + raw::RawConfig, traits::{ FileConfigWithDefaultName, ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath, ZkStackConfig, }, - ContractsConfig, GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, GATEWAY_FILE, 
+ ContractsConfig, WalletsConfig, }; /// Chain configuration file. This file is created in the chain @@ -91,12 +89,12 @@ impl ChainConfig { self.shell.get().expect("Not initialized") } - pub fn get_genesis_config(&self) -> anyhow::Result { - GenesisConfig::read_with_base_path(self.get_shell(), &self.configs) + pub async fn get_genesis_config(&self) -> anyhow::Result { + RawConfig::read(self.get_shell(), self.path_to_genesis_config()).await } - pub fn get_general_config(&self) -> anyhow::Result { - GeneralConfig::read_with_base_path(self.get_shell(), &self.configs) + pub async fn get_general_config(&self) -> anyhow::Result { + RawConfig::read(self.get_shell(), self.path_to_general_config()).await } pub fn get_wallets_config(&self) -> anyhow::Result { @@ -112,25 +110,12 @@ impl ChainConfig { anyhow::bail!("Wallets configs has not been found"); } - pub fn get_da_validator_type(&self) -> anyhow::Result { - let general = self.get_general_config().expect("General config not found"); - match ( - self.l1_batch_commit_data_generator_mode, - general.da_client_config, - ) { - (L1BatchCommitmentMode::Rollup, _) => Ok(DAValidatorType::Rollup), - (L1BatchCommitmentMode::Validium, None | Some(NoDA)) => Ok(DAValidatorType::NoDA), - (L1BatchCommitmentMode::Validium, Some(Avail(_))) => Ok(DAValidatorType::Avail), - _ => anyhow::bail!("DAValidatorType is not supported"), - } - } - pub fn get_contracts_config(&self) -> anyhow::Result { ContractsConfig::read_with_base_path(self.get_shell(), &self.configs) } - pub fn get_secrets_config(&self) -> anyhow::Result { - SecretsConfig::read_with_base_path(self.get_shell(), &self.configs) + pub async fn get_secrets_config(&self) -> anyhow::Result { + RawConfig::read(self.get_shell(), self.path_to_secrets_config()).await } pub fn get_gateway_config(&self) -> anyhow::Result { @@ -165,10 +150,6 @@ impl ChainConfig { self.configs.join(GATEWAY_FILE) } - pub fn save_general_config(&self, general_config: &GeneralConfig) -> anyhow::Result<()> { - 
general_config.save_with_base_path(self.get_shell(), &self.configs) - } - pub fn path_to_l1_foundry(&self) -> PathBuf { self.link_to_code.join(L1_CONTRACTS_FOUNDRY) } diff --git a/zkstack_cli/crates/config/src/consensus_config.rs b/zkstack_cli/crates/config/src/consensus_config.rs deleted file mode 100644 index 0bb4750d1fc0..000000000000 --- a/zkstack_cli/crates/config/src/consensus_config.rs +++ /dev/null @@ -1,18 +0,0 @@ -use zksync_config::configs::consensus::ConsensusConfig; -use zksync_protobuf_config::encode_yaml_repr; - -use crate::{ - traits::{FileConfigWithDefaultName, SaveConfig}, - CONSENSUS_CONFIG_FILE, -}; - -impl FileConfigWithDefaultName for ConsensusConfig { - const FILE_NAME: &'static str = CONSENSUS_CONFIG_FILE; -} - -impl SaveConfig for ConsensusConfig { - fn save(&self, shell: &xshell::Shell, path: impl AsRef) -> anyhow::Result<()> { - let bytes = encode_yaml_repr::(self)?; - Ok(shell.write_file(path.as_ref(), bytes)?) - } -} diff --git a/zkstack_cli/crates/config/src/consensus_secrets.rs b/zkstack_cli/crates/config/src/consensus_secrets.rs deleted file mode 100644 index da551a452799..000000000000 --- a/zkstack_cli/crates/config/src/consensus_secrets.rs +++ /dev/null @@ -1,14 +0,0 @@ -use std::path::Path; - -use xshell::Shell; -use zksync_config::configs::consensus::ConsensusSecrets; -use zksync_protobuf_config::read_yaml_repr; - -use crate::traits::ReadConfig; - -impl ReadConfig for ConsensusSecrets { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let path = shell.current_dir().join(path); - read_yaml_repr::(&path, false) - } -} diff --git a/zkstack_cli/crates/config/src/da.rs b/zkstack_cli/crates/config/src/da.rs new file mode 100644 index 000000000000..744613c3f8d8 --- /dev/null +++ b/zkstack_cli/crates/config/src/da.rs @@ -0,0 +1,39 @@ +//! Mirrored types for data availability configs. 
+ +use serde::Serialize; + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AvailDefaultConfig { + pub api_node_url: String, + pub app_id: u32, + pub finality_state: Option, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AvailGasRelayConfig { + pub gas_relay_api_url: String, + pub max_retries: usize, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +#[serde(rename_all = "snake_case")] +pub enum AvailClientConfig { + FullClient(AvailDefaultConfig), + GasRelay(AvailGasRelayConfig), +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AvailConfig { + pub bridge_api_url: String, + pub timeout_ms: usize, + #[serde(flatten)] + pub config: AvailClientConfig, +} + +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct AvailSecrets { + #[serde(skip_serializing_if = "Option::is_none")] + pub seed_phrase: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub gas_relay_api_key: Option, +} diff --git a/zkstack_cli/crates/config/src/external_node.rs b/zkstack_cli/crates/config/src/external_node.rs deleted file mode 100644 index 7d884d3e2346..000000000000 --- a/zkstack_cli/crates/config/src/external_node.rs +++ /dev/null @@ -1,28 +0,0 @@ -use std::path::Path; - -use xshell::Shell; -pub use zksync_config::configs::en_config::ENConfig; -use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; - -use crate::{ - consts::EN_CONFIG_FILE, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, -}; - -impl FileConfigWithDefaultName for ENConfig { - const FILE_NAME: &'static str = EN_CONFIG_FILE; -} - -impl SaveConfig for ENConfig { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { - let bytes = encode_yaml_repr::(self)?; - Ok(shell.write_file(path, bytes)?) 
- } -} - -impl ReadConfig for ENConfig { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let path = shell.current_dir().join(path); - read_yaml_repr::(&path, false) - } -} diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs index 47fe66143250..c90c3a08de3b 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_ecosystem/input.rs @@ -7,14 +7,42 @@ use ethers::{ use rand::Rng; use serde::{Deserialize, Serialize}; use zkstack_cli_types::L1Network; -use zksync_basic_types::L2ChainId; +use zksync_basic_types::{protocol_version::ProtocolSemanticVersion, L2ChainId}; use crate::{ consts::INITIAL_DEPLOYMENT_FILE, + raw::RawConfig, traits::{FileConfigWithDefaultName, ZkStackConfig}, - ContractsConfig, GenesisConfig, WalletsConfig, ERC20_DEPLOYMENT_FILE, + ContractsConfig, WalletsConfig, ERC20_DEPLOYMENT_FILE, }; +/// Part of the genesis config influencing `DeployGatewayCTMInput`. +#[derive(Debug)] +pub struct GenesisInput { + pub bootloader_hash: H256, + pub default_aa_hash: H256, + pub evm_emulator_hash: Option, + pub genesis_root_hash: H256, + pub rollup_last_leaf_index: u64, + pub genesis_commitment: H256, + pub protocol_version: ProtocolSemanticVersion, +} + +impl GenesisInput { + // FIXME: is this enough? (cf. 
aliases in the "real" config definition) + pub fn new(raw: &RawConfig) -> anyhow::Result { + Ok(Self { + bootloader_hash: raw.get("bootloader_hash")?, + default_aa_hash: raw.get("default_aa_hash")?, + evm_emulator_hash: raw.get_opt("evm_emulator_hash")?, + genesis_root_hash: raw.get("genesis_root")?, + rollup_last_leaf_index: raw.get("genesis_rollup_leaf_index")?, + genesis_commitment: raw.get("genesis_batch_commitment")?, + protocol_version: raw.get("genesis_protocol_semantic_version")?, + }) + } +} + #[derive(Debug, Deserialize, Serialize, Clone)] pub struct InitialDeploymentConfig { #[serde(skip_serializing_if = "Option::is_none")] @@ -121,7 +149,7 @@ impl ZkStackConfig for DeployL1Config {} impl DeployL1Config { pub fn new( - genesis_config: &GenesisConfig, + genesis_input: &GenesisInput, wallets_config: &WalletsConfig, initial_deployment_config: &InitialDeploymentConfig, era_chain_id: L2ChainId, @@ -149,18 +177,18 @@ impl DeployL1Config { .diamond_init_max_pubdata_per_batch, diamond_init_minimal_l2_gas_price: initial_deployment_config .diamond_init_minimal_l2_gas_price, - bootloader_hash: genesis_config.bootloader_hash.unwrap(), - default_aa_hash: genesis_config.default_aa_hash.unwrap(), - evm_emulator_hash: genesis_config.evm_emulator_hash, + bootloader_hash: genesis_input.bootloader_hash, + default_aa_hash: genesis_input.default_aa_hash, + evm_emulator_hash: genesis_input.evm_emulator_hash, diamond_init_priority_tx_max_pubdata: initial_deployment_config .diamond_init_priority_tx_max_pubdata, diamond_init_pubdata_pricing_mode: initial_deployment_config .diamond_init_pubdata_pricing_mode, // These values are not optional in genesis config with file based configuration - genesis_batch_commitment: genesis_config.genesis_commitment.unwrap(), - genesis_rollup_leaf_index: genesis_config.rollup_last_leaf_index.unwrap(), - genesis_root: genesis_config.genesis_root_hash.unwrap(), - latest_protocol_version: genesis_config.protocol_version.unwrap().pack(), + 
genesis_batch_commitment: genesis_input.genesis_commitment, + genesis_rollup_leaf_index: genesis_input.rollup_last_leaf_index, + genesis_root: genesis_input.genesis_root_hash, + latest_protocol_version: genesis_input.protocol_version.pack(), recursion_circuits_set_vks_hash: H256::zero(), recursion_leaf_level_vk_hash: H256::zero(), recursion_node_level_vk_hash: H256::zero(), diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs index afd71cd97757..fdda2009978c 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_gateway_ctm/input.rs @@ -2,10 +2,10 @@ use ethers::abi::Address; use serde::{Deserialize, Serialize}; use zkstack_cli_types::ProverMode; use zksync_basic_types::{H256, U256}; -use zksync_config::GenesisConfig; use crate::{ - forge_interface::deploy_ecosystem::input::InitialDeploymentConfig, traits::ZkStackConfig, + forge_interface::deploy_ecosystem::input::{GenesisInput, InitialDeploymentConfig}, + traits::ZkStackConfig, ChainConfig, ContractsConfig, EcosystemConfig, }; @@ -59,7 +59,7 @@ impl DeployGatewayCTMInput { pub fn new( chain_config: &ChainConfig, ecosystem_config: &EcosystemConfig, - genesis_config: &GenesisConfig, + genesis_input: &GenesisInput, contracts_config: &ContractsConfig, initial_deployment_config: &InitialDeploymentConfig, ) -> Self { @@ -107,16 +107,16 @@ impl DeployGatewayCTMInput { diamond_init_minimal_l2_gas_price: initial_deployment_config .diamond_init_minimal_l2_gas_price, - bootloader_hash: genesis_config.bootloader_hash.unwrap(), - default_aa_hash: genesis_config.default_aa_hash.unwrap(), + bootloader_hash: genesis_input.bootloader_hash, + default_aa_hash: genesis_input.default_aa_hash, priority_tx_max_gas_limit: initial_deployment_config.priority_tx_max_gas_limit, - genesis_root: genesis_config.genesis_root_hash.unwrap(), - 
genesis_rollup_leaf_index: genesis_config.rollup_last_leaf_index.unwrap(), - genesis_batch_commitment: genesis_config.genesis_commitment.unwrap(), + genesis_root: genesis_input.genesis_root_hash, + genesis_rollup_leaf_index: genesis_input.rollup_last_leaf_index, + genesis_batch_commitment: genesis_input.genesis_commitment, - latest_protocol_version: genesis_config.protocol_version.unwrap().pack(), + latest_protocol_version: genesis_input.protocol_version.pack(), expected_rollup_l2_da_validator: contracts_config .ecosystem_contracts diff --git a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs index 78ffcd16eaa8..41aa65bf3bfb 100644 --- a/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/deploy_l2_contracts/input.rs @@ -1,8 +1,10 @@ use ethers::types::Address; use serde::{Deserialize, Serialize}; -use zksync_basic_types::{L2ChainId, U256}; +use zksync_basic_types::{commitment::L1BatchCommitmentMode, L2ChainId, U256}; -use crate::{traits::ZkStackConfig, ChainConfig, ContractsConfig}; +use crate::{ + get_da_client_type, traits::ZkStackConfig, ChainConfig, ContractsConfig, DAValidatorType, +}; impl ZkStackConfig for DeployL2ContractsInput {} @@ -21,13 +23,14 @@ pub struct DeployL2ContractsInput { } impl DeployL2ContractsInput { - pub fn new( + pub async fn new( chain_config: &ChainConfig, contracts_config: &ContractsConfig, era_chain_id: L2ChainId, ) -> anyhow::Result { let contracts = chain_config.get_contracts_config()?; let wallets = chain_config.get_wallets_config()?; + let da_validator_type = get_da_validator_type(chain_config).await?; Ok(Self { era_chain_id, @@ -36,8 +39,22 @@ impl DeployL2ContractsInput { bridgehub: contracts.ecosystem_contracts.bridgehub_proxy_addr, governance: contracts_config.l1.governance_addr, erc20_bridge: contracts.bridges.erc20.l1_address, - da_validator_type: 
U256::from(chain_config.get_da_validator_type()? as u8), + da_validator_type: U256::from(da_validator_type as u8), consensus_registry_owner: wallets.governor.address, }) } } + +async fn get_da_validator_type(config: &ChainConfig) -> anyhow::Result { + let general = config.get_general_config().await?; + + match ( + config.l1_batch_commit_data_generator_mode, + get_da_client_type(&general), + ) { + (L1BatchCommitmentMode::Rollup, _) => Ok(DAValidatorType::Rollup), + (L1BatchCommitmentMode::Validium, None | Some("no_da")) => Ok(DAValidatorType::NoDA), + (L1BatchCommitmentMode::Validium, Some("avail")) => Ok(DAValidatorType::Avail), + _ => anyhow::bail!("DAValidatorType is not supported"), + } +} diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs index 41100c55a2ae..8b6320e07ffe 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_chain_upgrade/input.rs @@ -1,3 +1,4 @@ +use anyhow::Context; use ethers::types::Address; use serde::{Deserialize, Serialize}; use zkstack_cli_types::L1BatchCommitmentMode; @@ -23,19 +24,22 @@ pub struct GatewayChainUpgradeChain { } impl GatewayChainUpgradeInput { - pub fn new(current_chain_config: &ChainConfig) -> Self { - let contracts_config = current_chain_config.get_contracts_config().unwrap(); + pub async fn new(current_chain_config: &ChainConfig) -> anyhow::Result { + let contracts_config = current_chain_config + .get_contracts_config() + .context("failed loading contracts config")?; let validum = current_chain_config .get_genesis_config() - .unwrap() - .l1_batch_commit_data_generator_mode + .await + .context("failed loading genesis config")? + .get::("l1_batch_commit_data_generator_mode")? 
== L1BatchCommitmentMode::Validium; - Self { + Ok(Self { owner_address: current_chain_config .get_wallets_config() - .unwrap() + .context("failed loading wallets config")? .governor .address, chain: GatewayChainUpgradeChain { @@ -45,6 +49,6 @@ impl GatewayChainUpgradeInput { // TODO(EVM-860): we assume that all rollup chains want to forever remain this way permanent_rollup: !validum, }, - } + }) } } diff --git a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs index 8bd300f50581..4d739ac476f5 100644 --- a/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs +++ b/zkstack_cli/crates/config/src/forge_interface/gateway_ecosystem_upgrade/input.rs @@ -3,8 +3,9 @@ use serde::{Deserialize, Serialize}; use zksync_basic_types::L2ChainId; use crate::{ - forge_interface::deploy_ecosystem::input::InitialDeploymentConfig, traits::ZkStackConfig, - ContractsConfig, GenesisConfig, + forge_interface::deploy_ecosystem::input::{GenesisInput, InitialDeploymentConfig}, + traits::ZkStackConfig, + ContractsConfig, }; #[derive(Debug, Deserialize, Serialize, Clone)] @@ -21,7 +22,7 @@ impl ZkStackConfig for GatewayEcosystemUpgradeInput {} impl GatewayEcosystemUpgradeInput { pub fn new( - new_genesis_config: &GenesisConfig, + new_genesis_input: &GenesisInput, current_contracts_config: &ContractsConfig, // It is expected to not change between the versions initial_deployment_config: &InitialDeploymentConfig, @@ -48,16 +49,16 @@ impl GatewayEcosystemUpgradeInput { .diamond_init_max_pubdata_per_batch, diamond_init_minimal_l2_gas_price: initial_deployment_config .diamond_init_minimal_l2_gas_price, - bootloader_hash: new_genesis_config.bootloader_hash.unwrap(), - default_aa_hash: new_genesis_config.default_aa_hash.unwrap(), + bootloader_hash: new_genesis_input.bootloader_hash, + default_aa_hash: new_genesis_input.default_aa_hash, 
diamond_init_priority_tx_max_pubdata: initial_deployment_config .diamond_init_priority_tx_max_pubdata, diamond_init_pubdata_pricing_mode: initial_deployment_config .diamond_init_pubdata_pricing_mode, // These values are not optional in genesis config with file based configuration - genesis_batch_commitment: new_genesis_config.genesis_commitment.unwrap(), - genesis_rollup_leaf_index: new_genesis_config.rollup_last_leaf_index.unwrap(), - genesis_root: new_genesis_config.genesis_root_hash.unwrap(), + genesis_batch_commitment: new_genesis_input.genesis_commitment, + genesis_rollup_leaf_index: new_genesis_input.rollup_last_leaf_index, + genesis_root: new_genesis_input.genesis_root_hash, recursion_circuits_set_vks_hash: H256::zero(), recursion_leaf_level_vk_hash: H256::zero(), recursion_node_level_vk_hash: H256::zero(), diff --git a/zkstack_cli/crates/config/src/general.rs b/zkstack_cli/crates/config/src/general.rs index c1639d6bea15..9eb7dc7756cd 100644 --- a/zkstack_cli/crates/config/src/general.rs +++ b/zkstack_cli/crates/config/src/general.rs @@ -1,16 +1,10 @@ use std::path::{Path, PathBuf}; -use anyhow::Context; -use url::Url; use xshell::Shell; use zkstack_cli_common::yaml::merge_yaml; -use zksync_config::configs::object_store::ObjectStoreMode; -pub use zksync_config::configs::GeneralConfig; -use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; use crate::{ - consts::GENERAL_FILE, - traits::{ConfigWithL2RpcUrl, FileConfigWithDefaultName, ReadConfig, SaveConfig}, + raw::{PatchedConfig, RawConfig}, ChainConfig, }; @@ -40,77 +34,57 @@ impl FileArtifacts { } } -pub fn set_rocks_db_config(config: &mut GeneralConfig, rocks_dbs: RocksDbs) -> anyhow::Result<()> { - config - .db_config - .as_mut() - .context("DB config is not presented")? - .state_keeper_db_path = rocks_dbs.state_keeper.to_str().unwrap().to_string(); - config - .db_config - .as_mut() - .context("DB config is not presented")? 
- .merkle_tree - .path = rocks_dbs.merkle_tree.to_str().unwrap().to_string(); - config - .protective_reads_writer_config - .as_mut() - .context("Protective reads config is not presented")? - .db_path = rocks_dbs.protective_reads.to_str().unwrap().to_string(); - config - .basic_witness_input_producer_config - .as_mut() - .context("Basic witness input producer config is not presented")? - .db_path = rocks_dbs - .basic_witness_input_producer - .to_str() - .unwrap() - .to_string(); +pub fn set_rocks_db_config(config: &mut PatchedConfig, rocks_dbs: RocksDbs) -> anyhow::Result<()> { + config.insert_path("db.state_keeper_db_path", &rocks_dbs.state_keeper)?; + config.insert_path("db.merkle_tree.path", &rocks_dbs.merkle_tree)?; + config.insert_path( + "protective_reads_writer.db_path", + &rocks_dbs.protective_reads, + )?; + config.insert_path( + "basic_witness_input_producer.db_path", + &rocks_dbs.basic_witness_input_producer, + )?; Ok(()) } -pub fn set_file_artifacts(config: &mut GeneralConfig, file_artifacts: FileArtifacts) { - macro_rules! 
set_artifact_path { - ($config:expr, $name:ident, $value:expr) => { - $config - .as_mut() - .map(|a| set_artifact_path!(a.$name, $value)) - }; +pub fn set_file_artifacts( + config: &mut PatchedConfig, + file_artifacts: FileArtifacts, +) -> anyhow::Result<()> { + set_file_backed_path_if_selected( + config, + "prover.prover_object_store", + &file_artifacts.prover_object_store, + )?; + set_file_backed_path_if_selected( + config, + "prover.public_object_store", + &file_artifacts.public_object_store, + )?; + set_file_backed_path_if_selected( + config, + "snapshot_creator.object_store", + &file_artifacts.snapshot, + )?; + set_file_backed_path_if_selected( + config, + "snapshot_recovery.object_store", + &file_artifacts.snapshot, + )?; + Ok(()) +} - ($config:expr, $value:expr) => { - $config.as_mut().map(|a| { - if let ObjectStoreMode::FileBacked { - ref mut file_backed_base_path, - } = &mut a.mode - { - *file_backed_base_path = $value.to_str().unwrap().to_string() - } - }) - }; +fn set_file_backed_path_if_selected( + config: &mut PatchedConfig, + prefix: &str, + path: &Path, +) -> anyhow::Result<()> { + let container = config.base().get_raw(&format!("{prefix}.file_backed")); + if matches!(container, Some(serde_yaml::Value::Mapping(_))) { + config.insert_path(&format!("{prefix}.file_backed.file_backed_base_path"), path)?; } - - set_artifact_path!( - config.prover_config, - prover_object_store, - file_artifacts.prover_object_store - ); - set_artifact_path!( - config.prover_config, - public_object_store, - file_artifacts.public_object_store - ); - set_artifact_path!( - config.snapshot_creator, - object_store, - file_artifacts.snapshot - ); - set_artifact_path!( - config.snapshot_recovery, - object_store, - file_artifacts.snapshot - ); - - set_artifact_path!(config.core_object_store, file_artifacts.core_object_store); + Ok(()) } pub fn override_config(shell: &Shell, path: PathBuf, chain: &ChainConfig) -> anyhow::Result<()> { @@ -122,32 +96,13 @@ pub fn override_config(shell: 
&Shell, path: PathBuf, chain: &ChainConfig) -> any Ok(()) } -impl FileConfigWithDefaultName for GeneralConfig { - const FILE_NAME: &'static str = GENERAL_FILE; -} - -impl SaveConfig for GeneralConfig { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { - let bytes = - encode_yaml_repr::(self)?; - Ok(shell.write_file(path, bytes)?) - } -} - -impl ReadConfig for GeneralConfig { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let path = shell.current_dir().join(path); - read_yaml_repr::(&path, false) - } -} - -impl ConfigWithL2RpcUrl for GeneralConfig { - fn get_l2_rpc_url(&self) -> anyhow::Result { - self.api_config - .as_ref() - .map(|api_config| &api_config.web3_json_rpc.http_url) - .context("API config is missing")? - .parse() - .context("Failed to parse L2 RPC URL") - } +pub fn get_da_client_type(general: &RawConfig) -> Option<&str> { + general.get_raw("da_client").and_then(|val| { + let val = val.as_mapping()?; + if val.len() == 1 { + val.keys().next()?.as_str() + } else { + None + } + }) } diff --git a/zkstack_cli/crates/config/src/genesis.rs b/zkstack_cli/crates/config/src/genesis.rs index e457f3d1924c..0f75f374cbf9 100644 --- a/zkstack_cli/crates/config/src/genesis.rs +++ b/zkstack_cli/crates/config/src/genesis.rs @@ -1,41 +1,15 @@ -use std::path::Path; - -use xshell::Shell; -use zksync_basic_types::L1ChainId; -pub use zksync_config::GenesisConfig; -use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; - -use crate::{ - consts::GENESIS_FILE, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, - ChainConfig, -}; +use crate::{raw::PatchedConfig, ChainConfig}; pub fn update_from_chain_config( - genesis: &mut GenesisConfig, + genesis: &mut PatchedConfig, config: &ChainConfig, ) -> anyhow::Result<()> { - genesis.l2_chain_id = config.chain_id; + genesis.insert("l2_chain_id", config.chain_id.as_u64())?; // TODO(EVM-676): for now, the settlement layer is always the same as the L1 network - 
genesis.l1_chain_id = L1ChainId(config.l1_network.chain_id()); - genesis.l1_batch_commit_data_generator_mode = config.l1_batch_commit_data_generator_mode; + genesis.insert("l1_chain_id", config.l1_network.chain_id())?; + genesis.insert_yaml( + "l1_batch_commit_data_generator_mode", + config.l1_batch_commit_data_generator_mode, + )?; Ok(()) } - -impl FileConfigWithDefaultName for GenesisConfig { - const FILE_NAME: &'static str = GENESIS_FILE; -} - -impl SaveConfig for GenesisConfig { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { - let bytes = encode_yaml_repr::(self)?; - Ok(shell.write_file(path, bytes)?) - } -} - -impl ReadConfig for GenesisConfig { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let path = shell.current_dir().join(path); - read_yaml_repr::(&path, false) - } -} diff --git a/zkstack_cli/crates/config/src/lib.rs b/zkstack_cli/crates/config/src/lib.rs index 4d4fb8da61d2..f4f1e3a68835 100644 --- a/zkstack_cli/crates/config/src/lib.rs +++ b/zkstack_cli/crates/config/src/lib.rs @@ -10,28 +10,25 @@ pub use manipulations::*; pub use secrets::*; pub use wallet_creation::*; pub use wallets::*; -pub use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; mod apps; mod chain; mod consts; mod contracts; +pub mod da; +pub mod docker_compose; mod ecosystem; +pub mod explorer; +pub mod explorer_compose; mod file_config; +pub mod forge_interface; mod gateway; mod general; mod genesis; mod manipulations; +pub mod portal; +pub mod raw; mod secrets; +pub mod traits; mod wallet_creation; mod wallets; - -pub mod consensus_config; -pub mod consensus_secrets; -pub mod docker_compose; -pub mod explorer; -pub mod explorer_compose; -pub mod external_node; -pub mod forge_interface; -pub mod portal; -pub mod traits; diff --git a/zkstack_cli/crates/config/src/raw.rs b/zkstack_cli/crates/config/src/raw.rs new file mode 100644 index 000000000000..b14f404422f8 --- /dev/null +++ b/zkstack_cli/crates/config/src/raw.rs @@ -0,0 
+1,156 @@ +use std::path::{Path, PathBuf}; + +use anyhow::Context; +use serde::{de::DeserializeOwned, Serialize}; +use tokio::fs; +use xshell::Shell; + +#[derive(Debug)] +pub struct RawConfig { + path: PathBuf, + inner: serde_yaml::Value, +} + +impl RawConfig { + pub async fn read(shell: &Shell, path: PathBuf) -> anyhow::Result { + let path = shell.current_dir().join(&path); + let raw = fs::read_to_string(&path) + .await + .with_context(|| format!("failed reading config at `{path:?}`"))?; + let inner: serde_yaml::Value = serde_yaml::from_str(&raw) + .with_context(|| format!("failed deserializing config at `{path:?}` as YAML"))?; + anyhow::ensure!( + matches!(&inner, serde_yaml::Value::Mapping(_)), + "configuration is not a map" + ); + Ok(Self { inner, path }) + } + + pub fn get_raw(&self, path: &str) -> Option<&serde_yaml::Value> { + path.split('.') + .try_fold(&self.inner, |ptr, segment| match ptr { + serde_yaml::Value::Mapping(map) => map.get(segment), + _ => None, + }) + } + + pub fn get_opt(&self, path: &str) -> anyhow::Result> { + let Some(raw) = self.get_raw(path) else { + return Ok(None); + }; + serde_yaml::from_value(raw.clone()).with_context(|| { + format!( + "failed deserializing config param `{path}` in `{:?}`", + self.path + ) + }) + } + + pub fn get(&self, path: &str) -> anyhow::Result { + self.get_opt(path)? + .with_context(|| format!("config param `{path}` is missing in {:?}", self.path)) + } + + pub fn patched(self) -> PatchedConfig { + PatchedConfig { base: self } + } +} + +/// Mutable YAML configuration file. 
+#[derive(Debug)] +#[must_use = "Must be persisted"] +pub struct PatchedConfig { + base: RawConfig, +} + +impl PatchedConfig { + pub fn empty(shell: &Shell, path: PathBuf) -> Self { + let path = shell.current_dir().join(&path); + Self { + base: RawConfig { + path, + inner: serde_yaml::Value::Mapping(serde_yaml::Mapping::default()), + }, + } + } + + pub fn base(&self) -> &RawConfig { + &self.base + } + + pub fn insert(&mut self, key: &str, value: impl Into) -> anyhow::Result<()> { + assert!(!key.is_empty(), "key cannot be empty"); + let value = value.into(); + + let serde_yaml::Value::Mapping(map) = &mut self.base.inner else { + unreachable!(); // checked during initialization + }; + let mut map = map; + if let Some((parent_path, last_segment)) = key.rsplit_once('.') { + for segment in parent_path.split('.') { + if !map.contains_key(segment) { + let new_map = serde_yaml::Mapping::new(); + map.insert(segment.into(), new_map.into()); + } + + map = match map.get_mut(segment) { + Some(serde_yaml::Value::Mapping(child)) => child, + Some(_) => anyhow::bail!("Encountered non-map parent when inserting `{key}`"), + None => unreachable!(), + }; + } + map.insert(last_segment.into(), value); + } else { + map.insert(key.into(), value); + } + Ok(()) + } + + pub fn insert_yaml(&mut self, key: &str, value: impl Serialize) -> anyhow::Result<()> { + let value = serde_yaml::to_value(value) + .unwrap_or_else(|err| panic!("failed serializing config value at `{key}`: {err}")); + self.insert(key, value) + } + + pub fn insert_path(&mut self, key: &str, value: &Path) -> anyhow::Result<()> { + let value = value + .to_str() + .with_context(|| format!("path at `{key}` is not UTF-8"))?; + self.insert(key, value)?; + Ok(()) + } + + pub fn extend(&mut self, source: serde_yaml::Mapping) { + let serde_yaml::Value::Mapping(map) = &mut self.base.inner else { + unreachable!(); // checked during initialization + }; + map.extend(source); + } + + pub fn remove(&mut self, key: &str) { + let 
serde_yaml::Value::Mapping(map) = &mut self.base.inner else { + unreachable!(); // checked during initialization + }; + let mut map = map; + + if let Some((parent_path, last_segment)) = key.rsplit_once('.') { + for segment in parent_path.split('.') { + map = match map.get_mut(segment) { + Some(serde_yaml::Value::Mapping(child)) => child, + _ => return, + }; + } + map.remove(last_segment); + } else { + map.remove(key); + } + } + + pub async fn save(self) -> anyhow::Result<()> { + let contents = + serde_yaml::to_string(&self.base.inner).context("failed serializing config")?; + fs::write(&self.base.path, contents) + .await + .with_context(|| format!("failed writing config to `{:?}`", self.base.path)) + } +} diff --git a/zkstack_cli/crates/config/src/secrets.rs b/zkstack_cli/crates/config/src/secrets.rs index 91e8964b4651..fd33ebfd86d8 100644 --- a/zkstack_cli/crates/config/src/secrets.rs +++ b/zkstack_cli/crates/config/src/secrets.rs @@ -1,64 +1,27 @@ -use std::{path::Path, str::FromStr}; - -use anyhow::Context; -use xshell::Shell; use zkstack_cli_common::db::DatabaseConfig; -use zksync_basic_types::url::SensitiveUrl; -pub use zksync_config::configs::Secrets as SecretsConfig; -use zksync_protobuf_config::{encode_yaml_repr, read_yaml_repr}; -use crate::{ - consts::SECRETS_FILE, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, -}; +use crate::raw::PatchedConfig; pub fn set_server_database( - secrets: &mut SecretsConfig, + secrets: &mut PatchedConfig, server_db_config: &DatabaseConfig, ) -> anyhow::Result<()> { - let database = secrets - .database - .as_mut() - .context("Server database must be presented")?; - database.server_url = Some(SensitiveUrl::from(server_db_config.full_url())); - Ok(()) + secrets.insert( + "database.server_url", + server_db_config.full_url().to_string(), + ) } pub fn set_prover_database( - secrets: &mut SecretsConfig, + secrets: &mut PatchedConfig, prover_db_config: &DatabaseConfig, ) -> anyhow::Result<()> { - let database = secrets 
- .database - .as_mut() - .context("Prover database must be presented")?; - database.prover_url = Some(SensitiveUrl::from(prover_db_config.full_url())); - Ok(()) -} - -pub fn set_l1_rpc_url(secrets: &mut SecretsConfig, l1_rpc_url: String) -> anyhow::Result<()> { - secrets - .l1 - .as_mut() - .context("L1 Secrets must be presented")? - .l1_rpc_url = SensitiveUrl::from_str(&l1_rpc_url)?; - Ok(()) -} - -impl FileConfigWithDefaultName for SecretsConfig { - const FILE_NAME: &'static str = SECRETS_FILE; -} - -impl SaveConfig for SecretsConfig { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { - let bytes = encode_yaml_repr::(self)?; - Ok(shell.write_file(path, bytes)?) - } + secrets.insert( + "database.prover_url", + prover_db_config.full_url().to_string(), + ) } -impl ReadConfig for SecretsConfig { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let path = shell.current_dir().join(path); - read_yaml_repr::(&path, false) - } +pub fn set_l1_rpc_url(secrets: &mut PatchedConfig, l1_rpc_url: String) -> anyhow::Result<()> { + secrets.insert("l1.l1_rpc_url", l1_rpc_url) } diff --git a/zkstack_cli/crates/config/src/traits.rs b/zkstack_cli/crates/config/src/traits.rs index d21641e33ff5..e5429cc683c6 100644 --- a/zkstack_cli/crates/config/src/traits.rs +++ b/zkstack_cli/crates/config/src/traits.rs @@ -2,7 +2,6 @@ use std::path::{Path, PathBuf}; use anyhow::{bail, Context}; use serde::{de::DeserializeOwned, Serialize}; -use url::Url; use xshell::Shell; use zkstack_cli_common::files::{ read_json_file, read_toml_file, read_yaml_file, save_json_file, save_toml_file, save_yaml_file, @@ -157,7 +156,3 @@ fn save_with_comment( } Ok(()) } - -pub trait ConfigWithL2RpcUrl { - fn get_l2_rpc_url(&self) -> anyhow::Result; -} diff --git a/zkstack_cli/crates/zkstack/Cargo.toml b/zkstack_cli/crates/zkstack/Cargo.toml index 169fe593ba14..f39b5f5ea085 100644 --- a/zkstack_cli/crates/zkstack/Cargo.toml +++ b/zkstack_cli/crates/zkstack/Cargo.toml @@ -42,7 
+42,6 @@ zksync_config.workspace = true zksync_consensus_roles.workspace = true zksync_consensus_crypto.workspace = true zksync_protobuf.workspace = true -zksync_protobuf_config.workspace = true zksync_types.workspace = true zksync_web3_decl.workspace = true zksync_system_constants.workspace = true @@ -61,9 +60,8 @@ clap_complete.workspace = true dirs.workspace = true ethers.workspace = true xshell.workspace = true -zksync_protobuf_build.workspace = true [features] # Features that allows gateway-chain related actions. -# These should be available for outside users until stabilized. +# These should be available for outside users until stabilized. gateway = [] diff --git a/zkstack_cli/crates/zkstack/build.rs b/zkstack_cli/crates/zkstack/build.rs index d2d478f80227..5d077b77fc82 100644 --- a/zkstack_cli/crates/zkstack/build.rs +++ b/zkstack_cli/crates/zkstack/build.rs @@ -25,15 +25,6 @@ fn main() -> anyhow::Result<()> { println!("cargo:error={}", e); }; - zksync_protobuf_build::Config { - input_root: "src/commands/consensus/proto".into(), - proto_root: "zksync/toolbox/consensus".into(), - dependencies: vec!["::zksync_protobuf_config::proto".parse().unwrap()], - protobuf_crate: "::zksync_protobuf".parse().unwrap(), - is_public: false, - } - .generate() - .unwrap(); Ok(()) } diff --git a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh index c7357d661e2e..93496c220e15 100644 --- a/zkstack_cli/crates/zkstack/completion/_zkstack.zsh +++ b/zkstack_cli/crates/zkstack/completion/_zkstack.zsh @@ -1906,7 +1906,7 @@ _arguments "${_arguments_options[@]}" : \ '(--clone)--bellman-cuda-dir=[]:BELLMAN_CUDA_DIR:_default' \ '--bellman-cuda=[]' \ '--setup-compressor-key=[]' \ -'--path=[]:PATH:_default' \ +'--path=[]:PATH:_files' \ '--region=[]:REGION:(us europe asia)' \ '--mode=[]:MODE:(download generate)' \ '--setup-keys=[]' \ @@ -1977,7 +1977,7 @@ _arguments "${_arguments_options[@]}" : \ ;; (compressor-keys) _arguments 
"${_arguments_options[@]}" : \ -'--path=[]:PATH:_default' \ +'--path=[]:PATH:_files' \ '--chain=[Chain to use]:CHAIN:_default' \ '-v[Verbose mode]' \ '--verbose[Verbose mode]' \ diff --git a/zkstack_cli/crates/zkstack/completion/zkstack.fish b/zkstack_cli/crates/zkstack/completion/zkstack.fish index b724ddc6c723..b05971fe5f3f 100644 --- a/zkstack_cli/crates/zkstack/completion/zkstack.fish +++ b/zkstack_cli/crates/zkstack/completion/zkstack.fish @@ -602,7 +602,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ false\t''" complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l setup-compressor-key -r -f -a "true\t'' false\t''" -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l path -r +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l path -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init" -l region -r -f -a "us\t'' europe\t'' asia\t''" @@ -668,7 +668,7 @@ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_ complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -l ignore-prerequisites -d 'Ignores prerequisites checks' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from init-bellman-cuda" -s h -l help -d 'Print help' -complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l path -r +complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l path -r -F complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and 
__fish_seen_subcommand_from compressor-keys" -l chain -d 'Chain to use' -r complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -s v -l verbose -d 'Verbose mode' complete -c zkstack -n "__fish_zkstack_using_subcommand prover; and __fish_seen_subcommand_from compressor-keys" -l ignore-prerequisites -d 'Ignores prerequisites checks' diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs b/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs index 46b92248ae3f..b238c1160f66 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/accept_chain_ownership.rs @@ -7,7 +7,6 @@ use crate::{ accept_ownership::accept_admin, messages::{ MSG_ACCEPTING_ADMIN_SPINNER, MSG_CHAIN_NOT_INITIALIZED, MSG_CHAIN_OWNERSHIP_TRANSFERRED, - MSG_L1_SECRETS_MUST_BE_PRESENTED, }, }; @@ -17,13 +16,8 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; let contracts = chain_config.get_contracts_config()?; - let secrets = chain_config.get_secrets_config()?; - let l1_rpc_url = secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? 
- .l1_rpc_url - .expose_str() - .to_string(); + let secrets = chain_config.get_secrets_config().await?; + let l1_rpc_url = secrets.get("l1.l1_rpc_url")?; let spinner = Spinner::new(MSG_ACCEPTING_ADMIN_SPINNER); accept_admin( @@ -33,7 +27,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { &chain_config.get_wallets_config()?.governor, contracts.l1.diamond_proxy_addr, &args, - l1_rpc_url.clone(), + l1_rpc_url, ) .await?; spinner.finish(); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs index ef98a777352e..08421fcc6abf 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/args/genesis.rs @@ -56,18 +56,16 @@ impl GenesisArgs { } } - pub fn fill_values_with_secrets( + pub async fn fill_values_with_secrets( mut self, chain_config: &ChainConfig, ) -> anyhow::Result { - let secrets = chain_config.get_secrets_config()?; - let database = secrets - .database - .context("Database secrets must be present")?; + let secrets = chain_config.get_secrets_config().await?; + let server_url = secrets.get_opt::("database.server_url")?; - let (server_db_url, server_db_name) = if let Some(db_full_url) = database.server_url { - let db_config = DatabaseConfig::from_url(db_full_url.expose_url()) - .context("Invalid server database URL")?; + let (server_db_url, server_db_name) = if let Some(db_full_url) = &server_url { + let db_config = + DatabaseConfig::from_url(db_full_url).context("Invalid server database URL")?; (Some(db_config.url), Some(db_config.name)) } else { (None, None) diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs index 4d0e97e7ef05..7a8e6d5f838e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs +++ 
b/zkstack_cli/crates/zkstack/src/commands/chain/args/init/da_configs.rs @@ -3,11 +3,8 @@ use serde::{Deserialize, Serialize}; use strum::{Display, EnumIter, IntoEnumIterator}; use url::Url; use zkstack_cli_common::{Prompt, PromptSelect}; -use zksync_config::{ - configs::da_client::avail::{ - AvailClientConfig, AvailDefaultConfig, AvailGasRelayConfig, AvailSecrets, - }, - AvailConfig, +use zkstack_cli_config::da::{ + AvailClientConfig, AvailConfig, AvailDefaultConfig, AvailGasRelayConfig, AvailSecrets, }; use crate::{ diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs index 3bf4db7188f7..43ea4b55ab37 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/build_transactions.rs @@ -40,8 +40,9 @@ pub(crate) async fn run(args: BuildTransactionsArgs, shell: &Shell) -> anyhow::R logger::note(MSG_SELECTED_CONFIG, logger::object_to_string(&chain_config)); - let mut genesis_config = chain_config.get_genesis_config()?; + let mut genesis_config = chain_config.get_genesis_config().await?.patched(); update_from_chain_config(&mut genesis_config, &chain_config)?; + // FIXME: config isn't saved; why? 
// Copy ecosystem contracts let mut contracts_config = config diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs index 0b06cd8de3c2..cb4c5c8e555d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/convert_to_gateway.rs @@ -9,19 +9,19 @@ use zkstack_cli_common::{ }; use zkstack_cli_config::{ forge_interface::{ - deploy_ecosystem::input::InitialDeploymentConfig, + deploy_ecosystem::input::{GenesisInput, InitialDeploymentConfig}, deploy_gateway_ctm::{input::DeployGatewayCTMInput, output::DeployGatewayCTMOutput}, gateway_preparation::{input::GatewayPreparationConfig, output::GatewayPreparationOutput}, script_params::{DEPLOY_GATEWAY_CTM, GATEWAY_GOVERNANCE_TX_PATH1, GATEWAY_PREPARATION}, }, traits::{ReadConfig, SaveConfig, SaveConfigWithBasePath}, - ChainConfig, EcosystemConfig, GenesisConfig, + ChainConfig, EcosystemConfig, }; use zksync_basic_types::H256; use zksync_config::configs::GatewayConfig; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_CHAIN_NOT_INITIALIZED, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -55,14 +55,12 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .load_chain(chain_name) .context(MSG_CHAIN_NOT_INITIALIZED)?; let l1_url = chain_config - .get_secrets_config()? - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + .get_secrets_config() + .await? 
+ .get::("l1.l1_rpc_url")?; let mut chain_contracts_config = chain_config.get_contracts_config()?; - let chain_genesis_config = chain_config.get_genesis_config()?; + let chain_genesis_config = chain_config.get_genesis_config().await?; + let genesis_input = GenesisInput::new(&chain_genesis_config)?; // Firstly, deploying gateway contracts let gateway_config = calculate_gateway_ctm( @@ -70,7 +68,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { args.clone(), &ecosystem_config, &chain_config, - &chain_genesis_config, + &genesis_input, &ecosystem_config.get_initial_deployment_config().unwrap(), l1_url.clone(), ) @@ -147,7 +145,7 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { args, &ecosystem_config, &chain_config, - &chain_genesis_config, + &genesis_input, &ecosystem_config.get_initial_deployment_config().unwrap(), l1_url, ) @@ -163,8 +161,8 @@ pub async fn calculate_gateway_ctm( forge_args: ForgeScriptArgs, config: &EcosystemConfig, chain_config: &ChainConfig, - chain_genesis_config: &GenesisConfig, - initial_deployemnt_config: &InitialDeploymentConfig, + genesis_input: &GenesisInput, + initial_deployment_config: &InitialDeploymentConfig, l1_rpc_url: String, ) -> anyhow::Result { let contracts_config = chain_config.get_contracts_config()?; @@ -173,9 +171,9 @@ pub async fn calculate_gateway_ctm( let deploy_config = DeployGatewayCTMInput::new( chain_config, config, - chain_genesis_config, + genesis_input, &contracts_config, - initial_deployemnt_config, + initial_deployment_config, ); deploy_config.save(shell, deploy_config_path)?; @@ -214,20 +212,19 @@ pub async fn deploy_gateway_ctm( forge_args: ForgeScriptArgs, config: &EcosystemConfig, chain_config: &ChainConfig, - chain_genesis_config: &GenesisConfig, - initial_deployemnt_config: &InitialDeploymentConfig, + genesis_input: &GenesisInput, + initial_deployment_config: &InitialDeploymentConfig, l1_rpc_url: String, ) -> anyhow::Result<()> { let 
contracts_config = chain_config.get_contracts_config()?; - // let contracts_config = config.get_contracts_config()?; let deploy_config_path = DEPLOY_GATEWAY_CTM.input(&config.link_to_code); let deploy_config = DeployGatewayCTMInput::new( chain_config, config, - chain_genesis_config, + genesis_input, &contracts_config, - initial_deployemnt_config, + initial_deployment_config, ); deploy_config.save(shell, deploy_config_path)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs index 529d861a2559..1112907374a4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/create.rs @@ -4,11 +4,11 @@ use anyhow::Context; use xshell::Shell; use zkstack_cli_common::{logger, spinner::Spinner}; use zkstack_cli_config::{ - create_local_configs_dir, create_wallets, - traits::{ReadConfigWithBasePath, SaveConfigWithBasePath}, - ChainConfig, EcosystemConfig, GenesisConfig, + create_local_configs_dir, create_wallets, raw::RawConfig, traits::SaveConfigWithBasePath, + ChainConfig, EcosystemConfig, GENESIS_FILE, }; use zksync_basic_types::L2ChainId; +use zksync_types::H256; use crate::{ commands::chain::args::create::{ChainCreateArgs, ChainCreateArgsFinal}, @@ -20,12 +20,12 @@ use crate::{ utils::link_to_code::resolve_link_to_code, }; -pub fn run(args: ChainCreateArgs, shell: &Shell) -> anyhow::Result<()> { +pub async fn run(args: ChainCreateArgs, shell: &Shell) -> anyhow::Result<()> { let mut ecosystem_config = EcosystemConfig::from_file(shell)?; - create(args, &mut ecosystem_config, shell) + create(args, &mut ecosystem_config, shell).await } -pub fn create( +pub async fn create( args: ChainCreateArgs, ecosystem_config: &mut EcosystemConfig, shell: &Shell, @@ -46,7 +46,7 @@ pub fn create( let spinner = Spinner::new(MSG_CREATING_CHAIN_CONFIGURATIONS_SPINNER); let name = args.chain_name.clone(); let set_as_default = args.set_as_default; - 
create_chain_inner(args, ecosystem_config, shell)?; + create_chain_inner(args, ecosystem_config, shell).await?; if set_as_default { ecosystem_config.default_chain = name; ecosystem_config.save_with_base_path(shell, ".")?; @@ -58,7 +58,7 @@ pub fn create( Ok(()) } -pub(crate) fn create_chain_inner( +pub(crate) async fn create_chain_inner( args: ChainCreateArgsFinal, ecosystem_config: &EcosystemConfig, shell: &Shell, @@ -82,11 +82,12 @@ pub(crate) fn create_chain_inner( args.link_to_code.clone(), args.update_submodules, )?; - let default_genesis_config = GenesisConfig::read_with_base_path( - shell, - EcosystemConfig::default_configs_path(&link_to_code), - )?; - let has_evm_emulation_support = default_genesis_config.evm_emulator_hash.is_some(); + let genesis_config_path = + EcosystemConfig::default_configs_path(&link_to_code).join(GENESIS_FILE); + let default_genesis_config = RawConfig::read(shell, genesis_config_path).await?; + let has_evm_emulation_support = default_genesis_config + .get_opt::("evm_emulator_hash")? 
+ .is_some(); if args.evm_emulator && !has_evm_emulation_support { anyhow::bail!(MSG_EVM_EMULATOR_HASH_MISSING_ERR); } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs index 7cf628b1170b..98130859b7a3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_l2_contracts.rs @@ -23,10 +23,7 @@ use zkstack_cli_config::{ }; use crate::{ - messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_L2_CONTRACT_SPINNER, - MSG_L1_SECRETS_MUST_BE_PRESENTED, - }, + messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_DEPLOYING_L2_CONTRACT_SPINNER}, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -263,9 +260,11 @@ async fn call_forge( chain_config, &ecosystem_config.get_contracts_config()?, ecosystem_config.era_chain_id, - )?; + ) + .await?; + let foundry_contracts_path = chain_config.path_to_l1_foundry(); - let secrets = chain_config.get_secrets_config()?; + let secrets = chain_config.get_secrets_config().await?; input.save( shell, DEPLOY_L2_CONTRACTS_SCRIPT_PARAMS.input(&chain_config.link_to_code), @@ -277,14 +276,7 @@ async fn call_forge( forge_args.clone(), ) .with_ffi() - .with_rpc_url( - secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? 
- .l1_rpc_url - .expose_str() - .to_string(), - ); + .with_rpc_url(secrets.get("l1.l1_rpc_url")?); if with_broadcast { forge = forge.with_broadcast(); } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs index 1c103ea29910..126b0df1d3e5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/deploy_paymaster.rs @@ -11,7 +11,7 @@ use zkstack_cli_config::{ }; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_CHAIN_NOT_INITIALIZED, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -39,19 +39,12 @@ pub async fn deploy_paymaster( shell, DEPLOY_PAYMASTER_SCRIPT_PARAMS.input(&chain_config.link_to_code), )?; - let secrets = chain_config.get_secrets_config()?; + let secrets = chain_config.get_secrets_config().await?; let mut forge = Forge::new(&foundry_contracts_path) .script(&DEPLOY_PAYMASTER_SCRIPT_PARAMS.script(), forge_args.clone()) .with_ffi() - .with_rpc_url( - secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? 
- .l1_rpc_url - .expose_str() - .to_string(), - ); + .with_rpc_url(secrets.get("l1.l1_rpc_url")?); if let Some(address) = sender { forge = forge.with_sender(address); diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs b/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs index e15cdbacf103..2e80fb8ab7b7 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/enable_evm_emulator.rs @@ -1,13 +1,13 @@ use anyhow::Context; use xshell::Shell; use zkstack_cli_common::{forge::ForgeScriptArgs, logger}; -use zkstack_cli_config::{traits::ReadConfigWithBasePath, EcosystemConfig, GenesisConfig}; +use zkstack_cli_config::{raw::RawConfig, EcosystemConfig, GENESIS_FILE}; +use zksync_types::H256; use crate::{ enable_evm_emulator::enable_evm_emulator, messages::{ MSG_CHAIN_NOT_INITIALIZED, MSG_EVM_EMULATOR_ENABLED, MSG_EVM_EMULATOR_HASH_MISSING_ERR, - MSG_L1_SECRETS_MUST_BE_PRESENTED, }, }; @@ -17,22 +17,18 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; - let default_genesis_config = GenesisConfig::read_with_base_path( - shell, - EcosystemConfig::default_configs_path(&chain_config.link_to_code), - )?; + let genesis_config_path = + EcosystemConfig::default_configs_path(&chain_config.link_to_code).join(GENESIS_FILE); + let default_genesis_config = RawConfig::read(shell, genesis_config_path).await?; - let has_evm_emulation_support = default_genesis_config.evm_emulator_hash.is_some(); + let has_evm_emulation_support = default_genesis_config + .get_opt::("evm_emulator_hash")? + .is_some(); anyhow::ensure!(has_evm_emulation_support, MSG_EVM_EMULATOR_HASH_MISSING_ERR); let contracts = chain_config.get_contracts_config()?; - let secrets = chain_config.get_secrets_config()?; - let l1_rpc_url = secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? 
- .l1_rpc_url - .expose_str() - .to_string(); + let secrets = chain_config.get_secrets_config().await?; + let l1_rpc_url = secrets.get("l1.l1_rpc_url")?; enable_evm_emulator( shell, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs index f096daef032a..f61f0926369c 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/gateway_upgrade.rs @@ -25,15 +25,14 @@ use zkstack_cli_config::{ ChainConfig, EcosystemConfig, }; use zkstack_cli_types::L1BatchCommitmentMode; -use zksync_basic_types::U256; -use zksync_types::Address; +use zksync_basic_types::{Address, U256}; use crate::{ commands::dev::commands::gateway::{ check_chain_readiness, fetch_chain_info, get_admin_call_builder, set_upgrade_timestamp_calldata, DAMode, GatewayUpgradeArgsInner, GatewayUpgradeInfo, }, - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_CHAIN_NOT_INITIALIZED, utils::forge::{fill_forge_private_key, WalletOwner}, }; @@ -97,12 +96,9 @@ pub async fn run(args: GatewayUpgradeArgs, shell: &Shell) -> anyhow::Result<()> .context(MSG_CHAIN_NOT_INITIALIZED)?; let l1_url = chain_config - .get_secrets_config()? - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + .get_secrets_config() + .await? 
+ .get("l1.l1_rpc_url")?; match args.chain_upgrade_stage { GatewayChainUpgradeStage::PrepareStage1 => { @@ -138,31 +134,27 @@ async fn prepare_stage1( // No need to save it, we have enough for now let mut contracts_config = chain_config.get_contracts_config()?; - let general_config = chain_config.get_general_config()?; - let genesis_config = chain_config.get_genesis_config()?; + let general_config = chain_config.get_general_config().await?; + let genesis_config = chain_config.get_genesis_config().await?; let upgrade_info = GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( contracts_config.ecosystem_contracts.bridgehub_proxy_addr, gateway_ecosystem_preparation_output, ); - let da_mode: DAMode = - if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { - DAMode::PermanentRollup - } else { - DAMode::Validium - }; + let commitment_mode = + genesis_config.get::("l1_batch_commit_data_generator_mode")?; + let da_mode = match commitment_mode { + L1BatchCommitmentMode::Rollup => DAMode::PermanentRollup, + L1BatchCommitmentMode::Validium => DAMode::Validium, + }; let chain_info = fetch_chain_info( &upgrade_info, &GatewayUpgradeArgsInner { chain_id: chain_config.chain_id.as_u64(), l1_rpc_url: l1_url, - l2_rpc_url: general_config - .api_config - .context("api config")? 
- .web3_json_rpc - .http_url, + l2_rpc_url: general_config.get("api.web3_json_rpc.http_url")?, validator_addr1: chain_config.get_wallets_config()?.operator.address, validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, da_mode, @@ -265,19 +257,13 @@ async fn finalize_stage1( println!("Finalizing stage1 of chain upgrade!"); let contracts_config = chain_config.get_contracts_config()?; - let general_config = chain_config.get_general_config()?; - let genesis_config = chain_config.get_genesis_config()?; + let general_config = chain_config.get_general_config().await?; + let genesis_config = chain_config.get_genesis_config().await?; println!("Checking chain readiness..."); check_chain_readiness( l1_url.clone(), - general_config - .api_config - .as_ref() - .context("api")? - .web3_json_rpc - .http_url - .clone(), + general_config.get("api.web3_json_rpc.http_url")?, chain_config.chain_id.as_u64(), ) .await?; @@ -287,12 +273,12 @@ async fn finalize_stage1( let gateway_ecosystem_preparation_output = GatewayEcosystemUpgradeOutput::read_with_base_path(shell, &ecosystem_config.config)?; - let da_mode: DAMode = - if genesis_config.l1_batch_commit_data_generator_mode == L1BatchCommitmentMode::Rollup { - DAMode::PermanentRollup - } else { - DAMode::Validium - }; + let commitment_mode = + genesis_config.get::("l1_batch_commit_data_generator_mode")?; + let da_mode = match commitment_mode { + L1BatchCommitmentMode::Rollup => DAMode::PermanentRollup, + L1BatchCommitmentMode::Validium => DAMode::Validium, + }; let upgrade_info = GatewayUpgradeInfo::from_gateway_ecosystem_upgrade( contracts_config.ecosystem_contracts.bridgehub_proxy_addr, @@ -301,11 +287,7 @@ async fn finalize_stage1( let args = GatewayUpgradeArgsInner { chain_id: chain_config.chain_id.as_u64(), l1_rpc_url: l1_url.clone(), - l2_rpc_url: general_config - .api_config - .context("api config")? 
- .web3_json_rpc - .http_url, + l2_rpc_url: general_config.get("api.web3_json_rpc.http_url")?, validator_addr1: chain_config.get_wallets_config()?.operator.address, validator_addr2: chain_config.get_wallets_config()?.blob_operator.address, da_mode, @@ -313,15 +295,10 @@ async fn finalize_stage1( }; let chain_info = fetch_chain_info(&upgrade_info, &args).await?; - let admin_calls_finalize = get_admin_call_builder(&upgrade_info, &chain_info, args); - admin_calls_finalize.display(); - let admin_calldata = admin_calls_finalize.compile_full_calldata(); - call_chain_admin(l1_url, chain_config, admin_calldata).await?; - println!("done!"); Ok(()) diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs index fe12bc017a45..cda28883d0f3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/database.rs @@ -8,8 +8,8 @@ use zkstack_cli_common::{ logger, }; use zkstack_cli_config::{ - override_config, set_file_artifacts, set_rocks_db_config, set_server_database, - traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig, FileArtifacts, + override_config, set_file_artifacts, set_rocks_db_config, set_server_database, ChainConfig, + EcosystemConfig, FileArtifacts, }; use zkstack_cli_types::ProverMode; use zksync_basic_types::commitment::L1BatchCommitmentMode; @@ -34,10 +34,10 @@ pub async fn run(args: GenesisArgs, shell: &Shell) -> anyhow::Result<()> { .load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; - let mut secrets = chain_config.get_secrets_config()?; - let args = args.fill_values_with_secrets(&chain_config)?; + let mut secrets = chain_config.get_secrets_config().await?.patched(); + let args = args.fill_values_with_secrets(&chain_config).await?; set_server_database(&mut secrets, &args.server_db)?; - secrets.save_with_base_path(shell, &chain_config.configs)?; + secrets.save().await?; 
initialize_server_database( shell, @@ -78,7 +78,7 @@ pub async fn initialize_server_database( Ok(()) } -pub fn update_configs( +pub async fn update_configs( args: GenesisArgsFinal, shell: &Shell, config: &ChainConfig, @@ -86,18 +86,18 @@ pub fn update_configs( shell.create_dir(&config.rocks_db_path)?; // Update secrets configs - let mut secrets = config.get_secrets_config()?; + let mut secrets = config.get_secrets_config().await?.patched(); set_server_database(&mut secrets, &args.server_db)?; - secrets.save_with_base_path(shell, &config.configs)?; + secrets.save().await?; // Update general config - let mut general = config.get_general_config()?; + let mut general = config.get_general_config().await?.patched(); let rocks_db = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::Main) .context(MSG_RECREATE_ROCKS_DB_ERRROR)?; let file_artifacts = FileArtifacts::new(config.artifacts.clone()); set_rocks_db_config(&mut general, rocks_db)?; - set_file_artifacts(&mut general, file_artifacts); - general.save_with_base_path(shell, &config.configs)?; + set_file_artifacts(&mut general, file_artifacts)?; + general.save().await?; let link_to_code = config.link_to_code.clone(); if config.prover_version != ProverMode::NoProofs { diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs index 5cb289d32609..ee4bd90b3e15 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/mod.rs @@ -7,7 +7,7 @@ use zkstack_cli_config::{ChainConfig, EcosystemConfig}; use crate::{ commands::chain::{ args::genesis::{GenesisArgs, GenesisArgsFinal}, - genesis::{self, database::initialize_server_database, server::run_server_genesis}, + genesis::{database::initialize_server_database, server::run_server_genesis}, }, messages::{ MSG_CHAIN_NOT_INITIALIZED, MSG_GENESIS_COMPLETED, MSG_INITIALIZING_DATABASES_SPINNER, @@ -63,7 +63,7 @@ pub 
async fn genesis( shell: &Shell, config: &ChainConfig, ) -> anyhow::Result<()> { - genesis::database::update_configs(args.clone(), shell, config)?; + database::update_configs(args.clone(), shell, config).await?; logger::note( MSG_SELECTED_CONFIG, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs index 9a52595c978c..9e61d8f402e4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/genesis/server.rs @@ -7,7 +7,7 @@ use zkstack_cli_common::{ }; use zkstack_cli_config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, - GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, + WalletsConfig, GENERAL_FILE, GENESIS_FILE, SECRETS_FILE, }; use crate::messages::{ @@ -35,10 +35,10 @@ pub fn run_server_genesis(chain_config: &ChainConfig, shell: &Shell) -> anyhow:: .run( shell, ServerMode::Genesis, - GenesisConfig::get_path_with_base_path(&chain_config.configs), + chain_config.configs.join(GENESIS_FILE), WalletsConfig::get_path_with_base_path(&chain_config.configs), - GeneralConfig::get_path_with_base_path(&chain_config.configs), - SecretsConfig::get_path_with_base_path(&chain_config.configs), + chain_config.configs.join(GENERAL_FILE), + chain_config.configs.join(SECRETS_FILE), ContractsConfig::get_path_with_base_path(&chain_config.configs), None, vec![], diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs index 4db4c9927de1..dafc786333da 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/init/configs.rs @@ -1,12 +1,11 @@ use anyhow::Context; -use ethers::types::Address; use xshell::Shell; use zkstack_cli_common::logger; use zkstack_cli_config::{ copy_configs, set_l1_rpc_url, traits::SaveConfigWithBasePath, 
update_from_chain_config, ChainConfig, ContractsConfig, EcosystemConfig, }; -use zksync_config::configs::DataAvailabilitySecrets; +use zksync_types::Address; use crate::{ commands::{ @@ -21,11 +20,11 @@ use crate::{ portal::update_portal_config, }, messages::{ - MSG_CHAIN_CONFIGS_INITIALIZED, MSG_CHAIN_NOT_FOUND_ERR, MSG_CONSENSUS_CONFIG_MISSING_ERR, + MSG_CHAIN_CONFIGS_INITIALIZED, MSG_CHAIN_NOT_FOUND_ERR, MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR, }, utils::{ - consensus::{generate_consensus_keys, get_consensus_secrets, get_genesis_specs}, + consensus::{generate_consensus_keys, set_consensus_secrets, set_genesis_specs}, ports::EcosystemPortsScanner, }, }; @@ -61,52 +60,31 @@ pub async fn init_configs( )?; } - let consensus_keys = generate_consensus_keys(); - - // Initialize secrets config - let mut secrets = chain_config.get_secrets_config()?; - set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; - secrets.consensus = Some(get_consensus_secrets(&consensus_keys)); - - let mut general_config = chain_config.get_general_config()?; - - if general_config.proof_data_handler_config.is_some() && general_config.prover_gateway.is_some() - { - let proof_data_handler_config = general_config.proof_data_handler_config.clone().unwrap(); - let mut prover_gateway = general_config.prover_gateway.clone().unwrap(); - - prover_gateway.api_url = - format!("http://127.0.0.1:{}", proof_data_handler_config.http_port); - - general_config.prover_gateway = Some(prover_gateway); + let mut general_config = chain_config.get_general_config().await?.patched(); + let prover_data_handler_port = general_config + .base() + .get_opt::("data_handler.http_port")?; + if let Some(port) = prover_data_handler_port { + general_config.insert("prover_gateway.api_url", format!("http://127.0.0.1:{port}"))?; } - let mut consensus_config = general_config - .consensus_config - .context(MSG_CONSENSUS_CONFIG_MISSING_ERR)?; - - consensus_config.genesis_spec = Some(get_genesis_specs(chain_config, 
&consensus_keys)); + let consensus_keys = generate_consensus_keys(); + set_genesis_specs(&mut general_config, chain_config, &consensus_keys)?; - general_config.consensus_config = Some(consensus_config); - if let Some(validium_config) = init_args.validium_config.clone() { - match validium_config { - ValidiumType::NoDA => { - general_config.da_client_config = None; - } - ValidiumType::Avail((avail_config, avail_secrets)) => { - general_config.da_client_config = Some(avail_config.into()); - secrets.data_availability = Some(DataAvailabilitySecrets::Avail(avail_secrets)); - } + match &init_args.validium_config { + None | Some(ValidiumType::NoDA) => { + general_config.remove("da_client"); + } + Some(ValidiumType::Avail((avail_config, _))) => { + general_config.insert_yaml("da_client.avail", avail_config)?; } } - - secrets.save_with_base_path(shell, &chain_config.configs)?; - general_config.save_with_base_path(shell, &chain_config.configs)?; + general_config.save().await?; // Initialize genesis config - let mut genesis_config = chain_config.get_genesis_config()?; + let mut genesis_config = chain_config.get_genesis_config().await?.patched(); update_from_chain_config(&mut genesis_config, chain_config)?; - genesis_config.save_with_base_path(shell, &chain_config.configs)?; + genesis_config.save().await?; // Initialize contracts config let mut contracts_config = ecosystem_config.get_contracts_config()?; @@ -115,12 +93,24 @@ pub async fn init_configs( contracts_config.l1.chain_admin_addr = Address::zero(); contracts_config.l1.base_token_addr = chain_config.base_token.address; contracts_config.l1.base_token_asset_id = Some(encode_ntv_asset_id( - genesis_config.l1_chain_id.0.into(), + chain_config.l1_network.chain_id().into(), contracts_config.l1.base_token_addr, )); contracts_config.save_with_base_path(shell, &chain_config.configs)?; - genesis::database::update_configs(init_args.genesis_args.clone(), shell, chain_config)?; + // Initialize secrets config + let mut secrets = 
chain_config.get_secrets_config().await?.patched(); + set_l1_rpc_url(&mut secrets, init_args.l1_rpc_url.clone())?; + set_consensus_secrets(&mut secrets, &consensus_keys)?; + match &init_args.validium_config { + None | Some(ValidiumType::NoDA) => { /* Do nothing */ } + Some(ValidiumType::Avail((_, avail_secrets))) => { + secrets.insert_yaml("da.avail", avail_secrets)?; + } + } + secrets.save().await?; + + genesis::database::update_configs(init_args.genesis_args.clone(), shell, chain_config).await?; update_portal_config(shell, chain_config) .await diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs index f115048d1181..b2638e989d3f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/init/mod.rs @@ -2,9 +2,10 @@ use anyhow::Context; use clap::{command, Parser, Subcommand}; use xshell::Shell; use zkstack_cli_common::{git, logger, spinner::Spinner}; -use zkstack_cli_config::{traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig}; +use zkstack_cli_config::{ + get_da_client_type, traits::SaveConfigWithBasePath, ChainConfig, EcosystemConfig, +}; use zkstack_cli_types::{BaseToken, L1BatchCommitmentMode}; -use zksync_config::DAClientConfig; use zksync_types::Address; use crate::{ @@ -175,7 +176,9 @@ pub async fn init( .await?; contracts_config.save_with_base_path(shell, &chain_config.configs)?; - let l1_da_validator_addr = get_l1_da_validator(chain_config); + let l1_da_validator_addr = get_l1_da_validator(chain_config) + .await + .context("l1_da_validator_addr")?; let spinner = Spinner::new(MSG_DA_PAIR_REGISTRATION_SPINNER); set_da_validator_pair( @@ -184,7 +187,7 @@ pub async fn init( contracts_config.l1.chain_admin_addr, &chain_config.get_wallets_config()?.governor, contracts_config.l1.diamond_proxy_addr, - l1_da_validator_addr.context("l1_da_validator_addr")?, + l1_da_validator_addr, contracts_config .l2 
.da_validator_addr @@ -245,21 +248,19 @@ pub async fn init( Ok(()) } -pub(crate) fn get_l1_da_validator(chain_config: &ChainConfig) -> anyhow::Result
{ +pub(crate) async fn get_l1_da_validator(chain_config: &ChainConfig) -> anyhow::Result
{ let contracts_config = chain_config.get_contracts_config()?; let l1_da_validator_contract = match chain_config.l1_batch_commit_data_generator_mode { L1BatchCommitmentMode::Rollup => contracts_config.l1.rollup_l1_da_validator_addr, L1BatchCommitmentMode::Validium => { - let general_config = chain_config.get_general_config()?; - if let Some(da_client_config) = general_config.da_client_config { - match da_client_config { - DAClientConfig::Avail(_) => contracts_config.l1.avail_l1_da_validator_addr, - DAClientConfig::NoDA => contracts_config.l1.no_da_validium_l1_validator_addr, - _ => anyhow::bail!("DA client config is not supported"), + let general_config = chain_config.get_general_config().await?; + match get_da_client_type(&general_config) { + Some("avail") => contracts_config.l1.avail_l1_da_validator_addr, + Some("no_da") | None => contracts_config.l1.no_da_validium_l1_validator_addr, + Some(unsupported) => { + anyhow::bail!("DA client config is not supported: {unsupported:?}"); } - } else { - contracts_config.l1.no_da_validium_l1_validator_addr } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs index cf9b9e8e6399..b2ebcda80a85 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_from_gateway.rs @@ -25,14 +25,12 @@ use zkstack_cli_config::{ EcosystemConfig, }; use zkstack_cli_types::L1BatchCommitmentMode; -use zksync_basic_types::{ - pubdata_da::PubdataSendingMode, settlement::SettlementMode, H256, U256, U64, -}; +use zksync_basic_types::{settlement::SettlementMode, H256, U256, U64}; use zksync_types::L2ChainId; use zksync_web3_decl::client::{Client, L2}; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_CHAIN_NOT_INITIALIZED, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -74,17 
+72,14 @@ pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result< .context("Gateway config not present")?; let l1_url = chain_config - .get_secrets_config()? - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + .get_secrets_config() + .await? + .get::("l1.l1_rpc_url")?; - let genesis_config = chain_config.get_genesis_config()?; + let genesis_config = chain_config.get_genesis_config().await?; let is_rollup = matches!( - genesis_config.l1_batch_commit_data_generator_mode, + genesis_config.get("l1_batch_commit_data_generator_mode")?, L1BatchCommitmentMode::Rollup ); @@ -126,29 +121,13 @@ pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result< ) .await?; - let gateway_provider = Provider::::try_from( - gateway_chain_config - .get_general_config() - .unwrap() - .api_config - .unwrap() - .web3_json_rpc - .http_url, - )?; + let general_config = gateway_chain_config.get_general_config().await?; + let l2_rpc_url = general_config.get::("api.web3_json_rpc.http_url")?; + let gateway_provider = Provider::::try_from(&l2_rpc_url)?; - let client: Client = Client::http( - gateway_chain_config - .get_general_config() - .unwrap() - .api_config - .unwrap() - .web3_json_rpc - .http_url - .parse() - .unwrap(), - )? - .for_network(L2::from(L2ChainId::new(gateway_chain_id).unwrap())) - .build(); + let client: Client = Client::http(l2_rpc_url.parse().context("invalid L2 RPC URL")?)? 
+ .for_network(L2::from(L2ChainId::new(gateway_chain_id).unwrap())) + .build(); if hash == H256::zero() { println!("Chain already migrated!"); @@ -188,43 +167,23 @@ pub async fn run(args: MigrateFromGatewayArgs, shell: &Shell) -> anyhow::Result< gateway_chain_chain_config.gateway_chain_id = 0u64.into(); gateway_chain_chain_config.save_with_base_path(shell, chain_config.configs.clone())?; - let mut general_config = chain_config.get_general_config().unwrap(); - - let eth_config = general_config.eth.as_mut().context("eth")?; - - eth_config - .gas_adjuster - .as_mut() - .expect("gas_adjuster") - .settlement_mode = SettlementMode::SettlesToL1; + let mut general_config = chain_config.get_general_config().await?.patched(); + general_config.insert_yaml( + "eth.gas_adjuster.settlement_mode", + SettlementMode::SettlesToL1, + )?; if is_rollup { - // For rollups, new type of commitment should be used, but - // not for validium. - eth_config - .sender - .as_mut() - .expect("sender") - .pubdata_sending_mode = PubdataSendingMode::Blobs; + // `PubdataSendingMode` has differing `serde` and file-based config serializations, hence + // we supply a raw string value. + general_config.insert("eth.sender.pubdata_sending_mode", "BLOBS")?; } - eth_config - .sender - .as_mut() - .context("sender")? - .wait_confirmations = Some(0); + general_config.insert("eth.sender.wait_confirmations", 0)?; + // Undoing what was changed during migration to gateway. // TODO(EVM-925): maybe remove this logic. 
- eth_config - .sender - .as_mut() - .expect("sender") - .max_aggregated_tx_gas = 15000000; - eth_config - .sender - .as_mut() - .expect("sender") - .max_eth_tx_data_size = 120_000; - - general_config.save_with_base_path(shell, chain_config.configs.clone())?; + general_config.insert("eth.sender.max_aggregated_tx_gas", 15000000)?; + general_config.insert("eth.sender.max_eth_tx_data_size", 120_000)?; + general_config.save().await?; Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs index c51f6414ce97..0be8c62da627 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/migrate_to_gateway.rs @@ -24,14 +24,12 @@ use zkstack_cli_config::{ EcosystemConfig, }; use zkstack_cli_types::L1BatchCommitmentMode; -use zksync_basic_types::{ - pubdata_da::PubdataSendingMode, settlement::SettlementMode, Address, H256, U256, U64, -}; +use zksync_basic_types::{settlement::SettlementMode, Address, H256, U256, U64}; use zksync_config::configs::gateway::GatewayChainConfig; use zksync_system_constants::L2_BRIDGEHUB_ADDRESS; use crate::{ - messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_CHAIN_NOT_INITIALIZED, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -84,14 +82,11 @@ pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<() .context("Gateway config not present")?; let l1_url = chain_config - .get_secrets_config()? - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + .get_secrets_config() + .await? 
+ .get::("l1.l1_rpc_url")?; - let genesis_config = chain_config.get_genesis_config()?; + let genesis_config = chain_config.get_genesis_config().await?; let preparation_config_path = GATEWAY_PREPARATION.input(&ecosystem_config.link_to_code); let preparation_config = GatewayPreparationConfig::new( @@ -176,15 +171,9 @@ pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<() .await? .governance_l2_tx_hash; - let gateway_provider = Provider::::try_from( - gateway_chain_config - .get_general_config() - .unwrap() - .api_config - .unwrap() - .web3_json_rpc - .http_url, - )?; + let general_config = gateway_chain_config.get_general_config().await?; + let l2_rpc_url = general_config.get::("api.web3_json_rpc.http_url")?; + let gateway_provider = Provider::::try_from(l2_rpc_url.clone())?; if hash == H256::zero() { println!("Chain already migrated!"); @@ -214,7 +203,7 @@ pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<() let chain_contracts_config = chain_config.get_contracts_config().unwrap(); let is_rollup = matches!( - genesis_config.l1_batch_commit_data_generator_mode, + genesis_config.get("l1_batch_commit_data_generator_mode")?, L1BatchCommitmentMode::Rollup ); @@ -356,19 +345,10 @@ pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<() hex::encode(hash.as_bytes()) ); - let gateway_url = gateway_chain_config - .get_general_config() - .unwrap() - .api_config - .unwrap() - .web3_json_rpc - .http_url - .clone(); - - let mut chain_secrets_config = chain_config.get_secrets_config().unwrap(); - chain_secrets_config.l1.as_mut().unwrap().gateway_rpc_url = - Some(url::Url::parse(&gateway_url).unwrap().into()); - chain_secrets_config.save_with_base_path(shell, chain_config.configs.clone())?; + let gateway_url = l2_rpc_url; + let mut chain_secrets_config = chain_config.get_secrets_config().await?.patched(); + chain_secrets_config.insert("l1.gateway_rpc_url", gateway_url)?; + 
chain_secrets_config.save().await?; let gateway_chain_config = GatewayChainConfig::from_gateway_and_chain_data( &gateway_gateway_config, @@ -378,43 +358,21 @@ pub async fn run(args: MigrateToGatewayArgs, shell: &Shell) -> anyhow::Result<() ); gateway_chain_config.save_with_base_path(shell, chain_config.configs.clone())?; - let mut general_config = chain_config.get_general_config().unwrap(); - - let eth_config = general_config.eth.as_mut().context("eth")?; + let mut general_config = chain_config.get_general_config().await?.patched(); + general_config.insert_yaml("eth.gas_adjuster.settlement_mode", SettlementMode::Gateway)?; - eth_config - .gas_adjuster - .as_mut() - .expect("gas_adjuster") - .settlement_mode = SettlementMode::Gateway; if is_rollup { - // For rollups, new type of commitment should be used, but - // not for validium. - eth_config - .sender - .as_mut() - .expect("sender") - .pubdata_sending_mode = PubdataSendingMode::RelayedL2Calldata; + // For rollups, new type of commitment should be used, but not for validium. + // `PubdataSendingMode` has differing `serde` and file-based config serializations, hence + // we supply a raw string value. + general_config.insert("eth.sender.pubdata_sending_mode", "RELAYED_L2_CALLDATA")?; } - eth_config - .sender - .as_mut() - .context("sender")? - .wait_confirmations = Some(0); + general_config.insert("eth.sender.wait_confirmations", 0)?; // TODO(EVM-925): the number below may not always work, especially for large prices on // top of Gateway. This field would have to be either not used on GW or transformed into u64. 
- eth_config - .sender - .as_mut() - .expect("sender") - .max_aggregated_tx_gas = 4294967295; - eth_config - .sender - .as_mut() - .expect("sender") - .max_eth_tx_data_size = 550_000; - - general_config.save_with_base_path(shell, chain_config.configs.clone())?; + general_config.insert("eth.sender.max_aggregated_tx_gas", 4294967295_u64)?; + general_config.insert("eth.sender.max_eth_tx_data_size", 550_000)?; + general_config.save().await?; Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs index d6c1851d0c96..2c8930f33ceb 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/mod.rs @@ -92,7 +92,7 @@ pub enum ChainCommands { pub(crate) async fn run(shell: &Shell, args: ChainCommands) -> anyhow::Result<()> { match args { - ChainCommands::Create(args) => create::run(args, shell), + ChainCommands::Create(args) => create::run(args, shell).await, ChainCommands::Init(args) => init::run(*args, shell).await, ChainCommands::BuildTransactions(args) => build_transactions::run(args, shell).await, ChainCommands::Genesis(args) => genesis::run(args, shell).await, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs index 626d25438385..4711e8645b27 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/register_chain.rs @@ -15,10 +15,7 @@ use zkstack_cli_config::{ }; use crate::{ - messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_CHAIN_REGISTERED, MSG_L1_SECRETS_MUST_BE_PRESENTED, - MSG_REGISTERING_CHAIN_SPINNER, - }, + messages::{MSG_CHAIN_NOT_INITIALIZED, MSG_CHAIN_REGISTERED, MSG_REGISTERING_CHAIN_SPINNER}, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -28,13 +25,8 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { 
.load_current_chain() .context(MSG_CHAIN_NOT_INITIALIZED)?; let mut contracts = chain_config.get_contracts_config()?; - let secrets = chain_config.get_secrets_config()?; - let l1_rpc_url = secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + let secrets = chain_config.get_secrets_config().await?; + let l1_rpc_url = secrets.get("l1.l1_rpc_url")?; let spinner = Spinner::new(MSG_REGISTERING_CHAIN_SPINNER); register_chain( shell, diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs index e1a57dcd0f00..1d17d179afdc 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/set_token_multiplier_setter.rs @@ -15,9 +15,9 @@ use zksync_basic_types::Address; use crate::{ messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_L1_SECRETS_MUST_BE_PRESENTED, - MSG_TOKEN_MULTIPLIER_SETTER_UPDATED_TO, MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, - MSG_WALLETS_CONFIG_MUST_BE_PRESENT, MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, + MSG_CHAIN_NOT_INITIALIZED, MSG_TOKEN_MULTIPLIER_SETTER_UPDATED_TO, + MSG_UPDATING_TOKEN_MULTIPLIER_SETTER_SPINNER, MSG_WALLETS_CONFIG_MUST_BE_PRESENT, + MSG_WALLET_TOKEN_MULTIPLIER_SETTER_NOT_FOUND, }, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -38,12 +38,9 @@ pub async fn run(args: ForgeScriptArgs, shell: &Shell) -> anyhow::Result<()> { .context(MSG_CHAIN_NOT_INITIALIZED)?; let contracts_config = chain_config.get_contracts_config()?; let l1_url = chain_config - .get_secrets_config()? - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(); + .get_secrets_config() + .await? + .get("l1.l1_rpc_url")?; let token_multiplier_setter_address = chain_config .get_wallets_config() .context(MSG_WALLETS_CONFIG_MUST_BE_PRESENT)? 
diff --git a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs index 24ef9d3c16d9..4d3e3cb1bd4e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs +++ b/zkstack_cli/crates/zkstack/src/commands/chain/setup_legacy_bridge.rs @@ -13,7 +13,7 @@ use zkstack_cli_config::{ }; use crate::{ - messages::{MSG_DEPLOYING_PAYMASTER, MSG_L1_SECRETS_MUST_BE_PRESENTED}, + messages::MSG_DEPLOYING_PAYMASTER, utils::forge::{check_the_balance, fill_forge_private_key, WalletOwner}, }; @@ -52,19 +52,12 @@ pub async fn setup_legacy_bridge( }; let foundry_contracts_path = chain_config.path_to_l1_foundry(); input.save(shell, SETUP_LEGACY_BRIDGE.input(&chain_config.link_to_code))?; - let secrets = chain_config.get_secrets_config()?; + let secrets = chain_config.get_secrets_config().await?; let mut forge = Forge::new(&foundry_contracts_path) .script(&SETUP_LEGACY_BRIDGE.script(), forge_args.clone()) .with_ffi() - .with_rpc_url( - secrets - .l1 - .context(MSG_L1_SECRETS_MUST_BE_PRESENTED)? - .l1_rpc_url - .expose_str() - .to_string(), - ) + .with_rpc_url(secrets.get("l1.l1_rpc_url")?) 
.with_broadcast(); forge = fill_forge_private_key( diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/conv.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/conv.rs deleted file mode 100644 index c9d878c8fd32..000000000000 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/conv.rs +++ /dev/null @@ -1,47 +0,0 @@ -use anyhow::Context as _; -use zksync_config::configs::consensus as config; -use zksync_consensus_crypto::TextFmt as _; -use zksync_consensus_roles::attester; -use zksync_protobuf::{ProtoFmt, ProtoRepr}; - -use super::proto; -use crate::utils::consensus::parse_attester_committee; - -#[derive(Debug, Clone, PartialEq)] -pub(super) struct SetAttesterCommitteeFile { - pub attesters: attester::Committee, -} - -impl ProtoFmt for SetAttesterCommitteeFile { - type Proto = proto::SetAttesterCommitteeFile; - - fn read(r: &Self::Proto) -> anyhow::Result { - // zksync_config was not allowed to depend on consensus crates, - // therefore to parse the config we need to go through the intermediate - // representation of consensus types defined in zksync_config. - let attesters: Vec<_> = r - .attesters - .iter() - .map(|x| x.read()) - .collect::>() - .context("attesters")?; - Ok(Self { - attesters: parse_attester_committee(&attesters)?, - }) - } - - fn build(&self) -> Self::Proto { - Self::Proto { - attesters: self - .attesters - .iter() - .map(|a| { - ProtoRepr::build(&config::WeightedAttester { - key: config::AttesterPublicKey(a.key.encode()), - weight: a.weight, - }) - }) - .collect(), - } - } -} diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs index ad64207b481c..3a628d561e81 100644 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/consensus/mod.rs @@ -3,7 +3,6 @@ use std::{borrow::Borrow, collections::HashMap, path::PathBuf, sync::Arc}; /// Consensus registry contract operations. 
/// Includes code duplicated from `zksync_node_consensus::registry::abi`. use anyhow::Context as _; -use conv::*; use ethers::{ abi::Detokenize, contract::{FunctionCall, Multicall}, @@ -16,15 +15,11 @@ use tokio::time::MissedTickBehavior; use xshell::Shell; use zkstack_cli_common::{config::global_config, logger, wallets::Wallet}; use zkstack_cli_config::EcosystemConfig; +use zksync_basic_types::L2ChainId; use zksync_consensus_crypto::ByteFmt; use zksync_consensus_roles::{attester, validator}; -use crate::{commands::args::WaitArgs, messages, utils::consensus::parse_attester_committee}; - -mod conv; -mod proto; -#[cfg(test)] -mod tests; +use crate::{commands::args::WaitArgs, messages, utils::consensus::read_attester_committee_yaml}; #[allow(warnings)] mod abi { @@ -99,17 +94,20 @@ pub enum Command { /// Collection of sent transactions. #[derive(Default)] -pub struct TxSet(Vec<(H256, &'static str)>); +struct TxSet(Vec<(H256, String)>); impl TxSet { /// Sends a transactions and stores the transaction hash. - pub async fn send, D: Detokenize>( + async fn send, D: Detokenize>( &mut self, - name: &'static str, + name: String, call: FunctionCall, ) -> anyhow::Result<()> { - let h = call.send().await.context(name)?.tx_hash(); - self.0.push((h, name)); + let hash = call.send().await.with_context(|| name.clone())?.tx_hash(); + if global_config().verbose { + logger::debug(format!("Sent transaction {name}: {hash:?}")); + } + self.0.push((hash, name)); Ok(()) } @@ -146,19 +144,14 @@ fn print_attesters(committee: &attester::Committee) { struct Setup { chain: zkstack_cli_config::ChainConfig, contracts: zkstack_cli_config::ContractsConfig, - general: zkstack_cli_config::GeneralConfig, - genesis: zkstack_cli_config::GenesisConfig, + l2_chain_id: L2ChainId, + l2_http_url: String, + genesis_attesters: attester::Committee, } impl Setup { fn provider(&self) -> anyhow::Result> { - let l2_url = &self - .general - .api_config - .as_ref() - .context(messages::MSG_API_CONFIG_MISSING)? 
- .web3_json_rpc - .http_url; + let l2_url = &self.l2_http_url; Provider::try_from(l2_url).with_context(|| format!("Provider::try_from({l2_url})")) } @@ -173,7 +166,7 @@ impl Setup { .multicall3 .context(messages::MSG_MULTICALL3_CONTRACT_NOT_CONFIGURED)?, ), - Some(self.genesis.l2_chain_id.as_u64()), + Some(self.l2_chain_id.as_u64()), )?) } @@ -186,7 +179,7 @@ impl Setup { } fn signer(&self, wallet: LocalWallet) -> anyhow::Result> { - let wallet = wallet.with_chain_id(self.genesis.l2_chain_id.as_u64()); + let wallet = wallet.with_chain_id(self.l2_chain_id.as_u64()); let provider = self.provider().context("provider()")?; let signer = SignerMiddleware::new(provider, wallet.clone()); // Allows us to send next transaction without waiting for the previous to complete. @@ -194,7 +187,7 @@ impl Setup { Ok(Arc::new(signer)) } - fn new(shell: &Shell) -> anyhow::Result { + async fn new(shell: &Shell) -> anyhow::Result { let ecosystem_config = EcosystemConfig::from_file(shell).context("EcosystemConfig::from_file()")?; let chain = ecosystem_config @@ -203,13 +196,29 @@ impl Setup { let contracts = chain .get_contracts_config() .context("get_contracts_config()")?; - let genesis = chain.get_genesis_config().context("get_genesis_config()")?; - let general = chain.get_general_config().context("get_general_config()")?; + let l2_chain_id = chain + .get_genesis_config() + .await + .context("get_genesis_config()")? + .get("l2_chain_id")?; + + let general = chain + .get_general_config() + .await + .context("get_general_config()")?; + // We're getting a parent path here, since we need object input with the `attesters` array + let genesis_attesters = general + .get_raw("consensus.genesis_spec") + .context(messages::MSG_CONSENSUS_GENESIS_SPEC_ATTESTERS_MISSING_IN_GENERAL_YAML)? 
+ .clone(); + let genesis_attesters = read_attester_committee_yaml(genesis_attesters)?; + Ok(Self { chain, contracts, - general, - genesis, + l2_chain_id, + l2_http_url: general.get("api.web3_json_rpc.http_url")?, + genesis_attesters, }) } @@ -260,26 +269,10 @@ impl Setup { // Fetch the desired state. if let Some(path) = &opts.from_file { let yaml = std::fs::read_to_string(path).context("read_to_string()")?; - let file: SetAttesterCommitteeFile = zksync_protobuf::serde::Deserialize { - deny_unknown_fields: true, - } - .proto_fmt_from_yaml(&yaml) - .context("proto_fmt_from_yaml()")?; - return Ok(file.attesters); + let yaml = serde_yaml::from_str(&yaml).context("parse YAML")?; + return read_attester_committee_yaml(yaml); } - let attesters = (|| { - Some( - &self - .general - .consensus_config - .as_ref()? - .genesis_spec - .as_ref()? - .attesters, - ) - })() - .context(messages::MSG_CONSENSUS_GENESIS_SPEC_ATTESTERS_MISSING_IN_GENERAL_YAML)?; - parse_attester_committee(attesters).context("parse_attester_committee()") + Ok(self.genesis_attesters.clone()) } async fn wait_for_registry_contract_inner( @@ -335,9 +328,21 @@ impl Setup { } async fn set_attester_committee(&self, want: &attester::Committee) -> anyhow::Result<()> { + if global_config().verbose { + logger::debug(format!("Setting attester committee: {want:?}")); + } + let provider = self.provider().context("provider()")?; let block_id = self.last_block(&provider).await.context("last_block()")?; + if global_config().verbose { + logger::debug(format!("Fetched latest L2 block: {block_id:?}")); + } + let governor = self.governor().context("governor()")?; + if global_config().verbose { + logger::debug(format!("Using governor: {:?}", governor.address)); + } + let signer = self.signer( governor .private_key @@ -348,6 +353,13 @@ impl Setup { .consensus_registry(signer.clone()) .context("consensus_registry()")?; let mut multicall = self.multicall(signer).context("multicall()")?; + if global_config().verbose { + 
logger::debug(format!( + "Using consensus registry at {:?}, multicall at {:?}", + consensus_registry.address(), + multicall.contract.address() + )); + } let owner = consensus_registry.owner().call().await.context("owner()")?; if owner != governor.address { @@ -368,6 +380,11 @@ impl Setup { .try_into() .ok() .context("num_nodes() overflow")?; + if global_config().verbose { + logger::debug(format!( + "Fetched number of nodes from consensus registry: {n}" + )); + } multicall.block = Some(block_id); let node_owners: Vec
= multicall @@ -379,6 +396,12 @@ impl Setup { .await .context("node_owners()")?; multicall.clear_calls(); + if global_config().verbose { + logger::debug(format!( + "Fetched node owners from consensus registry: {node_owners:?}" + )); + } + let nodes: Vec = multicall .add_calls( false, @@ -390,6 +413,11 @@ impl Setup { .await .context("nodes()")?; multicall.clear_calls(); + if global_config().verbose { + logger::debug(format!( + "Fetched node info from consensus registry: {nodes:?}" + )); + } // Update the state. let mut txs = TxSet::default(); @@ -398,15 +426,21 @@ impl Setup { if node.attester_latest.removed { continue; } + + let node_owner = node_owners[i]; let got = attester::WeightedAttester { key: decode_attester_key(&node.attester_latest.pub_key) .context("decode_attester_key()")?, weight: node.attester_latest.weight.into(), }; + if let Some(weight) = to_insert.remove(&got.key) { if weight != got.weight { txs.send( - "changed_attester_weight", + format!( + "change_attester_weight({node_owner:?}, {} -> {weight})", + got.weight + ), consensus_registry.change_attester_weight( node_owners[i], weight.try_into().context("weight overflow")?, @@ -415,18 +449,24 @@ impl Setup { .await?; } if !node.attester_latest.active { - txs.send("activate", consensus_registry.activate(node_owners[i])) - .await?; + txs.send( + format!("activate({node_owner:?})"), + consensus_registry.activate(node_owner), + ) + .await?; } } else { - txs.send("remove", consensus_registry.remove(node_owners[i])) - .await?; + txs.send( + format!("remove({node_owner:?})"), + consensus_registry.remove(node_owner), + ) + .await?; } } for (key, weight) in to_insert { let vk = validator::SecretKey::generate(); txs.send( - "add", + format!("add({key:?}, {weight})"), consensus_registry.add( Address::random(), /*validator_weight=*/ 1, @@ -439,7 +479,7 @@ impl Setup { .await?; } txs.send( - "commit_attester_committee", + "commit_attester_committee".to_owned(), consensus_registry.commit_attester_committee(), ) 
.await?; @@ -450,7 +490,7 @@ impl Setup { impl Command { pub(crate) async fn run(self, shell: &Shell) -> anyhow::Result<()> { - let setup = Setup::new(shell).context("Setup::new()")?; + let setup = Setup::new(shell).await?; match self { Self::SetAttesterCommittee(opts) => { let want = setup diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.proto b/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.proto deleted file mode 100644 index d8a7323f7144..000000000000 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -package zksync.toolbox.consensus; - -import "zksync/core/consensus.proto"; - -message SetAttesterCommitteeFile { - repeated core.consensus.WeightedAttester attesters = 1; -} diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.rs deleted file mode 100644 index 61a0a047f0a9..000000000000 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/proto/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -#![allow(warnings)] - -include!(concat!( - env!("OUT_DIR"), - "/src/commands/consensus/proto/gen.rs" -)); diff --git a/zkstack_cli/crates/zkstack/src/commands/consensus/tests.rs b/zkstack_cli/crates/zkstack/src/commands/consensus/tests.rs deleted file mode 100644 index c2f393ad2294..000000000000 --- a/zkstack_cli/crates/zkstack/src/commands/consensus/tests.rs +++ /dev/null @@ -1,19 +0,0 @@ -use rand::{distributions::Distribution, Rng}; -use zksync_consensus_utils::EncodeDist; -use zksync_protobuf::testonly::{test_encode_all_formats, FmtConv}; - -use super::SetAttesterCommitteeFile; - -impl Distribution for EncodeDist { - fn sample(&self, rng: &mut R) -> SetAttesterCommitteeFile { - SetAttesterCommitteeFile { - attesters: rng.gen(), - } - } -} - -#[test] -fn test_encoding() { - let rng = &mut rand::thread_rng(); - test_encode_all_formats::>(rng); -} diff --git 
a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs index 0b844df61f4c..070d64b5bdb4 100644 --- a/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs +++ b/zkstack_cli/crates/zkstack/src/commands/contract_verifier/wait.rs @@ -13,11 +13,9 @@ pub(crate) async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { let verbose = global_config().verbose; let prometheus_port = chain - .get_general_config()? - .contract_verifier - .as_ref() - .context("contract verifier config not specified")? - .prometheus_port; + .get_general_config() + .await? + .get("contract_verifier.prometheus_port")?; logger::info("Waiting for contract verifier to become alive"); args.poll_prometheus(prometheus_port, verbose).await?; logger::info(format!( diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs index abead3fe00d6..8957fc46ede9 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/check_sqlx_data.rs @@ -14,7 +14,7 @@ use crate::commands::dev::{ }, }; -pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { let args = args.parse(); if args.selected_dals.none() { logger::outro(MSG_NO_DATABASES_SELECTED); @@ -25,7 +25,7 @@ pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { logger::info(msg_database_info(MSG_DATABASE_CHECK_SQLX_DATA_GERUND)); - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { check_sqlx_data(shell, &ecosystem_config.link_to_code, dal)?; } diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs index e46a434cec06..f2bf647ed726 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/drop.rs @@ -23,7 +23,7 @@ pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> logger::info(msg_database_info(MSG_DATABASE_DROP_GERUND)); - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { drop_database(dal).await?; } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs index 8c21262c0712..3ebb30c3dfe3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/migrate.rs @@ -13,7 +13,7 @@ use crate::commands::dev::{ }, }; -pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { let args = args.parse(); if args.selected_dals.none() { logger::outro(MSG_NO_DATABASES_SELECTED); @@ -23,7 +23,7 @@ pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { logger::info(msg_database_info(MSG_DATABASE_MIGRATE_GERUND)); let ecosystem_config = EcosystemConfig::from_file(shell)?; - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { migrate_database(shell, &ecosystem_config.link_to_code, dal)?; } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/mod.rs index ed039fc65019..909b6fa5bc22 100644 --- 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/mod.rs @@ -37,12 +37,12 @@ pub enum DatabaseCommands { pub async fn run(shell: &Shell, args: DatabaseCommands) -> anyhow::Result<()> { match args { - DatabaseCommands::CheckSqlxData(args) => check_sqlx_data::run(shell, args), + DatabaseCommands::CheckSqlxData(args) => check_sqlx_data::run(shell, args).await, DatabaseCommands::Drop(args) => drop::run(shell, args).await, - DatabaseCommands::Migrate(args) => migrate::run(shell, args), - DatabaseCommands::NewMigration(args) => new_migration::run(shell, args), - DatabaseCommands::Prepare(args) => prepare::run(shell, args), + DatabaseCommands::Migrate(args) => migrate::run(shell, args).await, + DatabaseCommands::NewMigration(args) => new_migration::run(shell, args).await, + DatabaseCommands::Prepare(args) => prepare::run(shell, args).await, DatabaseCommands::Reset(args) => reset::run(shell, args).await, - DatabaseCommands::Setup(args) => setup::run(shell, args), + DatabaseCommands::Setup(args) => setup::run(shell, args).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs index 655a841e060a..809886eb5e2f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/new_migration.rs @@ -10,12 +10,12 @@ use crate::commands::dev::{ messages::{msg_database_new_migration_loading, MSG_DATABASE_NEW_MIGRATION_SUCCESS}, }; -pub fn run(shell: &Shell, args: DatabaseNewMigrationArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: DatabaseNewMigrationArgs) -> anyhow::Result<()> { let args = args.fill_values_with_prompt(); let dal = match args.selected_database { - SelectedDatabase::Core => get_core_dal(shell, None)?, - SelectedDatabase::Prover => 
get_prover_dal(shell, None)?, + SelectedDatabase::Core => get_core_dal(shell, None).await?, + SelectedDatabase::Prover => get_prover_dal(shell, None).await?, }; let ecosystem_config = EcosystemConfig::from_file(shell)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs index 82c9ed2e338b..1d2c2d57cb03 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/prepare.rs @@ -13,7 +13,7 @@ use crate::commands::dev::{ }, }; -pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { let args = args.parse(); if args.selected_dals.none() { logger::outro(MSG_NO_DATABASES_SELECTED); @@ -24,7 +24,7 @@ pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { logger::info(msg_database_info(MSG_DATABASE_PREPARE_GERUND)); - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { prepare_sqlx_data(shell, &ecosystem_config.link_to_code, dal)?; } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs index 4a9ec022d723..c9d5ad112ae3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/reset.rs @@ -24,7 +24,7 @@ pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> logger::info(msg_database_info(MSG_DATABASE_RESET_GERUND)); - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { logger::info(msg_database_loading(MSG_DATABASE_RESET_GERUND, &dal.path)); 
reset_database(shell, ecosystem_config.link_to_code.clone(), dal).await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs index 4eba9b615fc0..ca44273ebf1a 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/database/setup.rs @@ -13,7 +13,7 @@ use crate::commands::dev::{ }, }; -pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { let args = args.parse(); if args.selected_dals.none() { logger::outro(MSG_NO_DATABASES_SELECTED); @@ -24,7 +24,7 @@ pub fn run(shell: &Shell, args: DatabaseCommonArgs) -> anyhow::Result<()> { logger::info(msg_database_info(MSG_DATABASE_SETUP_GERUND)); - let dals = get_dals(shell, &args.selected_dals, &args.urls)?; + let dals = get_dals(shell, &args.selected_dals, &args.urls).await?; for dal in dals { setup_database(shell, &ecosystem_config.link_to_code, dal)?; } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs index 8e7a3973e037..29e5f734e528 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/genesis.rs @@ -18,7 +18,7 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { .context(MSG_CHAIN_NOT_FOUND_ERR)?; let spinner = Spinner::new(MSG_GENESIS_FILE_GENERATION_STARTED); let secrets_path = chain.path_to_secrets_config(); - let dal = get_core_dal(shell, None)?; + let dal = get_core_dal(shell, None).await?; reset_database(shell, ecosystem.link_to_code, dal).await?; Cmd::new(cmd!(shell,"cargo run --package genesis_generator --bin genesis_generator -- --config-path={secrets_path}")).run()?; spinner.finish(); diff --git 
a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs index 44d80d48d0ce..d90b350b0192 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/prover/info.rs @@ -84,14 +84,7 @@ pub(crate) async fn get_fflonk_snark_wrapper(link_to_prover: &Path) -> anyhow::R } pub(crate) async fn get_database_url(chain: &ChainConfig) -> anyhow::Result { - let prover_url = chain - .get_secrets_config()? - .database - .context("Database secrets not found")? - .prover_url()? - .expose_url() - .to_string(); - Ok(prover_url) + chain.get_secrets_config().await?.get("database.prover_url") } pub fn parse_version(version: &str) -> anyhow::Result<(&str, &str)> { diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs index 1cc65f194cdc..8395796781da 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/args.rs @@ -4,9 +4,7 @@ use xshell::Shell; use zkstack_cli_config::EcosystemConfig; use crate::{ - commands::dev::messages::{ - MSG_API_CONFIG_NOT_FOUND_ERR, MSG_STATUS_PORTS_HELP, MSG_STATUS_URL_HELP, - }, + commands::dev::messages::{MSG_STATUS_PORTS_HELP, MSG_STATUS_URL_HELP}, messages::MSG_CHAIN_NOT_FOUND_ERR, }; @@ -25,7 +23,7 @@ pub struct StatusArgs { } impl StatusArgs { - pub fn get_url(&self, shell: &Shell) -> anyhow::Result { + pub async fn get_url(&self, shell: &Shell) -> anyhow::Result { if let Some(url) = &self.url { Ok(url.clone()) } else { @@ -33,13 +31,9 @@ impl StatusArgs { let chain = ecosystem .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let general_config = chain.get_general_config()?; - let health_check_port = general_config - .api_config - .context(MSG_API_CONFIG_NOT_FOUND_ERR)? 
- .healthcheck - .port; - Ok(format!("http://localhost:{}/health", health_check_port)) + let general_config = chain.get_general_config().await?; + let health_check_port = general_config.get::("api.healthcheck.port")?; + Ok(format!("http://localhost:{health_check_port}/health")) } } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs index 7f2db7533655..7de264ca8252 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/status/mod.rs @@ -129,7 +129,7 @@ pub async fn run(shell: &Shell, args: StatusArgs) -> anyhow::Result<()> { return print_ports(shell); } - let health_check_url = args.get_url(shell)?; + let health_check_url = args.get_url(shell).await?; print_status(health_check_url) } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs index 64ea474fa2fa..07a0ae192618 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/loadtest.rs @@ -5,17 +5,14 @@ use zkstack_cli_config::EcosystemConfig; use crate::commands::dev::messages::MSG_CHAIN_NOT_FOUND_ERR; -pub fn run(shell: &Shell) -> anyhow::Result<()> { +pub async fn run(shell: &Shell) -> anyhow::Result<()> { let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let general_api = chain_config - .get_general_config()? - .api_config - .context("API config is not found")?; + let general_config = chain_config.get_general_config().await?; let mut command = cmd!( shell, @@ -24,9 +21,9 @@ pub fn run(shell: &Shell) -> anyhow::Result<()> { .env( "L2_CHAIN_ID", chain_config - .get_genesis_config()? - .l2_chain_id - .as_u64() + .get_genesis_config() + .await? 
+ .get::("l2_chain_id")? .to_string(), ) .env( @@ -40,8 +37,14 @@ pub fn run(shell: &Shell) -> anyhow::Result<()> { .address ), ) - .env("L2_RPC_ADDRESS", general_api.web3_json_rpc.http_url) - .env("L2_WS_RPC_ADDRESS", general_api.web3_json_rpc.ws_url); + .env( + "L2_RPC_ADDRESS", + general_config.get::("api.web3_json_rpc.http_url")?, + ) + .env( + "L2_WS_RPC_ADDRESS", + general_config.get::("api.web3_json_rpc.ws_url")?, + ); if global_config().verbose { command = command.env("RUST_LOG", "loadnext=info") diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/mod.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/mod.rs index 095e27652aa0..90f9971508d3 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/mod.rs @@ -64,6 +64,6 @@ pub async fn run(shell: &Shell, args: TestCommands) -> anyhow::Result<()> { TestCommands::L1Contracts => l1_contracts::run(shell), TestCommands::Prover => prover::run(shell).await, TestCommands::Wallet => wallet::run(shell), - TestCommands::Loadtest => loadtest::run(shell), + TestCommands::Loadtest => loadtest::run(shell).await, } } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs index dce3cd9022da..7b93ebf974d6 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/rust.rs @@ -11,10 +11,7 @@ use crate::commands::dev::{ commands::test::db::reset_test_databases, dals::{Dal, CORE_DAL_PATH, PROVER_DAL_PATH}, defaults::{TEST_DATABASE_PROVER_URL, TEST_DATABASE_SERVER_URL}, - messages::{ - MSG_CHAIN_NOT_FOUND_ERR, MSG_POSTGRES_CONFIG_NOT_FOUND_ERR, MSG_UNIT_TESTS_RUN_SUCCESS, - MSG_USING_CARGO_NEXTEST, - }, + messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_UNIT_TESTS_RUN_SUCCESS, MSG_USING_CARGO_NEXTEST}, }; pub async fn run(shell: &Shell, args: RustArgs) 
-> anyhow::Result<()> { @@ -23,21 +20,13 @@ pub async fn run(shell: &Shell, args: RustArgs) -> anyhow::Result<()> { .clone() .load_chain(Some(ecosystem.default_chain)) .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let general_config = chain.get_general_config(); + let general_config = chain.get_general_config().await; let link_to_code = ecosystem.link_to_code; let (test_server_url, test_prover_url) = if let Ok(general_config) = general_config { - let postgres = general_config - .postgres_config - .context(MSG_POSTGRES_CONFIG_NOT_FOUND_ERR)?; - ( - postgres - .test_server_url - .context(MSG_POSTGRES_CONFIG_NOT_FOUND_ERR)?, - postgres - .test_prover_url - .context(MSG_POSTGRES_CONFIG_NOT_FOUND_ERR)?, + general_config.get::("postgres.test.server_url")?, + general_config.get::("postgres.test.prover_url")?, ) } else { ( diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs index 7c042fad1fa9..871466d72c23 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/commands/test/utils.rs @@ -56,12 +56,9 @@ impl TestWallets { let wallet = self.get_test_wallet(chain_config)?; let l1_rpc = chain_config - .get_secrets_config()? - .l1 - .context("No L1 secrets available")? - .l1_rpc_url - .expose_str() - .to_owned(); + .get_secrets_config() + .await? 
+ .get::("l1.l1_rpc_url")?; let provider = Provider::::try_from(l1_rpc.clone())?; let balance = provider.get_balance(wallet.address, None).await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs b/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs index 199c44bbeb70..0d1c505557f2 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/dals.rs @@ -1,12 +1,9 @@ use anyhow::Context as _; use url::Url; use xshell::Shell; -use zkstack_cli_config::{EcosystemConfig, SecretsConfig}; +use zkstack_cli_config::{raw::RawConfig, EcosystemConfig}; -use super::{ - commands::database::args::DalUrls, - messages::{MSG_CHAIN_NOT_FOUND_ERR, MSG_DATABASE_MUST_BE_PRESENTED}, -}; +use super::{commands::database::args::DalUrls, messages::MSG_CHAIN_NOT_FOUND_ERR}; pub const CORE_DAL_PATH: &str = "core/lib/dal"; pub const PROVER_DAL_PATH: &str = "prover/crates/lib/prover_dal"; @@ -30,7 +27,7 @@ pub struct Dal { pub url: Url, } -pub fn get_dals( +pub async fn get_dals( shell: &Shell, selected_dals: &SelectedDals, urls: &DalUrls, @@ -38,27 +35,21 @@ pub fn get_dals( let mut dals = vec![]; if selected_dals.prover { - dals.push(get_prover_dal(shell, urls.prover.clone())?); + dals.push(get_prover_dal(shell, urls.prover.clone()).await?); } if selected_dals.core { - dals.push(get_core_dal(shell, urls.core.clone())?); + dals.push(get_core_dal(shell, urls.core.clone()).await?); } Ok(dals) } -pub fn get_prover_dal(shell: &Shell, url: Option) -> anyhow::Result { +pub async fn get_prover_dal(shell: &Shell, url: Option) -> anyhow::Result { let url = if let Some(url) = url { Url::parse(&url)? } else { - let secrets = get_secrets(shell)?; - secrets - .database - .as_ref() - .context(MSG_DATABASE_MUST_BE_PRESENTED)? - .prover_url()? - .expose_url() - .clone() + let secrets = get_secrets(shell).await?; + secrets.get("database.prover_url")? 
}; Ok(Dal { @@ -67,18 +58,12 @@ pub fn get_prover_dal(shell: &Shell, url: Option) -> anyhow::Result }) } -pub fn get_core_dal(shell: &Shell, url: Option) -> anyhow::Result { +pub async fn get_core_dal(shell: &Shell, url: Option) -> anyhow::Result { let url = if let Some(url) = url { Url::parse(&url)? } else { - let secrets = get_secrets(shell)?; - secrets - .database - .as_ref() - .context(MSG_DATABASE_MUST_BE_PRESENTED)? - .master_url()? - .expose_url() - .clone() + let secrets = get_secrets(shell).await?; + secrets.get("database.server_url")? }; Ok(Dal { @@ -87,12 +72,10 @@ pub fn get_core_dal(shell: &Shell, url: Option) -> anyhow::Result { }) } -fn get_secrets(shell: &Shell) -> anyhow::Result { +async fn get_secrets(shell: &Shell) -> anyhow::Result { let ecosystem_config = EcosystemConfig::from_file(shell)?; let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let secrets = chain_config.get_secrets_config()?; - - Ok(secrets) + chain_config.get_secrets_config().await } diff --git a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs index b65750b34341..68b79da70bda 100644 --- a/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs +++ b/zkstack_cli/crates/zkstack/src/commands/dev/messages.rs @@ -48,7 +48,6 @@ pub(super) const MSG_DATABASE_RESET_GERUND: &str = "Resetting"; pub(super) const MSG_DATABASE_RESET_PAST: &str = "reset"; pub(super) const MSG_DATABASE_SETUP_GERUND: &str = "Setting up"; pub(super) const MSG_DATABASE_SETUP_PAST: &str = "set up"; -pub(super) const MSG_DATABASE_MUST_BE_PRESENTED: &str = "Database config must be presented"; pub(super) const MSG_DATABASE_COMMON_PROVER_HELP: &str = "Prover database"; pub(super) const MSG_DATABASE_COMMON_PROVER_URL_HELP: &str = "URL of the Prover database. 
If not specified, it is used from the current chain's secrets"; @@ -105,7 +104,6 @@ pub(super) const MSG_L1_CONTRACTS_ABOUT: &str = "Run L1 contracts tests"; pub(super) const MSG_L1_CONTRACTS_TEST_SUCCESS: &str = "L1 contracts tests ran successfully"; pub(super) const MSG_PROVER_TEST_ABOUT: &str = "Run prover tests"; pub(super) const MSG_PROVER_TEST_SUCCESS: &str = "Prover tests ran successfully"; -pub(super) const MSG_POSTGRES_CONFIG_NOT_FOUND_ERR: &str = "Postgres config not found"; pub(super) const MSG_RESETTING_TEST_DATABASES: &str = "Resetting test databases"; // Contract building related messages @@ -235,7 +233,6 @@ pub(super) const MSG_INVALID_L1_RPC_URL_ERR: &str = "Invalid L1 RPC URL"; // Status related messages pub(super) const MSG_STATUS_ABOUT: &str = "Get status of the server"; -pub(super) const MSG_API_CONFIG_NOT_FOUND_ERR: &str = "API config not found"; pub(super) const MSG_STATUS_URL_HELP: &str = "URL of the health check endpoint"; pub(super) const MSG_STATUS_PORTS_HELP: &str = "Show used ports"; pub(super) const MSG_COMPONENTS: &str = "Components:\n"; diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs index 7255ba9e1ca5..f8c45cfcf8bd 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/common.rs @@ -1,16 +1,16 @@ -use anyhow::Context; use xshell::Shell; use zkstack_cli_common::forge::{Forge, ForgeScriptArgs}; use zkstack_cli_config::{ forge_interface::{ deploy_ecosystem::{ - input::{DeployL1Config, InitialDeploymentConfig}, + input::{DeployL1Config, GenesisInput, InitialDeploymentConfig}, output::DeployL1Output, }, script_params::DEPLOY_ECOSYSTEM_SCRIPT_PARAMS, }, - traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig}, - ContractsConfig, EcosystemConfig, GenesisConfig, + raw::RawConfig, + traits::{ReadConfig, SaveConfig}, + ContractsConfig, EcosystemConfig, GENESIS_FILE, }; use 
zkstack_cli_types::{L1Network, ProverMode}; @@ -28,15 +28,14 @@ pub async fn deploy_l1( support_l2_legacy_shared_bridge_test: bool, ) -> anyhow::Result { let deploy_config_path = DEPLOY_ECOSYSTEM_SCRIPT_PARAMS.input(&config.link_to_code); - dbg!(config.get_default_configs_path()); - let default_genesis_config = - GenesisConfig::read_with_base_path(shell, config.get_default_configs_path()) - .context("failed reading genesis config")?; + let genesis_config_path = config.get_default_configs_path().join(GENESIS_FILE); + let default_genesis_config = RawConfig::read(shell, genesis_config_path).await?; + let default_genesis_input = GenesisInput::new(&default_genesis_config)?; let wallets_config = config.get_wallets()?; // For deploying ecosystem we only need genesis batch params let deploy_config = DeployL1Config::new( - &default_genesis_config, + &default_genesis_input, &wallets_config, initial_deployment_config, config.era_chain_id, diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs index 404589afac2d..3bd936f69cb7 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/create.rs @@ -27,19 +27,19 @@ use crate::{ utils::link_to_code::resolve_link_to_code, }; -pub fn run(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { +pub async fn run(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { match EcosystemConfig::from_file(shell) { Ok(_) => bail!(MSG_ECOSYSTEM_ALREADY_EXISTS_ERR), Err(EcosystemConfigFromFileError::InvalidConfig { .. }) => { bail!(MSG_ECOSYSTEM_CONFIG_INVALID_ERR) } - Err(EcosystemConfigFromFileError::NotExists { .. }) => create(args, shell)?, + Err(EcosystemConfigFromFileError::NotExists { .. 
}) => create(args, shell).await?, }; Ok(()) } -fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { +async fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { let args = args .fill_values_with_prompt(shell) .context(MSG_ARGS_VALIDATOR_ERR)?; @@ -96,7 +96,7 @@ fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { spinner.finish(); let spinner = Spinner::new(MSG_CREATING_DEFAULT_CHAIN_SPINNER); - create_chain_inner(chain_config, &ecosystem_config, shell)?; + create_chain_inner(chain_config, &ecosystem_config, shell).await?; spinner.finish(); if args.start_containers { diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs index 01905afb9a5d..b8179ca4db83 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/gateway_upgrade.rs @@ -6,6 +6,7 @@ use xshell::Shell; use zkstack_cli_common::{db::DatabaseConfig, forge::Forge, git, spinner::Spinner}; use zkstack_cli_config::{ forge_interface::{ + deploy_ecosystem::input::GenesisInput, gateway_ecosystem_upgrade::{ input::GatewayEcosystemUpgradeInput, output::GatewayEcosystemUpgradeOutput, }, @@ -14,8 +15,9 @@ use zkstack_cli_config::{ FINALIZE_UPGRADE_SCRIPT_PARAMS, GATEWAY_PREPARATION, GATEWAY_UPGRADE_ECOSYSTEM_PARAMS, }, }, + raw::RawConfig, traits::{ReadConfig, ReadConfigWithBasePath, SaveConfig, SaveConfigWithBasePath}, - EcosystemConfig, GenesisConfig, CONFIGS_PATH, + EcosystemConfig, GENESIS_FILE, }; use zkstack_cli_types::ProverMode; use zksync_basic_types::commitment::L1BatchCommitmentMode; @@ -95,7 +97,11 @@ async fn no_governance_prepare( let forge_args = init_args.forge_args.clone(); let l1_rpc_url = init_args.l1_rpc_url.clone(); - let new_genesis_config = GenesisConfig::read_with_base_path(shell, CONFIGS_PATH)?; + let genesis_config_path = ecosystem_config + 
.get_default_configs_path() + .join(GENESIS_FILE); + let default_genesis_config = RawConfig::read(shell, genesis_config_path).await?; + let default_genesis_input = GenesisInput::new(&default_genesis_config)?; let current_contracts_config = ecosystem_config.get_contracts_config()?; let initial_deployment_config = ecosystem_config.get_initial_deployment_config()?; @@ -110,7 +116,7 @@ async fn no_governance_prepare( // assert_eq!(era_config.chain_id, ecosystem_config.era_chain_id); let gateway_upgrade_input = GatewayEcosystemUpgradeInput::new( - &new_genesis_config, + &default_genesis_input, ¤t_contracts_config, &initial_deployment_config, ecosystem_config.era_chain_id, @@ -492,7 +498,8 @@ async fn no_governance_stage_3( .load_chain(Some("gateway".to_string())) .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let chain_genesis_config = chain_config.get_genesis_config()?; + let chain_genesis_config = chain_config.get_genesis_config().await?; + let genesis_input = GenesisInput::new(&chain_genesis_config)?; let mut chain_contracts_config = chain_config.get_contracts_config()?; // Fund gateway's governor (chain_config.get_wallets_config()?.governor) @@ -521,7 +528,7 @@ async fn no_governance_stage_3( init_args.forge_args.clone(), ecosystem_config, &chain_config, - &chain_genesis_config, + &genesis_input, &ecosystem_config.get_initial_deployment_config().unwrap(), init_args.l1_rpc_url.clone(), ) @@ -587,7 +594,7 @@ async fn no_governance_stage_3( init_args.forge_args.clone(), ecosystem_config, &chain_config, - &chain_genesis_config, + &genesis_input, &ecosystem_config.get_initial_deployment_config().unwrap(), init_args.l1_rpc_url.clone(), ) diff --git a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs index 19c2888edd0d..9e9277bb2084 100644 --- a/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/ecosystem/mod.rs @@ -43,7 +43,7 @@ pub enum EcosystemCommands { 
pub(crate) async fn run(shell: &Shell, args: EcosystemCommands) -> anyhow::Result<()> { match args { - EcosystemCommands::Create(args) => create::run(args, shell), + EcosystemCommands::Create(args) => create::run(args, shell).await, EcosystemCommands::BuildTransactions(args) => build_transactions::run(args, shell).await, EcosystemCommands::Init(args) => init::run(args, shell).await, EcosystemCommands::ChangeDefaultChain(args) => change_default::run(args, shell), diff --git a/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs b/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs index 8bac0b84d982..d5c8b6f905f5 100644 --- a/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/explorer/init.rs @@ -6,7 +6,7 @@ use zkstack_cli_common::{config::global_config, db, logger, Prompt}; use zkstack_cli_config::{ explorer::{ExplorerChainConfig, ExplorerConfig}, explorer_compose::{ExplorerBackendComposeConfig, ExplorerBackendConfig, ExplorerBackendPorts}, - traits::{ConfigWithL2RpcUrl, SaveConfig}, + traits::SaveConfig, ChainConfig, EcosystemConfig, }; @@ -41,14 +41,18 @@ pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { // Initialize explorer database initialize_explorer_database(&backend_config.database_url).await?; // Create explorer backend docker compose file - let l2_rpc_url = chain_config.get_general_config()?.get_l2_rpc_url()?; + let l2_rpc_url = chain_config + .get_general_config() + .await? 
+ .get("api.web3_json_rpc.http_url")?; let backend_compose_config = ExplorerBackendComposeConfig::new(chain_name, l2_rpc_url, &backend_config)?; let backend_compose_config_path = ExplorerBackendComposeConfig::get_config_path(&shell.current_dir(), chain_name); backend_compose_config.save(shell, &backend_compose_config_path)?; // Add chain to explorer.json - let explorer_chain_config = build_explorer_chain_config(&chain_config, &backend_config)?; + let explorer_chain_config = + build_explorer_chain_config(&chain_config, &backend_config).await?; explorer_config.add_chain_config(&explorer_chain_config); } // Save explorer config @@ -100,19 +104,15 @@ fn fill_database_values_with_prompt(config: &ChainConfig) -> db::DatabaseConfig db::DatabaseConfig::new(explorer_db_url, explorer_db_name) } -fn build_explorer_chain_config( +async fn build_explorer_chain_config( chain_config: &ChainConfig, backend_config: &ExplorerBackendConfig, ) -> anyhow::Result { - let general_config = chain_config.get_general_config()?; + let general_config = chain_config.get_general_config().await?; // Get L2 RPC URL from general config - let l2_rpc_url = general_config.get_l2_rpc_url()?; + let l2_rpc_url = general_config.get("api.web3_json_rpc.http_url")?; // Get Verification API URL from general config - let verification_api_port = general_config - .contract_verifier - .as_ref() - .map(|verifier| verifier.port) - .context("verifier.port")?; + let verification_api_port = general_config.get::("contract_verifier.port")?; let verification_api_url = format!("http://127.0.0.1:{verification_api_port}"); // Build API URL let api_port = backend_config.ports.api_http_port; @@ -123,7 +123,7 @@ fn build_explorer_chain_config( name: chain_config.name.clone(), l2_network_name: chain_config.name.clone(), l2_chain_id: chain_config.chain_id.as_u64(), - rpc_url: l2_rpc_url.to_string(), + rpc_url: l2_rpc_url, api_url: api_url.to_string(), base_token_address: L2_BASE_TOKEN_ADDRESS.to_string(), hostnames: Vec::new(), 
diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs index 526e9fd4bc5f..3e4dfb179200 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/init.rs @@ -4,16 +4,13 @@ use zkstack_cli_common::{ db::{drop_db_if_exists, init_db, migrate_db, DatabaseConfig}, spinner::Spinner, }; -use zkstack_cli_config::{ - traits::ReadConfigWithBasePath, ChainConfig, EcosystemConfig, SecretsConfig, -}; +use zkstack_cli_config::{raw::RawConfig, ChainConfig, EcosystemConfig, SECRETS_FILE}; use crate::{ consts::SERVER_MIGRATIONS, messages::{ - MSG_CHAIN_NOT_INITIALIZED, MSG_DATABASE_MUST_BE_PRESENTED, - MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED, MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR, - MSG_INITIALIZING_DATABASES_SPINNER, + MSG_CHAIN_NOT_INITIALIZED, MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED, + MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR, MSG_INITIALIZING_DATABASES_SPINNER, }, utils::rocks_db::{recreate_rocksdb_dirs, RocksDBDirOption}, }; @@ -30,21 +27,13 @@ pub async fn run(shell: &Shell) -> anyhow::Result<()> { pub async fn init(shell: &Shell, chain_config: &ChainConfig) -> anyhow::Result<()> { let spin = Spinner::new(MSG_INITIALIZING_DATABASES_SPINNER); - let secrets = SecretsConfig::read_with_base_path( - shell, - chain_config - .external_node_config_path - .clone() - .context(MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED)?, - )?; - let db_config = DatabaseConfig::from_url( - secrets - .database - .as_ref() - .context(MSG_DATABASE_MUST_BE_PRESENTED)? - .master_url()? - .expose_url(), - )?; + let secrets_path = chain_config + .external_node_config_path + .as_ref() + .context(MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED)? 
+ .join(SECRETS_FILE); + let secrets = RawConfig::read(shell, secrets_path).await?; + let db_config = DatabaseConfig::from_url(&secrets.get("database.server_url")?)?; drop_db_if_exists(&db_config) .await .context(MSG_FAILED_TO_DROP_SERVER_DATABASE_ERR)?; diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs index 7bd366d5871c..28dc29dcf5eb 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/mod.rs @@ -28,7 +28,7 @@ pub enum ExternalNodeCommands { pub async fn run(shell: &Shell, commands: ExternalNodeCommands) -> anyhow::Result<()> { match commands { - ExternalNodeCommands::Configs(args) => prepare_configs::run(shell, args), + ExternalNodeCommands::Configs(args) => prepare_configs::run(shell, args).await, ExternalNodeCommands::Init => init::run(shell).await, ExternalNodeCommands::Build => build::build(shell).await, ExternalNodeCommands::Run(args) => run::run(shell, args).await, diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs index ae36c1c9a1f4..9f0cd6bfbc8e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/prepare_configs.rs @@ -1,37 +1,30 @@ -use std::{collections::BTreeMap, path::Path, str::FromStr}; +use std::path::Path; use anyhow::Context; use xshell::Shell; use zkstack_cli_common::logger; use zkstack_cli_config::{ - external_node::ENConfig, - set_rocks_db_config, - traits::{FileConfigWithDefaultName, SaveConfigWithBasePath}, - ChainConfig, EcosystemConfig, GeneralConfig, SecretsConfig, -}; -use zksync_basic_types::url::SensitiveUrl; -use zksync_config::configs::{ - consensus::{ConsensusConfig, ConsensusSecrets, NodeSecretKey, Secret}, - DatabaseSecrets, L1Secrets, + raw::{PatchedConfig, 
RawConfig}, + set_rocks_db_config, ChainConfig, EcosystemConfig, CONSENSUS_CONFIG_FILE, EN_CONFIG_FILE, + GENERAL_FILE, SECRETS_FILE, }; +use zksync_basic_types::{L1ChainId, L2ChainId}; use zksync_consensus_crypto::TextFmt; use zksync_consensus_roles as roles; use crate::{ commands::external_node::args::prepare_configs::{PrepareConfigArgs, PrepareConfigFinal}, messages::{ - msg_preparing_en_config_is_done, MSG_CHAIN_NOT_INITIALIZED, - MSG_CONSENSUS_CONFIG_MISSING_ERR, MSG_CONSENSUS_SECRETS_MISSING_ERR, - MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR, MSG_PREPARING_EN_CONFIGS, + msg_preparing_en_config_is_done, MSG_CHAIN_NOT_INITIALIZED, MSG_PREPARING_EN_CONFIGS, }, utils::{ - consensus::node_public_key, + consensus::{node_public_key, KeyAndAddress}, ports::EcosystemPortsScanner, rocks_db::{recreate_rocksdb_dirs, RocksDBDirOption}, }, }; -pub fn run(shell: &Shell, args: PrepareConfigArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell, args: PrepareConfigArgs) -> anyhow::Result<()> { logger::info(MSG_PREPARING_EN_CONFIGS); let ecosystem_config = EcosystemConfig::from_file(shell)?; let mut chain_config = ecosystem_config @@ -44,104 +37,86 @@ pub fn run(shell: &Shell, args: PrepareConfigArgs) -> anyhow::Result<()> { .unwrap_or_else(|| chain_config.configs.join("external_node")); shell.create_dir(&external_node_config_path)?; chain_config.external_node_config_path = Some(external_node_config_path.clone()); - prepare_configs(shell, &chain_config, &external_node_config_path, args)?; + prepare_configs(shell, &chain_config, &external_node_config_path, args).await?; let chain_path = ecosystem_config.chains.join(&chain_config.name); chain_config.save_with_base_path(shell, chain_path)?; logger::info(msg_preparing_en_config_is_done(&external_node_config_path)); Ok(()) } -fn prepare_configs( +async fn prepare_configs( shell: &Shell, config: &ChainConfig, en_configs_path: &Path, args: PrepareConfigFinal, ) -> anyhow::Result<()> { let mut ports = 
EcosystemPortsScanner::scan(shell)?; - let genesis = config.get_genesis_config()?; - let general = config.get_general_config()?; + let genesis = config.get_genesis_config().await?; + let general = config.get_general_config().await?; let gateway = config.get_gateway_chain_config().ok(); - let en_config = ENConfig { - l2_chain_id: genesis.l2_chain_id, - l1_chain_id: genesis.l1_chain_id, - l1_batch_commit_data_generator_mode: genesis.l1_batch_commit_data_generator_mode, - main_node_url: SensitiveUrl::from_str( - &general - .api_config - .as_ref() - .context("api_config")? - .web3_json_rpc - .http_url, - )?, - main_node_rate_limit_rps: None, - bridge_addresses_refresh_interval_sec: None, - gateway_chain_id: gateway.map(|g| g.gateway_chain_id), - }; - let mut general_en = general.clone(); - general_en.consensus_config = None; + let l2_rpc_port = general.get::("api.web3_json_rpc.http_port")?; - let main_node_consensus_config = general - .consensus_config - .context(MSG_CONSENSUS_CONFIG_MISSING_ERR)?; - let mut en_consensus_config = main_node_consensus_config.clone(); + let mut en_config = PatchedConfig::empty(shell, en_configs_path.join(EN_CONFIG_FILE)); + en_config.insert( + "l2_chain_id", + genesis.get::("l2_chain_id")?.as_u64(), + )?; + en_config.insert("l1_chain_id", genesis.get::("l1_chain_id")?.0)?; + en_config.insert_yaml( + "l1_batch_commit_data_generator_mode", + genesis.get::("l1_batch_commit_data_generator_mode")?, + )?; + en_config.insert("main_node_url", format!("http://127.0.0.1:{l2_rpc_port}"))?; + if let Some(gateway) = &gateway { + en_config.insert_yaml("gateway_chain_id", gateway.gateway_chain_id)?; + } + en_config.save().await?; - let mut gossip_static_outbound = BTreeMap::new(); + // Copy and modify the general config + let general_config_path = en_configs_path.join(GENERAL_FILE); + shell.copy_file(config.path_to_general_config(), &general_config_path)?; + let mut general_en = RawConfig::read(shell, general_config_path.clone()) + .await? 
+ .patched(); + let main_node_public_addr: String = general_en.base().get("consensus.public_addr")?; + let raw_consensus = general_en.base().get("consensus")?; + general_en.remove("consensus"); + + // Copy and modify the consensus config + let mut en_consensus_config = + PatchedConfig::empty(shell, en_configs_path.join(CONSENSUS_CONFIG_FILE)); + en_consensus_config.extend(raw_consensus); let main_node_public_key = node_public_key( &config - .get_secrets_config()? - .consensus - .context(MSG_CONSENSUS_SECRETS_MISSING_ERR)?, - )? - .context(MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR)?; - gossip_static_outbound.insert(main_node_public_key, main_node_consensus_config.public_addr); - en_consensus_config.gossip_static_outbound = gossip_static_outbound; + .get_secrets_config() + .await? + .get::("consensus.node_key")?, + )?; + let gossip_static_outbound = [KeyAndAddress { + key: main_node_public_key, + addr: main_node_public_addr, + }]; + en_consensus_config.insert_yaml("gossip_static_outbound", gossip_static_outbound)?; + en_consensus_config.save().await?; // Set secrets config + let mut secrets = PatchedConfig::empty(shell, en_configs_path.join(SECRETS_FILE)); let node_key = roles::node::SecretKey::generate().encode(); - let consensus_secrets = ConsensusSecrets { - validator_key: None, - attester_key: None, - node_key: Some(NodeSecretKey(Secret::new(node_key))), - }; - - let gateway_rpc_url = if let Some(url) = args.gateway_rpc_url { - Some(SensitiveUrl::from_str(&url).context("gateway_url")?) 
- } else { - None - }; - let secrets = SecretsConfig { - consensus: Some(consensus_secrets), - database: Some(DatabaseSecrets { - server_url: Some(args.db.full_url().into()), - prover_url: None, - server_replica_url: None, - }), - l1: Some(L1Secrets { - l1_rpc_url: SensitiveUrl::from_str(&args.l1_rpc_url).context("l1_rpc_url")?, - gateway_rpc_url, - }), - data_availability: None, - }; + secrets.insert("consensus.node_key", node_key)?; + secrets.insert("database.server_url", args.db.full_url().to_string())?; + secrets.insert("l1.l1_rpc_url", args.l1_rpc_url)?; + if let Some(url) = args.gateway_rpc_url { + secrets.insert("l1.gateway_rpc_url", url)?; + } + secrets.save().await?; let dirs = recreate_rocksdb_dirs(shell, &config.rocks_db_path, RocksDBDirOption::ExternalNode)?; set_rocks_db_config(&mut general_en, dirs)?; - - general_en.save_with_base_path(shell, en_configs_path)?; - en_config.save_with_base_path(shell, en_configs_path)?; - en_consensus_config.save_with_base_path(shell, en_configs_path)?; - secrets.save_with_base_path(shell, en_configs_path)?; + general_en.save().await?; let offset = 0; // This is zero because general_en ports already have a chain offset - ports.allocate_ports_in_yaml( - shell, - &GeneralConfig::get_path_with_base_path(en_configs_path), - offset, - )?; - ports.allocate_ports_in_yaml( - shell, - &ConsensusConfig::get_path_with_base_path(en_configs_path), - offset, - )?; + ports.allocate_ports_in_yaml(shell, &general_config_path, offset)?; + ports.allocate_ports_in_yaml(shell, &en_configs_path.join(CONSENSUS_CONFIG_FILE), offset)?; Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs index b645314dc9c2..40adcb3f73a0 100644 --- a/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs +++ b/zkstack_cli/crates/zkstack/src/commands/external_node/wait.rs @@ -1,8 +1,7 @@ use anyhow::Context as _; use xshell::Shell; use 
zkstack_cli_common::{config::global_config, logger}; -use zkstack_cli_config::{traits::ReadConfigWithBasePath, EcosystemConfig}; -use zksync_config::configs::GeneralConfig; +use zkstack_cli_config::{raw::RawConfig, EcosystemConfig, GENERAL_FILE}; use crate::{ commands::args::WaitArgs, @@ -20,13 +19,8 @@ pub async fn wait(shell: &Shell, args: WaitArgs) -> anyhow::Result<()> { .external_node_config_path .clone() .context("External node is not initialized")?; - let general_config = GeneralConfig::read_with_base_path(shell, &en_path)?; - let health_check_port = general_config - .api_config - .as_ref() - .context("no API config")? - .healthcheck - .port; + let general_config = RawConfig::read(shell, en_path.join(GENERAL_FILE)).await?; + let health_check_port = general_config.get("api.healthcheck.port")?; logger::info(MSG_WAITING_FOR_EN); args.poll_health_check(health_check_port, verbose).await?; diff --git a/zkstack_cli/crates/zkstack/src/commands/portal.rs b/zkstack_cli/crates/zkstack/src/commands/portal.rs index d534498aaacd..15043abd509f 100644 --- a/zkstack_cli/crates/zkstack/src/commands/portal.rs +++ b/zkstack_cli/crates/zkstack/src/commands/portal.rs @@ -5,9 +5,7 @@ use ethers::types::Address; use xshell::Shell; use zkstack_cli_common::{config::global_config, docker, ethereum, logger}; use zkstack_cli_config::{ - portal::*, - traits::{ConfigWithL2RpcUrl, SaveConfig}, - AppsEcosystemConfig, ChainConfig, EcosystemConfig, + portal::*, traits::SaveConfig, AppsEcosystemConfig, ChainConfig, EcosystemConfig, }; use zkstack_cli_types::{BaseToken, TokenInfo}; @@ -24,14 +22,13 @@ async fn build_portal_chain_config( chain_config: &ChainConfig, ) -> anyhow::Result { // Get L2 RPC URL from general config - let l2_rpc_url = chain_config.get_general_config()?.get_l2_rpc_url()?; - // Get L1 RPC URL from secrects config - let secrets_config = chain_config.get_secrets_config()?; - let l1_rpc_url = secrets_config - .l1 - .as_ref() - .map(|l1| l1.l1_rpc_url.expose_str()) - 
.context("l1")?; + let l2_rpc_url = chain_config + .get_general_config() + .await? + .get("api.web3_json_rpc.http_url")?; + // Get L1 RPC URL from secrets config + let secrets_config = chain_config.get_secrets_config().await?; + let l1_rpc_url = secrets_config.get::("l1.l1_rpc_url")?; // Build L1 network config let l1_network = Some(L1NetworkConfig { id: chain_config.l1_network.chain_id(), @@ -40,10 +37,10 @@ async fn build_portal_chain_config( native_currency: TokenInfo::eth(), rpc_urls: RpcUrls { default: RpcUrlConfig { - http: vec![l1_rpc_url.to_string()], + http: vec![l1_rpc_url.clone()], }, public: RpcUrlConfig { - http: vec![l1_rpc_url.to_string()], + http: vec![l1_rpc_url.clone()], }, }, }); @@ -53,8 +50,7 @@ async fn build_portal_chain_config( } else { ( format!("{:?}", chain_config.base_token.address), - ethereum::get_token_info(chain_config.base_token.address, l1_rpc_url.to_string()) - .await?, + ethereum::get_token_info(chain_config.base_token.address, l1_rpc_url).await?, ) }; let tokens = vec![TokenConfig { @@ -70,7 +66,7 @@ async fn build_portal_chain_config( id: chain_config.chain_id.as_u64(), key: chain_config.name.clone(), name: chain_config.name.clone(), - rpc_url: l2_rpc_url.to_string(), + rpc_url: l2_rpc_url, l1_network, public_l1_network_id: None, block_explorer_url: None, diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs index 9de616657b20..9c9445bc3718 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/compressor_keys.rs @@ -1,3 +1,5 @@ +use std::path::{Path, PathBuf}; + use clap::Parser; use zkstack_cli_common::Prompt; @@ -6,14 +8,14 @@ use crate::messages::MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT; #[derive(Debug, Clone, Parser, Default)] pub struct CompressorKeysArgs { #[clap(long)] - pub path: Option, + pub path: Option, } impl CompressorKeysArgs { - 
pub fn fill_values_with_prompt(self, default_path: &str) -> CompressorKeysArgs { + pub fn fill_values_with_prompt(self, default_path: &Path) -> CompressorKeysArgs { let path = self.path.unwrap_or_else(|| { Prompt::new(MSG_SETUP_COMPRESSOR_KEY_PATH_PROMPT) - .default(default_path) + .default(default_path.to_str().expect("non-UTF8 path")) .ask() }); diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs b/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs index 4956a23ac987..b0eabd4925ea 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/args/init.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use clap::{Parser, ValueEnum}; use serde::{Deserialize, Serialize}; use slugify_rs::slugify; @@ -6,7 +8,6 @@ use url::Url; use xshell::Shell; use zkstack_cli_common::{db::DatabaseConfig, logger, Prompt, PromptConfirm, PromptSelect}; use zkstack_cli_config::ChainConfig; -use zksync_config::configs::fri_prover::CloudConnectionMode; use super::{ compressor_keys::CompressorKeysArgs, init_bellman_cuda::InitBellmanCudaArgs, @@ -92,24 +93,14 @@ enum ProofStoreConfig { GCS, } -#[derive( - Debug, Clone, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, Deserialize, Serialize, -)] +#[derive(Debug, Clone, ValueEnum, EnumIter, strum::Display, PartialEq, Eq, Serialize)] #[allow(clippy::upper_case_acronyms)] -enum InternalCloudConnectionMode { +pub enum InternalCloudConnectionMode { GCP, + #[serde(rename = "LOCAL")] // match name in file-based configs Local, } -impl From for CloudConnectionMode { - fn from(cloud_type: InternalCloudConnectionMode) -> Self { - match cloud_type { - InternalCloudConnectionMode::GCP => CloudConnectionMode::GCP, - InternalCloudConnectionMode::Local => CloudConnectionMode::Local, - } - } -} - #[derive(Clone, Debug, Serialize, Deserialize, Parser, Default)] pub struct ProofStorageGCSTmp { #[clap(long)] @@ -194,7 +185,7 @@ pub struct ProverInitArgsFinal { pub 
compressor_key_args: Option, pub setup_keys: Option, pub bellman_cuda_config: Option, - pub cloud_type: CloudConnectionMode, + pub cloud_type: InternalCloudConnectionMode, pub database_config: Option, } @@ -202,7 +193,7 @@ impl ProverInitArgs { pub(crate) fn fill_values_with_prompt( &self, shell: &Shell, - default_compressor_key_path: &str, + default_compressor_key_path: &Path, chain_config: &ChainConfig, ) -> anyhow::Result { let proof_store = self.fill_proof_storage_values_with_prompt(shell)?; @@ -355,11 +346,11 @@ impl ProverInitArgs { fn fill_setup_compressor_key_values_with_prompt( &self, - default_path: &str, + default_path: &Path, ) -> Option { if self.dev { return Some(CompressorKeysArgs { - path: Some(default_path.to_string()), + path: Some(default_path.to_owned()), }); } @@ -512,20 +503,18 @@ impl ProverInitArgs { } } - fn get_cloud_type_with_prompt(&self) -> CloudConnectionMode { + fn get_cloud_type_with_prompt(&self) -> InternalCloudConnectionMode { if self.dev { - return CloudConnectionMode::Local; + return InternalCloudConnectionMode::Local; } - let cloud_type = self.cloud_type.clone().unwrap_or_else(|| { + self.cloud_type.clone().unwrap_or_else(|| { PromptSelect::new( MSG_CLOUD_TYPE_PROMPT, InternalCloudConnectionMode::iter().rev(), ) .ask() - }); - - cloud_type.into() + }) } fn fill_database_values_with_prompt( diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs b/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs index 88eec0688da7..77bb45a1336d 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/compressor_keys.rs @@ -1,12 +1,13 @@ +use std::path::{Path, PathBuf}; + use anyhow::Context; use xshell::Shell; use zkstack_cli_common::{logger, spinner::Spinner}; -use zkstack_cli_config::{get_link_to_prover, EcosystemConfig, GeneralConfig}; +use zkstack_cli_config::{get_link_to_prover, raw::PatchedConfig, EcosystemConfig}; use 
super::args::compressor_keys::CompressorKeysArgs; use crate::messages::{ - MSG_CHAIN_NOT_FOUND_ERR, MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER, - MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR, MSG_SETUP_KEY_PATH_ERROR, + MSG_CHAIN_NOT_FOUND_ERR, MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER, MSG_SETUP_KEY_PATH_ERROR, }; pub(crate) async fn run(shell: &Shell, args: CompressorKeysArgs) -> anyhow::Result<()> { @@ -14,7 +15,7 @@ pub(crate) async fn run(shell: &Shell, args: CompressorKeysArgs) -> anyhow::Resu let chain_config = ecosystem_config .load_current_chain() .context(MSG_CHAIN_NOT_FOUND_ERR)?; - let mut general_config = chain_config.get_general_config()?; + let mut general_config = chain_config.get_general_config().await?.patched(); let default_path = get_default_compressor_keys_path(&ecosystem_config)?; let args = args.fill_values_with_prompt(&default_path); @@ -23,41 +24,29 @@ pub(crate) async fn run(shell: &Shell, args: CompressorKeysArgs) -> anyhow::Resu download_compressor_key(shell, &mut general_config, &path)?; - chain_config.save_general_config(&general_config)?; - + general_config.save().await?; Ok(()) } pub(crate) fn download_compressor_key( shell: &Shell, - general_config: &mut GeneralConfig, - path: &str, + general_config: &mut PatchedConfig, + path: &Path, ) -> anyhow::Result<()> { let spinner = Spinner::new(MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER); - let mut compressor_config: zksync_config::configs::FriProofCompressorConfig = general_config - .proof_compressor_config - .as_ref() - .expect(MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR) - .clone(); - compressor_config.universal_setup_path = path.to_string(); - general_config.proof_compressor_config = Some(compressor_config.clone()); + general_config.insert_path("proof_compressor.universal_setup_path", path)?; - let path = std::path::Path::new(path); - - logger::info(format!( - "Downloading setup key by URL: {}", - compressor_config.universal_setup_download_url - )); + let url = general_config + 
.base() + .get::("proof_compressor.universal_setup_download_url")?; + logger::info(format!("Downloading setup key by URL: {url}")); let client = reqwest::blocking::Client::builder() .timeout(std::time::Duration::from_secs(600)) .build()?; - let response = client - .get(compressor_config.universal_setup_download_url) - .send()? - .bytes()?; + let response = client.get(url).send()?.bytes()?; shell.write_file(path, &response)?; spinner.finish(); @@ -66,10 +55,7 @@ pub(crate) fn download_compressor_key( pub fn get_default_compressor_keys_path( ecosystem_config: &EcosystemConfig, -) -> anyhow::Result { +) -> anyhow::Result { let link_to_prover = get_link_to_prover(ecosystem_config); - let path = link_to_prover.join("keys/setup/setup_compact.key"); - let string = path.to_str().unwrap(); - - Ok(String::from(string)) + Ok(link_to_prover.join("keys/setup/setup_compact.key")) } diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/init.rs b/zkstack_cli/crates/zkstack/src/commands/prover/init.rs index 51034e02a213..edfdc31e6186 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/init.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/init.rs @@ -10,28 +10,23 @@ use zkstack_cli_common::{ spinner::Spinner, }; use zkstack_cli_config::{ - copy_configs, get_link_to_prover, set_prover_database, traits::SaveConfigWithBasePath, - EcosystemConfig, + copy_configs, get_link_to_prover, raw::PatchedConfig, set_prover_database, EcosystemConfig, }; use zksync_config::{configs::object_store::ObjectStoreMode, ObjectStoreConfig}; use super::{ - args::init::{ProofStorageConfig, ProverInitArgs}, - compressor_keys::download_compressor_key, + args::init::{ProofStorageConfig, ProofStorageFileBacked, ProverInitArgs}, + compressor_keys::{download_compressor_key, get_default_compressor_keys_path}, gcs::create_gcs_bucket, init_bellman_cuda::run as init_bellman_cuda, setup_keys, }; use crate::{ - commands::prover::{ - args::init::ProofStorageFileBacked, 
compressor_keys::get_default_compressor_keys_path, - }, consts::{PROVER_MIGRATIONS, PROVER_STORE_MAX_RETRIES}, messages::{ MSG_CHAIN_NOT_FOUND_ERR, MSG_FAILED_TO_DROP_PROVER_DATABASE_ERR, - MSG_GENERAL_CONFIG_NOT_FOUND_ERR, MSG_INITIALIZING_DATABASES_SPINNER, - MSG_INITIALIZING_PROVER_DATABASE, MSG_PROVER_CONFIG_NOT_FOUND_ERR, MSG_PROVER_INITIALIZED, - MSG_SETUP_KEY_PATH_ERROR, + MSG_INITIALIZING_DATABASES_SPINNER, MSG_INITIALIZING_PROVER_DATABASE, + MSG_PROVER_INITIALIZED, MSG_SETUP_KEY_PATH_ERROR, }, }; @@ -45,15 +40,16 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( .context(MSG_CHAIN_NOT_FOUND_ERR)?; let args = args.fill_values_with_prompt(shell, &default_compressor_key_path, &chain_config)?; - if chain_config.get_general_config().is_err() || chain_config.get_secrets_config().is_err() { + if chain_config.get_general_config().await.is_err() + || chain_config.get_secrets_config().await.is_err() + { copy_configs(shell, &ecosystem_config.link_to_code, &chain_config.configs)?; } - let mut general_config = chain_config - .get_general_config() - .context(MSG_GENERAL_CONFIG_NOT_FOUND_ERR)?; + let mut general_config = chain_config.get_general_config().await?.patched(); - let proof_object_store_config = get_object_store_config(shell, Some(args.proof_store))?; + let proof_object_store_config = + get_object_store_config(shell, Some(args.proof_store))?.unwrap(); let public_object_store_config = get_object_store_config(shell, args.public_store)?; if let Some(args) = args.compressor_key_args { @@ -66,22 +62,23 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( setup_keys::run(args, shell).await?; } - let mut prover_config = general_config - .prover_config - .expect(MSG_PROVER_CONFIG_NOT_FOUND_ERR); - prover_config - .prover_object_store - .clone_from(&proof_object_store_config); + set_object_store( + &mut general_config, + "prover.prover_object_store", + &proof_object_store_config, + )?; if let 
Some(public_object_store_config) = public_object_store_config { - prover_config.shall_save_to_public_bucket = true; - prover_config.public_object_store = Some(public_object_store_config); + general_config.insert("prover.shall_save_to_public_bucket", true)?; + set_object_store( + &mut general_config, + "prover.public_object_store", + &public_object_store_config, + )?; } else { - prover_config.shall_save_to_public_bucket = false; + general_config.insert("prover.shall_save_to_public_bucket", false)?; } - prover_config.cloud_type = args.cloud_type; - general_config.prover_config = Some(prover_config); - - chain_config.save_general_config(&general_config)?; + general_config.insert_yaml("prover.cloud_type", args.cloud_type)?; + general_config.save().await?; if let Some(args) = args.bellman_cuda_config { init_bellman_cuda(shell, args).await?; @@ -90,9 +87,9 @@ pub(crate) async fn run(args: ProverInitArgs, shell: &Shell) -> anyhow::Result<( if let Some(prover_db) = &args.database_config { let spinner = Spinner::new(MSG_INITIALIZING_DATABASES_SPINNER); - let mut secrets = chain_config.get_secrets_config()?; + let mut secrets = chain_config.get_secrets_config().await?.patched(); set_prover_database(&mut secrets, &prover_db.database_config)?; - secrets.save_with_base_path(shell, &chain_config.configs)?; + secrets.save().await?; initialize_prover_database( shell, &prover_db.database_config, @@ -135,6 +132,50 @@ fn get_object_store_config( Ok(object_store) } +fn set_object_store( + patch: &mut PatchedConfig, + prefix: &str, + config: &ObjectStoreConfig, +) -> anyhow::Result<()> { + patch.insert(&format!("{prefix}.max_retries"), config.max_retries)?; + match &config.mode { + ObjectStoreMode::FileBacked { + file_backed_base_path, + } => { + patch.insert_yaml( + &format!("{prefix}.file_backed.file_backed_base_path"), + file_backed_base_path, + )?; + } + ObjectStoreMode::GCS { bucket_base_url } => { + patch.insert( + &format!("{prefix}.gcs.bucket_base_url"), + 
bucket_base_url.clone(), + )?; + } + ObjectStoreMode::GCSWithCredentialFile { + bucket_base_url, + gcs_credential_file_path, + } => { + patch.insert( + &format!("{prefix}.gcs_with_credential_file.bucket_base_url"), + bucket_base_url.clone(), + )?; + patch.insert( + &format!("{prefix}.gcs_with_credential_file.gcs_credential_file_path"), + gcs_credential_file_path.clone(), + )?; + } + ObjectStoreMode::GCSAnonymousReadOnly { bucket_base_url } => { + patch.insert( + &format!("{prefix}.gcs_anonymous_read_only.bucket_base_url"), + bucket_base_url.clone(), + )?; + } + } + Ok(()) +} + async fn initialize_prover_database( shell: &Shell, prover_db_config: &DatabaseConfig, diff --git a/zkstack_cli/crates/zkstack/src/commands/prover/run.rs b/zkstack_cli/crates/zkstack/src/commands/prover/run.rs index 495c41ef8255..e7101d92ab6e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/prover/run.rs +++ b/zkstack_cli/crates/zkstack/src/commands/prover/run.rs @@ -79,7 +79,7 @@ pub(crate) async fn run(args: ProverRunArgs, shell: &Shell) -> anyhow::Result<() if in_docker { let path_to_configs = chain.configs.clone(); let path_to_prover = get_link_to_prover(&ecosystem_config); - update_setup_data_path(&chain, "prover/data/keys".to_string())?; + update_setup_data_path(&chain, "prover/data/keys").await?; run_dockerized_component( shell, component.image_name(), @@ -93,7 +93,7 @@ pub(crate) async fn run(args: ProverRunArgs, shell: &Shell) -> anyhow::Result<() &path_to_ecosystem, )? 
} else { - update_setup_data_path(&chain, "data/keys".to_string())?; + update_setup_data_path(&chain, "data/keys").await?; run_binary_component( shell, component.binary_name(), @@ -151,13 +151,8 @@ fn run_binary_component( cmd.run().context(error) } -fn update_setup_data_path(chain: &ChainConfig, path: String) -> anyhow::Result<()> { - let mut general_config = chain.get_general_config()?; - general_config - .prover_config - .as_mut() - .expect("Prover config not found") - .setup_data_path = path; - chain.save_general_config(&general_config)?; - Ok(()) +async fn update_setup_data_path(chain: &ChainConfig, path: &str) -> anyhow::Result<()> { + let mut general_config = chain.get_general_config().await?.patched(); + general_config.insert_path("prover.setup_data_path", path.as_ref())?; + general_config.save().await } diff --git a/zkstack_cli/crates/zkstack/src/commands/server.rs b/zkstack_cli/crates/zkstack/src/commands/server.rs index e1e4ca3ff99d..11b10ecec594 100644 --- a/zkstack_cli/crates/zkstack/src/commands/server.rs +++ b/zkstack_cli/crates/zkstack/src/commands/server.rs @@ -8,7 +8,7 @@ use zkstack_cli_common::{ }; use zkstack_cli_config::{ traits::FileConfigWithDefaultName, ChainConfig, ContractsConfig, EcosystemConfig, - GeneralConfig, GenesisConfig, SecretsConfig, WalletsConfig, + WalletsConfig, GENERAL_FILE, GENESIS_FILE, SECRETS_FILE, }; use zksync_config::configs::gateway::GatewayChainConfig; @@ -78,10 +78,10 @@ fn run_server( .run( shell, mode, - GenesisConfig::get_path_with_base_path(&chain_config.configs), + chain_config.configs.join(GENESIS_FILE), WalletsConfig::get_path_with_base_path(&chain_config.configs), - GeneralConfig::get_path_with_base_path(&chain_config.configs), - SecretsConfig::get_path_with_base_path(&chain_config.configs), + chain_config.configs.join(GENERAL_FILE), + chain_config.configs.join(SECRETS_FILE), ContractsConfig::get_path_with_base_path(&chain_config.configs), gateway_contracts, vec![], @@ -93,13 +93,9 @@ async fn 
wait_for_server(args: WaitArgs, chain_config: &ChainConfig) -> anyhow:: let verbose = global_config().verbose; let health_check_port = chain_config - .get_general_config()? - .api_config - .as_ref() - .context("no API config")? - .healthcheck - .port; - + .get_general_config() + .await? + .get("api.healthcheck.port")?; logger::info(MSG_WAITING_FOR_SERVER); args.poll_health_check(health_check_port, verbose).await?; logger::info(msg_waiting_for_server_success(health_check_port)); diff --git a/zkstack_cli/crates/zkstack/src/commands/update.rs b/zkstack_cli/crates/zkstack/src/commands/update.rs index 0e1d385f8fef..095619b43f0e 100644 --- a/zkstack_cli/crates/zkstack/src/commands/update.rs +++ b/zkstack_cli/crates/zkstack/src/commands/update.rs @@ -1,6 +1,7 @@ use std::path::Path; -use anyhow::{Context, Ok}; +use anyhow::Context; +use url::Url; use xshell::Shell; use zkstack_cli_common::{ db::migrate_db, @@ -182,17 +183,14 @@ async fn update_chain( )?; } - let secrets = chain.get_secrets_config()?; - - if let Some(db) = secrets.database { - if let Some(url) = db.server_url { - let path_to_migration = chain.link_to_code.join(SERVER_MIGRATIONS); - migrate_db(shell, path_to_migration, url.expose_url()).await?; - } - if let Some(url) = db.prover_url { - let path_to_migration = chain.link_to_code.join(PROVER_MIGRATIONS); - migrate_db(shell, path_to_migration, url.expose_url()).await?; - } + let secrets = chain.get_secrets_config().await?; + if let Some(url) = secrets.get_opt::("database.server_url")? { + let path_to_migration = chain.link_to_code.join(SERVER_MIGRATIONS); + migrate_db(shell, path_to_migration, &url).await?; + } + if let Some(url) = secrets.get_opt::("database.prover_url")? 
{ + let path_to_migration = chain.link_to_code.join(PROVER_MIGRATIONS); + migrate_db(shell, path_to_migration, &url).await?; } Ok(()) } diff --git a/zkstack_cli/crates/zkstack/src/external_node.rs b/zkstack_cli/crates/zkstack/src/external_node.rs index 21d4e0db5592..7cc4dcbe6d68 100644 --- a/zkstack_cli/crates/zkstack/src/external_node.rs +++ b/zkstack_cli/crates/zkstack/src/external_node.rs @@ -3,10 +3,8 @@ use std::path::PathBuf; use anyhow::Context; use xshell::Shell; use zkstack_cli_config::{ - external_node::ENConfig, traits::FileConfigWithDefaultName, ChainConfig, GeneralConfig, - SecretsConfig, + ChainConfig, CONSENSUS_CONFIG_FILE, EN_CONFIG_FILE, GENERAL_FILE, SECRETS_FILE, }; -use zksync_config::configs::consensus::ConsensusConfig; use crate::messages::MSG_FAILED_TO_RUN_SERVER_ERR; @@ -28,17 +26,17 @@ impl RunExternalNode { .external_node_config_path .clone() .context("External node is not initialized")?; - let general_config = GeneralConfig::get_path_with_base_path(&en_path); - let secrets = SecretsConfig::get_path_with_base_path(&en_path); - let enconfig = ENConfig::get_path_with_base_path(&en_path); - let consensus_config = ConsensusConfig::get_path_with_base_path(&en_path); + let general_config = en_path.join(GENERAL_FILE); + let secrets = en_path.join(SECRETS_FILE); + let en_config = en_path.join(EN_CONFIG_FILE); + let consensus_config = en_path.join(CONSENSUS_CONFIG_FILE); Ok(Self { components, code_path: chain_config.link_to_code.clone(), general_config, secrets, - en_config: enconfig, + en_config, consensus_config, }) } diff --git a/zkstack_cli/crates/zkstack/src/messages.rs b/zkstack_cli/crates/zkstack/src/messages.rs index 179f7100ef9e..4a2cbc950b44 100644 --- a/zkstack_cli/crates/zkstack/src/messages.rs +++ b/zkstack_cli/crates/zkstack/src/messages.rs @@ -197,8 +197,6 @@ pub(super) const MSG_EVM_EMULATOR_HASH_MISSING_ERR: &str = does not contain EVM emulator hash"; /// Chain genesis related messages -pub(super) const 
MSG_L1_SECRETS_MUST_BE_PRESENTED: &str = "L1 secret must be presented"; -pub(super) const MSG_DATABASE_MUST_BE_PRESENTED: &str = "Database secret must be presented"; pub(super) const MSG_SERVER_DB_URL_HELP: &str = "Server database url without database name"; pub(super) const MSG_SERVER_DB_NAME_HELP: &str = "Server database name"; pub(super) const MSG_PROVER_DB_URL_HELP: &str = "Prover database url without database name"; @@ -370,9 +368,6 @@ pub(super) fn msg_preparing_en_config_is_done(path: &Path) -> String { pub(super) const MSG_EXTERNAL_NODE_CONFIG_NOT_INITIALIZED: &str = "External node is not initialized"; -pub(super) const MSG_CONSENSUS_CONFIG_MISSING_ERR: &str = "Consensus config is missing"; -pub(super) const MSG_CONSENSUS_SECRETS_MISSING_ERR: &str = "Consensus secrets config is missing"; -pub(super) const MSG_CONSENSUS_SECRETS_NODE_KEY_MISSING_ERR: &str = "Consensus node key is missing"; pub(super) const MSG_BUILDING_EN: &str = "Building external node"; pub(super) const MSG_FAILED_TO_BUILD_EN_ERR: &str = "Failed to build external node"; @@ -413,8 +408,6 @@ pub(super) const MSG_PROOF_STORE_GCS_BUCKET_BASE_URL_ERR: &str = "Bucket base URL should start with gs://"; pub(super) const MSG_PROOF_STORE_GCS_CREDENTIALS_FILE_PROMPT: &str = "Provide the path to the GCS credentials file:"; -pub(super) const MSG_GENERAL_CONFIG_NOT_FOUND_ERR: &str = "General config not found"; -pub(super) const MSG_PROVER_CONFIG_NOT_FOUND_ERR: &str = "Prover config not found"; pub(super) const MSG_PROVER_INITIALIZED: &str = "Prover has been initialized successfully"; pub(super) const MSG_CREATE_GCS_BUCKET_PROMPT: &str = "Do you want to create a new GCS bucket?"; pub(super) const MSG_CREATE_GCS_BUCKET_PROJECT_ID_PROMPT: &str = "Select the project ID:"; @@ -422,8 +415,6 @@ pub(super) const MSG_CREATE_GCS_BUCKET_PROJECT_ID_NO_PROJECTS_PROMPT: &str = "Provide a project ID:"; pub(super) const MSG_CREATE_GCS_BUCKET_NAME_PROMTP: &str = "What do you want to name the bucket?"; pub(super) const 
MSG_CREATE_GCS_BUCKET_LOCATION_PROMPT: &str = "What location do you want to use? Find available locations at https://cloud.google.com/storage/docs/locations"; -pub(super) const MSG_PROOF_COMPRESSOR_CONFIG_NOT_FOUND_ERR: &str = - "Proof compressor config not found"; pub(super) const MSG_DOWNLOADING_SETUP_COMPRESSOR_KEY_SPINNER: &str = "Downloading compressor setup key..."; pub(super) const MSG_DOWNLOAD_SETUP_COMPRESSOR_KEY_PROMPT: &str = @@ -569,7 +560,6 @@ pub(super) fn msg_updating_chain(chain: &str) -> String { pub(super) const MSG_RECEIPT_MISSING: &str = "receipt missing"; pub(super) const MSG_STATUS_MISSING: &str = "status missing"; pub(super) const MSG_TRANSACTION_FAILED: &str = "transaction failed"; -pub(super) const MSG_API_CONFIG_MISSING: &str = "api config missing"; pub(super) const MSG_MULTICALL3_CONTRACT_NOT_CONFIGURED: &str = "multicall3 contract not configured"; pub(super) const MSG_GOVERNOR_PRIVATE_KEY_NOT_SET: &str = "governor private key not set"; diff --git a/zkstack_cli/crates/zkstack/src/utils/consensus.rs b/zkstack_cli/crates/zkstack/src/utils/consensus.rs index 0a1287067434..7e4273e4c253 100644 --- a/zkstack_cli/crates/zkstack/src/utils/consensus.rs +++ b/zkstack_cli/crates/zkstack/src/utils/consensus.rs @@ -1,31 +1,9 @@ use anyhow::Context as _; -use secrecy::{ExposeSecret, Secret}; -use zkstack_cli_config::ChainConfig; -use zksync_config::configs::consensus::{ - AttesterPublicKey, AttesterSecretKey, ConsensusSecrets, GenesisSpec, NodePublicKey, - NodeSecretKey, ProtocolVersion, ValidatorPublicKey, ValidatorSecretKey, WeightedAttester, - WeightedValidator, -}; +use serde::{Deserialize, Serialize}; +use zkstack_cli_config::{raw::PatchedConfig, ChainConfig}; use zksync_consensus_crypto::{Text, TextFmt}; use zksync_consensus_roles::{attester, node, validator}; -pub(crate) fn parse_attester_committee( - attesters: &[WeightedAttester], -) -> anyhow::Result { - let attesters: Vec<_> = attesters - .iter() - .enumerate() - .map(|(i, v)| { - 
Ok(attester::WeightedAttester { - key: Text::new(&v.key.0).decode().context("key").context(i)?, - weight: v.weight, - }) - }) - .collect::>() - .context("attesters")?; - attester::Committee::new(attesters).context("Committee::new()") -} - #[derive(Debug, Clone)] pub struct ConsensusSecretKeys { validator_key: validator::SecretKey, @@ -53,56 +31,93 @@ fn get_consensus_public_keys(consensus_keys: &ConsensusSecretKeys) -> ConsensusP } } -pub fn get_genesis_specs( +/// Mirrors key–address pair used in the consensus config. +#[derive(Debug, Serialize)] +pub(crate) struct KeyAndAddress { + pub key: String, + pub addr: String, +} + +#[derive(Debug, Serialize, Deserialize)] +struct Weighted { + key: String, + weight: u64, +} + +impl Weighted { + fn new(key: String, weight: u64) -> Self { + Self { key, weight } + } +} + +pub(crate) fn read_attester_committee_yaml( + raw_yaml: serde_yaml::Value, +) -> anyhow::Result { + #[derive(Debug, Deserialize)] + struct SetAttesterCommitteeFile { + attesters: Vec, + } + + let file: SetAttesterCommitteeFile = + serde_yaml::from_value(raw_yaml).context("invalid attester committee format")?; + let attesters: Vec<_> = file + .attesters + .iter() + .enumerate() + .map(|(i, v)| { + Ok(attester::WeightedAttester { + key: Text::new(&v.key).decode().context("key").context(i)?, + weight: v.weight, + }) + }) + .collect::>() + .context("attesters")?; + attester::Committee::new(attesters).context("Committee::new()") +} + +pub fn set_genesis_specs( + general: &mut PatchedConfig, chain_config: &ChainConfig, consensus_keys: &ConsensusSecretKeys, -) -> GenesisSpec { +) -> anyhow::Result<()> { let public_keys = get_consensus_public_keys(consensus_keys); let validator_key = public_keys.validator_key.encode(); let attester_key = public_keys.attester_key.encode(); + let leader = validator_key.clone(); - let validator = WeightedValidator { - key: ValidatorPublicKey(validator_key.clone()), - weight: 1, - }; - let attester = WeightedAttester { - key: 
AttesterPublicKey(attester_key), - weight: 1, - }; - let leader = ValidatorPublicKey(validator_key); - - GenesisSpec { - chain_id: chain_config.chain_id, - protocol_version: ProtocolVersion(1), - validators: vec![validator], - attesters: vec![attester], - leader, - registry_address: None, - seed_peers: [].into(), - } + general.insert( + "consensus.genesis_spec.chain_id", + chain_config.chain_id.as_u64(), + )?; + general.insert("consensus.genesis_spec.protocol_version", 1_u64)?; + general.insert_yaml( + "consensus.genesis_spec.validators", + [Weighted::new(validator_key, 1)], + )?; + general.insert_yaml( + "consensus.genesis_spec.attesters", + [Weighted::new(attester_key, 1)], + )?; + general.insert("consensus.genesis_spec.leader", leader)?; + Ok(()) } -pub fn get_consensus_secrets(consensus_keys: &ConsensusSecretKeys) -> ConsensusSecrets { +pub(crate) fn set_consensus_secrets( + secrets: &mut PatchedConfig, + consensus_keys: &ConsensusSecretKeys, +) -> anyhow::Result<()> { let validator_key = consensus_keys.validator_key.encode(); let attester_key = consensus_keys.attester_key.encode(); let node_key = consensus_keys.node_key.encode(); - - ConsensusSecrets { - validator_key: Some(ValidatorSecretKey(Secret::new(validator_key))), - attester_key: Some(AttesterSecretKey(Secret::new(attester_key))), - node_key: Some(NodeSecretKey(Secret::new(node_key))), - } -} - -pub fn node_public_key(secrets: &ConsensusSecrets) -> anyhow::Result> { - Ok(node_key(secrets)?.map(|node_secret_key| NodePublicKey(node_secret_key.public().encode()))) -} -fn node_key(secrets: &ConsensusSecrets) -> anyhow::Result> { - read_secret_text(secrets.node_key.as_ref().map(|x| &x.0)) + secrets.insert("consensus.validator_key", validator_key)?; + secrets.insert("consensus.attester_key", attester_key)?; + secrets.insert("consensus.node_key", node_key)?; + Ok(()) } -fn read_secret_text(text: Option<&Secret>) -> anyhow::Result> { - text.map(|text| Text::new(text.expose_secret()).decode()) - .transpose() - 
.map_err(|_| anyhow::format_err!("invalid format"))
+pub fn node_public_key(secret_key: &str) -> anyhow::Result<String> {
+    let secret_key: node::SecretKey = Text::new(secret_key)
+        .decode()
+        .context("invalid node key format")?;
+    Ok(secret_key.public().encode())
 }