diff --git a/scripts/pmtp/.gitignore b/scripts/pmtp/.gitignore index e6905a2395..9959f86ca3 100644 --- a/scripts/pmtp/.gitignore +++ b/scripts/pmtp/.gitignore @@ -1 +1,2 @@ -.env* \ No newline at end of file +.env* +rewards.json-* \ No newline at end of file diff --git a/scripts/pmtp/scenarios/no-policy-liquity-changes/4-add-liquidity-symmetry.sh b/scripts/pmtp/scenarios/no-policy-liquity-changes/4-add-liquidity-symmetry.sh new file mode 100755 index 0000000000..9f14a3444b --- /dev/null +++ b/scripts/pmtp/scenarios/no-policy-liquity-changes/4-add-liquidity-symmetry.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +set -x + +sifnoded tx clp add-liquidity \ + --from $SIF_ACT \ + --keyring-backend test \ + --symbol cusdt \ + --nativeAmount 1000000000000000000000000 \ + --externalAmount 25378853317 \ + --fees 100000000000000000rowan \ + --node ${SIFNODE_NODE} \ + --chain-id $SIFNODE_CHAIN_ID \ + --broadcast-mode block \ + -y \ No newline at end of file diff --git a/scripts/pmtp/send.sh b/scripts/pmtp/send.sh new file mode 100755 index 0000000000..f7d7c63bc5 --- /dev/null +++ b/scripts/pmtp/send.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +set -x + +sifnoded tx bank send \ + $SIF_ACT \ + sif144w8cpva2xkly74xrms8djg69y3mljzplx3fjt \ + 9299999999750930000rowan \ + --keyring-backend test \ + --node ${SIFNODE_NODE} \ + --chain-id $SIFNODE_CHAIN_ID \ + --fees 100000000000000000rowan \ + --broadcast-mode block \ + -y \ No newline at end of file diff --git a/scripts/pmtp/set-rewards-params.sh b/scripts/pmtp/set-rewards-params.sh new file mode 100755 index 0000000000..3ab5aa6af1 --- /dev/null +++ b/scripts/pmtp/set-rewards-params.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +set -x + +sifnoded tx clp reward-params \ + --cancelPeriod 60 \ + --lockPeriod 20 \ + --from=$SIF_ACT \ + --keyring-backend=test \ + --fees 100000000000000000rowan \ + --gas 500000 \ + --node ${SIFNODE_NODE} \ + --chain-id=$SIFNODE_CHAIN_ID \ + --broadcast-mode=block \ + -y \ No newline at end of file diff --git 
a/scripts/rewards/rewards-period.js b/scripts/rewards/rewards-period.js index 13b845d882..63deb33942 100755 --- a/scripts/rewards/rewards-period.js +++ b/scripts/rewards/rewards-period.js @@ -2,14 +2,14 @@ const fs = require("fs"); -async function createAssetRewardsFile() { - const csv = fs.readFileSync("./pools.csv", "utf-8"); +async function createAssetRewardsFile(periodId, startBlock, endBlock) { + const csv = fs.readFileSync(`./${periodId}.csv`, "utf-8"); const entries = JSON.parse(fs.readFileSync("./entries.json", "utf-8")).result .registry.entries; - const lines = csv.split("\r\n"); + const lines = csv.split("\r\n").filter((line) => line.split(",")[1] !== ""); - let [, allocation] = lines[0].split('"'); - allocation = parseInt(allocation.trim().split(",").join("")); + let [, allocation] = lines[1].split('"'); + allocation = `${allocation.trim().split(",").join("")}${"0".repeat(18)}`; const multipliers = lines.slice(1).map((line) => { const [, poolName, multiplier] = line.split(","); @@ -30,22 +30,25 @@ async function createAssetRewardsFile() { }; }); - const rewards = [ - { - reward_period_id: "RP_1", - reward_period_start_block: 1, - reward_period_end_block: 100, - reward_period_allocation: allocation, - reward_period_pool_multipliers: multipliers, - reward_period_default_multiplier: "0.0", - }, - ]; + const rewardPeriod = { + reward_period_id: periodId, + reward_period_start_block: startBlock, + reward_period_end_block: endBlock, + reward_period_allocation: allocation, + reward_period_pool_multipliers: multipliers, + reward_period_default_multiplier: "0.0", + }; - fs.writeFileSync("./rewards.json", JSON.stringify(rewards, null, 2)); + return rewardPeriod; } async function start() { - await createAssetRewardsFile(); + const rewardPeriods = [ + await createAssetRewardsFile("RP_2", 6586931, 6687730), + await createAssetRewardsFile("RP_1", 6486131, 6586930), + ]; + + fs.writeFileSync("./rewards.json", JSON.stringify(rewardPeriods, null, 2)); } start(); diff 
--git a/test/integration/execute_integration_tests_against_test_chain_peg.sh b/test/integration/execute_integration_tests_against_test_chain_peg.sh index 8aff910f9c..68d10377f5 100755 --- a/test/integration/execute_integration_tests_against_test_chain_peg.sh +++ b/test/integration/execute_integration_tests_against_test_chain_peg.sh @@ -20,7 +20,7 @@ python3 -m pytest -olog_level=$loglevel -v -olog_file=/tmp/log.txt -v \ ${TEST_INTEGRATION_PY_DIR}/test_random_currency_roundtrip.py \ ${TEST_INTEGRATION_PY_DIR}/test_rollback_chain.py \ ${TEST_INTEGRATION_PY_DIR}/test_ofac_blocklist.py \ - ${TEST_INTEGRATION_PY_DIR}/test_integration_framework.py \ + ${TEST_INTEGRATION_PY_DIR}/test_siftool_framework.py \ ${TEST_INTEGRATION_PY_DIR}/test_inflate_tokens.py \ # run replay tests after other tests since they interact badly with replaydb diff --git a/test/integration/framework/.gitignore b/test/integration/framework/.gitignore index 92e506e338..5cc945694d 100644 --- a/test/integration/framework/.gitignore +++ b/test/integration/framework/.gitignore @@ -1,3 +1,4 @@ /venv/ -/__pycache__/ +/build/ +/src/siftool/__pycache__/ /.lock diff --git a/test/integration/framework/README.md b/test/integration/framework/README.md index 1b53acc6a3..f579ef0f1d 100644 --- a/test/integration/framework/README.md +++ b/test/integration/framework/README.md @@ -1,61 +1,16 @@ -# Resources +# siftool -1. Docker setup in docker/ (currently only on future/peggy2 branch, Tim Lind): -- setups two sifnode instances running independent chains + IBC relayer (ts-relayer) +To start the local environment: -2. Brent's PoC (docker): https://github.com/Sifchain/sifchain-deploy/tree/feature/ibc-poc/docker/localnet/ibc +siftool run-env -3. 
Test environment for testing the new Sifchain public SDK (Caner): -- https://docs.google.com/document/d/1MAlg-I0xMnUvbavAZdAN---WuqbyuRyKw-6Lfgfe130/edit -- https://github.com/sifchain/sifchain-ui/blob/3868ac7138c6c4149dced4ced5b36690e5fc1da7/ui/core/src/config/chains/index.ts#L1 -- https://github.com/Sifchain/sifchain-ui/blob/3868ac7138c6c4149dced4ced5b36690e5fc1da7/ui/core/src/config/chains/cosmoshub/index.ts +It will automatically install Python dependencies upon first use. This command will detect if you are on Peggy1 or +Peggy2 branch, and will start local processes accordingly: +- For Peggy1, it will run ganache-cli, sifnoded and ebrelayer. +- For Peggy2, it will run hardhat, sifnoded and two instances of ebrelayer. -4. scripts/init-multichain.sh (on future/peggy2 branch) +At the moment, the environment consists of Ethereum-compliant local node (ganache/hardhat), one `sifnode` validator and +a Peggy bridge implemented by `ebrelayer` binary. -5. https://github.com/Sifchain/sifnode/commit/9ab620e148be8f4850eef59d39b0e869956f87a4 -6. sifchain-devops script to deploy TestNet (by _IM): https://github.com/Sifchain/sifchain-devops/blob/main/scripts/testnet/launch.sh#L19 - -7. Tempnet scripts by chainops - -8. In Sifchain/sifnode/scripts there's init.sh which, if you have everything installed, will run a single node. Ping - @Brianosaurus for more info. - -9. erowan should be deployed and whitelisted (assumption) - -# RPC endpoints: -e.g. 
SIFNODE="https://api-testnet.sifchain.finance" -- $SIFNODE/node_info -- $SIFNODE/tokenregistry/entries - -# Peggy2 devenv -- Directory: smart-contracts/scripts/src/devenv -- Init: cd smart-contracts; rm -rf node_modules; npm install (plan is to move to yarn eventually) -- Run: GOBIN=/home/anderson/go/bin npx hardhat run scripts/devenv.ts -``` -{ - // vscode launch.json file to debug the Dev Environment Scripts - "version": "0.2.0", - "configurations": [ - { - "runtimeArgs": [ - "node_modules/.bin/hardhat", - "run" - ], - "cwd": "${workspaceFolder}/smart-contracts", - "type": "node", - "request": "launch", - "name": "Dev Environment Debugger", - "env": { - "GOBIN": "/home/anderson/go/bin" - }, - "skipFiles": [ - "/**" - ], - "program": "${workspaceFolder}/smart-contracts/scripts/devenv.ts", - } - ] -} -``` -- Integration test to be targeted for PoC: test_eth_transfers.py -- Dependency diagram: https://files.slack.com/files-pri/T0187TWB4V8-F02BC477N79/sifchaindevenv.jpg +Original design document: https://docs.google.com/document/d/1IhE2Y03Z48ROmTwO9-J_0x_lx2vIOFkyDFG7BkAIqCk/edit# diff --git a/test/integration/framework/hardhat.py b/test/integration/framework/hardhat.py deleted file mode 100644 index 9cbcf1ffce..0000000000 --- a/test/integration/framework/hardhat.py +++ /dev/null @@ -1,95 +0,0 @@ -import json -from dataclasses import dataclass -from common import * -from command import buildcmd - - -# Peggy uses different smart contracts (e.g. in Peggy2.0 there is no BridgeToken, there is CosmosBridge etc.) -@dataclass -class Peggy2SmartContractAddresses: - bridge_bank: str - bridge_registry: str - cosmos_bridge: str - rowan: str - - -class Hardhat: - def __init__(self, cmd): - self.cmd = cmd - self.project = cmd.project - - @staticmethod - def default_accounts(): - # Hardhat doesn't provide a way to get the private keys of its default accounts, so just hardcode them for now. 
- # TODO hardhat prints 20 accounts upon startup - # Keep synced to smart-contracts/src/devenv/hardhatNode.ts:defaultHardhatAccounts - # Format: [address, private_key] - # Note: for compatibility with ganache, private keys should be stripped of "0x" prefix - # (when you pass a private key to ebrelayer via ETHEREUM_PRIVATE_KEY, the key is treated as invalid) - return [[address, private_key[2:]] for address, private_key in [[ - "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", - "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", - ], [ - "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", - "0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", - ], [ - "0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc", - "0x5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a", - ], [ - "0x90f79bf6eb2c4f870365e785982e1f101e93b906", - "0x7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6", - ], [ - "0x15d34aaf54267db7d7c367839aaf71a00a2c6a65", - "0x47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", - ], [ - "0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc", - "0x8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", - ], [ - "0x976ea74026e726554db657fa54763abd0c3a0aa9", - "0x92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", - ], [ - "0x14dc79964da2c08b23698b3d3cc7ca32193d9955", - "0x4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", - ], [ - "0x23618e81e3f5cdf7f54c3d65f7fbc0abf5b21e8f", - "0xdbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", - ], [ - "0xa0ee7a142d267c1f36714e4a8f75612f20a79720", - "0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", - ]]] - - def build_start_args(self, hostname=None, port=None, fork=None, fork_block_number=None): - # TODO We need to manaege smart-contracts/hardhat.config.ts + it also reads smart-contracts/.env via dotenv - # TODO Handle failures, e.g. 
if the process is already running we get exit value 1 and - # "Error: listen EADDRINUSE: address already in use 127.0.0.1:8545" - args = [os.path.join("node_modules", ".bin", "hardhat"), "node"] + \ - (["--hostname", hostname] if hostname else []) + \ - (["--port", str(port)] if port is not None else []) + \ - (["--fork", fork] if fork else []) + \ - (["--fork-block-number", str(fork_block_number)] if fork_block_number is not None else []) - return buildcmd(args, cwd=self.project.smart_contracts_dir) - - def compile_smart_contracts(self): - self.project.npx(["hardhat", "compile"], cwd=project_dir("smart-contracts"), pipe=False) - - def deploy_smart_contracts(self) -> Peggy2SmartContractAddresses: - # If this fails with tsyringe complaining about missing "../../build" directory, do this: - # rm -rf smart-contracts/artifacts. - res = self.project.npx(["hardhat", "run", "scripts/deploy_contracts.ts", "--network", "localhost"], - cwd=project_dir("smart-contracts")) - # Skip first line "No need to generate any newer types". This only works if the smart contracts have already - # been compiled, otherwise the output starts with 4 lines: - # Compiling 35 files with 0.5.16 - # Generating typings for: 36 artifacts in dir: build for target: ethers-v5 - # Successfully generated 65 typings! - # Compilation finished successfully - # With devtool, the compilation is performed automatically before invoking main() if the script is invoked - # via "npx hardhat run scripts/devenv.ts" instead of "npx ts-node scripts/devenv.ts", so normally this would - # not happen. - # TODO Suggested solution: pass a parameter to deploy_contracts.ts where it should write the output json file - stdout_lines = stdout(res).splitlines() - assert len(stdout_lines) == 2 - assert stdout_lines[0] == "No need to generate any newer typings." 
- tmp = json.loads(stdout_lines[1]) - return Peggy2SmartContractAddresses(cosmos_bridge=tmp["cosmosBridge"], bridge_bank=tmp["bridgeBank"], - bridge_registry=tmp["bridgeRegistry"], rowan=tmp["rowanContract"]) diff --git a/test/integration/framework/notes.md b/test/integration/framework/notes.md new file mode 100644 index 0000000000..00f0ef4d7c --- /dev/null +++ b/test/integration/framework/notes.md @@ -0,0 +1,161 @@ +# siftool + +Original design document: https://docs.google.com/document/d/1IhE2Y03Z48ROmTwO9-J_0x_lx2vIOFkyDFG7BkAIqCk/edit# + + +# Resources + +1. Docker setup in docker/ (currently only on future/peggy2 branch, Tim Lind): + +- setups two sifnode instances running independent chains + IBC relayer (ts-relayer) + +2. PoC (docker): https://github.com/Sifchain/sifchain-deploy/tree/feature/ibc-poc/docker/localnet/ibc + +3. Test environment for testing the new Sifchain public SDK (Caner): + +- https://docs.google.com/document/d/1MAlg-I0xMnUvbavAZdAN---WuqbyuRyKw-6Lfgfe130/edit +- https://github.com/sifchain/sifchain-ui/blob/3868ac7138c6c4149dced4ced5b36690e5fc1da7/ui/core/src/config/chains/index.ts#L1 +- https://github.com/Sifchain/sifchain-ui/blob/3868ac7138c6c4149dced4ced5b36690e5fc1da7/ui/core/src/config/chains/cosmoshub/index.ts + +4. scripts/init-multichain.sh (on future/peggy2 branch) + +5. https://github.com/Sifchain/sifnode/commit/9ab620e148be8f4850eef59d39b0e869956f87a4 + +6. sifchain-devops script to deploy TestNet (by \_IM): https://github.com/Sifchain/sifchain-devops/blob/main/scripts/testnet/launch.sh#L19 + +7. Tempnet scripts by chainops + +8. In Sifchain/sifnode/scripts there's init.sh which, if you have everything installed, will run a single node. Ping + @Brianosaurus for more info. + +9. erowan should be deployed and whitelisted (assumption) + +# RPC endpoints: + +e.g. 
SIFNODE="https://api-testnet.sifchain.finance" + +- $SIFNODE/node_info +- $SIFNODE/tokenregistry/entries + +# Peggy2 devenv + +- Directory: smart-contracts/scripts/src/devenv +- Init: cd smart-contracts; rm -rf node_modules; npm install (plan is to move to yarn eventually) +- Run: GOBIN=/home/anderson/go/bin npx hardhat run scripts/devenv.ts + +``` +{ + // vscode launch.json file to debug the Dev Environment Scripts + "version": "0.2.0", + "configurations": [ + { + "runtimeArgs": [ + "node_modules/.bin/hardhat", + "run" + ], + "cwd": "${workspaceFolder}/smart-contracts", + "type": "node", + "request": "launch", + "name": "Dev Environment Debugger", + "env": { + "GOBIN": "/home/anderson/go/bin" + }, + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/smart-contracts/scripts/devenv.ts", + } + ] +} +``` + +- Integration test to be targeted for PoC: test_eth_transfers.py +- Dependency diagram: https://files.slack.com/files-pri/T0187TWB4V8-F02BC477N79/sifchaindevenv.jpg + +# Standardized environment setup + +## Peggy1 - Tempnet on AWS + +chain_id = "mychain" // Parameter + +// Generate account with name 'sif' in the local keyring +mnemonic = generate_mnemonic() +exec("echo $mnemonic | sifnoded keys add --recover --keyring-backend test") +sif_admin = exec("sifnoded keys show sif -a --keyring-backend test") // sif1xxx... + +// Init the chain. This command creates files: +// ~/.sifnoded/config/node_key.json +// ~/.sifnoded/config/genesis.json +// ~/.sifnoded/config/priv_validator_key.json +// ~/.sifnoded/data/priv_validator_state.json +// and prints some JSON (what?) 
+exec("sifnoded init {moniker} --chain-id {chain_id}") + +// Add Genesis Accounts +exec("sifnoded add-genesis-account {sif_admin} --keyring-backend test 999999000000000000000000000rowan,500000000000000000000000catk,500000000000000000000000cbtk,500000000000000000000000ceth,990000000000000000000000000stake,500000000000000000000000cdash,500000000000000000000000clink") + +// Add Genesis CLP ADMIN sif +exec("sifnoded add-genesis-clp-admin ${sif_admin} --keyring-backend test") + +// Add Genesis CLP ADMIN sif +exec("sifnoded add-genesis-clp-admin ${sif_admin} --keyring-backend test") + +// Set Genesis whitelist admin ${SIF_WALLET} +exec("sifnoded set-genesis-whitelister-admin {sif_admin} --keyring-backend test") + +// Fund account (Genesis TX stake) +exec("sifnoded gentx {sif_admin} 1000000000000000000000000stake --keyring-backend test --chain-id {chain_id}") + +// Generate token json +sifnoded q tokenregistry generate -o json \ + --token_base_denom=cosmos \ + --token_ibc_counterparty_chain_id=${GAIA_CHAIN_ID} \ + --token_ibc_channel_id=$GAIA_CHANNEL_ID \ + --token_ibc_counterparty_channel_id=$GAIA_COUNTERPARTY_CHANNEL_ID \ + --token_ibc_counterparty_denom="" \ + --token_unit_denom="" \ + --token_decimals=6 \ + --token_display_name="COSMOS" \ + --token_external_symbol="cosmos" \ + --token_permission_clp=true \ + --token_permission_ibc_export=true \ + --token_permission_ibc_import=true | jq > gaia.json + +// Whitelist tokens +// printf "registering cosmos... \n" +sifnoded tx tokenregistry register gaia.json \ + --node tcp://${SIFNODE_P2P_HOSTNAME}:26657 \ + --chain-id $SIFCHAIN_ID \ + --from $SIF_WALLET \ + --keyring-backend test \ + --gas=500000 \ + --gas-prices=0.5rowan \ + -y + +// Deploy token registry +// Registering Tokens... +// Set Whitelist from denoms.json... 
+sifnoded set-gen-denom-whitelist DENOM.json + +## Peggy1 - integration tests + +// Parameters: validator moniker, validator mnemonic +valicator1_moniker, validator1_address, validator1_password, validator1_mnemonic = exec("sifgen create network ...") + +sifnoded_keys_add(validator1_moniker, validator1_password) // Test keyring +valoper = get_val_address(validator1_moniker) + +exec("sifnoded add-genesis-validators {valoper}") +exec("sifnoded add-geneeis-account {}") +exec("sifnoded set-genesis-oracle-admin {}") +exec("sifnoded set-denom-whitelist {}") + +## Coupled with the localnet framework + +The localnet test framework is located under `./test/localnet` within the same repository and offers some interesting features such as spinning up a bunch of IBC chains along with relayers and storing the states of the chains for later use for deterministic testing against various IBC flows. + +The `localnet` framework is supported by `siftool` and can be enabled by using the following environment variable `LOCALNET` set to `true` as follow: + +``` +LOCALNET=true siftool run-env +``` diff --git a/test/integration/framework/requirements.txt b/test/integration/framework/requirements.txt index d7f820d918..4fc508ceb3 100644 --- a/test/integration/framework/requirements.txt +++ b/test/integration/framework/requirements.txt @@ -1,4 +1,5 @@ -PyYAML==5.4.1 +grpcio-tools==1.44.0 pytest==6.2.5 +PyYAML==6.0 rusty-rlp==0.2.1 web3==5.25.0 diff --git a/test/integration/framework/siftool b/test/integration/framework/siftool index cefa5dd715..6f57256653 100755 --- a/test/integration/framework/siftool +++ b/test/integration/framework/siftool @@ -1,4 +1,4 @@ -#!/bin/python3 +#!/usr/bin/env python3 # This is an executable command-line frontend that makes sure we are running with a suitable Python virtual environment. # If the virtual environment does not exist yet, it is created on first use. 
@@ -13,9 +13,11 @@ import subprocess def execst(args, cwd=None): return subprocess.run(args, cwd=cwd, check=True, capture_output=False) + def get_basedir(): return os.path.abspath(os.path.join(os.path.normpath(os.path.dirname(__file__)))) + def init_venv(venv_dir, requirements_txt): execst(["python3", "-m", "venv", venv_dir]) venv_pip = os.path.join(venv_dir, "bin", "pip3") @@ -23,6 +25,7 @@ def init_venv(venv_dir, requirements_txt): execst([venv_pip, "install", "wheel"]) execst([venv_pip, "install", "-r", requirements_txt]) + def ensure_venv(venv_dir, requirements_txt, lock_file=None): def wrapped(): if not os.path.exists(venv_dir): @@ -38,6 +41,7 @@ def ensure_venv(venv_dir, requirements_txt, lock_file=None): else: wrapped() + def load_main_module(): base_dir = get_basedir() project_root = os.path.abspath(os.path.join(os.path.normpath(os.path.join(base_dir, *([os.path.pardir] * 3))))) @@ -47,16 +51,21 @@ def load_main_module(): ensure_venv(venv_dir, requirements_txt, lock_file=lock_file) venv_lib_dir = glob.glob(os.path.join(venv_dir, "lib", "python3.*"))[0] - sys.path = sys.path + [ + paths = [ + os.path.join(base_dir, "src"), + os.path.join(base_dir, "build/generated"), os.path.join(venv_lib_dir, "site-packages"), - os.path.join(project_root, "test", "integration"), # For running integration tests in-process ] - import main as main_module - return main_module + paths_to_add = [p for p in paths if not any(os.path.realpath(p) == os.path.realpath(s) for s in sys.path)] + sys.path.extend(paths_to_add) + import siftool.main as siftool_main + return siftool_main + def main(argv): main_module = load_main_module() main_module.main(argv) + if __name__ == "__main__": main(sys.argv[1:]) diff --git a/test/integration/framework/command.py b/test/integration/framework/src/siftool/command.py similarity index 64% rename from test/integration/framework/command.py rename to test/integration/framework/src/siftool/command.py index ece1f2fed7..7634676316 100644 --- 
a/test/integration/framework/command.py +++ b/test/integration/framework/src/siftool/command.py @@ -1,9 +1,13 @@ import shutil import time -from common import * +from typing import Mapping, List, Union, Optional +from siftool.common import * +ExecArgs = Mapping[str, Union[List[str], str, Mapping[str, str]]] -def buildcmd(args, cwd=None, env=None): + +def buildcmd(args: Optional[str] = None, cwd: Optional[str] = None, env: Optional[Mapping[str, Optional[str]]] = None +) -> ExecArgs: return dict((("args", args),) + ((("cwd", cwd),) if cwd is not None else ()) + ((("env", env),) if env is not None else ()) @@ -28,20 +32,26 @@ def execst(self, args, cwd=None, env=None, stdin=None, binary=False, pipe=True, return proc.returncode, stdout_data, stderr_data # Default implementation of popen for environemnts to start long-lived processes - def popen(self, args, log_file=None, **kwargs): + def popen(self, args, log_file=None, **kwargs) -> subprocess.Popen: stdout = log_file or None stderr = log_file or None return popen(args, stdout=stdout, stderr=stderr, **kwargs) # Starts a process asynchronously (for sifnoded, hardhat, ebrelayer etc.) # The arguments should correspond to what buildcmd() returns. 
- def spawn_asynchronous_process(self, exec_args, log_file=None): + def spawn_asynchronous_process(self, exec_args: ExecArgs, log_file=None) -> subprocess.Popen: return self.popen(**exec_args, log_file=log_file) + def ls(self, path): + return os.listdir(path) + def rm(self, path): if os.path.exists(path): os.remove(path) + def mv(self, src, dst): + os.rename(src, dst) + def read_text_file(self, path): with open(path, "rt") as f: return f.read() # TODO Convert to exec @@ -70,14 +80,37 @@ def copy_file(self, src, dst): def exists(self, path): return os.path.exists(path) + def is_dir(self, path): + return os.path.isdir(path) if self.exists(path) else False + + def find_files(self, path, filter=None): + items = [os.path.join(path, name) for name in self.ls(path)] + result = [] + for i in items: + if self.is_dir(i): + result.extend(self.find_files(i)) + else: + if (filter is None) or filter(i): + result.append(i) + return result + def get_user_home(self, *paths): return os.path.join(os.environ["HOME"], *paths) - def mktempdir(self): - return exactly_one(stdout_lines(self.execst(["mktemp", "-d"]))) + def mktempdir(self, parent_dir=None): + args = ["mktemp", "-d"] + (["-p", parent_dir] if parent_dir else []) + return exactly_one(stdout_lines(self.execst(args))) + + def mktempfile(self, parent_dir=None): + args = ["mktemp"] + (["-p", parent_dir] if parent_dir else []) + return exactly_one(stdout_lines(self.execst(args))) + + def chmod(self, path, mode_str, recursive=False): + args = ["chmod"] + (["-R"] if recursive else []) + [mode_str, path] + self.execst(args) - def mktempfile(self): - return exactly_one(stdout_lines(self.execst(["mktemp"]))) + def pwd(self): + return exactly_one(stdout_lines(self.execst(["pwd"]))) def __tar_compression_option(self, tarfile): filename = os.path.basename(tarfile).lower() @@ -91,7 +124,7 @@ def __tar_compression_option(self, tarfile): def tar_create(self, path, tarfile): comp = self.__tar_compression_option(tarfile) # tar on 9p filesystem
reports "file shrank by ... bytes" and exits with errorcode 1 - tar_quirks = True + tar_quirks = False if tar_quirks: tmpdir = self.mktempdir() try: @@ -115,3 +148,13 @@ def wait_for_file(self, path): def tcp_probe_connect(self, host, port): res = self.execst(["nc", "-z", host, str(port)], check_exit=False) return res[0] == 0 + + def sha1_of_file(self, path): + res = self.execst(["sha1sum", "-b", path]) + return stdout_lines(res)[0][:40] + + def download_url(self, url, output_file=None, output_dir=None): + args = ["curl", "--location", "--silent", "--show-error", url] + \ + (["-O"] if not (output_dir or output_file) else []) + \ + (["-o", output_file] if (output_file and not output_dir) else []) + self.execst(args, cwd=output_dir) diff --git a/test/integration/framework/common.py b/test/integration/framework/src/siftool/common.py similarity index 83% rename from test/integration/framework/common.py rename to test/integration/framework/src/siftool/common.py index c34f5f67fa..9b2fb76df7 100644 --- a/test/integration/framework/common.py +++ b/test/integration/framework/src/siftool/common.py @@ -6,7 +6,7 @@ import random import yaml import urllib.request - +from typing import Optional, Mapping, Sequence, IO, Union log = logging.getLogger(__name__) @@ -49,7 +49,7 @@ def random_string(length): return "".join([chars[random.randrange(len(chars))] for _ in range(length)]) def project_dir(*paths): - return os.path.abspath(os.path.join(os.path.normpath(os.path.join(os.path.dirname(__file__), *([os.path.pardir]*3))), *paths)) + return os.path.abspath(os.path.join(os.path.normpath(os.path.join(os.path.dirname(__file__), *([os.path.pardir]*5))), *paths)) def yaml_load(s): return yaml.load(s, Loader=yaml.SafeLoader) @@ -76,10 +76,13 @@ def mkcmd(args, env=None, cwd=None, stdin=None): # stdin will always be redirected to the returned process' stdin. # If pipe, the stdout and stderr will be redirected and available as stdout and stderr of the returned object. 
# If not pipe, the stdout and stderr will not be redirected and will inherit sys.stdout and sys.stderr. -def popen(args, cwd=None, env=None, text=None, stdin=None, stdout=None, stderr=None): +def popen(args: Sequence[str], cwd: Optional[str] = None, env: Optional[Mapping[str, str]] = None, + text: Optional[bool] = None, stdin: Union[str, int, IO, None] = None, stdout: Optional[IO] = None, + stderr: Optional[IO] = None +) -> subprocess.Popen: if env: env = dict_merge(os.environ, env) - logging.debug(f"popen(): args={repr(args)}, cwd={repr(cwd)}") + log.debug(f"popen(): args={repr(args)}, cwd={repr(cwd)}") return subprocess.Popen(args, cwd=cwd, env=env, stdin=stdin, stdout=stdout, stderr=stderr, text=text) def dict_merge(*dicts, override=True): @@ -90,6 +93,9 @@ def dict_merge(*dicts, override=True): result[k] = v return result +def flatten_list(l): + return [item for sublist in l for item in sublist] + def format_as_shell_env_vars(env, export=True): # TODO escaping/quoting, e.g. shlex.quote(v) return ["{}{}=\"{}\"".format("export " if export else "", k, v) for k, v in env.items()] @@ -102,6 +108,7 @@ def basic_logging_setup(): logging.getLogger("eth").setLevel(logging.WARNING) logging.getLogger("websockets").setLevel(logging.WARNING) logging.getLogger("web3").setLevel(logging.WARNING) + logging.getLogger("asyncio").setLevel(logging.WARNING) # Recursively transforms template strings containing "${VALUE}". 
Example: # >>> template_transform("You are ${what}!", {"what": "${how} late", "how": "very"}) @@ -117,3 +124,5 @@ def template_transform(s, d): on_peggy2_branch = not os.path.exists(project_dir("smart-contracts", "truffle-config.js")) + +in_github_ci = (os.environ.get("CI") == "true") and os.environ.get("GITHUB_REPOSITORY") and os.environ.get("GITHUB_RUN_ID") diff --git a/test/integration/framework/cosmos.py b/test/integration/framework/src/siftool/cosmos.py similarity index 51% rename from test/integration/framework/cosmos.py rename to test/integration/framework/src/siftool/cosmos.py index b78eb29966..9790205c5c 100644 --- a/test/integration/framework/cosmos.py +++ b/test/integration/framework/src/siftool/cosmos.py @@ -1,7 +1,76 @@ -from common import * +from typing import Union, Iterable, Mapping, List +from siftool.common import * akash_binary = "akash" +LegacyBalance = List[List[Union[int, str]]] # e.g. [[3, "rowan"], [2, "ibc/xxxxx"]] +Balance = Mapping[str, int] +CompatBalance = Union[LegacyBalance, Balance] +Address = str + + +def balance_normalize(bal: CompatBalance = None) -> Balance: + if type(bal) == list: + bal = dict(((k, v) for v, k in bal)) + elif type(bal) == dict: + pass + else: + assert False, "Balances should be either a dict or a list" + return {k: v for k, v in bal.items() if v != 0} + + +def balance_add(*bal: Balance) -> Balance: + result = {} + all_denoms = set(flatten_list([[*b.keys()] for b in bal])) + for denom in all_denoms: + val = sum(b.get(denom, 0) for b in bal) + if val != 0: + result[denom] = val + return result + + +def balance_mul(bal: Balance, multiplier: Union[int, float]) -> Balance: + result = {} + for denom, value in bal.items(): + val = value * multiplier + if val != 0: + result[denom] = val + return result + + +def balance_neg(bal: Balance) -> Balance: + return {k: -v for k, v in bal.items()} + + +def balance_sub(bal1: Balance, *bal2: Balance) -> Balance: + return balance_add(bal1, *[balance_neg(b) for b in bal2]) + + +def 
balance_zero(bal: Balance) -> bool: + return len(bal) == 0 + + +def balance_equal(bal1: Balance, bal2: Balance) -> bool: + return balance_zero(balance_sub(bal1, bal2)) + + +def balance_format(bal: Balance) -> str: + return ",".join("{}{}".format(v, k) for k, v in bal.items()) + + +def balance_exceeds(bal: Balance, min_changes: Balance) -> bool: + have_all = True + for denom, required_value in min_changes.items(): + actual_value = bal.get(denom, 0) + if required_value < 0: + have_all &= actual_value <= required_value + elif required_value > 0: + have_all &= actual_value >= required_value + else: + assert False + return have_all + + # # This is for Akash, but might be useful for other cosmos-based chains as well. (If not, it should be moved to separate diff --git a/test/integration/framework/eth.py b/test/integration/framework/src/siftool/eth.py similarity index 85% rename from test/integration/framework/eth.py rename to test/integration/framework/src/siftool/eth.py index a552e88b59..70a778d124 100644 --- a/test/integration/framework/eth.py +++ b/test/integration/framework/src/siftool/eth.py @@ -1,14 +1,19 @@ import logging import time import web3 +import eth_typing +from hexbytes import HexBytes +from web3.types import TxReceipt +from typing import NewType, Sequence -from common import * +from siftool.common import * ETH = 10**18 GWEI = 10**9 NULL_ADDRESS = "0x0000000000000000000000000000000000000000" MIN_TX_GAS = 21000 +Address = eth_typing.AnyAddress log = logging.getLogger(__name__) @@ -25,6 +30,34 @@ def web3_connect(url, websocket_timeout=None): kwargs["websocket_timeout"] = websocket_timeout return web3.Web3(web3.Web3.WebsocketProvider(url, **kwargs)) +def web3_wait_for_connection_up(url, polling_time=1, timeout=90): + start_time = time.time() + w3_conn = web3_connect(url) + while True: + try: + w3_conn.eth.block_number + return w3_conn + except OSError: + pass + now = time.time() + if now - start_time > timeout: + raise Exception(f"Timeout when trying to 
connect to {url}") + time.sleep(polling_time) + +def validate_address_and_private_key(addr, private_key): + a = web3.Web3().eth.account + addr = web3.Web3.toChecksumAddress(addr) if addr else None + if private_key: + match_hex = re.match("^(0x)?([0-9a-fA-F]{64})$", private_key) + private_key = match_hex[2].lower() if match_hex else _mnemonic_to_private_key(private_key) + account = a.from_key(private_key) + addr = addr or account.address + assert addr == account.address, "Address does not correspond to private key" + assert (not private_key.startswith("0x")) and (private_key == private_key.lower()), "Private key must be in lowercase hex without '0x' prefix" + else: + private_key = None + assert addr + return addr, private_key class EthereumTxWrapper: """ @@ -37,7 +70,7 @@ class EthereumTxWrapper: """ def __init__(self, w3_conn, is_local_node): - self.w3_conn = w3_conn + self.w3_conn: web3.Web3 = w3_conn self.use_eip_1559 = True self.private_keys = {} self.default_timeout = 600 @@ -51,20 +84,26 @@ def __init__(self, w3_conn, is_local_node): self.gas_estimate_fn = None self.used_tx_nonces = {} + # These are only set in get_env_ctx_peggy2(), otherwise they are undefined. 
+ # self.cross_chain_fee_base = None + # self.cross_chain_lock_fee = None + # self.cross_chain_burn_fee = None + # self.ethereum_network_descriptor = None + def _get_private_key(self, addr): - addr = self.w3_conn.toChecksumAddress(addr) - if not addr in self.private_keys: + addr = web3.Web3.toChecksumAddress(addr) + if addr not in self.private_keys: raise Exception(f"No private key set for address {addr}") return self.private_keys[addr] def set_private_key(self, addr, private_key): - addr = self.w3_conn.toChecksumAddress(addr) + a = web3.Web3().eth.account + addr = web3.Web3.toChecksumAddress(addr) if private_key is None: self.private_keys.pop(addr) # Remove else: - assert (not private_key.startswith("0x")) and (private_key == private_key.lower()), "Private key must be in lowercase hex without '0x' prefix" - check_addr = self.w3_conn.eth.account.from_key(private_key).address - assert check_addr == addr, f"Private key does not correspond to given address {addr}" + assert re.match("^([0-9a-f]{64})$", private_key) + assert addr == a.from_key(private_key).address, f"Private key does not correspond to given address {addr}" self.private_keys[addr] = private_key if self.is_local_node: # existing_accounts = self.w3_conn.geth.personal.list_accounts() @@ -208,7 +247,16 @@ def _send_raw_transaction(self, smart_contract_call_obj, from_addr, tx_opts=None txhash = self.w3_conn.eth.send_raw_transaction(signed_tx.rawTransaction) return txhash - def wait_for_transaction_receipt(self, txhash, sleep_time=5, timeout=None): + def wait_for_all_transaction_receipts(self, tx_hashes: Sequence[HexBytes], sleep_time: int = 5, + timeout: Union[int, None] = None + ) -> Sequence[TxReceipt]: + result = [] + for txhash in tx_hashes: + txrcpt = self.wait_for_transaction_receipt(txhash, sleep_time=sleep_time, timeout=timeout) + result.append(txrcpt) + return result + + def wait_for_transaction_receipt(self, txhash, sleep_time=5, timeout=None) -> TxReceipt: return 
self.w3_conn.eth.wait_for_transaction_receipt(txhash, timeout=timeout, poll_latency=sleep_time) def transact_sync(self, smart_contract_function, eth_addr, tx_opts=None, timeout=None): @@ -227,7 +275,7 @@ def wrapped_fn(*args, **kwargs): return txhash return wrapped_fn - def send_eth(self, from_addr, to_addr, amount): + def send_eth(self, from_addr: str, to_addr: str, amount: int): log.info(f"Sending {amount} wei from {from_addr} to {to_addr}...") tx = {"to": to_addr, "value": amount} txhash = self._send_raw_transaction(None, from_addr, tx) @@ -357,3 +405,15 @@ def estimate_fees(self, tx): @staticmethod def estimate_gas_price(): return 0 + + +__web3_enabled_unaudited_hdwallet_features = False + +# https://stackoverflow.com/questions/68050645/how-to-create-a-web3py-account-using-mnemonic-phrase +def _mnemonic_to_private_key(mnemonic, derivation_path="m/44'/60'/0'/0/0"): + a = web3.Web3().eth.account + global __web3_enabled_unaudited_hdwallet_features + if not __web3_enabled_unaudited_hdwallet_features: + a.enable_unaudited_hdwallet_features() + __web3_enabled_unaudited_hdwallet_features = True + return a.from_mnemonic(mnemonic, account_path=derivation_path).privateKey.hex()[2:] diff --git a/test/integration/framework/geth.py b/test/integration/framework/src/siftool/geth.py similarity index 99% rename from test/integration/framework/geth.py rename to test/integration/framework/src/siftool/geth.py index 6aea4e7216..05917cde8e 100644 --- a/test/integration/framework/geth.py +++ b/test/integration/framework/src/siftool/geth.py @@ -1,6 +1,6 @@ import json import re -from common import * +from siftool.common import * def js_fmt(str, *params): diff --git a/test/integration/framework/src/siftool/hardhat.py b/test/integration/framework/src/siftool/hardhat.py new file mode 100644 index 0000000000..918ada7e03 --- /dev/null +++ b/test/integration/framework/src/siftool/hardhat.py @@ -0,0 +1,119 @@ +import json +import web3 +from siftool.common import * +from siftool.command 
import buildcmd + + +class Hardhat: + def __init__(self, cmd): + self.cmd = cmd + self.project = cmd.project + + def build_start_args(self, hostname=None, port=None, fork=None, fork_block_number=None): + # TODO We need to manaege smart-contracts/hardhat.config.ts + it also reads smart-contracts/.env via dotenv + # TODO Handle failures, e.g. if the process is already running we get exit value 1 and + # "Error: listen EADDRINUSE: address already in use 127.0.0.1:8545" + args = [os.path.join("node_modules", ".bin", "hardhat"), "node"] + \ + (["--hostname", hostname] if hostname else []) + \ + (["--port", str(port)] if port is not None else []) + \ + (["--fork", fork] if fork else []) + \ + (["--fork-block-number", str(fork_block_number)] if fork_block_number is not None else []) + return buildcmd(args, cwd=self.project.smart_contracts_dir) + + def compile_smart_contracts(self): + # Creates: + # smart-contracts/artifacts + # smart-contracts/build + # smart-contracts/cache + self.project.npx(["hardhat", "compile"], cwd=project_dir("smart-contracts"), pipe=False) + + def deploy_smart_contracts(self): + # If this fails with tsyringe complaining about missing "../../build" directory, do this: + # rm -rf smart-contracts/artifacts. + res = self.project.npx(["hardhat", "run", "scripts/deploy_contracts.ts", "--network", "localhost"], + cwd=project_dir("smart-contracts")) + # Skip first line "No need to generate any newer types". This only works if the smart contracts have already + # been compiled, otherwise the output starts with 4 lines: + # Compiling 35 files with 0.5.16 + # Generating typings for: 36 artifacts in dir: build for target: ethers-v5 + # Successfully generated 65 typings! + # Compilation finished successfully + # With devtool, the compilation is performed automatically before invoking main() if the script is invoked + # via "npx hardhat run scripts/devenv.ts" instead of "npx ts-node scripts/devenv.ts", so normally this would + # not happen. 
+ # TODO Suggested solution: pass a parameter to deploy_contracts.ts where it should write the output json file + stdout_lines = stdout(res).splitlines() + assert len(stdout_lines) == 2 + assert stdout_lines[0] == "No need to generate any newer typings." + tmp = json.loads(stdout_lines[1]) + return { + "BridgeBank": tmp["bridgeBank"], + "BridgeRegistry": tmp["bridgeRegistry"], + "CosmosBridge": tmp["cosmosBridge"], + "Rowan": tmp["rowanContract"], + } + + +class HardhatAbiProvider: + def __init__(self, cmd, deployed_contract_addresses): + self.cmd = cmd + self.deployed_contract_addresses = deployed_contract_addresses + + def get_descriptor(self, sc_name): + relpath = { + "BridgeBank": ["BridgeBank"], + "BridgeToken": ["BridgeBank"], + "CosmosBridge": [], + "Rowan": ["BridgeBank"], + "TrollToken": ["Mocks"], + "FailHardToken": ["Mocks"], + "UnicodeToken": ["Mocks"], + "CommissionToken": ["Mocks"], + "RandomTrollToken": ["Mocks"], + }.get(sc_name, []) + [f"{sc_name}.sol", f"{sc_name}.json"] + path = os.path.join(self.cmd.project.project_dir("smart-contracts/artifacts/contracts"), *relpath) + tmp = json.loads(self.cmd.read_text_file(path)) + abi = tmp["abi"] + bytecode = tmp["bytecode"] + deployed_address = self.deployed_contract_addresses.get(sc_name) + return abi, bytecode, deployed_address + + +def default_accounts(): + # Hardhat doesn't provide a way to get the private keys of its default accounts, so just hardcode them for now. 
+ # TODO hardhat prints 20 accounts upon startup + # Keep synced to smart-contracts/src/devenv/hardhatNode.ts:defaultHardhatAccounts + # Format: [address, private_key] + # Note: for compatibility with ganache, private keys should be stripped of "0x" prefix + # (when you pass a private key to ebrelayer via ETHEREUM_PRIVATE_KEY, the key is treated as invalid) + return [[web3.Web3.toChecksumAddress(address), private_key] for address, private_key in [[ + "0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266", + "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80", + ], [ + "0x70997970c51812dc3a010c7d01b50e0d17dc79c8", + "59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d", + ], [ + "0x3c44cdddb6a900fa2b585dd299e03d12fa4293bc", + "5de4111afa1a4b94908f83103eb1f1706367c2e68ca870fc3fb9a804cdab365a", + ], [ + "0x90f79bf6eb2c4f870365e785982e1f101e93b906", + "7c852118294e51e653712a81e05800f419141751be58f605c371e15141b007a6", + ], [ + "0x15d34aaf54267db7d7c367839aaf71a00a2c6a65", + "47e179ec197488593b187f80a00eb0da91f1b9d0b13f8733639f19c30a34926a", + ], [ + "0x9965507d1a55bcc2695c58ba16fb37d819b0a4dc", + "8b3a350cf5c34c9194ca85829a2df0ec3153be0318b5e2d3348e872092edffba", + ], [ + "0x976ea74026e726554db657fa54763abd0c3a0aa9", + "92db14e403b83dfe3df233f83dfa3a0d7096f21ca9b0d6d6b8d88b2b4ec1564e", + ], [ + "0x14dc79964da2c08b23698b3d3cc7ca32193d9955", + "4bbbf85ce3377467afe5d46f804f221813b2bb87f24d81f60f1fcdbf7cbf4356", + ], [ + "0x23618e81e3f5cdf7f54c3d65f7fbc0abf5b21e8f", + "dbda1821b80551c9d65939329250298aa3472ba22feea921c0cf5d620ea67b97", + ], [ + "0xa0ee7a142d267c1f36714e4a8f75612f20a79720", + "2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6", + ]]] diff --git a/test/integration/framework/ibc_transfer_test_tool.py b/test/integration/framework/src/siftool/ibc_transfer_test_tool.py similarity index 99% rename from test/integration/framework/ibc_transfer_test_tool.py rename to test/integration/framework/src/siftool/ibc_transfer_test_tool.py 
index fda8539016..7ac7a34f7b 100644 --- a/test/integration/framework/ibc_transfer_test_tool.py +++ b/test/integration/framework/src/siftool/ibc_transfer_test_tool.py @@ -8,7 +8,7 @@ import json import sys -from command import Command +from siftool.command import Command chains = { "akash": {"binary": "akash", "relayer": "ibc"}, diff --git a/test/integration/framework/inflate_tokens.py b/test/integration/framework/src/siftool/inflate_tokens.py similarity index 56% rename from test/integration/framework/inflate_tokens.py rename to test/integration/framework/src/siftool/inflate_tokens.py index 0b5ef47d51..0288d54c11 100644 --- a/test/integration/framework/inflate_tokens.py +++ b/test/integration/framework/src/siftool/inflate_tokens.py @@ -6,8 +6,8 @@ import logging import re -import test_utils -from common import * +from siftool import eth, test_utils +from siftool.common import * log = logging.getLogger(__name__) @@ -16,10 +16,26 @@ class InflateTokens: def __init__(self, ctx): self.ctx = ctx self.wait_for_account_change_timeout = 1800 # For Ropsten we need to wait for 50 blocks i.e. ~20 mins + self.excluded_token_symbols = ["erowan"] + + # Only transfer this tokens in a batch for Peggy1. See #2397. You would need to adjust this if + # test_inflate_tokens_short is passing, but test_inflate_tokens_long is timing out. It only applies to Peggy 1. + # The value of 3 is experimental; if tokens are still not getting across the bridge reliably, reduce the value + # down to 1 (minimum). The lower the value the more time the transfers will take as there will be more + # sequential transfers instead of parallel. + self.max_ethereum_batch_size = 0 + + # Firing transactions with "sifnoded tx bank send" in rapid succession does not work. 
This is currently a + # known limitation of Cosmos SDK, see https://github.com/cosmos/cosmos-sdk/issues/4186 + # Instead, we take advantage of batching multiple denoms to single account with single send command (amounts + # separated by by comma: "sifnoded tx bank send ... 100denoma,100denomb,100denomc") and wait for destination + # account to show changes for all denoms after each send. But also batches don't work reliably if they are too + # big, so we limit the maximum batch size here. + self.max_sifnoded_batch_size = 5 def get_whitelisted_tokens(self): whitelist = self.ctx.get_whitelisted_tokens_from_bridge_bank_past_events() - ibc_pattern = re.compile("^ibc\/([0-9a-fA-F]{64})$") + ibc_pattern = re.compile("^ibc/([0-9a-fA-F]{64})$") result = [] for token_addr, value in whitelist.items(): token_data = self.ctx.get_generic_erc20_token_data(token_addr) @@ -39,7 +55,9 @@ def get_whitelisted_tokens(self): assert token_symbol not in result, f"Symbol {token_symbol} is being used by more than one whitelisted token" result.append(token) erowan_token = [t for t in result if t["symbol"] == "erowan"] - assert len(erowan_token) == 1, "erowan is not whitelisted" + # These assertions are broken in Tempnet, possibly indicating missing/incomplete chain init, see README.md + # for comparision of steps + assert len(erowan_token) == 1, "erowan is not whitelisted, probably bad/incomplete deployment" assert erowan_token[0]["is_whitelisted"], "erowan is un-whitelisted" return result @@ -65,30 +83,15 @@ def build_list_of_tokens_to_create(self, existing_tokens, requested_tokens): # their addresses appear in BridgeBank's past events implies that the corresponding ERC20 smart contracts have # been deployed, hence there is no need to deploy them. - # This assumes that requested token symbols are in sifchain format (c-prefixed, i.e. "cusdt", "csushi" etc.). - # There can also be "ceth" and "rowan" in this list, which we ignore as they represent special cases. 
- # To compare it to entries on existing_whitelist, we need to prefix entries on existing_whitelist with "c". - # TODO It would be better if the requested tokens didn't have "c" prefixes. For now we keep it for - # compatibility. Ask people who use this script. - token_symbols_to_skip = set() - token_symbols_to_skip.add(test_utils.CETH) # ceth is special since we can't just mint it or create an ERC20 contract for it - token_symbols_to_skip.add(test_utils.ROWAN) - tokens_to_create = [] # = requested - existing - {rowan, ceth} + tokens_to_create = [] for token in requested_tokens: token_symbol = token["symbol"] - if (token_symbol == test_utils.CETH) or (token_symbol == test_utils.ROWAN): + if token_symbol in self.excluded_token_symbols: assert False, f"Token {token_symbol} cannot be used by this procedure, please remove it from list of requested assets" - if not token_symbol.startswith("c"): - assert False, f"Token {token_symbol} is invalid - should start with 'c'" - eth_token_symbol = token_symbol[1:] # Strip "c", e.g. 
"cusdt" -> "usdt" - existing_token = zero_or_one(find_by_value(existing_tokens, "symbol", eth_token_symbol)) + existing_token = zero_or_one(find_by_value(existing_tokens, "symbol", token_symbol)) if existing_token is None: - tokens_to_create.append({ - "name": token["name"], - "symbol": eth_token_symbol, - "decimals": token["decimals"], - }) + tokens_to_create.append(token) else: if not all([existing_token[f] == token[f] for f in ["name", "decimals"]]): assert False, "Existing token's name/decimals does not match requested for token: " \ @@ -100,23 +103,12 @@ def build_list_of_tokens_to_create(self, existing_tokens, requested_tokens): return tokens_to_create def create_new_tokens(self, tokens_to_create): - amount_in_token_units = 0 pending_txs = [] for token in tokens_to_create: token_name = token["name"] token_symbol = token["symbol"] token_decimals = token["decimals"] log.info(f"Creating token {token_symbol}...") - amount = amount_in_token_units * (10**token_decimals) - # Deploy a SifchainTestToken - # call BridgeBank.updateEthWhiteList with its address - # Mint amount_in_token_units to operator_address - # Approve entire minted amount to BridgeBank - # TODO We don't really need create_new_currency here, we only need to deploy the smart contract - # since we do the minting and approval in next step (token_refresh). 
- - # token_addr = self.ctx.create_new_currency(token_symbol, token_name, token_decimals, amount, minted_tokens_recipient) - txhash = self.ctx.tx_deploy_new_generic_erc20_token(self.ctx.operator, token_name, token_symbol, token_decimals) pending_txs.append(txhash) @@ -140,8 +132,9 @@ def create_new_tokens(self, tokens_to_create): "is_whitelisted": True, "sif_denom": self.ctx.eth_symbol_to_sif_symbol(token_symbol), }) - txhash = self.ctx.tx_update_bridge_bank_whitelist(token_sc.address, True) - pending_txs.append(txhash) + if not on_peggy2_branch: + txhash = self.ctx.tx_update_bridge_bank_whitelist(token_sc.address, True) + pending_txs.append(txhash) self.wait_for_all(pending_txs) return new_tokens @@ -156,45 +149,61 @@ def mint(self, list_of_tokens_addrs, amount_in_tokens, mint_recipient): pending_txs.append(txhash) self.wait_for_all(pending_txs) - def approve_and_lock(self, token_addr_list, eth_addr, to_sif_addr, amount): + def transfer_from_eth_to_sifnode(self, from_eth_addr, to_sif_addr, tokens_to_transfer, amount_in_tokens, amount_eth_gwei): + sif_balances_before = self.ctx.get_sifchain_balance(to_sif_addr) + sent_amounts = [] pending_txs = [] - for token_addr in token_addr_list: + for token in tokens_to_transfer: + token_addr = token["address"] + decimals = token["decimals"] token_sc = self.ctx.get_generic_erc20_sc(token_addr) - pending_txs.extend(self.ctx.tx_approve_and_lock(token_sc, eth_addr, to_sif_addr, amount)) - return self.wait_for_all(pending_txs) - - def transfer_from_eth_to_sifnode(self, eth_addr, sif_addr, tokens_to_transfer, amount): - sif_balances_before = self.ctx.get_sifchain_balance(sif_addr) - self.approve_and_lock([t["address"] for t in tokens_to_transfer], eth_addr, sif_addr, amount) + amount = amount_in_tokens * 10**decimals + pending_txs.extend(self.ctx.tx_approve_and_lock(token_sc, from_eth_addr, to_sif_addr, amount)) + sent_amounts.append([amount, token["sif_denom"]]) + if amount_eth_gwei > 0: + amount = amount_eth_gwei * eth.GWEI + 
pending_txs.append(self.ctx.tx_bridge_bank_lock_eth(from_eth_addr, to_sif_addr, amount)) + sent_amounts.append([amount, self.ctx.ceth_symbol]) + self.wait_for_all(pending_txs) + log.info("{} Ethereum transactions commited: {}".format(len(pending_txs), repr(sent_amounts))) # Wait for intermediate_sif_account to receive all funds across the bridge + previous_block = self.ctx.eth.w3_conn.eth.block_number self.ctx.advance_blocks() - send_amounts = [[amount, t["sif_denom"]] for t in tokens_to_transfer] - self.ctx.wait_for_sif_balance_change(sif_addr, sif_balances_before, - min_changes=send_amounts, timeout=self.wait_for_account_change_timeout) - - def distribute_tokens_to_wallets(self, from_sif_account, tokens_to_transfer, amount, target_sif_accounts): - # Distribute from intermediate_sif_account to each individual account - # Note: firing transactions with "sifnoded tx bank send" in rapid succession does not work. This is currently a - # known limitation of Cosmos SDK, see https://github.com/cosmos/cosmos-sdk/issues/4186 - # Instead, we take advantage of batching multiple denoms to single account with single send command (amounts - # separated by by comma: "sifnoded tx bank send ... 100denoma,100denomb,100denomc") and wait for destination - # account to show changes for all denoms after each send. 
- send_amounts = [[amount, t["sif_denom"]] for t in tokens_to_transfer] + log.info("Ethereum blocks advanced by {}".format(self.ctx.eth.w3_conn.eth.block_number - previous_block)) + self.ctx.wait_for_sif_balance_change(to_sif_addr, sif_balances_before, min_changes=sent_amounts, + polling_time=2, timeout=None, change_timeout=self.wait_for_account_change_timeout) + + # Distributes from intermediate_sif_account to each individual account + def distribute_tokens_to_wallets(self, from_sif_account, tokens_to_transfer, amount_in_tokens, target_sif_accounts, amount_eth_gwei): + send_amounts = [[amount_in_tokens * 10**t["decimals"], t["sif_denom"]] for t in tokens_to_transfer] + if amount_eth_gwei > 0: + send_amounts.append([amount_eth_gwei * eth.GWEI, self.ctx.ceth_symbol]) + progress_total = len(target_sif_accounts) * len(send_amounts) + progress_current = 0 for sif_acct in target_sif_accounts: - sif_balance_before = self.ctx.get_sifchain_balance(sif_acct) - self.ctx.send_from_sifchain_to_sifchain(from_sif_account, sif_acct, send_amounts) - self.ctx.wait_for_sif_balance_change(sif_acct, sif_balance_before, min_changes=send_amounts) + remaining = send_amounts + while remaining: + batch_size = len(remaining) + if (self.max_sifnoded_batch_size > 0) and (batch_size > self.max_sifnoded_batch_size): + batch_size = self.max_sifnoded_batch_size + batch = remaining[:batch_size] + remaining = remaining[batch_size:] + sif_balance_before = self.ctx.get_sifchain_balance(sif_acct) + self.ctx.send_from_sifchain_to_sifchain(from_sif_account, sif_acct, batch) + self.ctx.wait_for_sif_balance_change(sif_acct, sif_balance_before, min_changes=batch, + polling_time=2, timeout=None, change_timeout=self.wait_for_account_change_timeout) + progress_current += batch_size + log.debug("Distributing tokens to wallets: {:0.0f}% done".format((progress_current/progress_total) * 100)) def export(self): - excluded = ["erowan"] return [{ "symbol": token["symbol"], "name": token["name"], "decimals": 
token["decimals"] - } for token in self.get_whitelisted_tokens() if ("ibc" not in token) and (token["symbol"] not in excluded)] + } for token in self.get_whitelisted_tokens() if ("ibc" not in token) and (token["symbol"] not in self.excluded_token_symbols)] - def transfer(self, requested_tokens, amount, target_sif_accounts): + def transfer(self, requested_tokens, token_amount, target_sif_accounts, eth_amount_gwei): """ It goes like this: 1. Starting with assets.json of your choice, It will first compare the list of tokens to existing whitelist and deploy any new tokens (ones that have not yet been whitelisted) @@ -207,11 +216,24 @@ def transfer(self, requested_tokens, amount, target_sif_accounts): # TODO Add support for "ceth" and "rowan" - amount_per_token = amount * len(target_sif_accounts) - fund_rowan = [5 * test_utils.sifnode_funds_for_transfer_peggy1, "rowan"] + n_accounts = len(target_sif_accounts) + total_token_amount = token_amount * n_accounts + total_eth_amount_gwei = eth_amount_gwei * n_accounts + + # Calculate how much rowan we need to fund intermediate account with. This is only an estimation at this point. + # We need to take into account that we might need to break transfers in batches. The number of tokens is the + # number of ERC20 tokens plus one for ETH, rounded up. 
5 is a safety factor + number_of_batches = 1 if self.max_sifnoded_batch_size == 0 else (len(requested_tokens) + 1) // self.max_sifnoded_batch_size + 1 + fund_rowan = [5 * test_utils.sifnode_funds_for_transfer_peggy1 * n_accounts * number_of_batches, "rowan"] + log.debug("Estimated number of batches needed to transfer tokens from intermediate sif account to target sif wallet: {}".format(number_of_batches)) + log.debug("Estimated rowan funding needed for intermediate account: {}".format(fund_rowan)) + ether_faucet_account = self.ctx.operator sif_broker_account = self.ctx.create_sifchain_addr(fund_amounts=[fund_rowan]) eth_broker_account = self.ctx.operator + if (total_eth_amount_gwei > 0) and (ether_faucet_account != eth_broker_account): + self.ctx.eth.send_eth(ether_faucet_account, eth_broker_account, total_eth_amount_gwei) + log.info("Using eth_broker_account {}".format(eth_broker_account)) log.info("Using sif_broker_account {}".format(sif_broker_account)) @@ -226,12 +248,35 @@ def transfer(self, requested_tokens, amount, target_sif_accounts): new_tokens = self.create_new_tokens(tokens_to_create) existing_tokens.extend(new_tokens) - tokens_to_transfer = [exactly_one(find_by_value(existing_tokens, "sif_denom", t["symbol"])) + # At this point, all tokens that we want to transfer should exist both on Ethereum blockchain as well as in + # existing_tokens. 
+ tokens_to_transfer = [exactly_one(find_by_value(existing_tokens, "symbol", t["symbol"])) for t in requested_tokens] - self.mint([t["address"] for t in tokens_to_transfer], amount_per_token, eth_broker_account) - self.transfer_from_eth_to_sifnode(eth_broker_account, sif_broker_account, tokens_to_transfer, amount_per_token) - self.distribute_tokens_to_wallets(sif_broker_account, tokens_to_transfer, amount, target_sif_accounts) + self.mint([t["address"] for t in tokens_to_transfer], total_token_amount, eth_broker_account) + + if (self.max_ethereum_batch_size > 0) and (len(tokens_to_transfer) > self.max_ethereum_batch_size): + log.debug(f"Transferring {len(tokens_to_transfer)} tokens from ethereum to sifndde in batches of {self.max_ethereum_batch_size}...") + remaining = tokens_to_transfer + while remaining: + batch = remaining[:self.max_ethereum_batch_size] + remaining = remaining[self.max_ethereum_batch_size:] + self.transfer_from_eth_to_sifnode(eth_broker_account, sif_broker_account, batch, total_token_amount, 0) + log.debug(f"Batch completed, {len(remaining)} tokens remaining") + # Transfer ETH separately + log.debug("Thansfering ETH from ethereum to sifnode...") + self.transfer_from_eth_to_sifnode(eth_broker_account, sif_broker_account, [], 0, total_eth_amount_gwei) + else: + log.debug(f"Transferring {len(tokens_to_transfer)} tokens from ethereum to sifnode in single batch...") + self.transfer_from_eth_to_sifnode(eth_broker_account, sif_broker_account, tokens_to_transfer, total_token_amount, total_eth_amount_gwei) + self.distribute_tokens_to_wallets(sif_broker_account, tokens_to_transfer, token_amount, target_sif_accounts, eth_amount_gwei) + + def transfer_eth(self, from_eth_addr, amount_gwei, target_sif_accounts): + pending_txs = [] + for sif_acct in target_sif_accounts: + txrcpt = self.ctx.eth.tx_bridge_bank_lock_eth(from_eth_addr, sif_acct, amount_gwei * eth.GWEI) + pending_txs.append(txrcpt) + self.wait_for_all(pending_txs) def run(*args): @@ -245,10 +290,10 
@@ def run(*args): ctx.cmd.write_text_file(args[0], json.dumps(script.export(), indent=4)) elif cmd == "transfer": # Usage: inflate_tokens.py transfer assets.json amount accounts.json - assets_json_file, amount, accounts_json_file = args + assets_json_file, token_amount, accounts_json_file, amount_eth_gwei = args tokens = json.loads(ctx.cmd.read_text_file(assets_json_file)) accounts = json.loads(ctx.cmd.read_text_file(accounts_json_file)) - script.transfer(tokens, int(amount), accounts) + script.transfer(tokens, int(token_amount), accounts, int(amount_eth_gwei)) else: raise Exception("Invalid usage") diff --git a/test/integration/framework/ip_addr_pool.py b/test/integration/framework/src/siftool/ip_addr_pool.py similarity index 100% rename from test/integration/framework/ip_addr_pool.py rename to test/integration/framework/src/siftool/ip_addr_pool.py diff --git a/test/integration/framework/src/siftool/localnet.py b/test/integration/framework/src/siftool/localnet.py new file mode 100644 index 0000000000..5ef2a00df6 --- /dev/null +++ b/test/integration/framework/src/siftool/localnet.py @@ -0,0 +1,186 @@ +import os +import json +from siftool.command import Command +from siftool.common import project_dir +import logging + + +log = logging.getLogger(__name__) + + +# This is called from run_env as a hook to run additional IBC chains defined in LOCALNET variable. 
+def run_localnet_hook(): + localnet_env_var = os.environ.get("LOCALNET") + if not localnet_env_var: + return + + localnet = Localnet() + if not os.path.exists(localnet.node_module_dir): + log.info("Installing localnet dependencies on first use in '{}'...".format(localnet.node_module_dir)) + localnet.install_deps() + if not os.path.exists(localnet.bin_dir): + log.info("Downloading localnet binaries on first use in '{}'...".format(localnet.bin_dir)) + localnet.download_binaries() + + if not os.path.exists(localnet.config_dir): + log.info("Init all chains on first use in '{}'...".format(localnet.config_dir)) + localnet.init_all_chains() + + # rm -rf /tmp/localnet/config/cosmos/cosmoshub-testnet + # mkdir -p /tmp/localnet/config/cosmos/cosmoshub-testnet + # /tmp/localnet/bin/gaiad init cosmoshub-testnet --chain-id cosmoshub-testnet --home /tmp/localnet/config/cosmos/cosmoshub-testnet + # /tmp/localnet/bin/gaiad keys add cosmos-validator --keyring-backend test --home /tmp/localnet/config/cosmos/cosmoshub-testnet + # /tmp/localnet/bin/gaiad keys add cosmos-source --keyring-backend test --home /tmp/localnet/config/cosmos/cosmoshub-testnet + # /tmp/localnet/bin/gaiad add-genesis-account cosmos-validator 10000000000000000000uphoton --keyring-backend test --home /tmp/localnet/config/cosmos/cosmoshub-testnet + # /tmp/localnet/bin/gaiad add-genesis-account cosmos-source 10000000000000000000uphoton --keyring-backend test --home /tmp/localnet/config/cosmos/cosmoshub-testnet + + # rm -rf /tmp/localnet/config/sifchain/sifchain-testnet-1 + # mkdir -p /tmp/localnet/config/sifchain/sifchain-testnet-1 + # /tmp/localnet/bin/sifnoded init sifchain-testnet-1 --chain-id sifchain-testnet-1 --home /tmp/localnet/config/sifchain/sifchain-testnet-1 + # /tmp/localnet/bin/sifnoded keys add sifchain-validator --keyring-backend test --home /tmp/localnet/config/sifchain/sifchain-testnet-1 + # /tmp/localnet/bin/sifnoded keys add sifchain-source --keyring-backend test --home 
/tmp/localnet/config/sifchain/sifchain-testnet-1 + # /tmp/localnet/bin/sifnoded add-genesis-account sifchain-validator 10000000000000000000rowan --keyring-backend test --home /tmp/localnet/config/sifchain/sifchain-testnet-1 + # /tmp/localnet/bin/sifnoded add-genesis-account sifchain-source 10000000000000000000rowan --keyring-backend test --home /tmp/localnet/config/sifchain/sifchain-testnet-1 + + # For each chain: + # defaultGenesis = what was created in ${home}/config/genesis.json + # remoteGenesis = curl (${node from config}/genesis).data e.g. https://rpc.testnet.cosmos.network:443/genesis + # cleanedUpGenesis = cleanUpGenesisState(defaultGenesis, remoteGenesis) + # + # writeFile(genesis, "${home}/config/genesis.json") + # + # if sifchain: ${binPath}/${binary} set-gen-denom-whitelist ${home}/config/denoms.json --home ${home} + # + # ${binPath}/${binary} gentx ${validatorAccountName} ${amount}${denom} --chain-id ${chainId} --keyring-backend test --home ${home} + # ${binPath}/${binary} collect-gentxs --home ${home} + + localnet.start_all_chains() # Runs sifnoded and gaiad + + + +def get_localnet_config(cmd): + config = json.loads(cmd.read_text_file(cmd.project.project_dir("test/localnet/config/chains.json"))) + return config + + +def run(cmd, argv): + log.debug(repr(argv)) + config = get_localnet_config(cmd) + # Filter out items with "disabled": true + config = {k: v for k, v in config.items() if not v.get("disabled", False)} + tmpdir = cmd.mktempdir() + log.debug(tmpdir) + + localnet = Localnet() + localnet.init_all_chains() + # localnet.start_all_chains() + + return + + +def download_ibc_binaries(cmd, chains_to_download=None, output_path=None): + if not output_path: + output_path = cmd.pwd() + else: + if not cmd.exists(output_path): + cmd.mkdir(output_path) + config = get_localnet_config(cmd) + tmpdir = cmd.mktempdir() + # We prefer to compile sifchain. Sentinel uses sourceUrl, but there is no Makefile. 
+ all_supported_chains = set(config.keys()).difference({"sifchain", "sentinel"}) + chains_to_download = chains_to_download or "all" + if chains_to_download == "all": + chains_to_download = all_supported_chains + else: + chains_to_download = ",".split(chains_to_download) + try: + tmp_gobin = os.path.join(tmpdir, "bin") + cmd.mkdir(tmp_gobin) + for chain_name in chains_to_download: + if chain_name not in config: + raise Exception("Chain {} not supported yet".format(chain_name)) + values = config[chain_name] + binary = values["binary"] + binary_url = values.get("binaryUrl") + source_url = values.get("sourceUrl") + binary_relative_path = values.get("binaryRelativePath") + source_relative_path = values.get("sourceRelativePath") + assert bool(source_url) ^ bool(binary_url) + url = binary_url or source_url + dlfile = os.path.join(tmpdir, "{}-download.tmp".format(chain_name)) + log.info("Downloading {} from '{}' to '{}'...".format(chain_name, url, dlfile)) + cmd.download_url(url, output_file=dlfile) + extract_dir = os.path.join(tmpdir, chain_name) + src_file = None + cmd.mkdir(extract_dir) + if url.endswith(".zip"): + cmd.execst(["unzip", dlfile], cwd=extract_dir) + elif url.endswith(".tar.gz"): + cmd.execst(["tar", "xfz", dlfile], cwd=extract_dir) + elif binary_url: + # We have binaryUrl but it is not an archive => must be binary itself + assert not source_url and not binary_relative_path + src_file = dlfile + if not src_file: + if binary_url: + src_file = os.path.join(extract_dir, binary_relative_path if binary_relative_path else binary) + if source_url: + src_dir = extract_dir if not source_relative_path else os.path.join(extract_dir, source_relative_path) + cmd.execst(["make", "install"], cwd=src_dir, env={"GOBIN": tmp_gobin}) + src_file = os.path.join(tmp_gobin, binary) + assert src_file + dst_file = os.path.join(output_path, binary) + cmd.copy_file(src_file, dst_file) + cmd.chmod(dst_file, "+x") + finally: + cmd.rmf(tmpdir) + + +def fetch_genesis(base_url): + pass + 
+
+def init_chain(cmd):
+    pass
+
+
+class Localnet(Command):
+    def __init__(self, script_dir=None, config_dir=None, bin_dir=None):
+        self.script_dir = script_dir if script_dir else project_dir("test/localnet")
+        self.config_dir = config_dir if config_dir else os.path.join("/tmp/localnet", "./config")
+        self.bin_dir = bin_dir if bin_dir else os.path.join("/tmp/localnet", "./bin")
+        self.node_module_dir = os.path.join(self.script_dir, "./node_modules")
+
+    def install_deps(self):
+        self.execst(["yarn"], cwd=self.script_dir, pipe=False)
+
+    def download_binaries(self):
+        self.execst(["yarn", "downloadBinaries"], cwd=self.script_dir, pipe=False)
+
+    def init_all_chains(self):
+        self.execst(["yarn", "initAllChains"], cwd=self.script_dir, pipe=False)
+
+    def start_all_chains(self):
+        self.execst(['yarn', 'startAllChains'], cwd=self.script_dir, pipe=False)
+
+    def init_all_relayers(self):
+        self.execst(['yarn', 'initAllRelayers'], cwd=self.script_dir, pipe=False)
+
+    def start_all_relayers(self):
+        self.execst(['yarn', 'startAllRelayers'], cwd=self.script_dir, pipe=False)
+
+    def build_local_net(self):
+        self.execst(['yarn', 'buildLocalNet'], cwd=self.script_dir, pipe=False)
+
+    def load_local_net(self):
+        self.execst(['yarn', 'loadLocalNet'], cwd=self.script_dir, pipe=False)
+
+    def take_snapshot(self):
+        self.execst(['yarn', 'takeSnapshot'], cwd=self.script_dir, pipe=False)
+
+    def create_snapshot(self):
+        self.execst(['yarn', 'createSnapshot'], cwd=self.script_dir, pipe=False)
+
+    def test(self):
+        self.execst(['yarn', 'test'], cwd=self.script_dir, pipe=False)
+    
\ No newline at end of file
diff --git a/test/integration/framework/src/siftool/main.py b/test/integration/framework/src/siftool/main.py
new file mode 100755
index 0000000000..a4f2fd04f0
--- /dev/null
+++ b/test/integration/framework/src/siftool/main.py
@@ -0,0 +1,136 @@
+import sys
+import time
+
+from siftool import test_utils
+from siftool.run_env import Integrator, UIStackEnvironment, Peggy2Environment, 
IBCEnvironment, IntegrationTestsEnvironment +from siftool.project import Project, killall, force_kill_processes +from siftool.common import * + + +def main(argv): + # tmux usage: + # tmux new-session -d -s env1 + # tmux main-pane-height -t env1 10 + # tmux split-window -h -t env1 + # tmux split-window -h -t env1 + # tmux select-layout -t env1 even-vertical + # OR: tmux select-layout main-horizontal + basic_logging_setup() + what = argv[0] if argv else None + cmd = Integrator() + project = cmd.project + if what == "project-init": + project.init() + elif what == "clean": + project.clean() + elif what == "build": + project.build() + elif what == "rebuild": + project.rebuild() + elif what == "project": + return getattr(project, argv[1])(*argv[2:]) + elif what == "run-ui-env": + e = UIStackEnvironment(cmd) + e.stack_save_snapshot() + e.stack_push() + elif what == "run-env": + if on_peggy2_branch: + # Equivalent to future/devenv - hardhat, sifnoded, ebrelayer + # I.e. cd smart-contracts; GOBIN=/home/anderson/go/bin npx hardhat run scripts/devenv.ts + env = Peggy2Environment(cmd) + processes = env.run() + else: + env = IntegrationTestsEnvironment(cmd) + project.clean() + # deploy/networks already included in run() + processes = env.run() + # TODO Cleanup: + # - rm -rf test/integration/sifnoderelayerdb + # - rm -rf networks/validators/localnet/$moniker/.sifnoded + # - If you ran the execute_integration_test_*.sh you need to kill ganache-cli for proper cleanup + # as it might have been killed and started outside of our control + if not in_github_ci: + input("Press ENTER to exit...") + killall(processes) + elif what == "devenv": + project.npx(["hardhat", "run", "scripts/devenv.ts"], cwd=project.smart_contracts_dir, pipe=False) + elif what == "create_snapshot": + # Snapshots are only supported in IntegrationTestEnvironment + snapshot_name = argv[1] + project.clean() + env = IntegrationTestsEnvironment(cmd) + processes = env.run() + # Give processes some time to settle, for 
example relayerdb must init and create its "relayerdb" + time.sleep(45) + killall(processes) + # processes1 = e.restart_processes() + env.create_snapshot(snapshot_name) + elif what == "restore_snapshot": + # Snapshots are only supported in IntegrationTestEnvironment + snapshot_name = argv[1] + env = IntegrationTestsEnvironment(cmd) + env.restore_snapshot(snapshot_name) + processes = env.restart_processes() + input("Press ENTER to exit...") + killall(processes) + elif what == "run-ibc-env": + env = IBCEnvironment(cmd) + processes = env.run() + elif what == "run-integration-tests": + # TODO After switching the branch,: cd smart-contracts; rm -rf node_modules; + cmd.install_smart_contract_dependencies() (yarn clean + yarn install) + scripts = [ + "execute_integration_tests_against_test_chain_peg.sh", + "execute_integration_tests_against_test_chain_clp.sh", + "execute_integration_tests_against_any_chain.sh", + "execute_integration_tests_with_snapshots.sh", + ] + for script in scripts: + force_kill_processes(cmd) + e = IntegrationTestsEnvironment(cmd) + processes = e.run() + cmd.execst(script, cwd=project.test_integration_dir) + killall(processes) + force_kill_processes(cmd) # Some processes are restarted during integration tests so we don't own them + log.info("Everything OK") + elif what == "check-env": + ctx = test_utils.get_env_ctx() + ctx.sanity_check() + elif what == "test-logging": + ls_cmd = mkcmd(["ls", "-al", "."], cwd="/tmp") + res = stdout_lines(cmd.execst(**ls_cmd)) + print(ls_cmd) + elif what == "poc-geth": + import geth + g = geth.Geth(cmd) + with open(cmd.mktempfile(), "w") as geth_log_file: + datadir_for_running = cmd.mktempdir() + datadir_for_keys = cmd.mktempdir() + args = g.geth_cmd__test_integration_geth_branch(datadir=datadir_for_running) + geth_proc = cmd.popen(args, log_file=geth_log_file) + import hardhat + for expected_addr, private_key in hardhat.default_accounts(): + addr = g.create_account("password", private_key, datadir=datadir_for_keys) + 
assert addr == expected_addr + input("Press ENTER to exit...") + killall((geth_proc,)) + elif what == "inflate-tokens": + import inflate_tokens + inflate_tokens.run(*argv[1:]) + elif what == "recover-eth": + test_utils.recover_eth_from_test_accounts() + elif what == "run-peggy2-tests": + cmd.execst(["yarn", "test"], cwd=project.smart_contracts_dir) + elif what == "generate-python-protobuf-stubs": + project.generate_python_protobuf_stubs() + elif what == "localnet": + import localnet + localnet.run(cmd, argv[1:]) + elif what == "download-ibc-binaries": + import localnet + localnet.download_ibc_binaries(cmd, *argv[1:]) + else: + raise Exception("Missing/unknown command") + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/test/integration/framework/project.py b/test/integration/framework/src/siftool/project.py similarity index 53% rename from test/integration/framework/project.py rename to test/integration/framework/src/siftool/project.py index 98c5efc908..7b89837a9a 100644 --- a/test/integration/framework/project.py +++ b/test/integration/framework/src/siftool/project.py @@ -1,6 +1,6 @@ import os import json -from common import * +from siftool.common import * def force_kill_processes(cmd): @@ -24,6 +24,7 @@ def __init__(self, cmd, base_dir): self.base_dir = base_dir self.smart_contracts_dir = project_dir("smart-contracts") self.test_integration_dir = project_dir("test", "integration") + self.siftool_dir = project_dir("test", "integration", "framework") self.go_path = os.environ.get("GOPATH") if self.go_path is None: # https://pkg.go.dev/cmd/go#hdr-GOPATH_and_Modules @@ -42,19 +43,17 @@ def __rm(self, path): else: log.debug("Nothing to delete for '{}'".format(path)) - def rebuild(self): - # Use this after switching branches (i.e. develop vs. 
future/peggy2) - self.clean(1) - # self.cmd.execst(["npm", "install", "-g", "ganache-cli", "dotenv", "yarn"], cwd=self.smart_contracts_dir) - self.install_smart_contracts_dependencies() - self.cmd.execst(["make", "install"], cwd=self.project_dir(), pipe=False) + def __rm_files_develop(self): + self.__rm(self.project_dir("test", "integration", "sifchainrelayerdb")) # TODO move to /tmp def __rm_files(self, level): if level >= 0: # rm -rvf /tmp/tmp.xxxx (ganache DB, unique for every run) - self.__rm(self.project_dir("test", "integration", "sifchainrelayerdb")) # TODO move to /tmp + self.__rm_files_develop() self.__rm(self.project_dir("smart-contracts", "build")) # truffle deploy self.__rm(self.project_dir("test", "integration", "vagrant", "data")) + self.__rm(self.project_dir("test", "integration", "src", ".pytest_cache")) + self.__rm(self.project_dir("test", "integration", "src", "py", ".pytest_cache")) self.__rm(self.cmd.get_user_home(".sifnoded")) # Probably needed for "--keyring-backend test" self.__rm(self.project_dir("deploy", "networks")) # from running integration tests @@ -88,6 +87,7 @@ def __rm_files(self, level): for file in ["sifnoded", "ebrelayer", "sifgen"]: self.__rm(os.path.join(self.go_bin_dir, file)) self.__rm(self.project_dir("smart-contracts", "node_modules")) + self.__rm(self.project_dir("test", "localnet", "node_modules")) if level >= 2: if self.cmd.exists(self.go_path): @@ -99,7 +99,6 @@ def __rm_files(self, level): self.__rm(self.cmd.get_user_home(".cache/yarn")) self.__rm(self.cmd.get_user_home(".sifnoded")) self.__rm(self.cmd.get_user_home(".sifnode-integration")) - self.__rm(project_dir("smart-contracts/node_modules")) # Peggy2 # Generated Go stubs (by smart-contracts/Makefile) @@ -120,12 +119,6 @@ def __rm_files(self, level): # smart-contracts/env.json # smart-contracts/environment.json - # Use this between run-env. 
- def clean(self, level=None): - level = 0 if level is None else int(level) - force_kill_processes(self.cmd) - self.__rm_files(level) - def yarn(self, args, cwd=None, env=None): return self.cmd.execst(["yarn"] + args, cwd=cwd, env=env, pipe=False) @@ -155,7 +148,6 @@ def make_go_binaries(self): # TODO Merge # Main Makefile requires GOBIN to be set to an absolute path. Compiled executables ebrelayer, sifgen and # sifnoded will be written there. The directory will be created if it doesn't exist yet. - # def make_go_binaries_2(self): # Original: cd smart-contracts; make -C .. install self.cmd.execst(["make", "install"], cwd=project_dir(), pipe=False) @@ -164,7 +156,8 @@ def install_smart_contracts_dependencies(self): self.cmd.execst(["make", "clean-smartcontracts"], cwd=self.smart_contracts_dir) # = rm -rf build .openzeppelin # According to peggy2, the plan is to move from npm install to yarn, but there are some issues with yarn atm. # self.yarn(["install"], cwd=self.smart_contracts_dir) - self.cmd.execst(["npm", "install"], cwd=self.smart_contracts_dir, pipe=False) + # self.cmd.execst(["npm", "install"], cwd=self.smart_contracts_dir, pipe=False) + self.npm_install(self.smart_contracts_dir) def write_vagrantenv_sh(self, state_vars, data_dir, ethereum_websocket_address, chainnet): # Trace of test_utilities.py get_required_env_var/get_optional_env_var: @@ -212,12 +205,6 @@ def write_vagrantenv_sh(self, state_vars, data_dir, ethereum_websocket_address, self.cmd.write_text_file(vagrantenv_path, joinlines(format_as_shell_env_vars(env))) self.cmd.write_text_file(project_dir("test/integration/vagrantenv.json"), json.dumps(env)) - def init(self): - self.clean() - self.cmd.rmdir(project_dir("smart-contracts/node_modules")) - self.make_go_binaries_2() - self.install_smart_contracts_dependencies() - def get_peruser_config_dir(self): return self.cmd.get_user_home(".config", "siftool") @@ -231,3 +218,190 @@ def read_peruser_config_file(self, name): return 
json.loads(self.cmd.read_text_file(path)) else: return None + + def init(self): + self.clean() + # self.cmd.rmdir(project_dir("smart-contracts/node_modules")) + self.make_go_binaries_2() + self.install_smart_contracts_dependencies() + + def clean(self): + self.cmd.rmf(self.project_dir("smart-contracts", "node_modules")) + self.cmd.rmf(os.path.join(self.siftool_dir, "build")) + if on_peggy2_branch: + for file in [".proto-gen", ".run", "cmd/ebrelayer/contract/generated/artifacts", "smart-contracts/.hardhat-compile"]: + self.cmd.rmf(self.project_dir(file)) + else: + # Output from "truffle compile" / "npx hardhat compile". + # Wrong contents can cause hardhat to fail compilation after switching branches. + self.cmd.rmf(self.project_dir("smart-contracts", "build")) + self.cmd.rmf(self.project_dir("smart-contracts", "cache")) + self.cmd.rmf(self.project_dir("smart-contracts", "artifacts")) + + for filename in ["sifnoded", "ebrelayer", "sifgen"]: + self.cmd.rmf(os.path.join(self.go_bin_dir, filename)) + + # Use this between run-env. + def old_clean(self, level=None): + level = 0 if level is None else int(level) + force_kill_processes(self.cmd) + self.__rm_files(level) + + def build(self): + if on_peggy2_branch: + self.npm_install(self.project_dir("smart-contracts")) + self.npx(["hardhat", "compile"], cwd=self.project_dir("smart-contracts"), pipe=False) + else: + self.npm_install(self.project_dir("smart-contracts")) + self.cmd.execst(["make", "install"], cwd=self.project_dir(), pipe=False) + self.cmd.execst([self.project_dir("smart-contracts", "node_modules", ".bin", "truffle"), "compile"], + cwd=self.project_dir("smart-contracts"), pipe=False) + + def rebuild(self): + self.clean() + self.build() + + def old_rebuild(self): + # Use this after switching branches (i.e. develop vs. 
future/peggy2) + self.clean(1) + # self.cmd.execst(["npm", "install", "-g", "ganache-cli", "dotenv", "yarn"], cwd=self.smart_contracts_dir) + self.install_smart_contracts_dependencies() + self.cmd.execst(["make", "install"], cwd=self.project_dir(), pipe=False) + + def npm_install(self, path): + # TODO Add package-lock.json also on future/peggy2 branch? + package_lock_json = os.path.join(path, "package.json" if on_peggy2_branch else "package-lock.json") + sha1 = self.cmd.sha1_of_file(package_lock_json) + node_modules = os.path.join(path, "node_modules") + + if self.cmd.exists(node_modules): + cache_tag_file = os.path.join(node_modules, ".siftool-cache-tag") + cache_tag = self.cmd.read_text_file(cache_tag_file) if self.cmd.exists(cache_tag_file) else None + if (cache_tag is None) or (cache_tag != sha1): + self.cmd.rmdir(node_modules) + else: + return + + assert not self.cmd.exists(node_modules) + cache_dir = os.path.join(self.get_peruser_config_dir(), "npm-cache") + cache_index = os.path.join(cache_dir, "index.json") + cache = [] + if not self.cmd.exists(cache_dir): + self.cmd.mkdir(cache_dir) + if self.cmd.exists(cache_index): + cache = json.loads(self.cmd.read_text_file(cache_index)) + idx = None + for i, s in enumerate(cache): + if s == sha1: + idx = i + break + tarfile = os.path.join(cache_dir, "{}.tar".format(sha1)) + if idx is None: + saved = dict(((f, self.cmd.read_text_file(f)) + for f in [os.path.join(path, x) for x in ["package-lock.json", "yarn.lock"]] if self.cmd.exists(f))) + self.cmd.execst(["npm", "install"], cwd=path, pipe=False) + cache_tag_file = os.path.join(node_modules, ".siftool-cache-tag") + self.cmd.write_text_file(cache_tag_file, sha1) + for file, contents in saved.items(): + self.cmd.write_text_file(file, contents) + self.cmd.tar_create(node_modules, tarfile) + else: + cache.pop(idx) + self.cmd.tar_extract(tarfile, node_modules) + cache.insert(0, sha1) + max_cache_items = 5 + if len(cache) > max_cache_items: + for s in 
cache[max_cache_items:]: + self.cmd.rm(os.path.join(cache_dir, "{}.tar".format(s))) + cache = cache[:max_cache_items] + self.cmd.write_text_file(cache_index, json.dumps(cache)) + + def project_python(self): + project_venv_dir = project_dir("test", "integration", "framework", "venv") + return os.path.join(project_venv_dir, "bin", "python3") + + def _ensure_build_dirs(self): + for d in ["build", "build/repos", "build/generated"]: + self.cmd.mkdir(os.path.join(self.siftool_dir, d)) + + def generate_python_protobuf_stubs(self): + # https://grpc.io/ + # https://grpc.github.io/grpc/python/grpc_asyncio.html + self._ensure_build_dirs() + project_proto_dir = self.project_dir("proto") + third_party_proto_dir = self.project_dir("third_party", "proto") + generated_dir = os.path.join(self.siftool_dir, "build/generated") + repos_dir = os.path.join(self.siftool_dir, "build/repos") + self.cmd.rmf(generated_dir) + self.cmd.mkdir(generated_dir) + cosmos_sdk_repo_dir = os.path.join(repos_dir, "cosmos-sdk") + cosmos_proto_repo_dir = os.path.join(repos_dir, "cosmos-proto") + # self.git_clone("https://github.com/gogo/protobuf", gogo_proto_dir, shallow=True) + self.git_clone("https://github.com/cosmos/cosmos-sdk.git", cosmos_sdk_repo_dir, checkout_commit="dd65ef87322baa2023f195635890a2128a03d318") + self.git_clone("https://github.com/cosmos/cosmos-proto.git", cosmos_proto_repo_dir, checkout_commit="213b76899fac883ac122728f7ab258166137be29") + cosmos_sdk_proto_dir = os.path.join(cosmos_sdk_repo_dir, "proto") + cosmos_proto_proto_dir = os.path.join(cosmos_proto_repo_dir, "proto") + includes = [ + project_proto_dir, + third_party_proto_dir, + cosmos_sdk_proto_dir, + cosmos_proto_proto_dir, + ] + + # We cannot compile all proto files due to conflicting/inconsistent definitions (e.g. coin.proto). 
+ # + # def find_proto_files(path, excludes=()): + # import re + # tmp = [os.path.relpath(i, start=path) for i in + # self.cmd.find_files(path, filter=lambda x: re.match(os.path.basename(x), "^(.*)\.proto$")) + # return sorted(list(set(tmp).difference(set(excludes) if excludes else set()))) + # + # project_proto_files = find_proto_files(project_proto_dir) + # third_party_proto_files = find_proto_files(third_party_proto_dir, excludes=[ + # "cosmos/base/coin.proto", + # ]) + # cosmos_sdk_proto_files = find_proto_files(cosmos_sdk_proto_dir, excludes=[ + # "cosmos/base/query/v1beta1/pagination.proto", + # ]) + # cosmos_proto_proto_files = find_proto_files(cosmos_proto_proto_dir) + # proto_files = project_proto_files + third_party_proto_files + cosmos_sdk_proto_files + cosmos_proto_proto_files + + proto_files = [ + os.path.join(project_proto_dir, "sifnode/ethbridge/v1/tx.proto"), + os.path.join(project_proto_dir, "sifnode/ethbridge/v1/query.proto"), + os.path.join(project_proto_dir, "sifnode/ethbridge/v1/types.proto"), + os.path.join(project_proto_dir, "sifnode/oracle/v1/network_descriptor.proto"), + os.path.join(project_proto_dir, "sifnode/oracle/v1/types.proto"), + os.path.join(third_party_proto_dir, "gogoproto/gogo.proto"), + os.path.join(third_party_proto_dir, "google/api/annotations.proto"), + os.path.join(third_party_proto_dir, "google/api/http.proto"), + os.path.join(third_party_proto_dir, "cosmos/base/query/v1beta1/pagination.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/tx/v1beta1/service.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/base/abci/v1beta1/abci.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/tx/v1beta1/tx.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/tx/signing/v1beta1/signing.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/crypto/multisig/v1beta1/multisig.proto"), + os.path.join(cosmos_sdk_proto_dir, "cosmos/base/v1beta1/coin.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/abci/types.proto"), + 
os.path.join(cosmos_sdk_proto_dir, "tendermint/crypto/proof.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/crypto/keys.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/types/types.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/types/validator.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/types/params.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/types/block.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/types/evidence.proto"), + os.path.join(cosmos_sdk_proto_dir, "tendermint/version/types.proto"), + os.path.join(cosmos_proto_proto_dir, "cosmos_proto/cosmos.proto"), + ] + + args = [self.project_python(), "-m", "grpc_tools.protoc"] + flatten_list([["-I", i] for i in includes]) + [ + "--python_out", generated_dir, "--grpc_python_out", generated_dir] + proto_files + self.cmd.execst(args, pipe=True) + + def git_clone(self, url, path, checkout_commit=None, shallow=False): + if self.cmd.exists(os.path.join(path, ".git")): + return + log.debug("Cloning repository '{}' into '{}',,,".format(url, path)) + self.cmd.execst(["git", "clone", "-q"] + (["--depth", "1"] if shallow else []) + [url, path]) + if checkout_commit: + self.cmd.execst(["git", "checkout", checkout_commit], cwd=path) diff --git a/test/integration/framework/main.py b/test/integration/framework/src/siftool/run_env.py old mode 100755 new mode 100644 similarity index 86% rename from test/integration/framework/main.py rename to test/integration/framework/src/siftool/run_env.py index e9e19e5da2..cc1d1ffc83 --- a/test/integration/framework/main.py +++ b/test/integration/framework/src/siftool/run_env.py @@ -1,15 +1,15 @@ import json import re -import sys import time -from eth import NULL_ADDRESS -from truffle import Ganache -from command import Command -from hardhat import Hardhat -from sifchain import Sifgen, Sifnoded, Ebrelayer, sifchain_denom_hash -from project import Project, killall, force_kill_processes -from test_utils import get_env_ctx -from 
common import * +from typing import List, Tuple, TextIO, Any + +from siftool import eth, hardhat, cosmos, command +from siftool.truffle import Ganache +from siftool.localnet import Localnet +from siftool.command import Command +from siftool.sifchain import Sifgen, Sifnoded, Ebrelayer, sifchain_denom_hash +from siftool.project import Project, killall, force_kill_processes +from siftool.common import * class Integrator(Ganache, Command): @@ -170,7 +170,6 @@ def sifchain_init_common(self, sifnode, denom_whitelist_file): # self.cmd.execst(["sifnoded", "add-genesis-account", sifnoded_admin_address, "100000000000000000000rowan", "--home", sifnoded_home]) sifnode.add_genesis_account(sifnodeadmin_addr, tokens) sifnode.set_genesis_oracle_admin(sifnodeadmin_addr) - sifnode.set_genesis_oracle_admin(sifnodeadmin_addr) sifnode.set_gen_denom_whitelist(denom_whitelist_file) return sifnodeadmin_addr @@ -213,6 +212,9 @@ def wait_for_sif_account_up(self, address, tcp_url=None): log.debug(f"Waiting for sif account {address}... 
({repr(e)})") time.sleep(1) + def _npm_install(self): + self.project.npm_install(self.project.project_dir("smart-contracts")) + class UIStackEnvironment: def __init__(self, cmd): @@ -356,7 +358,7 @@ def stack_save_snapshot(self): # NOTE: this probably doesn't work anymore since setTokenLockBurnLimit.js was replaced burn_limits = [ - [NULL_ADDRESS, 31 * 10 ** 18], + [eth.NULL_ADDRESS, 31 * 10 ** 18], [bridge_token_address, 10 ** 25], [atk_address, 10 ** 25], [btk_address, 10 ** 25], @@ -540,7 +542,7 @@ def run(self): # # TODO This should be last (after return from setup_sifchain.sh) # burn_limits = [ - # [NULL_ADDRESS, 31*10**18], + # [eth.NULL_ADDRESS, 31*10**18], # [bridge_token_sc_addr, 10**25], # ] # env_file_vars = self.cmd.primitive_parse_env_file(env_file) @@ -603,6 +605,11 @@ def run(self): # This script is also called from tests relayer_db_path = os.path.join(self.test_integration_dir, "sifchainrelayerdb") + + # Prevent starting over dirty/existing relayer_db_path + if self.cmd.exists(relayer_db_path): + assert not self.cmd.ls(relayer_db_path), "relayer_db_path {} not empty".format(relayer_db_path) + ebrelayer_proc = self.run_ebrelayer(netdef_json, validator1_address, validator1_moniker, validator1_mnemonic, ebrelayer_ethereum_private_key, bridge_registry_sc_addr, relayer_db_path, log_file=ebrelayer_log_file) @@ -642,6 +649,9 @@ def run(self): } self.project.write_vagrantenv_sh(self.state_vars, self.data_dir, self.ethereum_websocket_address, self.chainnet) + from siftool import localnet + localnet.run_localnet_hook() + return ganache_proc, sifnoded_proc, ebrelayer_proc def remove_and_add_sifnoded_keys(self, moniker, mnemonic): @@ -761,9 +771,9 @@ def restart_processes(self): class Peggy2Environment(IntegrationTestsEnvironment): - def __init__(self, cmd): + def __init__(self, cmd: Command): super().__init__(cmd) - self.hardhat = Hardhat(cmd) + self.hardhat = hardhat.Hardhat(cmd) # Destuctures a linear array of EVM accounts into: # [operator, owner, pauser, 
[validator-0, validator-1, ...], [...available...]] @@ -809,36 +819,47 @@ def run(self): hardhat_exec_args = self.hardhat.build_start_args(hostname=hardhat_bind_hostname, port=hardhat_port) hardhat_proc = self.cmd.spawn_asynchronous_process(hardhat_exec_args, log_file=hardhat_log_file) - # This determines how much EVM accounts we want to allocate for validators. + # This determines how many EVM accounts we want to allocate for validators. # Since every validator needs on EVM account, this should be equal to the number of validators (possibly more). hardhat_validator_count = 1 hardhat_network_id = 1 # Not used in smart-contracts/src/devenv/hardhatNode.ts # This value is actually returned from HardhatNodeRunner. It comes from smart-contracts/hardhat.config.ts. # In Typescript, its value is obtained by 'require("hardhat").hre.network.config.chainId'. # See https://hardhat.org/advanced/hardhat-runtime-environment.html - # The value is not used; instead a hardcoded constant 31337 is passed to ebrelayerWitnessBuilder. + # The value is not used; instead a hardcoded constant 9999 is passed to ebrelayerWitnessBuilder. # Ask juniuszhou for details. 
- hardhat_chain_id = 1 - hardhat_chain_id = 31337 - hardhat_accounts = self.signer_array_to_ethereum_accounts(Hardhat.default_accounts(), hardhat_validator_count) + hardhat_chain_id = 9999 + hardhat_accounts = self.signer_array_to_ethereum_accounts(hardhat.default_accounts(), hardhat_validator_count) + + self.hardhat.compile_smart_contracts() + peggy_sc_addrs = self.hardhat.deploy_smart_contracts() + + # Initialization of smart contracts (technically this is part of deployment) + operator_acct = hardhat_accounts["operator"] + w3_websocket_address = eth.web3_host_port_url("localhost", hardhat_port) + self.init_smart_contracts(w3_websocket_address, operator_acct, peggy_sc_addrs) admin_account_name = "sifnodeadmin" chain_id = "localnet" - ceth_symbol = sifchain_denom_hash(hardhat_chain_id, NULL_ADDRESS) - assert ceth_symbol == "sif5ebfaf95495ceb5a3efbd0b0c63150676ec71e023b1043c40bcaaf91c00e15b2" - # Mint goes to validator - mint_amount = [ - [999999 * 10**21, "rowan"], + ceth_symbol = sifchain_denom_hash(hardhat_chain_id, eth.NULL_ADDRESS) + assert ceth_symbol == "sifBridge99990x0000000000000000000000000000000000000000" + # This goes to validator0, i.e. 
sifnode_validators[0]["address"] + validator_mint_amounts: cosmos.LegacyBalance = [ + [999999 * 10**27, "rowan"], [137 * 10**16, "ibc/FEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACE"], [999999 * 10**21, ceth_symbol], + [137 * 10**16, "sifBridge00030x1111111111111111111111111111111111111111"], ] validator_power = 100 seed_ip_address = "10.10.1.1" tendermint_port = 26657 denom_whitelist_file = project_dir("test", "integration", "whitelisted-denoms.json") - tokens = [ - [10**20, "rowan"], - [2 * 10**19, "ceth"] + # These go to admin account, relayers and witnesses + admin_account_mint_amounts: cosmos.LegacyBalance = [ + [10**27, "rowan"], + [2 * 10**22, ceth_symbol], + [10 ** 16, "ibc/FEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACEFEEDFACE"], + [10 ** 16, "sifBridge00030x1111111111111111111111111111111111111111"], ] registry_json = project_dir("smart-contracts", "src", "devenv", "registry.json") sifnoded_network_dir = "/tmp/sifnodedNetwork" # Gets written to .vscode/launch.json @@ -846,14 +867,14 @@ def run(self): self.cmd.mkdir(sifnoded_network_dir) network_config_file, sifnoded_exec_args, sifnoded_proc, tcp_url, admin_account_address, sifnode_validators, \ sifnode_relayers, sifnode_witnesses, sifnode_validator0_home, chain_dir = \ - self.init_sifchain(sifnoded_network_dir, sifnoded_log_file, chain_id, hardhat_chain_id, mint_amount, - validator_power, seed_ip_address, tendermint_port, denom_whitelist_file, tokens, registry_json, - admin_account_name) + self.init_sifchain(sifnoded_network_dir, sifnoded_log_file, chain_id, hardhat_chain_id, + validator_mint_amounts, validator_power, seed_ip_address, tendermint_port, denom_whitelist_file, + admin_account_mint_amounts, registry_json, admin_account_name, ceth_symbol) - self.hardhat.compile_smart_contracts() - peggy_sc_addrs = self.hardhat.deploy_smart_contracts() + log.debug("ceth symbol is: {}".format(ceth_symbol)) + log.debug("Admin account address: {}".format(admin_account_address)) # tokens + 
log.debug("Validator 0 address: {}".format(sifnode_validators[0]["address"])) # mint - w3_websocket_address = "ws://localhost:{}/".format(hardhat_port) symbol_translator_file = os.path.join(self.test_integration_dir, "config", "symbol_translator.json") [relayer0_exec_args], [witness0_exec_args] = \ self.start_witnesses_and_relayers(w3_websocket_address, hardhat_chain_id, tcp_url, @@ -878,26 +899,38 @@ def run(self): "chain_id": chain_id, "validators": sifnode_validators, # From yaml file generated by sifgen "relayers": sifnode_relayers, - "smart_contracts": { - "BridgeBank": peggy_sc_addrs.bridge_bank, - "BridgeRegistry": peggy_sc_addrs.bridge_registry, - "CosmosBridge": peggy_sc_addrs.cosmos_bridge, - "RowanContract": peggy_sc_addrs.rowan, - } + "smart_contracts": peggy_sc_addrs } self.write_env_files(self.project.project_dir(), self.project.go_bin_dir, peggy_sc_addrs, hardhat_accounts, admin_account_name, admin_account_address, sifnode_validator0_home, sifnode_validators, sifnode_relayers, sifnode_witnesses, tcp_url, hardhat_bind_hostname, hardhat_port, hardhat_chain_id, chain_dir, sifnoded_exec_args, relayer0_exec_args, witness0_exec_args - ) + ) return hardhat_proc, sifnoded_proc, relayer0_proc, witness0_proc - def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardhat_chain_id, mint_amount, - validator_power, seed_ip_address, tendermint_port, denom_whitelist_file, tokens, registry_json, - admin_account_name - ): + def init_smart_contracts(self, w3_url, operator_account, deployed_contract_addresses): + # TODO Looks like this is already done somewhere else... + # operator_addr, operator_private_key = operator_account + # w3_conn = eth.web3_wait_for_connection_up(w3_url) + # eth_tx = eth.EthereumTxWrapper(w3_conn, True) + # eth_tx.set_private_key(operator_addr, operator_private_key) + # + # # CosmosBridge doesn't have BridgeBank in its init and expects a separate setBridgeBank call. 
CosmosBridge + # # doesn't really work without BridgeBank. + # abi_provider = hardhat.HardhatAbiProvider(self.cmd, deployed_contract_addresses) + # abi, _, deployed_address = abi_provider.get_descriptor("CosmosBridge") + # cosmos_bridge = w3_conn.eth.contract(abi=abi, address=deployed_address) + # bridge_bank_addr = deployed_contract_addresses["BridgeBank"] + # txrcpt = eth_tx.transact_sync(cosmos_bridge.functions.setBridgeBank, operator_addr)(bridge_bank_addr) + return + + def init_sifchain(self, sifnoded_network_dir: str, sifnoded_log_file: TextIO, chain_id: str, hardhat_chain_id: int, + validator_mint_amounts: cosmos.LegacyBalance, validator_power: int, seed_ip_address: str, tendermint_port: int, + denom_whitelist_file: str, admin_account_mint_amounts: cosmos.LegacyBalance, registry_json: str, + admin_account_name: str, ceth_symbol: str + ) -> Tuple[str, command.ExecArgs, subprocess.Popen, str, cosmos.Address, List, List, List, str, str]: validator_count = 1 relayer_count = 1 witness_count = 1 @@ -907,7 +940,7 @@ def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardh network_config_file_path = self.cmd.mktempfile() try: self.cmd.sifgen_create_network(chain_id, validator_count, sifnoded_network_dir, network_config_file_path, - seed_ip_address, mint_amount=mint_amount) + seed_ip_address, mint_amount=validator_mint_amounts) network_config_file = self.cmd.read_text_file(network_config_file_path) finally: self.cmd.rm(network_config_file_path) @@ -951,7 +984,7 @@ def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardh sifnode = Sifnoded(self.cmd, home=validator0_home) # Create an ADMIN account on sifnode with name admin_account_name (e.g. 
"sifnodeadmin") - admin_account_address = sifnode.peggy2_add_account(admin_account_name, tokens, is_admin=True) + admin_account_address = sifnode.peggy2_add_account(admin_account_name, admin_account_mint_amounts, is_admin=True) # TODO Check if sifnoded_peggy2_add_relayer_witness_account can be executed offline (without sifnoded running) # TODO Check if sifnoded_peggy2_set_cross_chain_fee can be executed offline (without sifnoded running) @@ -960,7 +993,7 @@ def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardh # Note: "--home" is shared with sifnoded's "--home" relayers = [{ "name": name, - "address": sifnode.peggy2_add_relayer_witness_account(name, tokens, hardhat_chain_id, + "address": sifnode.peggy2_add_relayer_witness_account(name, admin_account_mint_amounts, hardhat_chain_id, validator_power, denom_whitelist_file), "home": validator0_home, } for name in [f"relayer-{i}" for i in range(relayer_count)]] @@ -969,20 +1002,12 @@ def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardh # Note: "--home" is shared with sifnoded's "--home" witnesses = [{ "name": name, - "address": sifnode.peggy2_add_relayer_witness_account(name, tokens, hardhat_chain_id, + "address": sifnode.peggy2_add_relayer_witness_account(name, admin_account_mint_amounts, hardhat_chain_id, validator_power, denom_whitelist_file), "home": validator0_home, - "db_path": project_dir("smart-contracts", "witnessdb"), } for name in [f"witness-{i}" for i in range(witness_count)]] tcp_url = "tcp://{}:{}".format(ANY_ADDR, tendermint_port) - # sifnoded - # start - # --log_level debug - # --log_format json - # --minimum-gas-prices 0.5rowan - # --rpc.laddr tcp://0.0.0.0:26657 - # --home /tmp/sifnodedNetwork/validators/localnet/xxx-yyy/.sifnoded # @TODO Detect if sifnoded is already running, for now it fails silently and we wait forever in wait_for_sif_account_up sifnoded_exec_args = sifnode.build_start_cmd(tcp_url=tcp_url, minimum_gas_prices=[0.5, "rowan"], 
log_format_json=True) @@ -992,23 +1017,32 @@ def init_sifchain(self, sifnoded_network_dir, sifnoded_log_file, chain_id, hardh # TODO This command exits with status 0, but looks like there are some errros. # The same happens also in devenv. - res = sifnode.peggy2_token_registry_register_all(registry_json, [0.5, "rowan"], 1.5, admin_account_address, + # TODO Try whitelister account instead of admin + res = sifnode.peggy2_token_registry_register_all(registry_json, [0.5, "rowan"], 1.5, admin_account_name, chain_id) log.debug("Result from token registry: {}".format(repr(res))) - assert len(res) == 2 - assert res[0]["raw_log"] == "failed to execute message; message index: 0: unauthorised signer: invalid address" - assert res[1]["raw_log"] == "failed to execute message; message index: 0: unauthorised signer: invalid address" + assert len(res) == 1 + assert res[0]["code"] == 0 + # We need wait for last tx wrapped up in block, otherwise we could get a wrong sequence, resulting in invalid + # signatures. This delay waits for block production. (See commit 5854d8b6f3970c1254cac0eca0e3817354151853) + sifnode.wait_for_last_transaction_to_be_mined() cross_chain_fee_base = 1 cross_chain_lock_fee = 1 cross_chain_burn_fee = 1 - ethereum_cross_chain_fee_token = sifchain_denom_hash(hardhat_chain_id, NULL_ADDRESS) + ethereum_cross_chain_fee_token = ceth_symbol + assert hardhat_chain_id == int(ethereum_cross_chain_fee_token[9:13]) # Assume they should match gas_prices = [0.5, "rowan"] gas_adjustment = 1.5 sifnode.peggy2_set_cross_chain_fee(admin_account_address, hardhat_chain_id, ethereum_cross_chain_fee_token, cross_chain_fee_base, cross_chain_lock_fee, cross_chain_burn_fee, admin_account_name, chain_id, gas_prices, gas_adjustment) + # We need wait for last tx wrapped up in block, otherwise we could get a wrong sequence, resulting in invalid + # signatures. This delay waits for block production. 
(See commit 5854d8b6f3970c1254cac0eca0e3817354151853) + sifnode.wait_for_last_transaction_to_be_mined() + sifnode.peggy2_update_consensus_needed(admin_account_address, hardhat_chain_id, chain_id) + return network_config_file, sifnoded_exec_args, sifnoded_proc, tcp_url, admin_account_address, validators, \ relayers, witnesses, validator0_home, chain_dir @@ -1030,36 +1064,13 @@ def start_witnesses_and_relayers(self, web3_websocket_address, hardhat_chain_id, sifnode_witness0_mnemonic = sifnode_witness0["name"] sifnode_witness0_address = sifnode_witness0["address"] sifnode_witness0_home = sifnode_witness0["home"] - sifnode_witness0_db_path = sifnode_witness0["db_path"] - self.cmd.rmdir(sifnode_witness0_db_path) - self.cmd.mkdir(sifnode_witness0_db_path) - bridge_registry_contract_addr = peggy_sc_addrs.bridge_registry - # bridge_bank_contract_addr = peggy_sc_addrs.bridge_bank - # cosmos_bridge_contract_addr = peggy_sc_addrs.cosmos_bridge - # rowan_contract_addr = peggy_sc_addrs.rowan + bridge_registry_contract_addr = peggy_sc_addrs["BridgeRegistry"] self.cmd.wait_for_sif_account_up(sifnode_validator0_address, tcp_url=tcp_url) # Required for both relayer and witness ebrelayer = Ebrelayer(self.cmd) - # Example: - # ebrelayer - # init-relayer - # --network-descriptor 31337 - # --tendermint-node tcp://0.0.0.0:26657 - # --web3-provider ws://localhost:8545/ - # --bridge-registry-contract-address 0xB7f8BC63BbcaD18155201308C8f3540b07f84F5e - # --validator-mnemonic relayer-0 - # --chain-id localnet - # --node tcp://0.0.0.0:26657 - # --from sif1a44w20496lgyv5asx4d4fnekdpy9xg8ymy9k3s - # --symbol-translator-file ../test/integration/config/symbol_translator.json - # --keyring-backend test - # --home /tmp/sifnodedNetwork/validators/localnet/xxx-yyy/.sifnoded - # env: - # "ETHEREUM_ADDRESS": evm_accounts["validators"][0] - # "ETHEREUM_PRIVATE_KEY": evm_account["validators"][1] relayer0_exec_args = ebrelayer.peggy2_build_ebrelayer_cmd( "init-relayer", hardhat_chain_id, @@ -1074,25 
+1085,10 @@ def start_witnesses_and_relayers(self, web3_websocket_address, hardhat_chain_id, ethereum_address=evm_validator0_addr, ethereum_private_key=evm_validator0_key, keyring_backend="test", + keyring_dir=sifnode_relayer0_home, home=sifnode_relayer0_home, ) - # Example from devenv: - # ebrelayer - # init-witness - # --network-descriptor 31337 - # --tendermint-node tcp://0.0.0.0:26657 - # --web3-provider ws://localhost:8545/ - # --bridge-registry-contract-address 0xB7f8BC63BbcaD18155201308C8f3540b07f84F5e - # --validator-mnemonic witness-0 - # --chain-id localnet - # --node tcp://0.0.0.0:26657 - # --from sif1l7025ps7lt24effpduwxhk45sd977djvu38lhr - # --symbol-translator-file ../test/integration/config/symbol_translator.json - # --relayerdb-path ./witnessdb - # --log_format json - # --keyring-backend test - # --home /tmp/sifnodedNetwork/validators/localnet/xxx-yyy/.sifnoded witness0_exec_args = ebrelayer.peggy2_build_ebrelayer_cmd( "init-witness", hardhat_chain_id, @@ -1104,10 +1100,10 @@ def start_witnesses_and_relayers(self, web3_websocket_address, hardhat_chain_id, node=tcp_url, sign_with=sifnode_witness0_address, symbol_translator_file=symbol_translator_file, - relayerdb_path=sifnode_witness0_db_path, ethereum_address=evm_validator0_addr, ethereum_private_key=evm_validator0_key, keyring_backend="test", + keyring_dir=sifnode_relayer0_home, log_format="json", home=sifnode_witness0_home, ) @@ -1120,7 +1116,7 @@ def write_env_files(self, project_dir, go_bin_dir, evm_smart_contract_addrs, eth relayer0_exec_args, witness0_exec_args ): eth_chain_id = hardhat_chain_id - w3_url = f"ws://{hardhat_bind_hostname}:{hardhat_port}/" + w3_url = eth.web3_host_port_url(hardhat_bind_hostname, hardhat_port) # @TODO At the moment, values are fed from one rendered template into the next. # We should use values directly from parameters instead. 
@@ -1137,10 +1133,10 @@ def format_sif_account(sif_account): # "completed": True, # "output": "...", "contractAddresses": { - "cosmosBridge": evm_smart_contract_addrs.cosmos_bridge, - "bridgeBank": evm_smart_contract_addrs.bridge_bank, - "bridgeRegistry": evm_smart_contract_addrs.bridge_registry, - "rowanContract": evm_smart_contract_addrs.rowan, + "cosmosBridge": evm_smart_contract_addrs["CosmosBridge"], + "bridgeBank": evm_smart_contract_addrs["BridgeBank"], + "bridgeRegistry": evm_smart_contract_addrs["BridgeRegistry"], + "rowanContract": evm_smart_contract_addrs["Rowan"], } }, "ethResults": { @@ -1191,12 +1187,12 @@ def format_sif_account(sif_account): "ETH_HOST": hardhat_bind_hostname, "ETH_PORT": str(hardhat_port), "ROWAN_SOURCE": admin_account_address, - "BRIDGE_BANK_ADDRESS": evm_smart_contract_addrs.bridge_bank, - # "BRIDGE_REGISTRY_ADDRESS": evm_smart_contract_addrs.bridge_registry, - "BRIDGE_REGISTERY_ADDRESS": evm_smart_contract_addrs.bridge_registry, # TODO Typo, remove, keeping it for now for compatibility - "COSMOS_BRIDGE_ADDRESS": evm_smart_contract_addrs.cosmos_bridge, - "ROWANTOKEN_ADDRESS": evm_smart_contract_addrs.rowan, - "BRIDGE_TOKEN_ADDRESS": evm_smart_contract_addrs.rowan, + "BRIDGE_BANK_ADDRESS": evm_smart_contract_addrs["BridgeBank"], + # "BRIDGE_REGISTRY_ADDRESS": evm_smart_contract_addrs["BridgeRegistry"], + "BRIDGE_REGISTERY_ADDRESS": evm_smart_contract_addrs["BridgeRegistry"], # TODO Typo, remove, keeping it for now for compatibility + "COSMOS_BRIDGE_ADDRESS": evm_smart_contract_addrs["CosmosBridge"], + "ROWANTOKEN_ADDRESS": evm_smart_contract_addrs["Rowan"], + "BRIDGE_TOKEN_ADDRESS": evm_smart_contract_addrs["Rowan"], "GOBIN": go_bin_dir, "TCP_URL": tcp_url, "VALIDATOR_ADDRESS": sifnode_validators[0]["address"], @@ -1270,14 +1266,14 @@ def format_sif_account(sif_account): "--network-descriptor", str(eth_chain_id), "--tendermint-node", tcp_url, "--web3-provider", w3_url, - "--bridge-registry-contract-address", 
evm_smart_contract_addrs.bridge_registry, + "--bridge-registry-contract-address", evm_smart_contract_addrs["BridgeRegistry"], "--validator-mnemonic", relayer["name"], "--chain-id", "localnet", "--node", tcp_url, "--keyring-backend", "test", "--from", relayer["address"], "--symbol-translator-file", "${workspaceFolder}/test/integration/config/symbol_translator.json", - "--home", relayer["home"] + "--keyring-dir", relayer["home"], ] } for i, relayer in enumerate(sifnode_relayers)], *[{ "name": f"Debug Witness-{i}", @@ -1295,18 +1291,18 @@ def format_sif_account(sif_account): # "env": {"ETHEREUM_PRIVATE_KEY": eth_accounts["validators"][0][1]}, "args": [ "init-witness", - # TODO This is probably obsolete, need "--network-descriptor" etc. - str(eth_chain_id), - tcp_url, - w3_url, - evm_smart_contract_addrs.bridge_registry, - witness["name"], + "--network-descriptor", str(eth_chain_id), + "--tendermint-node", tcp_url, + "--web3-provider", w3_url, + "--bridge-registry-contract-address", evm_smart_contract_addrs["BridgeRegistry"], + "--validator-mnemonic", witness["name"], "--chain-id", "localnet", "--node", tcp_url, "--keyring-backend", "test", "--from", witness["address"], + # TODO: This shouldnt be needed, it defaults to --home value + "--keyring-dir", witness["home"], "--symbol-translator-file", "${workspaceFolder}/test/integration/config/symbol_translator.json", - "--relayerdb-path", witness["db_path"], "--home", witness["home"] ] } for i, witness in enumerate(sifnode_witnesses)], { @@ -1420,6 +1416,7 @@ def q(s): return s # TODO Qoute/escape return environment_json, dot_env, launch_json, intellij_ebrelayer_config, intellij_witness_config, intellij_sifnoded_config + class IBCEnvironment(IntegrationTestsEnvironment): def __init__(self, cmd): super().__init__(cmd) @@ -1439,122 +1436,3 @@ def run(self): sifgen = Sifgen(self.cmd) # This does not work - "--keyring-backend" is not supported x = sifgen.create_standalone(chainnet0, "chain1", mnemonic, ipaddr0, 
keyring_backend=None) - - print() - - -def main(argv): - # tmux usage: - # tmux new-session -d -s env1 - # tmux main-pane-height -t env1 10 - # tmux split-window -h -t env1 - # tmux split-window -h -t env1 - # tmux select-layout -t env1 even-vertical - # OR: tmux select-layout main-horizontal - basic_logging_setup() - what = argv[0] if argv else None - cmd = Integrator() - project = cmd.project - if what == "project-init": - project.init() - elif what == "clean": - project.clean(*argv[1:]) - elif what == "rebuild": - cmd.project.rebuild() - elif what == "run-ui-env": - e = UIStackEnvironment(cmd) - e.stack_save_snapshot() - e.stack_push() - elif what == "run-env": - if on_peggy2_branch: - # Equivalent to future/devenv - hardhat, sifnoded, ebrelayer - # I.e. cd smart-contracts; GOBIN=/home/anderson/go/bin npx hardhat run scripts/devenv.ts - env = Peggy2Environment(cmd) - processes = env.run() - else: - env = IntegrationTestsEnvironment(cmd) - project.clean() - # deploy/networks already included in run() - processes = env.run() - # TODO Cleanup: - # - rm -rf test/integration/sifnoderelayerdb - # - rm -rf networks/validators/localnet/$moniker/.sifnoded - # - If you ran the execute_integration_test_*.sh you need to kill ganache-cli for proper cleanup - # as it might have been killed and started outside of our control - input("Press ENTER to exit...") - killall(processes) - elif what == "devenv": - project.npx(["hardhat", "run", "scripts/devenv.ts"], cwd=project.smart_contracts_dir, pipe=False) - elif what == "create_snapshot": - # Snapshots are only supported in IntegrationTestEnvironment - snapshot_name = argv[1] - project.clean() - env = IntegrationTestsEnvironment(cmd) - processes = env.run() - # Give processes some time to settle, for example relayerdb must init and create its "relayerdb" - time.sleep(45) - killall(processes) - # processes1 = e.restart_processes() - env.create_snapshot(snapshot_name) - elif what == "restore_snapshot": - # Snapshots are only 
supported in IntegrationTestEnvironment - snapshot_name = argv[1] - env = IntegrationTestsEnvironment(cmd) - env.restore_snapshot(snapshot_name) - processes = env.restart_processes() - input("Press ENTER to exit...") - killall(processes) - elif what == "run-ibc-env": - env = IBCEnvironment(cmd) - processes = env.run() - elif what == "run-integration-tests": - # TODO After switching the branch,: cd smart-contracts; rm -rf node_modules; + cmd.install_smart_contract_dependencies() (yarn clean + yarn install) - scripts = [ - "execute_integration_tests_against_test_chain_peg.sh", - "execute_integration_tests_against_test_chain_clp.sh", - "execute_integration_tests_against_any_chain.sh", - "execute_integration_tests_with_snapshots.sh", - ] - for script in scripts: - force_kill_processes(cmd) - e = IntegrationTestsEnvironment(cmd) - processes = e.run() - cmd.execst(script, cwd=project.test_integration_dir) - killall(processes) - force_kill_processes(cmd) # Some processes are restarted during integration tests so we don't own them - log.info("Everything OK") - elif what == "check-env": - ctx = get_env_ctx() - ctx.sanity_check() - elif what == "test-logging": - ls_cmd = mkcmd(["ls", "-al", "."], cwd="/tmp") - res = stdout_lines(cmd.execst(**ls_cmd)) - print(ls_cmd) - elif what == "poc-geth": - import geth - g = geth.Geth(cmd) - with open(cmd.mktempfile(), "w") as geth_log_file: - datadir_for_running = cmd.mktempdir() - datadir_for_keys = cmd.mktempdir() - args = g.geth_cmd__test_integration_geth_branch(datadir=datadir_for_running) - geth_proc = cmd.popen(args, log_file=geth_log_file) - import hardhat - for expected_addr, private_key in hardhat.Hardhat(cmd).default_accounts(): - addr = g.create_account("password", private_key, datadir=datadir_for_keys) - assert addr == expected_addr - input("Press ENTER to exit...") - killall((geth_proc,)) - elif what == "inflate-tokens": - import inflate_tokens - inflate_tokens.run(*argv[1:]) - elif what == "recover-eth": - import 
test_utils - test_utils.recover_eth_from_test_accounts() - elif what == "run-peggy2-tests": - cmd.execst(["yarn", "test"], cwd=project.smart_contracts_dir) - else: - raise Exception("Missing/unknown command") - - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/test/integration/framework/sifchain.py b/test/integration/framework/src/siftool/sifchain.py similarity index 59% rename from test/integration/framework/sifchain.py rename to test/integration/framework/src/siftool/sifchain.py index 0ab246677c..7d9a0b0dc5 100644 --- a/test/integration/framework/sifchain.py +++ b/test/integration/framework/src/siftool/sifchain.py @@ -1,19 +1,49 @@ -import hashlib +import base64 import json import time -from command import buildcmd -from common import * - - -def sifchain_denom_hash(network_descriptor, token_contract_address): +import grpc +import re +import web3 +from typing import Mapping, Any, Tuple +from siftool import command, cosmos, eth +from siftool.common import * + +def sifchain_denom_hash(network_descriptor: int, token_contract_address: eth.Address) -> str: assert on_peggy2_branch assert token_contract_address.startswith("0x") - s = str(network_descriptor) + token_contract_address.lower() - return "sif" + hashlib.sha256(s.encode("UTF-8")).digest().hex() + assert type(network_descriptor) == int + assert network_descriptor in range(1, 10000) + denom = f"sifBridge{network_descriptor:04d}{token_contract_address.lower()}" + return denom + +def sifchain_denom_hash_to_token_contract_address(token_hash: str) -> Tuple[int, eth.Address]: + m = re.match("^sifBridge(\\d{4})0x([0-9a-fA-F]{40})$", token_hash) + if not m: + raise Exception("Invalid sifchain denom '{}'".format(token_hash)) + network_descriptor = int(m[1]) + token_address = web3.Web3.toChecksumAddress(m[2]) + return network_descriptor, token_address + +# Deprecated +def balance_delta(balances1: cosmos.Balance, balances2: cosmos.Balance) -> cosmos.Balance: + return cosmos.balance_sub(balances2, balances1) + 
+# Deprecated +def balance_zero(balances: cosmos.Balance) -> bool: + return cosmos.balance_zero(balances) + +def is_cosmos_native_denom(denom: str) -> bool: + """Returns true if denom is a native cosmos token (Rowan, ibc) + that was not imported using Peggy""" + return not str.startswith(denom, "sifBridge") + +def import_generated_protobuf_sources(): + import cosmos.tx.v1beta1.service_pb2 as cosmos_pb + import cosmos.tx.v1beta1.service_pb2_grpc as cosmos_pb_grpc class Sifnoded: - def __init__(self, cmd, home=None): + def __init__(self, cmd, home: str = None): self.cmd = cmd self.binary = "sifnoded" self.home = home @@ -85,6 +115,9 @@ def add_genesis_validators_peggy(self, evm_network_descriptor, valoper, validato def set_genesis_oracle_admin(self, address): self.sifnoded_exec(["set-genesis-oracle-admin", address], sifnoded_home=self.home) + def set_genesis_token_registry_admin(self, address): + self.sifnoded_exec(["set-genesis-token-registry-admin", address], sifnoded_home=self.home) + def set_genesis_whitelister_admin(self, address): self.sifnoded_exec(["set-genesis-whitelister-admin", address], sifnoded_home=self.home) @@ -101,7 +134,7 @@ def peggy2_add_account(self, name, tokens, is_admin=False): self.add_genesis_account(account_address, tokens) if is_admin: self.set_genesis_oracle_admin(account_address) - self.set_genesis_whitelister_admin(account_address) + self.set_genesis_whitelister_admin(account_address) return account_address def peggy2_add_relayer_witness_account(self, name, tokens, evm_network_descriptor, validator_power, denom_whitelist_file): @@ -122,8 +155,9 @@ def tx_clp_create_pool(self, chain_id, from_name, symbol, fees, native_amount, e def peggy2_token_registry_register_all(self, registry_path, gas_prices, gas_adjustment, from_account, chain_id ): - args = ["tx", "tokenregistry", "register-all", registry_path, "--gas-prices", sif_format_amount(*gas_prices), - "--gas-adjustment", str(gas_adjustment), "--from", from_account, "--chain-id", 
chain_id, "--yes"] + args = ["tx", "tokenregistry", "set-registry", registry_path, "--gas-prices", sif_format_amount(*gas_prices), + "--gas-adjustment", str(gas_adjustment), "--from", from_account, "--chain-id", chain_id, "--output", "json", + "--yes"] res = self.sifnoded_exec(args, keyring_backend=self.keyring_backend, sifnoded_home=self.home) return [json.loads(x) for x in stdout(res).splitlines()] @@ -139,19 +173,27 @@ def peggy2_set_cross_chain_fee(self, admin_account_address, network_id, ethereum res = self.sifnoded_exec(args, keyring_backend=self.keyring_backend, sifnoded_home=self.home) return res + def peggy2_update_consensus_needed(self, admin_account_address, hardhat_chain_id, chain_id): + consensus_needed = "49" + args = ["tx", "ethbridge", "update-consensus-needed", admin_account_address, str(hardhat_chain_id), + consensus_needed, "--from", admin_account_address, "--chain-id", chain_id, "--gas-prices", + "0.5rowan", "--gas-adjustment", "1.5", "-y"] + res = self.sifnoded_exec(args, keyring_backend=self.keyring_backend, sifnoded_home=self.home) + return res + def sifnoded_start(self, tcp_url=None, minimum_gas_prices=None, log_format_json=False, log_file=None): sifnoded_exec_args = self.build_start_cmd(tcp_url=tcp_url, minimum_gas_prices=minimum_gas_prices, log_format_json=log_format_json) return self.cmd.spawn_asynchronous_process(sifnoded_exec_args, log_file=log_file) - def build_start_cmd(self, tcp_url=None, minimum_gas_prices=None, log_format_json=False): - args = [self.binary, "start"] + \ + def build_start_cmd(self, tcp_url: str = None, minimum_gas_prices=None, log_format_json=False): + args = [self.binary, "start", "--trace"] + \ (["--minimum-gas-prices", sif_format_amount(*minimum_gas_prices)] if minimum_gas_prices is not None else []) + \ (["--rpc.laddr", tcp_url] if tcp_url else []) + \ (["--log_level", "debug"] if log_format_json else []) + \ (["--log_format", "json"] if log_format_json else []) + \ (["--home", self.home] if self.home else []) - 
return buildcmd(args) + return command.buildcmd(args) def sifnoded_exec(self, args, sifnoded_home=None, keyring_backend=None, stdin=None, cwd=None): args = [self.binary] + args + \ @@ -160,10 +202,22 @@ def sifnoded_exec(self, args, sifnoded_home=None, keyring_backend=None, stdin=No res = self.cmd.execst(args, stdin=stdin, cwd=cwd) return res - def get_status(self, host, port): - url = "http://{}:{}/node_info".format(host, port) + def _rpc_get(self, host, port, relative_url): + url = "http://{}:{}/{}".format(host, port, relative_url) return json.loads(http_get(url).decode("UTF-8")) + def get_status(self, host, port): + return self._rpc_get(host, port, "node_info") + + def wait_for_last_transaction_to_be_mined(self, count=1): + # TODO return int(self._rpc_get(host, port, abci_info)["response"]["last_block_height"]) + def latest_block_height(): + args = ["status"] # TODO --node + return int(json.loads(stderr(self.sifnoded_exec(args)))["SyncInfo"]["latest_block_height"]) + initial_block = latest_block_height() + while latest_block_height() < initial_block + count: + time.sleep(1) + def wait_up(self, host, port): while True: from urllib.error import URLError @@ -173,6 +227,119 @@ def wait_up(self, host, port): time.sleep(1) +# Refactoring in progress +class SifnodeClient: + def __init__(self, cmd, node=None, home=None, chain_id=None, grpc_port=None): + self.cmd = cmd + self.binary = "sifnoded" + self.node = node + self.home = home + self.chain_id = chain_id + self.grpc_port = grpc_port + + def query_account(self, sif_addr): + result = json.loads(stdout(self.sifnoded_exec(["query", "account", sif_addr, "--output", "json"]))) + return result + + def send_from_sifchain_to_ethereum(self, from_sif_addr: cosmos.Address, to_eth_addr: str, amount: int, denom: str, + generate_only: bool = False + ) -> Mapping: + """ Sends ETH from Sifchain to Ethereum (burn) """ + assert on_peggy2_branch, "Only for Peggy2.0" + assert self.ctx.eth + eth = self.ctx.eth + + direction = "lock" if 
is_cosmos_native_denom(denom) else "burn" + cross_chain_ceth_fee = eth.cross_chain_fee_base * eth.cross_chain_burn_fee # TODO + args = ["tx", "ethbridge", direction, from_sif_addr, to_eth_addr, str(amount), denom, str(cross_chain_ceth_fee), + "--network-descriptor", str(eth.ethereum_network_descriptor), # Mandatory + "--from", from_sif_addr, # Mandatory, either name from keyring or address + "--output", "json", + "-y" + ] + \ + (["--generate-only"] if generate_only else []) + \ + self._gas_prices_args() + \ + self._home_args() + \ + self._chain_id_and_node_args() + \ + (self._keyring_backend_args() if not generate_only else []) + res = self.sifnoded_exec(args) + result = json.loads(stdout(res)) + if not generate_only: + assert "failed to execute message" not in result["raw_log"] + return result + + def send_from_sifchain_to_ethereum_grpc(self, from_sif_addr: cosmos.Address, to_eth_addr: str, amount: int, + denom: str + ): + tx = self.send_from_sifchain_to_ethereum(from_sif_addr, to_eth_addr, amount, denom, generate_only=True) + signed_tx = self.sign_transaction(tx, from_sif_addr) + encoded_tx = self.encode_transaction(signed_tx) + result = self.broadcast_tx(encoded_tx) + return result + + def sign_transaction(self, tx: Mapping, from_sif_addr: cosmos.Address, sequence: int = None, + account_number: int = None + ) -> Mapping: + tmp_tx_file = self.cmd.mktempfile() + assert (sequence is not None) == (account_number is not None) # We need either both or none + try: + self.cmd.write_text_file(tmp_tx_file, json.dumps(tx)) + args = ["tx", "sign", tmp_tx_file, "--from", from_sif_addr] + \ + (["--sequence", str(sequence), "--offline", "--account-number", str(account_number)] if sequence else []) + \ + self._home_args() + \ + self._chain_id_and_node_args() + \ + self._keyring_backend_args() + res = self.sifnoded_exec(args) + signed_tx = json.loads(stderr(res)) + return signed_tx + finally: + self.cmd.rm(tmp_tx_file) + + def encode_transaction(self, tx: Mapping[str, Any]) -> 
bytes: + tmp_file = self.cmd.mktempfile() + try: + self.cmd.write_text_file(tmp_file, json.dumps(tx)) + res = self.sifnoded_exec(["tx", "encode", tmp_file]) + encoded_tx = base64.b64decode(stdout(res)) + return encoded_tx + finally: + self.cmd.rm(tmp_file) + + def open_grpc_channel(self) -> grpc.Channel: + # See https://docs.cosmos.network/v0.44/core/proto-docs.html + # See https://docs.cosmos.network/v0.44/core/grpc_rest.html + # See https://app.swaggerhub.com/apis/Ivan-Verchenko/sifnode-swagger-api/1.1.1 + # See https://raw.githubusercontent.com/Sifchain/sifchain-ui/develop/ui/core/swagger.yaml + return grpc.insecure_channel("127.0.0.1:9090") + + def broadcast_tx(self, encoded_tx: bytes): + import_generated_protobuf_sources() + broadcast_mode = cosmos_pb.BROADCAST_MODE_ASYNC + with self.open_grpc_channel() as channel: + tx_stub = cosmos_pb_grpc.ServiceStub(channel) + req = cosmos_pb.BroadcastTxRequest(tx_bytes=encoded_tx, mode=broadcast_mode) + resp = tx_stub.BroadcastTx(req) + return resp + + def _gas_prices_args(self): + return ["--gas-prices", "0.5rowan", "--gas-adjustment", "1.5"] + + def _chain_id_and_node_args(self): + return \ + (["--node", self.node] if self.node else []) + \ + (["--chain-id", self.chain_id] if self.chain_id else []) + + def _keyring_backend_args(self): + keyring_backend = self.ctx.sifnode.keyring_backend + return ["--keyring-backend", keyring_backend] if keyring_backend else [] + + def _home_args(self): + return ["--home", self.home] if self.home else [] + + def sifnoded_exec(self, *args, **kwargs): + return self.ctx.sifnode.sifnoded_exec(*args, **kwargs) + + class Sifgen: def __init__(self, cmd): self.cmd = cmd @@ -197,14 +364,14 @@ def __init__(self, cmd): def peggy2_build_ebrelayer_cmd(self, init_what, network_descriptor, tendermint_node, web3_provider, bridge_registry_contract_address, validator_mnemonic, chain_id, node=None, keyring_backend=None, - sign_with=None, symbol_translator_file=None, relayerdb_path=None, log_format=None, 
extra_args=None, + keyring_dir=None, sign_with=None, symbol_translator_file=None, log_format=None, extra_args=None, ethereum_private_key=None, ethereum_address=None, home=None, cwd=None ): env = _env_for_ethereum_address_and_key(ethereum_address, ethereum_private_key) args = [ self.binary, init_what, - "--network-descriptor", str(network_descriptor), # Network descriptor for the chain (31337) + "--network-descriptor", str(network_descriptor), # Network descriptor for the chain (9999) "--tendermint-node", tendermint_node, # URL to tendermint node "--web3-provider", web3_provider, # Ethereum web3 service address (ws://localhost:8545/) "--bridge-registry-contract-address", bridge_registry_contract_address, @@ -215,11 +382,11 @@ def peggy2_build_ebrelayer_cmd(self, init_what, network_descriptor, tendermint_n (["--node", node] if node else []) + \ (["--keyring-backend", keyring_backend] if keyring_backend else []) + \ (["--from", sign_with] if sign_with else []) + \ - (["--relayerdb-path", relayerdb_path] if relayerdb_path else []) + \ (["--home", home] if home else []) + \ + (["--keyring-dir", keyring_dir] if keyring_dir else []) + \ (["--symbol-translator-file", symbol_translator_file] if symbol_translator_file else []) + \ (["--log_format", log_format] if log_format else []) - return buildcmd(args, env=env, cwd=cwd) + return command.buildcmd(args, env=env, cwd=cwd) # Legacy stuff - pre-peggy2 # Called from IntegrationContext diff --git a/test/integration/framework/test_utils.py b/test/integration/framework/src/siftool/test_utils.py similarity index 71% rename from test/integration/framework/test_utils.py rename to test/integration/framework/src/siftool/test_utils.py index 37c246967d..abd1f23bf0 100644 --- a/test/integration/framework/test_utils.py +++ b/test/integration/framework/src/siftool/test_utils.py @@ -2,13 +2,14 @@ import os import random import time +from typing import Iterable, Mapping, Union, List import web3 +from web3.eth import Contract +from hexbytes 
import HexBytes +from web3.types import TxReceipt -import main -import eth -import sifchain -from common import * - +from siftool import eth, truffle, hardhat, run_env, sifchain, cosmos +from siftool.common import * # These are utilities to interact with running environment (running agains local ganache-cli/hardhat/sifnoded). # This is to replace test_utilities.py, conftest.py, burn_lock_functions.py and integration_test_context.py. @@ -40,6 +41,7 @@ def get_env_ctx(cmd=None, env_file=None, env_vars=None): if eth_user_private_keys: available_test_accounts = [] for address, key in [[e["address"], e["key"]] for e in eth_user_private_keys]: + address, key = eth.validate_address_and_private_key(address, key) available_test_accounts.append(address) ctx.eth.set_private_key(address, key) ctx.available_test_eth_accounts = available_test_accounts @@ -56,18 +58,19 @@ def get_env_ctx(cmd=None, env_file=None, env_vars=None): return ctx def get_env_ctx_peggy2(): - cmd = main.Integrator() + cmd = run_env.Integrator() dot_env_vars = json.loads(cmd.read_text_file(cmd.project.project_dir("smart-contracts/env.json"))) environment_vars = json.loads(cmd.read_text_file(cmd.project.project_dir("smart-contracts/environment.json"))) + deployed_contract_address_overrides = get_overrides_for_smart_contract_addresses(dot_env_vars) tmp = environment_vars["contractResults"]["contractAddresses"] - deployed_contract_addresses = { + deployed_contract_addresses = dict_merge({ "BridgeBank": tmp["bridgeBank"], "CosmosBridge": tmp["cosmosBridge"], "BridgeRegistry": tmp["bridgeRegistry"], "Rowan": tmp["rowanContract"], - } - abi_provider = HardhatAbiProvider(cmd, deployed_contract_addresses) + }, deployed_contract_address_overrides) + abi_provider = hardhat.HardhatAbiProvider(cmd, deployed_contract_addresses) # TODO We're mixing "OPERATOR" vs. 
"OWNER" # TODO Addressses from dot_env_vars are not in correct EIP55 "checksum" format @@ -76,7 +79,9 @@ def get_env_ctx_peggy2(): owner_address = web3.Web3.toChecksumAddress(dot_env_vars["ETH_ACCOUNT_OWNER_ADDRESS"]) owner_private_key = dot_env_vars.get("ETH_ACCOUNT_OWNER_PRIVATEKEY") if (owner_private_key is not None) and (owner_private_key.startswith("0x")): - owner_private_key = owner_private_key[2:] + owner_private_key = owner_private_key[2:] # TODO Remove + owner_address, owner_private_key = eth.validate_address_and_private_key(owner_address, owner_private_key) + rowan_source = dot_env_vars["ROWAN_SOURCE"] w3_url = eth.web3_host_port_url(dot_env_vars["ETH_HOST"], int(dot_env_vars["ETH_PORT"])) @@ -86,14 +91,16 @@ def get_env_ctx_peggy2(): sifnode_chain_id = "localnet" # TODO Mandatory, but not present either in environment_vars or dot_env_vars assert dot_env_vars["CHAINDIR"] == dot_env_vars["HOME"] sifnoded_home = os.path.join(dot_env_vars["CHAINDIR"], ".sifnoded") - ethereum_network_descriptor = dot_env_vars["ETH_CHAIN_ID"] + ethereum_network_descriptor = int(dot_env_vars["ETH_CHAIN_ID"]) eth_node_is_local = True generic_erc20_contract = "BridgeToken" + ceth_symbol = sifchain.sifchain_denom_hash(ethereum_network_descriptor, eth.NULL_ADDRESS) + assert ceth_symbol == "sifBridge99990x0000000000000000000000000000000000000000" ctx_eth = eth.EthereumTxWrapper(w3_conn, eth_node_is_local) ctx = EnvCtx(cmd, w3_conn, ctx_eth, abi_provider, owner_address, sifnoded_home, sifnode_url, sifnode_chain_id, - rowan_source, generic_erc20_contract) + rowan_source, ceth_symbol, generic_erc20_contract) if owner_private_key: ctx.eth.set_private_key(owner_address, owner_private_key) @@ -108,19 +115,17 @@ def get_env_ctx_peggy2(): assert ctx.eth.fixed_gas_args["gasPrice"] == 1 * eth.GWEI + 7 # Monkeypatching for peggy2 extras - # TODO These are set in main.py:Peggy2Environment.init_sifchain(), specifically "sifnoded tx ethbridge set-cross-chain-fee" + # TODO These are set in 
run_env.py:Peggy2Environment.init_sifchain(), specifically "sifnoded tx ethbridge set-cross-chain-fee" # Consider passing them via environment - ctx.cross_chain_fee_base = 1 - ctx.cross_chain_lock_fee = 1 - ctx.cross_chain_burn_fee = 1 - ctx.ethereum_network_descriptor = ethereum_network_descriptor - ctx.ceth_symbol = sifchain.sifchain_denom_hash(ctx.ethereum_network_descriptor, eth.NULL_ADDRESS) - assert ctx.ceth_symbol == "sif5ebfaf95495ceb5a3efbd0b0c63150676ec71e023b1043c40bcaaf91c00e15b2" + ctx.eth.cross_chain_fee_base = 1 + ctx.eth.cross_chain_lock_fee = 1 + ctx.eth.cross_chain_burn_fee = 1 + ctx.eth.ethereum_network_descriptor = ethereum_network_descriptor return ctx def get_env_ctx_peggy1(cmd=None, env_file=None, env_vars=None): - cmd = cmd or main.Integrator() + cmd = cmd or run_env.Integrator() if "ENV_FILE" in os.environ: env_file = os.environ["ENV_FILE"] @@ -164,6 +169,7 @@ def get_env_ctx_peggy1(cmd=None, env_file=None, env_vars=None): else: operator_address = env_vars["OPERATOR_ADDRESS"] operator_private_key = env_vars.get("OPERATOR_PRIVATE_KEY") + operator_address, operator_private_key = eth.validate_address_and_private_key(operator_address, operator_private_key) # Already added below # collected_private_keys[operator_address] = operator_private_key @@ -184,17 +190,18 @@ def get_env_ctx_peggy1(cmd=None, env_file=None, env_vars=None): if "SMART_CONTRACT_ARTIFACT_DIR" in env_vars: artifacts_dir = env_vars["SMART_CONTRACT_ARTIFACT_DIR"] elif deployment_name: - artifacts_dir = cmd.project.project_dir("smart-contracts/deployments/{}/build".format(deployment_name)) + artifacts_dir = cmd.project.project_dir("smart-contracts/deployments/{}/build/contracts".format(deployment_name)) if deployment_name == "sifchain-1": # Special case for Betanet because SifchainTestToken is not deployed there. # It's only available on Testnet, Devnet and in local environment. 
# However, BridgeToken will work on Betanet meaning that name(), symbol() and decimals() return meaningful values. generic_erc20_contract_name = "BridgeToken" else: - artifacts_dir = cmd.project.project_dir("smart-contracts/build") + artifacts_dir = cmd.project.project_dir("smart-contracts/build/contracts") sifnode_url = env_vars.get("SIFNODE") # Defaults to "tcp://localhost:26657" sifnoded_home = None # Implies default ~/.sifnoded + deployed_smart_contract_address_overrides = get_overrides_for_smart_contract_addresses(env_vars) w3_conn = eth.web3_connect(w3_url, websocket_timeout=90) @@ -207,9 +214,9 @@ def get_env_ctx_peggy1(cmd=None, env_file=None, env_vars=None): eth_node_is_local = deployment_name is None ctx_eth = eth.EthereumTxWrapper(w3_conn, eth_node_is_local) - abi_provider = GanacheAbiProvider(cmd, artifacts_dir, ethereum_network_id) + abi_provider = truffle.GanacheAbiProvider(cmd, artifacts_dir, ethereum_network_id, deployed_smart_contract_address_overrides) ctx = EnvCtx(cmd, w3_conn, ctx_eth, abi_provider, operator_address, sifnoded_home, sifnode_url, sifnode_chain_id, - rowan_source, generic_erc20_contract_name) + rowan_source, CETH, generic_erc20_contract_name) if operator_private_key: ctx.eth.set_private_key(operator_address, operator_private_key) @@ -242,83 +249,56 @@ def get_env_ctx_peggy1(cmd=None, env_file=None, env_vars=None): return ctx -def sif_addr_to_evm_arg(sif_address): - return sif_address.encode("UTF-8") +def get_overrides_for_smart_contract_addresses(env_vars): + mappings = { + "BridgeBank": "BRIDGE_BANK_ADDRESS", + "BridgeRegistry": "BRIDGE_REGISTRY_ADDRESS", + "CosmosBridge": "COSMOS_BRIDGE_ADDRESS", # Peggy2 only? + "Rowan": "ROWAN_ADDRESS", # Peggy2 only? 
+ "BridgeToken": "BRIDGE_TOKEN_ADDRESS", # Peggy1 only + } + return dict(((k, web3.Web3.toChecksumAddress(env_vars[v])) for k, v in mappings.items() if v in env_vars)) -class GanacheAbiProvider: - def __init__(self, cmd, artifacts_dir, ethereum_network_id): - self.cmd = cmd - self.artifacts_dir = artifacts_dir - self.ethereum_default_network_id = ethereum_network_id - - def get_descriptor(self, sc_name): - path = self.cmd.project.project_dir(self.artifacts_dir, "contracts/{}.json".format(sc_name)) - tmp = json.loads(self.cmd.read_text_file(path)) - abi = tmp["abi"] - bytecode = tmp["bytecode"] - deployed_address = None - if ("networks" in tmp) and (self.ethereum_default_network_id is not None): - str_network_id = str(self.ethereum_default_network_id) - if str_network_id in tmp["networks"]: - deployed_address = tmp["networks"][str_network_id]["address"] - return abi, bytecode, deployed_address - - -class HardhatAbiProvider: - def __init__(self, cmd, deployed_contract_addresses): - self.cmd = cmd - self.deployed_contract_addresses = deployed_contract_addresses - - def get_descriptor(self, sc_name): - relpath = { - "BridgeBank": ["BridgeBank"], - "BridgeToken": ["BridgeBank"], - "TrollToken": ["Mocks"], - }.get(sc_name, []) + [f"{sc_name}.sol", f"{sc_name}.json"] - path = os.path.join(self.cmd.project.project_dir("smart-contracts/artifacts/contracts"), *relpath) - tmp = json.loads(self.cmd.read_text_file(path)) - abi = tmp["abi"] - bytecode = tmp["bytecode"] - deployed_address = self.deployed_contract_addresses.get(sc_name) - return abi, bytecode, deployed_address +def sif_addr_to_evm_arg(sif_address): + return sif_address.encode("UTF-8") class EnvCtx: - def __init__(self, cmd, w3_conn, ctx_eth, abi_provider, operator, sifnoded_home, sifnode_url, sifnode_chain_id, - rowan_source, generic_erc20_contract + def __init__(self, cmd, w3_conn: web3.Web3, ctx_eth, abi_provider, operator, sifnoded_home, sifnode_url, sifnode_chain_id, + rowan_source, ceth_symbol, 
generic_erc20_contract ): self.cmd = cmd self.w3_conn = w3_conn - self.eth = ctx_eth - self.abi_provider = abi_provider + self.eth: eth.EthereumTxWrapper = ctx_eth + self.abi_provider: hardhat.HardhatAbiProvider = abi_provider self.operator = operator self.sifnode = sifchain.Sifnoded(self.cmd, home=sifnoded_home) self.sifnode_url = sifnode_url self.sifnode_chain_id = sifnode_chain_id + # Refactoring in progress: moving stuff into separate client that encapsulates things like url, home and chain_id + self.sifnode_client = sifchain.SifnodeClient(self.cmd, node=sifnode_url, home=sifnoded_home, chain_id=sifnode_chain_id, grpc_port=9090) + self.sifnode_client.ctx = self # For cross-chain fees for Peggy2 self.rowan_source = rowan_source + self.ceth_symbol = ceth_symbol self.generic_erc20_contract = generic_erc20_contract self.available_test_eth_accounts = None + def get_current_block_number(self) -> int: + return self.eth.w3_conn.eth.block_number + def advance_block_w3(self, number): for _ in range(number): + # See smart-contracts/node_modules/@openzeppelin/test-helpers/src/time.js:advanceBlockTo() self.w3_conn.provider.make_request("evm_mine", []) - def advance_block_truffle(self, number): - args = ["npx", "truffle", "exec", "scripts/advanceBlock.js", str(number)] - self.cmd.execst(args, cwd=main.project_dir("smart-contracts")) - - def advance_block(self, number): - if on_peggy2_branch: - self.advance_block_w3(number) - else: - self.advance_block_truffle(number) # TODO Probably calls the same, check and remove - - def advance_blocks(self): + def advance_blocks(self, number=50): # TODO Move to eth (it should be per-w3_conn) if self.eth.is_local_node: - self.advance_block(50) - # Otherwise just wait + previous_block = self.eth.w3_conn.eth.block_number + self.advance_block_w3(number) + assert self.eth.w3_conn.eth.block_number - previous_block >= number + # Otherwise do nothing (e.g. 
wait for balance change takes longer) def get_blocklist_sc(self): abi, _, address = self.abi_provider.get_descriptor("Blocklist") @@ -327,6 +307,13 @@ def get_blocklist_sc(self): def get_bridge_bank_sc(self): abi, _, address = self.abi_provider.get_descriptor("BridgeBank") + assert address, "No address for BridgeBank" + result = self.w3_conn.eth.contract(address=address, abi=abi) + return result + + def get_cosmos_bridge_sc(self) -> Contract: + abi, _, address = self.abi_provider.get_descriptor("CosmosBridge") + assert address, "No address for CosmosBridge" result = self.w3_conn.eth.contract(address=address, abi=abi) return result @@ -334,7 +321,7 @@ def get_generic_erc20_sc(self, address): abi, _, _ = self.abi_provider.get_descriptor(self.generic_erc20_contract) return self.w3_conn.eth.contract(abi=abi, address=address) - def get_erc20_token_balance(self, token_addr, eth_addr): + def get_erc20_token_balance(self, token_addr: eth.Address, eth_addr: eth.Address) -> int: token_sc = self.get_generic_erc20_sc(token_addr) return token_sc.functions.balanceOf(eth_addr).call() @@ -367,12 +354,14 @@ def smart_contract_get_past_events(self, sc, event_name, from_block=None, to_blo finally: self.w3_conn.eth.uninstall_filter(filter.filter_id) - def tx_deploy_new_generic_erc20_token(self, deployer_addr, name, symbol, decimals): + def tx_deploy_new_generic_erc20_token(self, deployer_addr: str, name: str, symbol: str, decimals: int, cosmosDenom: str = None) -> Contract: # return self.tx_deploy("SifchainTestToken", self.operator, [name, symbol, decimals]) if on_peggy2_branch: # Use BridgeToken assert self.generic_erc20_contract == "BridgeToken" - cosmosDenom = "erc20denom" # TODO Dummy variable since we're using BridgeToken instead of SifchainTestToken + if cosmosDenom is None: + cosmosDenom = "erc20denom" # TODO Dummy variable since we're using BridgeToken instead of SifchainTestToken + constructor_args = [name, symbol, decimals, cosmosDenom] else: # Use SifchainTestToken for 
TestNet and Devnet, and BridgeToken for Betanet @@ -389,6 +378,16 @@ def tx_update_bridge_bank_whitelist(self, token_addr, value=True): bridge_bank = self.get_bridge_bank_sc() return self.eth.transact(bridge_bank.functions.updateEthWhiteList, self.operator)(token_addr, value) + def tx_grant_minter_role(self, token_sc: Contract, minter_addr: str): + self.get_erc20_token_minter_role(token_sc, minter_addr) + minter_role_hash = token_sc.functions.MINTER_ROLE().call() + self.eth.transact(token_sc.functions.grantRole, self.operator)(minter_role_hash, minter_addr) + assert self.get_erc20_token_minter_role(token_sc, minter_addr) is True + + def get_erc20_token_minter_role(self, token_sc: Contract, minter_addr: str) -> bool: + minter_role_hash = token_sc.functions.MINTER_ROLE().call() + return token_sc.functions.hasRole(minter_role_hash, minter_addr).call() + def tx_approve(self, token_sc, from_addr, to_addr, amount): return self.eth.transact(token_sc.functions.approve, from_addr)(to_addr, amount) @@ -408,18 +407,32 @@ def tx_bridge_bank_lock_erc20(self, token_addr, from_eth_acct, to_sif_acct, amou tx_opts = {"value": 0} return self.eth.transact(bridge_bank.functions.lock, from_eth_acct, tx_opts=tx_opts)(recipient, token_addr, amount) + def tx_bridge_bank_burn_erc20(self, token_addr: str, from_eth_acct: str, to_sif_acct: str, amount: int) -> HexBytes: + recipient = sif_addr_to_evm_arg(to_sif_acct) + bridge_bank = self.get_bridge_bank_sc() + # When transfering ERC20, the amount needs to be passed as argument, and the "message.value" should be 0 + tx_opts = {"value": 0} + return self.eth.transact(bridge_bank.functions.burn, from_eth_acct, tx_opts=tx_opts)(recipient, token_addr, amount) + + def tx_bridge_bank_add_existing_bridge_token(self, token_addr: str) -> HexBytes: + bridge_bank = self.get_bridge_bank_sc() + tx_opts = {"value": 0} + return self.eth.transact(bridge_bank.functions.addExistingBridgeToken, self.operator, tx_opts=tx_opts)(token_addr) + def 
tx_approve_and_lock(self, token_sc, from_eth_acct, to_sif_acct, amount): bridge_bank_sc = self.get_bridge_bank_sc() txhash1 = self.tx_approve(token_sc, self.operator, bridge_bank_sc.address, amount) txhash2 = self.tx_bridge_bank_lock_erc20(token_sc.address, from_eth_acct, to_sif_acct, amount) + log.debug("tx_approve_and_lock: {} '{}' ({}) from {} to {}".format(amount, token_sc.functions.name().call(), + token_sc.functions.symbol().call(), from_eth_acct, to_sif_acct)) return txhash1, txhash2 # # Used from test_integration_framework.py, test_eth_transfers.py - def deploy_new_generic_erc20_token(self, name, symbol, decimals, owner=None, mint_amount=None, mint_recipient=None): + def deploy_new_generic_erc20_token(self, name: str, symbol: str, decimals: int, owner: str = None, mint_amount: int = None, mint_recipient: str = None, cosmosDenom: str = None) -> Contract: owner = self.operator if owner is None else owner - txhash = self.tx_deploy_new_generic_erc20_token(owner, name, symbol, decimals) + txhash = self.tx_deploy_new_generic_erc20_token(owner, name, symbol, decimals, cosmosDenom) txrcpt = self.eth.wait_for_transaction_receipt(txhash) token_addr = txrcpt.contractAddress token_sc = self.get_generic_erc20_sc(token_addr) @@ -452,6 +465,8 @@ def update_bridge_bank_whitelist(self, token_addr, value): # Call of updateEthWhiteList will fail if we try to remove an item from whitelist which is not on the whitelist. return self.eth.wait_for_transaction_receipt(self.tx_update_bridge_bank_whitelist(token_addr, value)) + # This function walks through all historical events LogWhiteListUpdate of a BridgeBanksmart contract and builds the + # current whitelist from live on-chain data. 
def get_whitelisted_tokens_from_bridge_bank_past_events(self): bridge_bank = self.get_bridge_bank_sc() past_events = self.smart_contract_get_past_events(bridge_bank, "LogWhiteListUpdate") @@ -459,7 +474,7 @@ def get_whitelisted_tokens_from_bridge_bank_past_events(self): for e in past_events: token_addr = e.args["_token"] value = e.args["_value"] - assert self.eth.w3_conn.toChecksumAddress(token_addr) == token_addr + assert web3.Web3.toChecksumAddress(token_addr) == token_addr # Logically the whitelist only consists of entries that have the last value of True. # If the data is clean, then for each token_addr we should first see a True event, possibly # followed by alternating False and True. The last value is the active one. @@ -542,30 +557,7 @@ def send_erc20_from_ethereum_to_sifchain(self, from_eth_addr, dest_sichain_addr, self.approve_erc20_token(token_sc, from_eth_addr, amount) self.bridge_bank_lock_eth(from_eth_addr, dest_sichain_addr, amount) - def send_from_sifchain_to_ethereum(self, from_sif_addr, to_eth_addr, amount, denom): - """ Sends ETH from Sifchain to Ethereum (burn) """ - - # TODO Move to sifchain.py - - assert on_peggy2_branch, "Only for Peggy2.0" - - direction = "burn" - cross_chain_ceth_fee = self.cross_chain_fee_base * self.cross_chain_burn_fee # TODO - args = ["tx", "ethbridge", direction, from_sif_addr, to_eth_addr, str(amount), denom, str(cross_chain_ceth_fee), - "--network-descriptor", str(self.ethereum_network_descriptor), # Mandatory - "--from", from_sif_addr, # Mandatory, either name from keyring or address - "--gas-prices", "0.5rowan", - "--gas-adjustment", "1.5", - "-y" - ] + \ - self._sifnoded_home_arg() + \ - self._sifnoded_chain_id_and_node_arg() - res = self.sifnode.sifnoded_exec(args, keyring_backend=self.sifnode.keyring_backend) - result = json.loads(stdout(res)) - assert "failed to execute message" not in result["raw_log"] - return json.loads(stdout(res)) - - def create_sifchain_addr(self, moniker=None, fund_amounts=None): + def 
create_sifchain_addr(self, moniker: str = None, fund_amounts: Union[cosmos.Balance, cosmos.LegacyBalance] = None): """ Generates a new sifchain address in test keyring. If moniker is given, uses it, otherwise generates a random one 'test-xxx'. If fund_amounts is given, the sifchain funds are transferred @@ -575,18 +567,24 @@ def create_sifchain_addr(self, moniker=None, fund_amounts=None): acct = self.sifnode.keys_add_1(moniker) sif_address = acct["address"] if fund_amounts: + fund_amounts = cosmos.balance_normalize(fund_amounts) # Convert from old format if neccessary + rowan_source_balances = self.get_sifchain_balance(self.rowan_source) + for denom, required_amount in fund_amounts.items(): + available_amount = rowan_source_balances.get(denom, 0) + assert available_amount >= required_amount, "Rowan source {} would need {}, but only has {}".format( + self.rowan_source, sif_format_amount(required_amount, denom), sif_format_amount(available_amount, denom)) old_balances = self.get_sifchain_balance(sif_address) self.send_from_sifchain_to_sifchain(self.rowan_source, sif_address, fund_amounts) self.wait_for_sif_balance_change(sif_address, old_balances, min_changes=fund_amounts) + new_balances = self.get_sifchain_balance(sif_address) + assert cosmos.balance_zero(cosmos.balance_sub(new_balances, fund_amounts)) return sif_address - # smart-contracts/scripts/test/{sendLockTx.js OR sendBurnTx.js} - # sendBurnTx is called when sifchain_symbol == "rowan", sendLockTx otherwise - def send_from_ethereum_to_sifchain(self): - assert False,"Not implemented yet" # TODO - - def send_from_sifchain_to_sifchain(self, from_sif_addr, to_sif_addr, amounts): - amounts_string = ",".join([sif_format_amount(*a) for a in amounts]) + def send_from_sifchain_to_sifchain(self, from_sif_addr: cosmos.Address, to_sif_addr: cosmos.Address, + amounts: cosmos.Balance + ): + amounts = cosmos.balance_normalize(amounts) + amounts_string = cosmos.balance_format(amounts) args = ["tx", "bank", "send", 
from_sif_addr, to_sif_addr, amounts_string] + \ self._sifnoded_chain_id_and_node_arg() + \ self._sifnoded_fees_arg() + \ @@ -594,61 +592,59 @@ def send_from_sifchain_to_sifchain(self, from_sif_addr, to_sif_addr, amounts): res = self.sifnode.sifnoded_exec(args, sifnoded_home=self.sifnode.home, keyring_backend=self.sifnode.keyring_backend) retval = json.loads(stdout(res)) raw_log = retval["raw_log"] - if "insufficient funds" in raw_log: - raise Exception(raw_log) + for bad_thing in ["insufficient funds", "signature verification failed"]: + if bad_thing in raw_log: + raise Exception(raw_log) return retval - # TODO - # def generate_test_account(self, target_ceth_balance=10**18, target_rowan_balance=10**18): - # sifchain_addr = self.create_sifchain_addr() - # self.send_eth_from_ethereum_to_sifchain(self.operator, sifchain_addr, target_ceth_balance) - # self.send_from_sifchain_to_sifchain(self.rowan_source, sifchain_addr, target_rowan_balance) - # return sifchain_addr - - def get_sifchain_balance(self, sif_addr): + def get_sifchain_balance(self, sif_addr: cosmos.Address) -> cosmos.Balance: args = ["query", "bank", "balances", sif_addr, "--limit", str(100000000), "--output", "json"] + \ self._sifnoded_chain_id_and_node_arg() res = self.sifnode.sifnoded_exec(args, sifnoded_home=self.sifnode.home) res = json.loads(stdout(res))["balances"] - return dict(((x["denom"], int(x["amount"])) for x in res)) - - def sif_balances_equal(self, dict1, dict2): - d2k = set(dict2.keys()) - for k in dict1.keys(): - if (k not in dict2) or (dict1[k] != dict2[k]): - return False - d2k.remove(k) - return len(d2k) == 0 - - def sif_balance_delta(self, balances1, balances2): - all_denoms = set(balances1.keys()) - all_denoms.update(balances2.keys()) - result = {} - for denom in all_denoms: - change = balances2.get(denom, 0) - balances1.get(denom, 0) - if change != 0: - result[denom] = change - return result - - def wait_for_sif_balance_change(self, sif_addr, old_balances, min_changes=None, 
polling_time=1, timeout=90): + return {denom: amount for denom, amount in ((x["denom"], int(x["amount"])) for x in res) if amount != 0} + + # Unless timed out, this function will exit: + # - if min_changes are given: when changes are greater. + # - if expected_balance is given: when balances are equal to that. + # - if neither min_changes nor expected_balance are given: when anything changes. + # You cannot use min_changes and expected_balance at the same time. + def wait_for_sif_balance_change(self, sif_addr: cosmos.Address, old_balance: cosmos.Balance, + min_changes: cosmos.CompatBalance = None, expected_balance: cosmos.CompatBalance = None, polling_time: int = 1, + timeout: int = 90, change_timeout: int = None + ) -> cosmos.Balance: + assert (min_changes is None) or (expected_balance is None), "Cannot use both min_changes and expected_balance" + min_changes = None if min_changes is None else cosmos.balance_normalize(min_changes) + expected_balance = None if expected_balance is None else cosmos.balance_normalize(expected_balance) start_time = time.time() - result = None - while result is None: - new_balances = self.get_sifchain_balance(sif_addr) - if min_changes is not None: - have_all = True - for amount, denom in min_changes: - change = new_balances.get(denom, 0) - old_balances.get(denom, 0) - have_all = have_all and change >= amount - if have_all: - return new_balances + last_change_time = None + last_changed_balance = None + while True: + new_balance = self.get_sifchain_balance(sif_addr) + delta = cosmos.balance_sub(new_balance, old_balance) + if expected_balance is not None: + should_return = cosmos.balance_equal(expected_balance, new_balance) + elif min_changes is not None: + should_return = cosmos.balance_exceeds(delta, min_changes) else: - if not self.sif_balances_equal(old_balances, new_balances): - return new_balances - time.sleep(polling_time) + should_return = not cosmos.balance_zero(delta) + if should_return: + return new_balance now = time.time() - 
if now - start_time > timeout: + if (timeout is not None) and (now - start_time > timeout): raise Exception("Timeout waiting for sif balance to change") + if last_change_time is None: + last_changed_balance = new_balance + last_change_time = now + else: + delta = cosmos.balance_sub(new_balance, last_changed_balance) + if not cosmos.balance_zero(delta): + last_changed_balance = new_balance + last_change_time = now + log.debug("New state detected: {}".format(delta)) + if (change_timeout is not None) and (now - last_change_time > change_timeout): + raise Exception("Timeout waiting for sif balance to change") + time.sleep(polling_time) def eth_symbol_to_sif_symbol(self, eth_token_symbol): # TODO sifchain.use sifchain_denom_hash() if on_peggy2_branch @@ -740,7 +736,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): # self.scavenge_ether() pass - def wait_for_eth_balance_change(self, eth_addr, old_balance, timeout=90, polling_time=1, token_addr=None): + def wait_for_eth_balance_change(self, eth_addr, old_balance: int, timeout=90, polling_time=1, token_addr=None): start_time = time.time() while True: new_balance = self.get_erc20_token_balance(token_addr, eth_addr) if token_addr \ @@ -752,6 +748,22 @@ def wait_for_eth_balance_change(self, eth_addr, old_balance, timeout=90, polling if now - start_time > timeout: raise Exception("Timeout waiting for Ethereum balance to change") + def wait_for_new_bridge_token_created(self, cosmos_denom: str, timeout: int = 90, polling_time: int = 1) -> str: + start_time = time.time() + while True: + cosmos_bridge_sc = self.get_cosmos_bridge_sc() + events = self.smart_contract_get_past_events(cosmos_bridge_sc, "LogNewBridgeTokenCreated") + + if len(events) > 0: + for e in events: + if e.args["cosmosDenom"] == cosmos_denom: + return e.args["bridgeTokenAddress"] + + time.sleep(polling_time) + now = time.time() + if now - start_time > timeout: + raise Exception("Timeout waiting for Ethereum balance to change") + def 
create_and_fund_eth_account(self, fund_from=None, fund_amount=None): if self.available_test_eth_accounts is not None: address = self.available_test_eth_accounts.pop(0) @@ -763,7 +775,8 @@ def create_and_fund_eth_account(self, fund_from=None, fund_amount=None): if fund_amount is not None: fund_from = fund_from or self.operator funder_balance_before = self.eth.get_eth_balance(fund_from) - assert funder_balance_before >= fund_amount + assert funder_balance_before >= fund_amount, "Cannot fund created account with ETH: need {}, have {}" \ + .format(fund_amount, funder_balance_before) target_balance_before = self.eth.get_eth_balance(address) difference = fund_amount - target_balance_before if difference > 0: @@ -777,14 +790,48 @@ def bridge_bank_lock_eth(self, from_eth_acct, to_sif_acct, amount): txhash = self.tx_bridge_bank_lock_eth(from_eth_acct, to_sif_acct, amount) return self.eth.wait_for_transaction_receipt(txhash) - def bridge_bank_lock_erc20(self, token_addr, from_eth_acct, to_sif_acct, amount): - txhash = self.tx_bridge_bank_lock_erc20(token_addr, from_eth_acct, to_sif_acct, amount) + def bridge_bank_lock_erc20(self, token_sc, from_eth_acct, to_sif_acct, amount): + txhash = self.tx_bridge_bank_lock_erc20(token_sc.address, from_eth_acct, to_sif_acct, amount) return self.eth.wait_for_transaction_receipt(txhash) + def bridge_bank_burn_erc20(self, token_sc: Contract, from_eth_acct: str, to_sif_acct: str, amount: int) -> TxReceipt: + txhash = self.tx_bridge_bank_burn_erc20(token_sc.address, from_eth_acct, to_sif_acct, amount) + return self.eth.wait_for_transaction_receipt(txhash) + + def bridge_bank_add_existing_bridge_token(self, token_addr: str): + txhash = self.tx_bridge_bank_add_existing_bridge_token(token_addr) + self.eth.wait_for_transaction_receipt(txhash) + final_value = self.get_cosmos_token_in_white_list(token_addr) + assert final_value is True + + def get_cosmos_token_in_white_list(self, token_addr: str) -> bool: + bridge_bank_sc = 
self.get_bridge_bank_sc() + return bridge_bank_sc.functions.getCosmosTokenInWhiteList(token_addr).call() + + def get_destination_contract_address(self, cosmos_denom: str) -> Contract: + cosmos_bridge_sc = self.get_cosmos_bridge_sc() + return cosmos_bridge_sc.functions.cosmosDenomToDestinationAddress(cosmos_denom).call() + + # TODO At the moment this is only for Ethereum-native assets (ETH and ERC20 tokens) which always use "lock". + # For Sifchain-native assets (rowan) we need to use "burn". + # Compare: smart-contracts/scripts/test/{sendLockTx.js OR sendBurnTx.js} + # sendBurnTx is called when sifchain_symbol == "rowan", sendLockTx otherwise + def send_from_ethereum_to_sifchain(self, from_eth_acct: str, to_sif_acct: str, amount: int, token_sc: Contract = None, isLock: bool = True) -> TxReceipt: + if token_sc is None: + # ETH transfer + self.bridge_bank_lock_eth(from_eth_acct, to_sif_acct, amount) + else: + # ERC20 token transfer + self.approve_erc20_token(token_sc, from_eth_acct, amount) + if isLock: + self.bridge_bank_lock_erc20(token_sc, from_eth_acct, to_sif_acct, amount) + else: + self.bridge_bank_burn_erc20(token_sc, from_eth_acct, to_sif_acct, amount) + # Peggy1-specific def set_ofac_blocklist_to(self, addrs): blocklist_sc = self.get_blocklist_sc() - addrs = [self.eth.w3_conn.toChecksumAddress(addr) for addr in addrs] + addrs = [web3.Web3.toChecksumAddress(addr) for addr in addrs] existing_entries = blocklist_sc.functions.getFullList().call() to_add = [addr for addr in addrs if addr not in existing_entries] to_remove = [addr for addr in existing_entries if addr not in addrs] @@ -806,7 +853,8 @@ def sanity_check(self): assert (self.sifnode_chain_id != "sifchain-testnet-1") or (bridge_bank_sc.address == "0x6CfD69783E3fFb44CBaaFF7F509a4fcF0d8e2835") assert (self.sifnode_chain_id != "sifchain-devnet-1") or (bridge_bank_sc.address == "0x96DC6f02C66Bbf2dfbA934b8DafE7B2c08715A73") assert (self.sifnode_chain_id != "localnet") or (bridge_bank_sc.address == 
"0x30753E4A8aad7F8597332E813735Def5dD395028") - assert bridge_bank_sc.functions.owner().call() == self.operator + assert bridge_bank_sc.functions.owner().call() == self.operator, \ + "BridgeBank owner is {}, but OPERATOR is {}".format(bridge_bank_sc.functions.owner().call(), self.operator) operator_balance = self.eth.get_eth_balance(self.operator) / eth.ETH assert operator_balance >= 1, "Insufficient operator balance, should be at least 1 ETH" @@ -824,9 +872,9 @@ def sanity_check(self): class ERC20TokenData: def __init__(self, symbol, name, decimals): - self.symbol = symbol - self.name = name - self.decimals = decimals + self.symbol: string = symbol + self.name: string = name + self.decimals: int = decimals def recover_eth_from_test_accounts(): @@ -844,3 +892,12 @@ def recover_eth_from_test_accounts(): ctx.eth.send_eth(addr, ctx.operator, to_recover) total_recovered += to_recover log.info("Total recovered: {} ETH".format(total_recovered/eth.ETH)) + + +def sifnoded_parse_output_lines(stdout): + pat = re.compile("^(.*?): (.*)$") + result = {} + for line in stdout.splitlines(): + m = pat.match(line) + result[m[1]] = m[2] + return result diff --git a/test/integration/framework/src/siftool/truffle.py b/test/integration/framework/src/siftool/truffle.py new file mode 100644 index 0000000000..7c197bbd96 --- /dev/null +++ b/test/integration/framework/src/siftool/truffle.py @@ -0,0 +1,43 @@ +import json + + +class Ganache: + @staticmethod + def start_ganache_cli(env, mnemonic=None, db=None, port=None, host=None, network_id=None, gas_price=None, + gas_limit=None, default_balance_ether=None, block_time=None, account_keys_path=None, log_file=None + ): + args = ["ganache-cli"] + \ + (["--mnemonic", " ".join(mnemonic)] if mnemonic else []) + \ + (["--db", db] if db else []) + \ + (["--port", str(port)] if port is not None else []) + \ + (["--host", host] if host else []) + \ + (["--networkId", str(network_id)] if network_id is not None else []) + \ + (["--gasPrice", 
str(gas_price)] if gas_price is not None else []) + \ + (["--gasLimit", str(gas_limit)] if gas_limit is not None else []) + \ + (["--defaultBalanceEther", str(default_balance_ether)] if default_balance_ether is not None else []) + \ + (["--blockTime", str(block_time)] if block_time is not None else []) + \ + (["--account_keys_path", account_keys_path] if account_keys_path is not None else []) + return env.popen(args, log_file=log_file) + + +class GanacheAbiProvider: + def __init__(self, cmd, artifacts_dir, ethereum_network_id, deployed_smart_contract_address_overrides): + self.cmd = cmd + self.artifacts_dir = artifacts_dir + self.ethereum_default_network_id = ethereum_network_id + self.deployed_smart_contract_address_overrides = deployed_smart_contract_address_overrides + + def get_descriptor(self, sc_name): + path = self.cmd.project.project_dir(self.artifacts_dir, "{}.json".format(sc_name)) + tmp = json.loads(self.cmd.read_text_file(path)) + abi = tmp["abi"] + bytecode = tmp["bytecode"] + deployed_address = None + if (self.deployed_smart_contract_address_overrides is not None) and (sc_name in self.deployed_smart_contract_address_overrides): + deployed_address = self.deployed_smart_contract_address_overrides[sc_name] + else: + if ("networks" in tmp) and (self.ethereum_default_network_id is not None): + str_network_id = str(self.ethereum_default_network_id) + if str_network_id in tmp["networks"]: + deployed_address = tmp["networks"][str_network_id]["address"] + return abi, bytecode, deployed_address diff --git a/test/integration/framework/test_geth.py b/test/integration/framework/test/test_geth.py similarity index 91% rename from test/integration/framework/test_geth.py rename to test/integration/framework/test/test_geth.py index ead1d2a442..d252e143e7 100644 --- a/test/integration/framework/test_geth.py +++ b/test/integration/framework/test/test_geth.py @@ -1,6 +1,6 @@ -import main as mod_main -import geth as mod_geth -from eth import ETH +import siftool.main as 
mod_main +import siftool.geth as mod_geth +from siftool.eth import ETH def geth_proof_of_concept(): diff --git a/test/integration/framework/truffle.py b/test/integration/framework/truffle.py deleted file mode 100644 index 52c08016fd..0000000000 --- a/test/integration/framework/truffle.py +++ /dev/null @@ -1,17 +0,0 @@ -class Ganache: - @staticmethod - def start_ganache_cli(env, mnemonic=None, db=None, port=None, host=None, network_id=None, gas_price=None, - gas_limit=None, default_balance_ether=None, block_time=None, account_keys_path=None, log_file=None - ): - args = ["ganache-cli"] + \ - (["--mnemonic", " ".join(mnemonic)] if mnemonic else []) + \ - (["--db", db] if db else []) + \ - (["--port", str(port)] if port is not None else []) + \ - (["--host", host] if host else []) + \ - (["--networkId", str(network_id)] if network_id is not None else []) + \ - (["--gasPrice", str(gas_price)] if gas_price is not None else []) + \ - (["--gasLimit", str(gas_limit)] if gas_limit is not None else []) + \ - (["--defaultBalanceEther", str(default_balance_ether)] if default_balance_ether is not None else []) + \ - (["--blockTime", str(block_time)] if block_time is not None else []) + \ - (["--account_keys_path", account_keys_path] if account_keys_path is not None else []) - return env.popen(args, log_file=log_file) diff --git a/test/integration/setup-linux-environment-user.sh b/test/integration/setup-linux-environment-user.sh index 1ae9222d3c..c6c6f2d22c 100644 --- a/test/integration/setup-linux-environment-user.sh +++ b/test/integration/setup-linux-environment-user.sh @@ -27,4 +27,5 @@ echo '. ~/.bash_profile' >> ~/.bashrc . 
~/.bash_profile curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.33.0 -python3 -m pip install -U pytest web3 +python3 -m pip install -U pytest web3 grpcio-tools + diff --git a/test/integration/src/py/conftest.py b/test/integration/src/py/conftest.py index 04442d63ff..c1817e6155 100644 --- a/test/integration/src/py/conftest.py +++ b/test/integration/src/py/conftest.py @@ -4,7 +4,7 @@ import threading import pytest -import integration_test_context +import siftool_path import test_utilities from burn_lock_functions import decrease_log_level, force_log_level @@ -381,8 +381,8 @@ def ctx(request): snapshot_name = request.node.get_closest_marker("snapshot_name") if snapshot_name is not None: snapshot_name = snapshot_name.args[0] - from integration_framework import test_utils - logging.error("Context setup: snapshot_name={}".format(repr(snapshot_name))) + logging.debug("Context setup: snapshot_name={}".format(repr(snapshot_name))) + from siftool import test_utils with test_utils.get_test_env_ctx() as ctx: yield ctx logging.debug("Test context cleanup") diff --git a/test/integration/src/py/integration_framework.py b/test/integration/src/py/integration_framework.py deleted file mode 100644 index 357a6dd1d5..0000000000 --- a/test/integration/src/py/integration_framework.py +++ /dev/null @@ -1,24 +0,0 @@ -import os -import sys - -# Temporary workaround to include integration framework -project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), *([os.path.pardir] * 4))) -base_dir = os.path.join(project_root, "test", "integration", "framework") -enabled = False -for p in sys.path: - enabled = enabled or os.path.realpath(p) == os.path.realpath(base_dir) -if not enabled: - sys.path = sys.path + [base_dir] - -import command -import cosmos -import eth -import main -import common -import project -import geth -import hardhat -import truffle -import test_utils -import inflate_tokens -import sifchain 
diff --git a/test/integration/src/py/integration_test_context.py b/test/integration/src/py/integration_test_context.py index 220693a6db..27238c04f8 100644 --- a/test/integration/src/py/integration_test_context.py +++ b/test/integration/src/py/integration_test_context.py @@ -3,7 +3,8 @@ import logging import test_utilities -from integration_framework import main +import siftool_path +from siftool import main # TODO This class is obsolete, transitioning to test_utils.Peggy1EnvCtx diff --git a/test/integration/src/py/siftool_path.py b/test/integration/src/py/siftool_path.py new file mode 100644 index 0000000000..2d0c2039f6 --- /dev/null +++ b/test/integration/src/py/siftool_path.py @@ -0,0 +1,16 @@ +import os +import sys + +# Temporary workaround to include siftool +project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), *([os.path.pardir] * 4))) +base_dir = os.path.join(project_root, "test", "integration", "framework") +src_dir = os.path.join(base_dir, "src") +build_generated_dir = os.path.join(base_dir, "build", "generated") +paths = [src_dir, build_generated_dir] +enabled = False +paths_to_add = [] +for p in paths: + enabled = any([os.path.realpath(p) == os.path.realpath(s) for s in sys.path]) + if not enabled: + paths_to_add.append(p) +sys.path.extend(paths_to_add) diff --git a/test/integration/src/py/test_inflate_tokens.py b/test/integration/src/py/test_inflate_tokens.py index 576cbbdf46..2de251d0b6 100644 --- a/test/integration/src/py/test_inflate_tokens.py +++ b/test/integration/src/py/test_inflate_tokens.py @@ -1,8 +1,9 @@ import pytest -from integration_framework import main, common, eth, test_utils, inflate_tokens -from inflate_tokens import InflateTokens -from common import * +import siftool_path +from siftool import eth, sifchain +from siftool.inflate_tokens import InflateTokens +from siftool.common import * # Sifchain wallets to which we want to distribute @@ -74,21 +75,38 @@ @pytest.mark.skipif("on_peggy2_branch") def 
test_inflate_tokens_short(ctx): - amount = 12 * 10**10 + _test_inflate_tokens_parametrized(ctx, 3) + + +# This test takes >1h, times out in GitHub CI +@pytest.mark.skipif("on_peggy2_branch") +@pytest.mark.skipif("in_github_ci") +def test_inflate_tokens_long(ctx): + _test_inflate_tokens_parametrized(ctx, 300) + + +def _test_inflate_tokens_parametrized(ctx, number_of_tokens): + amount_in_tokens = 123 + amount_gwei = 456 wallets = test_wallets[:2] # TODO Read tokens from file requested_tokens = [{ - "symbol": ctx.eth_symbol_to_sif_symbol(t.symbol), + "symbol": t.symbol, "name": t.name, "decimals": t.decimals, - # Those are ignored - # "imageUrl": None, - # "network": None, - } for t in [ctx.generate_random_erc20_token_data() for _ in range(3)]] + } for t in [ctx.generate_random_erc20_token_data() for _ in range(number_of_tokens)]] script = InflateTokens(ctx) - script.transfer(requested_tokens, amount, wallets) + + balances_before = [ctx.get_sifchain_balance(w) for w in wallets] + script.transfer(requested_tokens, amount_in_tokens, wallets, amount_gwei) + balances_delta = [sifchain.balance_delta(balances_before[i], ctx.get_sifchain_balance(w)) for i, w in enumerate(wallets)] + + for balances_delta in balances_delta: + for t in requested_tokens: + assert balances_delta[ctx.eth_symbol_to_sif_symbol(t["symbol"])] == amount_in_tokens * 10**t["decimals"] + assert balances_delta.get(ctx.ceth_symbol, 0) == amount_gwei * eth.GWEI @pytest.mark.skipif("on_peggy2_branch") diff --git a/test/integration/src/py/test_ofac_blocklist.py b/test/integration/src/py/test_ofac_blocklist.py index af6a81c3d5..0253310b49 100644 --- a/test/integration/src/py/test_ofac_blocklist.py +++ b/test/integration/src/py/test_ofac_blocklist.py @@ -1,7 +1,8 @@ import pytest -from integration_framework import main, common, eth, test_utils, inflate_tokens -from common import * +import siftool_path +from siftool import eth, test_utils +from siftool.common import * max_gas_required = 200000 @@ -23,7 +24,7 @@ 
def bridge_bank_lock_eth(ctx, from_eth_acct, to_sif_acct, amount): def bridge_bank_lock_erc20(ctx, bridge_token, from_eth_acct, to_sif_acct, amount): assert ctx.eth.get_eth_balance(from_eth_acct) > max_gas_required * max_gas_price, "Not enough gas for test" assert ctx.get_erc20_token_balance(bridge_token.address, from_eth_acct) >= amount, "Not enough tokens for test" - return ctx.bridge_bank_lock_erc20(bridge_token.address, from_eth_acct, to_sif_acct, amount) + return ctx.bridge_bank_lock_erc20(bridge_token, from_eth_acct, to_sif_acct, amount) def is_blocklisted_exception(ctx, exception): return ctx.eth.is_contract_logic_error(exception, "Address is blocklisted") diff --git a/test/integration/src/py/test_integration_framework.py b/test/integration/src/py/test_siftool_framework.py similarity index 98% rename from test/integration/src/py/test_integration_framework.py rename to test/integration/src/py/test_siftool_framework.py index e702c242c3..df7e86df3d 100644 --- a/test/integration/src/py/test_integration_framework.py +++ b/test/integration/src/py/test_siftool_framework.py @@ -1,10 +1,9 @@ import logging import web3 -from integration_framework import main, common, eth, test_utils, inflate_tokens -import eth -import test_utils -from common import * +import siftool_path +from siftool import eth +from siftool.common import * # Note: these tests burn a lot of ether very inefficiently. 
If you care about that make sure to recover diff --git a/test/localnet/config/chains.json b/test/localnet/config/chains.json index 3c5db7fede..64d9919344 100644 --- a/test/localnet/config/chains.json +++ b/test/localnet/config/chains.json @@ -86,7 +86,8 @@ "node": "http://rpc.sentinel.co:26657", "chainId": "sentinelhub-2", "binary": "sentinelhub", - "binaryUrl": "https://github.com/sentinel-official/sentinel/archive/refs/tags/v0.1.4.zip", + "sourceUrl": "https://github.com/sentinel-official/sentinel/archive/refs/tags/v0.1.4.zip", + "sourceRelativePath": "sentinel-0.1.4", "fees": 20000, "denom": "udvpn", "prefix": "sent", diff --git a/test/localnet/utils/tests/__snapshots__/getChains.test.mjs.snap b/test/localnet/utils/tests/__snapshots__/getChains.test.mjs.snap index 78de67a25f..4afd282d7a 100644 --- a/test/localnet/utils/tests/__snapshots__/getChains.test.mjs.snap +++ b/test/localnet/utils/tests/__snapshots__/getChains.test.mjs.snap @@ -124,7 +124,6 @@ Object { }, "sentinel": Object { "binary": "sentinelhub", - "binaryUrl": "https://github.com/sentinel-official/sentinel/archive/refs/tags/v0.1.4.zip", "chainId": "sentinelhub-2", "denom": "udvpn", "devnet-1": Object { @@ -139,6 +138,8 @@ Object { "pprofPort": 13004, "prefix": "sent", "rpcPort": 11004, + "sourceRelativePath": "sentinel-0.1.4", + "sourceUrl": "https://github.com/sentinel-official/sentinel/archive/refs/tags/v0.1.4.zip", "testnet-1": Object { "channelId": 39, "counterpartyChannelId": 2, diff --git a/x/clp/keeper/calculations_test.go b/x/clp/keeper/calculations_test.go index 3591248f37..0a43fcd4bc 100644 --- a/x/clp/keeper/calculations_test.go +++ b/x/clp/keeper/calculations_test.go @@ -1073,8 +1073,8 @@ func TestKeeper_CalculatePoolUnits(t *testing.T) { poolUnits: sdk.ZeroUint(), lpunits: sdk.ZeroUint(), }, - /*{ - name: "successful", + { + name: "fail asymmetric", oldPoolUnits: sdk.ZeroUint(), nativeAssetBalance: sdk.NewUint(10000), externalAssetBalance: sdk.NewUint(100), @@ -1084,7 +1084,8 @@ func 
TestKeeper_CalculatePoolUnits(t *testing.T) { adjustExternalToken: false, poolUnits: sdk.ZeroUint(), lpunits: sdk.ZeroUint(), - },*/ + errString: errors.New("Cannot add liquidity asymmetrically"), + }, } for _, tc := range testcases { diff --git a/x/clp/keeper/msg_server_test.go b/x/clp/keeper/msg_server_test.go index 54fe6e4015..ff7fcc3719 100644 --- a/x/clp/keeper/msg_server_test.go +++ b/x/clp/keeper/msg_server_test.go @@ -543,9 +543,8 @@ func TestMsgServer_RemoveLiquidity(t *testing.T) { Signer: "sif1syavy2npfyt9tcncdtsdzf7kny9lh777yqc2nd", ExternalAsset: &types.Asset{Symbol: "eth"}, WBasisPoints: sdk.NewInt(1), - Asymmetry: sdk.NewInt(1), + Asymmetry: sdk.NewInt(0), }, - err: types.ErrAsymmetricRemove, }, { name: "received amount below expected",