diff --git a/core/lib/dal/.sqlx/query-68c891ee9d71cffe709731f2804b734d5d255e36e48668b3bfc25a0f86ea52e7.json b/core/lib/dal/.sqlx/query-68c891ee9d71cffe709731f2804b734d5d255e36e48668b3bfc25a0f86ea52e7.json new file mode 100644 index 000000000000..1d5336030a44 --- /dev/null +++ b/core/lib/dal/.sqlx/query-68c891ee9d71cffe709731f2804b734d5d255e36e48668b3bfc25a0f86ea52e7.json @@ -0,0 +1,40 @@ +{ + "db_name": "PostgreSQL", + "query": "\n INSERT INTO\n transactions (\n hash,\n is_priority,\n initiator_address,\n nonce,\n signature,\n gas_limit,\n max_fee_per_gas,\n max_priority_fee_per_gas,\n gas_per_pubdata_limit,\n input,\n data,\n tx_format,\n contract_address,\n value,\n paymaster,\n paymaster_input,\n execution_info,\n received_at,\n created_at,\n updated_at\n )\n VALUES\n (\n $1,\n FALSE,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT),\n $19,\n NOW(),\n NOW()\n )\n ON CONFLICT (initiator_address, nonce) DO\n UPDATE\n SET\n hash = $1,\n signature = $4,\n gas_limit = $5,\n max_fee_per_gas = $6,\n max_priority_fee_per_gas = $7,\n gas_per_pubdata_limit = $8,\n input = $9,\n data = $10,\n tx_format = $11,\n contract_address = $12,\n value = $13,\n paymaster = $14,\n paymaster_input = $15,\n execution_info = JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT),\n in_mempool = FALSE,\n received_at = $19,\n created_at = NOW(),\n updated_at = NOW(),\n error = NULL\n WHERE\n transactions.is_priority = FALSE\n AND transactions.miniblock_number IS NULL\n RETURNING\n (\n SELECT\n hash\n FROM\n transactions\n WHERE\n transactions.initiator_address = $2\n AND transactions.nonce = $3\n ) IS NOT NULL AS \"is_replaced!\"\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "is_replaced!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Bytea", + "Bytea", + "Int8", + "Bytea", + "Numeric", + "Numeric", + "Numeric", + "Numeric", + "Bytea", + "Jsonb", + "Int4", + "Bytea", + "Numeric", + "Bytea", + "Bytea", + "Int8", + "Int4", + "Int4", + "Timestamp" + ] + }, + "nullable": [ + null + ] + }, + "hash": "68c891ee9d71cffe709731f2804b734d5d255e36e48668b3bfc25a0f86ea52e7" +} diff --git a/core/lib/dal/.sqlx/query-6f42658c16f19bc0da2e4a99b23eb12db58d0f15680b1467df0c13d49079d130.json b/core/lib/dal/.sqlx/query-6f42658c16f19bc0da2e4a99b23eb12db58d0f15680b1467df0c13d49079d130.json new file mode 100644 index 000000000000..f28927bb9575 --- /dev/null +++ b/core/lib/dal/.sqlx/query-6f42658c16f19bc0da2e4a99b23eb12db58d0f15680b1467df0c13d49079d130.json @@ -0,0 +1,22 @@ +{ + "db_name": "PostgreSQL", + "query": "\n SELECT\n TRUE\n FROM\n transactions\n WHERE\n hash = $1\n ", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "bool", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [ + "Bytea" + ] + }, + "nullable": [ + null + ] + }, + "hash": "6f42658c16f19bc0da2e4a99b23eb12db58d0f15680b1467df0c13d49079d130" +} diff --git a/core/lib/dal/.sqlx/query-9a0ad1128a9d7f1482c06cbd52c05ccf3e1c6fb83db296edec1f8b73f45a1c36.json b/core/lib/dal/.sqlx/query-9a0ad1128a9d7f1482c06cbd52c05ccf3e1c6fb83db296edec1f8b73f45a1c36.json deleted file mode 100644 index 563b2f0ee8d9..000000000000 --- a/core/lib/dal/.sqlx/query-9a0ad1128a9d7f1482c06cbd52c05ccf3e1c6fb83db296edec1f8b73f45a1c36.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT true FROM transactions\n WHERE hash = $1\n ", 
- "describe": { - "columns": [ - { - "ordinal": 0, - "name": "bool", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Bytea" - ] - }, - "nullable": [ - null - ] - }, - "hash": "9a0ad1128a9d7f1482c06cbd52c05ccf3e1c6fb83db296edec1f8b73f45a1c36" -} diff --git a/core/lib/dal/.sqlx/query-cf8cff1e6d277088519ef7dfbdb1885d320c146cd8fad77c107ef12fa38e6c98.json b/core/lib/dal/.sqlx/query-cf8cff1e6d277088519ef7dfbdb1885d320c146cd8fad77c107ef12fa38e6c98.json deleted file mode 100644 index 1bd791a3f829..000000000000 --- a/core/lib/dal/.sqlx/query-cf8cff1e6d277088519ef7dfbdb1885d320c146cd8fad77c107ef12fa38e6c98.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO\n transactions (\n hash,\n is_priority,\n initiator_address,\n nonce,\n signature,\n gas_limit,\n max_fee_per_gas,\n max_priority_fee_per_gas,\n gas_per_pubdata_limit,\n input,\n data,\n tx_format,\n contract_address,\n value,\n paymaster,\n paymaster_input,\n execution_info,\n received_at,\n created_at,\n updated_at\n )\n VALUES\n (\n $1,\n FALSE,\n $2,\n $3,\n $4,\n $5,\n $6,\n $7,\n $8,\n $9,\n $10,\n $11,\n $12,\n $13,\n $14,\n $15,\n JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT),\n $19,\n NOW(),\n NOW()\n )\n ON CONFLICT (initiator_address, nonce) DO\n UPDATE\n SET\n hash = $1,\n signature = $4,\n gas_limit = $5,\n max_fee_per_gas = $6,\n max_priority_fee_per_gas = $7,\n gas_per_pubdata_limit = $8,\n input = $9,\n data = $10,\n tx_format = $11,\n contract_address = $12,\n value = $13,\n paymaster = $14,\n paymaster_input = $15,\n execution_info = JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT),\n in_mempool = FALSE,\n received_at = $19,\n created_at = NOW(),\n updated_at = NOW(),\n error = NULL\n WHERE\n transactions.is_priority = FALSE\n AND transactions.miniblock_number IS NULL\n RETURNING\n (\n SELECT\n hash\n FROM\n transactions\n WHERE\n transactions.initiator_address = $2\n AND transactions.nonce = $3\n ) IS NOT NULL AS \"is_replaced!\"\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "is_replaced!", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [ - "Bytea", - "Bytea", - "Int8", - "Bytea", - "Numeric", - "Numeric", - "Numeric", - "Numeric", - "Bytea", - "Jsonb", - "Int4", - "Bytea", - "Numeric", - "Bytea", - "Bytea", - "Int8", - "Int4", - "Int4", - "Timestamp" - ] - }, - "nullable": [ - null - ] - }, - "hash": "cf8cff1e6d277088519ef7dfbdb1885d320c146cd8fad77c107ef12fa38e6c98" -} diff --git a/core/lib/dal/src/transactions_dal.rs b/core/lib/dal/src/transactions_dal.rs index 02278ea30966..34e725653689 100644 --- a/core/lib/dal/src/transactions_dal.rs +++ b/core/lib/dal/src/transactions_dal.rs @@ -255,198 +255,201 @@ impl TransactionsDal<'_, '_> { tx: L2Tx, exec_info: TransactionExecutionMetrics, ) -> L2TxSubmissionResult { - let tx_hash = tx.hash(); - let is_duplicate = sqlx::query!( - r#" - SELECT - TRUE - FROM - transactions - WHERE - hash = $1 - "#, - tx_hash.as_bytes(), - ) - .fetch_optional(self.storage.conn()) - .await - .unwrap() - .is_some(); - - if is_duplicate { - return L2TxSubmissionResult::Duplicate; - } - - let initiator_address = tx.initiator_account(); - let contract_address = tx.execute.contract_address.as_bytes(); - let json_data = serde_json::to_value(&tx.execute) - .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())); - let gas_limit = u256_to_big_decimal(tx.common_data.fee.gas_limit); - let max_fee_per_gas 
= u256_to_big_decimal(tx.common_data.fee.max_fee_per_gas); - let max_priority_fee_per_gas = - u256_to_big_decimal(tx.common_data.fee.max_priority_fee_per_gas); - let gas_per_pubdata_limit = u256_to_big_decimal(tx.common_data.fee.gas_per_pubdata_limit); - let tx_format = tx.common_data.transaction_type as i32; - let signature = tx.common_data.signature; - let nonce = tx.common_data.nonce.0 as i64; - let input_data = tx.common_data.input.expect("Data is mandatory").data; - let value = u256_to_big_decimal(tx.execute.value); - let paymaster = tx.common_data.paymaster_params.paymaster.0.as_ref(); - let paymaster_input = tx.common_data.paymaster_params.paymaster_input; - let secs = (tx.received_timestamp_ms / 1000) as i64; - let nanosecs = ((tx.received_timestamp_ms % 1000) * 1_000_000) as u32; - let received_at = NaiveDateTime::from_timestamp_opt(secs, nanosecs).unwrap(); - // Besides just adding or updating(on conflict) the record, we want to extract some info - // from the query below, to indicate what actually happened: - // 1) transaction is added - // 2) transaction is replaced - // 3) WHERE clause conditions for DO UPDATE block were not met, so the transaction can't be replaced - // the subquery in RETURNING clause looks into pre-UPDATE state of the table. So if the subquery will return NULL - // transaction is fresh and was added to db(the second condition of RETURNING clause checks it). - // Otherwise, if the subquery won't return NULL it means that there is already tx with such nonce and `initiator_address` in DB - // and we can replace it WHERE clause conditions are met. - // It is worth mentioning that if WHERE clause conditions are not met, None will be returned. - let query_result = sqlx::query!( - r#" - INSERT INTO - transactions ( - hash, - is_priority, - initiator_address, - nonce, - signature, - gas_limit, - max_fee_per_gas, - max_priority_fee_per_gas, - gas_per_pubdata_limit, - input, - data, - tx_format, - contract_address, - value, - paymaster, - paymaster_input, - execution_info, - received_at, - created_at, - updated_at - ) - VALUES - ( - $1, - FALSE, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10, - $11, - $12, - $13, - $14, - $15, - JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT), - $19, - NOW(), - NOW() - ) - ON CONFLICT (initiator_address, nonce) DO - UPDATE - SET - hash = $1, - signature = $4, - gas_limit = $5, - max_fee_per_gas = $6, - max_priority_fee_per_gas = $7, - gas_per_pubdata_limit = $8, - input = $9, - data = $10, - tx_format = $11, - contract_address = $12, - value = $13, - paymaster = $14, - paymaster_input = $15, - execution_info = JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT), - in_mempool = FALSE, - received_at = $19, - created_at = NOW(), - updated_at = NOW(), - error = NULL - WHERE - transactions.is_priority = FALSE - AND transactions.miniblock_number IS NULL - RETURNING - ( - SELECT - hash - FROM - transactions - WHERE - transactions.initiator_address = $2 - AND transactions.nonce = $3 - ) IS NOT NULL AS "is_replaced!" 
- "#, - tx_hash.as_bytes(), - initiator_address.as_bytes(), - nonce, - &signature, - gas_limit, - max_fee_per_gas, - max_priority_fee_per_gas, - gas_per_pubdata_limit, - input_data, - &json_data, - tx_format, - contract_address, - value, - &paymaster, - &paymaster_input, - exec_info.gas_used as i64, - (exec_info.initial_storage_writes + exec_info.repeated_storage_writes) as i32, - exec_info.contracts_used as i32, - received_at - ) + { + let tx_hash = tx.hash(); + let is_duplicate = sqlx::query!( + r#" + SELECT + TRUE + FROM + transactions + WHERE + hash = $1 + "#, + tx_hash.as_bytes(), + ) .fetch_optional(self.storage.conn()) .await - .map(|option_record| option_record.map(|record| record.is_replaced)); - - let l2_tx_insertion_result = match query_result { - Ok(option_query_result) => match option_query_result { - Some(true) => L2TxSubmissionResult::Replaced, - Some(false) => L2TxSubmissionResult::Added, - None => L2TxSubmissionResult::AlreadyExecuted, - }, - Err(err) => { - // So, we consider a tx hash to be a primary key of the transaction - // Based on the idea that we can't have two transactions with the same hash - // We assume that if there already exists some transaction with some tx hash - // another tx with the same tx hash is supposed to have the same data - // In this case we identify it as Duplicate - // Note, this error can happen because of the race condition (tx can be taken by several - // API servers, that simultaneously start execute it and try to inserted to DB) - if let error::Error::Database(ref error) = err { - if let Some(constraint) = error.constraint() { - if constraint == "transactions_pkey" { - return L2TxSubmissionResult::Duplicate; + .unwrap() + .is_some(); + + if is_duplicate { + return L2TxSubmissionResult::Duplicate; + } + + let initiator_address = tx.initiator_account(); + let contract_address = tx.execute.contract_address.as_bytes(); + let json_data = serde_json::to_value(&tx.execute) + .unwrap_or_else(|_| panic!("cannot serialize tx {:?} to json", tx.hash())); + let gas_limit = u256_to_big_decimal(tx.common_data.fee.gas_limit); + let max_fee_per_gas = u256_to_big_decimal(tx.common_data.fee.max_fee_per_gas); + let max_priority_fee_per_gas = + u256_to_big_decimal(tx.common_data.fee.max_priority_fee_per_gas); + let gas_per_pubdata_limit = + u256_to_big_decimal(tx.common_data.fee.gas_per_pubdata_limit); + let tx_format = tx.common_data.transaction_type as i32; + let signature = tx.common_data.signature; + let nonce = tx.common_data.nonce.0 as i64; + let input_data = tx.common_data.input.expect("Data is mandatory").data; + let value = u256_to_big_decimal(tx.execute.value); + let paymaster = tx.common_data.paymaster_params.paymaster.0.as_ref(); + let paymaster_input = tx.common_data.paymaster_params.paymaster_input; + let secs = (tx.received_timestamp_ms / 1000) as i64; + let nanosecs = ((tx.received_timestamp_ms % 1000) * 1_000_000) as u32; + let received_at = NaiveDateTime::from_timestamp_opt(secs, nanosecs).unwrap(); + // Besides just adding or updating(on conflict) the record, we want to extract some info + // from the query below, to indicate what actually happened: + // 1) transaction is added + // 2) transaction is replaced + // 3) WHERE clause conditions for DO UPDATE block were not met, so the transaction can't be replaced + // the subquery in RETURNING clause looks into pre-UPDATE state of the table. So if the subquery will return NULL + // transaction is fresh and was added to db(the second condition of RETURNING clause checks it). 
+ // Otherwise, if the subquery won't return NULL it means that there is already tx with such nonce and `initiator_address` in DB + // and we can replace it WHERE clause conditions are met. + // It is worth mentioning that if WHERE clause conditions are not met, None will be returned. + let query_result = sqlx::query!( + r#" + INSERT INTO + transactions ( + hash, + is_priority, + initiator_address, + nonce, + signature, + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + gas_per_pubdata_limit, + input, + data, + tx_format, + contract_address, + value, + paymaster, + paymaster_input, + execution_info, + received_at, + created_at, + updated_at + ) + VALUES + ( + $1, + FALSE, + $2, + $3, + $4, + $5, + $6, + $7, + $8, + $9, + $10, + $11, + $12, + $13, + $14, + $15, + JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT), + $19, + NOW(), + NOW() + ) + ON CONFLICT (initiator_address, nonce) DO + UPDATE + SET + hash = $1, + signature = $4, + gas_limit = $5, + max_fee_per_gas = $6, + max_priority_fee_per_gas = $7, + gas_per_pubdata_limit = $8, + input = $9, + data = $10, + tx_format = $11, + contract_address = $12, + value = $13, + paymaster = $14, + paymaster_input = $15, + execution_info = JSONB_BUILD_OBJECT('gas_used', $16::BIGINT, 'storage_writes', $17::INT, 'contracts_used', $18::INT), + in_mempool = FALSE, + received_at = $19, + created_at = NOW(), + updated_at = NOW(), + error = NULL + WHERE + transactions.is_priority = FALSE + AND transactions.miniblock_number IS NULL + RETURNING + ( + SELECT + hash + FROM + transactions + WHERE + transactions.initiator_address = $2 + AND transactions.nonce = $3 + ) IS NOT NULL AS "is_replaced!" + "#, + tx_hash.as_bytes(), + initiator_address.as_bytes(), + nonce, + &signature, + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + gas_per_pubdata_limit, + input_data, + &json_data, + tx_format, + contract_address, + value, + &paymaster, + &paymaster_input, + exec_info.gas_used as i64, + (exec_info.initial_storage_writes + exec_info.repeated_storage_writes) as i32, + exec_info.contracts_used as i32, + received_at + ) + .fetch_optional(self.storage.conn()) + .await + .map(|option_record| option_record.map(|record| record.is_replaced)); + + let l2_tx_insertion_result = match query_result { + Ok(option_query_result) => match option_query_result { + Some(true) => L2TxSubmissionResult::Replaced, + Some(false) => L2TxSubmissionResult::Added, + None => L2TxSubmissionResult::AlreadyExecuted, + }, + Err(err) => { + // So, we consider a tx hash to be a primary key of the transaction + // Based on the idea that we can't have two transactions with the same hash + // We assume that if there already exists some transaction with some tx hash + // another tx with the same tx hash is supposed to have the same data + // In this case we identify it as Duplicate + // Note, this error can happen because of the race condition (tx can be taken by several + // API servers, that simultaneously start execute it and try to inserted to DB) + if let error::Error::Database(ref error) = err { + if let Some(constraint) = error.constraint() { + if constraint == "transactions_pkey" { + return L2TxSubmissionResult::Duplicate; + } } } + panic!("{}", err); } - panic!("{}", err); - } - }; - tracing::debug!( - "{:?} l2 transaction {:?} to DB. 
init_acc {:?} nonce {:?} returned option {:?}", - l2_tx_insertion_result, - tx_hash, - initiator_address, - nonce, - l2_tx_insertion_result - ); + }; + tracing::debug!( + "{:?} l2 transaction {:?} to DB. init_acc {:?} nonce {:?} returned option {:?}", + l2_tx_insertion_result, + tx_hash, + initiator_address, + nonce, + l2_tx_insertion_result + ); - l2_tx_insertion_result + l2_tx_insertion_result + } } pub async fn mark_txs_as_executed_in_l1_batch(
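The upsert above packs three outcomes into a single statement by relying on the fact that a scalar subquery in the RETURNING clause is evaluated against the snapshot taken at statement start, i.e. the pre-INSERT/pre-UPDATE state of the table. Below is a minimal sketch of that pattern against a hypothetical demo_txs table (the table, the "sealed" column and the literal values are illustrative stand-ins, not the real schema):

    -- Hypothetical toy table; UNIQUE (initiator, nonce) plays the role of the
    -- ON CONFLICT arbiter, and "sealed" stands in for "miniblock_number IS NOT NULL".
    CREATE TABLE demo_txs (
        hash      BYTEA   PRIMARY KEY,
        initiator BYTEA   NOT NULL,
        nonce     BIGINT  NOT NULL,
        sealed    BOOLEAN NOT NULL DEFAULT FALSE,
        UNIQUE (initiator, nonce)
    );

    -- 1) Fresh submission: the subquery in RETURNING sees no pre-existing
    --    (initiator, nonce) row, so is_replaced is FALSE -> "Added".
    -- 2) Re-run with VALUES ('\x02', '\xaa', 42): ON CONFLICT fires, the guarded
    --    UPDATE succeeds, and the subquery still sees the old row from the
    --    starting snapshot -> is_replaced is TRUE -> "Replaced".
    -- 3) After UPDATE demo_txs SET sealed = TRUE, the WHERE guard fails, no row
    --    is returned at all, and fetch_optional yields None -> "AlreadyExecuted".
    INSERT INTO demo_txs (hash, initiator, nonce)
    VALUES ('\x01', '\xaa', 42)
    ON CONFLICT (initiator, nonce) DO
    UPDATE
    SET
        hash = EXCLUDED.hash
    WHERE
        demo_txs.sealed = FALSE
    RETURNING
        (
            SELECT t.hash
            FROM demo_txs AS t
            WHERE t.initiator = '\xaa' AND t.nonce = 42
        ) IS NOT NULL AS is_replaced;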
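The SELECT TRUE FROM transactions WHERE hash = $1 duplicate pre-check at the top of the method is only a fast path; as the comment in the error branch notes, two API servers can race past it and both attempt the insert. A duplicate hash is not absorbed by the (initiator_address, nonce) conflict target, so it surfaces as a violation of the primary-key constraint, which is why the handler matches the constraint name transactions_pkey and returns Duplicate instead of panicking. A sketch of that failure mode, continuing the purely illustrative demo_txs table above:

    -- Assuming a row with hash '\x01' is already present (as after step 1 above),
    -- reusing that hash under a different (initiator, nonce) is not covered by
    -- the ON CONFLICT (initiator, nonce) arbiter and fails with:
    --   ERROR: duplicate key value violates unique constraint "demo_txs_pkey"
    -- The real error handler keys off the analogous "transactions_pkey" name.
    INSERT INTO demo_txs (hash, initiator, nonce)
    VALUES ('\x01', '\xbb', 7)
    ON CONFLICT (initiator, nonce) DO NOTHING;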