ci: bump nightly version #6133

Closed
2 changes: 1 addition & 1 deletion .github/workflows/base_node_binaries.json
@@ -2,7 +2,7 @@
   {
     "name": "linux-x86_64",
     "runs-on": "ubuntu-20.04",
-    "rust": "nightly-2023-06-04",
+    "rust": "nightly-2024-02-01",
     "target": "x86_64-unknown-linux-gnu",
     "cross": false,
     "target_cpu": "x86-64",
2 changes: 1 addition & 1 deletion .github/workflows/base_node_binaries.yml
@@ -19,7 +19,7 @@ name: Build Matrix of Binaries
 env:
   TBN_FILENAME: "tari_suite"
   TBN_BUNDLE_ID_BASE: "com.tarilabs.pkg"
-  toolchain: nightly-2023-06-04
+  toolchain: nightly-2024-02-01
   matrix-json-file: ".github/workflows/base_node_binaries.json"
   CARGO_HTTP_MULTIPLEXING: false
   CARGO_UNSTABLE_SPARSE_REGISTRY: true
2 changes: 1 addition & 1 deletion .github/workflows/build_dockers.yml
@@ -47,7 +47,7 @@ name: Build docker images
         - xmrig

 env:
-  toolchain_default: nightly-2023-06-04
+  toolchain_default: nightly-2024-02-01

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
2 changes: 1 addition & 1 deletion .github/workflows/build_dockers_workflow.yml
@@ -14,7 +14,7 @@ name: Build docker images - workflow_call/on-demand
       toolchain:
         type: string
         description: 'Rust toolchain'
-        default: nightly-2023-06-04
+        default: nightly-2024-02-01
       arch:
         type: string
         default: x86-64
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -11,7 +11,7 @@ name: CI
   merge_group:

 env:
-  toolchain: nightly-2023-06-04
+  toolchain: nightly-2024-02-01
   CARGO_HTTP_MULTIPLEXING: false
   CARGO_TERM_COLOR: always
   CARGO_UNSTABLE_SPARSE_REGISTRY: true
2 changes: 1 addition & 1 deletion .github/workflows/coverage.yml
@@ -11,7 +11,7 @@ name: Source Coverage
     - ci-coverage-*

 env:
-  toolchain: nightly-2023-06-04
+  toolchain: nightly-2024-02-01

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
2 changes: 1 addition & 1 deletion .github/workflows/integration_tests.yml
@@ -27,7 +27,7 @@ name: Integration tests
         type: string

 env:
-  toolchain: nightly-2023-06-04
+  toolchain: nightly-2024-02-01

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
19 changes: 0 additions & 19 deletions applications/minotari_app_grpc/src/conversions/mod.rs
@@ -43,25 +43,6 @@ mod unblinded_output;

 use prost_types::Timestamp;

-pub use self::{
-    aggregate_body::*,
-    base_node_state::*,
-    block::*,
-    block_header::*,
-    chain_metadata::*,
-    com_and_pub_signature::*,
-    consensus_constants::*,
-    historical_block::*,
-    new_block_template::*,
-    output_features::*,
-    peer::*,
-    proof_of_work::*,
-    signature::*,
-    transaction::*,
-    transaction_input::*,
-    transaction_kernel::*,
-    transaction_output::*,
-};
 use crate::{tari_rpc as grpc, tari_rpc::BlockGroupRequest};

 /// Utility function that converts a `chrono::NaiveDateTime` to a `prost::Timestamp`
@@ -250,7 +250,7 @@ impl BurnTab {
         let mut column0_items = Vec::new();
         let mut column1_items = Vec::new();

-        for item in windowed_view.iter() {
+        for item in windowed_view {
             column0_items.push(ListItem::new(Span::raw(item.reciprocal_claim_public_key.clone())));
             column1_items.push(ListItem::new(Span::raw(item.burned_at.to_string().clone())));
         }
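The `.iter()` removals here and in the other tab components below all target the same idiom: when a value is already iterable, the explicit `.iter()` call is redundant (the newer nightly's clippy flags this as `explicit_iter_loop`). A minimal sketch with illustrative names, not code from this PR:

```rust
fn main() {
    let items = vec!["a", "b", "c"];

    // Before: for item in items.iter() { ... }
    // After: iterating the reference (or the value itself, as with
    // `windowed_view` above) desugars to the same Iterator calls.
    for item in &items {
        println!("{item}");
    }
}
```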
@@ -91,7 +91,7 @@ impl ContactsTab {
         let mut column2_items = Vec::new();
         let mut column3_items = Vec::new();
         let mut column4_items = Vec::new();
-        for c in windowed_view.iter() {
+        for c in windowed_view {
             column0_items.push(ListItem::new(Span::raw(c.alias.clone())));
             column1_items.push(ListItem::new(Span::raw(c.address.clone())));
             column2_items.push(ListItem::new(Span::raw(display_compressed_string(
@@ -190,7 +190,7 @@ impl NetworkTab {
         let mut column0_items = Vec::with_capacity(peers.len());
         let mut column1_items = Vec::with_capacity(peers.len());
         let mut column2_items = Vec::with_capacity(peers.len());
-        for p in peers.iter() {
+        for p in peers {
             column0_items.push(ListItem::new(Span::raw(p.node_id.to_string())));
             column1_items.push(ListItem::new(Span::raw(p.public_key.to_string())));
             column2_items.push(ListItem::new(Span::raw(p.user_agent.clone())));
@@ -434,11 +434,7 @@ impl<B: Backend> Component<B> for NetworkTab {
                 // set the currently selected base node as a custom base node
                 let base_node = app_state.get_selected_base_node();
                 let public_key = base_node.public_key.to_hex();
-                let address = base_node
-                    .addresses
-                    .best()
-                    .map(|a| a.to_string())
-                    .unwrap_or_else(|| "".to_string());
+                let address = base_node.addresses.best().map(|a| a.to_string()).unwrap_or_default();

                 match Handle::current().block_on(app_state.set_custom_base_node(public_key, address)) {
                     Ok(peer) => {
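The collapsed `address` expression also swaps `unwrap_or_else(|| "".to_string())` for `unwrap_or_default()`. A hedged sketch of the equivalence, assuming a `String`-producing `Option` like the one above:

```rust
fn best_address(addr: Option<String>) -> String {
    // Before: addr.unwrap_or_else(|| "".to_string())
    // After: String::default() is "", so this is equivalent and shorter.
    addr.unwrap_or_default()
}

fn main() {
    assert_eq!(best_address(None), "");
    assert_eq!(best_address(Some("1.2.3.4:18189".into())), "1.2.3.4:18189");
}
```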
@@ -102,7 +102,7 @@ impl TransactionsTab {
         let mut column2_items = Vec::new();
         let mut column3_items = Vec::new();

-        for t in windowed_view.iter() {
+        for t in windowed_view {
             let text_color = text_colors
                 .get(&t.cancelled.is_some())
                 .unwrap_or(&Color::Reset)
@@ -205,7 +205,7 @@
         let mut column2_items = Vec::new();
         let mut column3_items = Vec::new();

-        for t in windowed_view.iter() {
+        for t in windowed_view {
             let cancelled = t.cancelled.is_some();
             let text_color = text_colors.get(&cancelled).unwrap_or(&Color::Reset).to_owned();
             if t.direction == TransactionDirection::Outbound {
@@ -191,7 +191,7 @@ mod test {

     #[test]
     fn test_list_offset_update() {
-        let slist = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+        let slist = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
         let mut list_state = WindowedListState::new();
         list_state.set_num_items(slist.len());
         let height = 4;
10 changes: 5 additions & 5 deletions applications/minotari_merge_mining_proxy/src/common/json_rpc.rs
@@ -128,13 +128,13 @@ pub mod test {

     #[test]
     pub fn test_error_response() {
-        let req_id = Some(12);
+        let req_id = 12;
         let err_code = 200;
         let err_message = "error message";
-        let err_data = Some(json::json!({"test key":"test value"}));
-        let response = error_response(req_id, err_code, err_message, err_data.clone());
-        assert_eq!(response["id"], req_id.unwrap());
-        assert_eq!(response["error"]["data"], err_data.unwrap());
+        let err_data = json::json!({"test key":"test value"});
+        let response = error_response(Some(req_id), err_code, err_message, Some(err_data.clone()));
+        assert_eq!(response["id"], req_id);
+        assert_eq!(response["error"]["data"], err_data);
         assert_eq!(response["error"]["code"], err_code);
         assert_eq!(response["error"]["message"], err_message);
         let response = error_response(None, err_code, err_message, None);
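The test refactor keeps the plain values bound and wraps them in `Some(...)` only at the call site, so the assertions no longer need `.unwrap()`. A self-contained sketch of the pattern (the `echo` closure stands in for the real `error_response`):

```rust
fn main() {
    let req_id = 12;
    let err_data = String::from("data");

    // Stand-in for error_response(Some(req_id), ..., Some(err_data.clone())).
    let echo = |id: Option<i32>, data: Option<String>| (id, data);
    let (id, data) = echo(Some(req_id), Some(err_data.clone()));

    // No .unwrap() needed on the expected side.
    assert_eq!(id, Some(req_id));
    assert_eq!(data, Some(err_data));
}
```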
4 changes: 2 additions & 2 deletions applications/minotari_miner/src/miner.rs
@@ -222,7 +222,7 @@ pub fn mining_task(
             // If we are mining in share mode, this share might not be a block, so we need to keep mining till we get a
             // new job
             if share_mode {
-                waker.clone().wake();
+                waker.wake_by_ref();
             } else {
                 waker.wake();
                 trace!(target: LOG_TARGET, "Mining thread {} stopped", miner);
@@ -240,7 +240,7 @@
                 height: hasher.height(),
                 target_difficulty,
             });
-            waker.clone().wake();
+            waker.wake_by_ref();
             trace!(target: LOG_TARGET, "Reporting from {} result {:?}", miner, res);
             if let Err(TrySendError::Disconnected(_)) = res {
                 info!(target: LOG_TARGET, "Mining thread {} disconnected", miner);
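`wake_by_ref` wakes the task through a borrow, so the old `waker.clone().wake()` (clone, then consume the clone) did a needless refcount bump; this is what clippy's `waker_clone_wake` lint flags on newer toolchains. A minimal sketch (the helper is illustrative, not from the PR):

```rust
use std::task::Waker;

fn notify(waker: &Waker) {
    // Before: waker.clone().wake();
    waker.wake_by_ref(); // same wake-up, no clone
}
```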
4 changes: 2 additions & 2 deletions applications/minotari_miner/src/stratum/controller.rs
@@ -236,10 +236,10 @@ impl Controller {
     }

     fn handle_error(&mut self, error: types::rpc_error::RpcError) {
-        if vec![-1, 24].contains(&error.code) {
+        if [-1, 24].contains(&error.code) {
             // unauthorized
             let _result = self.send_login();
-        } else if vec![21, 20, 22, 23, 25].contains(&error.code) {
+        } else if [21, 20, 22, 23, 25].contains(&error.code) {
             // problem with template
             let _result = self.send_message_get_job_template();
         } else {
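Matching against a fixed set of error codes doesn't need a heap allocation: `vec![...]` built a `Vec` on every call just to invoke `.contains`, which clippy's `useless_vec` lint flags. Sketch:

```rust
fn is_auth_error(code: i32) -> bool {
    // Before: vec![-1, 24].contains(&code)
    [-1, 24].contains(&code) // stack array, same contains()
}

fn main() {
    assert!(is_auth_error(-1));
    assert!(!is_auth_error(0));
}
```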
3 changes: 2 additions & 1 deletion applications/minotari_node/src/grpc/base_node_grpc_server.rs
@@ -120,7 +120,7 @@ impl BaseNodeGrpcServer {
     }

     fn is_method_enabled(&self, grpc_method: GrpcMethod) -> bool {
-        let mining_method = vec![
+        let mining_method = [
             GrpcMethod::GetNewBlockTemplate,
             GrpcMethod::GetNewBlock,
             GrpcMethod::GetNewBlockBlob,
@@ -1888,6 +1888,7 @@ impl tari_rpc::base_node_server::BaseNode for BaseNodeGrpcServer {
         };

         for template_registration in template_registrations {
+            #[allow(clippy::unnecessary_fallible_conversions)]
             let registration = match template_registration.registration_data.try_into() {
                 Ok(t) => t,
                 Err(e) => {
1 change: 1 addition & 0 deletions base_layer/chat_ffi/src/confirmation.rs
@@ -117,6 +117,7 @@ pub unsafe extern "C" fn read_confirmation_message_id(
 /// # Safety
 /// The ```confirmation``` When done with the Confirmation it should be destroyed
 #[no_mangle]
+#[allow(clippy::cast_possible_wrap)]
 pub unsafe extern "C" fn read_confirmation_timestamp(
     confirmation: *mut Confirmation,
     error_out: *mut c_int,
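For context on the new `#[allow(clippy::cast_possible_wrap)]`: these FFI getters return signed `c_longlong` values built from unsigned internals, and `u64 as i64` wraps to a negative value for inputs above `i64::MAX`. The allow documents that this is an accepted risk rather than a missed check. A sketch of the behaviour being waved through:

```rust
#[allow(clippy::cast_possible_wrap)]
fn to_c_longlong(n: u64) -> i64 {
    n as i64
}

fn main() {
    assert_eq!(to_c_longlong(42), 42);
    // The wrap the lint warns about (unreachable for sane timestamps):
    assert_eq!(to_c_longlong(u64::MAX), -1);
}
```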
3 changes: 2 additions & 1 deletion base_layer/chat_ffi/src/message.rs
@@ -187,6 +187,7 @@ pub unsafe extern "C" fn chat_metadata_get_at(
 /// ## Safety
 /// `message` should be destroyed eventually
 #[no_mangle]
+#[allow(clippy::cast_possible_wrap)]
 pub unsafe extern "C" fn chat_message_metadata_len(message: *mut Message, error_out: *mut c_int) -> c_longlong {
     let mut error = 0;
     ptr::swap(error_out, &mut error as *mut c_int);
@@ -289,7 +290,7 @@ pub unsafe extern "C" fn read_chat_message_direction(message: *mut Message, erro
         return -1;
     }

-    c_int::try_from((*message).direction.as_byte()).unwrap_or(-1)
+    c_int::from((*message).direction.as_byte())
 }

 /// Returns a c_ulonglong representation of the stored at timestamp as seconds since epoch
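The direction and metadata-type getters previously used a fallible conversion where none was needed: `u8` to `c_int` always succeeds, so the `unwrap_or(-1)` branch was dead code. This matches the `clippy::unnecessary_fallible_conversions` lint allowed elsewhere in this PR. Sketch:

```rust
use std::os::raw::c_int;

fn direction_code(byte: u8) -> c_int {
    // Before: c_int::try_from(byte).unwrap_or(-1) — the -1 arm can never run.
    c_int::from(byte)
}

fn main() {
    assert_eq!(direction_code(1), 1);
}
```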
2 changes: 1 addition & 1 deletion base_layer/chat_ffi/src/message_metadata.rs
@@ -120,7 +120,7 @@ pub unsafe extern "C" fn read_chat_metadata_type(msg_metadata: *mut MessageMetad
     }

     let md = &(*msg_metadata);
-    c_int::try_from(md.metadata_type.as_byte()).unwrap_or(-1)
+    c_int::from(md.metadata_type.as_byte())
 }

 /// Returns a ptr to a ByteVector
2 changes: 1 addition & 1 deletion base_layer/common_types/src/tx_id.rs
@@ -66,7 +66,7 @@ impl Hash for TxId {

 impl PartialEq for TxId {
     fn eq(&self, other: &Self) -> bool {
-        self.0.eq(&other.0)
+        self.0 == other.0
     }
 }

@@ -614,7 +614,7 @@ where B: BlockchainBackend + 'static
                 source_peer,
             );
             #[cfg(feature = "metrics")]
-            metrics::compact_block_tx_misses(header.height).set(excess_sigs.len() as i64);
+            metrics::compact_block_tx_misses(header.height).set(i64::try_from(excess_sigs.len()).unwrap_or(i64::MAX));
             let block = self.request_full_block_from_peer(source_peer, block_hash).await?;
             return Ok(block);
         }
@@ -624,7 +624,8 @@
         let known_transactions = known_transactions.into_iter().map(|tx| (*tx).clone()).collect();

         #[cfg(feature = "metrics")]
-        metrics::compact_block_tx_misses(header.height).set(missing_excess_sigs.len() as i64);
+        metrics::compact_block_tx_misses(header.height)
+            .set(i64::try_from(missing_excess_sigs.len()).unwrap_or(i64::MAX));

         let mut builder = BlockBuilder::new(header.version)
             .with_coinbase_utxo(coinbase_output, coinbase_kernel)
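`len as i64` silently wraps if a `usize` ever exceeded `i64::MAX`; the `try_from(...).unwrap_or(i64::MAX)` form saturates instead. That case is practically unreachable for these collections, but it satisfies the cast lints without an `allow`. Sketch:

```rust
fn gauge_value(len: usize) -> i64 {
    // Before: len as i64 (wraps in the pathological case)
    i64::try_from(len).unwrap_or(i64::MAX) // saturate instead
}

fn main() {
    assert_eq!(gauge_value(7), 7);
    // On 64-bit targets, usize::MAX saturates to i64::MAX:
    #[cfg(target_pointer_width = "64")]
    assert_eq!(gauge_value(usize::MAX), i64::MAX);
}
```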
2 changes: 1 addition & 1 deletion base_layer/core/src/base_node/proto/wallet_rpc.rs
@@ -226,7 +226,7 @@ impl From<TxQueryResponse> for proto::TxQueryResponse {
     fn from(response: TxQueryResponse) -> Self {
         Self {
             location: proto::TxLocation::from(response.location) as i32,
-            best_block_hash: response.best_block_hash.map(|v| v.to_vec()).unwrap_or(vec![]),
+            best_block_hash: response.best_block_hash.map(|v| v.to_vec()).unwrap_or_default(),
             confirmations: response.confirmations,
             is_synced: response.is_synced,
             best_block_height: response.best_block_height,
2 changes: 1 addition & 1 deletion base_layer/core/src/base_node/sync/rpc/service.rs
@@ -101,7 +101,7 @@ impl<B: BlockchainBackend + 'static> BaseNodeSyncRpcService<B> {
         let token = Arc::new(peer);
         lock.push(Arc::downgrade(&token));
         #[cfg(feature = "metrics")]
-        metrics::active_sync_peers().set(lock.len() as i64);
+        metrics::active_sync_peers().set(i64::try_from(lock.len()).unwrap_or(i64::MAX));
         Ok(token)
     }
 }
6 changes: 3 additions & 3 deletions base_layer/core/src/chain_storage/blockchain_database.rs
@@ -1326,19 +1326,19 @@ pub fn calculate_mmr_roots<T: BlockchainBackend>(
     let mut output_smt = db.fetch_tip_smt()?;
     let mut input_mmr = PrunedInputMmr::new(PrunedHashSet::default());

-    for kernel in body.kernels().iter() {
+    for kernel in body.kernels() {
         kernel_mmr.push(kernel.hash().to_vec())?;
     }

-    for output in body.outputs().iter() {
+    for output in body.outputs() {
         if !output.is_burned() {
             let smt_key = NodeKey::try_from(output.commitment.as_bytes())?;
             let smt_node = ValueHash::try_from(output.smt_hash(header.height).as_slice())?;
             output_smt.insert(smt_key, smt_node)?;
         }
     }

-    for input in body.inputs().iter() {
+    for input in body.inputs() {
         input_mmr.push(input.canonical_hash().to_vec())?;

         // Search the DB for the output leaf index so that it can be marked as spent/deleted.
4 changes: 2 additions & 2 deletions base_layer/core/src/chain_storage/reorg.rs
@@ -43,8 +43,8 @@ impl Reorg {
     pub fn from_reorged_blocks(added: &VecDeque<Arc<ChainBlock>>, removed: &[Arc<ChainBlock>]) -> Self {
         // Expects blocks to be ordered sequentially highest height to lowest (as in rewind_to_height)
         Self {
-            new_height: added.get(0).map(|b| b.header().height).unwrap_or_default(),
-            new_hash: added.get(0).map(|b| *b.hash()).unwrap_or_default(),
+            new_height: added.front().map(|b| b.header().height).unwrap_or_default(),
+            new_hash: added.front().map(|b| *b.hash()).unwrap_or_default(),
             prev_height: removed.first().map(|b| b.header().height).unwrap_or_default(),
             prev_hash: removed.first().map(|b| *b.hash()).unwrap_or_default(),
             num_blocks_added: added.len() as u64,
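`added` is a `VecDeque`, and `front()` is its idiomatic first-element accessor; `get(0)` behaves identically but reads like an arbitrary index lookup (newer clippy suggests `front()` here via its `get_first` lint). Sketch:

```rust
use std::collections::VecDeque;

fn first_height(added: &VecDeque<u64>) -> u64 {
    // Before: added.get(0).map(|h| *h).unwrap_or_default()
    added.front().copied().unwrap_or_default()
}

fn main() {
    let blocks: VecDeque<u64> = VecDeque::from([42, 41, 40]);
    assert_eq!(first_height(&blocks), 42);
    assert_eq!(first_height(&VecDeque::new()), 0);
}
```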
2 changes: 1 addition & 1 deletion base_layer/core/src/consensus/consensus_encoding.rs
@@ -24,7 +24,7 @@ mod bytes;
 mod hashing;
 mod string;

-pub use hashing::{ConsensusHasher, DomainSeparatedConsensusHasher};
+pub use hashing::DomainSeparatedConsensusHasher;
 pub use string::MaxSizeString;

 pub use self::bytes::MaxSizeBytes;
1 change: 1 addition & 0 deletions base_layer/core/src/covenants/decoder.rs
@@ -95,6 +95,7 @@ pub(super) trait CovenantReadExt: io::Read {

 impl<R: io::Read> CovenantReadExt for R {
     /// Reads next byte code
+    #[allow(clippy::unused_io_amount)]
     fn read_next_byte_code(&mut self) -> Result<Option<u8>, io::Error> {
         let mut buf = [0u8; 1];
         loop {
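The new allow acknowledges that `read_next_byte_code` deliberately ignores the byte count `read` returns: for a one-byte buffer the count is only ever 0 (EOF) or 1. An explicit check, which is what the lint would otherwise demand, looks like this sketch (a hedged reconstruction, not the actual function body):

```rust
use std::io::{self, Read};

fn read_one_byte<R: Read>(reader: &mut R) -> Result<Option<u8>, io::Error> {
    let mut buf = [0u8; 1];
    loop {
        match reader.read(&mut buf) {
            Ok(0) => return Ok(None),         // EOF: no byte code left
            Ok(_) => return Ok(Some(buf[0])), // exactly one byte was read
            Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
            Err(e) => return Err(e),
        }
    }
}
```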
2 changes: 1 addition & 1 deletion base_layer/core/src/covenants/output_set.rs
@@ -163,7 +163,7 @@ impl<T> Eq for Indexed<T> {}

 impl<T> PartialOrd for Indexed<T> {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        self.index.partial_cmp(&other.index)
+        Some(self.cmp(other))
     }
 }

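Deriving `partial_cmp` from `cmp` is the canonical pattern clippy's `non_canonical_partial_cmp_impl` lint enforces: if `PartialOrd` compares `index` directly while `Ord` ever diverges, the two orderings silently disagree. A simplified, non-generic sketch of the idea:

```rust
use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Indexed {
    index: usize,
}

impl Ord for Indexed {
    fn cmp(&self, other: &Self) -> Ordering {
        self.index.cmp(&other.index)
    }
}

impl PartialOrd for Indexed {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Before: self.index.partial_cmp(&other.index)
        Some(self.cmp(other)) // single source of truth: Ord
    }
}

fn main() {
    assert!(Indexed { index: 1 } < Indexed { index: 2 });
}
```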
@@ -544,7 +544,7 @@ impl UnconfirmedPool {
             .flat_map(|tx| tx.body.inputs())
             .map(|i| i.output_hash())
             .collect::<HashSet<_>>();
-        for (_, transaction) in current_transactions.iter() {
+        for transaction in current_transactions.values() {
             for input in transaction.body.inputs() {
                 if insert_set.contains(&input.output_hash()) {
                     return true;
@@ -646,8 +646,7 @@
                     Ok(Some(v)) => Some(Ok(v)),
                     Ok(None) => None,
                 })
-                .collect::<Result<Vec<_>, _>>()?
-                .into_iter(),
+                .collect::<Result<Vec<_>, _>>()?,
         );
         debug!(
             target: LOG_TARGET,
@@ -680,8 +679,7 @@
                     Ok(Some(v)) => Some(Ok(v)),
                     Ok(None) => None,
                 })
-                .collect::<Result<Vec<_>, _>>()?
-                .into_iter(),
+                .collect::<Result<Vec<_>, _>>()?,
         );
         debug!(
             target: LOG_TARGET,
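Both of the last two hunks drop a redundant `.into_iter()`: `Extend::extend` accepts any `IntoIterator`, so the collected `Vec` can be passed as-is (clippy's `useless_conversion`). Sketch:

```rust
fn main() {
    let mut selected = vec![1, 2];
    let fetched: Vec<i32> = (3..=4).collect();

    // Before: selected.extend(fetched.into_iter());
    selected.extend(fetched); // Vec already implements IntoIterator
    assert_eq!(selected, [1, 2, 3, 4]);
}
```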