fix: resolve clippy warnings for Rust 1.93
- Replace manual modulo checks with .is_multiple_of()
- Use enumerate() instead of manual loop counters
- Use iterator .take() instead of index-based loops
- Use slice literals instead of unnecessary vec![]
- Allow too_many_arguments in IBC and bridge crates (protocol requirements)
- Allow assertions on constants in integration tests
This commit is contained in:
parent
dcd1cccc67
commit
7c7137c4f6
14 changed files with 32 additions and 20 deletions
|
|
@ -494,7 +494,7 @@ async fn import_blocks(
|
|||
errors += 1;
|
||||
} else {
|
||||
imported += 1;
|
||||
if imported % 1000 == 0 {
|
||||
if imported.is_multiple_of(1000) {
|
||||
info!("Imported {} blocks...", imported);
|
||||
}
|
||||
}
|
||||
|
|
@ -586,7 +586,7 @@ async fn export_blocks(
|
|||
writer.write_all(&serialized)?;
|
||||
|
||||
exported += 1;
|
||||
if exported % 1000 == 0 {
|
||||
if exported.is_multiple_of(1000) {
|
||||
info!("Exported {} blocks...", exported);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -348,7 +348,7 @@ async fn test_long_running_queries() {
|
|||
let _: Vec<[u8; 32]> = consensus.tips().await;
|
||||
let _: Vec<[u8; 32]> = consensus.get_selected_chain(100).await;
|
||||
|
||||
if i % 10 == 0 {
|
||||
if i.is_multiple_of(10) {
|
||||
info!(iteration = i, "Long-running query progress");
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -339,7 +339,7 @@ fn calculate_median(prices: &mut [u64]) -> u64 {
|
|||
prices.sort_unstable();
|
||||
let len = prices.len();
|
||||
|
||||
if len % 2 == 0 {
|
||||
if len.is_multiple_of(2) {
|
||||
(prices[len / 2 - 1] + prices[len / 2]) / 2
|
||||
} else {
|
||||
prices[len / 2]
|
||||
|
|
|
|||
|
|
@ -1,3 +1,8 @@
|
|||
// Allow many arguments since bridge functions require many parameters
|
||||
#![allow(clippy::too_many_arguments)]
|
||||
// Allow complex types for multi-signature collections
|
||||
#![allow(clippy::type_complexity)]
|
||||
|
||||
//! Cross-Chain Bridge Infrastructure for Synor
|
||||
//!
|
||||
//! This crate provides bridge infrastructure for cross-chain asset transfers,
|
||||
|
|
|
|||
|
|
@ -273,7 +273,7 @@ impl DagKnightManager {
|
|||
let data = self.ghostdag.add_block(block_id, parents)?;
|
||||
|
||||
// Periodically update adaptive k
|
||||
if self.latency_tracker.sample_count() % 50 == 0 {
|
||||
if self.latency_tracker.sample_count().is_multiple_of(50) {
|
||||
self.update_adaptive_k();
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
// Allow many arguments since IBC protocol requires many parameters
|
||||
#![allow(clippy::too_many_arguments)]
|
||||
|
||||
//! Inter-Blockchain Communication (IBC) Protocol for Synor
|
||||
//!
|
||||
//! This crate implements the IBC protocol for cross-chain interoperability,
|
||||
|
|
|
|||
|
|
@ -166,7 +166,7 @@ impl KHeavyHash {
|
|||
}
|
||||
|
||||
// Report progress every 10000 hashes
|
||||
if tried % 10000 == 0 && !callback(tried, nonce) {
|
||||
if tried.is_multiple_of(10000) && !callback(tried, nonce) {
|
||||
return None; // Cancelled
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -282,7 +282,7 @@ impl BlockMiner {
|
|||
nonce = nonce.wrapping_add(1);
|
||||
|
||||
// Update stats periodically
|
||||
if hashes % 10000 == 0 {
|
||||
if hashes.is_multiple_of(10000) {
|
||||
self.hash_counter.fetch_add(10000, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1134,7 +1134,7 @@ mod tests {
|
|||
for i in 0..3 {
|
||||
let peer_id = random_peer_id();
|
||||
let ip = Some(IpAddr::V4(Ipv4Addr::new(10, i, 1, 1)));
|
||||
detector.record_peer_connected(peer_id, ip, i % 2 == 0);
|
||||
detector.record_peer_connected(peer_id, ip, i.is_multiple_of(2));
|
||||
detector.update_peer_blue_score(&peer_id, 1000 + i as u64);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -85,12 +85,10 @@ impl Chunker {
|
|||
pub fn chunk(&self, data: &[u8]) -> Vec<Chunk> {
|
||||
let mut chunks = Vec::new();
|
||||
let mut offset = 0u64;
|
||||
let mut index = 0u32;
|
||||
|
||||
for chunk_data in data.chunks(self.config.chunk_size) {
|
||||
chunks.push(Chunk::new(index, chunk_data.to_vec(), offset));
|
||||
for (index, chunk_data) in data.chunks(self.config.chunk_size).enumerate() {
|
||||
chunks.push(Chunk::new(index as u32, chunk_data.to_vec(), offset));
|
||||
offset += chunk_data.len() as u64;
|
||||
index += 1;
|
||||
}
|
||||
|
||||
chunks
|
||||
|
|
@ -135,7 +133,7 @@ impl Chunker {
|
|||
pub fn chunk_count(&self, file_size: u64) -> u32 {
|
||||
let size = file_size as usize;
|
||||
let full_chunks = size / self.config.chunk_size;
|
||||
let has_remainder = size % self.config.chunk_size != 0;
|
||||
let has_remainder = !size.is_multiple_of(self.config.chunk_size);
|
||||
|
||||
(full_chunks + if has_remainder { 1 } else { 0 }) as u32
|
||||
}
|
||||
|
|
|
|||
|
|
@ -184,8 +184,8 @@ impl ErasureCoder {
|
|||
|
||||
// Combine data shards
|
||||
let mut result = Vec::with_capacity(encoded.original_size);
|
||||
for i in 0..encoded.data_shards {
|
||||
if let Some(ref shard_data) = shards[i] {
|
||||
for shard in shards.iter().take(encoded.data_shards) {
|
||||
if let Some(ref shard_data) = shard {
|
||||
result.extend_from_slice(shard_data);
|
||||
} else {
|
||||
return Err(Error::ErasureCoding("Reconstruction incomplete".into()));
|
||||
|
|
|
|||
|
|
@ -231,7 +231,7 @@ mod tests {
|
|||
index: i as usize,
|
||||
is_data: i < 3,
|
||||
data: vec![i; 10],
|
||||
hash: *blake3::hash(&vec![i; 10]).as_bytes(),
|
||||
hash: *blake3::hash(&[i; 10]).as_bytes(),
|
||||
};
|
||||
store.store_shard(&deal_id, &shard).await.unwrap();
|
||||
}
|
||||
|
|
|
|||
|
|
@ -187,7 +187,11 @@ impl MerkleTree {
|
|||
let mut level_size = self.leaf_count;
|
||||
|
||||
while level_size > 1 {
|
||||
let sibling_idx = if idx % 2 == 0 { idx + 1 } else { idx - 1 };
|
||||
let sibling_idx = if idx.is_multiple_of(2) {
|
||||
idx + 1
|
||||
} else {
|
||||
idx - 1
|
||||
};
|
||||
|
||||
if sibling_idx < level_size {
|
||||
proof.push(self.nodes[level_start + sibling_idx]);
|
||||
|
|
@ -232,7 +236,7 @@ mod tests {
|
|||
let mut current = *leaf;
|
||||
for (j, sibling) in proof.iter().enumerate() {
|
||||
let mut combined = [0u8; 64];
|
||||
if (i >> j) % 2 == 0 {
|
||||
if (i >> j).is_multiple_of(2) {
|
||||
combined[..32].copy_from_slice(&current);
|
||||
combined[32..].copy_from_slice(sibling);
|
||||
} else {
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
// Allow assertions on constants to validate configuration
|
||||
#![allow(clippy::assertions_on_constants)]
|
||||
|
||||
//! Cross-Crate Integration Tests for Synor Blockchain
|
||||
//!
|
||||
//! Tests interactions between multiple crates to ensure proper interoperability:
|
||||
|
|
@ -617,8 +620,7 @@ mod mining_consensus_integration {
|
|||
let blocks_since_coinbase = current_daa_score - coinbase_daa_score;
|
||||
|
||||
if blocks_since_coinbase >= COINBASE_MATURITY {
|
||||
// Can spend
|
||||
assert!(true, "Coinbase is mature");
|
||||
// Can spend - coinbase is mature
|
||||
} else {
|
||||
// Cannot spend yet
|
||||
let blocks_remaining = COINBASE_MATURITY - blocks_since_coinbase;
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue