A complete blockchain implementation featuring: - synord: full node with GHOSTDAG consensus - explorer-web: modern React blockchain explorer with 3D DAG visualization - CLI wallet and tools - smart contract SDK and example contracts (DEX, NFT, token) - WASM crypto library for browser/mobile

501 lines · 17 KiB · Rust
//! Property-based tests for Synor consensus module.
//!
//! These tests verify invariants that should hold for all inputs,
//! not just specific examples. They help find edge cases that
//! manual testing might miss.
|
use proptest::prelude::*;
use synor_consensus::{
    difficulty::{DaaBlock, DaaParams, DifficultyManager},
    utxo::{UtxoDiff, UtxoEntry, UtxoSet},
};
use synor_types::{
    transaction::{Outpoint, ScriptPubKey, TxOutput},
    Amount, Hash256,
};
|
|
|
|
// ============================================================================
|
|
// UTXO Property Tests
|
|
// ============================================================================
|
|
|
|
/// Strategy for generating valid outpoints
|
|
fn arb_outpoint() -> impl Strategy<Value = Outpoint> {
|
|
(any::<[u8; 32]>(), 0u32..100)
|
|
.prop_map(|(bytes, idx)| Outpoint::new(Hash256::from_bytes(bytes), idx))
|
|
}
|
|
|
|
/// Strategy for generating valid amounts (non-zero, under max supply)
|
|
fn arb_amount() -> impl Strategy<Value = Amount> {
|
|
(1u64..=100_000_000_000u64).prop_map(Amount::from_sompi)
|
|
}
|
|
|
|
/// Strategy for generating UTXO entries
|
|
fn arb_utxo_entry() -> impl Strategy<Value = UtxoEntry> {
|
|
(arb_amount(), 0u64..1_000_000, any::<bool>()).prop_map(|(amount, daa_score, is_coinbase)| {
|
|
UtxoEntry::new(
|
|
TxOutput::new(amount, ScriptPubKey::p2pkh(&[0u8; 32])),
|
|
daa_score,
|
|
is_coinbase,
|
|
)
|
|
})
|
|
}
|
|
|
|
proptest! {
    /// Property: Adding then removing a UTXO leaves the set unchanged.
    ///
    /// Verifies that `add` followed by `remove` for the same outpoint restores
    /// the set's total value, length, and membership to their initial state.
    #[test]
    fn utxo_add_remove_identity(
        outpoint in arb_outpoint(),
        entry in arb_utxo_entry(),
    ) {
        // NOTE(review): `set` is not `mut`, yet `add`/`remove` are called on it —
        // presumably `UtxoSet` uses interior mutability; confirm in its definition.
        let set = UtxoSet::new();
        let initial_value = set.total_value();
        let initial_len = set.len();

        // Add and then remove
        set.add(outpoint, entry).unwrap();
        set.remove(&outpoint).unwrap();

        // Should be back to initial state
        prop_assert_eq!(set.total_value(), initial_value);
        prop_assert_eq!(set.len(), initial_len);
        prop_assert!(!set.contains(&outpoint));
    }

    /// Property: Total value is always the sum of all UTXO amounts.
    ///
    /// Inserts up to 50 random entries (first occurrence of each outpoint only)
    /// and checks the set's reported total and length against a shadow sum.
    #[test]
    fn utxo_total_value_consistency(
        entries in prop::collection::vec((arb_outpoint(), arb_utxo_entry()), 1..50),
    ) {
        let set = UtxoSet::new();
        let mut expected_total = Amount::ZERO;
        let mut unique_outpoints = std::collections::HashSet::new();

        for (outpoint, entry) in entries {
            // Skip duplicates: `HashSet::insert` returns false when the
            // outpoint was already seen.
            if unique_outpoints.insert(outpoint) {
                expected_total = expected_total.saturating_add(entry.amount());
                let _ = set.add(outpoint, entry);
            }
        }

        prop_assert_eq!(set.total_value(), expected_total);
        prop_assert_eq!(set.len(), unique_outpoints.len());
    }

    /// Property: Count always matches actual number of UTXOs.
    ///
    /// Replays a random sequence of add/remove operations against both the
    /// real set and a shadow `HashSet` model, then compares their sizes.
    #[test]
    fn utxo_count_consistency(
        operations in prop::collection::vec(
            (arb_outpoint(), arb_utxo_entry(), any::<bool>()),
            1..100
        ),
    ) {
        let set = UtxoSet::new();
        let mut present_outpoints = std::collections::HashSet::new();

        for (outpoint, entry, is_add) in operations {
            if is_add {
                if !present_outpoints.contains(&outpoint) {
                    let _ = set.add(outpoint, entry);
                    present_outpoints.insert(outpoint);
                }
            } else if present_outpoints.remove(&outpoint) {
                // Only remove from the set when the shadow model confirms
                // the outpoint is currently present.
                let _ = set.remove(&outpoint);
            }
        }

        prop_assert_eq!(set.len(), present_outpoints.len());
    }

    /// Property: UtxoDiff add cancels remove for same outpoint.
    #[test]
    fn utxo_diff_add_cancels_remove(
        outpoint in arb_outpoint(),
        entry in arb_utxo_entry(),
    ) {
        let mut diff = UtxoDiff::new();

        // Remove then add should cancel
        diff.remove(outpoint);
        diff.add(outpoint, entry.clone());

        // Outpoint should be in to_add, not to_remove
        prop_assert!(diff.to_add.contains_key(&outpoint));
        prop_assert!(!diff.to_remove.contains(&outpoint));
    }

    /// Property: UtxoDiff remove cancels add for same outpoint.
    #[test]
    fn utxo_diff_remove_cancels_add(
        outpoint in arb_outpoint(),
        entry in arb_utxo_entry(),
    ) {
        let mut diff = UtxoDiff::new();

        // Add then remove should cancel
        diff.add(outpoint, entry);
        diff.remove(outpoint);

        // Outpoint should be in neither
        prop_assert!(!diff.to_add.contains_key(&outpoint));
        prop_assert!(!diff.to_remove.contains(&outpoint));
    }

    /// Property: Applying a diff and its "undo" should restore additions.
    ///
    /// Builds a base set, applies an add-only diff, then applies the inverse
    /// (remove-only) diff and checks the resulting snapshot matches the
    /// original one entry-for-entry by amount.
    #[test]
    fn utxo_diff_apply_reversibility(
        initial_entries in prop::collection::vec((arb_outpoint(), arb_utxo_entry()), 1..10),
        new_entries in prop::collection::vec((arb_outpoint(), arb_utxo_entry()), 1..5),
    ) {
        // Build initial set with unique outpoints
        let set = UtxoSet::new();
        let mut used_outpoints = std::collections::HashSet::new();

        for (outpoint, entry) in &initial_entries {
            if used_outpoints.insert(*outpoint) {
                let _ = set.add(*outpoint, entry.clone());
            }
        }

        let initial_snapshot = set.snapshot();

        // Create a diff that only adds (to make reversal simple)
        let mut diff = UtxoDiff::new();
        for (outpoint, entry) in new_entries {
            if !used_outpoints.contains(&outpoint) {
                used_outpoints.insert(outpoint);
                diff.add(outpoint, entry);
            }
        }

        if diff.to_add.is_empty() {
            return Ok(()); // Skip if no new entries
        }

        // Apply diff
        set.apply_diff(&diff).unwrap();

        // Create reverse diff (remove what was added)
        let mut reverse_diff = UtxoDiff::new();
        for outpoint in diff.to_add.keys() {
            reverse_diff.remove(*outpoint);
        }

        // Apply reverse
        set.apply_diff(&reverse_diff).unwrap();

        // Should be back to initial state
        let final_snapshot = set.snapshot();
        prop_assert_eq!(initial_snapshot.len(), final_snapshot.len());
        for (outpoint, entry) in &initial_snapshot {
            let final_entry = final_snapshot.get(outpoint);
            prop_assert!(final_entry.is_some(), "Missing outpoint after reversal");
            prop_assert_eq!(final_entry.unwrap().amount(), entry.amount());
        }
    }

    /// Property: Coinbase maturity threshold is respected.
    ///
    /// A coinbase entry created at `daa_score` must report mature exactly when
    /// `current_score >= daa_score + COINBASE_MATURITY` (saturating add).
    #[test]
    fn utxo_coinbase_maturity(
        daa_score in 0u64..1_000_000,
        current_score in 0u64..1_000_000,
    ) {
        let entry = UtxoEntry::new(
            TxOutput::new(Amount::from_sompi(1000), ScriptPubKey::p2pkh(&[0u8; 32])),
            daa_score,
            true, // coinbase
        );

        let is_mature = entry.is_mature(current_score);
        let maturity_threshold = synor_consensus::COINBASE_MATURITY;

        // Should be mature if and only if current_score >= daa_score + maturity
        let expected_mature = current_score >= daa_score.saturating_add(maturity_threshold);
        prop_assert_eq!(is_mature, expected_mature);
    }

    /// Property: Non-coinbase UTXOs are always mature.
    #[test]
    fn utxo_non_coinbase_always_mature(
        daa_score in 0u64..1_000_000,
        current_score in 0u64..1_000_000,
    ) {
        let entry = UtxoEntry::new(
            TxOutput::new(Amount::from_sompi(1000), ScriptPubKey::p2pkh(&[0u8; 32])),
            daa_score,
            false, // not coinbase
        );

        prop_assert!(entry.is_mature(current_score));
    }
}
|
|
|
|
// ============================================================================
// Difficulty Property Tests
// ============================================================================

proptest! {
    /// Property: bits_to_difficulty and difficulty_to_bits are approximately inverse.
    #[test]
    fn difficulty_roundtrip(difficulty in 1u64..10_000_000) {
        let manager = DifficultyManager::with_defaults();

        let bits = manager.difficulty_to_bits(difficulty);
        let roundtrip = manager.bits_to_difficulty(bits);

        // Tolerate precision loss from the compact-bits encoding.
        // NOTE(review): the assertion allows a 2x band in either direction,
        // not 10% — confirm which tolerance is intended.
        let ratio = roundtrip as f64 / difficulty as f64;
        prop_assert!(
            ratio > 0.5 && ratio < 2.0,
            "Roundtrip failed: {} -> bits {:08x} -> {}",
            difficulty,
            bits,
            roundtrip
        );
    }

    /// Property: Higher difficulty always means smaller target.
    #[test]
    fn difficulty_target_inverse_relationship(
        diff1 in 1u64..1_000_000,
        diff2 in 1u64..1_000_000,
    ) {
        prop_assume!(diff1 != diff2);

        let manager = DifficultyManager::with_defaults();

        let bits1 = manager.difficulty_to_bits(diff1);
        let bits2 = manager.difficulty_to_bits(diff2);

        let target1 = manager.bits_to_target(bits1);
        let target2 = manager.bits_to_target(bits2);

        // Higher difficulty should give smaller (or equal) target; equality is
        // allowed because distinct difficulties can collapse to the same bits.
        if diff1 > diff2 {
            prop_assert!(target1 <= target2, "Higher diff {} gave larger target than diff {}", diff1, diff2);
        } else {
            prop_assert!(target2 <= target1, "Higher diff {} gave larger target than diff {}", diff2, diff1);
        }
    }

    /// Property: Difficulty adjustment is bounded by max_adjustment_factor.
    /// Note: We allow some tolerance due to precision loss in bits conversion.
    #[test]
    fn difficulty_adjustment_bounded(
        starting_diff in 1000u64..100_000, // Use larger values for better precision
        time_ratio in 0.3f64..3.0, // Reasonable range to avoid extreme clamping effects
    ) {
        let params = DaaParams::for_testing();
        let manager = DifficultyManager::new(params.clone());

        let starting_bits = manager.difficulty_to_bits(starting_diff);
        // Re-read difficulty to account for precision loss in bits conversion
        let actual_starting_diff = manager.bits_to_difficulty(starting_bits);

        // Create window with given time ratio (actual_time / expected_time)
        let window: Vec<DaaBlock> = (0..10)
            .map(|i| {
                let expected_time = params.target_time_ms;
                let actual_time = (expected_time as f64 * time_ratio) as u64;
                DaaBlock {
                    timestamp: i * actual_time.max(1), // Ensure non-zero
                    daa_score: i,
                    bits: starting_bits,
                }
            })
            .collect();

        let new_bits = manager.calculate_next_difficulty(&window).unwrap();
        let new_diff = manager.bits_to_difficulty(new_bits);

        // Adjustment ratio should be within bounds (with 50% tolerance for precision loss)
        let ratio = new_diff as f64 / actual_starting_diff as f64;
        let tolerance = 1.5; // Allow 50% extra for bits conversion precision loss
        prop_assert!(
            ratio >= 1.0 / (params.max_adjustment_factor * tolerance)
                && ratio <= params.max_adjustment_factor * tolerance,
            "Adjustment ratio {} out of bounds (max factor {} with {}x tolerance)",
            ratio,
            params.max_adjustment_factor,
            tolerance
        );
    }

    /// Property: Fast blocks increase difficulty.
    #[test]
    fn difficulty_increases_for_fast_blocks(starting_diff in 100u64..10_000) {
        let params = DaaParams::for_testing();
        let manager = DifficultyManager::new(params.clone());

        let starting_bits = manager.difficulty_to_bits(starting_diff);

        // Blocks coming at half the target time
        let window: Vec<DaaBlock> = (0..10)
            .map(|i| DaaBlock {
                timestamp: i * params.target_time_ms / 2,
                daa_score: i,
                bits: starting_bits,
            })
            .collect();

        let new_bits = manager.calculate_next_difficulty(&window).unwrap();
        let old_diff = manager.bits_to_difficulty(starting_bits);
        let new_diff = manager.bits_to_difficulty(new_bits);

        // Difficulty should increase (or stay same if at max)
        prop_assert!(
            new_diff >= old_diff,
            "Fast blocks should increase difficulty: old={}, new={}",
            old_diff,
            new_diff
        );
    }

    /// Property: Slow blocks decrease difficulty.
    #[test]
    fn difficulty_decreases_for_slow_blocks(starting_diff in 100u64..10_000) {
        let params = DaaParams::for_testing();
        let manager = DifficultyManager::new(params.clone());

        let starting_bits = manager.difficulty_to_bits(starting_diff);

        // Blocks coming at double the target time
        let window: Vec<DaaBlock> = (0..10)
            .map(|i| DaaBlock {
                timestamp: i * params.target_time_ms * 2,
                daa_score: i,
                bits: starting_bits,
            })
            .collect();

        let new_bits = manager.calculate_next_difficulty(&window).unwrap();
        let old_diff = manager.bits_to_difficulty(starting_bits);
        let new_diff = manager.bits_to_difficulty(new_bits);

        // Difficulty should decrease (or stay same if at minimum); the second
        // disjunct accepts results clamped at the configured floor.
        prop_assert!(
            new_diff <= old_diff || new_diff <= params.min_difficulty,
            "Slow blocks should decrease difficulty: old={}, new={} (min={})",
            old_diff,
            new_diff,
            params.min_difficulty
        );
    }

    /// Property: PoW validation is consistent with target.
    #[test]
    fn pow_validation_consistency(
        hash_bytes in any::<[u8; 32]>(),
        bits in 0x17000001u32..0x1f7fffff,
    ) {
        let manager = DifficultyManager::with_defaults();
        let hash = Hash256::from_bytes(hash_bytes);
        let target = manager.bits_to_target(bits);

        let is_valid = manager.validate_pow(&hash, bits);
        // A hash meets the PoW requirement iff it compares <= the target.
        let expected_valid = hash <= target;

        prop_assert_eq!(is_valid, expected_valid);
    }

    /// Property: Hashrate estimation is positive for valid difficulty.
    /// Note: Valid bits require coefficient in (0, REF_COEF] at reference exponent
    /// to produce non-zero difficulty.
    #[test]
    fn hashrate_estimation_positive(
        exponent in 0x15u32..0x1d, // Below reference to ensure positive difficulty
        coefficient in 1u32..0xffff, // Up to REF_COEF
    ) {
        // Compact "bits": high byte is the exponent, low bytes the coefficient.
        let bits = (exponent << 24) | coefficient;
        let manager = DifficultyManager::with_defaults();

        let difficulty = manager.bits_to_difficulty(bits);
        // Skip if difficulty is 0 (would cause zero hashrate)
        prop_assume!(difficulty > 0);

        let hashrate = manager.estimate_hashrate(bits);

        prop_assert!(hashrate > 0.0, "Hashrate should be positive for bits {:08x} (diff={})", bits, difficulty);
        prop_assert!(hashrate.is_finite(), "Hashrate should be finite for bits {:08x}", bits);
    }
}
|
|
|
|
// ============================================================================
|
|
// Amount Property Tests (from synor-types but relevant to consensus)
|
|
// ============================================================================
|
|
|
|
proptest! {
|
|
/// Property: Amount addition is commutative
|
|
#[test]
|
|
fn amount_addition_commutative(a in 0u64..Amount::MAX_SUPPLY, b in 0u64..Amount::MAX_SUPPLY) {
|
|
let amt_a = Amount::from_sompi(a);
|
|
let amt_b = Amount::from_sompi(b);
|
|
|
|
let sum_ab = amt_a.checked_add(amt_b);
|
|
let sum_ba = amt_b.checked_add(amt_a);
|
|
|
|
prop_assert_eq!(sum_ab, sum_ba);
|
|
}
|
|
|
|
/// Property: Amount subtraction satisfies a - b + b = a when b <= a
|
|
#[test]
|
|
fn amount_subtraction_addback(
|
|
a in 0u64..Amount::MAX_SUPPLY,
|
|
b in 0u64..Amount::MAX_SUPPLY,
|
|
) {
|
|
prop_assume!(a >= b);
|
|
|
|
let amt_a = Amount::from_sompi(a);
|
|
let amt_b = Amount::from_sompi(b);
|
|
|
|
let diff = amt_a.saturating_sub(amt_b);
|
|
let restored = diff.checked_add(amt_b);
|
|
|
|
prop_assert_eq!(restored, Some(amt_a));
|
|
}
|
|
|
|
/// Property: Saturating sub never exceeds original value
|
|
#[test]
|
|
fn amount_saturating_sub_bounded(
|
|
a in 0u64..Amount::MAX_SUPPLY,
|
|
b in 0u64..Amount::MAX_SUPPLY,
|
|
) {
|
|
let amt_a = Amount::from_sompi(a);
|
|
let amt_b = Amount::from_sompi(b);
|
|
|
|
let result = amt_a.saturating_sub(amt_b);
|
|
|
|
prop_assert!(result <= amt_a);
|
|
}
|
|
}
|
|
|
|
// ============================================================================
|
|
// Edge Case Tests
|
|
// ============================================================================
|
|
|
|
proptest! {
|
|
/// Property: Empty UTXO set has zero total value
|
|
#[test]
|
|
fn empty_utxo_set_invariants(_seed in any::<u64>()) {
|
|
let set = UtxoSet::new();
|
|
|
|
prop_assert!(set.is_empty());
|
|
prop_assert_eq!(set.len(), 0);
|
|
prop_assert_eq!(set.total_value(), Amount::ZERO);
|
|
}
|
|
|
|
/// Property: Single-block window keeps same difficulty
|
|
#[test]
|
|
fn single_block_window_stable(bits in 0x17000001u32..0x1f7fffff) {
|
|
let manager = DifficultyManager::with_defaults();
|
|
|
|
let window = vec![DaaBlock {
|
|
timestamp: 1000,
|
|
daa_score: 1,
|
|
bits,
|
|
}];
|
|
|
|
let new_bits = manager.calculate_next_difficulty(&window).unwrap();
|
|
prop_assert_eq!(new_bits, bits, "Single block window should keep same difficulty");
|
|
}
|
|
}
|