assert_ne!(node_txn[0].input[0].previous_output, node_txn[2].input[0].previous_output);
assert_ne!(node_txn[1].input[0].previous_output, node_txn[2].input[0].previous_output);
- assert_eq!(node_txn[0].input[0].previous_output, revoked_htlc_txn[1].input[0].previous_output);
- assert_eq!(node_txn[1].input[0].previous_output, revoked_htlc_txn[0].input[0].previous_output);
+ assert_eq!(node_txn[1].input[0].previous_output, revoked_htlc_txn[1].input[0].previous_output);
+ assert_eq!(node_txn[0].input[0].previous_output, revoked_htlc_txn[0].input[0].previous_output);
// node_txn[3] spends the revoked outputs from the revoked_htlc_txn (which only have one
// output, checked above).
let mut nodes = create_network(2, &node_cfgs, &node_chanmgrs);
+ // Reset our RNG counters to mirror the RNG output from when this test was written.
+ nodes[0].keys_manager.backing.inner.entropy_source.set_counter(0x1_0000_0004);
+ nodes[1].keys_manager.backing.inner.entropy_source.set_counter(0x1_0000_0004);
+
// Open a channel, lock in an HTLC, and immediately broadcast the commitment transaction. This
// ensures that the HTLC timeout package is held until we reach its expiration height.
let (_, _, chan_id, funding_tx) = create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 100_000, 50_000_000);
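// A standalone sketch (not part of the patch) of where the 0x1_0000_0004 reset above comes
// from: the removed AtomicCounter bumped its high word on its very first increment, so from
// the second call onwards it returned ((1 << 32) | n) rather than n. The offset of 4 is an
// assumption about how many RNG values the test harness has consumed by this point.
fn old_counter_outputs(n: usize) -> Vec<u64> {
    let (mut low, mut high) = (0u64, 0u64);
    (0..n)
        .map(|_| {
            let l = low;
            low += 1;
            // Mirror the removed code: the first increment also incremented the high word.
            let h = if l == 0 { let h = high; high += 1; h } else { high };
            (h << 32) | l
        })
        .collect()
}

fn main() {
    // Prints the first five old-counter outputs in hex:
    // 0x0, 0x100000001, 0x100000002, 0x100000003, 0x100000004.
    // After four draws the next old-counter value is 0x1_0000_0004, whereas the new counter
    // would hand out 4, hence the set_counter(0x1_0000_0004) calls above.
    println!("{:#x?}", old_counter_outputs(5));
}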
channel_master_key: Xpriv,
channel_child_index: AtomicUsize,
+ #[cfg(test)]
+ pub(crate) entropy_source: RandomBytes,
+ #[cfg(not(test))]
entropy_source: RandomBytes,
seed: [u8; 32],
/// Switching between this struct and [`KeysManager`] will invalidate any previously issued
/// invoices and attempts to pay previous invoices will fail.
pub struct PhantomKeysManager {
+ #[cfg(test)]
+ pub(crate) inner: KeysManager,
+ #[cfg(not(test))]
inner: KeysManager,
inbound_payment_key: KeyMaterial,
phantom_secret: SecretKey,
pub fn new(seed: [u8; 32]) -> Self {
Self { seed, index: AtomicCounter::new() }
}
+
+ #[cfg(test)]
+ /// Force the counter to a value to produce the same output again. Mostly useful in tests where
+ /// we need to match the behavior of a previous version which didn't use as much RNG output.
+ pub(crate) fn set_counter(&self, count: u64) {
+ self.index.set_counter(count);
+ }
}
impl EntropySource for RandomBytes {
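// The body of this impl is elided in the excerpt above. Purely as a self-contained
// illustration of the property the test relies on -- every output is a deterministic
// function of the fixed seed and the counter, so rewinding the counter replays the same
// bytes -- here is a sketch using the `sha2` crate as a stand-in PRF. This is an assumed
// construction for illustration, not LDK's actual derivation.
use sha2::{Digest, Sha256};
use std::sync::atomic::{AtomicU64, Ordering};

struct SketchRandomBytes {
    seed: [u8; 32],
    index: AtomicU64,
}

impl SketchRandomBytes {
    fn get_secure_random_bytes(&self) -> [u8; 32] {
        // Draw the next counter value and mix it with the seed; the same (seed, index)
        // pair always yields the same output.
        let index = self.index.fetch_add(1, Ordering::AcqRel);
        let mut hasher = Sha256::new();
        hasher.update(&self.seed);
        hasher.update(index.to_be_bytes());
        let mut out = [0u8; 32];
        out.copy_from_slice(&hasher.finalize());
        out
    }
}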
-//! A simple atomic counter that uses AtomicUsize to give a u64 counter.
+//! A simple atomic counter that uses mutexes if the platform doesn't support atomic u64s.
-#[cfg(not(any(target_pointer_width = "32", target_pointer_width = "64")))]
-compile_error!("We need at least 32-bit pointers for atomic counter (and to have enough memory to run LDK)");
+#[cfg(target_has_atomic = "64")]
+use core::sync::atomic::{AtomicU64, Ordering};
+#[cfg(not(target_has_atomic = "64"))]
+use crate::sync::Mutex;
-use core::sync::atomic::{AtomicUsize, Ordering};
-
-#[derive(Debug)]
pub(crate) struct AtomicCounter {
- // Usize needs to be at least 32 bits to avoid overflowing both low and high. If usize is 64
- // bits we will never realistically count into high:
- counter_low: AtomicUsize,
- counter_high: AtomicUsize,
+ #[cfg(target_has_atomic = "64")]
+ counter: AtomicU64,
+ #[cfg(not(target_has_atomic = "64"))]
+ counter: Mutex<u64>,
}
impl AtomicCounter {
pub(crate) fn new() -> Self {
Self {
- counter_low: AtomicUsize::new(0),
- counter_high: AtomicUsize::new(0),
+ #[cfg(target_has_atomic = "64")]
+ counter: AtomicU64::new(0),
+ #[cfg(not(target_has_atomic = "64"))]
+ counter: Mutex::new(0),
}
}
pub(crate) fn get_increment(&self) -> u64 {
- let low = self.counter_low.fetch_add(1, Ordering::AcqRel) as u64;
- let high = if low == 0 {
- self.counter_high.fetch_add(1, Ordering::AcqRel) as u64
- } else {
- self.counter_high.load(Ordering::Acquire) as u64
- };
- (high << 32) | low
+ #[cfg(target_has_atomic = "64")] {
+ self.counter.fetch_add(1, Ordering::AcqRel)
+ }
+ #[cfg(not(target_has_atomic = "64"))] {
+ let mut mtx = self.counter.lock().unwrap();
+ *mtx += 1;
+ *mtx - 1
+ }
+ }
+ #[cfg(test)]
+ pub(crate) fn set_counter(&self, count: u64) {
+ #[cfg(target_has_atomic = "64")] {
+ self.counter.store(count, Ordering::Release);
+ }
+ #[cfg(not(target_has_atomic = "64"))] {
+ let mut mtx = self.counter.lock().unwrap();
+ *mtx = count;
+ }
}
}
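// A minimal sketch of a unit test (not part of the patch) pinning down the semantics the
// callers above rely on: get_increment() hands out the pre-increment value, and
// set_counter() rewinds the sequence so the same values are produced again.
#[cfg(test)]
mod counter_semantics_sketch {
    use super::AtomicCounter;

    #[test]
    fn returns_previous_value_and_can_be_rewound() {
        let counter = AtomicCounter::new();
        assert_eq!(counter.get_increment(), 0);
        assert_eq!(counter.get_increment(), 1);
        // Rewinding reproduces earlier output, which is what the functional test above
        // depends on when it calls set_counter(0x1_0000_0004).
        counter.set_counter(1);
        assert_eq!(counter.get_increment(), 1);
        assert_eq!(counter.get_increment(), 2);
    }
}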