diff --git a/lwk_test_util/src/lib.rs b/lwk_test_util/src/lib.rs index 2a85e3679..4107f48aa 100644 --- a/lwk_test_util/src/lib.rs +++ b/lwk_test_util/src/lib.rs @@ -253,6 +253,29 @@ pub fn update_v2_test_vector_after_many_transactions() -> Vec<u8> { include_bytes!("../test_data/update_v2_after_many_txs.bin").to_vec() } +/// First of 3 consecutive updates for merge testing +/// Contains initial wallet sync (tip only) +pub fn update_merge_test_1() -> Vec<u8> { + include_bytes!("../test_data/merge_updates/update_merge_1.bin").to_vec() +} + +/// Second of 3 consecutive updates for merge testing +/// Contains wallet funding transaction +pub fn update_merge_test_2() -> Vec<u8> { + include_bytes!("../test_data/merge_updates/update_merge_2.bin").to_vec() +} + +/// Third of 3 consecutive updates for merge testing +/// Contains spending transaction +pub fn update_merge_test_3() -> Vec<u8> { + include_bytes!("../test_data/merge_updates/update_merge_3.bin").to_vec() +} + +/// Descriptor used for the merge test updates +pub fn update_merge_test_descriptor() -> String { + include_str!("../test_data/merge_updates/descriptor.txt").to_string() +} + pub fn update_test_vector_encrypted_bytes() -> Vec<u8> { Vec::<u8>::from_hex(include_str!( "../test_data/update_test_vector_encrypted.hex" diff --git a/lwk_test_util/test_data/merge_updates/README.md b/lwk_test_util/test_data/merge_updates/README.md new file mode 100644 index 000000000..45a079523 --- /dev/null +++ b/lwk_test_util/test_data/merge_updates/README.md @@ -0,0 +1,34 @@ +# Merge Test Updates + +This directory contains 3 consecutive `Update` structs for a regtest wallet with the descriptor stored in `descriptor.txt`: + +ct(slip77(d48cc2f9c22d8afadf38f9656d6ebc2ad7773273f34cbf62e4b15f0e5fea46f2),elwpkh(tpubD6NzVbkrYhZ4X2UC8rSu2hqEuaZis6FakbAKRxjqMJLGWHyqLy4iKu41teNY4BuSkvgRhfG3Gi1LJnN4HzNkpuGxxRYYJYTVBow8xEs9MD2/*)) + +## Files + +- `update_merge_1.bin` - Initial wallet sync (tip only update) +- `update_merge_2.bin` - Wallet funding transaction +- `update_merge_3.bin` - Spending 
transaction +- `descriptor.txt` - The wallet descriptor used for these updates + +## Expected Wallet States + +When these updates are applied sequentially to a fresh wallet, the expected states are: + +### After Update 1 +- **Balance**: 0 L-BTC +- **Transactions**: 0 +- **UTXOs**: 0 +- **Description**: Initial sync of an empty wallet, only updates the blockchain tip + +### After Update 2 +- **Balance**: 1,000,000 sats +- **Transactions**: 1 +- **UTXOs**: 1 +- **Description**: Wallet received funding transaction of 1,000,000 sats + +### After Update 3 +- **Balance**: 899,974 sats +- **Transactions**: 2 +- **UTXOs**: 1 (change output) +- **Description**: Wallet sent 100,000 sats to an external address with a fee of 26 sats diff --git a/lwk_test_util/test_data/merge_updates/descriptor.txt b/lwk_test_util/test_data/merge_updates/descriptor.txt new file mode 100644 index 000000000..cc4fe67e8 --- /dev/null +++ b/lwk_test_util/test_data/merge_updates/descriptor.txt @@ -0,0 +1 @@ +ct(slip77(d48cc2f9c22d8afadf38f9656d6ebc2ad7773273f34cbf62e4b15f0e5fea46f2),elwpkh(tpubD6NzVbkrYhZ4X2UC8rSu2hqEuaZis6FakbAKRxjqMJLGWHyqLy4iKu41teNY4BuSkvgRhfG3Gi1LJnN4HzNkpuGxxRYYJYTVBow8xEs9MD2/*)) \ No newline at end of file diff --git a/lwk_test_util/test_data/merge_updates/update_merge_1.bin b/lwk_test_util/test_data/merge_updates/update_merge_1.bin new file mode 100644 index 000000000..7b79b4bb3 Binary files /dev/null and b/lwk_test_util/test_data/merge_updates/update_merge_1.bin differ diff --git a/lwk_test_util/test_data/merge_updates/update_merge_2.bin b/lwk_test_util/test_data/merge_updates/update_merge_2.bin new file mode 100644 index 000000000..86941583f Binary files /dev/null and b/lwk_test_util/test_data/merge_updates/update_merge_2.bin differ diff --git a/lwk_test_util/test_data/merge_updates/update_merge_3.bin b/lwk_test_util/test_data/merge_updates/update_merge_3.bin new file mode 100644 index 000000000..afa9bbc14 Binary files /dev/null and 
b/lwk_test_util/test_data/merge_updates/update_merge_3.bin differ diff --git a/lwk_wollet/src/cache.rs b/lwk_wollet/src/cache.rs index 646cfd349..815c8b1c3 100644 --- a/lwk_wollet/src/cache.rs +++ b/lwk_wollet/src/cache.rs @@ -166,11 +166,23 @@ impl Cache { } pub fn spent(&self) -> Result, Error> { - Ok(self + // Dummy spent outputs are outputs that are not in height map, but we have to consider them + // for the utxo mode. + let dummy_spent: Vec = self .all_txs - .values() + .iter() + .filter(|(_, tx)| tx.output.is_empty()) // Only dummy tx have no outputs + .map(|(_, tx)| tx.input.iter().map(|i| i.previous_output)) + .flatten() + .collect(); + + Ok(self + .heights + .keys() + .filter_map(|txid| self.all_txs.get(txid)) .flat_map(|tx| tx.input.iter()) .map(|i| i.previous_output) + .chain(dummy_spent) .collect()) } } diff --git a/lwk_wollet/src/update.rs b/lwk_wollet/src/update.rs index 7facad61c..e1eb6ab14 100644 --- a/lwk_wollet/src/update.rs +++ b/lwk_wollet/src/update.rs @@ -173,6 +173,50 @@ impl Update { .map_err(|e| Error::Generic(e.to_string()))?; Self::deserialize_decrypted(&vec, desc) } + + /// Merge another update into this one. + /// + /// This is used to squash multiple sequential updates into a single update. + /// + /// NOTE: it's caller responsibility to ensure that the following update is the next in sequence + /// and updates are not mixed up. + pub(crate) fn merge(&mut self, following: Update) { + // By construction there should not be duplicate txs, + // even if so when merged in the hashmap they will be overridden. + self.new_txs.txs.extend(following.new_txs.txs); + + self.new_txs.unblinds.extend(following.new_txs.unblinds); + + // When we apply an update we first delete then insert the new txs. 
+ // Suppose to have Um = U1.merge(U2) + // The order would be: + // U1.delete, U1.insert, U2.delete, U2.insert + + // Um.insert = (U1.insert \ U2.delete) ∪ U2.insert + self.txid_height_new + .retain(|(t, _)| !following.txid_height_delete.contains(t)); + for (txid, height) in following.txid_height_new { + // In this case we can't extend, we want to keep the height of the newest update in case of duplicates + self.txid_height_new.retain(|(t, _)| *t != txid); + self.txid_height_new.push((txid, height)); + } + + // Um.delete = U1.delete ∪ U2.delete + self.txid_height_delete.extend(following.txid_height_delete); + + // Merge timestamps and scripts + self.timestamps.extend(following.timestamps); + self.scripts_with_blinding_pubkey + .extend(following.scripts_with_blinding_pubkey); + + // Update tip to other's tip + self.tip = following.tip; + + // Update version to latest + self.version = following.version; + + // We don't need to update the wollet status, it's right to keep the status of the first update + } } fn default_blockheader() -> BlockHeader { diff --git a/lwk_wollet/src/wollet.rs b/lwk_wollet/src/wollet.rs index 87039c5f9..134de0398 100644 --- a/lwk_wollet/src/wollet.rs +++ b/lwk_wollet/src/wollet.rs @@ -66,6 +66,8 @@ pub struct WolletBuilder { network: ElementsNetwork, descriptor: WolletDescriptor, store: Arc, + /// Number of updates to trigger merge. None disables merging. + merge_threshold: Option, } impl WolletBuilder { @@ -75,9 +77,18 @@ impl WolletBuilder { network, descriptor, store: Arc::new(FakeStore::new()), + merge_threshold: None, } } + /// Set the threshold for merging updates during build. + /// When the number of updates exceeds this threshold, they will be merged into one. + /// Set to None to disable merging (default). 
+ pub fn with_merge_threshold(mut self, threshold: Option) -> Self { + self.merge_threshold = threshold; + self + } + /// Specify the `Wollet` store for persistence pub fn with_store(mut self, store: Arc) -> Self { self.store = store; @@ -116,21 +127,73 @@ impl WolletBuilder { max_weight_to_satisfy, }; + // Check if merging is enabled and needed + let mut merging = if let Some(threshold) = self.merge_threshold { + let merge_key = update_key(threshold); + match wollet.store.get(&merge_key) { + Ok(Some(_)) => { + // There are at least threshold+1 updates, need to merge + let first_key = update_key(0); + match wollet.store.get(&first_key) { + Ok(Some(bytes)) => Some(Update::deserialize(&bytes)?), + _ => None, + } + } + _ => None, + } + } else { + None + }; + // Restore updates from the store using indexed keys for i in 0.. { let key = update_key(i); match wollet.store.get(&key) { Ok(Some(bytes)) => { let update = Update::deserialize(&bytes)?; - wollet.apply_update_no_persist(update)?; + wollet.apply_update_no_persist(update.clone())?; + if let Some(ref mut m) = merging { + if i > 0 { + m.merge(update); + } + } } Ok(None) => { - // Update the next index and stop + // Update the next index let mut next_update_index = wollet .next_update_index .lock() .map_err(|_| Error::Generic("next_update_index lock poisoned".into()))?; *next_update_index = i; + + // If we were merging, persist the merged update and clean up + if let Some(merged) = merging { + // Delete all old updates + // we are starting from the last to avoid having holes in the beginning + for j in (0..i).rev() { + let old_key = update_key(j); + wollet.store.remove(&old_key).map_err(|e| { + Error::Generic(format!("failed to remove update {}: {}", j, e)) + })?; + } + + // A crash here or during the removal loop will leave the cache empty or at + // an old state which is not the end of the world, the following scan will + // bring the cache back to the correct state. 
+ + // Store the merged update as update 0 + let merged_bytes = merged.serialize()?; + wollet + .store + .put(&update_key(0), &merged_bytes) + .map_err(|e| { + Error::Generic(format!("failed to store merged update: {}", e)) + })?; + + // Update next_update_index to 1 + *next_update_index = 1; + } + break; } Err(e) => return Err(Error::Generic(format!("store error: {e}"))), @@ -1457,6 +1520,188 @@ mod tests { ); } + /// Test that verifies the merge test updates can be deserialized and applied correctly. + /// This is a unit test that uses hard-coded binary data (no external nodes required). + #[test] + fn test_merge_updates_deserialize() { + use lwk_test_util::{ + update_merge_test_1, update_merge_test_2, update_merge_test_3, + update_merge_test_descriptor, + }; + + // Load the descriptor + let desc_str = update_merge_test_descriptor(); + let desc: WolletDescriptor = desc_str.parse().unwrap(); + + // Load and deserialize the three updates + let update1_bytes = update_merge_test_1(); + let update2_bytes = update_merge_test_2(); + let update3_bytes = update_merge_test_3(); + + let update1 = Update::deserialize(&update1_bytes).unwrap(); + let update2 = Update::deserialize(&update2_bytes).unwrap(); + let update3 = Update::deserialize(&update3_bytes).unwrap(); + + // Verify updates can be applied sequentially + let mut wollet = + Wollet::without_persist(ElementsNetwork::default_regtest(), desc.clone()).unwrap(); + + // After update 1: should be empty (tip only update) + wollet.apply_update(update1).unwrap(); + let balance1 = wollet.balance().unwrap(); + let txs1 = wollet.transactions().unwrap(); + let utxos1 = wollet.utxos().unwrap(); + assert!( + balance1.is_empty() || balance1.values().all(|&v| v == 0), + "Balance should be 0 after update 1" + ); + assert_eq!(txs1.len(), 0, "Should have 0 transactions after update 1"); + assert_eq!(utxos1.len(), 0, "Should have 0 UTXOs after update 1"); + + // After update 2: should have 1,000,000 sats from funding + 
wollet.apply_update(update2).unwrap(); + let balance2 = wollet.balance().unwrap(); + let txs2 = wollet.transactions().unwrap(); + let utxos2 = wollet.utxos().unwrap(); + let policy_asset = wollet.policy_asset(); + let btc_balance2 = balance2.get(&policy_asset).copied().unwrap_or(0); + assert_eq!( + btc_balance2, 1_000_000, + "Balance should be 1,000,000 after funding" + ); + assert_eq!(txs2.len(), 1, "Should have 1 transaction after funding"); + assert_eq!(utxos2.len(), 1, "Should have 1 UTXO after funding"); + + // After update 3: should have reduced balance after spending + wollet.apply_update(update3).unwrap(); + let balance3 = wollet.balance().unwrap(); + let txs3 = wollet.transactions().unwrap(); + let utxos3 = wollet.utxos().unwrap(); + let btc_balance3 = balance3.get(&policy_asset).copied().unwrap_or(0); + assert!( + btc_balance3 < 900_000, + "Balance should be less than 900,000 after spending (sent 100k + fee)" + ); + assert!( + btc_balance3 > 800_000, + "Balance should be more than 800,000 (fee should be less than 100k)" + ); + assert_eq!(txs3.len(), 2, "Should have 2 transactions after sending"); + assert_eq!(utxos3.len(), 1, "Should have 1 UTXO after sending (change)"); + } + + /// Test that verifies the merge functionality works correctly. + /// Creates 3 updates, stores them, builds with merge_threshold=Some(2), + /// and verifies they are merged into a single update. 
+ /// + /// Expected final state (from lwk_test_util/test_data/merge_updates/README.md): + /// - Balance: 899,974 sats + /// - Transactions: 2 + /// - UTXOs: 1 (change output) + #[test] + fn test_merge_updates() { + use lwk_common::MemoryStore; + use lwk_test_util::{ + update_merge_test_1, update_merge_test_2, update_merge_test_3, + update_merge_test_descriptor, + }; + + // Load the descriptor + let desc_str = update_merge_test_descriptor(); + let desc: WolletDescriptor = desc_str.parse().unwrap(); + + // Load the three updates + let update1_bytes = update_merge_test_1(); + let update2_bytes = update_merge_test_2(); + let update3_bytes = update_merge_test_3(); + + // Create a memory store and manually insert the updates + let store = Arc::new(MemoryStore::default()); + store + .put(&update_key(0), &update1_bytes) + .expect("Failed to store update 0"); + store + .put(&update_key(1), &update2_bytes) + .expect("Failed to store update 1"); + store + .put(&update_key(2), &update3_bytes) + .expect("Failed to store update 2"); + + // Build with merge_threshold=Some(2) (should trigger merge since we have 3 updates) + let wollet = WolletBuilder::new(ElementsNetwork::default_regtest(), desc.clone()) + .with_store(store.clone()) + .with_merge_threshold(Some(2)) + .build() + .expect("Failed to build wollet"); + + // Verify state is correct (hardcoded values from README.md) + let balance = wollet.balance().expect("Failed to get balance"); + let txs = wollet.transactions().expect("Failed to get transactions"); + let utxos = wollet.utxos().expect("Failed to get utxos"); + let policy_asset = wollet.policy_asset(); + let btc_balance = balance.get(&policy_asset).copied().unwrap_or(0); + + // Expected final state: Balance 899,974, Transactions: 2, UTXOs: 1 + assert_eq!( + btc_balance, 899_974, + "Balance should be exactly 899,974 sats after merging all updates" + ); + assert_eq!( + txs.len(), + 2, + "Should have exactly 2 transactions after merge" + ); + assert_eq!(utxos.len(), 1, 
"Should have exactly 1 UTXO after merge"); + + // Verify that updates were merged - only update 0 should exist + assert!( + store.get(&update_key(0)).unwrap().is_some(), + "Update 0 should exist" + ); + assert!( + store.get(&update_key(1)).unwrap().is_none(), + "Update 1 should have been deleted" + ); + assert!( + store.get(&update_key(2)).unwrap().is_none(), + "Update 2 should have been deleted" + ); + assert!( + store.get(&update_key(3)).unwrap().is_none(), + "Update 3 should have been deleted" + ); + + // Verify the merged update can be deserialized and applied to a fresh wallet + let merged_bytes = store.get(&update_key(0)).unwrap().unwrap(); + let merged_update = + Update::deserialize(&merged_bytes).expect("Failed to deserialize merged update"); + + // Apply merged update to a fresh wallet + let mut fresh_wollet = + Wollet::without_persist(ElementsNetwork::default_regtest(), desc.clone()).unwrap(); + fresh_wollet + .apply_update(merged_update) + .expect("Failed to apply merged update"); + + // Verify fresh wallet has same state + let fresh_balance = fresh_wollet.balance().unwrap(); + let fresh_btc_balance = fresh_balance.get(&policy_asset).copied().unwrap_or(0); + assert_eq!( + btc_balance, fresh_btc_balance, + "Merged update should produce same balance" + ); + assert_eq!( + wollet.transactions().unwrap().len(), + fresh_wollet.transactions().unwrap().len(), + "Merged update should produce same transaction count" + ); + assert_eq!( + wollet.utxos().unwrap().len(), + fresh_wollet.utxos().unwrap().len(), + "Merged update should produce same UTXO count" + ); + } + #[test] fn test_desc_no_wildcard_with_index() { let k = "9c8e4f05c7711a98c838be228bcb84924d4570ca53f35fa1c793e58841d47023"; diff --git a/lwk_wollet/tests/e2e.rs b/lwk_wollet/tests/e2e.rs index fb03ed525..c1d8d04a8 100644 --- a/lwk_wollet/tests/e2e.rs +++ b/lwk_wollet/tests/e2e.rs @@ -4164,3 +4164,119 @@ fn test_zmq_endpoint() { assert_eq!(msg[0], b"rawtx"); assert!(!msg[1].is_empty()); } + +#[test] +fn 
test_merge_updates_e2e() { + let env = TestEnvBuilder::from_env().with_electrum().build(); + let signer = SwSigner::new(TEST_MNEMONIC, false).unwrap(); + let signers: [&AnySigner; 1] = [&AnySigner::Software(signer)]; + let slip77_key = "9c8e4f05c7711a98c838be228bcb84924d4570ca53f35fa1c793e58841d47023"; + let desc_str = format!( + "ct(slip77({}),elwpkh({}/*))", + slip77_key, + signers[0].xpub().unwrap() + ); + let client = test_client_electrum(&env.electrum_url()); + let mut wallet = TestWollet::new(client, &desc_str); + + wallet.fund_btc(&env); + wallet.send_btc(&signers, None, None); + + let expected_balance = wallet.wollet.balance().unwrap(); + let expected_tx_count = wallet.wollet.transactions().unwrap().len(); + let expected_utxo_count = wallet.wollet.utxos().unwrap().len(); + let num_updates_before = wallet.wollet.updates().unwrap().len(); + assert_eq!(num_updates_before, 4); + + let descriptor = wallet.wollet.wollet_descriptor(); + let network = ElementsNetwork::default_regtest(); + let db_root_dir = wallet.db_root_dir(); + + // Reconstruct the encrypted file store + // this is needed to use the same path/encryption created by the TestWollet ( which use Wollet::with_fs_persist ) + let mut path = db_root_dir.path().to_path_buf(); + path.push(network.as_str()); + path.push("enc_cache"); + path.push( + ::hash(descriptor.to_string().as_bytes()).to_string(), + ); + let file_store = FileStore::new(path.clone()).unwrap(); + let encrypted_store = EncryptedStore::new(file_store, descriptor.encryption_key_bytes()); + + let wollet = WolletBuilder::new(network, descriptor.clone()) + .with_store(std::sync::Arc::new(encrypted_store)) + .with_merge_threshold(Some(2)) + .build() + .unwrap(); + + assert_eq!(expected_balance, wollet.balance().unwrap()); + assert_eq!(expected_tx_count, wollet.transactions().unwrap().len()); + assert_eq!(expected_utxo_count, wollet.utxos().unwrap().len()); + + let updates_after = wollet.updates().unwrap(); + assert_eq!(updates_after.len(), 1); + 
+ // Verify the merged wallet can be reopened and still has correct state + drop(wollet); + let mut wollet = Wollet::with_fs_persist(network, descriptor.clone(), &db_root_dir).unwrap(); + assert_eq!(expected_balance, wollet.balance().unwrap()); + assert_eq!(expected_tx_count, wollet.transactions().unwrap().len()); + assert_eq!(expected_utxo_count, wollet.utxos().unwrap().len()); + assert_eq!(wollet.updates().unwrap().len(), 1); + + // Test merge with txid_height_delete: + // Build a tx, apply it without broadcasting, then sync so it gets deleted. + let address = wollet.address(None).unwrap().address().clone(); + let mut pset = wollet + .tx_builder() + .add_lbtc_recipient(&address, 10_000) + .unwrap() + .finish() + .unwrap(); + let signer = SwSigner::new(TEST_MNEMONIC, false).unwrap(); + signer.sign(&mut pset).unwrap(); + let tx = wollet.finalize(&mut pset).unwrap(); + let phantom_txid = tx.txid(); + wollet.apply_transaction(tx).unwrap(); + assert!(wollet.transaction(&phantom_txid).unwrap().is_some()); + + // Sync: electrum doesn't know about the phantom tx, so it will be deleted + let mut client = test_client_electrum(&env.electrum_url()); + let update = client.full_scan(&wollet).unwrap().unwrap(); + assert!(update.txid_height_delete.contains(&phantom_txid)); + wollet.apply_update(update).unwrap(); + assert!(wollet.transaction(&phantom_txid).unwrap().is_none()); + + // State should be back to what it was before the phantom tx + assert_eq!(expected_balance, wollet.balance().unwrap()); + assert_eq!(expected_tx_count, wollet.transactions().unwrap().len()); + assert_eq!(expected_utxo_count, wollet.utxos().unwrap().len()); + + let num_updates = wollet.updates().unwrap().len(); + assert_eq!(num_updates, 3); + + // Reopen with merge threshold to trigger merge including txid_height_delete + drop(wollet); + + let file_store = FileStore::new(path).unwrap(); + let encrypted_store = EncryptedStore::new(file_store, descriptor.encryption_key_bytes()); + + let wollet = 
WolletBuilder::new(network, descriptor.clone()) + .with_store(std::sync::Arc::new(encrypted_store)) + .with_merge_threshold(Some(2)) + .build() + .unwrap(); + + assert_eq!(expected_balance, wollet.balance().unwrap()); + assert_eq!(expected_tx_count, wollet.transactions().unwrap().len()); + assert_eq!(expected_utxo_count, wollet.utxos().unwrap().len()); + assert_eq!(wollet.updates().unwrap().len(), 1); + + // Final reopen to verify persistence of the merged-with-deletes state + drop(wollet); + let wollet = Wollet::with_fs_persist(network, descriptor, &db_root_dir).unwrap(); + assert_eq!(expected_balance, wollet.balance().unwrap()); + assert_eq!(expected_tx_count, wollet.transactions().unwrap().len()); + assert_eq!(expected_utxo_count, wollet.utxos().unwrap().len()); + assert_eq!(wollet.updates().unwrap().len(), 1); +}