From fbb04e466d38de714deddfa63449fcd39a56a6e3 Mon Sep 17 00:00:00 2001
From: Roman Dmitrienko
Date: Tue, 27 Feb 2024 22:09:57 +0100
Subject: [PATCH 01/89] Pass custom TLVs with keysend requests.

---
 bindings/ldk_node.udl |  7 ++++++-
 src/lib.rs            | 11 ++++++++---
 src/types.rs          |  9 +++++++++
 3 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl
index 30b2d4a1b..db4dd7e53 100644
--- a/bindings/ldk_node.udl
+++ b/bindings/ldk_node.udl
@@ -72,7 +72,7 @@ interface LDKNode {
 	[Throws=NodeError]
 	PaymentHash send_payment_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat);
 	[Throws=NodeError]
-	PaymentHash send_spontaneous_payment(u64 amount_msat, PublicKey node_id);
+	PaymentHash send_spontaneous_payment(u64 amount_msat, PublicKey node_id, sequence<TlvEntry> custom_tlvs);
 	[Throws=NodeError]
 	void send_payment_probes([ByRef]Bolt11Invoice invoice);
 	[Throws=NodeError]
@@ -312,6 +312,11 @@ enum LogLevel {
 	"Error",
 };
 
+dictionary TlvEntry {
+	u64 type;
+	sequence<u8> value;
+};
+
 [Custom]
 typedef string Txid;
 
diff --git a/src/lib.rs b/src/lib.rs
index 24b2123f5..320bac7f5 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -134,7 +134,7 @@ use types::{
 	Broadcaster, ChainMonitor, ChannelManager, FeeEstimator, KeysManager, NetworkGraph,
 	PeerManager, Router, Scorer, Sweeper, Wallet,
 };
-pub use types::{ChannelDetails, PeerDetails, UserChannelId};
+pub use types::{ChannelDetails, PeerDetails, TlvEntry, UserChannelId};
 
 use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger};
 
@@ -1222,7 +1222,7 @@ impl Node {
 
 	/// Send a spontaneous, aka. "keysend", payment
 	pub fn send_spontaneous_payment(
-		&self, amount_msat: u64, node_id: PublicKey,
+		&self, amount_msat: u64, node_id: PublicKey, custom_tlvs: Vec<TlvEntry>,
 	) -> Result<PaymentHash, Error> {
 		let rt_lock = self.runtime.read().unwrap();
 		if rt_lock.is_none() {
@@ -1245,7 +1245,12 @@ impl Node {
 			PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta),
 			amount_msat,
 		);
-		let recipient_fields = RecipientOnionFields::spontaneous_empty();
+		let recipient_fields = RecipientOnionFields::spontaneous_empty()
+			.with_custom_tlvs(custom_tlvs.into_iter().map(|tlv| (tlv.r#type, tlv.value)).collect())
+			.map_err(|_| {
+				log_error!(self.logger, "Payment error: invalid custom TLVs.");
+				Error::PaymentSendingFailed
+			})?;
 
 		match self.channel_manager.send_spontaneous_payment_with_retry(
 			Some(payment_preimage),
diff --git a/src/types.rs b/src/types.rs
index 6269b3ddf..96b83c11e 100644
--- a/src/types.rs
+++ b/src/types.rs
@@ -456,3 +456,12 @@ impl Default for ChannelConfig {
 		LdkChannelConfig::default().into()
 	}
 }
+
+/// Custom TLV entry.
+pub struct TlvEntry {
+	/// Type number.
+	pub r#type: u64,
+
+	/// Serialized value.
+	pub value: Vec<u8>,
+}

From bd1459b934f006048168a2410cc0faa97ba88e09 Mon Sep 17 00:00:00 2001
From: Roman Dmitrienko
Date: Fri, 1 Mar 2024 21:11:20 +0100
Subject: [PATCH 02/89] Add specific error code for invalid custom TLVs.

---
 src/error.rs | 2 ++
 src/lib.rs   | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/error.rs b/src/error.rs
index 0182b3092..7762426c3 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -61,6 +61,8 @@ pub enum Error {
 	InvalidChannelId,
 	/// The given network is invalid.
 	InvalidNetwork,
+	/// The custom TLVs are invalid.
+	InvalidCustomTlv,
 	/// A payment with the given hash has already been initiated.
 	DuplicatePayment,
 	/// The available funds are insufficient to complete the given operation.
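
For orientation, the two changes above give API consumers the following call pattern. This is a minimal sketch rather than part of the patch set: the helper function, the already-built-and-started `node`, and the connected peer `node_id` are assumptions for illustration, while the TLV type/value pairs mirror the integration test added later in this series.

    use bitcoin::secp256k1::PublicKey;
    use ldk_node::io::sqlite_store::SqliteStore;
    use ldk_node::{Node, TlvEntry};

    // Hypothetical helper: `node` is a running ldk-node instance and `node_id` a
    // reachable peer with sufficient outbound liquidity towards it.
    fn keysend_with_tlvs(node: &Node<SqliteStore>, node_id: PublicKey) {
        // Custom records use high, odd TLV type numbers, as in the test coverage.
        let custom_tlvs = vec![
            TlvEntry { r#type: 131073, value: vec![0x00, 0x11, 0x22, 0x33] },
            TlvEntry { r#type: 131075, value: vec![0xaa, 0xbb] },
        ];

        // 2_500_000 msat keysend carrying the custom TLVs; invalid TLVs are
        // rejected (PATCH 02 maps that case to the dedicated `InvalidCustomTlv` error).
        let payment_hash =
            node.send_spontaneous_payment(2_500_000, node_id, custom_tlvs).unwrap();
        println!("Keysend initiated: {:?}", payment_hash);
    }
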
diff --git a/src/lib.rs b/src/lib.rs index 320bac7f5..c9df1116f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1249,7 +1249,7 @@ impl Node { .with_custom_tlvs(custom_tlvs.into_iter().map(|tlv| (tlv.r#type, tlv.value)).collect()) .map_err(|_| { log_error!(self.logger, "Payment error: invalid custom TLVs."); - Error::PaymentSendingFailed + Error::InvalidCustomTlv })?; match self.channel_manager.send_spontaneous_payment_with_retry( From 9708a42ce2c53acebc55c58f366c5fa7041acbe9 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:26:16 +0100 Subject: [PATCH 03/89] Add custom TLVs to the test. --- tests/common.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/common.rs b/tests/common.rs index 3696f9b71..50bc436ed 100644 --- a/tests/common.rs +++ b/tests/common.rs @@ -3,7 +3,7 @@ use ldk_node::io::sqlite_store::SqliteStore; use ldk_node::{ - Builder, Config, Event, LogLevel, Node, NodeError, PaymentDirection, PaymentStatus, + Builder, Config, Event, LogLevel, Node, NodeError, PaymentDirection, PaymentStatus, TlvEntry, }; use lightning::ln::msgs::SocketAddress; @@ -490,8 +490,11 @@ pub(crate) fn do_channel_full_cycle( // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; - let keysend_payment_hash = - node_a.send_spontaneous_payment(keysend_amount_msat, node_b.node_id()).unwrap(); + let tlv1 = TlvEntry { r#type: 131073, value: vec![0x00, 0x11, 0x22, 0x33] }; + let tlv2 = TlvEntry { r#type: 131075, value: vec![0xaa, 0xbb] }; + let keysend_payment_hash = node_a + .send_spontaneous_payment(keysend_amount_msat, node_b.node_id(), vec![tlv1, tlv2]) + .unwrap(); expect_event!(node_a, PaymentSuccessful); let received_keysend_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => { From f545a423db254c3a5868342f3bc41f881cdb27c8 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:29:55 +0100 Subject: [PATCH 04/89] Fix Display trait implementation for Error. --- src/error.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/error.rs b/src/error.rs index 7762426c3..730e95460 100644 --- a/src/error.rs +++ b/src/error.rs @@ -109,6 +109,7 @@ impl fmt::Display for Error { Self::InvalidInvoice => write!(f, "The given invoice is invalid."), Self::InvalidChannelId => write!(f, "The given channel ID is invalid."), Self::InvalidNetwork => write!(f, "The given network is invalid."), + Self::InvalidCustomTlv => write!(f, "The given custom TLVs are invalid."), Self::DuplicatePayment => { write!(f, "A payment with the given hash has already been initiated.") }, From 7638cecb5c640e2191e084d06988349947ec5e20 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:36:03 +0100 Subject: [PATCH 05/89] Add the new error code to UDL. --- bindings/ldk_node.udl | 1 + 1 file changed, 1 insertion(+) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index db4dd7e53..ef406404f 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -131,6 +131,7 @@ enum NodeError { "InvalidInvoice", "InvalidChannelId", "InvalidNetwork", + "InvalidCustomTlv", "DuplicatePayment", "InsufficientFunds", "LiquiditySourceUnavailable", From 02f0000aaec5c7695f77ef3d43ddc7c0ac4d9de2 Mon Sep 17 00:00:00 2001 From: jbesraa Date: Thu, 7 Mar 2024 15:39:20 +0200 Subject: [PATCH 06/89] Add `fee_paid_msat` to `PaymentSuccessful` event (#271) The fee as returned from `PaymentSent` event generated by LDK and is saved in `PaymentSuccessful` event. 
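
The sketch below is not part of the patch; it shows how an application might consume the extended event. The helper function and the running `node` handle are illustrative assumptions, while the event accessors used here (`wait_next_event`, `event_handled`) are the existing public API.

    use ldk_node::io::sqlite_store::SqliteStore;
    use ldk_node::{Event, Node};

    // Hypothetical helper: blocks for the next event and reports fees for
    // successful payments.
    fn handle_next_event(node: &Node<SqliteStore>) {
        match node.wait_next_event() {
            Event::PaymentSuccessful { payment_hash, fee_paid_msat } => {
                // `fee_paid_msat` is taken from LDK's `PaymentSent` event and may
                // be `None` if no fee information was available for this payment.
                println!("Payment {:?} succeeded, fees: {:?} msat", payment_hash, fee_paid_msat);
            },
            other => println!("Unhandled event: {:?}", other),
        }
        node.event_handled();
    }
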
--- bindings/ldk_node.udl | 2 +- src/event.rs | 5 ++++- tests/common.rs | 37 +++++++++++++++++++++++++++++++++ tests/integration_tests_rust.rs | 10 ++++++--- 4 files changed, 49 insertions(+), 5 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 30b2d4a1b..4f09b7ced 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -154,7 +154,7 @@ enum BuildError { [Enum] interface Event { - PaymentSuccessful(PaymentHash payment_hash); + PaymentSuccessful(PaymentHash payment_hash, u64? fee_paid_msat); PaymentFailed(PaymentHash payment_hash, PaymentFailureReason? reason); PaymentReceived(PaymentHash payment_hash, u64 amount_msat); ChannelPending(ChannelId channel_id, UserChannelId user_channel_id, ChannelId former_temporary_channel_id, PublicKey counterparty_node_id, OutPoint funding_txo); diff --git a/src/event.rs b/src/event.rs index bf683838a..61cd7973f 100644 --- a/src/event.rs +++ b/src/event.rs @@ -47,6 +47,8 @@ pub enum Event { PaymentSuccessful { /// The hash of the payment. payment_hash: PaymentHash, + /// The total fee which was spent at intermediate hops in this payment. + fee_paid_msat: Option, }, /// A sent payment has failed. PaymentFailed { @@ -106,6 +108,7 @@ pub enum Event { impl_writeable_tlv_based_enum!(Event, (0, PaymentSuccessful) => { (0, payment_hash, required), + (1, fee_paid_msat, option), }, (1, PaymentFailed) => { (0, payment_hash, required), @@ -611,7 +614,7 @@ where ); } self.event_queue - .add_event(Event::PaymentSuccessful { payment_hash }) + .add_event(Event::PaymentSuccessful { payment_hash, fee_paid_msat }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); diff --git a/tests/common.rs b/tests/common.rs index 3696f9b71..8b4c1ef63 100644 --- a/tests/common.rs +++ b/tests/common.rs @@ -80,6 +80,43 @@ macro_rules! expect_channel_ready_event { pub(crate) use expect_channel_ready_event; +macro_rules! expect_payment_received_event { + ($node: expr, $amount_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentReceived { payment_hash, amount_msat } => { + println!("{} got event {:?}", $node.node_id(), e); + assert_eq!(amount_msat, $amount_msat); + $node.event_handled(); + let result = Ok(payment_hash); + result + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); + }, + } + }}; +} + +pub(crate) use expect_payment_received_event; + +macro_rules! 
expect_payment_successful_event { + ($node: expr, $payment_hash: expr, $fee_paid_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentSuccessful { payment_hash, fee_paid_msat } => { + println!("{} got event {:?}", $node.node_id(), e); + assert_eq!(fee_paid_msat, $fee_paid_msat); + assert_eq!(payment_hash, $payment_hash); + $node.event_handled(); + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); + }, + } + }}; +} + +pub(crate) use expect_payment_successful_event; + pub(crate) fn setup_bitcoind_and_electrsd() -> (BitcoinD, ElectrsD) { let bitcoind_exe = env::var("BITCOIND_EXE").ok().or_else(|| bitcoind::downloaded_exe_path().ok()).expect( diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 1c5a67521..e8fee941c 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -1,7 +1,8 @@ mod common; use common::{ - do_channel_full_cycle, expect_event, generate_blocks_and_wait, open_channel, + do_channel_full_cycle, expect_event, expect_payment_received_event, + expect_payment_successful_event, generate_blocks_and_wait, open_channel, premine_and_distribute_funds, random_config, setup_bitcoind_and_electrsd, setup_builder, setup_node, setup_two_nodes, wait_for_tx, TestSyncStore, }; @@ -9,6 +10,7 @@ use common::{ use ldk_node::{Builder, Event, NodeError}; use bitcoin::{Amount, Network}; +use lightning::ln::PaymentHash; use std::sync::Arc; @@ -133,8 +135,10 @@ fn multi_hop_sending() { let invoice = nodes[4].receive_payment(2_500_000, &"asdf", 9217).unwrap(); nodes[0].send_payment(&invoice).unwrap(); - expect_event!(nodes[4], PaymentReceived); - expect_event!(nodes[0], PaymentSuccessful); + let payment_hash: Result = + expect_payment_received_event!(&nodes[4], 2_500_000); + let fee_paid_msat = Some(2000); + expect_payment_successful_event!(nodes[0], payment_hash.unwrap(), fee_paid_msat); } #[test] From 9f68aeeb87e5056c21037228b7a7efc691962ec7 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 7 Mar 2024 14:40:30 +0100 Subject: [PATCH 07/89] Move `common` test module to `common/mod.rs` .. which will tell Rust not to treat it as an integration test module. --- tests/{common.rs => common/mod.rs} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{common.rs => common/mod.rs} (100%) diff --git a/tests/common.rs b/tests/common/mod.rs similarity index 100% rename from tests/common.rs rename to tests/common/mod.rs From d79b8ae09053795354cafe07da79269259c739bf Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 7 Mar 2024 14:43:52 +0100 Subject: [PATCH 08/89] Return payment hash directly from `expect_payment_received` macro .. no need to use a `Result` here --- tests/common/mod.rs | 3 +-- tests/integration_tests_rust.rs | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 8b4c1ef63..474ac14a8 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -87,8 +87,7 @@ macro_rules! 
expect_payment_received_event { println!("{} got event {:?}", $node.node_id(), e); assert_eq!(amount_msat, $amount_msat); $node.event_handled(); - let result = Ok(payment_hash); - result + payment_hash }, ref e => { panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index e8fee941c..d9562c565 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -10,7 +10,6 @@ use common::{ use ldk_node::{Builder, Event, NodeError}; use bitcoin::{Amount, Network}; -use lightning::ln::PaymentHash; use std::sync::Arc; @@ -135,10 +134,9 @@ fn multi_hop_sending() { let invoice = nodes[4].receive_payment(2_500_000, &"asdf", 9217).unwrap(); nodes[0].send_payment(&invoice).unwrap(); - let payment_hash: Result = - expect_payment_received_event!(&nodes[4], 2_500_000); + let payment_hash = expect_payment_received_event!(&nodes[4], 2_500_000); let fee_paid_msat = Some(2000); - expect_payment_successful_event!(nodes[0], payment_hash.unwrap(), fee_paid_msat); + expect_payment_successful_event!(nodes[0], payment_hash, fee_paid_msat); } #[test] From 84ecc24bc871b7411d4c279ddef21f2062c7a48b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 6 Mar 2024 14:04:46 +0100 Subject: [PATCH 09/89] Introduce `status` method allowing to query the `Node`'s status .. we replace the simple `is_running` with a more verbose `status` method returning a `NodeStatus` struct, giving more information on syncing states etc. --- bindings/ldk_node.udl | 18 +++++- src/builder.rs | 14 +++++ src/lib.rs | 143 ++++++++++++++++++++++++++++++++++++------ src/types.rs | 20 +++++- tests/common/mod.rs | 2 + 5 files changed, 175 insertions(+), 22 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 4f09b7ced..5e9c98053 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -42,6 +42,7 @@ interface LDKNode { void start(); [Throws=NodeError] void stop(); + NodeStatus status(); Event? next_event(); Event wait_next_event(); [Async] @@ -97,7 +98,6 @@ interface LDKNode { [Throws=NodeError] string sign_message([ByRef]sequence msg); boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); - boolean is_running(); }; [Error] @@ -137,6 +137,22 @@ enum NodeError { "LiquidityFeeTooHigh", }; +dictionary NodeStatus { + boolean is_running; + boolean is_listening; + BestBlock current_best_block; + u64? latest_wallet_sync_timestamp; + u64? latest_onchain_wallet_sync_timestamp; + u64? latest_fee_rate_cache_update_timestamp; + u64? latest_rgs_snapshot_timestamp; + u64? 
latest_node_announcement_broadcast_timestamp; +}; + +dictionary BestBlock { + BlockHash block_hash; + u32 height; +}; + [Error] enum BuildError { "InvalidSeedBytes", diff --git a/src/builder.rs b/src/builder.rs index a09b2563f..161c3bbbb 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -65,6 +65,7 @@ use std::fmt; use std::fs; use std::io::Cursor; use std::path::PathBuf; +use std::sync::atomic::AtomicBool; use std::sync::{Arc, Mutex, RwLock}; use std::time::SystemTime; @@ -945,6 +946,13 @@ fn build_with_store_internal( let (stop_sender, _) = tokio::sync::watch::channel(()); + let is_listening = Arc::new(AtomicBool::new(false)); + let latest_wallet_sync_timestamp = Arc::new(RwLock::new(None)); + let latest_onchain_wallet_sync_timestamp = Arc::new(RwLock::new(None)); + let latest_fee_rate_cache_update_timestamp = Arc::new(RwLock::new(None)); + let latest_rgs_snapshot_timestamp = Arc::new(RwLock::new(None)); + let latest_node_announcement_broadcast_timestamp = Arc::new(RwLock::new(None)); + Ok(Node { runtime, stop_sender, @@ -968,6 +976,12 @@ fn build_with_store_internal( scorer, peer_store, payment_store, + is_listening, + latest_wallet_sync_timestamp, + latest_onchain_wallet_sync_timestamp, + latest_fee_rate_cache_update_timestamp, + latest_rgs_snapshot_timestamp, + latest_node_announcement_broadcast_timestamp, }) } diff --git a/src/lib.rs b/src/lib.rs index 24b2123f5..2b0e796ca 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -107,7 +107,7 @@ pub use error::Error as NodeError; use error::Error; pub use event::Event; -pub use types::ChannelConfig; +pub use types::{BestBlock, ChannelConfig}; pub use io::utils::generate_entropy_mnemonic; @@ -167,8 +167,9 @@ use rand::Rng; use std::default::Default; use std::net::ToSocketAddrs; +use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex, RwLock}; -use std::time::{Duration, Instant, SystemTime}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; #[cfg(feature = "uniffi")] uniffi::include_scaffolding!("ldk_node"); @@ -199,6 +200,12 @@ pub struct Node { scorer: Arc>, peer_store: Arc>>, payment_store: Arc>>, + is_listening: Arc, + latest_wallet_sync_timestamp: Arc>>, + latest_onchain_wallet_sync_timestamp: Arc>>, + latest_fee_rate_cache_update_timestamp: Arc>>, + latest_rgs_snapshot_timestamp: Arc>>, + latest_node_announcement_broadcast_timestamp: Arc>>, } impl Node { @@ -222,6 +229,8 @@ impl Node { // Block to ensure we update our fee rate cache once on startup let fee_estimator = Arc::clone(&self.fee_estimator); let sync_logger = Arc::clone(&self.logger); + let sync_fee_rate_update_timestamp = + Arc::clone(&self.latest_fee_rate_cache_update_timestamp); let runtime_ref = &runtime; tokio::task::block_in_place(move || { runtime_ref.block_on(async move { @@ -233,6 +242,9 @@ impl Node { "Initial fee rate cache update finished in {}ms.", now.elapsed().as_millis() ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_fee_rate_update_timestamp.write().unwrap() = unix_time_secs_opt; Ok(()) }, Err(e) => { @@ -246,6 +258,7 @@ impl Node { // Setup wallet sync let wallet = Arc::clone(&self.wallet); let sync_logger = Arc::clone(&self.logger); + let sync_onchain_wallet_timestamp = Arc::clone(&self.latest_onchain_wallet_sync_timestamp); let mut stop_sync = self.stop_sender.subscribe(); let onchain_wallet_sync_interval_secs = self .config @@ -267,11 +280,16 @@ impl Node { _ = onchain_wallet_sync_interval.tick() => { let now = Instant::now(); match wallet.sync().await { - Ok(()) => 
log_trace!( + Ok(()) => { + log_trace!( sync_logger, "Background sync of on-chain wallet finished in {}ms.", now.elapsed().as_millis() - ), + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + } Err(err) => { log_error!( sync_logger, @@ -289,6 +307,7 @@ impl Node { let mut stop_fee_updates = self.stop_sender.subscribe(); let fee_update_logger = Arc::clone(&self.logger); + let fee_update_timestamp = Arc::clone(&self.latest_fee_rate_cache_update_timestamp); let fee_estimator = Arc::clone(&self.fee_estimator); let fee_rate_cache_update_interval_secs = self.config.fee_rate_cache_update_interval_secs.max(WALLET_SYNC_INTERVAL_MINIMUM_SECS); @@ -307,11 +326,16 @@ impl Node { _ = fee_rate_update_interval.tick() => { let now = Instant::now(); match fee_estimator.update_fee_estimates().await { - Ok(()) => log_trace!( + Ok(()) => { + log_trace!( fee_update_logger, "Background update of fee rate cache finished in {}ms.", now.elapsed().as_millis() - ), + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *fee_update_timestamp.write().unwrap() = unix_time_secs_opt; + } Err(err) => { log_error!( fee_update_logger, @@ -330,6 +354,7 @@ impl Node { let sync_cmon = Arc::clone(&self.chain_monitor); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); + let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); let mut stop_sync = self.stop_sender.subscribe(); let wallet_sync_interval_secs = self.config.wallet_sync_interval_secs.max(WALLET_SYNC_INTERVAL_MINIMUM_SECS); @@ -350,11 +375,16 @@ impl Node { ]; let now = Instant::now(); match tx_sync.sync(confirmables).await { - Ok(()) => log_trace!( + Ok(()) => { + log_trace!( sync_logger, "Background sync of Lightning wallet finished in {}ms.", now.elapsed().as_millis() - ), + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + } Err(e) => { log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) } @@ -368,6 +398,7 @@ impl Node { let gossip_source = Arc::clone(&self.gossip_source); let gossip_sync_store = Arc::clone(&self.kv_store); let gossip_sync_logger = Arc::clone(&self.logger); + let gossip_rgs_sync_timestamp = Arc::clone(&self.latest_rgs_snapshot_timestamp); let mut stop_gossip_sync = self.stop_sender.subscribe(); runtime.spawn(async move { let mut interval = tokio::time::interval(RGS_SYNC_INTERVAL); @@ -395,6 +426,7 @@ impl Node { log_error!(gossip_sync_logger, "Persistence failed: {}", e); panic!("Persistence failed"); }); + *gossip_rgs_sync_timestamp.write().unwrap() = Some(updated_timestamp as u64); } Err(e) => log_error!( gossip_sync_logger, @@ -413,6 +445,7 @@ impl Node { let peer_manager_connection_handler = Arc::clone(&self.peer_manager); let mut stop_listen = self.stop_sender.subscribe(); let listening_logger = Arc::clone(&self.logger); + let listening_indicator = Arc::clone(&self.is_listening); let mut bind_addrs = Vec::with_capacity(listening_addresses.len()); @@ -431,6 +464,7 @@ impl Node { } runtime.spawn(async move { + { let listener = tokio::net::TcpListener::bind(&*bind_addrs).await .unwrap_or_else(|e| { @@ -440,11 +474,13 @@ impl Node { ); }); + listening_indicator.store(true, Ordering::Release); + loop { let peer_mgr = Arc::clone(&peer_manager_connection_handler); 
tokio::select! { _ = stop_listen.changed() => { - return; + break; } res = listener.accept() => { let tcp_stream = res.unwrap().0; @@ -458,6 +494,9 @@ impl Node { } } } + } + + listening_indicator.store(false, Ordering::Release); }); } @@ -508,6 +547,7 @@ impl Node { let bcast_config = Arc::clone(&self.config); let bcast_store = Arc::clone(&self.kv_store); let bcast_logger = Arc::clone(&self.logger); + let bcast_ann_timestamp = Arc::clone(&self.latest_node_announcement_broadcast_timestamp); let mut stop_bcast = self.stop_sender.subscribe(); runtime.spawn(async move { // We check every 30 secs whether our last broadcast is NODE_ANN_BCAST_INTERVAL away. @@ -553,12 +593,17 @@ impl Node { bcast_pm.broadcast_node_announcement([0; 3], [0; 32], addresses); - let unix_time_secs = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); - io::utils::write_latest_node_ann_bcast_timestamp(unix_time_secs, Arc::clone(&bcast_store), Arc::clone(&bcast_logger)) - .unwrap_or_else(|e| { - log_error!(bcast_logger, "Persistence failed: {}", e); - panic!("Persistence failed"); - }); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *bcast_ann_timestamp.write().unwrap() = unix_time_secs_opt; + + if let Some(unix_time_secs) = unix_time_secs_opt { + io::utils::write_latest_node_ann_bcast_timestamp(unix_time_secs, Arc::clone(&bcast_store), Arc::clone(&bcast_logger)) + .unwrap_or_else(|e| { + log_error!(bcast_logger, "Persistence failed: {}", e); + panic!("Persistence failed"); + }); + } } } } @@ -662,11 +707,6 @@ impl Node { Ok(()) } - /// Returns whether the [`Node`] is running. - pub fn is_running(&self) -> bool { - self.runtime.read().unwrap().is_some() - } - /// Disconnects all peers, stops all running background tasks, and shuts down [`Node`]. /// /// After this returns most API methods will return [`Error::NotRunning`]. @@ -697,6 +737,32 @@ impl Node { Ok(()) } + /// Returns the status of the [`Node`]. + pub fn status(&self) -> NodeStatus { + let is_running = self.runtime.read().unwrap().is_some(); + let is_listening = self.is_listening.load(Ordering::Acquire); + let current_best_block = self.channel_manager.current_best_block().into(); + let latest_wallet_sync_timestamp = *self.latest_wallet_sync_timestamp.read().unwrap(); + let latest_onchain_wallet_sync_timestamp = + *self.latest_onchain_wallet_sync_timestamp.read().unwrap(); + let latest_fee_rate_cache_update_timestamp = + *self.latest_fee_rate_cache_update_timestamp.read().unwrap(); + let latest_rgs_snapshot_timestamp = *self.latest_rgs_snapshot_timestamp.read().unwrap(); + let latest_node_announcement_broadcast_timestamp = + *self.latest_node_announcement_broadcast_timestamp.read().unwrap(); + + NodeStatus { + is_running, + is_listening, + current_best_block, + latest_wallet_sync_timestamp, + latest_onchain_wallet_sync_timestamp, + latest_fee_rate_cache_update_timestamp, + latest_rgs_snapshot_timestamp, + latest_node_announcement_broadcast_timestamp, + } + } + /// Returns the next event in the event queue, if currently available. /// /// Will return `Some(..)` if an event is available and `None` otherwise. @@ -1746,6 +1812,43 @@ impl Drop for Node { } } +/// Represents the status of the [`Node`]. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeStatus { + /// Indicates whether the [`Node`] is running. + pub is_running: bool, + /// Indicates whether the [`Node`] is listening for incoming connections on the addresses + /// configured via [`Config::listening_addresses`]. 
+	pub is_listening: bool,
+	/// The best block to which our Lightning wallet is currently synced.
+	pub current_best_block: BestBlock,
+	/// The timestamp, in seconds since start of the UNIX epoch, when we last successfully synced
+	/// our Lightning wallet to the chain tip.
+	///
+	/// Will be `None` if the wallet hasn't been synced since the [`Node`] was initialized.
+	pub latest_wallet_sync_timestamp: Option<u64>,
+	/// The timestamp, in seconds since start of the UNIX epoch, when we last successfully synced
+	/// our on-chain wallet to the chain tip.
+	///
+	/// Will be `None` if the wallet hasn't been synced since the [`Node`] was initialized.
+	pub latest_onchain_wallet_sync_timestamp: Option<u64>,
+	/// The timestamp, in seconds since start of the UNIX epoch, when we last successfully updated
+	/// our fee rate cache.
+	///
+	/// Will be `None` if the cache hasn't been updated since the [`Node`] was initialized.
+	pub latest_fee_rate_cache_update_timestamp: Option<u64>,
+	/// The timestamp, in seconds since start of the UNIX epoch, when the last rapid gossip sync
+	/// (RGS) snapshot we successfully applied was generated.
+	///
+	/// Will be `None` if RGS isn't configured or the snapshot hasn't been updated since the [`Node`] was initialized.
+	pub latest_rgs_snapshot_timestamp: Option<u64>,
+	/// The timestamp, in seconds since start of the UNIX epoch, when we last broadcasted a node
+	/// announcement.
+	///
+	/// Will be `None` if we have no public channels or we haven't broadcasted since the [`Node`] was initialized.
+	pub latest_node_announcement_broadcast_timestamp: Option<u64>,
+}
+
 async fn connect_peer_if_necessary<K: KVStore + Sync + Send + 'static>(
 	node_id: PublicKey, addr: SocketAddress, peer_manager: Arc<PeerManager<K>>, logger: Arc<FilesystemLogger>,
 ) -> Result<(), Error> {
diff --git a/src/types.rs b/src/types.rs
index 6269b3ddf..4e082498e 100644
--- a/src/types.rs
+++ b/src/types.rs
@@ -4,6 +4,7 @@ use crate::sweep::OutputSweeper;
 
 use lightning::blinded_path::BlindedPath;
 use lightning::chain::chainmonitor;
+use lightning::chain::BestBlock as LdkBestBlock;
 use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails;
 use lightning::ln::msgs::RoutingMessageHandler;
 use lightning::ln::msgs::SocketAddress;
@@ -20,7 +21,7 @@ use lightning_net_tokio::SocketDescriptor;
 use lightning_transaction_sync::EsploraSyncClient;
 
 use bitcoin::secp256k1::{self, PublicKey, Secp256k1};
-use bitcoin::OutPoint;
+use bitcoin::{BlockHash, OutPoint};
 
 use std::sync::{Arc, Mutex, RwLock};
 
@@ -456,3 +457,20 @@ impl Default for ChannelConfig {
 		LdkChannelConfig::default().into()
 	}
 }
+
+/// The best known block as identified by its hash and height.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct BestBlock {
+	/// The block's hash
+	pub block_hash: BlockHash,
+	/// The height at which the block was confirmed.
+ pub height: u32, +} + +impl From for BestBlock { + fn from(value: LdkBestBlock) -> Self { + let block_hash = value.block_hash(); + let height = value.height(); + Self { block_hash, height } + } +} diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 474ac14a8..3be36869d 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -220,6 +220,8 @@ pub(crate) fn setup_node(electrsd: &ElectrsD, config: Config) -> TestNode Date: Wed, 6 Mar 2024 14:09:08 +0100 Subject: [PATCH 10/89] Allow to retrieve the `Node`'s `Config` --- bindings/ldk_node.udl | 1 + src/lib.rs | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 5e9c98053..992899000 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -43,6 +43,7 @@ interface LDKNode { [Throws=NodeError] void stop(); NodeStatus status(); + Config config(); Event? next_event(); Event wait_next_event(); [Async] diff --git a/src/lib.rs b/src/lib.rs index 2b0e796ca..b9508ad60 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -763,6 +763,11 @@ impl Node { } } + /// Returns the config with which the [`Node`] was initialized. + pub fn config(&self) -> Config { + self.config.as_ref().clone() + } + /// Returns the next event in the event queue, if currently available. /// /// Will return `Some(..)` if an event is available and `None` otherwise. From b052f15d8239fa0d60ff8b21a058db1df239fefd Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 6 Mar 2024 14:22:29 +0100 Subject: [PATCH 11/89] Use `is_listening` flag in `connection_restart_behavior` test --- tests/integration_tests_rust.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index d9562c565..0f1689ecd 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -289,7 +289,11 @@ fn do_connection_restart_behavior(persist: bool) { let node_id_b = node_b.node_id(); let node_addr_b = node_b.listening_addresses().unwrap().first().unwrap().clone(); - std::thread::sleep(std::time::Duration::from_secs(1)); + + while !node_b.status().is_listening { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + node_a.connect(node_id_b, node_addr_b, persist).unwrap(); let peer_details_a = node_a.list_peers().first().unwrap().clone(); From 059ff2d265205d5bfd16fceed348065b0cbb471c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 11 Mar 2024 14:49:20 +0100 Subject: [PATCH 12/89] Pin `reqwest` to fix MSRV builds in CI --- .github/workflows/rust.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 8d1266cc7..ccac22e63 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -48,7 +48,8 @@ jobs: cargo update -p hashlink --precise "0.8.2" --verbose # hashlink 0.8.3 requires hashbrown 0.14, requiring 1.64.0 cargo update -p proptest --precise "1.2.0" --verbose # proptest 1.3.0 requires rustc 1.64.0 cargo update -p regex --precise "1.9.6" --verbose # regex 1.10.0 requires rustc 1.65.0 - cargo update -p home --precise "0.5.5" --verbose # home v0.5.9, requires rustc 1.70 or newer + cargo update -p home --precise "0.5.5" --verbose # home v0.5.9 requires rustc 1.70 or newer + cargo update -p reqwest --precise "0.11.24" --verbose # reqwest v0.11.25 requires rustc 1.64 or newer - name: Set RUSTFLAGS to deny warnings if: "matrix.toolchain == 'stable'" run: echo "RUSTFLAGS=-D warnings" >> "$GITHUB_ENV" From 
71b1d3c5594b1901cb88b47defec4c6507fba6db Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 14 Mar 2024 11:28:18 +0000 Subject: [PATCH 13/89] Drop `reqwest` pin (#276) ... since they fixed their MSRV. --- .github/workflows/rust.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index ccac22e63..5ce1306ee 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -49,7 +49,6 @@ jobs: cargo update -p proptest --precise "1.2.0" --verbose # proptest 1.3.0 requires rustc 1.64.0 cargo update -p regex --precise "1.9.6" --verbose # regex 1.10.0 requires rustc 1.65.0 cargo update -p home --precise "0.5.5" --verbose # home v0.5.9 requires rustc 1.70 or newer - cargo update -p reqwest --precise "0.11.24" --verbose # reqwest v0.11.25 requires rustc 1.64 or newer - name: Set RUSTFLAGS to deny warnings if: "matrix.toolchain == 'stable'" run: echo "RUSTFLAGS=-D warnings" >> "$GITHUB_ENV" From a624501fad96cb6c1995865d499d600eacfa7a29 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 7 Feb 2024 13:10:19 +0100 Subject: [PATCH 14/89] Upgrade to LDK v0.0.123-beta --- Cargo.toml | 20 ++++++++------- bindings/ldk_node.udl | 23 +++++++++-------- src/builder.rs | 11 ++++++--- src/event.rs | 54 ++++++++++++++++++++++++++++++++++++---- src/fee_estimator.rs | 3 +++ src/lib.rs | 32 +++++++++++------------- src/sweep.rs | 14 +++++------ src/types.rs | 57 ++++++++++--------------------------------- src/wallet.rs | 6 ++--- 9 files changed, 120 insertions(+), 100 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 7e4a3b348..5b7eca8e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,14 +28,16 @@ panic = 'abort' # Abort on panic default = [] [dependencies] -lightning = { version = "0.0.121", features = ["std"] } -lightning-invoice = { version = "0.29.0" } -lightning-net-tokio = { version = "0.0.121" } -lightning-persister = { version = "0.0.121" } -lightning-background-processor = { version = "0.0.121", features = ["futures"] } -lightning-rapid-gossip-sync = { version = "0.0.121" } -lightning-transaction-sync = { version = "0.0.121", features = ["esplora-async-https", "time"] } -lightning-liquidity = { version = "0.1.0-alpha.1", features = ["std"] } +lightning = { version = "0.0.123-beta", features = ["std"] } +lightning-invoice = { version = "0.31.0-beta" } +lightning-net-tokio = { version = "0.0.123-beta" } +lightning-persister = { version = "0.0.123-beta" } +lightning-background-processor = { version = "0.0.123-beta", features = ["futures"] } +lightning-rapid-gossip-sync = { version = "0.0.123-beta" } +lightning-transaction-sync = { version = "0.0.123-beta", features = ["esplora-async-https", "time"] } +#lightning-liquidity = { version = "0.1.0-alpha.1", features = ["std"] } + +lightning-liquidity = { git = "https://github.com/tnull/lightning-liquidity", rev = "abf7088c0e03221c0f122e797f34802c9e99a3d4", features = ["std"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std"] } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main" } @@ -78,7 +80,7 @@ prost = { version = "0.11.6", default-features = false} winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] -lightning = { version = "0.0.121", features = ["std", "_test_utils"] } +lightning = { version = "0.0.123-beta", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", 
"_test_utils"] } electrum-client = { version = "0.15.1", default-features = true } bitcoincore-rpc = { version = "0.17.0", default-features = false } diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 992899000..02dbc2fb2 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -190,16 +190,19 @@ enum PaymentFailureReason { [Enum] interface ClosureReason { - CounterpartyForceClosed ( UntrustedString peer_msg ); - HolderForceClosed (); - CooperativeClosure (); - CommitmentTxConfirmed (); - FundingTimedOut (); - ProcessingError ( string err ); - DisconnectedPeer (); - OutdatedChannelManager (); - CounterpartyCoopClosedUnfundedChannel (); - FundingBatchClosure (); + CounterpartyForceClosed(UntrustedString peer_msg); + HolderForceClosed(); + LegacyCooperativeClosure(); + CounterpartyInitiatedCooperativeClosure(); + LocallyInitiatedCooperativeClosure(); + CommitmentTxConfirmed(); + FundingTimedOut(); + ProcessingError(string err); + DisconnectedPeer(); + OutdatedChannelManager(); + CounterpartyCoopClosedUnfundedChannel(); + FundingBatchClosure(); + HTLCsTimedOut(); }; enum PaymentDirection { diff --git a/src/builder.rs b/src/builder.rs index 161c3bbbb..d2f1c914c 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -15,7 +15,7 @@ use crate::peer_store::PeerStore; use crate::sweep::OutputSweeper; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ - ChainMonitor, ChannelManager, FakeMessageRouter, GossipSync, KeysManager, NetworkGraph, + ChainMonitor, ChannelManager, GossipSync, KeysManager, MessageRouter, NetworkGraph, OnionMessenger, PeerManager, }; use crate::wallet::Wallet; @@ -664,7 +664,7 @@ fn build_with_store_internal( let router = Arc::new(DefaultRouter::new( Arc::clone(&network_graph), Arc::clone(&logger), - keys_manager.get_secure_random_bytes(), + Arc::clone(&keys_manager), Arc::clone(&scorer), scoring_fee_params, )); @@ -776,12 +776,15 @@ fn build_with_store_internal( })?; } + let message_router = MessageRouter::new(Arc::clone(&network_graph), Arc::clone(&keys_manager)); + // Initialize the PeerManager - let onion_messenger: Arc = Arc::new(OnionMessenger::new( + let onion_messenger: Arc> = Arc::new(OnionMessenger::new( Arc::clone(&keys_manager), Arc::clone(&keys_manager), Arc::clone(&logger), - Arc::new(FakeMessageRouter {}), + Arc::clone(&channel_manager), + Arc::new(message_router), IgnoringMessageHandler {}, IgnoringMessageHandler {}, )); diff --git a/src/event.rs b/src/event.rs index 61cd7973f..c6a8f9b6a 100644 --- a/src/event.rs +++ b/src/event.rs @@ -347,7 +347,7 @@ where let confirmation_target = ConfirmationTarget::NonAnchorChannelFee; // We set nLockTime to the current height to discourage fee sniping. - let cur_height = self.channel_manager.current_best_block().height(); + let cur_height = self.channel_manager.current_best_block().height; let locktime = LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO); // Sign the final funding transaction and broadcast it. @@ -474,7 +474,7 @@ where amount_msat, ); let payment_preimage = match purpose { - PaymentPurpose::InvoicePayment { payment_preimage, payment_secret } => { + PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret } => { if payment_preimage.is_some() { payment_preimage } else { @@ -483,6 +483,26 @@ where .ok() } }, + PaymentPurpose::Bolt12OfferPayment { .. } => { + // TODO: support BOLT12. 
+ log_error!( + self.logger, + "Failed to claim unsupported BOLT12 payment with hash: {}", + payment_hash + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + return; + }, + PaymentPurpose::Bolt12RefundPayment { .. } => { + // TODO: support BOLT12. + log_error!( + self.logger, + "Failed to claim unsupported BOLT12 payment with hash: {}", + payment_hash + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + return; + }, PaymentPurpose::SpontaneousPayment(preimage) => Some(preimage), }; @@ -521,7 +541,11 @@ where amount_msat, ); match purpose { - PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. } => { + PaymentPurpose::Bolt11InvoicePayment { + payment_preimage, + payment_secret, + .. + } => { let update = PaymentDetailsUpdate { preimage: Some(payment_preimage), secret: Some(Some(payment_secret)), @@ -550,6 +574,24 @@ where }, } }, + PaymentPurpose::Bolt12OfferPayment { .. } => { + // TODO: support BOLT12. + log_error!( + self.logger, + "Failed to claim unsupported BOLT12 payment with hash: {}", + payment_hash + ); + return; + }, + PaymentPurpose::Bolt12RefundPayment { .. } => { + // TODO: support BOLT12. + log_error!( + self.logger, + "Failed to claim unsupported BOLT12 payment with hash: {}", + payment_hash + ); + return; + }, PaymentPurpose::SpontaneousPayment(preimage) => { let payment = PaymentDetails { preimage: Some(preimage), @@ -717,9 +759,10 @@ where LdkEvent::PaymentForwarded { prev_channel_id, next_channel_id, - fee_earned_msat, + total_fee_earned_msat, claim_from_onchain_tx, outbound_amount_forwarded_msat, + .. } => { let read_only_network_graph = self.network_graph.read_only(); let nodes = read_only_network_graph.nodes(); @@ -752,7 +795,7 @@ where let to_next_str = format!(" to {}{}", node_str(&next_channel_id), channel_str(&next_channel_id)); - let fee_earned = fee_earned_msat.unwrap_or(0); + let fee_earned = total_fee_earned_msat.unwrap_or(0); let outbound_amount_forwarded_msat = outbound_amount_forwarded_msat.unwrap_or(0); if claim_from_onchain_tx { log_info!( @@ -780,6 +823,7 @@ where former_temporary_channel_id, counterparty_node_id, funding_txo, + .. } => { log_info!( self.logger, diff --git a/src/fee_estimator.rs b/src/fee_estimator.rs index f79cfcb34..74518227f 100644 --- a/src/fee_estimator.rs +++ b/src/fee_estimator.rs @@ -42,6 +42,7 @@ where ConfirmationTarget::AnchorChannelFee, ConfirmationTarget::NonAnchorChannelFee, ConfirmationTarget::ChannelCloseMinimum, + ConfirmationTarget::OutputSpendingFee, ]; for target in confirmation_targets { let num_blocks = match target { @@ -51,6 +52,7 @@ where ConfirmationTarget::AnchorChannelFee => 1008, ConfirmationTarget::NonAnchorChannelFee => 12, ConfirmationTarget::ChannelCloseMinimum => 144, + ConfirmationTarget::OutputSpendingFee => 12, }; let estimates = self.esplora_client.get_fee_estimates().await.map_err(|e| { @@ -119,6 +121,7 @@ where ConfirmationTarget::AnchorChannelFee => 500, ConfirmationTarget::NonAnchorChannelFee => 1000, ConfirmationTarget::ChannelCloseMinimum => 500, + ConfirmationTarget::OutputSpendingFee => 1000, }; // We'll fall back on this, if we really don't have any other information. 
diff --git a/src/lib.rs b/src/lib.rs index b9508ad60..5a6e1dd8c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -107,7 +107,7 @@ pub use error::Error as NodeError; use error::Error; pub use event::Event; -pub use types::{BestBlock, ChannelConfig}; +pub use types::ChannelConfig; pub use io::utils::generate_entropy_mnemonic; @@ -138,7 +138,7 @@ pub use types::{ChannelDetails, PeerDetails, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; -use lightning::chain::Confirm; +use lightning::chain::{BestBlock, Confirm}; use lightning::ln::channelmanager::{self, PaymentId, RecipientOnionFields, Retry}; use lightning::ln::msgs::SocketAddress; use lightning::ln::{PaymentHash, PaymentPreimage}; @@ -157,7 +157,6 @@ use lightning_transaction_sync::EsploraSyncClient; use lightning::routing::router::{PaymentParameters, RouteParameters}; use lightning_invoice::{payment, Bolt11Invoice, Currency}; -use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; @@ -515,9 +514,9 @@ impl Node { } _ = interval.tick() => { let pm_peers = connect_pm - .get_peer_node_ids() + .list_peers() .iter() - .map(|(peer, _addr)| *peer) + .map(|peer| peer.counterparty_node_id) .collect::>(); for peer_info in connect_peer_store.list_peers().iter().filter(|info| !pm_peers.contains(&info.node_id)) { @@ -579,7 +578,7 @@ impl Node { continue; } - if bcast_pm.get_peer_node_ids().is_empty() { + if bcast_pm.list_peers().is_empty() { // Skip if we don't have any connected peers to gossip to. continue; } @@ -1301,7 +1300,7 @@ impl Node { } let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); - let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).to_byte_array()); + let payment_hash = PaymentHash::from(payment_preimage); if let Some(payment) = self.payment_store.get(&payment_hash) { if payment.status == PaymentStatus::Pending @@ -1690,11 +1689,9 @@ impl Node { let mut total_lightning_balance_sats = 0; let mut lightning_balances = Vec::new(); - for funding_txo in self.chain_monitor.list_monitors() { + for (funding_txo, channel_id) in self.chain_monitor.list_monitors() { match self.chain_monitor.get_monitor(funding_txo) { Ok(monitor) => { - // TODO: Switch to `channel_id` with LDK 0.0.122: let channel_id = monitor.channel_id(); - let channel_id = funding_txo.to_channel_id(); // unwrap safety: `get_counterparty_node_id` will always be `Some` after 0.0.110 and // LDK Node 0.1 depended on 0.0.115 already. let counterparty_node_id = monitor.get_counterparty_node_id().unwrap(); @@ -1758,12 +1755,13 @@ impl Node { let mut peers = Vec::new(); // First add all connected peers, preferring to list the connected address if available. 
- let connected_peers = self.peer_manager.get_peer_node_ids(); + let connected_peers = self.peer_manager.list_peers(); let connected_peers_len = connected_peers.len(); - for (node_id, con_addr_opt) in connected_peers { + for connected_peer in connected_peers { + let node_id = connected_peer.counterparty_node_id; let stored_peer = self.peer_store.get_peer(&node_id); let stored_addr_opt = stored_peer.as_ref().map(|p| p.address.clone()); - let address = match (con_addr_opt, stored_addr_opt) { + let address = match (connected_peer.socket_address, stored_addr_opt) { (Some(con_addr), _) => con_addr, (None, Some(stored_addr)) => stored_addr, (None, None) => continue, @@ -1858,10 +1856,8 @@ async fn connect_peer_if_necessary( node_id: PublicKey, addr: SocketAddress, peer_manager: Arc>, logger: Arc, ) -> Result<(), Error> { - for (pman_node_id, _pman_addr) in peer_manager.get_peer_node_ids() { - if node_id == pman_node_id { - return Ok(()); - } + if peer_manager.peer_by_node_id(&node_id).is_some() { + return Ok(()); } do_connect_peer(node_id, addr, peer_manager, logger).await @@ -1896,7 +1892,7 @@ async fn do_connect_peer( std::task::Poll::Pending => {}, } // Avoid blocking the tokio context by sleeping a bit - match peer_manager.get_peer_node_ids().iter().find(|(id, _addr)| *id == node_id) { + match peer_manager.peer_by_node_id(&node_id) { Some(_) => return Ok(()), None => tokio::time::sleep(Duration::from_millis(10)).await, } diff --git a/src/sweep.rs b/src/sweep.rs index 93dac19fa..59457944b 100644 --- a/src/sweep.rs +++ b/src/sweep.rs @@ -199,7 +199,7 @@ where fn rebroadcast_if_necessary(&self) { let (cur_height, cur_hash) = { let best_block = self.best_block.lock().unwrap(); - (best_block.height(), best_block.block_hash()) + (best_block.height, best_block.block_hash) }; let mut respend_descriptors = Vec::new(); @@ -277,7 +277,7 @@ where } fn prune_confirmed_outputs(&self) { - let cur_height = self.best_block.lock().unwrap().height(); + let cur_height = self.best_block.lock().unwrap().height; let mut locked_outputs = self.outputs.lock().unwrap(); // Prune all outputs that have sufficient depth by now. 
@@ -314,7 +314,7 @@ where &self, output_descriptors: &Vec, cur_height: u32, ) -> Result { let tx_feerate = - self.fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::NonAnchorChannelFee); + self.fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::OutputSpendingFee); let destination_address = self.wallet.get_new_address().map_err(|e| { log_error!(self.logger, "Failed to get destination address from wallet: {}", e); @@ -370,9 +370,9 @@ where ) { { let best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash(), header.prev_blockhash, + assert_eq!(best_block.block_hash, header.prev_blockhash, "Blocks must be connected in chain-order - the connected header must build on the last connected header"); - assert_eq!(best_block.height(), height - 1, + assert_eq!(best_block.height, height - 1, "Blocks must be connected in chain-order - the connected block height must be one greater than the previous height"); } @@ -384,9 +384,9 @@ where let new_height = height - 1; { let mut best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash(), header.block_hash(), + assert_eq!(best_block.block_hash, header.block_hash(), "Blocks must be disconnected in chain-order - the disconnected header must be the last connected header"); - assert_eq!(best_block.height(), height, + assert_eq!(best_block.height, height, "Blocks must be disconnected in chain-order - the disconnected block must have the correct height"); *best_block = BestBlock::new(header.prev_blockhash, new_height) } diff --git a/src/types.rs b/src/types.rs index 4e082498e..d10e2fbf7 100644 --- a/src/types.rs +++ b/src/types.rs @@ -2,9 +2,7 @@ use crate::logger::FilesystemLogger; use crate::message_handler::NodeCustomMessageHandler; use crate::sweep::OutputSweeper; -use lightning::blinded_path::BlindedPath; use lightning::chain::chainmonitor; -use lightning::chain::BestBlock as LdkBestBlock; use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails; use lightning::ln::msgs::RoutingMessageHandler; use lightning::ln::msgs::SocketAddress; @@ -13,15 +11,15 @@ use lightning::ln::ChannelId; use lightning::routing::gossip; use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters}; -use lightning::sign::{EntropySource, InMemorySigner}; +use lightning::sign::InMemorySigner; use lightning::util::config::ChannelConfig as LdkChannelConfig; use lightning::util::config::MaxDustHTLCExposure as LdkMaxDustHTLCExposure; use lightning::util::ser::{Readable, Writeable, Writer}; use lightning_net_tokio::SocketDescriptor; use lightning_transaction_sync::EsploraSyncClient; -use bitcoin::secp256k1::{self, PublicKey, Secp256k1}; -use bitcoin::{BlockHash, OutPoint}; +use bitcoin::secp256k1::PublicKey; +use bitcoin::OutPoint; use std::sync::{Arc, Mutex, RwLock}; @@ -38,7 +36,7 @@ pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< SocketDescriptor, Arc>, Arc, - Arc, + Arc>, Arc, Arc>>, Arc, @@ -84,6 +82,7 @@ pub(crate) type KeysManager = crate::wallet::WalletKeysManager< pub(crate) type Router = DefaultRouter< Arc, Arc, + Arc, Arc>, ProbabilisticScoringFeeParameters, Scorer, @@ -110,34 +109,21 @@ pub(crate) type GossipSync = lightning_background_processor::GossipSync< Arc, >; -pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMessenger< +pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMessenger< Arc, Arc, Arc, - Arc, + Arc>, + Arc, IgnoringMessageHandler, 
IgnoringMessageHandler, >; -pub(crate) struct FakeMessageRouter {} - -impl lightning::onion_message::messenger::MessageRouter for FakeMessageRouter { - fn find_path( - &self, _sender: PublicKey, _peers: Vec, - _destination: lightning::onion_message::messenger::Destination, - ) -> Result { - unimplemented!() - } - fn create_blinded_paths< - ES: EntropySource + ?Sized, - T: secp256k1::Signing + secp256k1::Verification, - >( - &self, _recipient: PublicKey, _peers: Vec, _entropy_source: &ES, - _secp_ctx: &Secp256k1, - ) -> Result, ()> { - unreachable!() - } -} +pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMessageRouter< + Arc, + Arc, + Arc, +>; pub(crate) type Sweeper = OutputSweeper< Arc, @@ -457,20 +443,3 @@ impl Default for ChannelConfig { LdkChannelConfig::default().into() } } - -/// The best known block as identified by its hash and height. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct BestBlock { - /// The block's hash - pub block_hash: BlockHash, - /// The height at which the block was confirmed. - pub height: u32, -} - -impl From for BestBlock { - fn from(value: LdkBestBlock) -> Self { - let block_hash = value.block_hash(); - let height = value.height(); - Self { block_hash, height } - } -} diff --git a/src/wallet.rs b/src/wallet.rs index aa38eb986..a79bb0078 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -7,8 +7,8 @@ use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, use lightning::ln::msgs::{DecodeError, UnsignedGossipMessage}; use lightning::ln::script::ShutdownScript; use lightning::sign::{ - EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, Recipient, SignerProvider, - SpendableOutputDescriptor, + EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, OutputSpender, Recipient, + SignerProvider, SpendableOutputDescriptor, }; use lightning::util::message_signing; @@ -173,7 +173,7 @@ where pub(crate) fn send_to_address( &self, address: &bitcoin::Address, amount_msat_or_drain: Option, ) -> Result { - let confirmation_target = ConfirmationTarget::NonAnchorChannelFee; + let confirmation_target = ConfirmationTarget::OutputSpendingFee; let fee_rate = FeeRate::from_sat_per_kwu( self.fee_estimator.get_est_sat_per_1000_weight(confirmation_target) as f32, ); From 4564ec11117ba7bcc22aa8ca3642fc2b8b161400 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 22 Apr 2024 11:00:07 +0200 Subject: [PATCH 15/89] Migrate to the upstreamed `OutputSweeper` --- src/balance.rs | 82 ++++----- src/builder.rs | 64 ++++--- src/event.rs | 2 +- src/io/mod.rs | 7 +- src/io/utils.rs | 120 +++++++++++-- src/sweep.rs | 467 ++---------------------------------------------- src/types.rs | 4 +- src/wallet.rs | 59 ++++-- 8 files changed, 249 insertions(+), 556 deletions(-) diff --git a/src/balance.rs b/src/balance.rs index f5a52073d..bad2d1d5f 100644 --- a/src/balance.rs +++ b/src/balance.rs @@ -1,11 +1,12 @@ +use crate::sweep::value_satoshis_from_descriptor; + use lightning::chain::channelmonitor::Balance as LdkBalance; use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage}; +use lightning::util::sweep::{OutputSpendStatus, TrackedSpendableOutput}; use bitcoin::secp256k1::PublicKey; use bitcoin::{BlockHash, Txid}; -use crate::sweep::SpendableOutputInfo; - /// Details of the known available balances returned by [`Node::list_balances`]. 
/// /// [`Node::list_balances`]: crate::Node::list_balances @@ -258,46 +259,45 @@ pub enum PendingSweepBalance { } impl PendingSweepBalance { - pub(crate) fn from_tracked_spendable_output(output_info: SpendableOutputInfo) -> Self { - if let Some(confirmation_hash) = output_info.confirmation_hash { - debug_assert!(output_info.confirmation_height.is_some()); - debug_assert!(output_info.latest_spending_tx.is_some()); - let channel_id = output_info.channel_id; - let confirmation_height = output_info - .confirmation_height - .expect("Height must be set if the output is confirmed"); - let latest_spending_txid = output_info - .latest_spending_tx - .as_ref() - .expect("Spending tx must be set if the output is confirmed") - .txid(); - let amount_satoshis = output_info.value_satoshis(); - Self::AwaitingThresholdConfirmations { - channel_id, - latest_spending_txid, - confirmation_hash, - confirmation_height, - amount_satoshis, - } - } else if let Some(latest_broadcast_height) = output_info.latest_broadcast_height { - debug_assert!(output_info.latest_spending_tx.is_some()); - let channel_id = output_info.channel_id; - let latest_spending_txid = output_info - .latest_spending_tx - .as_ref() - .expect("Spending tx must be set if the spend was broadcast") - .txid(); - let amount_satoshis = output_info.value_satoshis(); - Self::BroadcastAwaitingConfirmation { - channel_id, + pub(crate) fn from_tracked_spendable_output(output_info: TrackedSpendableOutput) -> Self { + match output_info.status { + OutputSpendStatus::PendingInitialBroadcast { .. } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + Self::PendingBroadcast { channel_id, amount_satoshis } + }, + OutputSpendStatus::PendingFirstConfirmation { latest_broadcast_height, - latest_spending_txid, - amount_satoshis, - } - } else { - let channel_id = output_info.channel_id; - let amount_satoshis = output_info.value_satoshis(); - Self::PendingBroadcast { channel_id, amount_satoshis } + latest_spending_tx, + .. + } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + let latest_spending_txid = latest_spending_tx.txid(); + Self::BroadcastAwaitingConfirmation { + channel_id, + latest_broadcast_height, + latest_spending_txid, + amount_satoshis, + } + }, + OutputSpendStatus::PendingThresholdConfirmations { + latest_spending_tx, + confirmation_height, + confirmation_hash, + .. 
+ } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + let latest_spending_txid = latest_spending_tx.txid(); + Self::AwaitingThresholdConfirmations { + channel_id, + latest_spending_txid, + confirmation_hash, + confirmation_height, + amount_satoshis, + } + }, } } } diff --git a/src/builder.rs b/src/builder.rs index d2f1c914c..daf3afd47 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -8,11 +8,10 @@ use crate::gossip::GossipSource; use crate::io; use crate::io::sqlite_store::SqliteStore; use crate::liquidity::LiquiditySource; -use crate::logger::{log_error, FilesystemLogger, Logger}; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::message_handler::NodeCustomMessageHandler; use crate::payment_store::PaymentStore; use crate::peer_store::PeerStore; -use crate::sweep::OutputSweeper; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ ChainMonitor, ChannelManager, GossipSync, KeysManager, MessageRouter, NetworkGraph, @@ -37,6 +36,7 @@ use lightning::util::persist::{ CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::ReadableArgs; +use lightning::util::sweep::OutputSweeper; use lightning_persister::fs_store::FilesystemStore; @@ -895,6 +895,47 @@ fn build_with_store_internal( liquidity_source.as_ref().map(|l| l.set_peer_manager(Arc::clone(&peer_manager))); + let output_sweeper = match io::utils::read_output_sweeper( + Arc::clone(&tx_broadcaster), + Arc::clone(&fee_estimator), + Arc::clone(&tx_sync), + Arc::clone(&keys_manager), + Arc::clone(&kv_store), + Arc::clone(&logger), + ) { + Ok(output_sweeper) => Arc::new(output_sweeper), + Err(e) => { + if e.kind() == std::io::ErrorKind::NotFound { + Arc::new(OutputSweeper::new( + channel_manager.current_best_block(), + Arc::clone(&tx_broadcaster), + Arc::clone(&fee_estimator), + Some(Arc::clone(&tx_sync)), + Arc::clone(&keys_manager), + Arc::clone(&keys_manager), + Arc::clone(&kv_store), + Arc::clone(&logger), + )) + } else { + return Err(BuildError::ReadFailed); + } + }, + }; + + match io::utils::migrate_deprecated_spendable_outputs( + Arc::clone(&output_sweeper), + Arc::clone(&kv_store), + Arc::clone(&logger), + ) { + Ok(()) => { + log_info!(logger, "Successfully migrated OutputSweeper data."); + }, + Err(e) => { + log_error!(logger, "Failed to migrate OutputSweeper data: {}", e); + return Err(BuildError::ReadFailed); + }, + } + // Init payment info storage let payment_store = match io::utils::read_payments(Arc::clone(&kv_store), Arc::clone(&logger)) { Ok(payments) => { @@ -928,25 +969,6 @@ fn build_with_store_internal( }, }; - let best_block = channel_manager.current_best_block(); - let output_sweeper = - match io::utils::read_spendable_outputs(Arc::clone(&kv_store), Arc::clone(&logger)) { - Ok(outputs) => Arc::new(OutputSweeper::new( - outputs, - Arc::clone(&wallet), - Arc::clone(&tx_broadcaster), - Arc::clone(&fee_estimator), - Arc::clone(&keys_manager), - Arc::clone(&kv_store), - best_block, - Some(Arc::clone(&tx_sync)), - Arc::clone(&logger), - )), - Err(_) => { - return Err(BuildError::ReadFailed); - }, - }; - let (stop_sender, _) = tokio::sync::watch::channel(()); let is_listening = Arc::new(AtomicBool::new(false)); diff --git a/src/event.rs b/src/event.rs index c6a8f9b6a..29ebbef43 100644 --- a/src/event.rs +++ b/src/event.rs @@ -708,7 +708,7 @@ where } }, LdkEvent::SpendableOutputs { outputs, channel_id } => { - 
self.output_sweeper.add_outputs(outputs, channel_id) + self.output_sweeper.track_spendable_outputs(outputs, channel_id, true, None) }, LdkEvent::OpenChannelRequest { temporary_channel_id, diff --git a/src/io/mod.rs b/src/io/mod.rs index d9dab440c..d545f6b93 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -21,9 +21,10 @@ pub(crate) const PEER_INFO_PERSISTENCE_KEY: &str = "peers"; pub(crate) const PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = "payments"; pub(crate) const PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; -/// The spendable output information will be persisted under this prefix. -pub(crate) const SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = "spendable_outputs"; -pub(crate) const SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; +/// The spendable output information used to be persisted under this prefix until LDK Node v0.3.0. +pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = + "spendable_outputs"; +pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; /// RapidGossipSync's `latest_sync_timestamp` will be persisted under this key. pub(crate) const LATEST_RGS_SYNC_TIMESTAMP_PRIMARY_NAMESPACE: &str = ""; diff --git a/src/io/utils.rs b/src/io/utils.rs index f486dda8b..937cc706c 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -1,9 +1,10 @@ use super::*; use crate::config::WALLET_KEYS_SEED_LEN; -use crate::logger::log_error; +use crate::logger::{log_error, FilesystemLogger}; use crate::peer_store::PeerStore; -use crate::sweep::SpendableOutputInfo; +use crate::sweep::DeprecatedSpendableOutputInfo; +use crate::types::{Broadcaster, ChainSource, FeeEstimator, KeysManager, Sweeper}; use crate::{Error, EventQueue, PaymentDetails}; use lightning::routing::gossip::NetworkGraph; @@ -12,13 +13,16 @@ use lightning::util::logger::Logger; use lightning::util::persist::{ KVStore, KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, NETWORK_GRAPH_PERSISTENCE_KEY, NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, - NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, - SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, + NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_KEY, + OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, + SCORER_PERSISTENCE_KEY, SCORER_PERSISTENCE_PRIMARY_NAMESPACE, + SCORER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::string::PrintableString; use bip39::Mnemonic; +use lightning::util::sweep::{OutputSpendStatus, OutputSweeper}; use rand::{thread_rng, RngCore}; use std::fs; @@ -200,34 +204,118 @@ where Ok(res) } -/// Read previously persisted spendable output information from the store. -pub(crate) fn read_spendable_outputs( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +/// Read `OutputSweeper` state from the store.
+pub(crate) fn read_output_sweeper( + broadcaster: Arc, fee_estimator: Arc, + chain_data_source: Arc, keys_manager: Arc, kv_store: Arc, + logger: Arc, +) -> Result, std::io::Error> { + let mut reader = Cursor::new(kv_store.read( + OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_KEY, + )?); + let args = ( + broadcaster, + fee_estimator, + Some(chain_data_source), + Arc::clone(&keys_manager), + keys_manager, + kv_store, + logger.clone(), + ); + OutputSweeper::read(&mut reader, args).map_err(|e| { + log_error!(logger, "Failed to deserialize OutputSweeper: {}", e); + std::io::Error::new(std::io::ErrorKind::InvalidData, "Failed to deserialize OutputSweeper") + }) +} + +/// Read previously persisted spendable output information from the store and migrate to the +/// upstreamed `OutputSweeper`. +/// +/// We first iterate all `DeprecatedSpendableOutputInfo`s and have them tracked by the new +/// `OutputSweeper`. In order to be certain the initial output spends will happen in a single +/// transaction (and save on-chain fees), we batch them to happen at current height plus two +/// blocks. Lastly, we remove the previously persisted data once we have checked that they are +/// tracked and awaiting their initial spend at the correct height. +/// +/// Note that this migration will be run in the `Builder`, i.e., at the time when the migration is +/// happening no background sync is ongoing, so we shouldn't have a risk of interleaving block +/// connections during the migration. +pub(crate) fn migrate_deprecated_spendable_outputs( + sweeper: Arc>, kv_store: Arc, logger: L, +) -> Result<(), std::io::Error> where L::Target: Logger, { - let mut res = Vec::new(); + let best_block = sweeper.current_best_block(); for stored_key in kv_store.list( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + )?
{ let mut reader = Cursor::new(kv_store.read( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, &stored_key, )?); - let output = SpendableOutputInfo::read(&mut reader).map_err(|e| { + let output = DeprecatedSpendableOutputInfo::read(&mut reader).map_err(|e| { log_error!(logger, "Failed to deserialize SpendableOutputInfo: {}", e); std::io::Error::new( std::io::ErrorKind::InvalidData, "Failed to deserialize SpendableOutputInfo", ) })?; - res.push(output); + let descriptors = vec![output.descriptor.clone()]; + let spend_delay = Some(best_block.height + 2); + sweeper.track_spendable_outputs(descriptors, output.channel_id, true, spend_delay); + if let Some(tracked_spendable_output) = + sweeper.tracked_spendable_outputs().iter().find(|o| o.descriptor == output.descriptor) + { + match tracked_spendable_output.status { + OutputSpendStatus::PendingInitialBroadcast { delayed_until_height } => { + if delayed_until_height == spend_delay { + kv_store.remove( + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &stored_key, + false, + )?; + } else { + debug_assert!(false, "Unexpected status in OutputSweeper migration."); + log_error!(logger, "Unexpected status in OutputSweeper migration."); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + } + }, + _ => { + debug_assert!(false, "Unexpected status in OutputSweeper migration."); + log_error!(logger, "Unexpected status in OutputSweeper migration."); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + }, + } + } else { + debug_assert!( + false, + "OutputSweeper failed to track and persist outputs during migration." + ); + log_error!( + logger, + "OutputSweeper failed to track and persist outputs during migration." + ); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + } } - Ok(res) + + Ok(()) } pub(crate) fn read_latest_rgs_sync_timestamp( diff --git a/src/sweep.rs b/src/sweep.rs index 59457944b..1c772d4e9 100644 --- a/src/sweep.rs +++ b/src/sweep.rs @@ -1,34 +1,15 @@ -use crate::hex_utils; -use crate::io::{ - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, -}; -use crate::logger::{log_error, Logger}; -use crate::wallet::{Wallet, WalletKeysManager}; -use crate::Error; +//! The output sweeper used to live here before we upstreamed it to `rust-lightning` and migrated +//! to the upstreamed version with LDK Node v0.3.0 (May 2024). We should drop this module entirely +//! once sufficient time has passed for us to be confident any users completed the migration. 
-use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator}; -use lightning::chain::{self, BestBlock, Confirm, Filter, Listen, WatchedOutput}; use lightning::impl_writeable_tlv_based; use lightning::ln::ChannelId; -use lightning::sign::{EntropySource, SpendableOutputDescriptor}; -use lightning::util::persist::KVStore; -use lightning::util::ser::Writeable; +use lightning::sign::SpendableOutputDescriptor; -use bitcoin::blockdata::block::Header; -use bitcoin::blockdata::locktime::absolute::LockTime; -use bitcoin::secp256k1::Secp256k1; -use bitcoin::{BlockHash, Transaction, Txid}; - -use std::ops::Deref; -use std::sync::{Arc, Mutex}; - -const CONSIDERED_SPENT_THRESHOLD_CONF: u32 = 6; - -const REGENERATE_SPEND_THRESHOLD: u32 = 144; +use bitcoin::{BlockHash, Transaction}; #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct SpendableOutputInfo { +pub(crate) struct DeprecatedSpendableOutputInfo { pub(crate) id: [u8; 32], pub(crate) descriptor: SpendableOutputDescriptor, pub(crate) channel_id: Option, @@ -39,55 +20,7 @@ pub(crate) struct SpendableOutputInfo { pub(crate) confirmation_hash: Option, } -impl SpendableOutputInfo { - fn to_watched_output(&self) -> WatchedOutput { - match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { outpoint, output, channel_keys_id: _ } => { - WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: *outpoint, - script_pubkey: output.script_pubkey.clone(), - } - }, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: output.outpoint, - script_pubkey: output.output.script_pubkey.clone(), - }, - SpendableOutputDescriptor::StaticPaymentOutput(output) => WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: output.outpoint, - script_pubkey: output.output.script_pubkey.clone(), - }, - } - } - - fn is_spent_in(&self, tx: &Transaction) -> bool { - let prev_outpoint = match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { outpoint, .. } => *outpoint, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.outpoint, - SpendableOutputDescriptor::StaticPaymentOutput(output) => output.outpoint, - }; - - for input in &tx.input { - if input.previous_output == prev_outpoint.into_bitcoin_outpoint() { - return true; - } - } - - false - } - - pub(crate) fn value_satoshis(&self) -> u64 { - match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { output, .. 
} => output.value, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, - SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, - } - } -} - -impl_writeable_tlv_based!(SpendableOutputInfo, { +impl_writeable_tlv_based!(DeprecatedSpendableOutputInfo, { (0, id, required), (2, descriptor, required), (4, channel_id, option), @@ -98,386 +31,10 @@ impl_writeable_tlv_based!(SpendableOutputInfo, { (14, confirmation_hash, option), }); -pub(crate) struct OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - outputs: Mutex>, - wallet: Arc>, - broadcaster: B, - fee_estimator: E, - keys_manager: Arc>, - kv_store: K, - best_block: Mutex, - chain_source: Option, - logger: L, -} - -impl OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - pub(crate) fn new( - outputs: Vec, - wallet: Arc>, broadcaster: B, - fee_estimator: E, - keys_manager: Arc>, kv_store: K, - best_block: BestBlock, chain_source: Option, logger: L, - ) -> Self { - if let Some(filter) = chain_source.as_ref() { - for output_info in &outputs { - let watched_output = output_info.to_watched_output(); - filter.register_output(watched_output); - } - } - - let outputs = Mutex::new(outputs); - let best_block = Mutex::new(best_block); - Self { - outputs, - wallet, - broadcaster, - fee_estimator, - keys_manager, - kv_store, - best_block, - chain_source, - logger, - } - } - - pub(crate) fn add_outputs( - &self, mut output_descriptors: Vec, - channel_id: Option, - ) { - let non_static_outputs = output_descriptors - .drain(..) - .filter(|desc| !matches!(desc, SpendableOutputDescriptor::StaticOutput { .. 
})) - .collect::>(); - - if non_static_outputs.is_empty() { - return; - } - - { - let mut locked_outputs = self.outputs.lock().unwrap(); - for descriptor in non_static_outputs { - let id = self.keys_manager.get_secure_random_bytes(); - let output_info = SpendableOutputInfo { - id, - descriptor, - channel_id, - first_broadcast_hash: None, - latest_broadcast_height: None, - latest_spending_tx: None, - confirmation_height: None, - confirmation_hash: None, - }; - - locked_outputs.push(output_info.clone()); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - - self.rebroadcast_if_necessary(); - } - - pub(crate) fn tracked_spendable_outputs(&self) -> Vec { - self.outputs.lock().unwrap().clone() - } - - fn rebroadcast_if_necessary(&self) { - let (cur_height, cur_hash) = { - let best_block = self.best_block.lock().unwrap(); - (best_block.height, best_block.block_hash) - }; - - let mut respend_descriptors = Vec::new(); - let mut respend_ids = Vec::new(); - - { - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if output_info.confirmation_height.is_some() { - // Don't rebroadcast confirmed txs - debug_assert!(output_info.confirmation_hash.is_some()); - continue; - } - - if let Some(latest_broadcast_height) = output_info.latest_broadcast_height { - // Re-generate spending tx after REGENERATE_SPEND_THRESHOLD, rebroadcast - // after every block - if latest_broadcast_height + REGENERATE_SPEND_THRESHOLD >= cur_height { - respend_descriptors.push(output_info.descriptor.clone()); - respend_ids.push(output_info.id); - } else if latest_broadcast_height < cur_height { - if let Some(latest_spending_tx) = output_info.latest_spending_tx.as_ref() { - self.broadcaster.broadcast_transactions(&[&latest_spending_tx]); - output_info.latest_broadcast_height = Some(cur_height); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!( - self.logger, - "Error persisting SpendableOutputInfo: {:?}", - e - ) - }); - } - } - } else { - // Our first broadcast. - respend_descriptors.push(output_info.descriptor.clone()); - respend_ids.push(output_info.id); - output_info.first_broadcast_hash = Some(cur_hash); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } - - if !respend_descriptors.is_empty() { - match self.get_spending_tx(&respend_descriptors, cur_height) { - Ok(spending_tx) => { - self.broadcaster.broadcast_transactions(&[&spending_tx]); - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if respend_ids.contains(&output_info.id) { - if let Some(filter) = self.chain_source.as_ref() { - let watched_output = output_info.to_watched_output(); - filter.register_output(watched_output); - } - - output_info.latest_spending_tx = Some(spending_tx.clone()); - output_info.latest_broadcast_height = Some(cur_height); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!( - self.logger, - "Error persisting SpendableOutputInfo: {:?}", - e - ) - }); - } - } - }, - Err(e) => { - log_error!(self.logger, "Error spending outputs: {:?}", e); - }, - }; - } - } - - fn prune_confirmed_outputs(&self) { - let cur_height = self.best_block.lock().unwrap().height; - let mut locked_outputs = self.outputs.lock().unwrap(); - - // Prune all outputs that have sufficient depth by now. 
- locked_outputs.retain(|o| { - if let Some(confirmation_height) = o.confirmation_height { - if cur_height >= confirmation_height + CONSIDERED_SPENT_THRESHOLD_CONF - 1 { - let key = hex_utils::to_string(&o.id); - match self.kv_store.remove( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &key, - false, - ) { - Ok(_) => return false, - Err(e) => { - log_error!( - self.logger, - "Removal of key {}/{}/{} failed due to: {}", - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - key, - e - ); - return true; - }, - } - } - } - true - }); - } - - fn get_spending_tx( - &self, output_descriptors: &Vec, cur_height: u32, - ) -> Result { - let tx_feerate = - self.fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::OutputSpendingFee); - - let destination_address = self.wallet.get_new_address().map_err(|e| { - log_error!(self.logger, "Failed to get destination address from wallet: {}", e); - })?; - - let locktime = LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO); - - let output_descriptors = output_descriptors.iter().collect::>(); - self.keys_manager.spend_spendable_outputs( - &output_descriptors, - Vec::new(), - destination_address.script_pubkey(), - tx_feerate, - Some(locktime), - &Secp256k1::new(), - ) - } - - fn persist_info(&self, output: &SpendableOutputInfo) -> Result<(), Error> { - let key = hex_utils::to_string(&output.id); - let data = output.encode(); - self.kv_store - .write( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &key, - &data, - ) - .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - key, - e - ); - Error::PersistenceFailed - }) - } -} - -impl Listen for OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - fn filtered_block_connected( - &self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32, - ) { - { - let best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash, header.prev_blockhash, - "Blocks must be connected in chain-order - the connected header must build on the last connected header"); - assert_eq!(best_block.height, height - 1, - "Blocks must be connected in chain-order - the connected block height must be one greater than the previous height"); - } - - self.transactions_confirmed(header, txdata, height); - self.best_block_updated(header, height); - } - - fn block_disconnected(&self, header: &Header, height: u32) { - let new_height = height - 1; - { - let mut best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash, header.block_hash(), - "Blocks must be disconnected in chain-order - the disconnected header must be the last connected header"); - assert_eq!(best_block.height, height, - "Blocks must be disconnected in chain-order - the disconnected block must have the correct height"); - *best_block = BestBlock::new(header.prev_blockhash, new_height) - } - - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if output_info.confirmation_hash == Some(header.block_hash()) { - debug_assert_eq!(output_info.confirmation_height, Some(height)); - output_info.confirmation_hash = None; - 
output_info.confirmation_height = None; - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } -} - -impl Confirm for OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - fn transactions_confirmed( - &self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32, - ) { - let mut locked_outputs = self.outputs.lock().unwrap(); - for (_, tx) in txdata { - for output_info in locked_outputs.iter_mut() { - if output_info.is_spent_in(*tx) { - debug_assert!(Some(height) > output_info.latest_broadcast_height); - output_info.confirmation_hash = Some(header.block_hash()); - output_info.confirmation_height = Some(height); - output_info.latest_spending_tx = Some((*tx).clone()); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } - } - - fn transaction_unconfirmed(&self, txid: &Txid) { - let mut locked_outputs = self.outputs.lock().unwrap(); - - // Get what height was unconfirmed. - let unconf_height = locked_outputs - .iter() - .find(|o| o.latest_spending_tx.as_ref().map(|tx| tx.txid()) == Some(*txid)) - .and_then(|o| o.confirmation_height); - - // Unconfirm all >= this height. - locked_outputs.iter_mut().filter(|o| o.confirmation_height >= unconf_height).for_each( - |o| { - o.confirmation_hash = None; - o.confirmation_height = None; - self.persist_info(&o).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - }, - ); - } - - fn best_block_updated(&self, header: &Header, height: u32) { - *self.best_block.lock().unwrap() = BestBlock::new(header.block_hash(), height); - self.prune_confirmed_outputs(); - self.rebroadcast_if_necessary(); - } - - fn get_relevant_txids(&self) -> Vec<(Txid, u32, Option)> { - let locked_outputs = self.outputs.lock().unwrap(); - locked_outputs - .iter() - .filter_map(|o| { - if let Some(confirmation_hash) = o.confirmation_hash { - if let Some(confirmation_height) = o.confirmation_height { - if let Some(latest_spending_tx) = o.latest_spending_tx.as_ref() { - return Some(( - latest_spending_tx.txid(), - confirmation_height, - Some(confirmation_hash), - )); - } - } - } - - None - }) - .collect::>() +pub(crate) fn value_satoshis_from_descriptor(descriptor: &SpendableOutputDescriptor) -> u64 { + match &descriptor { + SpendableOutputDescriptor::StaticOutput { output, .. 
} => output.value, + SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, + SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, } } diff --git a/src/types.rs b/src/types.rs index d10e2fbf7..afed1320a 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,6 +1,5 @@ use crate::logger::FilesystemLogger; use crate::message_handler::NodeCustomMessageHandler; -use crate::sweep::OutputSweeper; use lightning::chain::chainmonitor; use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails; @@ -15,6 +14,7 @@ use lightning::sign::InMemorySigner; use lightning::util::config::ChannelConfig as LdkChannelConfig; use lightning::util::config::MaxDustHTLCExposure as LdkMaxDustHTLCExposure; use lightning::util::ser::{Readable, Writeable, Writer}; +use lightning::util::sweep::OutputSweeper; use lightning_net_tokio::SocketDescriptor; use lightning_transaction_sync::EsploraSyncClient; @@ -127,10 +127,12 @@ pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMess pub(crate) type Sweeper = OutputSweeper< Arc, + Arc, Arc, Arc, Arc, Arc, + Arc, >; /// A local, potentially user-provided, identifier of a channel. diff --git a/src/wallet.rs b/src/wallet.rs index a79bb0078..2b01d1b49 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -7,8 +7,8 @@ use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, use lightning::ln::msgs::{DecodeError, UnsignedGossipMessage}; use lightning::ln::script::ShutdownScript; use lightning::sign::{ - EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, OutputSpender, Recipient, - SignerProvider, SpendableOutputDescriptor, + ChangeDestinationSource, EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, + OutputSpender, Recipient, SignerProvider, SpendableOutputDescriptor, }; use lightning::util::message_signing; @@ -278,22 +278,6 @@ where Self { inner, wallet, logger } } - /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. - pub fn spend_spendable_outputs( - &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec, - change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32, - locktime: Option, secp_ctx: &Secp256k1, - ) -> Result { - self.inner.spend_spendable_outputs( - descriptors, - outputs, - change_destination_script, - feerate_sat_per_1000_weight, - locktime, - secp_ctx, - ) - } - pub fn sign_message(&self, msg: &[u8]) -> Result { message_signing::sign(msg, &self.inner.get_node_secret_key()) .or(Err(Error::MessageSigningFailed)) @@ -352,6 +336,30 @@ where } } +impl OutputSpender for WalletKeysManager +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. 
+ fn spend_spendable_outputs( + &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec, + change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32, + locktime: Option, secp_ctx: &Secp256k1, + ) -> Result { + self.inner.spend_spendable_outputs( + descriptors, + outputs, + change_destination_script, + feerate_sat_per_1000_weight, + locktime, + secp_ctx, + ) + } +} + impl EntropySource for WalletKeysManager where D: BatchDatabase, @@ -417,3 +425,18 @@ where } } } + +impl ChangeDestinationSource for WalletKeysManager +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + fn get_change_destination_script(&self) -> Result { + let address = self.wallet.get_new_address().map_err(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + })?; + Ok(address.script_pubkey()) + } +} From c49c3d79265dcd8eb931933544a22f6d1c1dd76f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 30 Jan 2024 11:00:25 +0100 Subject: [PATCH 16/89] Drop `KVStore` generic from `Node` .. switching to `dyn KVStore + Send + Sync` --- src/builder.rs | 42 ++++++++++----------- src/event.rs | 42 ++++++++++----------- src/io/utils.rs | 67 +++++++++++++++------------------ src/lib.rs | 36 +++++++++--------- src/liquidity.rs | 17 ++++----- src/message_handler.rs | 15 +++----- src/payment_store.rs | 12 +++--- src/peer_store.rs | 16 ++++---- src/types.rs | 34 ++++++++--------- src/uniffi_types.rs | 3 +- tests/common/mod.rs | 19 ++++------ tests/integration_tests_rust.rs | 5 ++- 12 files changed, 147 insertions(+), 161 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index daf3afd47..5edbd55ab 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -14,7 +14,7 @@ use crate::payment_store::PaymentStore; use crate::peer_store::PeerStore; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ - ChainMonitor, ChannelManager, GossipSync, KeysManager, MessageRouter, NetworkGraph, + ChainMonitor, ChannelManager, DynStore, GossipSync, KeysManager, MessageRouter, NetworkGraph, OnionMessenger, PeerManager, }; use crate::wallet::Wallet; @@ -32,7 +32,7 @@ use lightning::sign::EntropySource; use lightning::util::config::UserConfig; use lightning::util::persist::{ - read_channel_monitors, KVStore, CHANNEL_MANAGER_PERSISTENCE_KEY, + read_channel_monitors, CHANNEL_MANAGER_PERSISTENCE_KEY, CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::ReadableArgs; @@ -115,12 +115,18 @@ pub enum BuildError { /// The given listening addresses are invalid, e.g. too many were passed. InvalidListeningAddresses, /// We failed to read data from the [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore ReadFailed, /// We failed to write data to the [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore WriteFailed, /// We failed to access the given `storage_dir_path`. StoragePathAccessFailed, /// We failed to setup our [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore KVStoreSetupFailed, /// We failed to setup the onchain wallet. WalletSetupFailed, @@ -299,7 +305,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. 
- pub fn build(&self) -> Result, BuildError> { + pub fn build(&self) -> Result { let storage_dir_path = self.config.storage_dir_path.clone(); fs::create_dir_all(storage_dir_path.clone()) .map_err(|_| BuildError::StoragePathAccessFailed)?; @@ -316,7 +322,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Result, BuildError> { + pub fn build_with_fs_store(&self) -> Result { let mut storage_dir_path: PathBuf = self.config.storage_dir_path.clone().into(); storage_dir_path.push("fs_store"); @@ -329,9 +335,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`VssStore`] backend and according to the options /// previously configured. #[cfg(any(vss, vss_test))] - pub fn build_with_vss_store( - &self, url: String, store_id: String, - ) -> Result, BuildError> { + pub fn build_with_vss_store(&self, url: String, store_id: String) -> Result { let logger = setup_logger(&self.config)?; let seed_bytes = seed_bytes_from_config( @@ -369,9 +373,7 @@ impl NodeBuilder { } /// Builds a [`Node`] instance according to the options previously configured. - pub fn build_with_store( - &self, kv_store: Arc, - ) -> Result, BuildError> { + pub fn build_with_store(&self, kv_store: Arc) -> Result { let logger = setup_logger(&self.config)?; let seed_bytes = seed_bytes_from_config( &self.config, @@ -500,31 +502,29 @@ impl ArcedNodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. - pub fn build(&self) -> Result>, BuildError> { + pub fn build(&self) -> Result, BuildError> { self.inner.read().unwrap().build().map(Arc::new) } /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Result>, BuildError> { + pub fn build_with_fs_store(&self) -> Result, BuildError> { self.inner.read().unwrap().build_with_fs_store().map(Arc::new) } /// Builds a [`Node`] instance according to the options previously configured. - pub fn build_with_store( - &self, kv_store: Arc, - ) -> Result>, BuildError> { + pub fn build_with_store(&self, kv_store: Arc) -> Result, BuildError> { self.inner.read().unwrap().build_with_store(kv_store).map(Arc::new) } } /// Builds a [`Node`] instance according to the options previously configured. 
-fn build_with_store_internal( +fn build_with_store_internal( config: Arc, chain_data_source_config: Option<&ChainDataSourceConfig>, gossip_source_config: Option<&GossipSourceConfig>, liquidity_source_config: Option<&LiquiditySourceConfig>, seed_bytes: [u8; 64], - logger: Arc, kv_store: Arc, -) -> Result, BuildError> { + logger: Arc, kv_store: Arc, +) -> Result { // Initialize the on-chain wallet and chain access let xprv = bitcoin::bip32::ExtendedPrivKey::new_master(config.network.into(), &seed_bytes) .map_err(|e| { @@ -604,7 +604,7 @@ fn build_with_store_internal( )); // Initialize the ChainMonitor - let chain_monitor: Arc> = Arc::new(chainmonitor::ChainMonitor::new( + let chain_monitor: Arc = Arc::new(chainmonitor::ChainMonitor::new( Some(Arc::clone(&tx_sync)), Arc::clone(&tx_broadcaster), Arc::clone(&logger), @@ -735,7 +735,7 @@ fn build_with_store_internal( channel_monitor_references, ); let (_hash, channel_manager) = - <(BlockHash, ChannelManager)>::read(&mut reader, read_args).map_err(|e| { + <(BlockHash, ChannelManager)>::read(&mut reader, read_args).map_err(|e| { log_error!(logger, "Failed to read channel manager from KVStore: {}", e); BuildError::ReadFailed })?; @@ -779,7 +779,7 @@ fn build_with_store_internal( let message_router = MessageRouter::new(Arc::clone(&network_graph), Arc::clone(&keys_manager)); // Initialize the PeerManager - let onion_messenger: Arc> = Arc::new(OnionMessenger::new( + let onion_messenger: Arc = Arc::new(OnionMessenger::new( Arc::clone(&keys_manager), Arc::clone(&keys_manager), Arc::clone(&logger), diff --git a/src/event.rs b/src/event.rs index 29ebbef43..61dc748d4 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1,4 +1,4 @@ -use crate::types::{Sweeper, Wallet}; +use crate::types::{DynStore, Sweeper, Wallet}; use crate::{ hex_utils, ChannelManager, Config, Error, NetworkGraph, PeerInfo, PeerStore, UserChannelId, }; @@ -20,7 +20,6 @@ use lightning::impl_writeable_tlv_based_enum; use lightning::ln::{ChannelId, PaymentHash}; use lightning::routing::gossip::NodeId; use lightning::util::errors::APIError; -use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use lightning_liquidity::lsps2::utils::compute_opening_fee; @@ -138,22 +137,22 @@ impl_writeable_tlv_based_enum!(Event, }; ); -pub struct EventQueue +pub struct EventQueue where L::Target: Logger, { queue: Arc>>, waker: Arc>>, notifier: Condvar, - kv_store: Arc, + kv_store: Arc, logger: L, } -impl EventQueue +impl EventQueue where L::Target: Logger, { - pub(crate) fn new(kv_store: Arc, logger: L) -> Self { + pub(crate) fn new(kv_store: Arc, logger: L) -> Self { let queue = Arc::new(Mutex::new(VecDeque::new())); let waker = Arc::new(Mutex::new(None)); let notifier = Condvar::new(); @@ -228,13 +227,13 @@ where } } -impl ReadableArgs<(Arc, L)> for EventQueue +impl ReadableArgs<(Arc, L)> for EventQueue where L::Target: Logger, { #[inline] fn read( - reader: &mut R, args: (Arc, L), + reader: &mut R, args: (Arc, L), ) -> Result { let (kv_store, logger) = args; let read_queue: EventQueueDeserWrapper = Readable::read(reader)?; @@ -292,32 +291,31 @@ impl Future for EventFuture { } } -pub(crate) struct EventHandler +pub(crate) struct EventHandler where L::Target: Logger, { - event_queue: Arc>, + event_queue: Arc>, wallet: Arc, - channel_manager: Arc>, - output_sweeper: Arc>, + channel_manager: Arc, + output_sweeper: Arc, network_graph: Arc, - payment_store: Arc>, - peer_store: Arc>, + payment_store: Arc>, + peer_store: Arc>, runtime: Arc>>, logger: L, config: 
Arc, } -impl EventHandler +impl EventHandler where L::Target: Logger, { pub fn new( - event_queue: Arc>, wallet: Arc, - channel_manager: Arc>, output_sweeper: Arc>, - network_graph: Arc, payment_store: Arc>, - peer_store: Arc>, runtime: Arc>>, - logger: L, config: Arc, + event_queue: Arc>, wallet: Arc, channel_manager: Arc, + output_sweeper: Arc, network_graph: Arc, + payment_store: Arc>, peer_store: Arc>, + runtime: Arc>>, logger: L, config: Arc, ) -> Self { Self { event_queue, @@ -933,7 +931,7 @@ mod tests { #[tokio::test] async fn event_queue_persistence() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let event_queue = Arc::new(EventQueue::new(Arc::clone(&store), Arc::clone(&logger))); assert_eq!(event_queue.next_event(), None); @@ -970,7 +968,7 @@ mod tests { #[tokio::test] async fn event_queue_concurrency() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let event_queue = Arc::new(EventQueue::new(Arc::clone(&store), Arc::clone(&logger))); assert_eq!(event_queue.next_event(), None); diff --git a/src/io/utils.rs b/src/io/utils.rs index 937cc706c..3a0429ed2 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -4,19 +4,18 @@ use crate::config::WALLET_KEYS_SEED_LEN; use crate::logger::{log_error, FilesystemLogger}; use crate::peer_store::PeerStore; use crate::sweep::DeprecatedSpendableOutputInfo; -use crate::types::{Broadcaster, ChainSource, FeeEstimator, KeysManager, Sweeper}; +use crate::types::{Broadcaster, ChainSource, DynStore, FeeEstimator, KeysManager, Sweeper}; use crate::{Error, EventQueue, PaymentDetails}; use lightning::routing::gossip::NetworkGraph; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringDecayParameters}; use lightning::util::logger::Logger; use lightning::util::persist::{ - KVStore, KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, - NETWORK_GRAPH_PERSISTENCE_KEY, NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, - NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_KEY, - OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, - SCORER_PERSISTENCE_KEY, SCORER_PERSISTENCE_PRIMARY_NAMESPACE, - SCORER_PERSISTENCE_SECONDARY_NAMESPACE, + KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, NETWORK_GRAPH_PERSISTENCE_KEY, + NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_KEY, OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, + SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::string::PrintableString; @@ -97,8 +96,8 @@ where } /// Read a previously persisted [`NetworkGraph`] from the store. -pub(crate) fn read_network_graph( - kv_store: Arc, logger: L, +pub(crate) fn read_network_graph( + kv_store: Arc, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -115,12 +114,8 @@ where } /// Read a previously persisted [`ProbabilisticScorer`] from the store. 
-pub(crate) fn read_scorer< - K: KVStore + Send + Sync, - G: Deref>, - L: Deref + Clone, ->( - kv_store: Arc, network_graph: G, logger: L, +pub(crate) fn read_scorer>, L: Deref + Clone>( + kv_store: Arc, network_graph: G, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -139,9 +134,9 @@ where } /// Read previously persisted events from the store. -pub(crate) fn read_event_queue( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +pub(crate) fn read_event_queue( + kv_store: Arc, logger: L, +) -> Result, std::io::Error> where L::Target: Logger, { @@ -157,9 +152,9 @@ where } /// Read previously persisted peer info from the store. -pub(crate) fn read_peer_info( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +pub(crate) fn read_peer_info( + kv_store: Arc, logger: L, +) -> Result, std::io::Error> where L::Target: Logger, { @@ -175,8 +170,8 @@ where } /// Read previously persisted payments information from the store. -pub(crate) fn read_payments( - kv_store: Arc, logger: L, +pub(crate) fn read_payments( + kv_store: Arc, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -205,11 +200,11 @@ where } /// Read `OutputSweeper` state from the store. -pub(crate) fn read_output_sweeper( +pub(crate) fn read_output_sweeper( broadcaster: Arc, fee_estimator: Arc, - chain_data_source: Arc, keys_manager: Arc, kv_store: Arc, + chain_data_source: Arc, keys_manager: Arc, kv_store: Arc, logger: Arc, -) -> Result, std::io::Error> { +) -> Result { let mut reader = Cursor::new(kv_store.read( OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, @@ -242,8 +237,8 @@ pub(crate) fn read_output_sweeper( /// Note that this migration will be run in the `Builder`, i.e., at the time when the migration is /// happening no background sync is ongoing, so we shouldn't have a risk of interleaving block /// connections during the migration. 
-pub(crate) fn migrate_deprecated_spendable_outputs( - sweeper: Arc>, kv_store: Arc, logger: L, +pub(crate) fn migrate_deprecated_spendable_outputs( + sweeper: Arc, kv_store: Arc, logger: L, ) -> Result<(), std::io::Error> where L::Target: Logger, @@ -318,8 +313,8 @@ where Ok(()) } -pub(crate) fn read_latest_rgs_sync_timestamp( - kv_store: Arc, logger: L, +pub(crate) fn read_latest_rgs_sync_timestamp( + kv_store: Arc, logger: L, ) -> Result where L::Target: Logger, @@ -338,8 +333,8 @@ where }) } -pub(crate) fn write_latest_rgs_sync_timestamp( - updated_timestamp: u32, kv_store: Arc, logger: L, +pub(crate) fn write_latest_rgs_sync_timestamp( + updated_timestamp: u32, kv_store: Arc, logger: L, ) -> Result<(), Error> where L::Target: Logger, @@ -365,8 +360,8 @@ where }) } -pub(crate) fn read_latest_node_ann_bcast_timestamp( - kv_store: Arc, logger: L, +pub(crate) fn read_latest_node_ann_bcast_timestamp( + kv_store: Arc, logger: L, ) -> Result where L::Target: Logger, @@ -389,8 +384,8 @@ where }) } -pub(crate) fn write_latest_node_ann_bcast_timestamp( - updated_timestamp: u64, kv_store: Arc, logger: L, +pub(crate) fn write_latest_node_ann_bcast_timestamp( + updated_timestamp: u64, kv_store: Arc, logger: L, ) -> Result<(), Error> where L::Target: Logger, diff --git a/src/lib.rs b/src/lib.rs index 5a6e1dd8c..3f240e980 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -131,7 +131,7 @@ use payment_store::PaymentStore; pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; use peer_store::{PeerInfo, PeerStore}; use types::{ - Broadcaster, ChainMonitor, ChannelManager, FeeEstimator, KeysManager, NetworkGraph, + Broadcaster, ChainMonitor, ChannelManager, DynStore, FeeEstimator, KeysManager, NetworkGraph, PeerManager, Router, Scorer, Sweeper, Wallet, }; pub use types::{ChannelDetails, PeerDetails, UserChannelId}; @@ -145,8 +145,6 @@ use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::sign::EntropySource; -use lightning::util::persist::KVStore; - use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; pub use lightning::util::logger::Level as LogLevel; @@ -176,7 +174,7 @@ uniffi::include_scaffolding!("ldk_node"); /// The main interface object of LDK Node, wrapping the necessary LDK and BDK functionalities. /// /// Needs to be initialized and instantiated through [`Builder::build`]. -pub struct Node { +pub struct Node { runtime: Arc>>, stop_sender: tokio::sync::watch::Sender<()>, config: Arc, @@ -184,21 +182,21 @@ pub struct Node { tx_sync: Arc>>, tx_broadcaster: Arc, fee_estimator: Arc, - event_queue: Arc>>, - channel_manager: Arc>, - chain_monitor: Arc>, - output_sweeper: Arc>, - peer_manager: Arc>, + event_queue: Arc>>, + channel_manager: Arc, + chain_monitor: Arc, + output_sweeper: Arc, + peer_manager: Arc, keys_manager: Arc, network_graph: Arc, gossip_source: Arc, - liquidity_source: Option>>>, - kv_store: Arc, + liquidity_source: Option>>>, + kv_store: Arc, logger: Arc, _router: Arc, scorer: Arc>, - peer_store: Arc>>, - payment_store: Arc>>, + peer_store: Arc>>, + payment_store: Arc>>, is_listening: Arc, latest_wallet_sync_timestamp: Arc>>, latest_onchain_wallet_sync_timestamp: Arc>>, @@ -207,7 +205,7 @@ pub struct Node { latest_node_announcement_broadcast_timestamp: Arc>>, } -impl Node { +impl Node { /// Starts the necessary background tasks, such as handling events coming from user input, /// LDK/BDK, and the peer-to-peer network. 
/// @@ -1809,7 +1807,7 @@ impl Node { } } -impl Drop for Node { +impl Drop for Node { fn drop(&mut self) { let _ = self.stop(); } @@ -1852,8 +1850,8 @@ pub struct NodeStatus { pub latest_node_announcement_broadcast_timestamp: Option, } -async fn connect_peer_if_necessary( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc>, +async fn connect_peer_if_necessary( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: Arc, ) -> Result<(), Error> { if peer_manager.peer_by_node_id(&node_id).is_some() { @@ -1863,8 +1861,8 @@ async fn connect_peer_if_necessary( do_connect_peer(node_id, addr, peer_manager, logger).await } -async fn do_connect_peer( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc>, +async fn do_connect_peer( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: Arc, ) -> Result<(), Error> { log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); diff --git a/src/liquidity.rs b/src/liquidity.rs index 0404fe64e..00e9f5717 100644 --- a/src/liquidity.rs +++ b/src/liquidity.rs @@ -5,7 +5,6 @@ use crate::{Config, Error}; use lightning::ln::channelmanager::MIN_FINAL_CLTV_EXPIRY_DELTA; use lightning::ln::msgs::SocketAddress; use lightning::routing::router::{RouteHint, RouteHintHop}; -use lightning::util::persist::KVStore; use lightning_invoice::{Bolt11Invoice, InvoiceBuilder, RoutingFees}; use lightning_liquidity::events::Event; use lightning_liquidity::lsps0::ser::RequestId; @@ -33,26 +32,26 @@ struct LSPS2Service { pending_buy_requests: Mutex>>, } -pub(crate) struct LiquiditySource +pub(crate) struct LiquiditySource where L::Target: Logger, { lsps2_service: Option, - channel_manager: Arc>, + channel_manager: Arc, keys_manager: Arc, - liquidity_manager: Arc>, + liquidity_manager: Arc, config: Arc, logger: L, } -impl LiquiditySource +impl LiquiditySource where L::Target: Logger, { pub(crate) fn new_lsps2( address: SocketAddress, node_id: PublicKey, token: Option, - channel_manager: Arc>, keys_manager: Arc, - liquidity_manager: Arc>, config: Arc, logger: L, + channel_manager: Arc, keys_manager: Arc, + liquidity_manager: Arc, config: Arc, logger: L, ) -> Self { let pending_fee_requests = Mutex::new(HashMap::new()); let pending_buy_requests = Mutex::new(HashMap::new()); @@ -66,12 +65,12 @@ where Self { lsps2_service, channel_manager, keys_manager, liquidity_manager, config, logger } } - pub(crate) fn set_peer_manager(&self, peer_manager: Arc>) { + pub(crate) fn set_peer_manager(&self, peer_manager: Arc) { let process_msgs_callback = move || peer_manager.process_events(); self.liquidity_manager.set_process_msgs_callback(process_msgs_callback); } - pub(crate) fn liquidity_manager(&self) -> &LiquidityManager { + pub(crate) fn liquidity_manager(&self) -> &LiquidityManager { self.liquidity_manager.as_ref() } diff --git a/src/message_handler.rs b/src/message_handler.rs index 852f63cec..89d67d846 100644 --- a/src/message_handler.rs +++ b/src/message_handler.rs @@ -4,7 +4,6 @@ use lightning::ln::features::{InitFeatures, NodeFeatures}; use lightning::ln::peer_handler::CustomMessageHandler; use lightning::ln::wire::CustomMessageReader; use lightning::util::logger::Logger; -use lightning::util::persist::KVStore; use lightning_liquidity::lsps0::ser::RawLSPSMessage; @@ -13,19 +12,19 @@ use bitcoin::secp256k1::PublicKey; use std::ops::Deref; use std::sync::Arc; -pub(crate) enum NodeCustomMessageHandler +pub(crate) enum NodeCustomMessageHandler where L::Target: Logger, { Ignoring, - Liquidity { liquidity_source: Arc> }, + Liquidity { 
liquidity_source: Arc> }, } -impl NodeCustomMessageHandler +impl NodeCustomMessageHandler where L::Target: Logger, { - pub(crate) fn new_liquidity(liquidity_source: Arc>) -> Self { + pub(crate) fn new_liquidity(liquidity_source: Arc>) -> Self { Self::Liquidity { liquidity_source } } @@ -34,8 +33,7 @@ where } } -impl CustomMessageReader - for NodeCustomMessageHandler +impl CustomMessageReader for NodeCustomMessageHandler where L::Target: Logger, { @@ -53,8 +51,7 @@ where } } -impl CustomMessageHandler - for NodeCustomMessageHandler +impl CustomMessageHandler for NodeCustomMessageHandler where L::Target: Logger, { diff --git a/src/payment_store.rs b/src/payment_store.rs index 704966878..12e331ad4 100644 --- a/src/payment_store.rs +++ b/src/payment_store.rs @@ -3,10 +3,10 @@ use crate::io::{ PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, }; use crate::logger::{log_error, Logger}; +use crate::types::DynStore; use crate::Error; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use lightning::util::persist::KVStore; use lightning::util::ser::Writeable; use lightning::{impl_writeable_tlv_based, impl_writeable_tlv_based_enum}; @@ -127,20 +127,20 @@ impl PaymentDetailsUpdate { } } -pub(crate) struct PaymentStore +pub(crate) struct PaymentStore where L::Target: Logger, { payments: Mutex>, - kv_store: Arc, + kv_store: Arc, logger: L, } -impl PaymentStore +impl PaymentStore where L::Target: Logger, { - pub(crate) fn new(payments: Vec, kv_store: Arc, logger: L) -> Self { + pub(crate) fn new(payments: Vec, kv_store: Arc, logger: L) -> Self { let payments = Mutex::new(HashMap::from_iter( payments.into_iter().map(|payment| (payment.hash, payment)), )); @@ -260,7 +260,7 @@ mod tests { #[test] fn payment_info_is_persisted() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let payment_store = PaymentStore::new(Vec::new(), Arc::clone(&store), logger); diff --git a/src/peer_store.rs b/src/peer_store.rs index 46ba1dbe2..21bd50872 100644 --- a/src/peer_store.rs +++ b/src/peer_store.rs @@ -3,10 +3,10 @@ use crate::io::{ PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, }; use crate::logger::{log_error, Logger}; +use crate::types::DynStore; use crate::{Error, SocketAddress}; use lightning::impl_writeable_tlv_based; -use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use bitcoin::secp256k1::PublicKey; @@ -15,20 +15,20 @@ use std::collections::HashMap; use std::ops::Deref; use std::sync::{Arc, RwLock}; -pub struct PeerStore +pub struct PeerStore where L::Target: Logger, { peers: RwLock>, - kv_store: Arc, + kv_store: Arc, logger: L, } -impl PeerStore +impl PeerStore where L::Target: Logger, { - pub(crate) fn new(kv_store: Arc, logger: L) -> Self { + pub(crate) fn new(kv_store: Arc, logger: L) -> Self { let peers = RwLock::new(HashMap::new()); Self { peers, kv_store, logger } } @@ -83,13 +83,13 @@ where } } -impl ReadableArgs<(Arc, L)> for PeerStore +impl ReadableArgs<(Arc, L)> for PeerStore where L::Target: Logger, { #[inline] fn read( - reader: &mut R, args: (Arc, L), + reader: &mut R, args: (Arc, L), ) -> Result { let (kv_store, logger) = args; let read_peers: PeerStoreDeserWrapper = Readable::read(reader)?; @@ -150,7 +150,7 @@ mod tests { #[test] fn peer_info_persistence() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = 
Arc::new(TestLogger::new()); let peer_store = PeerStore::new(Arc::clone(&store), Arc::clone(&logger)); diff --git a/src/types.rs b/src/types.rs index afed1320a..bf467dc49 100644 --- a/src/types.rs +++ b/src/types.rs @@ -13,6 +13,7 @@ use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeePa use lightning::sign::InMemorySigner; use lightning::util::config::ChannelConfig as LdkChannelConfig; use lightning::util::config::MaxDustHTLCExposure as LdkMaxDustHTLCExposure; +use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, Writeable, Writer}; use lightning::util::sweep::OutputSweeper; use lightning_net_tokio::SocketDescriptor; @@ -23,35 +24,34 @@ use bitcoin::OutPoint; use std::sync::{Arc, Mutex, RwLock}; -pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< +pub(crate) type DynStore = dyn KVStore + Sync + Send; + +pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, Arc, Arc, Arc, Arc, - Arc, + Arc, >; -pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< +pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< SocketDescriptor, - Arc>, + Arc, Arc, - Arc>, + Arc, Arc, - Arc>>, + Arc>>, Arc, >; pub(crate) type ChainSource = EsploraSyncClient>; -pub(crate) type LiquidityManager = lightning_liquidity::LiquidityManager< - Arc, - Arc>, - Arc, ->; +pub(crate) type LiquidityManager = + lightning_liquidity::LiquidityManager, Arc, Arc>; -pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< - Arc>, +pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< + Arc, Arc, Arc, Arc, @@ -109,11 +109,11 @@ pub(crate) type GossipSync = lightning_background_processor::GossipSync< Arc, >; -pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMessenger< +pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMessenger< Arc, Arc, Arc, - Arc>, + Arc, Arc, IgnoringMessageHandler, IgnoringMessageHandler, @@ -125,12 +125,12 @@ pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMess Arc, >; -pub(crate) type Sweeper = OutputSweeper< +pub(crate) type Sweeper = OutputSweeper< Arc, Arc, Arc, Arc, - Arc, + Arc, Arc, Arc, >; diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 7c6343b84..3aef1010d 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -11,7 +11,6 @@ use crate::UniffiCustomTypeConverter; use crate::error::Error; use crate::hex_utils; -use crate::io::sqlite_store::SqliteStore; use crate::{Node, SocketAddress, UserChannelId}; use bitcoin::hashes::sha256::Hash as Sha256; @@ -26,7 +25,7 @@ use std::str::FromStr; /// This type alias is required as Uniffi doesn't support generics, i.e., we can only expose the /// concretized types via this aliasing hack. -pub type LDKNode = Node; +pub type LDKNode = Node; impl UniffiCustomTypeConverter for PublicKey { type Builtin = String; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 3be36869d..b72ecdc75 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -182,9 +182,9 @@ pub(crate) fn random_config() -> Config { } #[cfg(feature = "uniffi")] -type TestNode = Arc>; +type TestNode = Arc; #[cfg(not(feature = "uniffi"))] -type TestNode = Node; +type TestNode = Node; macro_rules! setup_builder { ($builder: ident, $config: expr) => { @@ -197,9 +197,7 @@ macro_rules! 
setup_builder { pub(crate) use setup_builder; -pub(crate) fn setup_two_nodes( - electrsd: &ElectrsD, allow_0conf: bool, -) -> (TestNode, TestNode) { +pub(crate) fn setup_two_nodes(electrsd: &ElectrsD, allow_0conf: bool) -> (TestNode, TestNode) { println!("== Node A =="); let config_a = random_config(); let node_a = setup_node(electrsd, config_a); @@ -213,7 +211,7 @@ pub(crate) fn setup_two_nodes( (node_a, node_b) } -pub(crate) fn setup_node(electrsd: &ElectrsD, config: Config) -> TestNode { +pub(crate) fn setup_node(electrsd: &ElectrsD, config: Config) -> TestNode { let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); @@ -332,8 +330,8 @@ pub(crate) fn premine_and_distribute_funds( generate_blocks_and_wait(bitcoind, electrs, 1); } -pub fn open_channel( - node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, announce: bool, +pub fn open_channel( + node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, announce: bool, electrsd: &ElectrsD, ) { node_a @@ -354,9 +352,8 @@ pub fn open_channel( wait_for_tx(&electrsd.client, funding_txo_a.txid); } -pub(crate) fn do_channel_full_cycle( - node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, - allow_0conf: bool, +pub(crate) fn do_channel_full_cycle( + node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, allow_0conf: bool, ) { let addr_a = node_a.new_onchain_address().unwrap(); let addr_b = node_b.new_onchain_address().unwrap(); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 0f1689ecd..71867f8f2 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -9,6 +9,8 @@ use common::{ use ldk_node::{Builder, Event, NodeError}; +use lightning::util::persist::KVStore; + use bitcoin::{Amount, Network}; use std::sync::Arc; @@ -157,7 +159,8 @@ fn start_stop_reinit() { let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); - let test_sync_store = Arc::new(TestSyncStore::new(config.storage_dir_path.clone().into())); + let test_sync_store: Arc = + Arc::new(TestSyncStore::new(config.storage_dir_path.clone().into())); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); From ba28a8ddb2e151b38ff52d23317bf7e27ac35774 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 7 Feb 2024 13:36:49 +0100 Subject: [PATCH 17/89] Drop unnecessary `LDKNode` type alias completely ... now that we don't have any generic to hide, we can just use the `Node` type directly. 
--- bindings/ldk_node.udl | 4 ++-- src/uniffi_types.rs | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 02dbc2fb2..619a29667 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -34,10 +34,10 @@ interface Builder { [Throws=BuildError] void set_listening_addresses(sequence listening_addresses); [Throws=BuildError] - LDKNode build(); + Node build(); }; -interface LDKNode { +interface Node { [Throws=NodeError] void start(); [Throws=NodeError] diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 3aef1010d..0cef5d682 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -11,7 +11,7 @@ use crate::UniffiCustomTypeConverter; use crate::error::Error; use crate::hex_utils; -use crate::{Node, SocketAddress, UserChannelId}; +use crate::{SocketAddress, UserChannelId}; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; @@ -23,10 +23,6 @@ use lightning_invoice::{Bolt11Invoice, SignedRawBolt11Invoice}; use std::convert::TryInto; use std::str::FromStr; -/// This type alias is required as Uniffi doesn't support generics, i.e., we can only expose the -/// concretized types via this aliasing hack. -pub type LDKNode = Node; - impl UniffiCustomTypeConverter for PublicKey { type Builtin = String; From c566fd51469bf4096f2181243d2ce7663824971c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 7 Feb 2024 14:10:02 +0100 Subject: [PATCH 18/89] Expose `build_with_fs_store` in bindings .. now that we can. --- bindings/ldk_node.udl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 619a29667..c39d841cb 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -35,6 +35,8 @@ interface Builder { void set_listening_addresses(sequence listening_addresses); [Throws=BuildError] Node build(); + [Throws=BuildError] + Node build_with_fs_store(); }; interface Node { From e3dc93a0e1bd6836de77a342d97759b29ba54cd0 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 21 Feb 2024 10:53:24 +0100 Subject: [PATCH 19/89] Move connection logic to `connection.rs` .. just a minor cleanup to further modularize the codebase. Also, we'll be reusing these methods in `Event::ConnectionNeeded` soon. 
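Not part of this patch, but as a rough illustration of the intended reuse: a hypothetical handler for a `ConnectionNeeded`-style event could simply delegate to the relocated helper. This is only a sketch; the `handle_connection_needed` function and its wiring are made up here, while `PeerManager`, `FilesystemLogger`, and `Error` refer to the crate-internal types used by the helper in the diff below.

    use std::sync::Arc;

    use bitcoin::secp256k1::PublicKey;
    use lightning::ln::msgs::SocketAddress;

    use crate::connection;
    use crate::logger::FilesystemLogger;
    use crate::types::PeerManager;
    use crate::Error;

    // Hypothetical sketch: try each candidate address until one connection
    // attempt succeeds, reusing the relocated helper.
    async fn handle_connection_needed(
        node_id: PublicKey, addresses: Vec<SocketAddress>,
        peer_manager: Arc<PeerManager>, logger: Arc<FilesystemLogger>,
    ) -> Result<(), Error> {
        for addr in addresses {
            if connection::connect_peer_if_necessary(
                node_id, addr, Arc::clone(&peer_manager), Arc::clone(&logger),
            )
            .await
            .is_ok()
            {
                return Ok(());
            }
        }
        Err(Error::ConnectionFailed)
    }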
--- src/connection.rs | 64 +++++++++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 55 ++-------------------------------------- 2 files changed, 66 insertions(+), 53 deletions(-) create mode 100644 src/connection.rs diff --git a/src/connection.rs b/src/connection.rs new file mode 100644 index 000000000..75273d337 --- /dev/null +++ b/src/connection.rs @@ -0,0 +1,64 @@ +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::types::PeerManager; +use crate::Error; + +use lightning::ln::msgs::SocketAddress; + +use bitcoin::secp256k1::PublicKey; + +use std::net::ToSocketAddrs; +use std::sync::Arc; +use std::time::Duration; + +pub(crate) async fn connect_peer_if_necessary( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, + logger: Arc, +) -> Result<(), Error> { + if peer_manager.peer_by_node_id(&node_id).is_some() { + return Ok(()); + } + + do_connect_peer(node_id, addr, peer_manager, logger).await +} + +pub(crate) async fn do_connect_peer( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, + logger: Arc, +) -> Result<(), Error> { + log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); + + let socket_addr = addr + .to_socket_addrs() + .map_err(|e| { + log_error!(logger, "Failed to resolve network address: {}", e); + Error::InvalidSocketAddress + })? + .next() + .ok_or(Error::ConnectionFailed)?; + + match lightning_net_tokio::connect_outbound(Arc::clone(&peer_manager), node_id, socket_addr) + .await + { + Some(connection_closed_future) => { + let mut connection_closed_future = Box::pin(connection_closed_future); + loop { + match futures::poll!(&mut connection_closed_future) { + std::task::Poll::Ready(_) => { + log_info!(logger, "Peer connection closed: {}@{}", node_id, addr); + return Err(Error::ConnectionFailed); + }, + std::task::Poll::Pending => {}, + } + // Avoid blocking the tokio context by sleeping a bit + match peer_manager.peer_by_node_id(&node_id) { + Some(_) => return Ok(()), + None => tokio::time::sleep(Duration::from_millis(10)).await, + } + } + }, + None => { + log_error!(logger, "Failed to connect to peer: {}@{}", node_id, addr); + Err(Error::ConnectionFailed) + }, + } +} diff --git a/src/lib.rs b/src/lib.rs index 3f240e980..0bfe03ce5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -78,6 +78,7 @@ mod balance; mod builder; mod config; +mod connection; mod error; mod event; mod fee_estimator; @@ -124,6 +125,7 @@ use config::{ LDK_PAYMENT_RETRY_TIMEOUT, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; +use connection::{connect_peer_if_necessary, do_connect_peer}; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; @@ -1849,56 +1851,3 @@ pub struct NodeStatus { /// Will be `None` if we have no public channels or we haven't broadcasted since the [`Node`] was initialized. 
pub latest_node_announcement_broadcast_timestamp: Option, } - -async fn connect_peer_if_necessary( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, - logger: Arc, -) -> Result<(), Error> { - if peer_manager.peer_by_node_id(&node_id).is_some() { - return Ok(()); - } - - do_connect_peer(node_id, addr, peer_manager, logger).await -} - -async fn do_connect_peer( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, - logger: Arc, -) -> Result<(), Error> { - log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); - - let socket_addr = addr - .to_socket_addrs() - .map_err(|e| { - log_error!(logger, "Failed to resolve network address: {}", e); - Error::InvalidSocketAddress - })? - .next() - .ok_or(Error::ConnectionFailed)?; - - match lightning_net_tokio::connect_outbound(Arc::clone(&peer_manager), node_id, socket_addr) - .await - { - Some(connection_closed_future) => { - let mut connection_closed_future = Box::pin(connection_closed_future); - loop { - match futures::poll!(&mut connection_closed_future) { - std::task::Poll::Ready(_) => { - log_info!(logger, "Peer connection closed: {}@{}", node_id, addr); - return Err(Error::ConnectionFailed); - }, - std::task::Poll::Pending => {}, - } - // Avoid blocking the tokio context by sleeping a bit - match peer_manager.peer_by_node_id(&node_id) { - Some(_) => return Ok(()), - None => tokio::time::sleep(Duration::from_millis(10)).await, - } - } - }, - None => { - log_error!(logger, "Failed to connect to peer: {}@{}", node_id, addr); - Err(Error::ConnectionFailed) - }, - } -} From 872579fcc1880f4299d3d27413db59c1e80f978a Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 21 Feb 2024 13:17:23 +0100 Subject: [PATCH 20/89] Require `Clone + Sync + Send + 'static` for event and connection loggers .. we should consider dropping `Deref` and instead just commiting to store a `Arc` everwhere, as it gets tedious to maintain. However, this is barely scraping by and the least invasive change here. 
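For readers unfamiliar with the `Deref`-based logger generics this patch touches, the following self-contained sketch shows the shape of the bound: the handle is generic over `L: Deref` with `L::Target: Logger`, and the added `Clone + Sync + Send + 'static` requirements are what allow cloning the handle into background tasks. The `Logger` trait and `StdoutLogger` below are placeholders rather than the crate's actual types; in practice `Arc<FilesystemLogger>` is what satisfies the bound.

```rust
use std::ops::Deref;
use std::sync::Arc;

// Placeholder trait standing in for the crate's `Logger`.
trait Logger {
    fn log(&self, msg: &str);
}

struct StdoutLogger;
impl Logger for StdoutLogger {
    fn log(&self, msg: &str) {
        println!("{msg}");
    }
}

// `Deref` lets callers hand in any smart pointer to a `Logger`; the extra
// bounds let the handle be cloned and moved into spawned tasks/threads.
fn spawn_with_logger<L: Deref + Clone + Sync + Send + 'static>(logger: L)
where
    L::Target: Logger,
{
    let task_logger = logger.clone();
    std::thread::spawn(move || {
        task_logger.log("hello from a background task");
    })
    .join()
    .unwrap();
}

fn main() {
    spawn_with_logger(Arc::new(StdoutLogger));
}
```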
--- src/connection.rs | 23 ++++++++++++++--------- src/event.rs | 4 ++-- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/connection.rs b/src/connection.rs index 75273d337..e281e61c2 100644 --- a/src/connection.rs +++ b/src/connection.rs @@ -1,4 +1,4 @@ -use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::logger::{log_error, log_info, Logger}; use crate::types::PeerManager; use crate::Error; @@ -7,13 +7,16 @@ use lightning::ln::msgs::SocketAddress; use bitcoin::secp256k1::PublicKey; use std::net::ToSocketAddrs; +use std::ops::Deref; use std::sync::Arc; use std::time::Duration; -pub(crate) async fn connect_peer_if_necessary( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, - logger: Arc, -) -> Result<(), Error> { +pub(crate) async fn connect_peer_if_necessary( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: L, +) -> Result<(), Error> +where + L::Target: Logger, +{ if peer_manager.peer_by_node_id(&node_id).is_some() { return Ok(()); } @@ -21,10 +24,12 @@ pub(crate) async fn connect_peer_if_necessary( do_connect_peer(node_id, addr, peer_manager, logger).await } -pub(crate) async fn do_connect_peer( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, - logger: Arc, -) -> Result<(), Error> { +pub(crate) async fn do_connect_peer( + node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: L, +) -> Result<(), Error> +where + L::Target: Logger, +{ log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); let socket_addr = addr diff --git a/src/event.rs b/src/event.rs index 61dc748d4..cd11e41a8 100644 --- a/src/event.rs +++ b/src/event.rs @@ -291,7 +291,7 @@ impl Future for EventFuture { } } -pub(crate) struct EventHandler +pub(crate) struct EventHandler where L::Target: Logger, { @@ -307,7 +307,7 @@ where config: Arc, } -impl EventHandler +impl EventHandler where L::Target: Logger, { From 9c8be40a2f272bd12f909c1ab3241b60499e856d Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 4 Mar 2024 11:30:30 +0100 Subject: [PATCH 21/89] Use `tokio::select` on `connection_closed_future` .. which is a bit more readable and in-line what we do other places, plus it allows us to drop the `futures` dependency. --- Cargo.toml | 1 - src/connection.rs | 12 ++++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 5b7eca8e3..e88826aee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,7 +66,6 @@ bip39 = "2.0.0" rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } -futures = "0.3" tokio = { version = "1", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } esplora-client = { version = "0.6", default-features = false } libc = "0.2" diff --git a/src/connection.rs b/src/connection.rs index e281e61c2..7a93c1d8d 100644 --- a/src/connection.rs +++ b/src/connection.rs @@ -47,17 +47,17 @@ where Some(connection_closed_future) => { let mut connection_closed_future = Box::pin(connection_closed_future); loop { - match futures::poll!(&mut connection_closed_future) { - std::task::Poll::Ready(_) => { + tokio::select! 
{ + _ = &mut connection_closed_future => { log_info!(logger, "Peer connection closed: {}@{}", node_id, addr); return Err(Error::ConnectionFailed); }, - std::task::Poll::Pending => {}, - } - // Avoid blocking the tokio context by sleeping a bit + _ = tokio::time::sleep(Duration::from_millis(10)) => {}, + }; + match peer_manager.peer_by_node_id(&node_id) { Some(_) => return Ok(()), - None => tokio::time::sleep(Duration::from_millis(10)).await, + None => continue, } } }, From 2d9fe95f94c7d04ae4282983d2c085c9d17eb889 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 4 Mar 2024 14:32:23 +0100 Subject: [PATCH 22/89] Add test for concurrent connection handling ... we check that we can successfully issue concurrent connection attempts, which all succeed. --- tests/integration_tests_rust.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 71867f8f2..f0e222fd3 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -333,3 +333,33 @@ fn do_connection_restart_behavior(persist: bool) { assert!(node_b.list_peers().is_empty()); } } + +#[test] +fn concurrent_connections_succeed() { + let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false); + + let node_a = Arc::new(node_a); + let node_b = Arc::new(node_b); + + let node_id_b = node_b.node_id(); + let node_addr_b = node_b.listening_addresses().unwrap().first().unwrap().clone(); + + while !node_b.status().is_listening { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + + let mut handles = Vec::new(); + for _ in 0..10 { + let thread_node = Arc::clone(&node_a); + let thread_addr = node_addr_b.clone(); + let handle = std::thread::spawn(move || { + thread_node.connect(node_id_b, thread_addr, false).unwrap(); + }); + handles.push(handle); + } + + for h in handles { + h.join().unwrap(); + } +} From 77c538b63e8c1a558468018fdab69facfa3f75b8 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 4 Mar 2024 14:00:04 +0100 Subject: [PATCH 23/89] Add `ConnectionManager` handling pending connections Previously, concurrent calls to `do_connect_peer`/`connect_peer_if_necessary` could result in multiple connections being opened, just to be closed as redundant shortly after. Parallel connection attempt were therefore prone to fail. Here, we introduce a `ConnectionManager` that implements a pub/sub pattern: upon the initial call, the task responsible for polling the connection future to completion registers that a connection is in-flight. Any calls following will check this and register a `oneshot` channel to be notified of the `Result`. 
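Before the full diff, here is a compact, self-contained sketch of the pub/sub deduplication pattern described above (simplified: peers are plain `u8` ids and the "connection attempt" is a sleep; this is not the actual `ConnectionManager` code, which follows below). The first caller for a given peer registers an empty subscriber list and drives the attempt itself; concurrent callers find the existing entry, park a `tokio::sync::oneshot` sender in it, and await the result the first task broadcasts.

```rust
use std::collections::hash_map::{self, HashMap};
use std::sync::{Arc, Mutex};
use std::time::Duration;

use tokio::sync::oneshot;

#[derive(Default)]
struct ConnectDedup {
    pending: Mutex<HashMap<u8, Vec<oneshot::Sender<Result<(), ()>>>>>,
}

impl ConnectDedup {
    async fn connect(self: Arc<Self>, peer: u8) -> Result<(), ()> {
        // Register-or-subscribe: if an attempt is already in flight, wait on it.
        let rx_opt = {
            let mut pending = self.pending.lock().unwrap();
            match pending.entry(peer) {
                hash_map::Entry::Occupied(mut e) => {
                    let (tx, rx) = oneshot::channel();
                    e.get_mut().push(tx);
                    Some(rx)
                },
                hash_map::Entry::Vacant(e) => {
                    e.insert(Vec::new());
                    None
                },
            }
        };
        if let Some(rx) = rx_opt {
            return rx.await.map_err(|_| ())?;
        }

        // Only the first caller gets here and performs the actual work.
        tokio::time::sleep(Duration::from_millis(50)).await;
        let res = Ok(());

        // Propagate the result to everyone who subscribed in the meantime.
        let subscribers = self.pending.lock().unwrap().remove(&peer).unwrap_or_default();
        for tx in subscribers {
            let _ = tx.send(res);
        }
        res
    }
}

fn main() {
    let rt = tokio::runtime::Runtime::new().unwrap();
    rt.block_on(async {
        let dedup = Arc::new(ConnectDedup::default());
        let a = tokio::spawn(Arc::clone(&dedup).connect(1));
        let b = tokio::spawn(Arc::clone(&dedup).connect(1));
        assert!(a.await.unwrap().is_ok());
        assert!(b.await.unwrap().is_ok());
    });
}
```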
--- src/builder.rs | 5 ++ src/connection.rs | 168 +++++++++++++++++++++++++++++++++------------- src/error.rs | 2 +- src/lib.rs | 23 +++---- 4 files changed, 139 insertions(+), 59 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 5edbd55ab..6b4da6b57 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -2,6 +2,7 @@ use crate::config::{ Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, }; +use crate::connection::ConnectionManager; use crate::event::EventQueue; use crate::fee_estimator::OnchainFeeEstimator; use crate::gossip::GossipSource; @@ -895,6 +896,9 @@ fn build_with_store_internal( liquidity_source.as_ref().map(|l| l.set_peer_manager(Arc::clone(&peer_manager))); + let connection_manager = + Arc::new(ConnectionManager::new(Arc::clone(&peer_manager), Arc::clone(&logger))); + let output_sweeper = match io::utils::read_output_sweeper( Arc::clone(&tx_broadcaster), Arc::clone(&fee_estimator), @@ -991,6 +995,7 @@ fn build_with_store_internal( chain_monitor, output_sweeper, peer_manager, + connection_manager, keys_manager, network_graph, gossip_source, diff --git a/src/connection.rs b/src/connection.rs index 7a93c1d8d..9d956d6be 100644 --- a/src/connection.rs +++ b/src/connection.rs @@ -6,64 +6,142 @@ use lightning::ln::msgs::SocketAddress; use bitcoin::secp256k1::PublicKey; +use std::collections::hash_map::{self, HashMap}; use std::net::ToSocketAddrs; use std::ops::Deref; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; use std::time::Duration; -pub(crate) async fn connect_peer_if_necessary( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: L, -) -> Result<(), Error> +pub(crate) struct ConnectionManager where L::Target: Logger, { - if peer_manager.peer_by_node_id(&node_id).is_some() { - return Ok(()); - } - - do_connect_peer(node_id, addr, peer_manager, logger).await + pending_connections: + Mutex>>>>, + peer_manager: Arc, + logger: L, } -pub(crate) async fn do_connect_peer( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc, logger: L, -) -> Result<(), Error> +impl ConnectionManager where L::Target: Logger, { - log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); - - let socket_addr = addr - .to_socket_addrs() - .map_err(|e| { - log_error!(logger, "Failed to resolve network address: {}", e); - Error::InvalidSocketAddress - })? - .next() - .ok_or(Error::ConnectionFailed)?; - - match lightning_net_tokio::connect_outbound(Arc::clone(&peer_manager), node_id, socket_addr) - .await - { - Some(connection_closed_future) => { - let mut connection_closed_future = Box::pin(connection_closed_future); - loop { - tokio::select! 
{ - _ = &mut connection_closed_future => { - log_info!(logger, "Peer connection closed: {}@{}", node_id, addr); - return Err(Error::ConnectionFailed); - }, - _ = tokio::time::sleep(Duration::from_millis(10)) => {}, - }; - - match peer_manager.peer_by_node_id(&node_id) { - Some(_) => return Ok(()), - None => continue, + pub(crate) fn new(peer_manager: Arc, logger: L) -> Self { + let pending_connections = Mutex::new(HashMap::new()); + Self { pending_connections, peer_manager, logger } + } + + pub(crate) async fn connect_peer_if_necessary( + &self, node_id: PublicKey, addr: SocketAddress, + ) -> Result<(), Error> { + if self.peer_manager.peer_by_node_id(&node_id).is_some() { + return Ok(()); + } + + self.do_connect_peer(node_id, addr).await + } + + pub(crate) async fn do_connect_peer( + &self, node_id: PublicKey, addr: SocketAddress, + ) -> Result<(), Error> { + // First, we check if there is already an outbound connection in flight, if so, we just + // await on the corresponding watch channel. The task driving the connection future will + // send us the result.. + let pending_ready_receiver_opt = self.register_or_subscribe_pending_connection(&node_id); + if let Some(pending_connection_ready_receiver) = pending_ready_receiver_opt { + return pending_connection_ready_receiver.await.map_err(|e| { + debug_assert!(false, "Failed to receive connection result: {:?}", e); + log_error!(self.logger, "Failed to receive connection result: {:?}", e); + Error::ConnectionFailed + })?; + } + + log_info!(self.logger, "Connecting to peer: {}@{}", node_id, addr); + + let socket_addr = addr + .to_socket_addrs() + .map_err(|e| { + log_error!(self.logger, "Failed to resolve network address {}: {}", addr, e); + self.propagate_result_to_subscribers(&node_id, Err(Error::InvalidSocketAddress)); + Error::InvalidSocketAddress + })? + .next() + .ok_or_else(|| { + log_error!(self.logger, "Failed to resolve network address {}", addr); + self.propagate_result_to_subscribers(&node_id, Err(Error::InvalidSocketAddress)); + Error::InvalidSocketAddress + })?; + + let connection_future = lightning_net_tokio::connect_outbound( + Arc::clone(&self.peer_manager), + node_id, + socket_addr, + ); + + let res = match connection_future.await { + Some(connection_closed_future) => { + let mut connection_closed_future = Box::pin(connection_closed_future); + loop { + tokio::select! { + _ = &mut connection_closed_future => { + log_info!(self.logger, "Peer connection closed: {}@{}", node_id, addr); + break Err(Error::ConnectionFailed); + }, + _ = tokio::time::sleep(Duration::from_millis(10)) => {}, + }; + + match self.peer_manager.peer_by_node_id(&node_id) { + Some(_) => break Ok(()), + None => continue, + } } + }, + None => { + log_error!(self.logger, "Failed to connect to peer: {}@{}", node_id, addr); + Err(Error::ConnectionFailed) + }, + }; + + self.propagate_result_to_subscribers(&node_id, res); + + res + } + + fn register_or_subscribe_pending_connection( + &self, node_id: &PublicKey, + ) -> Option>> { + let mut pending_connections_lock = self.pending_connections.lock().unwrap(); + match pending_connections_lock.entry(*node_id) { + hash_map::Entry::Occupied(mut entry) => { + let (tx, rx) = tokio::sync::oneshot::channel(); + entry.get_mut().push(tx); + Some(rx) + }, + hash_map::Entry::Vacant(entry) => { + entry.insert(Vec::new()); + None + }, + } + } + + fn propagate_result_to_subscribers(&self, node_id: &PublicKey, res: Result<(), Error>) { + // Send the result to any other tasks that might be waiting on it by now. 
+ let mut pending_connections_lock = self.pending_connections.lock().unwrap(); + if let Some(connection_ready_senders) = pending_connections_lock.remove(node_id) { + for sender in connection_ready_senders { + let _ = sender.send(res).map_err(|e| { + debug_assert!( + false, + "Failed to send connection result to subscribers: {:?}", + e + ); + log_error!( + self.logger, + "Failed to send connection result to subscribers: {:?}", + e + ); + }); } - }, - None => { - log_error!(logger, "Failed to connect to peer: {}@{}", node_id, addr); - Err(Error::ConnectionFailed) - }, + } } } diff --git a/src/error.rs b/src/error.rs index 0182b3092..c5234a6d4 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,6 +1,6 @@ use std::fmt; -#[derive(Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] /// An error that possibly needs to be handled by the user. pub enum Error { /// Returned when trying to start [`crate::Node`] while it is already running. diff --git a/src/lib.rs b/src/lib.rs index 0bfe03ce5..f6082d4d3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -125,7 +125,7 @@ use config::{ LDK_PAYMENT_RETRY_TIMEOUT, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; -use connection::{connect_peer_if_necessary, do_connect_peer}; +use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; @@ -189,6 +189,7 @@ pub struct Node { chain_monitor: Arc, output_sweeper: Arc, peer_manager: Arc, + connection_manager: Arc>>, keys_manager: Arc, network_graph: Arc, gossip_source: Arc, @@ -500,6 +501,7 @@ impl Node { } // Regularly reconnect to persisted peers. + let connect_cm = Arc::clone(&self.connection_manager); let connect_pm = Arc::clone(&self.peer_manager); let connect_logger = Arc::clone(&self.logger); let connect_peer_store = Arc::clone(&self.peer_store); @@ -520,11 +522,9 @@ impl Node { .collect::>(); for peer_info in connect_peer_store.list_peers().iter().filter(|info| !pm_peers.contains(&info.node_id)) { - let res = do_connect_peer( + let res = connect_cm.do_connect_peer( peer_info.node_id, peer_info.address.clone(), - Arc::clone(&connect_pm), - Arc::clone(&connect_logger), ).await; match res { Ok(_) => { @@ -873,14 +873,13 @@ impl Node { let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); + let con_cm = Arc::clone(&self.connection_manager); // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. tokio::task::block_in_place(move || { runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await }) })?; @@ -946,14 +945,13 @@ impl Node { let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); + let con_cm = Arc::clone(&self.connection_manager); // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. 
tokio::task::block_in_place(move || { runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await }) })?; @@ -1603,14 +1601,13 @@ impl Node { let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); + let con_cm = Arc::clone(&self.connection_manager); // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. tokio::task::block_in_place(move || { runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await }) })?; From be43848d2b3bc8984ee9f8ffb04e0cc1d9b11264 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 13 May 2024 10:39:41 +0200 Subject: [PATCH 24/89] Bump LDK to v0.0.123, lightning-liquidity to v0.1.0-alpha.4 .. to account for the new release --- Cargo.toml | 20 +++++++++----------- src/event.rs | 7 ++++++- src/io/utils.rs | 11 ++++++++++- tests/integration_tests_cln.rs | 4 ++-- 4 files changed, 27 insertions(+), 15 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index e88826aee..4c4422461 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,16 +28,14 @@ panic = 'abort' # Abort on panic default = [] [dependencies] -lightning = { version = "0.0.123-beta", features = ["std"] } -lightning-invoice = { version = "0.31.0-beta" } -lightning-net-tokio = { version = "0.0.123-beta" } -lightning-persister = { version = "0.0.123-beta" } -lightning-background-processor = { version = "0.0.123-beta", features = ["futures"] } -lightning-rapid-gossip-sync = { version = "0.0.123-beta" } -lightning-transaction-sync = { version = "0.0.123-beta", features = ["esplora-async-https", "time"] } -#lightning-liquidity = { version = "0.1.0-alpha.1", features = ["std"] } - -lightning-liquidity = { git = "https://github.com/tnull/lightning-liquidity", rev = "abf7088c0e03221c0f122e797f34802c9e99a3d4", features = ["std"] } +lightning = { version = "0.0.123", features = ["std"] } +lightning-invoice = { version = "0.31.0" } +lightning-net-tokio = { version = "0.0.123" } +lightning-persister = { version = "0.0.123" } +lightning-background-processor = { version = "0.0.123", features = ["futures"] } +lightning-rapid-gossip-sync = { version = "0.0.123" } +lightning-transaction-sync = { version = "0.0.123", features = ["esplora-async-https", "time"] } +lightning-liquidity = { version = "0.1.0-alpha.4", features = ["std"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std"] } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main" } @@ -79,7 +77,7 @@ prost = { version = "0.11.6", default-features = false} winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] -lightning = { version = "0.0.123-beta", features = ["std", "_test_utils"] } +lightning = { version = "0.0.123", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } electrum-client = { version = "0.15.1", default-features = true } bitcoincore-rpc = { version = "0.17.0", default-features = false } diff --git a/src/event.rs b/src/event.rs index cd11e41a8..acdf12e4c 100644 --- a/src/event.rs +++ b/src/event.rs @@ -706,7 +706,12 @@ where } }, 
LdkEvent::SpendableOutputs { outputs, channel_id } => { - self.output_sweeper.track_spendable_outputs(outputs, channel_id, true, None) + self.output_sweeper + .track_spendable_outputs(outputs, channel_id, true, None) + .unwrap_or_else(|_| { + log_error!(self.logger, "Failed to track spendable outputs"); + panic!("Failed to track spendable outputs"); + }); }, LdkEvent::OpenChannelRequest { temporary_channel_id, diff --git a/src/io/utils.rs b/src/io/utils.rs index 3a0429ed2..77cc56f55 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -263,7 +263,16 @@ where })?; let descriptors = vec![output.descriptor.clone()]; let spend_delay = Some(best_block.height + 2); - sweeper.track_spendable_outputs(descriptors, output.channel_id, true, spend_delay); + sweeper + .track_spendable_outputs(descriptors, output.channel_id, true, spend_delay) + .map_err(|_| { + log_error!(logger, "Failed to track spendable outputs. Aborting migration, will retry in the future."); + std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Failed to track spendable outputs. Aborting migration, will retry in the future.", + ) + })?; + if let Some(tracked_spendable_output) = sweeper.tracked_spendable_outputs().iter().find(|o| o.descriptor == output.descriptor) { diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index a5c11ad1b..dd4fefb4f 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -94,7 +94,7 @@ fn test_cln() { let mut rng = thread_rng(); let rand_label: String = (0..7).map(|_| rng.sample(Alphanumeric) as char).collect(); let cln_invoice = - cln_client.invoice(Some(2_500_000), &rand_label, &rand_label, None, None, None).unwrap(); + cln_client.invoice(Some(10_000_000), &rand_label, &rand_label, None, None, None).unwrap(); let parsed_invoice = Bolt11Invoice::from_str(&cln_invoice.bolt11).unwrap(); node.send_payment(&parsed_invoice).unwrap(); @@ -106,7 +106,7 @@ fn test_cln() { // Send a payment to LDK let rand_label: String = (0..7).map(|_| rng.sample(Alphanumeric) as char).collect(); - let ldk_invoice = node.receive_payment(2_500_000, &rand_label, 3600).unwrap(); + let ldk_invoice = node.receive_payment(10_000_000, &rand_label, 3600).unwrap(); cln_client.pay(&ldk_invoice.to_string(), Default::default()).unwrap(); common::expect_event!(node, PaymentReceived); From 4d7c9f3205d9fd31b27a9f4602e86fce996acd6a Mon Sep 17 00:00:00 2001 From: Srikanth Iyengar Date: Tue, 30 Apr 2024 23:02:36 +0530 Subject: [PATCH 25/89] Refactored necessary files to improve code quality * Removed a useless shadowing in builder.rs * Passed directly the closure itself instead of making a inline closure * Refactored usage of map which returns boolean to matches! --- src/builder.rs | 3 +-- src/event.rs | 2 +- src/gossip.rs | 12 +++--------- src/lib.rs | 4 ++-- src/payment_store.rs | 2 +- src/types.rs | 2 +- src/wallet.rs | 5 +---- 7 files changed, 10 insertions(+), 20 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 6b4da6b57..204088c6a 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -184,7 +184,6 @@ impl NodeBuilder { /// Creates a new builder instance from an [`Config`]. 
pub fn from_config(config: Config) -> Self { - let config = config; let entropy_source_config = None; let chain_data_source_config = None; let gossip_source_config = None; @@ -1034,7 +1033,7 @@ fn seed_bytes_from_config( match entropy_source_config { Some(EntropySourceConfig::SeedBytes(bytes)) => Ok(bytes.clone()), Some(EntropySourceConfig::SeedFile(seed_path)) => { - Ok(io::utils::read_or_generate_seed_file(&seed_path, Arc::clone(&logger)) + Ok(io::utils::read_or_generate_seed_file(seed_path, Arc::clone(&logger)) .map_err(|_| BuildError::InvalidSeedFile)?) }, Some(EntropySourceConfig::Bip39Mnemonic { mnemonic, passphrase }) => match passphrase { diff --git a/src/event.rs b/src/event.rs index acdf12e4c..6e9ab587c 100644 --- a/src/event.rs +++ b/src/event.rs @@ -176,7 +176,7 @@ where pub(crate) fn next_event(&self) -> Option { let locked_queue = self.queue.lock().unwrap(); - locked_queue.front().map(|e| e.clone()) + locked_queue.front().cloned() } pub(crate) async fn next_event_async(&self) -> Event { diff --git a/src/gossip.rs b/src/gossip.rs index 9fb5e6a84..de98d441e 100644 --- a/src/gossip.rs +++ b/src/gossip.rs @@ -39,19 +39,13 @@ impl GossipSource { } pub fn is_rgs(&self) -> bool { - if let Self::RapidGossipSync { .. } = self { - true - } else { - false - } + matches!(self, Self::RapidGossipSync { .. }) } pub fn as_gossip_sync(&self) -> GossipSync { match self { - Self::RapidGossipSync { gossip_sync, .. } => { - GossipSync::Rapid(Arc::clone(&gossip_sync)) - }, - Self::P2PNetwork { gossip_sync, .. } => GossipSync::P2P(Arc::clone(&gossip_sync)), + Self::RapidGossipSync { gossip_sync, .. } => GossipSync::Rapid(Arc::clone(gossip_sync)), + Self::P2PNetwork { gossip_sync, .. } => GossipSync::P2P(Arc::clone(gossip_sync)), } } diff --git a/src/lib.rs b/src/lib.rs index f6082d4d3..a803730c1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1711,7 +1711,7 @@ impl Node { .output_sweeper .tracked_spendable_outputs() .into_iter() - .map(|o| PendingSweepBalance::from_tracked_spendable_output(o)) + .map(PendingSweepBalance::from_tracked_spendable_output) .collect(); BalanceDetails { @@ -1772,7 +1772,7 @@ impl Node { // Now add all known-but-offline peers, too. 
for p in self.peer_store.list_peers() { - if peers.iter().take(connected_peers_len).find(|d| d.node_id == p.node_id).is_some() { + if peers.iter().take(connected_peers_len).any(|d| d.node_id == p.node_id) { continue; } diff --git a/src/payment_store.rs b/src/payment_store.rs index 12e331ad4..524984e9e 100644 --- a/src/payment_store.rs +++ b/src/payment_store.rs @@ -265,7 +265,7 @@ mod tests { let payment_store = PaymentStore::new(Vec::new(), Arc::clone(&store), logger); let hash = PaymentHash([42u8; 32]); - assert!(!payment_store.get(&hash).is_some()); + assert!(payment_store.get(&hash).is_none()); let store_key = hex_utils::to_string(&hash.0); assert!(store diff --git a/src/types.rs b/src/types.rs index bf467dc49..68ed36361 100644 --- a/src/types.rs +++ b/src/types.rs @@ -282,7 +282,7 @@ impl From for ChannelDetails { ChannelDetails { channel_id: value.channel_id, counterparty_node_id: value.counterparty.node_id, - funding_txo: value.funding_txo.and_then(|o| Some(o.into_bitcoin_outpoint())), + funding_txo: value.funding_txo.map(|o| o.into_bitcoin_outpoint()), channel_value_sats: value.channel_value_satoshis, unspendable_punishment_reserve: value.unspendable_punishment_reserve, user_channel_id: UserChannelId(value.user_channel_id), diff --git a/src/wallet.rs b/src/wallet.rs index 2b01d1b49..674cb6786 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -92,10 +92,7 @@ where e ); let sync_options = SyncOptions { progress: None }; - wallet_lock - .sync(&self.blockchain, sync_options) - .await - .map_err(|e| From::from(e)) + wallet_lock.sync(&self.blockchain, sync_options).await.map_err(From::from) }, _ => { log_error!(self.logger, "Sync failed due to Esplora error: {}", e); From b784f0bdbb3e8f8aa09b1bd454974956575b0284 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 5 Mar 2024 11:58:42 +0100 Subject: [PATCH 26/89] Move BOLT11 payments API to `Bolt11PaymentHandler` --- README.md | 4 +- .../lightningdevkit/ldknode/LibraryTest.kt | 4 +- bindings/ldk_node.udl | 36 +- bindings/python/src/ldk_node/test_ldk_node.py | 4 +- src/lib.rs | 503 ++--------------- src/payment/bolt11.rs | 517 ++++++++++++++++++ src/payment/mod.rs | 5 + src/uniffi_types.rs | 4 +- tests/common/mod.rs | 41 +- tests/integration_tests_cln.rs | 4 +- tests/integration_tests_rust.rs | 4 +- 11 files changed, 621 insertions(+), 505 deletions(-) create mode 100644 src/payment/bolt11.rs create mode 100644 src/payment/mod.rs diff --git a/README.md b/README.md index 270bf25a7..eccb85c40 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ A ready-to-go Lightning node library built using [LDK][ldk] and [BDK][bdk]. LDK Node is a self-custodial Lightning node in library form. Its central goal is to provide a small, simple, and straightforward interface that enables users to easily set up and run a Lightning node with an integrated on-chain wallet. While minimalism is at its core, LDK Node aims to be sufficiently modular and configurable to be useful for a variety of use cases. ## Getting Started -The primary abstraction of the library is the [`Node`][api_docs_node], which can be retrieved by setting up and configuring a [`Builder`][api_docs_builder] to your liking and calling one of the `build` methods. `Node` can then be controlled via commands such as `start`, `stop`, `connect_open_channel`, `send_payment`, etc. +The primary abstraction of the library is the [`Node`][api_docs_node], which can be retrieved by setting up and configuring a [`Builder`][api_docs_builder] to your liking and calling one of the `build` methods. 
`Node` can then be controlled via commands such as `start`, `stop`, `connect_open_channel`, `send`, etc. ```rust use ldk_node::Builder; @@ -44,7 +44,7 @@ fn main() { node.event_handled(); let invoice = Bolt11Invoice::from_str("INVOICE_STR").unwrap(); - node.send_payment(&invoice).unwrap(); + node.bolt11_payment().send(&invoice).unwrap(); node.stop().unwrap(); } diff --git a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt index 39c19821d..cacd33bd6 100644 --- a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt +++ b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt @@ -222,9 +222,9 @@ class LibraryTest { else -> return } - val invoice = node2.receivePayment(2500000u, "asdf", 9217u) + val invoice = node2.bolt11Payment().receive(2500000u, "asdf", 9217u) - node1.sendPayment(invoice) + node1.bolt11Payment().send(invoice) val paymentSuccessfulEvent = node1.waitNextEvent() println("Got event: $paymentSuccessfulEvent") diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index c39d841cb..d896b2518 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -53,6 +53,7 @@ interface Node { void event_handled(); PublicKey node_id(); sequence? listening_addresses(); + Bolt11Payment bolt11_payment(); [Throws=NodeError] Address new_onchain_address(); [Throws=NodeError] @@ -72,25 +73,9 @@ interface Node { [Throws=NodeError] void sync_wallets(); [Throws=NodeError] - PaymentHash send_payment([ByRef]Bolt11Invoice invoice); - [Throws=NodeError] - PaymentHash send_payment_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); - [Throws=NodeError] PaymentHash send_spontaneous_payment(u64 amount_msat, PublicKey node_id); [Throws=NodeError] - void send_payment_probes([ByRef]Bolt11Invoice invoice); - [Throws=NodeError] void send_spontaneous_payment_probes(u64 amount_msat, PublicKey node_id); - [Throws=NodeError] - void send_payment_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); - [Throws=NodeError] - Bolt11Invoice receive_payment(u64 amount_msat, [ByRef]string description, u32 expiry_secs); - [Throws=NodeError] - Bolt11Invoice receive_variable_amount_payment([ByRef]string description, u32 expiry_secs); - [Throws=NodeError] - Bolt11Invoice receive_payment_via_jit_channel(u64 amount_msat, [ByRef]string description, u32 expiry_secs, u64? max_lsp_fee_limit_msat); - [Throws=NodeError] - Bolt11Invoice receive_variable_amount_payment_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? max_proportional_lsp_fee_limit_ppm_msat); PaymentDetails? 
payment([ByRef]PaymentHash payment_hash); [Throws=NodeError] void remove_payment([ByRef]PaymentHash payment_hash); @@ -103,6 +88,25 @@ interface Node { boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); }; +interface Bolt11Payment { + [Throws=NodeError] + PaymentHash send([ByRef]Bolt11Invoice invoice); + [Throws=NodeError] + PaymentHash send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + [Throws=NodeError] + void send_probes([ByRef]Bolt11Invoice invoice); + [Throws=NodeError] + void send_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + [Throws=NodeError] + Bolt11Invoice receive(u64 amount_msat, [ByRef]string description, u32 expiry_secs); + [Throws=NodeError] + Bolt11Invoice receive_variable_amount([ByRef]string description, u32 expiry_secs); + [Throws=NodeError] + Bolt11Invoice receive_via_jit_channel(u64 amount_msat, [ByRef]string description, u32 expiry_secs, u64? max_lsp_fee_limit_msat); + [Throws=NodeError] + Bolt11Invoice receive_variable_amount_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? max_proportional_lsp_fee_limit_ppm_msat); +}; + [Error] enum NodeError { "AlreadyRunning", diff --git a/bindings/python/src/ldk_node/test_ldk_node.py b/bindings/python/src/ldk_node/test_ldk_node.py index 864ef7b43..468800efe 100644 --- a/bindings/python/src/ldk_node/test_ldk_node.py +++ b/bindings/python/src/ldk_node/test_ldk_node.py @@ -185,8 +185,8 @@ def test_channel_full_cycle(self): print("EVENT:", channel_ready_event_2) node_2.event_handled() - invoice = node_2.receive_payment(2500000, "asdf", 9217) - node_1.send_payment(invoice) + invoice = node_2.bolt11_payment().receive(2500000, "asdf", 9217) + node_1.bolt11_payment().send(invoice) payment_successful_event_1 = node_1.wait_next_event() assert isinstance(payment_successful_event_1, Event.PAYMENT_SUCCESSFUL) diff --git a/src/lib.rs b/src/lib.rs index a803730c1..03188806e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,7 +23,7 @@ //! The primary abstraction of the library is the [`Node`], which can be retrieved by setting up //! and configuring a [`Builder`] to your liking and calling [`build`]. `Node` can then be //! controlled via commands such as [`start`], [`stop`], [`connect_open_channel`], -//! [`send_payment`], etc.: +//! [`send`], etc.: //! //! ```no_run //! use ldk_node::Builder; @@ -56,7 +56,7 @@ //! node.event_handled(); //! //! let invoice = Bolt11Invoice::from_str("INVOICE_STR").unwrap(); -//! node.send_payment(&invoice).unwrap(); +//! node.bolt11_payment().send(&invoice).unwrap(); //! //! node.stop().unwrap(); //! } @@ -66,7 +66,7 @@ //! [`start`]: Node::start //! [`stop`]: Node::stop //! [`connect_open_channel`]: Node::connect_open_channel -//! [`send_payment`]: Node::send_payment +//! [`send`]: Bolt11Payment::send //! 
#![cfg_attr(not(feature = "uniffi"), deny(missing_docs))] #![deny(rustdoc::broken_intra_doc_links)] @@ -88,6 +88,7 @@ pub mod io; mod liquidity; mod logger; mod message_handler; +pub mod payment; mod payment_store; mod peer_store; mod sweep; @@ -129,6 +130,7 @@ use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; +use payment::Bolt11Payment; use payment_store::PaymentStore; pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; use peer_store::{PeerInfo, PeerStore}; @@ -155,11 +157,8 @@ use lightning_background_processor::process_events_async; use lightning_transaction_sync::EsploraSyncClient; use lightning::routing::router::{PaymentParameters, RouteParameters}; -use lightning_invoice::{payment, Bolt11Invoice, Currency}; -use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; - use bitcoin::{Address, Txid}; use rand::Rng; @@ -818,6 +817,42 @@ impl Node { self.config.listening_addresses.clone() } + /// Returns a payment handler allowing to create and pay [BOLT 11] invoices. + /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + #[cfg(not(feature = "uniffi"))] + pub fn bolt11_payment(&self) -> Bolt11Payment { + Bolt11Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), + Arc::clone(&self.keys_manager), + self.liquidity_source.clone(), + Arc::clone(&self.payment_store), + Arc::clone(&self.peer_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + ) + } + + /// Returns a payment handler allowing to create and pay [BOLT 11] invoices. + /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + #[cfg(feature = "uniffi")] + pub fn bolt11_payment(&self) -> Arc { + Arc::new(Bolt11Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), + Arc::clone(&self.keys_manager), + self.liquidity_source.clone(), + Arc::clone(&self.payment_store), + Arc::clone(&self.peer_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + )) + } + /// Retrieve a new on-chain/funding address. pub fn new_onchain_address(&self) -> Result { let funding_address = self.wallet.get_new_address()?; @@ -1104,190 +1139,6 @@ impl Node { } } - /// Send a payment given an invoice. - pub fn send_payment(&self, invoice: &Bolt11Invoice) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (payment_hash, recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". 
Please use send_payment_using_amount instead."); - Error::InvalidInvoice - })?; - - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: an invoice must not be paid twice."); - return Err(Error::DuplicatePayment); - } - } - - let payment_secret = Some(*invoice.payment_secret()); - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); - let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - - match self.channel_manager.send_payment( - payment_hash, - recipient_onion, - payment_id, - route_params, - retry_strategy, - ) { - Ok(()) => { - let payee_pubkey = invoice.recover_payee_pub_key(); - let amt_msat = invoice.amount_milli_satoshis().unwrap(); - log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); - - let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - - /// Send a payment given an invoice and an amount in millisatoshi. - /// - /// This will fail if the amount given is less than the value required by the given invoice. - /// - /// This can be used to pay a so-called "zero-amount" invoice, i.e., an invoice that leaves the - /// amount paid to be determined by the user. 
- pub fn send_payment_using_amount( - &self, invoice: &Bolt11Invoice, amount_msat: u64, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { - if amount_msat < invoice_amount_msat { - log_error!( - self.logger, - "Failed to pay as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); - return Err(Error::InvalidAmount); - } - } - - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: an invoice must not be paid twice."); - return Err(Error::DuplicatePayment); - } - } - - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); - let payment_secret = invoice.payment_secret(); - let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); - let mut payment_params = PaymentParameters::from_node_id( - invoice.recover_payee_pub_key(), - invoice.min_final_cltv_expiry_delta() as u32, - ) - .with_expiry_time(expiry_time.as_secs()) - .with_route_hints(invoice.route_hints()) - .map_err(|_| Error::InvalidInvoice)?; - if let Some(features) = invoice.features() { - payment_params = payment_params - .with_bolt11_features(features.clone()) - .map_err(|_| Error::InvalidInvoice)?; - } - let route_params = - RouteParameters::from_payment_params_and_value(payment_params, amount_msat); - - let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let recipient_fields = RecipientOnionFields::secret_only(*payment_secret); - - match self.channel_manager.send_payment( - payment_hash, - recipient_fields, - payment_id, - route_params, - retry_strategy, - ) { - Ok(_payment_id) => { - let payee_pubkey = invoice.recover_payee_pub_key(); - log_info!( - self.logger, - "Initiated sending {} msat to {}", - amount_msat, - payee_pubkey - ); - - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - /// Send a spontaneous, aka. "keysend", payment pub fn send_spontaneous_payment( &self, amount_msat: u64, node_id: PublicKey, @@ -1364,46 +1215,10 @@ impl Node { } } - /// Sends payment probes over all paths of a route that would be used to pay the given invoice. - /// - /// This may be used to send "pre-flight" probes, i.e., to train our scorer before conducting - /// the actual payment. Note this is only useful if there likely is sufficient time for the - /// probe to settle before sending out the actual payment, e.g., when waiting for user - /// confirmation in a wallet UI. 
- /// - /// Otherwise, there is a chance the probe could take up some liquidity needed to complete the - /// actual payment. Users should therefore be cautious and might avoid sending probes if - /// liquidity is scarce and/or they don't expect the probe to return before they send the - /// payment. To mitigate this issue, channels with available liquidity less than the required - /// amount times [`Config::probing_liquidity_limit_multiplier`] won't be used to send - /// pre-flight probes. - pub fn send_payment_probes(&self, invoice: &Bolt11Invoice) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (_payment_hash, _recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". Please use send_payment_probes_using_amount instead."); - Error::InvalidInvoice - })?; - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - - self.channel_manager - .send_preflight_probes(route_params, liquidity_limit_multiplier) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - /// Sends payment probes over all paths of a route that would be used to pay the given /// amount to the given `node_id`. /// - /// See [`Self::send_payment_probes`] for more information. + /// See [`Bolt11Payment::send_probes`] for more information. pub fn send_spontaneous_payment_probes( &self, amount_msat: u64, node_id: PublicKey, ) -> Result<(), Error> { @@ -1430,240 +1245,6 @@ impl Node { Ok(()) } - /// Sends payment probes over all paths of a route that would be used to pay the given - /// zero-value invoice using the given amount. - /// - /// This can be used to send pre-flight probes for a so-called "zero-amount" invoice, i.e., an - /// invoice that leaves the amount paid to be determined by the user. - /// - /// See [`Self::send_payment_probes`] for more information. - pub fn send_payment_probes_using_amount( - &self, invoice: &Bolt11Invoice, amount_msat: u64, - ) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (_payment_hash, _recipient_onion, route_params) = if let Some(invoice_amount_msat) = - invoice.amount_milli_satoshis() - { - if amount_msat < invoice_amount_msat { - log_error!( - self.logger, - "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); - return Err(Error::InvalidAmount); - } - - payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); - Error::InvalidInvoice - })? - } else { - payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); - Error::InvalidInvoice - })? 
- }; - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - - self.channel_manager - .send_preflight_probes(route_params, liquidity_limit_multiplier) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - - /// Returns a payable invoice that can be used to request and receive a payment of the amount - /// given. - pub fn receive_payment( - &self, amount_msat: u64, description: &str, expiry_secs: u32, - ) -> Result { - self.receive_payment_inner(Some(amount_msat), description, expiry_secs) - } - - /// Returns a payable invoice that can be used to request and receive a payment for which the - /// amount is to be determined by the user, also known as a "zero-amount" invoice. - pub fn receive_variable_amount_payment( - &self, description: &str, expiry_secs: u32, - ) -> Result { - self.receive_payment_inner(None, description, expiry_secs) - } - - fn receive_payment_inner( - &self, amount_msat: Option, description: &str, expiry_secs: u32, - ) -> Result { - let currency = Currency::from(self.config.network); - let keys_manager = Arc::clone(&self.keys_manager); - let invoice = match lightning_invoice::utils::create_invoice_from_channelmanager( - &self.channel_manager, - keys_manager, - Arc::clone(&self.logger), - currency, - amount_msat, - description.to_string(), - expiry_secs, - None, - ) { - Ok(inv) => { - log_info!(self.logger, "Invoice created: {}", inv); - inv - }, - Err(e) => { - log_error!(self.logger, "Failed to create invoice: {}", e); - return Err(Error::InvoiceCreationFailed); - }, - }; - - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), - amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - - Ok(invoice) - } - - /// Returns a payable invoice that can be used to request a payment of the amount given and - /// receive it via a newly created just-in-time (JIT) channel. - /// - /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel - /// to us, supplying just-in-time inbound liquidity. - /// - /// If set, `max_total_lsp_fee_limit_msat` will limit how much fee we allow the LSP to take for opening the - /// channel to us. We'll use its cheapest offer otherwise. - /// - /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md - pub fn receive_payment_via_jit_channel( - &self, amount_msat: u64, description: &str, expiry_secs: u32, - max_total_lsp_fee_limit_msat: Option, - ) -> Result { - self.receive_payment_via_jit_channel_inner( - Some(amount_msat), - description, - expiry_secs, - max_total_lsp_fee_limit_msat, - None, - ) - } - - /// Returns a payable invoice that can be used to request a variable amount payment (also known - /// as "zero-amount" invoice) and receive it via a newly created just-in-time (JIT) channel. - /// - /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel - /// to us, supplying just-in-time inbound liquidity. - /// - /// If set, `max_proportional_lsp_fee_limit_ppm_msat` will limit how much proportional fee, in - /// parts-per-million millisatoshis, we allow the LSP to take for opening the channel to us. - /// We'll use its cheapest offer otherwise. 
- /// - /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md - pub fn receive_variable_amount_payment_via_jit_channel( - &self, description: &str, expiry_secs: u32, - max_proportional_lsp_fee_limit_ppm_msat: Option, - ) -> Result { - self.receive_payment_via_jit_channel_inner( - None, - description, - expiry_secs, - None, - max_proportional_lsp_fee_limit_ppm_msat, - ) - } - - fn receive_payment_via_jit_channel_inner( - &self, amount_msat: Option, description: &str, expiry_secs: u32, - max_total_lsp_fee_limit_msat: Option, - max_proportional_lsp_fee_limit_ppm_msat: Option, - ) -> Result { - let liquidity_source = - self.liquidity_source.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; - - let (node_id, address) = liquidity_source - .get_liquidity_source_details() - .ok_or(Error::LiquiditySourceUnavailable)?; - - let rt_lock = self.runtime.read().unwrap(); - let runtime = rt_lock.as_ref().unwrap(); - - let peer_info = PeerInfo { node_id, address }; - - let con_node_id = peer_info.node_id; - let con_addr = peer_info.address.clone(); - let con_cm = Arc::clone(&self.connection_manager); - - // We need to use our main runtime here as a local runtime might not be around to poll - // connection futures going forward. - tokio::task::block_in_place(move || { - runtime.block_on(async move { - con_cm.connect_peer_if_necessary(con_node_id, con_addr).await - }) - })?; - - log_info!(self.logger, "Connected to LSP {}@{}. ", peer_info.node_id, peer_info.address); - - let liquidity_source = Arc::clone(&liquidity_source); - let (invoice, lsp_total_opening_fee, lsp_prop_opening_fee) = - tokio::task::block_in_place(move || { - runtime.block_on(async move { - if let Some(amount_msat) = amount_msat { - liquidity_source - .lsps2_receive_to_jit_channel( - amount_msat, - description, - expiry_secs, - max_total_lsp_fee_limit_msat, - ) - .await - .map(|(invoice, total_fee)| (invoice, Some(total_fee), None)) - } else { - liquidity_source - .lsps2_receive_variable_amount_to_jit_channel( - description, - expiry_secs, - max_proportional_lsp_fee_limit_ppm_msat, - ) - .await - .map(|(invoice, prop_fee)| (invoice, None, Some(prop_fee))) - } - }) - })?; - - // Register payment in payment store. - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - let lsp_fee_limits = Some(LSPFeeLimits { - max_total_opening_fee_msat: lsp_total_opening_fee, - max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, - }); - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), - amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - lsp_fee_limits, - }; - - self.payment_store.insert(payment)?; - - // Persist LSP peer to make sure we reconnect on restart. - self.peer_store.add_peer(peer_info)?; - - Ok(invoice) - } - /// Retrieve the details of a specific payment with the given hash. /// /// Returns `Some` if the payment was known and `None` otherwise. diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs new file mode 100644 index 000000000..8ac15fe0a --- /dev/null +++ b/src/payment/bolt11.rs @@ -0,0 +1,517 @@ +//! Holds a payment handler allowing to create and pay [BOLT 11] invoices. +//! +//! 
[BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + +use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; +use crate::connection::ConnectionManager; +use crate::error::Error; +use crate::liquidity::LiquiditySource; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment_store::{ + LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore, +}; +use crate::peer_store::{PeerInfo, PeerStore}; +use crate::types::{ChannelManager, KeysManager}; + +use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; +use lightning::ln::PaymentHash; +use lightning::routing::router::{PaymentParameters, RouteParameters}; + +use lightning_invoice::{payment, Bolt11Invoice, Currency}; + +use bitcoin::hashes::Hash; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to create and pay [BOLT 11] invoices. +/// +/// Should be retrieved by calling [`Node::bolt11_payment`]. +/// +/// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md +/// [`Node::bolt11_payment`]: crate::Node::bolt11_payment +pub struct Bolt11Payment { + runtime: Arc>>, + channel_manager: Arc, + connection_manager: Arc>>, + keys_manager: Arc, + liquidity_source: Option>>>, + payment_store: Arc>>, + peer_store: Arc>>, + config: Arc, + logger: Arc, +} + +impl Bolt11Payment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, + connection_manager: Arc>>, + keys_manager: Arc, + liquidity_source: Option>>>, + payment_store: Arc>>, + peer_store: Arc>>, config: Arc, + logger: Arc, + ) -> Self { + Self { + runtime, + channel_manager, + connection_manager, + keys_manager, + liquidity_source, + payment_store, + peer_store, + config, + logger, + } + } + + /// Send a payment given an invoice. + pub fn send(&self, invoice: &Bolt11Invoice) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (payment_hash, recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". 
Please use send_using_amount instead."); + Error::InvalidInvoice + })?; + + if let Some(payment) = self.payment_store.get(&payment_hash) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: an invoice must not be paid twice."); + return Err(Error::DuplicatePayment); + } + } + + let payment_secret = Some(*invoice.payment_secret()); + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + + match self.channel_manager.send_payment( + payment_hash, + recipient_onion, + payment_id, + route_params, + retry_strategy, + ) { + Ok(()) => { + let payee_pubkey = invoice.recover_payee_pub_key(); + let amt_msat = invoice.amount_milli_satoshis().unwrap(); + log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); + + let payment = PaymentDetails { + preimage: None, + hash: payment_hash, + secret: payment_secret, + amount_msat: invoice.amount_milli_satoshis(), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + self.payment_store.insert(payment)?; + + Ok(payment_hash) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let payment = PaymentDetails { + preimage: None, + hash: payment_hash, + secret: payment_secret, + amount_msat: invoice.amount_milli_satoshis(), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Failed, + lsp_fee_limits: None, + }; + + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Send a payment given an invoice and an amount in millisatoshi. + /// + /// This will fail if the amount given is less than the value required by the given invoice. + /// + /// This can be used to pay a so-called "zero-amount" invoice, i.e., an invoice that leaves the + /// amount paid to be determined by the user. 
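A minimal caller-side sketch of the amount check described in the doc comment above, as implemented by `send_using_amount` directly below. It assumes `node_a` and `node_b` are started, connected `ldk_node::Node` instances with a usable channel; description, expiry, and amounts are illustrative only.

```rust
use ldk_node::NodeError;

// `node_b` issues a regular invoice over 2_500_000 msat.
let invoice = node_b.bolt11_payment().receive(2_500_000, "coffee", 3600).unwrap();

// Paying less than the invoice amount is rejected ...
assert_eq!(
	Err(NodeError::InvalidAmount),
	node_a.bolt11_payment().send_using_amount(&invoice, 2_499_999)
);

// ... while overpaying is allowed.
let _payment_hash = node_a.bolt11_payment().send_using_amount(&invoice, 2_600_000).unwrap();
```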
+ pub fn send_using_amount( + &self, invoice: &Bolt11Invoice, amount_msat: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { + if amount_msat < invoice_amount_msat { + log_error!( + self.logger, + "Failed to pay as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + } + + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + if let Some(payment) = self.payment_store.get(&payment_hash) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: an invoice must not be paid twice."); + return Err(Error::DuplicatePayment); + } + } + + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + let payment_secret = invoice.payment_secret(); + let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); + let mut payment_params = PaymentParameters::from_node_id( + invoice.recover_payee_pub_key(), + invoice.min_final_cltv_expiry_delta() as u32, + ) + .with_expiry_time(expiry_time.as_secs()) + .with_route_hints(invoice.route_hints()) + .map_err(|_| Error::InvalidInvoice)?; + if let Some(features) = invoice.features() { + payment_params = payment_params + .with_bolt11_features(features.clone()) + .map_err(|_| Error::InvalidInvoice)?; + } + let route_params = + RouteParameters::from_payment_params_and_value(payment_params, amount_msat); + + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let recipient_fields = RecipientOnionFields::secret_only(*payment_secret); + + match self.channel_manager.send_payment( + payment_hash, + recipient_fields, + payment_id, + route_params, + retry_strategy, + ) { + Ok(_payment_id) => { + let payee_pubkey = invoice.recover_payee_pub_key(); + log_info!( + self.logger, + "Initiated sending {} msat to {}", + amount_msat, + payee_pubkey + ); + + let payment = PaymentDetails { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + amount_msat: Some(amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + self.payment_store.insert(payment)?; + + Ok(payment_hash) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let payment = PaymentDetails { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + amount_msat: Some(amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Failed, + lsp_fee_limits: None, + }; + self.payment_store.insert(payment)?; + + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Returns a payable invoice that can be used to request and receive a payment of the amount + /// given. + pub fn receive( + &self, amount_msat: u64, description: &str, expiry_secs: u32, + ) -> Result { + self.receive_inner(Some(amount_msat), description, expiry_secs) + } + + /// Returns a payable invoice that can be used to request and receive a payment for which the + /// amount is to be determined by the user, also known as a "zero-amount" invoice. 
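Combined with `send_using_amount` above, the zero-amount flow might look as follows. This is a minimal sketch, again assuming `node_a` and `node_b` are started nodes with an open channel; description, expiry, and the chosen amount are illustrative.

```rust
use ldk_node::NodeError;

// The recipient hands out an invoice without an amount ...
let invoice = node_b.bolt11_payment().receive_variable_amount("tip jar", 3600).unwrap();

// ... a plain `send` is rejected for such invoices ...
assert_eq!(Err(NodeError::InvalidInvoice), node_a.bolt11_payment().send(&invoice));

// ... so the sender supplies the amount explicitly.
let _payment_hash = node_a.bolt11_payment().send_using_amount(&invoice, 1_000_000).unwrap();
```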
+ pub fn receive_variable_amount( + &self, description: &str, expiry_secs: u32, + ) -> Result { + self.receive_inner(None, description, expiry_secs) + } + + fn receive_inner( + &self, amount_msat: Option, description: &str, expiry_secs: u32, + ) -> Result { + let currency = Currency::from(self.config.network); + let keys_manager = Arc::clone(&self.keys_manager); + let invoice = match lightning_invoice::utils::create_invoice_from_channelmanager( + &self.channel_manager, + keys_manager, + Arc::clone(&self.logger), + currency, + amount_msat, + description.to_string(), + expiry_secs, + None, + ) { + Ok(inv) => { + log_info!(self.logger, "Invoice created: {}", inv); + inv + }, + Err(e) => { + log_error!(self.logger, "Failed to create invoice: {}", e); + return Err(Error::InvoiceCreationFailed); + }, + }; + + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment = PaymentDetails { + hash: payment_hash, + preimage: None, + secret: Some(invoice.payment_secret().clone()), + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + + self.payment_store.insert(payment)?; + + Ok(invoice) + } + + /// Returns a payable invoice that can be used to request a payment of the amount given and + /// receive it via a newly created just-in-time (JIT) channel. + /// + /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel + /// to us, supplying just-in-time inbound liquidity. + /// + /// If set, `max_total_lsp_fee_limit_msat` will limit how much fee we allow the LSP to take for opening the + /// channel to us. We'll use its cheapest offer otherwise. + /// + /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + pub fn receive_via_jit_channel( + &self, amount_msat: u64, description: &str, expiry_secs: u32, + max_total_lsp_fee_limit_msat: Option, + ) -> Result { + self.receive_via_jit_channel_inner( + Some(amount_msat), + description, + expiry_secs, + max_total_lsp_fee_limit_msat, + None, + ) + } + + /// Returns a payable invoice that can be used to request a variable amount payment (also known + /// as "zero-amount" invoice) and receive it via a newly created just-in-time (JIT) channel. + /// + /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel + /// to us, supplying just-in-time inbound liquidity. + /// + /// If set, `max_proportional_lsp_fee_limit_ppm_msat` will limit how much proportional fee, in + /// parts-per-million millisatoshis, we allow the LSP to take for opening the channel to us. + /// We'll use its cheapest offer otherwise. 
+ /// + /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + pub fn receive_variable_amount_via_jit_channel( + &self, description: &str, expiry_secs: u32, + max_proportional_lsp_fee_limit_ppm_msat: Option, + ) -> Result { + self.receive_via_jit_channel_inner( + None, + description, + expiry_secs, + None, + max_proportional_lsp_fee_limit_ppm_msat, + ) + } + + fn receive_via_jit_channel_inner( + &self, amount_msat: Option, description: &str, expiry_secs: u32, + max_total_lsp_fee_limit_msat: Option, + max_proportional_lsp_fee_limit_ppm_msat: Option, + ) -> Result { + let liquidity_source = + self.liquidity_source.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; + + let (node_id, address) = liquidity_source + .get_liquidity_source_details() + .ok_or(Error::LiquiditySourceUnavailable)?; + + let rt_lock = self.runtime.read().unwrap(); + let runtime = rt_lock.as_ref().unwrap(); + + let peer_info = PeerInfo { node_id, address }; + + let con_node_id = peer_info.node_id; + let con_addr = peer_info.address.clone(); + let con_cm = Arc::clone(&self.connection_manager); + + // We need to use our main runtime here as a local runtime might not be around to poll + // connection futures going forward. + tokio::task::block_in_place(move || { + runtime.block_on(async move { + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await + }) + })?; + + log_info!(self.logger, "Connected to LSP {}@{}. ", peer_info.node_id, peer_info.address); + + let liquidity_source = Arc::clone(&liquidity_source); + let (invoice, lsp_total_opening_fee, lsp_prop_opening_fee) = + tokio::task::block_in_place(move || { + runtime.block_on(async move { + if let Some(amount_msat) = amount_msat { + liquidity_source + .lsps2_receive_to_jit_channel( + amount_msat, + description, + expiry_secs, + max_total_lsp_fee_limit_msat, + ) + .await + .map(|(invoice, total_fee)| (invoice, Some(total_fee), None)) + } else { + liquidity_source + .lsps2_receive_variable_amount_to_jit_channel( + description, + expiry_secs, + max_proportional_lsp_fee_limit_ppm_msat, + ) + .await + .map(|(invoice, prop_fee)| (invoice, None, Some(prop_fee))) + } + }) + })?; + + // Register payment in payment store. + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let lsp_fee_limits = Some(LSPFeeLimits { + max_total_opening_fee_msat: lsp_total_opening_fee, + max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, + }); + let payment = PaymentDetails { + hash: payment_hash, + preimage: None, + secret: Some(invoice.payment_secret().clone()), + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits, + }; + + self.payment_store.insert(payment)?; + + // Persist LSP peer to make sure we reconnect on restart. + self.peer_store.add_peer(peer_info)?; + + Ok(invoice) + } + + /// Sends payment probes over all paths of a route that would be used to pay the given invoice. + /// + /// This may be used to send "pre-flight" probes, i.e., to train our scorer before conducting + /// the actual payment. Note this is only useful if there likely is sufficient time for the + /// probe to settle before sending out the actual payment, e.g., when waiting for user + /// confirmation in a wallet UI. + /// + /// Otherwise, there is a chance the probe could take up some liquidity needed to complete the + /// actual payment. 
Users should therefore be cautious and might avoid sending probes if + /// liquidity is scarce and/or they don't expect the probe to return before they send the + /// payment. To mitigate this issue, channels with available liquidity less than the required + /// amount times [`Config::probing_liquidity_limit_multiplier`] won't be used to send + /// pre-flight probes. + pub fn send_probes(&self, invoice: &Bolt11Invoice) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (_payment_hash, _recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". Please use send_probes_using_amount instead."); + Error::InvalidInvoice + })?; + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + + self.channel_manager + .send_preflight_probes(route_params, liquidity_limit_multiplier) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } + + /// Sends payment probes over all paths of a route that would be used to pay the given + /// zero-value invoice using the given amount. + /// + /// This can be used to send pre-flight probes for a so-called "zero-amount" invoice, i.e., an + /// invoice that leaves the amount paid to be determined by the user. + /// + /// See [`Self::send_probes`] for more information. + pub fn send_probes_using_amount( + &self, invoice: &Bolt11Invoice, amount_msat: u64, + ) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (_payment_hash, _recipient_onion, route_params) = if let Some(invoice_amount_msat) = + invoice.amount_milli_satoshis() + { + if amount_msat < invoice_amount_msat { + log_error!( + self.logger, + "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + + payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); + Error::InvalidInvoice + })? + } else { + payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); + Error::InvalidInvoice + })? + }; + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + + self.channel_manager + .send_preflight_probes(route_params, liquidity_limit_multiplier) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } +} diff --git a/src/payment/mod.rs b/src/payment/mod.rs new file mode 100644 index 000000000..adbe4df7b --- /dev/null +++ b/src/payment/mod.rs @@ -0,0 +1,5 @@ +//! Handlers for different types of payments. 
+ +mod bolt11; + +pub use bolt11::Bolt11Payment; diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 0cef5d682..959c6e9ed 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -3,6 +3,8 @@ pub use lightning::ln::ChannelId; pub use lightning::ln::PaymentSecret; pub use lightning::util::string::UntrustedString; +pub use lightning_invoice::Bolt11Invoice; + pub use bitcoin::{BlockHash, Network, OutPoint}; pub use bip39::Mnemonic; @@ -18,7 +20,7 @@ use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; use bitcoin::{Address, Txid}; use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning_invoice::{Bolt11Invoice, SignedRawBolt11Invoice}; +use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; use std::str::FromStr; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index b72ecdc75..c57ed4dbf 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -415,13 +415,13 @@ pub(crate) fn do_channel_full_cycle( let user_channel_id = expect_channel_ready_event!(node_b, node_a.node_id()); - println!("\nB receive_payment"); + println!("\nB receive"); let invoice_amount_1_msat = 2500_000; - let invoice = node_b.receive_payment(invoice_amount_1_msat, &"asdf", 9217).unwrap(); + let invoice = node_b.bolt11_payment().receive(invoice_amount_1_msat, &"asdf", 9217).unwrap(); - println!("\nA send_payment"); - let payment_hash = node_a.send_payment(&invoice).unwrap(); - assert_eq!(node_a.send_payment(&invoice), Err(NodeError::DuplicatePayment)); + println!("\nA send"); + let payment_hash = node_a.bolt11_payment().send(&invoice).unwrap(); + assert_eq!(node_a.bolt11_payment().send(&invoice), Err(NodeError::DuplicatePayment)); assert_eq!(node_a.list_payments().first().unwrap().hash, payment_hash); @@ -451,7 +451,7 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); // Assert we fail duplicate outbound payments and check the status hasn't changed. 
- assert_eq!(Err(NodeError::DuplicatePayment), node_a.send_payment(&invoice)); + assert_eq!(Err(NodeError::DuplicatePayment), node_a.bolt11_payment().send(&invoice)); assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); @@ -461,20 +461,21 @@ pub(crate) fn do_channel_full_cycle( // Test under-/overpayment let invoice_amount_2_msat = 2500_000; - let invoice = node_b.receive_payment(invoice_amount_2_msat, &"asdf", 9217).unwrap(); + let invoice = node_b.bolt11_payment().receive(invoice_amount_2_msat, &"asdf", 9217).unwrap(); let underpaid_amount = invoice_amount_2_msat - 1; assert_eq!( Err(NodeError::InvalidAmount), - node_a.send_payment_using_amount(&invoice, underpaid_amount) + node_a.bolt11_payment().send_using_amount(&invoice, underpaid_amount) ); - println!("\nB overpaid receive_payment"); - let invoice = node_b.receive_payment(invoice_amount_2_msat, &"asdf", 9217).unwrap(); + println!("\nB overpaid receive"); + let invoice = node_b.bolt11_payment().receive(invoice_amount_2_msat, &"asdf", 9217).unwrap(); let overpaid_amount_msat = invoice_amount_2_msat + 100; - println!("\nA overpaid send_payment"); - let payment_hash = node_a.send_payment_using_amount(&invoice, overpaid_amount_msat).unwrap(); + println!("\nA overpaid send"); + let payment_hash = + node_a.bolt11_payment().send_using_amount(&invoice, overpaid_amount_msat).unwrap(); expect_event!(node_a, PaymentSuccessful); let received_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => { @@ -496,12 +497,18 @@ pub(crate) fn do_channel_full_cycle( // Test "zero-amount" invoice payment println!("\nB receive_variable_amount_payment"); - let variable_amount_invoice = node_b.receive_variable_amount_payment(&"asdf", 9217).unwrap(); + let variable_amount_invoice = + node_b.bolt11_payment().receive_variable_amount(&"asdf", 9217).unwrap(); let determined_amount_msat = 2345_678; - assert_eq!(Err(NodeError::InvalidInvoice), node_a.send_payment(&variable_amount_invoice)); - println!("\nA send_payment_using_amount"); - let payment_hash = - node_a.send_payment_using_amount(&variable_amount_invoice, determined_amount_msat).unwrap(); + assert_eq!( + Err(NodeError::InvalidInvoice), + node_a.bolt11_payment().send(&variable_amount_invoice) + ); + println!("\nA send_using_amount"); + let payment_hash = node_a + .bolt11_payment() + .send_using_amount(&variable_amount_invoice, determined_amount_msat) + .unwrap(); expect_event!(node_a, PaymentSuccessful); let received_amount = match node_b.wait_next_event() { diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index dd4fefb4f..8dce54b28 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -97,7 +97,7 @@ fn test_cln() { cln_client.invoice(Some(10_000_000), &rand_label, &rand_label, None, None, None).unwrap(); let parsed_invoice = Bolt11Invoice::from_str(&cln_invoice.bolt11).unwrap(); - node.send_payment(&parsed_invoice).unwrap(); + node.bolt11_payment().send(&parsed_invoice).unwrap(); common::expect_event!(node, PaymentSuccessful); let cln_listed_invoices = cln_client.listinvoices(Some(&rand_label), None, None, None).unwrap().invoices; @@ -106,7 +106,7 @@ fn test_cln() { // Send a payment to LDK let rand_label: String = (0..7).map(|_| rng.sample(Alphanumeric) as char).collect(); - let ldk_invoice = 
node.receive_payment(10_000_000, &rand_label, 3600).unwrap(); + let ldk_invoice = node.bolt11_payment().receive(10_000_000, &rand_label, 3600).unwrap(); cln_client.pay(&ldk_invoice.to_string(), Default::default()).unwrap(); common::expect_event!(node, PaymentReceived); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index f0e222fd3..fad071c13 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -133,8 +133,8 @@ fn multi_hop_sending() { // Sleep a bit for gossip to propagate. std::thread::sleep(std::time::Duration::from_secs(1)); - let invoice = nodes[4].receive_payment(2_500_000, &"asdf", 9217).unwrap(); - nodes[0].send_payment(&invoice).unwrap(); + let invoice = nodes[4].bolt11_payment().receive(2_500_000, &"asdf", 9217).unwrap(); + nodes[0].bolt11_payment().send(&invoice).unwrap(); let payment_hash = expect_payment_received_event!(&nodes[4], 2_500_000); let fee_paid_msat = Some(2000); From 38eb21ec650e536e4f94391b806709085545409b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 5 Mar 2024 11:59:24 +0100 Subject: [PATCH 27/89] Move spontaneous payments API to `SpontaneousPaymentHandler` --- bindings/ldk_node.udl | 12 ++- src/lib.rs | 145 ++++++++----------------------------- src/payment/mod.rs | 2 + src/payment/spontaneous.rs | 143 ++++++++++++++++++++++++++++++++++++ src/uniffi_types.rs | 4 +- tests/common/mod.rs | 2 +- 6 files changed, 185 insertions(+), 123 deletions(-) create mode 100644 src/payment/spontaneous.rs diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index d896b2518..d4f80d25b 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -54,6 +54,7 @@ interface Node { PublicKey node_id(); sequence? listening_addresses(); Bolt11Payment bolt11_payment(); + SpontaneousPayment spontaneous_payment(); [Throws=NodeError] Address new_onchain_address(); [Throws=NodeError] @@ -72,10 +73,6 @@ interface Node { void update_channel_config([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, ChannelConfig channel_config); [Throws=NodeError] void sync_wallets(); - [Throws=NodeError] - PaymentHash send_spontaneous_payment(u64 amount_msat, PublicKey node_id); - [Throws=NodeError] - void send_spontaneous_payment_probes(u64 amount_msat, PublicKey node_id); PaymentDetails? payment([ByRef]PaymentHash payment_hash); [Throws=NodeError] void remove_payment([ByRef]PaymentHash payment_hash); @@ -107,6 +104,13 @@ interface Bolt11Payment { Bolt11Invoice receive_variable_amount_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? 
max_proportional_lsp_fee_limit_ppm_msat); }; +interface SpontaneousPayment { + [Throws=NodeError] + PaymentHash send(u64 amount_msat, PublicKey node_id); + [Throws=NodeError] + void send_probes(u64 amount_msat, PublicKey node_id); +}; + [Error] enum NodeError { "AlreadyRunning", diff --git a/src/lib.rs b/src/lib.rs index 03188806e..b622cf0d1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -123,14 +123,14 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - LDK_PAYMENT_RETRY_TIMEOUT, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, - RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, + NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, RGS_SYNC_INTERVAL, + WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; -use payment::Bolt11Payment; +use payment::{Bolt11Payment, SpontaneousPayment}; use payment_store::PaymentStore; pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; use peer_store::{PeerInfo, PeerStore}; @@ -143,11 +143,8 @@ pub use types::{ChannelDetails, PeerDetails, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; use lightning::chain::{BestBlock, Confirm}; -use lightning::ln::channelmanager::{self, PaymentId, RecipientOnionFields, Retry}; use lightning::ln::msgs::SocketAddress; -use lightning::ln::{PaymentHash, PaymentPreimage}; - -use lightning::sign::EntropySource; +use lightning::ln::PaymentHash; use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; pub use lightning::util::logger::Level as LogLevel; @@ -156,8 +153,6 @@ use lightning_background_processor::process_events_async; use lightning_transaction_sync::EsploraSyncClient; -use lightning::routing::router::{PaymentParameters, RouteParameters}; - use bitcoin::secp256k1::PublicKey; use bitcoin::{Address, Txid}; @@ -853,6 +848,32 @@ impl Node { )) } + /// Returns a payment handler allowing to send spontaneous ("keysend") payments. + #[cfg(not(feature = "uniffi"))] + pub fn spontaneous_payment(&self) -> SpontaneousPayment { + SpontaneousPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.keys_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + ) + } + + /// Returns a payment handler allowing to send spontaneous ("keysend") payments. + #[cfg(feature = "uniffi")] + pub fn spontaneous_payment(&self) -> Arc { + Arc::new(SpontaneousPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.keys_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + )) + } + /// Retrieve a new on-chain/funding address. pub fn new_onchain_address(&self) -> Result { let funding_address = self.wallet.get_new_address()?; @@ -1139,112 +1160,6 @@ impl Node { } } - /// Send a spontaneous, aka. 
"keysend", payment - pub fn send_spontaneous_payment( - &self, amount_msat: u64, node_id: PublicKey, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); - let payment_hash = PaymentHash::from(payment_preimage); - - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: must not send duplicate payments."); - return Err(Error::DuplicatePayment); - } - } - - let route_params = RouteParameters::from_payment_params_and_value( - PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta), - amount_msat, - ); - let recipient_fields = RecipientOnionFields::spontaneous_empty(); - - match self.channel_manager.send_spontaneous_payment_with_retry( - Some(payment_preimage), - recipient_fields, - PaymentId(payment_hash.0), - route_params, - Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT), - ) { - Ok(_payment_id) => { - log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); - - let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, - status: PaymentStatus::Pending, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, - status: PaymentStatus::Failed, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - - /// Sends payment probes over all paths of a route that would be used to pay the given - /// amount to the given `node_id`. - /// - /// See [`Bolt11Payment::send_probes`] for more information. - pub fn send_spontaneous_payment_probes( - &self, amount_msat: u64, node_id: PublicKey, - ) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - let cltv_expiry_delta = self.config.default_cltv_expiry_delta; - - self.channel_manager - .send_spontaneous_preflight_probes( - node_id, - amount_msat, - cltv_expiry_delta, - liquidity_limit_multiplier, - ) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - /// Retrieve the details of a specific payment with the given hash. /// /// Returns `Some` if the payment was known and `None` otherwise. diff --git a/src/payment/mod.rs b/src/payment/mod.rs index adbe4df7b..5774f95a5 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -1,5 +1,7 @@ //! Handlers for different types of payments. 
mod bolt11; +mod spontaneous; pub use bolt11::Bolt11Payment; +pub use spontaneous::SpontaneousPayment; diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs new file mode 100644 index 000000000..775fb2ff4 --- /dev/null +++ b/src/payment/spontaneous.rs @@ -0,0 +1,143 @@ +//! Holds a payment handler allowing to send spontaneous ("keysend") payments. + +use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment_store::{PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore}; +use crate::types::{ChannelManager, KeysManager}; + +use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; +use lightning::ln::{PaymentHash, PaymentPreimage}; +use lightning::routing::router::{PaymentParameters, RouteParameters}; +use lightning::sign::EntropySource; + +use bitcoin::secp256k1::PublicKey; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to send spontaneous ("keysend") payments. +/// +/// Should be retrieved by calling [`Node::spontaneous_payment`]. +/// +/// [`Node::spontaneous_payment`]: crate::Node::spontaneous_payment +pub struct SpontaneousPayment { + runtime: Arc>>, + channel_manager: Arc, + keys_manager: Arc, + payment_store: Arc>>, + config: Arc, + logger: Arc, +} + +impl SpontaneousPayment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, keys_manager: Arc, + payment_store: Arc>>, config: Arc, + logger: Arc, + ) -> Self { + Self { runtime, channel_manager, keys_manager, payment_store, config, logger } + } + + /// Send a spontaneous, aka. "keysend", payment + pub fn send(&self, amount_msat: u64, node_id: PublicKey) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); + let payment_hash = PaymentHash::from(payment_preimage); + + if let Some(payment) = self.payment_store.get(&payment_hash) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: must not send duplicate payments."); + return Err(Error::DuplicatePayment); + } + } + + let route_params = RouteParameters::from_payment_params_and_value( + PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta), + amount_msat, + ); + let recipient_fields = RecipientOnionFields::spontaneous_empty(); + + match self.channel_manager.send_spontaneous_payment_with_retry( + Some(payment_preimage), + recipient_fields, + PaymentId(payment_hash.0), + route_params, + Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT), + ) { + Ok(_payment_id) => { + log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); + + let payment = PaymentDetails { + hash: payment_hash, + preimage: Some(payment_preimage), + secret: None, + status: PaymentStatus::Pending, + direction: PaymentDirection::Outbound, + amount_msat: Some(amount_msat), + lsp_fee_limits: None, + }; + self.payment_store.insert(payment)?; + + Ok(payment_hash) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let payment = PaymentDetails { + hash: payment_hash, + preimage: Some(payment_preimage), + secret: None, + status: PaymentStatus::Failed, + direction: PaymentDirection::Outbound, + amount_msat: 
Some(amount_msat), + lsp_fee_limits: None, + }; + + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Sends payment probes over all paths of a route that would be used to pay the given + /// amount to the given `node_id`. + /// + /// See [`Bolt11Payment::send_probes`] for more information. + /// + /// [`Bolt11Payment::send_probes`]: crate::payment::Bolt11Payment + pub fn send_probes(&self, amount_msat: u64, node_id: PublicKey) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + let cltv_expiry_delta = self.config.default_cltv_expiry_delta; + + self.channel_manager + .send_spontaneous_preflight_probes( + node_id, + amount_msat, + cltv_expiry_delta, + liquidity_limit_multiplier, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } +} diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 959c6e9ed..0cef466a1 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -1,6 +1,5 @@ pub use lightning::events::{ClosureReason, PaymentFailureReason}; -pub use lightning::ln::ChannelId; -pub use lightning::ln::PaymentSecret; +pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; @@ -19,7 +18,6 @@ use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; use bitcoin::{Address, Txid}; -use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index c57ed4dbf..05cceb268 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -533,7 +533,7 @@ pub(crate) fn do_channel_full_cycle( println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; let keysend_payment_hash = - node_a.send_spontaneous_payment(keysend_amount_msat, node_b.node_id()).unwrap(); + node_a.spontaneous_payment().send(keysend_amount_msat, node_b.node_id()).unwrap(); expect_event!(node_a, PaymentSuccessful); let received_keysend_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => { From 5e502e6dcdf902fec676a5dc17c44f8d7825e122 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 5 Mar 2024 12:18:52 +0100 Subject: [PATCH 28/89] Move onchain payments API to `OnchainPaymentHandler` --- README.md | 2 +- .../lightningdevkit/ldknode/AndroidLibTest.kt | 4 +- .../lightningdevkit/ldknode/LibraryTest.kt | 4 +- bindings/ldk_node.udl | 16 +++-- bindings/python/src/ldk_node/test_ldk_node.py | 4 +- src/lib.rs | 51 ++++++--------- src/payment/mod.rs | 2 + src/payment/onchain.rs | 63 +++++++++++++++++++ src/uniffi_types.rs | 3 +- tests/common/mod.rs | 4 +- tests/integration_tests_cln.rs | 2 +- tests/integration_tests_rust.rs | 23 ++++--- 12 files changed, 117 insertions(+), 61 deletions(-) create mode 100644 src/payment/onchain.rs diff --git a/README.md b/README.md index eccb85c40..4078ce67b 100644 --- a/README.md +++ b/README.md @@ -31,7 +31,7 @@ fn main() { node.start().unwrap(); - let funding_address = node.new_onchain_address(); + let funding_address = node.onchain_payment().new_address(); // .. fund address .. 
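With spontaneous and on-chain payments now behind dedicated handlers as well, the caller-facing surface introduced by these last few commits might be exercised roughly as follows. This is a sketch, assuming `node_a` and `node_b` are started nodes with confirmed on-chain funds and an open channel; the amounts are illustrative.

```rust
// On-chain receiving and spending live on the on-chain handler ...
let addr_b = node_b.onchain_payment().new_address().unwrap();
let _txid = node_a.onchain_payment().send_to_address(&addr_b, 10_000).unwrap();

// ... while keysend payments moved to their own handler in the previous commit.
let _payment_hash = node_a.spontaneous_payment().send(250_000, node_b.node_id()).unwrap();
```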
diff --git a/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt b/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt index a5ca6eac0..763862a33 100644 --- a/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt +++ b/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt @@ -51,10 +51,10 @@ class AndroidLibTest { val nodeId2 = node2.nodeId() println("Node Id 2: $nodeId2") - val address1 = node1.newOnchainAddress() + val address1 = node1.onchain_payment().newOnchainAddress() println("Funding address 1: $address1") - val address2 = node2.newOnchainAddress() + val address2 = node2.onchain_payment().newOnchainAddress() println("Funding address 2: $address2") node1.stop() diff --git a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt index cacd33bd6..b6b86da9c 100644 --- a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt +++ b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt @@ -146,10 +146,10 @@ class LibraryTest { val nodeId2 = node2.nodeId() println("Node Id 2: $nodeId2") - val address1 = node1.newOnchainAddress() + val address1 = node1.onchainPayment().newAddress() println("Funding address 1: $address1") - val address2 = node2.newOnchainAddress() + val address2 = node2.onchainPayment().newAddress() println("Funding address 2: $address2") val txid1 = sendToAddress(address1, 100000u) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index d4f80d25b..91425b75e 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -55,12 +55,7 @@ interface Node { sequence? 
listening_addresses(); Bolt11Payment bolt11_payment(); SpontaneousPayment spontaneous_payment(); - [Throws=NodeError] - Address new_onchain_address(); - [Throws=NodeError] - Txid send_to_onchain_address([ByRef]Address address, u64 amount_msat); - [Throws=NodeError] - Txid send_all_to_onchain_address([ByRef]Address address); + OnchainPayment onchain_payment(); [Throws=NodeError] void connect(PublicKey node_id, SocketAddress address, boolean persist); [Throws=NodeError] @@ -111,6 +106,15 @@ interface SpontaneousPayment { void send_probes(u64 amount_msat, PublicKey node_id); }; +interface OnchainPayment { + [Throws=NodeError] + Address new_address(); + [Throws=NodeError] + Txid send_to_address([ByRef]Address address, u64 amount_msat); + [Throws=NodeError] + Txid send_all_to_address([ByRef]Address address); +}; + [Error] enum NodeError { "AlreadyRunning", diff --git a/bindings/python/src/ldk_node/test_ldk_node.py b/bindings/python/src/ldk_node/test_ldk_node.py index 468800efe..a593078c1 100644 --- a/bindings/python/src/ldk_node/test_ldk_node.py +++ b/bindings/python/src/ldk_node/test_ldk_node.py @@ -125,9 +125,9 @@ def test_channel_full_cycle(self): node_id_2 = node_2.node_id() print("Node ID 2:", node_id_2) - address_1 = node_1.new_onchain_address() + address_1 = node_1.onchain_payment().new_address() txid_1 = send_to_address(address_1, 100000) - address_2 = node_2.new_onchain_address() + address_2 = node_2.onchain_payment().new_address() txid_2 = send_to_address(address_2, 100000) wait_for_tx(esplora_endpoint, txid_1) diff --git a/src/lib.rs b/src/lib.rs index b622cf0d1..1d718a994 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -43,7 +43,7 @@ //! //! node.start().unwrap(); //! -//! let funding_address = node.new_onchain_address(); +//! let funding_address = node.onchain_payment().new_address(); //! //! // .. fund address .. //! @@ -130,7 +130,7 @@ use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; -use payment::{Bolt11Payment, SpontaneousPayment}; +use payment::{Bolt11Payment, OnchainPayment, SpontaneousPayment}; use payment_store::PaymentStore; pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; use peer_store::{PeerInfo, PeerStore}; @@ -154,7 +154,6 @@ use lightning_background_processor::process_events_async; use lightning_transaction_sync::EsploraSyncClient; use bitcoin::secp256k1::PublicKey; -use bitcoin::{Address, Txid}; use rand::Rng; @@ -874,38 +873,24 @@ impl Node { )) } - /// Retrieve a new on-chain/funding address. - pub fn new_onchain_address(&self) -> Result { - let funding_address = self.wallet.get_new_address()?; - log_info!(self.logger, "Generated new funding address: {}", funding_address); - Ok(funding_address) - } - - /// Send an on-chain payment to the given address. - pub fn send_to_onchain_address( - &self, address: &bitcoin::Address, amount_sats: u64, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let cur_balance = self.wallet.get_balance()?; - if cur_balance.get_spendable() < amount_sats { - log_error!(self.logger, "Unable to send payment due to insufficient funds."); - return Err(Error::InsufficientFunds); - } - self.wallet.send_to_address(address, Some(amount_sats)) + /// Returns a payment handler allowing to send and receive on-chain payments. 
+ #[cfg(not(feature = "uniffi"))] + pub fn onchain_payment(&self) -> OnchainPayment { + OnchainPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.wallet), + Arc::clone(&self.logger), + ) } - /// Send an on-chain payment to the given address, draining all the available funds. - pub fn send_all_to_onchain_address(&self, address: &bitcoin::Address) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - self.wallet.send_to_address(address, None) + /// Returns a payment handler allowing to send and receive on-chain payments. + #[cfg(feature = "uniffi")] + pub fn onchain_payment(&self) -> Arc { + Arc::new(OnchainPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.wallet), + Arc::clone(&self.logger), + )) } /// Retrieve a list of known channels. diff --git a/src/payment/mod.rs b/src/payment/mod.rs index 5774f95a5..1e0a09a84 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -1,7 +1,9 @@ //! Handlers for different types of payments. mod bolt11; +mod onchain; mod spontaneous; pub use bolt11::Bolt11Payment; +pub use onchain::OnchainPayment; pub use spontaneous::SpontaneousPayment; diff --git a/src/payment/onchain.rs b/src/payment/onchain.rs new file mode 100644 index 000000000..c178e95a0 --- /dev/null +++ b/src/payment/onchain.rs @@ -0,0 +1,63 @@ +//! Holds a payment handler allowing to send and receive on-chain payments. + +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::types::Wallet; + +use bitcoin::{Address, Txid}; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to send and receive on-chain payments. +/// +/// Should be retrieved by calling [`Node::onchain_payment`]. +/// +/// [`Node::onchain_payment`]: crate::Node::onchain_payment +pub struct OnchainPayment { + runtime: Arc>>, + wallet: Arc, + logger: Arc, +} + +impl OnchainPayment { + pub(crate) fn new( + runtime: Arc>>, wallet: Arc, + logger: Arc, + ) -> Self { + Self { runtime, wallet, logger } + } + + /// Retrieve a new on-chain/funding address. + pub fn new_address(&self) -> Result { + let funding_address = self.wallet.get_new_address()?; + log_info!(self.logger, "Generated new funding address: {}", funding_address); + Ok(funding_address) + } + + /// Send an on-chain payment to the given address. + pub fn send_to_address( + &self, address: &bitcoin::Address, amount_sats: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let cur_balance = self.wallet.get_balance()?; + if cur_balance.get_spendable() < amount_sats { + log_error!(self.logger, "Unable to send payment due to insufficient funds."); + return Err(Error::InsufficientFunds); + } + self.wallet.send_to_address(address, Some(amount_sats)) + } + + /// Send an on-chain payment to the given address, draining all the available funds. 
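A small usage sketch of the draining variant described above, e.g. when emptying a node's on-chain wallet before shutdown. It assumes `node` is a started `ldk_node::Node` and `external_addr` is a valid `bitcoin::Address` controlled elsewhere; both names are illustrative.

```rust
// Sends the entire spendable on-chain balance, minus fees, to `external_addr`.
let _txid = node.onchain_payment().send_all_to_address(&external_addr).unwrap();
```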
+ pub fn send_all_to_address(&self, address: &bitcoin::Address) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + self.wallet.send_to_address(address, None) + } +} diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 0cef466a1..7204f67f1 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -4,7 +4,7 @@ pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; -pub use bitcoin::{BlockHash, Network, OutPoint}; +pub use bitcoin::{Address, BlockHash, Network, OutPoint, Txid}; pub use bip39::Mnemonic; @@ -17,7 +17,6 @@ use crate::{SocketAddress, UserChannelId}; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; -use bitcoin::{Address, Txid}; use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 05cceb268..a942e66f2 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -355,8 +355,8 @@ pub fn open_channel( pub(crate) fn do_channel_full_cycle( node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, allow_0conf: bool, ) { - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); let premine_amount_sat = 100_000; diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index 8dce54b28..ec752f474 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -44,7 +44,7 @@ fn test_cln() { node.start().unwrap(); // Premine some funds and distribute - let address = node.new_onchain_address().unwrap(); + let address = node.onchain_payment().new_address().unwrap(); let premine_amount = Amount::from_sat(5_000_000); common::premine_and_distribute_funds( &bitcoind_client, diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index fad071c13..fa8986c71 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -34,8 +34,8 @@ fn channel_open_fails_when_funds_insufficient() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); let (node_a, node_b) = setup_two_nodes(&electrsd, false); - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); let premine_amount_sat = 100_000; @@ -80,7 +80,7 @@ fn multi_hop_sending() { nodes.push(node); } - let addresses = nodes.iter().map(|n| n.new_onchain_address().unwrap()).collect(); + let addresses = nodes.iter().map(|n| n.onchain_payment().new_address().unwrap()).collect(); let premine_amount_sat = 5_000_000; premine_and_distribute_funds( &bitcoind.client, @@ -171,7 +171,7 @@ fn start_stop_reinit() { let expected_node_id = node.node_id(); assert_eq!(node.start(), Err(NodeError::AlreadyRunning)); - let funding_address = node.new_onchain_address().unwrap(); + let funding_address = node.onchain_payment().new_address().unwrap(); assert_eq!(node.list_balances().total_onchain_balance_sats, 0); @@ -225,8 +225,8 @@ fn onchain_spend_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); let (node_a, node_b) = setup_two_nodes(&electrsd, false); - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = 
node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); premine_and_distribute_funds( &bitcoind.client, @@ -239,9 +239,12 @@ fn onchain_spend_receive() { node_b.sync_wallets().unwrap(); assert_eq!(node_b.list_balances().spendable_onchain_balance_sats, 100000); - assert_eq!(Err(NodeError::InsufficientFunds), node_a.send_to_onchain_address(&addr_b, 1000)); + assert_eq!( + Err(NodeError::InsufficientFunds), + node_a.onchain_payment().send_to_address(&addr_b, 1000) + ); - let txid = node_b.send_to_onchain_address(&addr_a, 1000).unwrap(); + let txid = node_b.onchain_payment().send_to_address(&addr_a, 1000).unwrap(); generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); wait_for_tx(&electrsd.client, txid); @@ -252,8 +255,8 @@ fn onchain_spend_receive() { assert!(node_b.list_balances().spendable_onchain_balance_sats > 98000); assert!(node_b.list_balances().spendable_onchain_balance_sats < 100000); - let addr_b = node_b.new_onchain_address().unwrap(); - let txid = node_a.send_all_to_onchain_address(&addr_b).unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); + let txid = node_a.onchain_payment().send_all_to_address(&addr_b).unwrap(); generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); wait_for_tx(&electrsd.client, txid); From a78ed43b7a9e8c21540c2098c59b17405827f7a5 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 5 Mar 2024 13:48:19 +0100 Subject: [PATCH 29/89] Move `payment_store` to `payment` submodule .. which declutters our top-level docs a bit. --- src/builder.rs | 2 +- src/event.rs | 2 +- src/lib.rs | 9 ++++----- src/payment/bolt11.rs | 2 +- src/payment/mod.rs | 4 +++- src/payment/spontaneous.rs | 2 +- src/{payment_store.rs => payment/store.rs} | 0 src/uniffi_types.rs | 2 ++ tests/common/mod.rs | 5 ++--- 9 files changed, 15 insertions(+), 13 deletions(-) rename src/{payment_store.rs => payment/store.rs} (100%) diff --git a/src/builder.rs b/src/builder.rs index 204088c6a..6d3db420f 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -11,7 +11,7 @@ use crate::io::sqlite_store::SqliteStore; use crate::liquidity::LiquiditySource; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::message_handler::NodeCustomMessageHandler; -use crate::payment_store::PaymentStore; +use crate::payment::store::PaymentStore; use crate::peer_store::PeerStore; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ diff --git a/src/event.rs b/src/event.rs index 6e9ab587c..05fead0ae 100644 --- a/src/event.rs +++ b/src/event.rs @@ -3,7 +3,7 @@ use crate::{ hex_utils, ChannelManager, Config, Error, NetworkGraph, PeerInfo, PeerStore, UserChannelId, }; -use crate::payment_store::{ +use crate::payment::store::{ PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentStatus, PaymentStore, }; diff --git a/src/lib.rs b/src/lib.rs index 1d718a994..53df151f1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -89,7 +89,6 @@ mod liquidity; mod logger; mod message_handler; pub mod payment; -mod payment_store; mod peer_store; mod sweep; mod tx_broadcaster; @@ -130,9 +129,8 @@ use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; use liquidity::LiquiditySource; -use payment::{Bolt11Payment, OnchainPayment, SpontaneousPayment}; -use payment_store::PaymentStore; -pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; +use payment::store::PaymentStore; +use 
payment::{Bolt11Payment, OnchainPayment, PaymentDetails, SpontaneousPayment}; use peer_store::{PeerInfo, PeerStore}; use types::{ Broadcaster, ChainMonitor, ChannelManager, DynStore, FeeEstimator, KeysManager, NetworkGraph, @@ -1208,7 +1206,8 @@ impl Node { /// /// For example, you could retrieve all stored outbound payments as follows: /// ``` - /// # use ldk_node::{Builder, Config, PaymentDirection}; + /// # use ldk_node::{Builder, Config}; + /// # use ldk_node::payment::PaymentDirection; /// # use ldk_node::bitcoin::Network; /// # let mut config = Config::default(); /// # config.network = Network::Regtest; diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index 8ac15fe0a..10bbd5fbd 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -7,7 +7,7 @@ use crate::connection::ConnectionManager; use crate::error::Error; use crate::liquidity::LiquiditySource; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; -use crate::payment_store::{ +use crate::payment::store::{ LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore, }; use crate::peer_store::{PeerInfo, PeerStore}; diff --git a/src/payment/mod.rs b/src/payment/mod.rs index 1e0a09a84..0666bd69f 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -1,9 +1,11 @@ -//! Handlers for different types of payments. +//! Objects for different types of payments. mod bolt11; mod onchain; mod spontaneous; +pub(crate) mod store; pub use bolt11::Bolt11Payment; pub use onchain::OnchainPayment; pub use spontaneous::SpontaneousPayment; +pub use store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs index 775fb2ff4..f81c23a4b 100644 --- a/src/payment/spontaneous.rs +++ b/src/payment/spontaneous.rs @@ -3,7 +3,7 @@ use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; use crate::error::Error; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; -use crate::payment_store::{PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore}; +use crate::payment::store::{PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore}; use crate::types::{ChannelManager, KeysManager}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; diff --git a/src/payment_store.rs b/src/payment/store.rs similarity index 100% rename from src/payment_store.rs rename to src/payment/store.rs diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 7204f67f1..2bddfee13 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -1,3 +1,5 @@ +pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentStatus}; + pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; pub use lightning::util::string::UntrustedString; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index a942e66f2..a9acb63a0 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -2,9 +2,8 @@ #![allow(dead_code)] use ldk_node::io::sqlite_store::SqliteStore; -use ldk_node::{ - Builder, Config, Event, LogLevel, Node, NodeError, PaymentDirection, PaymentStatus, -}; +use ldk_node::payment::{PaymentDirection, PaymentStatus}; +use ldk_node::{Builder, Config, Event, LogLevel, Node, NodeError}; use lightning::ln::msgs::SocketAddress; use lightning::util::persist::KVStore; From 7cd8a7261d56d041d2518217e75fad4b4f688b44 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 8 Mar 2024 11:17:29 +0100 
Subject: [PATCH 30/89] Introduce `PaymentKind` to payment store, track by `PaymentId` --- bindings/ldk_node.udl | 28 +++-- src/error.rs | 3 + src/event.rs | 91 +++++++++----- src/lib.rs | 14 +-- src/payment/bolt11.rs | 89 +++++++------ src/payment/mod.rs | 2 +- src/payment/spontaneous.rs | 32 +++-- src/payment/store.rs | 250 +++++++++++++++++++++++++++---------- src/uniffi_types.rs | 21 +++- tests/common/mod.rs | 79 ++++++------ 10 files changed, 400 insertions(+), 209 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 91425b75e..1d776782b 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -68,9 +68,9 @@ interface Node { void update_channel_config([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, ChannelConfig channel_config); [Throws=NodeError] void sync_wallets(); - PaymentDetails? payment([ByRef]PaymentHash payment_hash); + PaymentDetails? payment([ByRef]PaymentId payment_id); [Throws=NodeError] - void remove_payment([ByRef]PaymentHash payment_hash); + void remove_payment([ByRef]PaymentId payment_id); BalanceDetails list_balances(); sequence list_payments(); sequence list_peers(); @@ -82,9 +82,9 @@ interface Node { interface Bolt11Payment { [Throws=NodeError] - PaymentHash send([ByRef]Bolt11Invoice invoice); + PaymentId send([ByRef]Bolt11Invoice invoice); [Throws=NodeError] - PaymentHash send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + PaymentId send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); [Throws=NodeError] void send_probes([ByRef]Bolt11Invoice invoice); [Throws=NodeError] @@ -101,7 +101,7 @@ interface Bolt11Payment { interface SpontaneousPayment { [Throws=NodeError] - PaymentHash send(u64 amount_msat, PublicKey node_id); + PaymentId send(u64 amount_msat, PublicKey node_id); [Throws=NodeError] void send_probes(u64 amount_msat, PublicKey node_id); }; @@ -139,6 +139,7 @@ enum NodeError { "InvalidSocketAddress", "InvalidPublicKey", "InvalidSecretKey", + "InvalidPaymentId", "InvalidPaymentHash", "InvalidPaymentPreimage", "InvalidPaymentSecret", @@ -219,6 +220,14 @@ interface ClosureReason { HTLCsTimedOut(); }; +[Enum] +interface PaymentKind { + Onchain(); + Bolt11(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret); + Bolt11Jit(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret, LSPFeeLimits lsp_fee_limits); + Spontaneous(PaymentHash hash, PaymentPreimage? preimage); +}; + enum PaymentDirection { "Inbound", "Outbound", @@ -236,13 +245,11 @@ dictionary LSPFeeLimits { }; dictionary PaymentDetails { - PaymentHash hash; - PaymentPreimage? preimage; - PaymentSecret? secret; + PaymentId id; + PaymentKind kind; u64? amount_msat; PaymentDirection direction; PaymentStatus status; - LSPFeeLimits? lsp_fee_limits; }; [NonExhaustive] @@ -364,6 +371,9 @@ typedef string Address; [Custom] typedef string Bolt11Invoice; +[Custom] +typedef string PaymentId; + [Custom] typedef string PaymentHash; diff --git a/src/error.rs b/src/error.rs index c5234a6d4..5acc75af8 100644 --- a/src/error.rs +++ b/src/error.rs @@ -47,6 +47,8 @@ pub enum Error { InvalidPublicKey, /// The given secret key is invalid. InvalidSecretKey, + /// The given payment id is invalid. + InvalidPaymentId, /// The given payment hash is invalid. InvalidPaymentHash, /// The given payment pre-image is invalid. 
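On the Rust side, store entries are now keyed by `PaymentId` and describe themselves through a `PaymentKind` rather than flat hash/preimage/secret fields. A minimal sketch of consuming the new shape, assuming the Rust enum mirrors the UDL variants above with struct-style fields, that `PaymentKind` is re-exported from `ldk_node::payment` alongside `PaymentDetails`, and that `node` and `payment_id` are in scope:

```rust
use ldk_node::payment::{PaymentDetails, PaymentKind};

// Branch on the new `kind` field, e.g. when rendering a payment history entry.
fn kind_label(payment: &PaymentDetails) -> String {
	match &payment.kind {
		PaymentKind::Onchain => "on-chain".to_string(),
		PaymentKind::Bolt11 { preimage, .. } => {
			format!("BOLT 11 (preimage known: {})", preimage.is_some())
		},
		PaymentKind::Bolt11Jit { .. } => "BOLT 11 via JIT channel".to_string(),
		PaymentKind::Spontaneous { .. } => "keysend".to_string(),
	}
}

// Payments are now looked up by `PaymentId` rather than by payment hash.
if let Some(details) = node.payment(&payment_id) {
	println!("{}: {} msat", kind_label(&details), details.amount_msat.unwrap_or(0));
}
```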
@@ -100,6 +102,7 @@ impl fmt::Display for Error { Self::InvalidSocketAddress => write!(f, "The given network address is invalid."), Self::InvalidPublicKey => write!(f, "The given public key is invalid."), Self::InvalidSecretKey => write!(f, "The given secret key is invalid."), + Self::InvalidPaymentId => write!(f, "The given payment id is invalid."), Self::InvalidPaymentHash => write!(f, "The given payment hash is invalid."), Self::InvalidPaymentPreimage => write!(f, "The given payment preimage is invalid."), Self::InvalidPaymentSecret => write!(f, "The given payment secret is invalid."), diff --git a/src/event.rs b/src/event.rs index 05fead0ae..d4e3d57bc 100644 --- a/src/event.rs +++ b/src/event.rs @@ -4,7 +4,8 @@ use crate::{ }; use crate::payment::store::{ - PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentStatus, PaymentStore, + PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, PaymentStatus, + PaymentStore, }; use crate::io::{ @@ -17,6 +18,7 @@ use lightning::chain::chaininterface::ConfirmationTarget; use lightning::events::{ClosureReason, PaymentPurpose}; use lightning::events::{Event as LdkEvent, PaymentFailureReason}; use lightning::impl_writeable_tlv_based_enum; +use lightning::ln::channelmanager::PaymentId; use lightning::ln::{ChannelId, PaymentHash}; use lightning::routing::gossip::NodeId; use lightning::util::errors::APIError; @@ -410,7 +412,8 @@ where onion_fields: _, counterparty_skimmed_fee_msat, } => { - if let Some(info) = self.payment_store.get(&payment_hash) { + let payment_id = PaymentId(payment_hash.0); + if let Some(info) = self.payment_store.get(&payment_id) { if info.status == PaymentStatus::Succeeded { log_info!( self.logger, @@ -422,7 +425,7 @@ where let update = PaymentDetailsUpdate { status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); @@ -431,17 +434,22 @@ where return; } - let max_total_opening_fee_msat = info - .lsp_fee_limits - .and_then(|l| { - l.max_total_opening_fee_msat.or_else(|| { - l.max_proportional_opening_fee_ppm_msat.and_then(|max_prop_fee| { - // If it's a variable amount payment, compute the actual fee. - compute_opening_fee(amount_msat, 0, max_prop_fee) + let max_total_opening_fee_msat = match info.kind { + PaymentKind::Bolt11Jit { lsp_fee_limits, .. } => { + lsp_fee_limits + .max_total_opening_fee_msat + .or_else(|| { + lsp_fee_limits.max_proportional_opening_fee_ppm_msat.and_then( + |max_prop_fee| { + // If it's a variable amount payment, compute the actual fee. 
+ compute_opening_fee(amount_msat, 0, max_prop_fee) + }, + ) }) - }) - }) - .unwrap_or(0); + .unwrap_or(0) + }, + _ => 0, + }; if counterparty_skimmed_fee_msat > max_total_opening_fee_msat { log_info!( @@ -455,7 +463,7 @@ where let update = PaymentDetailsUpdate { status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); @@ -516,7 +524,7 @@ where let update = PaymentDetailsUpdate { status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); @@ -538,6 +546,7 @@ where hex_utils::to_string(&payment_hash.0), amount_msat, ); + let payment_id = PaymentId(payment_hash.0); match purpose { PaymentPurpose::Bolt11InvoicePayment { payment_preimage, @@ -549,7 +558,7 @@ where secret: Some(Some(payment_secret)), amount_msat: Some(Some(amount_msat)), status: Some(PaymentStatus::Succeeded), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; match self.payment_store.update(&update) { Ok(true) => (), @@ -592,13 +601,14 @@ where }, PaymentPurpose::SpontaneousPayment(preimage) => { let payment = PaymentDetails { - preimage: Some(preimage), - hash: payment_hash, - secret: None, + id: payment_id, + kind: PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(preimage), + }, amount_msat: Some(amount_msat), direction: PaymentDirection::Inbound, status: PaymentStatus::Succeeded, - lsp_fee_limits: None, }; match self.payment_store.insert(payment) { @@ -631,14 +641,32 @@ where panic!("Failed to push to event queue"); }); }, - LdkEvent::PaymentSent { payment_preimage, payment_hash, fee_paid_msat, .. } => { - if let Some(mut payment) = self.payment_store.get(&payment_hash) { - payment.preimage = Some(payment_preimage); - payment.status = PaymentStatus::Succeeded; - self.payment_store.insert(payment.clone()).unwrap_or_else(|e| { - log_error!(self.logger, "Failed to access payment store: {}", e); - panic!("Failed to access payment store"); - }); + LdkEvent::PaymentSent { + payment_id, + payment_preimage, + payment_hash, + fee_paid_msat, + .. + } => { + let payment_id = if let Some(id) = payment_id { + id + } else { + debug_assert!(false, "payment_id should always be set."); + return; + }; + + let update = PaymentDetailsUpdate { + preimage: Some(Some(payment_preimage)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }; + + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + + self.payment_store.get(&payment_id).map(|payment| { log_info!( self.logger, "Successfully sent payment of {}msat{} from \ @@ -652,7 +680,8 @@ where hex_utils::to_string(&payment_hash.0), hex_utils::to_string(&payment_preimage.0) ); - } + }); + self.event_queue .add_event(Event::PaymentSuccessful { payment_hash, fee_paid_msat }) .unwrap_or_else(|e| { @@ -660,7 +689,7 @@ where panic!("Failed to push to event queue"); }); }, - LdkEvent::PaymentFailed { payment_hash, reason, .. } => { + LdkEvent::PaymentFailed { payment_id, payment_hash, reason, .. 
} => { log_info!( self.logger, "Failed to send payment to payment hash {:?} due to {:?}.", @@ -670,7 +699,7 @@ where let update = PaymentDetailsUpdate { status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); diff --git a/src/lib.rs b/src/lib.rs index 53df151f1..3d619cebb 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -141,8 +141,8 @@ pub use types::{ChannelDetails, PeerDetails, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; use lightning::chain::{BestBlock, Confirm}; +use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::SocketAddress; -use lightning::ln::PaymentHash; use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; pub use lightning::util::logger::Level as LogLevel; @@ -1143,16 +1143,16 @@ impl Node { } } - /// Retrieve the details of a specific payment with the given hash. + /// Retrieve the details of a specific payment with the given id. /// /// Returns `Some` if the payment was known and `None` otherwise. - pub fn payment(&self, payment_hash: &PaymentHash) -> Option { - self.payment_store.get(payment_hash) + pub fn payment(&self, payment_id: &PaymentId) -> Option { + self.payment_store.get(payment_id) } - /// Remove the payment with the given hash from the store. - pub fn remove_payment(&self, payment_hash: &PaymentHash) -> Result<(), Error> { - self.payment_store.remove(&payment_hash) + /// Remove the payment with the given id from the store. + pub fn remove_payment(&self, payment_id: &PaymentId) -> Result<(), Error> { + self.payment_store.remove(&payment_id) } /// Retrieves an overview of all known balances. diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index 10bbd5fbd..c23f7b670 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -8,7 +8,7 @@ use crate::error::Error; use crate::liquidity::LiquiditySource; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::payment::store::{ - LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore, + LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, }; use crate::peer_store::{PeerInfo, PeerStore}; use crate::types::{ChannelManager, KeysManager}; @@ -66,7 +66,7 @@ impl Bolt11Payment { } /// Send a payment given an invoice. 
- pub fn send(&self, invoice: &Bolt11Invoice) -> Result { + pub fn send(&self, invoice: &Bolt11Invoice) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { return Err(Error::NotRunning); @@ -77,7 +77,8 @@ impl Bolt11Payment { Error::InvalidInvoice })?; - if let Some(payment) = self.payment_store.get(&payment_hash) { + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + if let Some(payment) = self.payment_store.get(&payment_id) { if payment.status == PaymentStatus::Pending || payment.status == PaymentStatus::Succeeded { @@ -87,7 +88,6 @@ impl Bolt11Payment { } let payment_secret = Some(*invoice.payment_secret()); - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); match self.channel_manager.send_payment( @@ -103,17 +103,20 @@ impl Bolt11Payment { log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, + id: payment_id, + kind: PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, + }, amount_msat: invoice.amount_milli_satoshis(), direction: PaymentDirection::Outbound, status: PaymentStatus::Pending, - lsp_fee_limits: None, }; + self.payment_store.insert(payment)?; - Ok(payment_hash) + Ok(payment_id) }, Err(e) => { log_error!(self.logger, "Failed to send payment: {:?}", e); @@ -121,13 +124,15 @@ impl Bolt11Payment { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, + id: payment_id, + kind: PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, + }, amount_msat: invoice.amount_milli_satoshis(), direction: PaymentDirection::Outbound, status: PaymentStatus::Failed, - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; @@ -146,7 +151,7 @@ impl Bolt11Payment { /// amount paid to be determined by the user. 
pub fn send_using_amount( &self, invoice: &Bolt11Invoice, amount_msat: u64, - ) -> Result { + ) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { return Err(Error::NotRunning); @@ -162,7 +167,8 @@ impl Bolt11Payment { } let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - if let Some(payment) = self.payment_store.get(&payment_hash) { + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + if let Some(payment) = self.payment_store.get(&payment_id) { if payment.status == PaymentStatus::Pending || payment.status == PaymentStatus::Succeeded { @@ -171,7 +177,6 @@ impl Bolt11Payment { } } - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); let payment_secret = invoice.payment_secret(); let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); let mut payment_params = PaymentParameters::from_node_id( @@ -199,7 +204,7 @@ impl Bolt11Payment { route_params, retry_strategy, ) { - Ok(_payment_id) => { + Ok(()) => { let payee_pubkey = invoice.recover_payee_pub_key(); log_info!( self.logger, @@ -209,17 +214,19 @@ impl Bolt11Payment { ); let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), + id: payment_id, + kind: PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + }, amount_msat: Some(amount_msat), direction: PaymentDirection::Outbound, status: PaymentStatus::Pending, - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; - Ok(payment_hash) + Ok(payment_id) }, Err(e) => { log_error!(self.logger, "Failed to send payment: {:?}", e); @@ -228,13 +235,15 @@ impl Bolt11Payment { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), + id: payment_id, + kind: PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + }, amount_msat: Some(amount_msat), direction: PaymentDirection::Outbound, status: PaymentStatus::Failed, - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; @@ -287,14 +296,18 @@ impl Bolt11Payment { }; let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let id = PaymentId(payment_hash.0); let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), + id, + kind: PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(invoice.payment_secret().clone()), + }, + amount_msat, direction: PaymentDirection::Inbound, status: PaymentStatus::Pending, - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; @@ -409,18 +422,22 @@ impl Bolt11Payment { // Register payment in payment store. 
let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - let lsp_fee_limits = Some(LSPFeeLimits { + let lsp_fee_limits = LSPFeeLimits { max_total_opening_fee_msat: lsp_total_opening_fee, max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, - }); + }; + let id = PaymentId(payment_hash.0); let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), + id, + kind: PaymentKind::Bolt11Jit { + hash: payment_hash, + preimage: None, + secret: Some(invoice.payment_secret().clone()), + lsp_fee_limits, + }, amount_msat, direction: PaymentDirection::Inbound, status: PaymentStatus::Pending, - lsp_fee_limits, }; self.payment_store.insert(payment)?; diff --git a/src/payment/mod.rs b/src/payment/mod.rs index 0666bd69f..3649f1fcc 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -8,4 +8,4 @@ pub(crate) mod store; pub use bolt11::Bolt11Payment; pub use onchain::OnchainPayment; pub use spontaneous::SpontaneousPayment; -pub use store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; +pub use store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus}; diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs index f81c23a4b..fcca8065a 100644 --- a/src/payment/spontaneous.rs +++ b/src/payment/spontaneous.rs @@ -3,7 +3,9 @@ use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; use crate::error::Error; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; -use crate::payment::store::{PaymentDetails, PaymentDirection, PaymentStatus, PaymentStore}; +use crate::payment::store::{ + PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, +}; use crate::types::{ChannelManager, KeysManager}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; @@ -40,7 +42,7 @@ impl SpontaneousPayment { } /// Send a spontaneous, aka. 
"keysend", payment - pub fn send(&self, amount_msat: u64, node_id: PublicKey) -> Result { + pub fn send(&self, amount_msat: u64, node_id: PublicKey) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { return Err(Error::NotRunning); @@ -48,8 +50,9 @@ impl SpontaneousPayment { let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); let payment_hash = PaymentHash::from(payment_preimage); + let payment_id = PaymentId(payment_hash.0); - if let Some(payment) = self.payment_store.get(&payment_hash) { + if let Some(payment) = self.payment_store.get(&payment_id) { if payment.status == PaymentStatus::Pending || payment.status == PaymentStatus::Succeeded { @@ -71,21 +74,22 @@ impl SpontaneousPayment { route_params, Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT), ) { - Ok(_payment_id) => { + Ok(_hash) => { log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, + id: payment_id, + kind: PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), + }, status: PaymentStatus::Pending, direction: PaymentDirection::Outbound, amount_msat: Some(amount_msat), - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; - Ok(payment_hash) + Ok(payment_id) }, Err(e) => { log_error!(self.logger, "Failed to send payment: {:?}", e); @@ -94,13 +98,15 @@ impl SpontaneousPayment { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, + id: payment_id, + kind: PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), + }, + status: PaymentStatus::Failed, direction: PaymentDirection::Outbound, amount_msat: Some(amount_msat), - lsp_fee_limits: None, }; self.payment_store.insert(payment)?; diff --git a/src/payment/store.rs b/src/payment/store.rs index 524984e9e..1a85152e2 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -6,9 +6,14 @@ use crate::logger::{log_error, Logger}; use crate::types::DynStore; use crate::Error; +use lightning::ln::channelmanager::PaymentId; +use lightning::ln::msgs::DecodeError; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; -use lightning::util::ser::Writeable; -use lightning::{impl_writeable_tlv_based, impl_writeable_tlv_based_enum}; +use lightning::util::ser::{Readable, Writeable}; +use lightning::{ + _init_and_read_len_prefixed_tlv_fields, impl_writeable_tlv_based, + impl_writeable_tlv_based_enum, write_tlv_fields, +}; use std::collections::HashMap; use std::iter::FromIterator; @@ -18,38 +23,88 @@ use std::sync::{Arc, Mutex}; /// Represents a payment. #[derive(Clone, Debug, PartialEq, Eq)] pub struct PaymentDetails { - /// The payment hash, i.e., the hash of the `preimage`. - pub hash: PaymentHash, - /// The pre-image used by the payment. - pub preimage: Option, - /// The secret used by the payment. - pub secret: Option, + /// The identifier of this payment. + pub id: PaymentId, + /// The kind of the payment. + pub kind: PaymentKind, /// The amount transferred. pub amount_msat: Option, /// The direction of the payment. pub direction: PaymentDirection, /// The status of the payment. pub status: PaymentStatus, - /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. 
- /// - /// This is only `Some` for payments received via a JIT-channel, in which case the first - /// inbound payment will pay for the LSP's channel opening fees. - /// - /// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. - /// - /// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs - pub lsp_fee_limits: Option, } -impl_writeable_tlv_based!(PaymentDetails, { - (0, hash, required), - (1, lsp_fee_limits, option), - (2, preimage, required), - (4, secret, required), - (6, amount_msat, required), - (8, direction, required), - (10, status, required) -}); +impl Writeable for PaymentDetails { + fn write( + &self, writer: &mut W, + ) -> Result<(), lightning::io::Error> { + write_tlv_fields!(writer, { + (0, self.id, required), // Used to be `hash` for v0.2.1 and prior + // 1 briefly used to be lsp_fee_limits, could probably be reused at some point in the future. + // 2 used to be `preimage` before it was moved to `kind` in v0.3.0 + (2, None::>, required), + (3, self.kind, required), + // 4 used to be `secret` before it was moved to `kind` in v0.3.0 + (4, None::>, required), + (6, self.amount_msat, required), + (8, self.direction, required), + (10, self.status, required) + }); + Ok(()) + } +} + +impl Readable for PaymentDetails { + fn read(reader: &mut R) -> Result { + _init_and_read_len_prefixed_tlv_fields!(reader, { + (0, id, required), // Used to be `hash` + (1, lsp_fee_limits, option), + (2, preimage, required), + (3, kind_opt, option), + (4, secret, required), + (6, amount_msat, required), + (8, direction, required), + (10, status, required) + }); + + let id: PaymentId = id.0.ok_or(DecodeError::InvalidValue)?; + let preimage: Option = preimage.0.ok_or(DecodeError::InvalidValue)?; + let secret: Option = secret.0.ok_or(DecodeError::InvalidValue)?; + let amount_msat: Option = amount_msat.0.ok_or(DecodeError::InvalidValue)?; + let direction: PaymentDirection = direction.0.ok_or(DecodeError::InvalidValue)?; + let status: PaymentStatus = status.0.ok_or(DecodeError::InvalidValue)?; + + let kind = if let Some(kind) = kind_opt { + // If we serialized the payment kind, use it. + // This will always be the case for any version after v0.2.1. + kind + } else { + // Otherwise we persisted with v0.2.1 or before, and puzzle together the kind from the + // provided fields. + + // We used to track everything by hash, but switched to track everything by id + // post-v0.2.1. As both are serialized identically, we just switched the `0`-type field above + // from `PaymentHash` to `PaymentId` and serialize a separate `PaymentHash` in + // `PaymentKind` when needed. Here, for backwards compat, we can just re-create the + // `PaymentHash` from the id, as 'back then' `payment_hash == payment_id` was always + // true. + let hash = PaymentHash(id.0); + + if secret.is_some() { + if let Some(lsp_fee_limits) = lsp_fee_limits { + PaymentKind::Bolt11Jit { hash, preimage, secret, lsp_fee_limits } + } else { + PaymentKind::Bolt11 { hash, preimage, secret } + } + } else { + PaymentKind::Spontaneous { hash, preimage } + } + }; + + Ok(PaymentDetails { id, kind, amount_msat, direction, status }) + } +} /// Represents the direction of a payment. #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -82,6 +137,73 @@ impl_writeable_tlv_based_enum!(PaymentStatus, (4, Failed) => {}; ); +/// Represents the kind of a payment. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum PaymentKind { + /// An on-chain payment. + Onchain, + /// A [BOLT 11] payment. 
+ /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + // TODO: Bolt11 { invoice: Option }, + Bolt11 { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + }, + /// A [BOLT 11] payment intended to open an [LSPS 2] just-in-time channel. + /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + /// [LSPS 2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + // TODO: Bolt11Jit { invoice: Option }, + Bolt11Jit { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. + /// + /// Allowing them to deduct this fee from the first inbound payment will pay for the LSP's + /// channel opening fees. + /// + /// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. + /// + /// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs + lsp_fee_limits: LSPFeeLimits, + }, + /// A spontaneous ("keysend") payment. + Spontaneous { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + }, +} + +impl_writeable_tlv_based_enum!(PaymentKind, + (0, Onchain) => {}, + (2, Bolt11) => { + (0, hash, required), + (2, preimage, option), + (4, secret, option), + }, + (4, Bolt11Jit) => { + (0, hash, required), + (2, preimage, option), + (4, secret, option), + (6, lsp_fee_limits, required), + }, + (8, Spontaneous) => { + (0, hash, required), + (2, preimage, option), + }; +); + /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. /// /// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. 
@@ -104,26 +226,17 @@ impl_writeable_tlv_based!(LSPFeeLimits, { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PaymentDetailsUpdate { - pub hash: PaymentHash, + pub id: PaymentId, pub preimage: Option>, pub secret: Option>, pub amount_msat: Option>, pub direction: Option, pub status: Option, - pub lsp_fee_limits: Option>, } impl PaymentDetailsUpdate { - pub fn new(hash: PaymentHash) -> Self { - Self { - hash, - preimage: None, - secret: None, - amount_msat: None, - direction: None, - status: None, - lsp_fee_limits: None, - } + pub fn new(id: PaymentId) -> Self { + Self { id, preimage: None, secret: None, amount_msat: None, direction: None, status: None } } } @@ -131,7 +244,7 @@ pub(crate) struct PaymentStore where L::Target: Logger, { - payments: Mutex>, + payments: Mutex>, kv_store: Arc, logger: L, } @@ -142,7 +255,7 @@ where { pub(crate) fn new(payments: Vec, kv_store: Arc, logger: L) -> Self { let payments = Mutex::new(HashMap::from_iter( - payments.into_iter().map(|payment| (payment.hash, payment)), + payments.into_iter().map(|payment| (payment.id, payment)), )); Self { payments, kv_store, logger } } @@ -150,14 +263,13 @@ where pub(crate) fn insert(&self, payment: PaymentDetails) -> Result { let mut locked_payments = self.payments.lock().unwrap(); - let hash = payment.hash.clone(); - let updated = locked_payments.insert(hash.clone(), payment.clone()).is_some(); - self.persist_info(&hash, &payment)?; + let updated = locked_payments.insert(payment.id, payment.clone()).is_some(); + self.persist_info(&payment.id, &payment)?; Ok(updated) } - pub(crate) fn remove(&self, hash: &PaymentHash) -> Result<(), Error> { - let store_key = hex_utils::to_string(&hash.0); + pub(crate) fn remove(&self, id: &PaymentId) -> Result<(), Error> { + let store_key = hex_utils::to_string(&id.0); self.kv_store .remove( PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, @@ -178,21 +290,30 @@ where }) } - pub(crate) fn get(&self, hash: &PaymentHash) -> Option { - self.payments.lock().unwrap().get(hash).cloned() + pub(crate) fn get(&self, id: &PaymentId) -> Option { + self.payments.lock().unwrap().get(id).cloned() } pub(crate) fn update(&self, update: &PaymentDetailsUpdate) -> Result { let mut updated = false; let mut locked_payments = self.payments.lock().unwrap(); - if let Some(payment) = locked_payments.get_mut(&update.hash) { + if let Some(payment) = locked_payments.get_mut(&update.id) { if let Some(preimage_opt) = update.preimage { - payment.preimage = preimage_opt; + match payment.kind { + PaymentKind::Bolt11 { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt11Jit { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Spontaneous { ref mut preimage, .. } => *preimage = preimage_opt, + _ => {}, + } } if let Some(secret_opt) = update.secret { - payment.secret = secret_opt; + match payment.kind { + PaymentKind::Bolt11 { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt11Jit { ref mut secret, .. 
} => *secret = secret_opt, + _ => {}, + } } if let Some(amount_opt) = update.amount_msat { @@ -203,11 +324,7 @@ where payment.status = status; } - if let Some(lsp_fee_limits) = update.lsp_fee_limits { - payment.lsp_fee_limits = lsp_fee_limits - } - - self.persist_info(&update.hash, payment)?; + self.persist_info(&update.id, payment)?; updated = true; } @@ -227,8 +344,8 @@ where .collect::>() } - fn persist_info(&self, hash: &PaymentHash, payment: &PaymentDetails) -> Result<(), Error> { - let store_key = hex_utils::to_string(&hash.0); + fn persist_info(&self, id: &PaymentId, payment: &PaymentDetails) -> Result<(), Error> { + let store_key = hex_utils::to_string(&id.0); let data = payment.encode(); self.kv_store .write( @@ -265,7 +382,8 @@ mod tests { let payment_store = PaymentStore::new(Vec::new(), Arc::clone(&store), logger); let hash = PaymentHash([42u8; 32]); - assert!(payment_store.get(&hash).is_none()); + let id = PaymentId([42u8; 32]); + assert!(payment_store.get(&id).is_none()); let store_key = hex_utils::to_string(&hash.0); assert!(store @@ -277,17 +395,15 @@ mod tests { .is_err()); let payment = PaymentDetails { - hash, - preimage: None, - secret: None, + id, + kind: PaymentKind::Bolt11 { hash, preimage: None, secret: None }, amount_msat: None, direction: PaymentDirection::Inbound, status: PaymentStatus::Pending, - lsp_fee_limits: None, }; assert_eq!(Ok(false), payment_store.insert(payment.clone())); - assert!(payment_store.get(&hash).is_some()); + assert!(payment_store.get(&id).is_some()); assert!(store .read( PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, @@ -297,13 +413,13 @@ mod tests { .is_ok()); assert_eq!(Ok(true), payment_store.insert(payment)); - assert!(payment_store.get(&hash).is_some()); + assert!(payment_store.get(&id).is_some()); - let mut update = PaymentDetailsUpdate::new(hash); + let mut update = PaymentDetailsUpdate::new(id); update.status = Some(PaymentStatus::Succeeded); assert_eq!(Ok(true), payment_store.update(&update)); - assert!(payment_store.get(&hash).is_some()); + assert!(payment_store.get(&id).is_some()); - assert_eq!(PaymentStatus::Succeeded, payment_store.get(&hash).unwrap().status); + assert_eq!(PaymentStatus::Succeeded, payment_store.get(&id).unwrap().status); } } diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 2bddfee13..e979b5ce9 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -1,4 +1,4 @@ -pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentStatus}; +pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, PaymentStatus}; pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; @@ -19,6 +19,7 @@ use crate::{SocketAddress, UserChannelId}; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; +use lightning::ln::channelmanager::PaymentId; use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; @@ -74,6 +75,24 @@ impl UniffiCustomTypeConverter for Bolt11Invoice { } } +impl UniffiCustomTypeConverter for PaymentId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + let bytes_res = bytes_vec.try_into(); + if let Ok(bytes) = bytes_res { + return Ok(PaymentId(bytes)); + } + } + Err(Error::InvalidPaymentId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.0) + } +} + impl UniffiCustomTypeConverter for 
PaymentHash { type Builtin = String; diff --git a/tests/common/mod.rs b/tests/common/mod.rs index a9acb63a0..f8022639a 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -419,10 +419,10 @@ pub(crate) fn do_channel_full_cycle( let invoice = node_b.bolt11_payment().receive(invoice_amount_1_msat, &"asdf", 9217).unwrap(); println!("\nA send"); - let payment_hash = node_a.bolt11_payment().send(&invoice).unwrap(); + let payment_id = node_a.bolt11_payment().send(&invoice).unwrap(); assert_eq!(node_a.bolt11_payment().send(&invoice), Err(NodeError::DuplicatePayment)); - assert_eq!(node_a.list_payments().first().unwrap().hash, payment_hash); + assert_eq!(node_a.list_payments().first().unwrap().id, payment_id); let outbound_payments_a = node_a.list_payments_with_filter(|p| p.direction == PaymentDirection::Outbound); @@ -442,21 +442,21 @@ pub(crate) fn do_channel_full_cycle( expect_event!(node_a, PaymentSuccessful); expect_event!(node_b, PaymentReceived); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); // Assert we fail duplicate outbound payments and check the status hasn't changed. 
assert_eq!(Err(NodeError::DuplicatePayment), node_a.bolt11_payment().send(&invoice)); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); // Test under-/overpayment let invoice_amount_2_msat = 2500_000; @@ -473,7 +473,7 @@ pub(crate) fn do_channel_full_cycle( let overpaid_amount_msat = invoice_amount_2_msat + 100; println!("\nA overpaid send"); - let payment_hash = + let payment_id = node_a.bolt11_payment().send_using_amount(&invoice, overpaid_amount_msat).unwrap(); expect_event!(node_a, PaymentSuccessful); let received_amount = match node_b.wait_next_event() { @@ -487,12 +487,12 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_amount, overpaid_amount_msat); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(overpaid_amount_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); // Test "zero-amount" invoice payment println!("\nB receive_variable_amount_payment"); @@ -504,7 +504,7 @@ pub(crate) fn do_channel_full_cycle( node_a.bolt11_payment().send(&variable_amount_invoice) ); println!("\nA send_using_amount"); - let payment_hash = node_a + let payment_id = node_a .bolt11_payment() .send_using_amount(&variable_amount_invoice, determined_amount_msat) .unwrap(); @@ -521,17 +521,17 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_amount, determined_amount_msat); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, 
Some(determined_amount_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(determined_amount_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; - let keysend_payment_hash = + let keysend_payment_id = node_a.spontaneous_payment().send(keysend_amount_msat, node_b.node_id()).unwrap(); expect_event!(node_a, PaymentSuccessful); let received_keysend_amount = match node_b.wait_next_event() { @@ -545,21 +545,12 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_keysend_amount, keysend_amount_msat); - assert_eq!(node_a.payment(&keysend_payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!( - node_a.payment(&keysend_payment_hash).unwrap().direction, - PaymentDirection::Outbound - ); - assert_eq!( - node_a.payment(&keysend_payment_hash).unwrap().amount_msat, - Some(keysend_amount_msat) - ); - assert_eq!(node_b.payment(&keysend_payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&keysend_payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!( - node_b.payment(&keysend_payment_hash).unwrap().amount_msat, - Some(keysend_amount_msat) - ); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); println!("\nB close_channel"); node_b.close_channel(&user_channel_id, node_a.node_id()).unwrap(); From 51e25803573f04aebd711155657b38b629b754c9 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 11 Mar 2024 10:34:49 +0100 Subject: [PATCH 31/89] Add test ensuring de/ser compatibility with old format .. 
as we changed quite a bit and moved required fields, we add a test here that
adds the old `PaymentDetails` as a test struct and ensures we're able to parse
them just fine.
---
 src/payment/store.rs | 158 ++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 157 insertions(+), 1 deletion(-)

diff --git a/src/payment/store.rs b/src/payment/store.rs
index 1a85152e2..8bcbf41e6 100644
--- a/src/payment/store.rs
+++ b/src/payment/store.rs
@@ -372,9 +372,36 @@ where
 #[cfg(test)]
 mod tests {
 	use super::*;
-	use lightning::util::test_utils::{TestLogger, TestStore};
+	use lightning::util::{
+		ser::Readable,
+		test_utils::{TestLogger, TestStore},
+	};
+	use std::io::Cursor;
 	use std::sync::Arc;

+	/// We refactored `PaymentDetails` to hold a payment id and moved some required fields into
+	/// `PaymentKind`. Here, we keep the old layout available in order to test de/ser compatibility.
+	#[derive(Clone, Debug, PartialEq, Eq)]
+	struct OldPaymentDetails {
+		pub hash: PaymentHash,
+		pub preimage: Option<PaymentPreimage>,
+		pub secret: Option<PaymentSecret>,
+		pub amount_msat: Option<u64>,
+		pub direction: PaymentDirection,
+		pub status: PaymentStatus,
+		pub lsp_fee_limits: Option<LSPFeeLimits>,
+	}
+
+	impl_writeable_tlv_based!(OldPaymentDetails, {
+		(0, hash, required),
+		(1, lsp_fee_limits, option),
+		(2, preimage, required),
+		(4, secret, required),
+		(6, amount_msat, required),
+		(8, direction, required),
+		(10, status, required)
+	});
+
 	#[test]
 	fn payment_info_is_persisted() {
 		let store: Arc<DynStore> = Arc::new(TestStore::new(false));
@@ -422,4 +449,133 @@ mod tests {
 		assert_eq!(PaymentStatus::Succeeded, payment_store.get(&id).unwrap().status);
 	}
+
+	#[test]
+	fn old_payment_details_deser_compat() {
+		// We refactored `PaymentDetails` to hold a payment id and moved some required fields into
+		// `PaymentKind`. Here, we test compatibility with the old layout.
+ let hash = PaymentHash([42u8; 32]); + let preimage = Some(PaymentPreimage([43u8; 32])); + let secret = Some(PaymentSecret([44u8; 32])); + let amount_msat = Some(45_000_000); + + // Test `Bolt11` de/ser + { + let old_bolt11_payment = OldPaymentDetails { + hash, + preimage, + secret, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + + let old_bolt11_encoded = old_bolt11_payment.encode(); + assert_eq!( + old_bolt11_payment, + OldPaymentDetails::read(&mut Cursor::new(old_bolt11_encoded.clone())).unwrap() + ); + + let bolt11_decoded = + PaymentDetails::read(&mut Cursor::new(old_bolt11_encoded)).unwrap(); + let bolt11_reencoded = bolt11_decoded.encode(); + assert_eq!( + bolt11_decoded, + PaymentDetails::read(&mut Cursor::new(bolt11_reencoded)).unwrap() + ); + + match bolt11_decoded.kind { + PaymentKind::Bolt11 { hash: h, preimage: p, secret: s } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + assert_eq!(secret, s); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + + // Test `Bolt11Jit` de/ser + { + let lsp_fee_limits = Some(LSPFeeLimits { + max_total_opening_fee_msat: Some(46_000), + max_proportional_opening_fee_ppm_msat: Some(47_000), + }); + + let old_bolt11_jit_payment = OldPaymentDetails { + hash, + preimage, + secret, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits, + }; + + let old_bolt11_jit_encoded = old_bolt11_jit_payment.encode(); + assert_eq!( + old_bolt11_jit_payment, + OldPaymentDetails::read(&mut Cursor::new(old_bolt11_jit_encoded.clone())).unwrap() + ); + + let bolt11_jit_decoded = + PaymentDetails::read(&mut Cursor::new(old_bolt11_jit_encoded)).unwrap(); + let bolt11_jit_reencoded = bolt11_jit_decoded.encode(); + assert_eq!( + bolt11_jit_decoded, + PaymentDetails::read(&mut Cursor::new(bolt11_jit_reencoded)).unwrap() + ); + + match bolt11_jit_decoded.kind { + PaymentKind::Bolt11Jit { hash: h, preimage: p, secret: s, lsp_fee_limits: l } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + assert_eq!(secret, s); + assert_eq!(lsp_fee_limits, Some(l)); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + + // Test `Spontaneous` de/ser + { + let old_spontaneous_payment = OldPaymentDetails { + hash, + preimage, + secret: None, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + + let old_spontaneous_encoded = old_spontaneous_payment.encode(); + assert_eq!( + old_spontaneous_payment, + OldPaymentDetails::read(&mut Cursor::new(old_spontaneous_encoded.clone())).unwrap() + ); + + let spontaneous_decoded = + PaymentDetails::read(&mut Cursor::new(old_spontaneous_encoded)).unwrap(); + let spontaneous_reencoded = spontaneous_decoded.encode(); + assert_eq!( + spontaneous_decoded, + PaymentDetails::read(&mut Cursor::new(spontaneous_reencoded)).unwrap() + ); + + match spontaneous_decoded.kind { + PaymentKind::Spontaneous { hash: h, preimage: p } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + } } From 1fab656f15f9dafa043aa0aed17ac56a625a6443 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 11 Mar 2024 16:11:48 +0100 Subject: [PATCH 32/89] Add `payment_id` to `Payment{Successful,Failed,Received}` events --- bindings/ldk_node.udl | 6 +++--- src/event.rs | 33 ++++++++++++++++++++++++++++++--- tests/common/mod.rs | 10 +++++----- tests/integration_tests_rust.rs | 4 ++-- 4 files changed, 40 insertions(+), 13 
deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 1d776782b..58fab0d52 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -186,9 +186,9 @@ enum BuildError { [Enum] interface Event { - PaymentSuccessful(PaymentHash payment_hash, u64? fee_paid_msat); - PaymentFailed(PaymentHash payment_hash, PaymentFailureReason? reason); - PaymentReceived(PaymentHash payment_hash, u64 amount_msat); + PaymentSuccessful(PaymentId? payment_id, PaymentHash payment_hash, u64? fee_paid_msat); + PaymentFailed(PaymentId? payment_id, PaymentHash payment_hash, PaymentFailureReason? reason); + PaymentReceived(PaymentId? payment_id, PaymentHash payment_hash, u64 amount_msat); ChannelPending(ChannelId channel_id, UserChannelId user_channel_id, ChannelId former_temporary_channel_id, PublicKey counterparty_node_id, OutPoint funding_txo); ChannelReady(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id); ChannelClosed(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id, ClosureReason? reason); diff --git a/src/event.rs b/src/event.rs index d4e3d57bc..78188452f 100644 --- a/src/event.rs +++ b/src/event.rs @@ -46,6 +46,10 @@ use std::time::Duration; pub enum Event { /// A sent payment was successful. PaymentSuccessful { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, /// The total fee which was spent at intermediate hops in this payment. @@ -53,6 +57,10 @@ pub enum Event { }, /// A sent payment has failed. PaymentFailed { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, /// The reason why the payment failed. @@ -62,6 +70,10 @@ pub enum Event { }, /// A payment has been received. PaymentReceived { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, /// The value, in thousandths of a satoshi, that has been received. 
@@ -110,13 +122,16 @@ impl_writeable_tlv_based_enum!(Event, (0, PaymentSuccessful) => { (0, payment_hash, required), (1, fee_paid_msat, option), + (3, payment_id, option), }, (1, PaymentFailed) => { (0, payment_hash, required), (1, reason, option), + (3, payment_id, option), }, (2, PaymentReceived) => { (0, payment_hash, required), + (1, payment_id, option), (2, amount_msat, required), }, (3, ChannelReady) => { @@ -635,7 +650,11 @@ where }; self.event_queue - .add_event(Event::PaymentReceived { payment_hash, amount_msat }) + .add_event(Event::PaymentReceived { + payment_id: Some(payment_id), + payment_hash, + amount_msat, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); @@ -683,7 +702,11 @@ where }); self.event_queue - .add_event(Event::PaymentSuccessful { payment_hash, fee_paid_msat }) + .add_event(Event::PaymentSuccessful { + payment_id: Some(payment_id), + payment_hash, + fee_paid_msat, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); @@ -706,7 +729,11 @@ where panic!("Failed to access payment store"); }); self.event_queue - .add_event(Event::PaymentFailed { payment_hash, reason }) + .add_event(Event::PaymentFailed { + payment_id: Some(payment_id), + payment_hash, + reason, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); diff --git a/tests/common/mod.rs b/tests/common/mod.rs index f8022639a..bcb47accb 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -82,11 +82,11 @@ pub(crate) use expect_channel_ready_event; macro_rules! expect_payment_received_event { ($node: expr, $amount_msat: expr) => {{ match $node.wait_next_event() { - ref e @ Event::PaymentReceived { payment_hash, amount_msat } => { + ref e @ Event::PaymentReceived { payment_id, amount_msat, .. } => { println!("{} got event {:?}", $node.node_id(), e); assert_eq!(amount_msat, $amount_msat); $node.event_handled(); - payment_hash + payment_id }, ref e => { panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); @@ -98,12 +98,12 @@ macro_rules! expect_payment_received_event { pub(crate) use expect_payment_received_event; macro_rules! expect_payment_successful_event { - ($node: expr, $payment_hash: expr, $fee_paid_msat: expr) => {{ + ($node: expr, $payment_id: expr, $fee_paid_msat: expr) => {{ match $node.wait_next_event() { - ref e @ Event::PaymentSuccessful { payment_hash, fee_paid_msat } => { + ref e @ Event::PaymentSuccessful { payment_id, fee_paid_msat, .. 
} => { println!("{} got event {:?}", $node.node_id(), e); assert_eq!(fee_paid_msat, $fee_paid_msat); - assert_eq!(payment_hash, $payment_hash); + assert_eq!(payment_id, $payment_id); $node.event_handled(); }, ref e => { diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index fa8986c71..1820ef76a 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -136,9 +136,9 @@ fn multi_hop_sending() { let invoice = nodes[4].bolt11_payment().receive(2_500_000, &"asdf", 9217).unwrap(); nodes[0].bolt11_payment().send(&invoice).unwrap(); - let payment_hash = expect_payment_received_event!(&nodes[4], 2_500_000); + let payment_id = expect_payment_received_event!(&nodes[4], 2_500_000); let fee_paid_msat = Some(2000); - expect_payment_successful_event!(nodes[0], payment_hash, fee_paid_msat); + expect_payment_successful_event!(nodes[0], payment_id, fee_paid_msat); } #[test] From c418c7d0cd1eaa15279568c91fb6c45707435963 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 20 May 2024 13:36:33 +0200 Subject: [PATCH 33/89] Update `uniffi_bindgen_generate_swift.sh` to include patch .. for `SystemConfiguration` --- scripts/uniffi_bindgen_generate_swift.sh | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/uniffi_bindgen_generate_swift.sh b/scripts/uniffi_bindgen_generate_swift.sh index 277d23209..ba2d84d2a 100755 --- a/scripts/uniffi_bindgen_generate_swift.sh +++ b/scripts/uniffi_bindgen_generate_swift.sh @@ -34,6 +34,10 @@ swiftc -module-name LDKNode -emit-library -o "$BINDINGS_DIR"/libldk_node.dylib - # Create xcframework from bindings Swift file and libs mkdir -p "$BINDINGS_DIR"/Sources/LDKNode || exit 1 + +# Patch LDKNode.swift with `SystemConfiguration` import. +sed -i '' '4s/^/import SystemConfiguration\n/' "$BINDINGS_DIR"/LDKNode.swift + mv "$BINDINGS_DIR"/LDKNode.swift "$BINDINGS_DIR"/Sources/LDKNode/LDKNode.swift || exit 1 cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Headers || exit 1 cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Headers || exit 1 @@ -41,6 +45,6 @@ cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/macos-arm cp target/aarch64-apple-ios/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/LDKNodeFFI || exit 1 cp target/lipo-ios-sim/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/LDKNodeFFI || exit 1 cp target/lipo-macos/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/LDKNodeFFI || exit 1 -# rm "$BINDINGS_DIR"/LDKNodeFFI.h || exit 1 -# rm "$BINDINGS_DIR"/LDKNodeFFI.modulemap || exit 1 +rm "$BINDINGS_DIR"/LDKNodeFFI.h || exit 1 +rm "$BINDINGS_DIR"/LDKNodeFFI.modulemap || exit 1 echo finished successfully! 
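A minimal caller-side sketch of the `PaymentId`-based flow introduced by the
payment-store patches above (PATCH 30-32): the method names and types used here
(`bolt11_payment().send`, `Node::payment`, `PaymentDetails`, `PaymentKind`) are
taken from those diffs, while the `node` and `invoice` bindings and the enclosing
error handling are assumptions for illustration only, not part of the patch series.

    // Sending a BOLT11 payment now returns a PaymentId instead of a PaymentHash.
    let payment_id = node.bolt11_payment().send(&invoice)?;

    // That id is what the payment store is keyed by from here on.
    if let Some(details) = node.payment(&payment_id) {
        match details.kind {
            PaymentKind::Bolt11 { hash, preimage, secret: _ } => {
                println!(
                    "BOLT11 payment {:?} (preimage known: {}) has status {:?}",
                    hash,
                    preimage.is_some(),
                    details.status
                );
            },
            other => println!("Other payment kind: {:?}", other),
        }
    }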
From 6dec002724caf32dd863763b5665f4a4c499e517 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 20 May 2024 13:01:54 +0200 Subject: [PATCH 34/89] Fix Swift builds --- .../swift/LDKNodeFFI.xcframework/Info.plist | 6 ++++++ .../ios-arm64/LDKNodeFFI.framework/Info.plist | 18 ++++++++++++++++++ .../LDKNodeFFI.framework/Info.plist | 18 ++++++++++++++++++ .../LDKNodeFFI.framework/Info.plist | 18 ++++++++++++++++++ 4 files changed, 60 insertions(+) create mode 100644 bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist create mode 100644 bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist create mode 100644 bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist diff --git a/bindings/swift/LDKNodeFFI.xcframework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/Info.plist index ee11d9a89..9b1d666c7 100644 --- a/bindings/swift/LDKNodeFFI.xcframework/Info.plist +++ b/bindings/swift/LDKNodeFFI.xcframework/Info.plist @@ -16,6 +16,8 @@ SupportedPlatform macos + LSMinimumSystemVersion + 12.0 LibraryIdentifier @@ -31,6 +33,8 @@ ios SupportedPlatformVariant simulator + MinimumOSVersion + 15.0 LibraryIdentifier @@ -43,6 +47,8 @@ SupportedPlatform ios + MinimumOSVersion + 15.0 CFBundlePackageType diff --git a/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..8d0bf0f09 --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + MinimumOSVersion + 100.0 + + diff --git a/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..92e0f395d --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + MinimumOSVersion + 15.0 + + diff --git a/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..d3536e8b1 --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + LSMinimumSystemVersion + 12.0 + + From d912a9994d59d49947cc0ca64b81df38202805ed Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 21 May 2024 11:07:18 +0200 Subject: [PATCH 35/89] Update CHANGELOG for v0.2.2 --- CHANGELOG.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e92a7c9e8..e2fb6f1c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ +# 0.2.2 - May 21, 2024 + +This is a bugfix release that reestablishes compatibility of Swift packages +with 
Xcode 15.3 and later. + +## Bug Fixes + +- Swift bindings can now be built using Xcode 15.3 and later again (#294) + +In total, this release features 5 files changed, 66 insertions, 2 deletions +deletions in 2 commits from 1 author, in alphabetical order: + +- Elias Rohrer + # 0.2.1 - Jan 26, 2024 This is a bugfix release bumping the used LDK and BDK dependencies to the From 278a849bacb864a8789e62454c7d1bae78a8192c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 12 Mar 2024 16:14:33 +0100 Subject: [PATCH 36/89] Update `OffersMessageHandler` and `MessageRouter` types --- src/builder.rs | 2 +- src/types.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 6d3db420f..386deb418 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -785,7 +785,7 @@ fn build_with_store_internal( Arc::clone(&logger), Arc::clone(&channel_manager), Arc::new(message_router), - IgnoringMessageHandler {}, + Arc::clone(&channel_manager), IgnoringMessageHandler {}, )); let ephemeral_bytes: [u8; 32] = keys_manager.get_secure_random_bytes(); diff --git a/src/types.rs b/src/types.rs index 68ed36361..14d8adf76 100644 --- a/src/types.rs +++ b/src/types.rs @@ -115,7 +115,7 @@ pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMesse Arc, Arc, Arc, - IgnoringMessageHandler, + Arc, IgnoringMessageHandler, >; From 7c9216ea072d37351169d87d9a3d7f54251628c3 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 21 Feb 2024 11:08:44 +0100 Subject: [PATCH 37/89] Handle `ConnectionNeeded` events We spawn a background task that will try to connect to any of the provided socket addresses and return as soon as it suceeds. --- src/event.rs | 40 ++++++++++++++++++++++++++++++++++++---- src/lib.rs | 1 + 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/src/event.rs b/src/event.rs index 78188452f..34f36c456 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1,8 +1,11 @@ use crate::types::{DynStore, Sweeper, Wallet}; + use crate::{ hex_utils, ChannelManager, Config, Error, NetworkGraph, PeerInfo, PeerStore, UserChannelId, }; +use crate::connection::ConnectionManager; + use crate::payment::store::{ PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, @@ -315,6 +318,7 @@ where event_queue: Arc>, wallet: Arc, channel_manager: Arc, + connection_manager: Arc>, output_sweeper: Arc, network_graph: Arc, payment_store: Arc>, @@ -330,14 +334,16 @@ where { pub fn new( event_queue: Arc>, wallet: Arc, channel_manager: Arc, - output_sweeper: Arc, network_graph: Arc, - payment_store: Arc>, peer_store: Arc>, - runtime: Arc>>, logger: L, config: Arc, + connection_manager: Arc>, output_sweeper: Arc, + network_graph: Arc, payment_store: Arc>, + peer_store: Arc>, runtime: Arc>>, + logger: L, config: Arc, ) -> Self { Self { event_queue, wallet, channel_manager, + connection_manager, output_sweeper, network_graph, payment_store, @@ -978,7 +984,33 @@ where LdkEvent::HTLCIntercepted { .. } => {}, LdkEvent::BumpTransaction(_) => {}, LdkEvent::InvoiceRequestFailed { .. } => {}, - LdkEvent::ConnectionNeeded { .. 
} => {}, + LdkEvent::ConnectionNeeded { node_id, addresses } => { + let runtime_lock = self.runtime.read().unwrap(); + debug_assert!(runtime_lock.is_some()); + + if let Some(runtime) = runtime_lock.as_ref() { + let spawn_logger = self.logger.clone(); + let spawn_cm = Arc::clone(&self.connection_manager); + runtime.spawn(async move { + for addr in &addresses { + match spawn_cm.connect_peer_if_necessary(node_id, addr.clone()).await { + Ok(()) => { + return; + }, + Err(e) => { + log_error!( + spawn_logger, + "Failed to establish connection to peer {}@{}: {}", + node_id, + addr, + e + ); + }, + } + } + }); + } + }, } } } diff --git a/src/lib.rs b/src/lib.rs index 3d619cebb..a93b53ef3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -621,6 +621,7 @@ impl Node { Arc::clone(&self.event_queue), Arc::clone(&self.wallet), Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), Arc::clone(&self.output_sweeper), Arc::clone(&self.network_graph), Arc::clone(&self.payment_store), From 2d70e65eb7d39e56bf5b2ddcade8b45def26c284 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 8 Mar 2024 12:15:31 +0100 Subject: [PATCH 38/89] Add `Bolt12` variant to `PaymentKind` --- bindings/ldk_node.udl | 6 ++++ src/error.rs | 3 ++ src/payment/store.rs | 73 ++++++++++++++++++++++++++++++++++++++++--- src/uniffi_types.rs | 19 +++++++++++ 4 files changed, 97 insertions(+), 4 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 58fab0d52..eabfd56d1 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -139,6 +139,7 @@ enum NodeError { "InvalidSocketAddress", "InvalidPublicKey", "InvalidSecretKey", + "InvalidOfferId", "InvalidPaymentId", "InvalidPaymentHash", "InvalidPaymentPreimage", @@ -225,6 +226,8 @@ interface PaymentKind { Onchain(); Bolt11(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret); Bolt11Jit(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret, LSPFeeLimits lsp_fee_limits); + Bolt12Offer(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret, OfferId offer_id); + Bolt12Refund(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret); Spontaneous(PaymentHash hash, PaymentPreimage? preimage); }; @@ -371,6 +374,9 @@ typedef string Address; [Custom] typedef string Bolt11Invoice; +[Custom] +typedef string OfferId; + [Custom] typedef string PaymentId; diff --git a/src/error.rs b/src/error.rs index 5acc75af8..f9f95a8be 100644 --- a/src/error.rs +++ b/src/error.rs @@ -47,6 +47,8 @@ pub enum Error { InvalidPublicKey, /// The given secret key is invalid. InvalidSecretKey, + /// The given offer id is invalid. + InvalidOfferId, /// The given payment id is invalid. InvalidPaymentId, /// The given payment hash is invalid. 
@@ -102,6 +104,7 @@ impl fmt::Display for Error { Self::InvalidSocketAddress => write!(f, "The given network address is invalid."), Self::InvalidPublicKey => write!(f, "The given public key is invalid."), Self::InvalidSecretKey => write!(f, "The given secret key is invalid."), + Self::InvalidOfferId => write!(f, "The given offer id is invalid."), Self::InvalidPaymentId => write!(f, "The given payment id is invalid."), Self::InvalidPaymentHash => write!(f, "The given payment hash is invalid."), Self::InvalidPaymentPreimage => write!(f, "The given payment preimage is invalid."), diff --git a/src/payment/store.rs b/src/payment/store.rs index 8bcbf41e6..c495620c8 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -9,6 +9,7 @@ use crate::Error; use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::DecodeError; use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; +use lightning::offers::offer::OfferId; use lightning::util::ser::{Readable, Writeable}; use lightning::{ _init_and_read_len_prefixed_tlv_fields, impl_writeable_tlv_based, @@ -145,7 +146,6 @@ pub enum PaymentKind { /// A [BOLT 11] payment. /// /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md - // TODO: Bolt11 { invoice: Option }, Bolt11 { /// The payment hash, i.e., the hash of the `preimage`. hash: PaymentHash, @@ -158,7 +158,6 @@ pub enum PaymentKind { /// /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md /// [LSPS 2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md - // TODO: Bolt11Jit { invoice: Option }, Bolt11Jit { /// The payment hash, i.e., the hash of the `preimage`. hash: PaymentHash, @@ -176,6 +175,32 @@ pub enum PaymentKind { /// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs lsp_fee_limits: LSPFeeLimits, }, + /// A [BOLT 12] 'offer' payment, i.e., a payment for an [`Offer`]. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + /// [`Offer`]: crate::lightning::offers::offer::Offer + Bolt12Offer { + /// The payment hash, i.e., the hash of the `preimage`. + hash: Option, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + /// The ID of the offer this payment is for. + offer_id: OfferId, + }, + /// A [BOLT 12] 'refund' payment, i.e., a payment for a [`Refund`]. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + /// [`Refund`]: lightning::offers::refund::Refund + Bolt12Refund { + /// The payment hash, i.e., the hash of the `preimage`. + hash: Option, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + }, /// A spontaneous ("keysend") payment. Spontaneous { /// The payment hash, i.e., the hash of the `preimage`. 
@@ -198,9 +223,20 @@ impl_writeable_tlv_based_enum!(PaymentKind, (4, secret, option), (6, lsp_fee_limits, required), }, + (6, Bolt12Offer) => { + (0, hash, option), + (2, preimage, option), + (4, secret, option), + (6, offer_id, required), + }, (8, Spontaneous) => { (0, hash, required), (2, preimage, option), + }, + (10, Bolt12Refund) => { + (0, hash, option), + (2, preimage, option), + (4, secret, option), }; ); @@ -227,6 +263,7 @@ impl_writeable_tlv_based!(LSPFeeLimits, { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) struct PaymentDetailsUpdate { pub id: PaymentId, + pub hash: Option>, pub preimage: Option>, pub secret: Option>, pub amount_msat: Option>, @@ -236,7 +273,15 @@ pub(crate) struct PaymentDetailsUpdate { impl PaymentDetailsUpdate { pub fn new(id: PaymentId) -> Self { - Self { id, preimage: None, secret: None, amount_msat: None, direction: None, status: None } + Self { + id, + hash: None, + preimage: None, + secret: None, + amount_msat: None, + direction: None, + status: None, + } } } @@ -299,10 +344,29 @@ where let mut locked_payments = self.payments.lock().unwrap(); if let Some(payment) = locked_payments.get_mut(&update.id) { + if let Some(hash_opt) = update.hash { + match payment.kind { + PaymentKind::Bolt12Offer { ref mut hash, .. } => { + debug_assert_eq!(payment.direction, PaymentDirection::Outbound, + "We should only ever override payment hash for outbound BOLT 12 payments"); + *hash = hash_opt + }, + PaymentKind::Bolt12Refund { ref mut hash, .. } => { + debug_assert_eq!(payment.direction, PaymentDirection::Outbound, + "We should only ever override payment hash for outbound BOLT 12 payments"); + *hash = hash_opt + }, + _ => { + // We can omit updating the hash for BOLT11 payments as the payment has will always be known from the beginning. + }, + } + } if let Some(preimage_opt) = update.preimage { match payment.kind { PaymentKind::Bolt11 { ref mut preimage, .. } => *preimage = preimage_opt, PaymentKind::Bolt11Jit { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt12Offer { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt12Refund { ref mut preimage, .. } => *preimage = preimage_opt, PaymentKind::Spontaneous { ref mut preimage, .. } => *preimage = preimage_opt, _ => {}, } @@ -312,6 +376,8 @@ where match payment.kind { PaymentKind::Bolt11 { ref mut secret, .. } => *secret = secret_opt, PaymentKind::Bolt11Jit { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt12Offer { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt12Refund { ref mut secret, .. 
} => *secret = secret_opt, _ => {}, } } @@ -327,7 +393,6 @@ where self.persist_info(&update.id, payment)?; updated = true; } - Ok(updated) } diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index e979b5ce9..711d5b355 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -2,6 +2,7 @@ pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, Pay pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; +pub use lightning::offers::offer::OfferId; pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; @@ -75,6 +76,24 @@ impl UniffiCustomTypeConverter for Bolt11Invoice { } } +impl UniffiCustomTypeConverter for OfferId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + let bytes_res = bytes_vec.try_into(); + if let Ok(bytes) = bytes_res { + return Ok(OfferId(bytes)); + } + } + Err(Error::InvalidOfferId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.0) + } +} + impl UniffiCustomTypeConverter for PaymentId { type Builtin = String; From 9a3f1696986af06fd2d0fec0d123a4130edf205c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 5 Mar 2024 15:33:05 +0100 Subject: [PATCH 39/89] Introduce `Bolt12Payment` API --- bindings/ldk_node.udl | 19 +++ src/error.rs | 14 +++ src/lib.rs | 28 ++++- src/payment/bolt12.rs | 267 ++++++++++++++++++++++++++++++++++++++++++ src/payment/mod.rs | 2 + src/uniffi_types.rs | 14 ++- 6 files changed, 342 insertions(+), 2 deletions(-) create mode 100644 src/payment/bolt12.rs diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index eabfd56d1..dd09cf88a 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -54,6 +54,7 @@ interface Node { PublicKey node_id(); sequence? listening_addresses(); Bolt11Payment bolt11_payment(); + Bolt12Payment bolt12_payment(); SpontaneousPayment spontaneous_payment(); OnchainPayment onchain_payment(); [Throws=NodeError] @@ -99,6 +100,17 @@ interface Bolt11Payment { Bolt11Invoice receive_variable_amount_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? max_proportional_lsp_fee_limit_ppm_msat); }; +interface Bolt12Payment { + [Throws=NodeError] + PaymentId send([ByRef]Offer offer, string? payer_note); + [Throws=NodeError] + PaymentId send_using_amount([ByRef]Offer offer, string? 
payer_note, u64 amount_msat);
+ [Throws=NodeError]
+ Offer receive(u64 amount_msat, [ByRef]string description);
+ [Throws=NodeError]
+ Offer receive_variable_amount([ByRef]string description);
+};
+
 interface SpontaneousPayment {
 [Throws=NodeError]
 PaymentId send(u64 amount_msat, PublicKey node_id);
@@ -122,6 +134,8 @@ enum NodeError {
 "OnchainTxCreationFailed",
 "ConnectionFailed",
 "InvoiceCreationFailed",
+ "InvoiceRequestCreationFailed",
+ "OfferCreationFailed",
 "PaymentSendingFailed",
 "ProbeSendingFailed",
 "ChannelCreationFailed",
@@ -146,9 +160,11 @@ enum NodeError {
 "InvalidPaymentSecret",
 "InvalidAmount",
 "InvalidInvoice",
+ "InvalidOffer",
 "InvalidChannelId",
 "InvalidNetwork",
 "DuplicatePayment",
+ "UnsupportedCurrency",
 "InsufficientFunds",
 "LiquiditySourceUnavailable",
 "LiquidityFeeTooHigh",
@@ -374,6 +390,9 @@ typedef string Address;
 [Custom]
 typedef string Bolt11Invoice;
+[Custom]
+typedef string Offer;
+
 [Custom]
 typedef string OfferId;
diff --git a/src/error.rs b/src/error.rs
index f9f95a8be..b51941012 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -13,6 +13,10 @@ pub enum Error {
 ConnectionFailed,
 /// Invoice creation failed.
 InvoiceCreationFailed,
+ /// Invoice request creation failed.
+ InvoiceRequestCreationFailed,
+ /// Offer creation failed.
+ OfferCreationFailed,
 /// Sending a payment has failed.
 PaymentSendingFailed,
 /// Sending a payment probe has failed.
@@ -61,12 +65,16 @@ pub enum Error {
 InvalidAmount,
 /// The given invoice is invalid.
 InvalidInvoice,
+ /// The given offer is invalid.
+ InvalidOffer,
 /// The given channel ID is invalid.
 InvalidChannelId,
 /// The given network is invalid.
 InvalidNetwork,
 /// A payment with the given hash has already been initiated.
 DuplicatePayment,
+ /// The provided offer was denominated in an unsupported currency.
+ UnsupportedCurrency,
 /// The available funds are insufficient to complete the given operation.
 InsufficientFunds,
 /// The given operation failed due to the required liquidity source being unavailable.
@@ -85,6 +93,8 @@ impl fmt::Display for Error {
 },
 Self::ConnectionFailed => write!(f, "Network connection closed."),
 Self::InvoiceCreationFailed => write!(f, "Failed to create invoice."),
+ Self::InvoiceRequestCreationFailed => write!(f, "Failed to create invoice request."),
+ Self::OfferCreationFailed => write!(f, "Failed to create offer."),
 Self::PaymentSendingFailed => write!(f, "Failed to send the given payment."),
 Self::ProbeSendingFailed => write!(f, "Failed to send the given payment probe."),
 Self::ChannelCreationFailed => write!(f, "Failed to create channel."),
@@ -111,6 +121,7 @@ impl fmt::Display for Error {
 Self::InvalidPaymentSecret => write!(f, "The given payment secret is invalid."),
 Self::InvalidAmount => write!(f, "The given amount is invalid."),
 Self::InvalidInvoice => write!(f, "The given invoice is invalid."),
+ Self::InvalidOffer => write!(f, "The given offer is invalid."),
 Self::InvalidChannelId => write!(f, "The given channel ID is invalid."),
 Self::InvalidNetwork => write!(f, "The given network is invalid."),
 Self::DuplicatePayment => {
@@ -119,6 +130,9 @@ impl fmt::Display for Error {
 Self::InsufficientFunds => {
 write!(f, "The available funds are insufficient to complete the given operation.")
 },
+ Self::UnsupportedCurrency => {
+ write!(f, "The provided offer was denominated in an unsupported currency.")
+ },
 Self::LiquiditySourceUnavailable => {
 write!(f, "The given operation failed due to the required liquidity source being unavailable.")
 },
diff --git a/src/lib.rs b/src/lib.rs
index a93b53ef3..4e1e012e2 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -130,7 +130,7 @@ use event::{EventHandler, EventQueue};
 use gossip::GossipSource;
 use liquidity::LiquiditySource;
 use payment::store::PaymentStore;
-use payment::{Bolt11Payment, OnchainPayment, PaymentDetails, SpontaneousPayment};
+use payment::{Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, SpontaneousPayment};
 use peer_store::{PeerInfo, PeerStore};
 use types::{
 Broadcaster, ChainMonitor, ChannelManager, DynStore, FeeEstimator, KeysManager, NetworkGraph,
@@ -846,6 +846,32 @@ impl Node {
 ))
 }
+ /// Returns a payment handler allowing to create and pay [BOLT 12] offers and refunds.
+ ///
+ /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md
+ #[cfg(not(feature = "uniffi"))]
+ pub fn bolt12_payment(&self) -> Arc {
+ Arc::new(Bolt12Payment::new(
+ Arc::clone(&self.runtime),
+ Arc::clone(&self.channel_manager),
+ Arc::clone(&self.payment_store),
+ Arc::clone(&self.logger),
+ ))
+ }
+
+ /// Returns a payment handler allowing to create and pay [BOLT 12] offers and refunds.
+ ///
+ /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md
+ #[cfg(feature = "uniffi")]
+ pub fn bolt12_payment(&self) -> Arc {
+ Arc::new(Bolt12Payment::new(
+ Arc::clone(&self.runtime),
+ Arc::clone(&self.channel_manager),
+ Arc::clone(&self.payment_store),
+ Arc::clone(&self.logger),
+ ))
+ }
+
 /// Returns a payment handler allowing to send spontaneous ("keysend") payments.
 #[cfg(not(feature = "uniffi"))]
 pub fn spontaneous_payment(&self) -> SpontaneousPayment {
diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs
new file mode 100644
index 000000000..ffcaa4c5d
--- /dev/null
+++ b/src/payment/bolt12.rs
@@ -0,0 +1,267 @@
+//! Holds a payment handler allowing to create and pay [BOLT 12] offers and refunds.
+//!
+//! 
[BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + +use crate::config::LDK_PAYMENT_RETRY_TIMEOUT; +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment::store::{ + PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, +}; +use crate::types::ChannelManager; + +use lightning::ln::channelmanager::{PaymentId, Retry}; +use lightning::offers::offer::{Amount, Offer}; +use lightning::offers::parse::Bolt12SemanticError; + +use rand::RngCore; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to create and pay [BOLT 12] offers and refunds. +/// +/// Should be retrieved by calling [`Node::bolt12_payment`]. +/// +/// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md +/// [`Node::bolt12_payment`]: crate::Node::bolt12_payment +pub struct Bolt12Payment { + runtime: Arc>>, + channel_manager: Arc, + payment_store: Arc>>, + logger: Arc, +} + +impl Bolt12Payment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, + payment_store: Arc>>, logger: Arc, + ) -> Self { + Self { runtime, channel_manager, payment_store, logger } + } + + /// Send a payment given an offer. + /// + /// If `payer_note` is `Some` it will be seen by the recipient and reflected back in the invoice + /// response. + pub fn send(&self, offer: &Offer, payer_note: Option) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let quantity = None; + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let offer_amount_msat = match offer.amount() { + Some(Amount::Bitcoin { amount_msats }) => amount_msats, + Some(_) => { + log_error!(self.logger, "Failed to send payment as the provided offer was denominated in an unsupported currency."); + return Err(Error::UnsupportedCurrency); + }, + None => { + log_error!(self.logger, "Failed to send payment due to the given offer being \"zero-amount\". 
Please use send_using_amount instead."); + return Err(Error::InvalidOffer); + }, + }; + + match self.channel_manager.pay_for_offer( + &offer, + quantity, + None, + payer_note, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) { + Ok(()) => { + let payee_pubkey = offer.signing_pubkey(); + log_info!( + self.logger, + "Initiated sending {}msat to {:?}", + offer_amount_msat, + payee_pubkey + ); + + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails { + id: payment_id, + kind, + amount_msat: Some(*offer_amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Pending, + }; + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send invoice request: {:?}", e); + match e { + Bolt12SemanticError::DuplicatePaymentId => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails { + id: payment_id, + kind, + amount_msat: Some(*offer_amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Failed, + }; + self.payment_store.insert(payment)?; + Err(Error::InvoiceRequestCreationFailed) + }, + } + }, + } + } + + /// Send a payment given an offer and an amount in millisatoshi. + /// + /// This will fail if the amount given is less than the value required by the given offer. + /// + /// This can be used to pay a so-called "zero-amount" offers, i.e., an offer that leaves the + /// amount paid to be determined by the user. + /// + /// If `payer_note` is `Some` it will be seen by the recipient and reflected back in the invoice + /// response. 
+ pub fn send_using_amount( + &self, offer: &Offer, payer_note: Option, amount_msat: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let quantity = None; + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let offer_amount_msat = match offer.amount() { + Some(Amount::Bitcoin { amount_msats }) => *amount_msats, + Some(_) => { + log_error!(self.logger, "Failed to send payment as the provided offer was denominated in an unsupported currency."); + return Err(Error::UnsupportedCurrency); + }, + None => amount_msat, + }; + + if amount_msat < offer_amount_msat { + log_error!( + self.logger, + "Failed to pay as the given amount needs to be at least the offer amount: required {}msat, gave {}msat.", offer_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + + match self.channel_manager.pay_for_offer( + &offer, + quantity, + Some(amount_msat), + payer_note, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) { + Ok(()) => { + let payee_pubkey = offer.signing_pubkey(); + log_info!( + self.logger, + "Initiated sending {}msat to {:?}", + amount_msat, + payee_pubkey + ); + + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails { + id: payment_id, + kind, + amount_msat: Some(amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Pending, + }; + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + match e { + Bolt12SemanticError::DuplicatePaymentId => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails { + id: payment_id, + kind, + amount_msat: Some(amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Failed, + }; + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Returns a payable offer that can be used to request and receive a payment of the amount + /// given. + pub fn receive(&self, amount_msat: u64, description: &str) -> Result { + let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; + let offer = offer_builder + .amount_msats(amount_msat) + .description(description.to_string()) + .build() + .map_err(|e| { + log_error!(self.logger, "Failed to create offer: {:?}", e); + Error::OfferCreationFailed + })?; + + Ok(offer) + } + + /// Returns a payable offer that can be used to request and receive a payment for which the + /// amount is to be determined by the user, also known as a "zero-amount" offer. 
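+ ///
+ /// Since such an offer carries no amount itself, the payer determines the amount to pay, e.g.,
+ /// via [`Bolt12Payment::send_using_amount`].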
+ pub fn receive_variable_amount(&self, description: &str) -> Result { + let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; + let offer = offer_builder.description(description.to_string()).build().map_err(|e| { + log_error!(self.logger, "Failed to create offer: {:?}", e); + Error::OfferCreationFailed + })?; + + Ok(offer) + } +} diff --git a/src/payment/mod.rs b/src/payment/mod.rs index 3649f1fcc..1862bf2df 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -1,11 +1,13 @@ //! Objects for different types of payments. mod bolt11; +mod bolt12; mod onchain; mod spontaneous; pub(crate) mod store; pub use bolt11::Bolt11Payment; +pub use bolt12::Bolt12Payment; pub use onchain::OnchainPayment; pub use spontaneous::SpontaneousPayment; pub use store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus}; diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 711d5b355..ef22e61ec 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -2,7 +2,7 @@ pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, Pay pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; -pub use lightning::offers::offer::OfferId; +pub use lightning::offers::offer::{Offer, OfferId}; pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; @@ -76,6 +76,18 @@ impl UniffiCustomTypeConverter for Bolt11Invoice { } } +impl UniffiCustomTypeConverter for Offer { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + Offer::from_str(&val).map_err(|_| Error::InvalidOffer.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + impl UniffiCustomTypeConverter for OfferId { type Builtin = String; From 8a67d7e16adc29e17c9af9cabd04ab469ca5f029 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 12 Mar 2024 10:25:42 +0100 Subject: [PATCH 40/89] Handle `InvoiceRequestFailed` event --- src/event.rs | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/src/event.rs b/src/event.rs index 34f36c456..ca28e27e6 100644 --- a/src/event.rs +++ b/src/event.rs @@ -983,7 +983,22 @@ where LdkEvent::DiscardFunding { .. } => {}, LdkEvent::HTLCIntercepted { .. } => {}, LdkEvent::BumpTransaction(_) => {}, - LdkEvent::InvoiceRequestFailed { .. } => {}, + LdkEvent::InvoiceRequestFailed { payment_id } => { + log_error!( + self.logger, + "Failed to request invoice for outbound BOLT12 payment {}", + payment_id + ); + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + return; + }, LdkEvent::ConnectionNeeded { node_id, addresses } => { let runtime_lock = self.runtime.read().unwrap(); debug_assert!(runtime_lock.is_some()); From 962476ab4bfd0ac1f7cbc1d9473bd5038b3c3855 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 12 Mar 2024 11:23:10 +0100 Subject: [PATCH 41/89] Update the payment store in event handling .. 
depending on the payment purpose, we derive the inbound payment ID and update the status in `PaymentClaimable`/`PaymentClaimed` --- src/event.rs | 244 +++++++++++++++++++++++++------------------ src/payment/store.rs | 3 +- 2 files changed, 142 insertions(+), 105 deletions(-) diff --git a/src/event.rs b/src/event.rs index ca28e27e6..b49fc96e8 100644 --- a/src/event.rs +++ b/src/event.rs @@ -435,7 +435,9 @@ where } => { let payment_id = PaymentId(payment_hash.0); if let Some(info) = self.payment_store.get(&payment_id) { - if info.status == PaymentStatus::Succeeded { + if info.status == PaymentStatus::Succeeded + || matches!(info.kind, PaymentKind::Spontaneous { .. }) + { log_info!( self.logger, "Refused duplicate inbound payment from payment hash {} of {}msat", @@ -483,6 +485,7 @@ where self.channel_manager.fail_htlc_backwards(&payment_hash); let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_id) }; @@ -501,126 +504,56 @@ where amount_msat, ); let payment_preimage = match purpose { - PaymentPurpose::Bolt11InvoicePayment { payment_preimage, payment_secret } => { - if payment_preimage.is_some() { - payment_preimage - } else { - self.channel_manager - .get_payment_preimage(payment_hash, payment_secret) - .ok() - } - }, - PaymentPurpose::Bolt12OfferPayment { .. } => { - // TODO: support BOLT12. - log_error!( - self.logger, - "Failed to claim unsupported BOLT12 payment with hash: {}", - payment_hash - ); - self.channel_manager.fail_htlc_backwards(&payment_hash); - return; + PaymentPurpose::Bolt11InvoicePayment { payment_preimage, .. } => { + payment_preimage }, - PaymentPurpose::Bolt12RefundPayment { .. } => { - // TODO: support BOLT12. - log_error!( - self.logger, - "Failed to claim unsupported BOLT12 payment with hash: {}", - payment_hash - ); - self.channel_manager.fail_htlc_backwards(&payment_hash); - return; - }, - PaymentPurpose::SpontaneousPayment(preimage) => Some(preimage), - }; - - if let Some(preimage) = payment_preimage { - self.channel_manager.claim_funds(preimage); - } else { - log_error!( - self.logger, - "Failed to claim payment with hash {}: preimage unknown.", - hex_utils::to_string(&payment_hash.0), - ); - self.channel_manager.fail_htlc_backwards(&payment_hash); - - let update = PaymentDetailsUpdate { - status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_id) - }; - self.payment_store.update(&update).unwrap_or_else(|e| { - log_error!(self.logger, "Failed to access payment store: {}", e); - panic!("Failed to access payment store"); - }); - } - }, - LdkEvent::PaymentClaimed { - payment_hash, - purpose, - amount_msat, - receiver_node_id: _, - htlcs: _, - sender_intended_total_msat: _, - } => { - log_info!( - self.logger, - "Claimed payment from payment hash {} of {}msat.", - hex_utils::to_string(&payment_hash.0), - amount_msat, - ); - let payment_id = PaymentId(payment_hash.0); - match purpose { - PaymentPurpose::Bolt11InvoicePayment { + PaymentPurpose::Bolt12OfferPayment { payment_preimage, payment_secret, + payment_context, .. 
} => { - let update = PaymentDetailsUpdate { - preimage: Some(payment_preimage), - secret: Some(Some(payment_secret)), - amount_msat: Some(Some(amount_msat)), - status: Some(PaymentStatus::Succeeded), - ..PaymentDetailsUpdate::new(payment_id) + let offer_id = payment_context.offer_id; + let payment = PaymentDetails { + id: payment_id, + kind: PaymentKind::Bolt12Offer { + hash: Some(payment_hash), + preimage: payment_preimage, + secret: Some(payment_secret), + offer_id, + }, + amount_msat: Some(amount_msat), + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, }; - match self.payment_store.update(&update) { - Ok(true) => (), - Ok(false) => { + + match self.payment_store.insert(payment) { + Ok(false) => (), + Ok(true) => { log_error!( self.logger, - "Payment with hash {} couldn't be found in store", - hex_utils::to_string(&payment_hash.0) + "Bolt12OfferPayment with ID {} was previously known", + payment_id, ); debug_assert!(false); }, Err(e) => { log_error!( self.logger, - "Failed to update payment with hash {}: {}", - hex_utils::to_string(&payment_hash.0), + "Failed to insert payment with ID {}: {}", + payment_id, e ); debug_assert!(false); }, } + payment_preimage }, - PaymentPurpose::Bolt12OfferPayment { .. } => { - // TODO: support BOLT12. - log_error!( - self.logger, - "Failed to claim unsupported BOLT12 payment with hash: {}", - payment_hash - ); - return; - }, - PaymentPurpose::Bolt12RefundPayment { .. } => { - // TODO: support BOLT12. - log_error!( - self.logger, - "Failed to claim unsupported BOLT12 payment with hash: {}", - payment_hash - ); - return; + PaymentPurpose::Bolt12RefundPayment { payment_preimage, .. } => { + payment_preimage }, PaymentPurpose::SpontaneousPayment(preimage) => { + // Since it's spontaneous, we insert it now into our store. 
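+ // Unlike invoice-based payments, no store entry was created up front for a spontaneous
+ // payment, so we add one with status `Pending` here and let the `PaymentClaimed` handler
+ // mark it `Succeeded` once the funds have actually been claimed.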
let payment = PaymentDetails { id: payment_id, kind: PaymentKind::Spontaneous { @@ -629,7 +562,7 @@ where }, amount_msat: Some(amount_msat), direction: PaymentDirection::Inbound, - status: PaymentStatus::Succeeded, + status: PaymentStatus::Pending, }; match self.payment_store.insert(payment) { @@ -637,24 +570,125 @@ where Ok(true) => { log_error!( self.logger, - "Spontaneous payment with hash {} was previously known", - hex_utils::to_string(&payment_hash.0) + "Spontaneous payment with ID {} was previously known", + payment_id, ); debug_assert!(false); }, Err(e) => { log_error!( self.logger, - "Failed to insert payment with hash {}: {}", - hex_utils::to_string(&payment_hash.0), + "Failed to insert payment with ID {}: {}", + payment_id, e ); debug_assert!(false); }, } + + Some(preimage) }, }; + if let Some(preimage) = payment_preimage { + self.channel_manager.claim_funds(preimage); + } else { + log_error!( + self.logger, + "Failed to claim payment with ID {}: preimage unknown.", + payment_id, + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + + let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + } + }, + LdkEvent::PaymentClaimed { + payment_hash, + purpose, + amount_msat, + receiver_node_id: _, + htlcs: _, + sender_intended_total_msat: _, + } => { + let payment_id = PaymentId(payment_hash.0); + log_info!( + self.logger, + "Claimed payment with ID {} from payment hash {} of {}msat.", + payment_id, + hex_utils::to_string(&payment_hash.0), + amount_msat, + ); + + let update = match purpose { + PaymentPurpose::Bolt11InvoicePayment { + payment_preimage, + payment_secret, + .. + } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::Bolt12OfferPayment { + payment_preimage, payment_secret, .. + } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::Bolt12RefundPayment { + payment_preimage, + payment_secret, + .. 
+ } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::SpontaneousPayment(preimage) => PaymentDetailsUpdate { + preimage: Some(Some(preimage)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + }; + + match self.payment_store.update(&update) { + Ok(true) => (), + Ok(false) => { + log_error!( + self.logger, + "Payment with ID {} couldn't be found in store", + payment_id, + ); + debug_assert!(false); + }, + Err(e) => { + log_error!( + self.logger, + "Failed to update payment with ID {}: {}", + payment_id, + e + ); + panic!("Failed to access payment store"); + }, + } + self.event_queue .add_event(Event::PaymentReceived { payment_id: Some(payment_id), @@ -681,6 +715,7 @@ where }; let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), preimage: Some(Some(payment_preimage)), status: Some(PaymentStatus::Succeeded), ..PaymentDetailsUpdate::new(payment_id) @@ -727,6 +762,7 @@ where ); let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), status: Some(PaymentStatus::Failed), ..PaymentDetailsUpdate::new(payment_id) }; diff --git a/src/payment/store.rs b/src/payment/store.rs index c495620c8..f7f4942be 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -357,7 +357,8 @@ where *hash = hash_opt }, _ => { - // We can omit updating the hash for BOLT11 payments as the payment has will always be known from the beginning. + // We can omit updating the hash for BOLT11 payments as the payment hash + // will always be known from the beginning. }, } } From 1e522d06e48e6b67b499d63e449d2bae2dcc59de Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 12 Mar 2024 13:47:57 +0100 Subject: [PATCH 42/89] Shorten node ann. broadcast timer in tests --- src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/lib.rs b/src/lib.rs index 4e1e012e2..9a634c12d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -541,7 +541,10 @@ impl Node { let mut stop_bcast = self.stop_sender.subscribe(); runtime.spawn(async move { // We check every 30 secs whether our last broadcast is NODE_ANN_BCAST_INTERVAL away. + #[cfg(not(test))] let mut interval = tokio::time::interval(Duration::from_secs(30)); + #[cfg(test)] + let mut interval = tokio::time::interval(Duration::from_secs(5)); loop { tokio::select! { _ = stop_bcast.changed() => { From 5361d9f809fbb0a5857df7857b95141de7f8b481 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 12 Mar 2024 13:07:15 +0100 Subject: [PATCH 43/89] Add simple BOLT12 send/receive test --- tests/common/mod.rs | 4 +- tests/integration_tests_rust.rs | 124 +++++++++++++++++++++++++++++++- 2 files changed, 126 insertions(+), 2 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index bcb47accb..36f0cd6a1 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -102,7 +102,9 @@ macro_rules! expect_payment_successful_event { match $node.wait_next_event() { ref e @ Event::PaymentSuccessful { payment_id, fee_paid_msat, .. 
} => { println!("{} got event {:?}", $node.node_id(), e); - assert_eq!(fee_paid_msat, $fee_paid_msat); + if let Some(fee_msat) = $fee_paid_msat { + assert_eq!(fee_paid_msat, fee_msat); + } assert_eq!(payment_id, $payment_id); $node.event_handled(); }, diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 1820ef76a..7e0b5a41c 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -7,14 +7,18 @@ use common::{ setup_node, setup_two_nodes, wait_for_tx, TestSyncStore, }; +use ldk_node::payment::PaymentKind; use ldk_node::{Builder, Event, NodeError}; +use lightning::ln::channelmanager::PaymentId; use lightning::util::persist::KVStore; use bitcoin::{Amount, Network}; use std::sync::Arc; +use crate::common::expect_channel_ready_event; + #[test] fn channel_full_cycle() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); @@ -138,7 +142,7 @@ fn multi_hop_sending() { let payment_id = expect_payment_received_event!(&nodes[4], 2_500_000); let fee_paid_msat = Some(2000); - expect_payment_successful_event!(nodes[0], payment_id, fee_paid_msat); + expect_payment_successful_event!(nodes[0], payment_id, Some(fee_paid_msat)); } #[test] @@ -366,3 +370,121 @@ fn concurrent_connections_succeed() { h.join().unwrap(); } } + +#[test] +fn simple_bolt12_send_receive() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false); + + let address_a = node_a.onchain_payment().new_address().unwrap(); + let premine_amount_sat = 5_000_000; + premine_and_distribute_funds( + &bitcoind.client, + &electrsd.client, + vec![address_a], + Amount::from_sat(premine_amount_sat), + ); + + node_a.sync_wallets().unwrap(); + open_channel(&node_a, &node_b, 4_000_000, true, &electrsd); + + generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); + + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + + expect_channel_ready_event!(node_a, node_b.node_id()); + expect_channel_ready_event!(node_b, node_a.node_id()); + + // Sleep until we broadcasted a node announcement. + while node_b.status().latest_node_announcement_broadcast_timestamp.is_none() { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + + // Sleep one more sec to make sure the node announcement propagates. + std::thread::sleep(std::time::Duration::from_secs(1)); + + let expected_amount_msat = 100_000_000; + let offer = node_b.bolt12_payment().receive(expected_amount_msat, "asdf").unwrap(); + let payment_id = node_a.bolt12_payment().send(&offer, None).unwrap(); + + expect_payment_successful_event!(node_a, Some(payment_id), None); + let node_a_payments = node_a.list_payments(); + assert_eq!(node_a_payments.len(), 1); + match node_a_payments.first().unwrap().kind { + PaymentKind::Bolt12Offer { hash, preimage, secret: _, offer_id } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + assert_eq!(offer_id, offer.id()); + //TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12 + //API currently doesn't allow to do that. 
+ }, + _ => { + panic!("Unexpected payment kind"); + }, + } + assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(expected_amount_msat)); + + expect_payment_received_event!(node_b, expected_amount_msat); + let node_b_payments = node_b.list_payments(); + assert_eq!(node_b_payments.len(), 1); + match node_b_payments.first().unwrap().kind { + PaymentKind::Bolt12Offer { hash, preimage, secret, offer_id } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + assert!(secret.is_some()); + assert_eq!(offer_id, offer.id()); + }, + _ => { + panic!("Unexpected payment kind"); + }, + } + assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(expected_amount_msat)); + + // Test send_using_amount + let offer_amount_msat = 100_000_000; + let less_than_offer_amount = offer_amount_msat - 10_000; + let expected_amount_msat = offer_amount_msat + 10_000; + let offer = node_b.bolt12_payment().receive(offer_amount_msat, "asdf").unwrap(); + assert!(node_a + .bolt12_payment() + .send_using_amount(&offer, None, less_than_offer_amount) + .is_err()); + let payment_id = + node_a.bolt12_payment().send_using_amount(&offer, None, expected_amount_msat).unwrap(); + + expect_payment_successful_event!(node_a, Some(payment_id), None); + let node_a_payments = node_a.list_payments_with_filter(|p| p.id == payment_id); + assert_eq!(node_a_payments.len(), 1); + let payment_hash = match node_a_payments.first().unwrap().kind { + PaymentKind::Bolt12Offer { hash, preimage, secret: _, offer_id } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + assert_eq!(offer_id, offer.id()); + //TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12 + //API currently doesn't allow to do that. + hash.unwrap() + }, + _ => { + panic!("Unexpected payment kind"); + }, + }; + assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(expected_amount_msat)); + + expect_payment_received_event!(node_b, expected_amount_msat); + let node_b_payment_id = PaymentId(payment_hash.0); + let node_b_payments = node_b.list_payments_with_filter(|p| p.id == node_b_payment_id); + assert_eq!(node_b_payments.len(), 1); + match node_b_payments.first().unwrap().kind { + PaymentKind::Bolt12Offer { hash, preimage, secret, offer_id } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + assert!(secret.is_some()); + assert_eq!(offer_id, offer.id()); + }, + _ => { + panic!("Unexpected payment kind"); + }, + } + assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(expected_amount_msat)); +} From d57f67f2e2b4c25b802fd9ef4bf75d648b514fa5 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 23 May 2024 16:33:50 +0200 Subject: [PATCH 44/89] Implement and test `Refund` flow --- bindings/ldk_node.udl | 12 +++++ src/error.rs | 6 +++ src/payment/bolt12.rs | 81 +++++++++++++++++++++++++++++++++ src/uniffi_types.rs | 32 +++++++++++++ tests/integration_tests_rust.rs | 43 +++++++++++++++++ 5 files changed, 174 insertions(+) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index dd09cf88a..328dcfdbd 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -109,6 +109,10 @@ interface Bolt12Payment { Offer receive(u64 amount_msat, [ByRef]string description); [Throws=NodeError] Offer receive_variable_amount([ByRef]string description); + [Throws=NodeError] + Bolt12Invoice request_refund_payment([ByRef]Refund refund); + [Throws=NodeError] + Refund initiate_refund(u64 amount_msat, u32 expiry_secs); }; interface SpontaneousPayment { @@ -136,6 +140,7 @@ enum NodeError { 
"InvoiceCreationFailed", "InvoiceRequestCreationFailed", "OfferCreationFailed", + "RefundCreationFailed", "PaymentSendingFailed", "ProbeSendingFailed", "ChannelCreationFailed", @@ -161,6 +166,7 @@ enum NodeError { "InvalidAmount", "InvalidInvoice", "InvalidOffer", + "InvalidRefund", "InvalidChannelId", "InvalidNetwork", "DuplicatePayment", @@ -393,6 +399,12 @@ typedef string Bolt11Invoice; [Custom] typedef string Offer; +[Custom] +typedef string Refund; + +[Custom] +typedef string Bolt12Invoice; + [Custom] typedef string OfferId; diff --git a/src/error.rs b/src/error.rs index b51941012..824bde192 100644 --- a/src/error.rs +++ b/src/error.rs @@ -17,6 +17,8 @@ pub enum Error { InvoiceRequestCreationFailed, /// Offer creation failed. OfferCreationFailed, + /// Refund creation failed. + RefundCreationFailed, /// Sending a payment has failed. PaymentSendingFailed, /// Sending a payment probe has failed. @@ -67,6 +69,8 @@ pub enum Error { InvalidInvoice, /// The given offer is invalid. InvalidOffer, + /// The given refund is invalid. + InvalidRefund, /// The given channel ID is invalid. InvalidChannelId, /// The given network is invalid. @@ -95,6 +99,7 @@ impl fmt::Display for Error { Self::InvoiceCreationFailed => write!(f, "Failed to create invoice."), Self::InvoiceRequestCreationFailed => write!(f, "Failed to create invoice request."), Self::OfferCreationFailed => write!(f, "Failed to create offer."), + Self::RefundCreationFailed => write!(f, "Failed to create refund."), Self::PaymentSendingFailed => write!(f, "Failed to send the given payment."), Self::ProbeSendingFailed => write!(f, "Failed to send the given payment probe."), Self::ChannelCreationFailed => write!(f, "Failed to create channel."), @@ -122,6 +127,7 @@ impl fmt::Display for Error { Self::InvalidAmount => write!(f, "The given amount is invalid."), Self::InvalidInvoice => write!(f, "The given invoice is invalid."), Self::InvalidOffer => write!(f, "The given offer is invalid."), + Self::InvalidRefund => write!(f, "The given refund is invalid."), Self::InvalidChannelId => write!(f, "The given channel ID is invalid."), Self::InvalidNetwork => write!(f, "The given network is invalid."), Self::DuplicatePayment => { diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index ffcaa4c5d..35fa3cfb4 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -11,12 +11,15 @@ use crate::payment::store::{ use crate::types::ChannelManager; use lightning::ln::channelmanager::{PaymentId, Retry}; +use lightning::offers::invoice::Bolt12Invoice; use lightning::offers::offer::{Amount, Offer}; use lightning::offers::parse::Bolt12SemanticError; +use lightning::offers::refund::Refund; use rand::RngCore; use std::sync::{Arc, RwLock}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; /// A payment handler allowing to create and pay [BOLT 12] offers and refunds. /// @@ -264,4 +267,82 @@ impl Bolt12Payment { Ok(offer) } + + /// Requests a refund payment for the given [`Refund`]. + /// + /// The returned [`Bolt12Invoice`] is for informational purposes only (i.e., isn't needed to + /// retrieve the refund). 
+ pub fn request_refund_payment(&self, refund: &Refund) -> Result { + let invoice = self.channel_manager.request_refund_payment(refund).map_err(|e| { + log_error!(self.logger, "Failed to request refund payment: {:?}", e); + Error::InvoiceRequestCreationFailed + })?; + + let payment_hash = invoice.payment_hash(); + let payment_id = PaymentId(payment_hash.0); + + let payment = PaymentDetails { + id: payment_id, + kind: PaymentKind::Bolt12Refund { + hash: Some(payment_hash), + preimage: None, + secret: None, + }, + amount_msat: Some(refund.amount_msats()), + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + }; + + self.payment_store.insert(payment)?; + + Ok(invoice) + } + + /// Returns a [`Refund`] object that can be used to offer a refund payment of the amount given. + pub fn initiate_refund(&self, amount_msat: u64, expiry_secs: u32) -> Result { + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + + let expiration = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) + .duration_since(UNIX_EPOCH) + .unwrap(); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let refund = self + .channel_manager + .create_refund_builder( + amount_msat, + expiration, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to create refund builder: {:?}", e); + Error::RefundCreationFailed + })? + .build() + .map_err(|e| { + log_error!(self.logger, "Failed to create refund: {:?}", e); + Error::RefundCreationFailed + })?; + + log_info!(self.logger, "Offering refund of {}msat", amount_msat); + + let kind = PaymentKind::Bolt12Refund { hash: None, preimage: None, secret: None }; + + let payment = PaymentDetails { + id: payment_id, + kind, + amount_msat: Some(amount_msat), + direction: PaymentDirection::Outbound, + status: PaymentStatus::Pending, + }; + + self.payment_store.insert(payment)?; + + Ok(refund) + } } diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index ef22e61ec..99e72e31c 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -2,7 +2,9 @@ pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, Pay pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; +pub use lightning::offers::invoice::Bolt12Invoice; pub use lightning::offers::offer::{Offer, OfferId}; +pub use lightning::offers::refund::Refund; pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; @@ -21,6 +23,7 @@ use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; use lightning::ln::channelmanager::PaymentId; +use lightning::util::ser::Writeable; use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; @@ -88,6 +91,35 @@ impl UniffiCustomTypeConverter for Offer { } } +impl UniffiCustomTypeConverter for Refund { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + Refund::from_str(&val).map_err(|_| Error::InvalidRefund.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + +impl UniffiCustomTypeConverter for Bolt12Invoice { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + if let Ok(invoice) = Bolt12Invoice::try_from(bytes_vec) { + 
return Ok(invoice); + } + } + Err(Error::InvalidInvoice.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.encode()) + } +} + impl UniffiCustomTypeConverter for OfferId { type Builtin = String; diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 7e0b5a41c..fa1b33b39 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -487,4 +487,47 @@ fn simple_bolt12_send_receive() { }, } assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(expected_amount_msat)); + + // Now node_b refunds the amount node_a just overpaid. + let overpaid_amount = expected_amount_msat - offer_amount_msat; + let refund = node_b.bolt12_payment().initiate_refund(overpaid_amount, 3600).unwrap(); + let invoice = node_a.bolt12_payment().request_refund_payment(&refund).unwrap(); + expect_payment_received_event!(node_a, overpaid_amount); + + let node_b_payment_id = node_b + .list_payments_with_filter(|p| p.amount_msat == Some(overpaid_amount)) + .first() + .unwrap() + .id; + expect_payment_successful_event!(node_b, Some(node_b_payment_id), None); + + let node_b_payments = node_b.list_payments_with_filter(|p| p.id == node_b_payment_id); + assert_eq!(node_b_payments.len(), 1); + match node_b_payments.first().unwrap().kind { + PaymentKind::Bolt12Refund { hash, preimage, secret: _ } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + //TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12 + //API currently doesn't allow to do that. + }, + _ => { + panic!("Unexpected payment kind"); + }, + } + assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(overpaid_amount)); + + let node_a_payment_id = PaymentId(invoice.payment_hash().0); + let node_a_payments = node_a.list_payments_with_filter(|p| p.id == node_a_payment_id); + assert_eq!(node_a_payments.len(), 1); + match node_a_payments.first().unwrap().kind { + PaymentKind::Bolt12Refund { hash, preimage, secret } => { + assert!(hash.is_some()); + assert!(preimage.is_some()); + assert!(secret.is_some()); + }, + _ => { + panic!("Unexpected payment kind"); + }, + } + assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(overpaid_amount)); } From 82b85c1abe786b7d8a45f116d60ece812af7119f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 31 May 2024 11:59:30 +0200 Subject: [PATCH 45/89] Check `PaymentKind` explicitly in `full_cycle` tests --- tests/common/mod.rs | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 36f0cd6a1..44c8efde9 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -2,7 +2,7 @@ #![allow(dead_code)] use ldk_node::io::sqlite_store::SqliteStore; -use ldk_node::payment::{PaymentDirection, PaymentStatus}; +use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus}; use ldk_node::{Builder, Config, Event, LogLevel, Node, NodeError}; use lightning::ln::msgs::SocketAddress; @@ -447,9 +447,11 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. 
})); assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); // Assert we fail duplicate outbound payments and check the status hasn't changed. assert_eq!(Err(NodeError::DuplicatePayment), node_a.bolt11_payment().send(&invoice)); @@ -492,9 +494,11 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); // Test "zero-amount" invoice payment println!("\nB receive_variable_amount_payment"); @@ -526,9 +530,11 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); @@ -550,9 +556,19 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_a.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_a.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Outbound); assert_eq!(node_a.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); + assert!(matches!( + node_a.payment(&keysend_payment_id).unwrap().kind, + PaymentKind::Spontaneous { .. } + )); assert_eq!(node_b.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); assert_eq!(node_b.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Inbound); assert_eq!(node_b.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); + assert!(matches!( + node_b.payment(&keysend_payment_id).unwrap().kind, + PaymentKind::Spontaneous { .. } + )); + assert_eq!(node_a.list_payments().len(), 4); + assert_eq!(node_b.list_payments().len(), 5); println!("\nB close_channel"); node_b.close_channel(&user_channel_id, node_a.node_id()).unwrap(); From 0285b55c84f292da9bc4f3a60e748509fc54bafe Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 11 Jun 2024 09:55:40 +0200 Subject: [PATCH 46/89] Pin `url` to v2.5.0 in CI to fix MSRV breakage .. 
for now --- .github/workflows/rust.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 5ce1306ee..4837063b7 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -49,6 +49,7 @@ jobs: cargo update -p proptest --precise "1.2.0" --verbose # proptest 1.3.0 requires rustc 1.64.0 cargo update -p regex --precise "1.9.6" --verbose # regex 1.10.0 requires rustc 1.65.0 cargo update -p home --precise "0.5.5" --verbose # home v0.5.9 requires rustc 1.70 or newer + cargo update -p url --precise "2.5.0" --verbose # url v2.5.1 requires rustc 1.67 or newer - name: Set RUSTFLAGS to deny warnings if: "matrix.toolchain == 'stable'" run: echo "RUSTFLAGS=-D warnings" >> "$GITHUB_ENV" From a2691e76db25bf712bcc31f5cf6289e364d6fbb0 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 6 Feb 2024 10:27:19 +0100 Subject: [PATCH 47/89] Introduce `AnchorChannelsConfig` .. allowing to configure the per-channel emergency reserve as well as some trusted peers for which we won't maintain any reserve. --- bindings/ldk_node.udl | 6 +++ src/config.rs | 93 +++++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 2 +- 3 files changed, 100 insertions(+), 1 deletion(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 328dcfdbd..762eb25c4 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -15,6 +15,12 @@ dictionary Config { sequence trusted_peers_0conf; u64 probing_liquidity_limit_multiplier; LogLevel log_level; + AnchorChannelsConfig? anchor_channels_config; +}; + +dictionary AnchorChannelsConfig { + sequence trusted_peers_no_reserve; + u64 per_channel_reserve_sats; }; interface Builder { diff --git a/src/config.rs b/src/config.rs index 945d712c9..aaa205f24 100644 --- a/src/config.rs +++ b/src/config.rs @@ -15,6 +15,7 @@ const DEFAULT_LDK_WALLET_SYNC_INTERVAL_SECS: u64 = 30; const DEFAULT_FEE_RATE_CACHE_UPDATE_INTERVAL_SECS: u64 = 60 * 10; const DEFAULT_PROBING_LIQUIDITY_LIMIT_MULTIPLIER: u64 = 3; const DEFAULT_LOG_LEVEL: LogLevel = LogLevel::Debug; +const DEFAULT_ANCHOR_PER_CHANNEL_RESERVE_SATS: u64 = 25_000; // The 'stop gap' parameter used by BDK's wallet sync. This seems to configure the threshold // number of derivation indexes after which BDK stops looking for new scripts belonging to the wallet. @@ -62,6 +63,9 @@ pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; /// | `trusted_peers_0conf` | [] | /// | `probing_liquidity_limit_multiplier` | 3 | /// | `log_level` | Debug | +/// | `anchor_channels_config` | Some(..) | +/// +/// See [`AnchorChannelsConfig`] for more information on its respective default values. /// /// [`Node`]: crate::Node pub struct Config { @@ -104,6 +108,23 @@ pub struct Config { /// /// Any messages below this level will be excluded from the logs. pub log_level: LogLevel, + /// Configuration options pertaining to Anchor channels, i.e., channels for which the + /// `option_anchors_zero_fee_htlc_tx` channel type is negotiated. + /// + /// Please refer to [`AnchorChannelsConfig`] for further information on Anchor channels. + /// + /// If set to `Some`, we'll try to open new channels with Anchors enabled, i.e., new channels + /// will be negotiated with the `option_anchors_zero_fee_htlc_tx` channel type if supported by + /// the counterparty. Note that this won't prevent us from opening non-Anchor channels if the + /// counterparty doesn't support `option_anchors_zero_fee_htlc_tx`. 
If set to `None`, new + /// channels will be negotiated with the legacy `option_static_remotekey` channel type only. + /// + /// **Note:** If set to `None` *after* some Anchor channels have already been + /// opened, no dedicated emergency on-chain reserve will be maintained for these channels, + /// which can be dangerous if only insufficient funds are available at the time of channel + /// closure. We *will* however still try to get the Anchor spending transactions confirmed + /// on-chain with the funds available. + pub anchor_channels_config: Option, } impl Default for Config { @@ -120,6 +141,78 @@ impl Default for Config { trusted_peers_0conf: Vec::new(), probing_liquidity_limit_multiplier: DEFAULT_PROBING_LIQUIDITY_LIMIT_MULTIPLIER, log_level: DEFAULT_LOG_LEVEL, + anchor_channels_config: Some(AnchorChannelsConfig::default()), + } + } +} + +/// Configuration options pertaining to 'Anchor' channels, i.e., channels for which the +/// `option_anchors_zero_fee_htlc_tx` channel type is negotiated. +/// +/// Prior to the introduction of Anchor channels, the on-chain fees paying for the transactions +/// issued on channel closure were pre-determined and locked-in at the time of the channel +/// opening. This required to estimate what fee rate would be sufficient to still have the +/// closing transactions be spendable on-chain (i.e., not be considered dust). This legacy +/// design of pre-anchor channels proved inadequate in the unpredictable, often turbulent, fee +/// markets we experience today. +/// +/// In contrast, Anchor channels allow to determine an adequate fee rate *at the time of channel +/// closure*, making them much more robust in the face of fee spikes. In turn, they require to +/// maintain a reserve of on-chain funds to have the channel closure transactions confirmed +/// on-chain, at least if the channel counterparty can't be trusted to do this for us. +/// +/// See [BOLT 3] for more technical details on Anchor channels. +/// +/// +/// ### Defaults +/// +/// | Parameter | Value | +/// |----------------------------|--------| +/// | `trusted_peers_no_reserve` | [] | +/// | `per_channel_reserve_sats` | 25000 | +/// +/// +/// [BOLT 3]: https://github.com/lightning/bolts/blob/master/03-transactions.md#htlc-timeout-and-htlc-success-transactions +#[derive(Debug, Clone)] +pub struct AnchorChannelsConfig { + /// A list of peers that we trust to get the required channel closing transactions confirmed + /// on-chain. + /// + /// Channels with these peers won't count towards the retained on-chain reserve and we won't + /// take any action to get the required transactions confirmed ourselves. + /// + /// **Note:** Trusting the channel counterparty to take the necessary actions to get the + /// required Anchor spending and HTLC transactions confirmed on-chain is potentially insecure + /// as the channel may not be closed if they refuse to do so, potentially leaving the user + /// funds stuck *or* even allow the counterparty to steal any in-flight funds after the + /// corresponding HTLCs time out. + pub trusted_peers_no_reserve: Vec, + /// The amount of satoshis per anchors-negotiated channel with an untrusted peer that we keep + /// as an emergency reserve in our on-chain wallet. + /// + /// This allows for having the required Anchor output spending and HTLC transactions confirmed + /// when the channel is closed. 
+	///
+	/// If the channel peer is not marked as trusted via
+	/// [`AnchorChannelsConfig::trusted_peers_no_reserve`], we will always try to spend the Anchor
+	/// outputs with *any* on-chain funds available, i.e., the total reserve value as well as any
+	/// spendable funds available in the on-chain wallet. Therefore, this per-channel amount is
+	/// really an emergency reserve that we maintain at all times to reduce the risk of
+	/// insufficient funds at the time of a channel closure. To this end, we will refuse to open
+	/// outbound or accept inbound channels if we don't have sufficient on-chain funds available to
+	/// cover the additional reserve requirement.
+	///
+	/// **Note:** Depending on the fee market at the time of closure, this reserve amount might or
+	/// might not suffice to successfully spend the Anchor output and have the HTLC transactions
+	/// confirmed on-chain, i.e., you may want to adjust this value accordingly.
+	pub per_channel_reserve_sats: u64,
+}
+
+impl Default for AnchorChannelsConfig {
+	fn default() -> Self {
+		Self {
+			trusted_peers_no_reserve: Vec::new(),
+			per_channel_reserve_sats: DEFAULT_ANCHOR_PER_CHANNEL_RESERVE_SATS,
		}
	}
}
diff --git a/src/lib.rs b/src/lib.rs
index 9a634c12d..2c66fd154 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -103,7 +103,7 @@ pub use lightning;
 pub use lightning_invoice;
 
 pub use balance::{BalanceDetails, LightningBalance, PendingSweepBalance};
-pub use config::{default_config, Config};
+pub use config::{default_config, AnchorChannelsConfig, Config};
 pub use error::Error as NodeError;
 use error::Error;
 
From db1b3732af87e96091c53ed813b0933fc58acfcb Mon Sep 17 00:00:00 2001
From: Elias Rohrer
Date: Tue, 27 Jun 2023 12:34:01 -0400
Subject: [PATCH 48/89] Implement Anchor channel event handling

When Anchor outputs need to be spent LDK will generate
`BumpTransactionEvent`s. Here, we add the corresponding event-handling
and PSBT-signing support.
---
 src/builder.rs |  11 ++---
 src/event.rs   |  49 +++++++++++++++---
 src/lib.rs     |  13 ++++-
 src/types.rs   |   8 +++
 src/wallet.rs  | 131 +++++++++++++++++++++++++++++++++++++++++++++++--
 5 files changed, 192 insertions(+), 20 deletions(-)

diff --git a/src/builder.rs b/src/builder.rs
index 386deb418..2a361396d 100644
--- a/src/builder.rs
+++ b/src/builder.rs
@@ -693,12 +693,11 @@ fn build_with_store_internal(
 	// for inbound channels.
 	let mut user_config = UserConfig::default();
 	user_config.channel_handshake_limits.force_announced_channel_preference = false;
-
-	if !config.trusted_peers_0conf.is_empty() {
-		// Manually accept inbound channels if we expect 0conf channel requests, avoid
-		// generating the events otherwise.
-		user_config.manually_accept_inbound_channels = true;
-	}
+	user_config.manually_accept_inbound_channels = true;
+	// Note the channel_handshake_config will be overwritten in `connect_open_channel`, but we
+	// still set a default here.
+	user_config.channel_handshake_config.negotiate_anchors_zero_fee_htlc_tx =
+		config.anchor_channels_config.is_some();
 
 	if liquidity_source_config.and_then(|lsc| lsc.lsps2_service.as_ref()).is_some() {
 		// Generally allow claiming underpaying HTLCs as the LSP will skim off some fee.
We'll diff --git a/src/event.rs b/src/event.rs index b49fc96e8..5aeb8f346 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1,7 +1,8 @@ use crate::types::{DynStore, Sweeper, Wallet}; use crate::{ - hex_utils, ChannelManager, Config, Error, NetworkGraph, PeerInfo, PeerStore, UserChannelId, + hex_utils, BumpTransactionEventHandler, ChannelManager, Config, Error, NetworkGraph, PeerInfo, + PeerStore, UserChannelId, }; use crate::connection::ConnectionManager; @@ -15,9 +16,10 @@ use crate::io::{ EVENT_QUEUE_PERSISTENCE_KEY, EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, }; -use crate::logger::{log_error, log_info, Logger}; +use crate::logger::{log_debug, log_error, log_info, Logger}; use lightning::chain::chaininterface::ConfirmationTarget; +use lightning::events::bump_transaction::BumpTransactionEvent; use lightning::events::{ClosureReason, PaymentPurpose}; use lightning::events::{Event as LdkEvent, PaymentFailureReason}; use lightning::impl_writeable_tlv_based_enum; @@ -317,6 +319,7 @@ where { event_queue: Arc>, wallet: Arc, + bump_tx_event_handler: Arc, channel_manager: Arc, connection_manager: Arc>, output_sweeper: Arc, @@ -333,15 +336,17 @@ where L::Target: Logger, { pub fn new( - event_queue: Arc>, wallet: Arc, channel_manager: Arc, - connection_manager: Arc>, output_sweeper: Arc, - network_graph: Arc, payment_store: Arc>, - peer_store: Arc>, runtime: Arc>>, - logger: L, config: Arc, + event_queue: Arc>, wallet: Arc, + bump_tx_event_handler: Arc, + channel_manager: Arc, connection_manager: Arc>, + output_sweeper: Arc, network_graph: Arc, + payment_store: Arc>, peer_store: Arc>, + runtime: Arc>>, logger: L, config: Arc, ) -> Self { Self { event_queue, wallet, + bump_tx_event_handler, channel_manager, connection_manager, output_sweeper, @@ -1018,7 +1023,6 @@ where }, LdkEvent::DiscardFunding { .. } => {}, LdkEvent::HTLCIntercepted { .. } => {}, - LdkEvent::BumpTransaction(_) => {}, LdkEvent::InvoiceRequestFailed { payment_id } => { log_error!( self.logger, @@ -1062,6 +1066,35 @@ where }); } }, + LdkEvent::BumpTransaction(bte) => { + let (channel_id, counterparty_node_id) = match bte { + BumpTransactionEvent::ChannelClose { + ref channel_id, + ref counterparty_node_id, + .. + } => (channel_id, counterparty_node_id), + BumpTransactionEvent::HTLCResolution { + ref channel_id, + ref counterparty_node_id, + .. 
+ } => (channel_id, counterparty_node_id), + }; + + if let Some(anchor_channels_config) = self.config.anchor_channels_config.as_ref() { + if anchor_channels_config + .trusted_peers_no_reserve + .contains(counterparty_node_id) + { + log_debug!(self.logger, + "Ignoring BumpTransactionEvent for channel {} due to trusted counterparty {}", + channel_id, counterparty_node_id + ); + return; + } + } + + self.bump_tx_event_handler.handle_event(&bte); + }, } } } diff --git a/src/lib.rs b/src/lib.rs index 2c66fd154..190cdfd44 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -133,14 +133,15 @@ use payment::store::PaymentStore; use payment::{Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, SpontaneousPayment}; use peer_store::{PeerInfo, PeerStore}; use types::{ - Broadcaster, ChainMonitor, ChannelManager, DynStore, FeeEstimator, KeysManager, NetworkGraph, - PeerManager, Router, Scorer, Sweeper, Wallet, + Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, FeeEstimator, + KeysManager, NetworkGraph, PeerManager, Router, Scorer, Sweeper, Wallet, }; pub use types::{ChannelDetails, PeerDetails, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; use lightning::chain::{BestBlock, Confirm}; +use lightning::events::bump_transaction::Wallet as LdkWallet; use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::SocketAddress; @@ -620,9 +621,17 @@ impl Node { } }); + let bump_tx_event_handler = Arc::new(BumpTransactionEventHandler::new( + Arc::clone(&self.tx_broadcaster), + Arc::new(LdkWallet::new(Arc::clone(&self.wallet), Arc::clone(&self.logger))), + Arc::clone(&self.keys_manager), + Arc::clone(&self.logger), + )); + let event_handler = Arc::new(EventHandler::new( Arc::clone(&self.event_queue), Arc::clone(&self.wallet), + bump_tx_event_handler, Arc::clone(&self.channel_manager), Arc::clone(&self.connection_manager), Arc::clone(&self.output_sweeper), diff --git a/src/types.rs b/src/types.rs index 14d8adf76..b7cf4ad44 100644 --- a/src/types.rs +++ b/src/types.rs @@ -135,6 +135,14 @@ pub(crate) type Sweeper = OutputSweeper< Arc, >; +pub(crate) type BumpTransactionEventHandler = + lightning::events::bump_transaction::BumpTransactionEventHandler< + Arc, + Arc, Arc>>, + Arc, + Arc, + >; + /// A local, potentially user-provided, identifier of a channel. /// /// By default, this will be randomly generated for the user to ensure local uniqueness. 
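The trusted-peer check in the event handler above builds on the `AnchorChannelsConfig` introduced in the previous patch. As a rough sketch (editor's illustration, not part of this series), this is how a library consumer might populate it; the peer key below is a placeholder, not a real node id:

```rust
use std::str::FromStr;

use bitcoin::secp256k1::PublicKey;
use ldk_node::{AnchorChannelsConfig, Config};

// Build a `Config` that trusts a specific peer to handle Anchor spends for us.
fn anchor_trusting_config() -> Config {
    // Placeholder key for illustration only; use the counterparty's actual node id.
    let trusted_peer = PublicKey::from_str(
        "02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619",
    )
    .expect("valid public key");

    let mut anchor_cfg = AnchorChannelsConfig::default();
    // Channels with this peer are exempt from the on-chain reserve, and
    // `BumpTransactionEvent`s for them are skipped by the event handler.
    anchor_cfg.trusted_peers_no_reserve.push(trusted_peer);
    // Keep the default 25_000 sat emergency reserve per untrusted Anchor channel.
    anchor_cfg.per_channel_reserve_sats = 25_000;

    let mut config = Config::default();
    config.anchor_channels_config = Some(anchor_cfg);
    config
}
```
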
diff --git a/src/wallet.rs b/src/wallet.rs index 674cb6786..df9656d48 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -4,6 +4,7 @@ use crate::Error; use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator}; +use lightning::events::bump_transaction::{Utxo, WalletSource}; use lightning::ln::msgs::{DecodeError, UnsignedGossipMessage}; use lightning::ln::script::ShutdownScript; use lightning::sign::{ @@ -19,8 +20,14 @@ use bdk::wallet::AddressIndex; use bdk::FeeRate; use bdk::{SignOptions, SyncOptions}; +use bitcoin::address::{Payload, WitnessVersion}; use bitcoin::bech32::u5; +use bitcoin::blockdata::constants::WITNESS_SCALE_FACTOR; use bitcoin::blockdata::locktime::absolute::LockTime; +use bitcoin::hash_types::WPubkeyHash; +use bitcoin::hashes::Hash; +use bitcoin::key::XOnlyPublicKey; +use bitcoin::psbt::PartiallySignedTransaction; use bitcoin::secp256k1::ecdh::SharedSecret; use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature}; use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; @@ -242,6 +249,123 @@ where } } +impl WalletSource for Wallet +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + fn list_confirmed_utxos(&self) -> Result, ()> { + let locked_wallet = self.inner.lock().unwrap(); + let mut utxos = Vec::new(); + let confirmed_txs: Vec = locked_wallet + .list_transactions(false) + .map_err(|e| { + log_error!(self.logger, "Failed to retrieve transactions from wallet: {}", e); + })? + .into_iter() + .filter(|t| t.confirmation_time.is_some()) + .collect(); + let unspent_confirmed_utxos = locked_wallet + .list_unspent() + .map_err(|e| { + log_error!( + self.logger, + "Failed to retrieve unspent transactions from wallet: {}", + e + ); + })? + .into_iter() + .filter(|u| confirmed_txs.iter().find(|t| t.txid == u.outpoint.txid).is_some()); + + for u in unspent_confirmed_utxos { + let payload = Payload::from_script(&u.txout.script_pubkey).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + + match payload { + Payload::WitnessProgram(program) => match program.version() { + WitnessVersion::V0 if program.program().len() == 20 => { + let wpkh = + WPubkeyHash::from_slice(program.program().as_bytes()).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + let utxo = Utxo::new_v0_p2wpkh(u.outpoint, u.txout.value, &wpkh); + utxos.push(utxo); + }, + WitnessVersion::V1 => { + XOnlyPublicKey::from_slice(program.program().as_bytes()).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + + let utxo = Utxo { + outpoint: u.outpoint, + output: TxOut { + value: u.txout.value, + script_pubkey: ScriptBuf::new_witness_program(&program), + }, + satisfaction_weight: 1 /* empty script_sig */ * WITNESS_SCALE_FACTOR as u64 + + 1 /* witness items */ + 1 /* schnorr sig len */ + 64, /* schnorr sig */ + }; + utxos.push(utxo); + }, + _ => { + log_error!( + self.logger, + "Unexpected witness version or length. Version: {}, Length: {}", + program.version(), + program.program().len() + ); + }, + }, + _ => { + log_error!( + self.logger, + "Tried to use a non-witness script. This must never happen." + ); + panic!("Tried to use a non-witness script. 
This must never happen."); + }, + } + } + + Ok(utxos) + } + + fn get_change_script(&self) -> Result { + let locked_wallet = self.inner.lock().unwrap(); + let address_info = locked_wallet.get_address(AddressIndex::LastUnused).map_err(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + })?; + + Ok(address_info.address.script_pubkey()) + } + + fn sign_psbt(&self, mut psbt: PartiallySignedTransaction) -> Result { + let locked_wallet = self.inner.lock().unwrap(); + + // While BDK populates both `witness_utxo` and `non_witness_utxo` fields, LDK does not. As + // BDK by default doesn't trust the witness UTXO to account for the Segwit bug, we must + // disable it here as otherwise we fail to sign. + let mut sign_options = SignOptions::default(); + sign_options.trust_witness_utxo = true; + + match locked_wallet.sign(&mut psbt, sign_options) { + Ok(_finalized) => { + // BDK will fail to finalize for all LDK-provided inputs of the PSBT. Unfortunately + // we can't check more fine grained if it succeeded for all the other inputs here, + // so we just ignore the returned `finalized` bool. + }, + Err(err) => { + log_error!(self.logger, "Failed to sign transaction: {}", err); + return Err(()); + }, + } + + Ok(psbt.extract_tx()) + } +} + /// Similar to [`KeysManager`], but overrides the destination and shutdown scripts so they are /// directly spendable by the BDK wallet. pub struct WalletKeysManager @@ -407,11 +531,10 @@ where })?; match address.payload { - bitcoin::address::Payload::WitnessProgram(program) => { - ShutdownScript::new_witness_program(&program).map_err(|e| { + Payload::WitnessProgram(program) => ShutdownScript::new_witness_program(&program) + .map_err(|e| { log_error!(self.logger, "Invalid shutdown script: {:?}", e); - }) - }, + }), _ => { log_error!( self.logger, From 1952802c4929021538929b917bef0262fb7f0c5f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 19 Feb 2024 12:07:50 +0100 Subject: [PATCH 49/89] Maintain and expose anchor reserve --- .../lightningdevkit/ldknode/LibraryTest.kt | 6 +- bindings/ldk_node.udl | 1 + src/balance.rs | 8 ++ src/event.rs | 66 +++++++++++++- src/lib.rs | 89 ++++++++++++++++--- src/payment/onchain.rs | 34 +++++-- src/wallet.rs | 19 +++- tests/common/mod.rs | 28 ++++-- tests/integration_tests_cln.rs | 2 +- tests/integration_tests_rust.rs | 33 ++++--- tests/integration_tests_vss.rs | 6 +- 11 files changed, 241 insertions(+), 51 deletions(-) diff --git a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt index b6b86da9c..6f863e637 100644 --- a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt +++ b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt @@ -203,9 +203,9 @@ class LibraryTest { val spendableBalance2AfterOpen = node2.listBalances().spendableOnchainBalanceSats println("Spendable balance 1 after open: $spendableBalance1AfterOpen") println("Spendable balance 2 after open: $spendableBalance2AfterOpen") - assert(spendableBalance1AfterOpen > 49000u) - assert(spendableBalance1AfterOpen < 50000u) - assertEquals(100000uL, spendableBalance2AfterOpen) + assert(spendableBalance1AfterOpen > 24000u) + assert(spendableBalance1AfterOpen < 25000u) + assertEquals(75000uL, spendableBalance2AfterOpen) val channelReadyEvent1 = node1.waitNextEvent() println("Got event: $channelReadyEvent1") diff --git 
a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 762eb25c4..7b1b3047e 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -354,6 +354,7 @@ interface PendingSweepBalance { dictionary BalanceDetails { u64 total_onchain_balance_sats; u64 spendable_onchain_balance_sats; + u64 total_anchor_channels_reserve_sats; u64 total_lightning_balance_sats; sequence lightning_balances; sequence pending_balances_from_channel_closures; diff --git a/src/balance.rs b/src/balance.rs index bad2d1d5f..f1c95dcbe 100644 --- a/src/balance.rs +++ b/src/balance.rs @@ -15,7 +15,15 @@ pub struct BalanceDetails { /// The total balance of our on-chain wallet. pub total_onchain_balance_sats: u64, /// The currently spendable balance of our on-chain wallet. + /// + /// This includes any sufficiently confirmed funds, minus + /// [`total_anchor_channels_reserve_sats`]. + /// + /// [`total_anchor_channels_reserve_sats`]: Self::total_anchor_channels_reserve_sats pub spendable_onchain_balance_sats: u64, + /// The share of our total balance that we retain as an emergency reserve to (hopefully) be + /// able to spend the Anchor outputs when one of our channels is closed. + pub total_anchor_channels_reserve_sats: u64, /// The total balance that we would be able to claim across all our Lightning channels. /// /// Note this excludes balances that we are unsure if we are able to claim (e.g., as we are diff --git a/src/event.rs b/src/event.rs index 5aeb8f346..36769c0ee 100644 --- a/src/event.rs +++ b/src/event.rs @@ -820,9 +820,67 @@ where temporary_channel_id, counterparty_node_id, funding_satoshis, - channel_type: _, + channel_type, push_msat: _, } => { + let anchor_channel = channel_type.requires_anchors_zero_fee_htlc_tx(); + + if anchor_channel { + if let Some(anchor_channels_config) = + self.config.anchor_channels_config.as_ref() + { + let cur_anchor_reserve_sats = crate::total_anchor_channels_reserve_sats( + &self.channel_manager, + &self.config, + ); + let spendable_amount_sats = self + .wallet + .get_spendable_amount_sats(cur_anchor_reserve_sats) + .unwrap_or(0); + + let required_amount_sats = if anchor_channels_config + .trusted_peers_no_reserve + .contains(&counterparty_node_id) + { + 0 + } else { + anchor_channels_config.per_channel_reserve_sats + }; + + if spendable_amount_sats < required_amount_sats { + log_error!( + self.logger, + "Rejecting inbound Anchor channel from peer {} due to insufficient available on-chain reserves.", + counterparty_node_id, + ); + self.channel_manager + .force_close_without_broadcasting_txn( + &temporary_channel_id, + &counterparty_node_id, + ) + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to reject channel: {:?}", e) + }); + return; + } + } else { + log_error!( + self.logger, + "Rejecting inbound channel from peer {} due to Anchor channels being disabled.", + counterparty_node_id, + ); + self.channel_manager + .force_close_without_broadcasting_txn( + &temporary_channel_id, + &counterparty_node_id, + ) + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to reject channel: {:?}", e) + }); + return; + } + } + let user_channel_id: u128 = rand::thread_rng().gen::(); let allow_0conf = self.config.trusted_peers_0conf.contains(&counterparty_node_id); let res = if allow_0conf { @@ -843,8 +901,9 @@ where Ok(()) => { log_info!( self.logger, - "Accepting inbound{} channel of {}sats from{} peer {}", + "Accepting inbound{}{} channel of {}sats from{} peer {}", if allow_0conf { " 0conf" } else { "" }, + if anchor_channel { " Anchor" } else { "" }, funding_satoshis, if 
allow_0conf { " trusted" } else { "" }, counterparty_node_id, @@ -853,8 +912,9 @@ where Err(e) => { log_error!( self.logger, - "Error while accepting inbound{} channel from{} peer {}: {:?}", + "Error while accepting inbound{}{} channel from{} peer {}: {:?}", if allow_0conf { " 0conf" } else { "" }, + if anchor_channel { " Anchor" } else { "" }, counterparty_node_id, if allow_0conf { " trusted" } else { "" }, e, diff --git a/src/lib.rs b/src/lib.rs index 190cdfd44..7b2ccaa83 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -142,7 +142,7 @@ use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; use lightning::chain::{BestBlock, Confirm}; use lightning::events::bump_transaction::Wallet as LdkWallet; -use lightning::ln::channelmanager::PaymentId; +use lightning::ln::channelmanager::{ChannelShutdownState, PaymentId}; use lightning::ln::msgs::SocketAddress; use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; @@ -916,6 +916,8 @@ impl Node { OnchainPayment::new( Arc::clone(&self.runtime), Arc::clone(&self.wallet), + Arc::clone(&self.channel_manager), + Arc::clone(&self.config), Arc::clone(&self.logger), ) } @@ -926,6 +928,8 @@ impl Node { Arc::new(OnchainPayment::new( Arc::clone(&self.runtime), Arc::clone(&self.wallet), + Arc::clone(&self.channel_manager), + Arc::clone(&self.config), Arc::clone(&self.logger), )) } @@ -1001,6 +1005,10 @@ impl Node { /// channel counterparty on channel open. This can be useful to start out with the balance not /// entirely shifted to one side, therefore allowing to receive payments from the getgo. /// + /// If Anchor channels are enabled, this will ensure the configured + /// [`AnchorChannelsConfig::per_channel_reserve_sats`] is available and will be retained before + /// opening the channel. + /// /// Returns a [`UserChannelId`] allowing to locally keep track of the channel. pub fn connect_open_channel( &self, node_id: PublicKey, address: SocketAddress, channel_amount_sats: u64, @@ -1013,18 +1021,26 @@ impl Node { } let runtime = rt_lock.as_ref().unwrap(); - let cur_balance = self.wallet.get_balance()?; - if cur_balance.get_spendable() < channel_amount_sats { - log_error!(self.logger, "Unable to create channel due to insufficient funds."); - return Err(Error::InsufficientFunds); - } - let peer_info = PeerInfo { node_id, address }; let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); let con_cm = Arc::clone(&self.connection_manager); + let cur_anchor_reserve_sats = + total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let spendable_amount_sats = + self.wallet.get_spendable_amount_sats(cur_anchor_reserve_sats).unwrap_or(0); + + // Fail early if we have less than the channel value available. + if spendable_amount_sats < channel_amount_sats { + log_error!(self.logger, + "Unable to create channel due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, channel_amount_sats + ); + return Err(Error::InsufficientFunds); + } + // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. tokio::task::block_in_place(move || { @@ -1033,11 +1049,37 @@ impl Node { }) })?; + // Fail if we have less than the channel value + anchor reserve available (if applicable). + let init_features = self + .peer_manager + .peer_by_node_id(&node_id) + .ok_or(Error::ConnectionFailed)? 
+ .init_features; + let required_funds_sats = channel_amount_sats + + self.config.anchor_channels_config.as_ref().map_or(0, |c| { + if init_features.requires_anchors_zero_fee_htlc_tx() + && !c.trusted_peers_no_reserve.contains(&node_id) + { + c.per_channel_reserve_sats + } else { + 0 + } + }); + + if spendable_amount_sats < required_funds_sats { + log_error!(self.logger, + "Unable to create channel due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, required_funds_sats + ); + return Err(Error::InsufficientFunds); + } + let channel_config = (*(channel_config.unwrap_or_default())).clone().into(); let user_config = UserConfig { channel_handshake_limits: Default::default(), channel_handshake_config: ChannelHandshakeConfig { announced_channel: announce_channel, + negotiate_anchors_zero_fee_htlc_tx: self.config.anchor_channels_config.is_some(), ..Default::default() }, channel_config, @@ -1196,11 +1238,13 @@ impl Node { /// Retrieves an overview of all known balances. pub fn list_balances(&self) -> BalanceDetails { - let (total_onchain_balance_sats, spendable_onchain_balance_sats) = self - .wallet - .get_balance() - .map(|bal| (bal.get_total(), bal.get_spendable())) - .unwrap_or((0, 0)); + let cur_anchor_reserve_sats = + total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let (total_onchain_balance_sats, spendable_onchain_balance_sats) = + self.wallet.get_balances(cur_anchor_reserve_sats).unwrap_or((0, 0)); + + let total_anchor_channels_reserve_sats = + std::cmp::min(cur_anchor_reserve_sats, total_onchain_balance_sats); let mut total_lightning_balance_sats = 0; let mut lightning_balances = Vec::new(); @@ -1235,6 +1279,7 @@ impl Node { BalanceDetails { total_onchain_balance_sats, spendable_onchain_balance_sats, + total_anchor_channels_reserve_sats, total_lightning_balance_sats, lightning_balances, pending_balances_from_channel_closures, @@ -1367,3 +1412,23 @@ pub struct NodeStatus { /// Will be `None` if we have no public channels or we haven't broadcasted since the [`Node`] was initialized. pub latest_node_announcement_broadcast_timestamp: Option, } + +pub(crate) fn total_anchor_channels_reserve_sats( + channel_manager: &ChannelManager, config: &Config, +) -> u64 { + config.anchor_channels_config.as_ref().map_or(0, |anchor_channels_config| { + channel_manager + .list_channels() + .into_iter() + .filter(|c| { + !anchor_channels_config.trusted_peers_no_reserve.contains(&c.counterparty.node_id) + && c.channel_shutdown_state + .map_or(true, |s| s != ChannelShutdownState::ShutdownComplete) + && c.channel_type + .as_ref() + .map_or(false, |t| t.requires_anchors_zero_fee_htlc_tx()) + }) + .count() as u64 + * anchor_channels_config.per_channel_reserve_sats + }) +} diff --git a/src/payment/onchain.rs b/src/payment/onchain.rs index c178e95a0..8a879ae8c 100644 --- a/src/payment/onchain.rs +++ b/src/payment/onchain.rs @@ -1,8 +1,9 @@ //! Holds a payment handler allowing to send and receive on-chain payments. 
+use crate::config::Config; use crate::error::Error; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; -use crate::types::Wallet; +use crate::types::{ChannelManager, Wallet}; use bitcoin::{Address, Txid}; @@ -16,15 +17,17 @@ use std::sync::{Arc, RwLock}; pub struct OnchainPayment { runtime: Arc>>, wallet: Arc, + channel_manager: Arc, + config: Arc, logger: Arc, } impl OnchainPayment { pub(crate) fn new( runtime: Arc>>, wallet: Arc, - logger: Arc, + channel_manager: Arc, config: Arc, logger: Arc, ) -> Self { - Self { runtime, wallet, logger } + Self { runtime, wallet, channel_manager, config, logger } } /// Retrieve a new on-chain/funding address. @@ -35,6 +38,11 @@ impl OnchainPayment { } /// Send an on-chain payment to the given address. + /// + /// This will respect any on-chain reserve we need to keep, i.e., won't allow to cut into + /// [`BalanceDetails::total_anchor_channels_reserve_sats`]. + /// + /// [`BalanceDetails::total_anchor_channels_reserve_sats`]: crate::BalanceDetails::total_anchor_channels_reserve_sats pub fn send_to_address( &self, address: &bitcoin::Address, amount_sats: u64, ) -> Result { @@ -43,15 +51,29 @@ impl OnchainPayment { return Err(Error::NotRunning); } - let cur_balance = self.wallet.get_balance()?; - if cur_balance.get_spendable() < amount_sats { - log_error!(self.logger, "Unable to send payment due to insufficient funds."); + let cur_anchor_reserve_sats = + crate::total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let spendable_amount_sats = + self.wallet.get_spendable_amount_sats(cur_anchor_reserve_sats).unwrap_or(0); + + if spendable_amount_sats < amount_sats { + log_error!(self.logger, + "Unable to send payment due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, amount_sats + ); return Err(Error::InsufficientFunds); } self.wallet.send_to_address(address, Some(amount_sats)) } /// Send an on-chain payment to the given address, draining all the available funds. + /// + /// This is useful if you have closed all channels and want to migrate funds to another + /// on-chain wallet. + /// + /// Please note that this will **not** retain any on-chain reserves, which might be potentially + /// dangerous if you have open Anchor channels for which you can't trust the counterparty to + /// spend the Anchor output after channel closure. pub fn send_all_to_address(&self, address: &bitcoin::Address) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { diff --git a/src/wallet.rs b/src/wallet.rs index df9656d48..1e26bba62 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -166,8 +166,23 @@ where Ok(address_info.address) } - pub(crate) fn get_balance(&self) -> Result { - Ok(self.inner.lock().unwrap().get_balance()?) + pub(crate) fn get_balances( + &self, total_anchor_channels_reserve_sats: u64, + ) -> Result<(u64, u64), Error> { + let wallet_lock = self.inner.lock().unwrap(); + let (total, spendable) = wallet_lock.get_balance().map(|bal| { + ( + bal.get_total(), + bal.get_spendable().saturating_sub(total_anchor_channels_reserve_sats), + ) + })?; + Ok((total, spendable)) + } + + pub(crate) fn get_spendable_amount_sats( + &self, total_anchor_channels_reserve_sats: u64, + ) -> Result { + self.get_balances(total_anchor_channels_reserve_sats).map(|(_, s)| s) } /// Send funds to the given address. 
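The balance bookkeeping added in this patch reduces to simple arithmetic: retain `per_channel_reserve_sats` for every Anchor channel whose counterparty is not listed in `trusted_peers_no_reserve`, and report the spendable on-chain balance as the confirmed balance minus that reserve, saturating at zero. A minimal, self-contained sketch of that math (illustrative only; the actual logic lives in `total_anchor_channels_reserve_sats` and `Wallet::get_balances`):

```rust
// Illustrative only: mirrors the reserve arithmetic described above.
fn anchor_reserve_sats(untrusted_anchor_channels: u64, per_channel_reserve_sats: u64) -> u64 {
    untrusted_anchor_channels * per_channel_reserve_sats
}

fn spendable_onchain_sats(total_confirmed_sats: u64, reserve_sats: u64) -> u64 {
    // The spendable balance never goes negative; it saturates at zero.
    total_confirmed_sats.saturating_sub(reserve_sats)
}

fn main() {
    // Two untrusted Anchor channels at the default 25_000 sat per-channel reserve.
    let reserve = anchor_reserve_sats(2, 25_000);
    assert_eq!(reserve, 50_000);
    // With 80_000 sats confirmed on-chain, 30_000 sats remain spendable.
    assert_eq!(spendable_onchain_sats(80_000, reserve), 30_000);
}
```
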
diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 44c8efde9..607bb1167 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -163,9 +163,13 @@ pub(crate) fn random_listening_addresses() -> Vec { listening_addresses } -pub(crate) fn random_config() -> Config { +pub(crate) fn random_config(anchor_channels: bool) -> Config { let mut config = Config::default(); + if !anchor_channels { + config.anchor_channels_config = None; + } + config.network = Network::Regtest; println!("Setting network: {}", config.network); @@ -198,13 +202,15 @@ macro_rules! setup_builder { pub(crate) use setup_builder; -pub(crate) fn setup_two_nodes(electrsd: &ElectrsD, allow_0conf: bool) -> (TestNode, TestNode) { +pub(crate) fn setup_two_nodes( + electrsd: &ElectrsD, allow_0conf: bool, anchor_channels: bool, +) -> (TestNode, TestNode) { println!("== Node A =="); - let config_a = random_config(); + let config_a = random_config(anchor_channels); let node_a = setup_node(electrsd, config_a); println!("\n== Node B =="); - let mut config_b = random_config(); + let mut config_b = random_config(anchor_channels); if allow_0conf { config_b.trusted_peers_0conf.push(node_a.node_id()); } @@ -355,11 +361,12 @@ pub fn open_channel( pub(crate) fn do_channel_full_cycle( node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, allow_0conf: bool, + expect_anchor_channel: bool, ) { let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); - let premine_amount_sat = 100_000; + let premine_amount_sat = if expect_anchor_channel { 125_000 } else { 100_000 }; premine_and_distribute_funds( &bitcoind, @@ -406,11 +413,16 @@ pub(crate) fn do_channel_full_cycle( node_b.sync_wallets().unwrap(); let onchain_fee_buffer_sat = 1500; - let node_a_upper_bound_sat = premine_amount_sat - funding_amount_sat; - let node_a_lower_bound_sat = premine_amount_sat - funding_amount_sat - onchain_fee_buffer_sat; + let anchor_reserve_sat = if expect_anchor_channel { 25_000 } else { 0 }; + let node_a_upper_bound_sat = premine_amount_sat - anchor_reserve_sat - funding_amount_sat; + let node_a_lower_bound_sat = + premine_amount_sat - anchor_reserve_sat - funding_amount_sat - onchain_fee_buffer_sat; assert!(node_a.list_balances().spendable_onchain_balance_sats < node_a_upper_bound_sat); assert!(node_a.list_balances().spendable_onchain_balance_sats > node_a_lower_bound_sat); - assert_eq!(node_b.list_balances().spendable_onchain_balance_sats, premine_amount_sat); + assert_eq!( + node_b.list_balances().spendable_onchain_balance_sats, + premine_amount_sat - anchor_reserve_sat + ); expect_channel_ready_event!(node_a, node_b.node_id()); diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index ec752f474..38143ef10 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -36,7 +36,7 @@ fn test_cln() { common::generate_blocks_and_wait(&bitcoind_client, &electrs_client, 1); // Setup LDK Node - let config = common::random_config(); + let config = common::random_config(true); let mut builder = Builder::from_config(config); builder.set_esplora_server("http://127.0.0.1:3002".to_string()); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index fa1b33b39..5f64b44e6 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -22,21 +22,28 @@ use crate::common::expect_channel_ready_event; #[test] fn channel_full_cycle() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - 
let (node_a, node_b) = setup_two_nodes(&electrsd, false); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true); } #[test] fn channel_full_cycle_0conf() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, true); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true) + let (node_a, node_b) = setup_two_nodes(&electrsd, true, true); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true, true) +} + +#[test] +fn channel_full_cycle_legacy_staticremotekey() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, false); } #[test] fn channel_open_fails_when_funds_insufficient() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); @@ -76,7 +83,7 @@ fn multi_hop_sending() { // Setup and fund 5 nodes let mut nodes = Vec::new(); for _ in 0..5 { - let config = random_config(); + let config = random_config(true); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); let node = builder.build().unwrap(); @@ -147,7 +154,7 @@ fn multi_hop_sending() { #[test] fn connect_to_public_testnet_esplora() { - let mut config = random_config(); + let mut config = random_config(true); config.network = Network::Testnet; setup_builder!(builder, config); builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); @@ -159,7 +166,7 @@ fn connect_to_public_testnet_esplora() { #[test] fn start_stop_reinit() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let config = random_config(); + let config = random_config(true); let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); @@ -227,7 +234,7 @@ fn start_stop_reinit() { #[test] fn onchain_spend_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); @@ -275,7 +282,7 @@ fn onchain_spend_receive() { #[test] fn sign_verify_msg() { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let config = random_config(); + let config = random_config(true); let node = setup_node(&electrsd, config); // Tests arbitrary message signing and later verification @@ -293,7 +300,7 @@ fn connection_restart_behavior() { fn do_connection_restart_behavior(persist: bool) { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false); let node_id_a = node_a.node_id(); let node_id_b = node_b.node_id(); @@ -344,7 +351,7 @@ fn do_connection_restart_behavior(persist: bool) { #[test] fn concurrent_connections_succeed() { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + 
let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); let node_a = Arc::new(node_a); let node_b = Arc::new(node_b); @@ -374,7 +381,7 @@ fn concurrent_connections_succeed() { #[test] fn simple_bolt12_send_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); let address_a = node_a.onchain_payment().new_address().unwrap(); let premine_amount_sat = 5_000_000; diff --git a/tests/integration_tests_vss.rs b/tests/integration_tests_vss.rs index 26d0456d4..47bae6b94 100644 --- a/tests/integration_tests_vss.rs +++ b/tests/integration_tests_vss.rs @@ -9,7 +9,7 @@ fn channel_full_cycle_with_vss_store() { let (bitcoind, electrsd) = common::setup_bitcoind_and_electrsd(); println!("== Node A =="); let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); - let config_a = common::random_config(); + let config_a = common::random_config(true); let mut builder_a = Builder::from_config(config_a); builder_a.set_esplora_server(esplora_url.clone()); let vss_base_url = std::env::var("TEST_VSS_BASE_URL").unwrap(); @@ -18,11 +18,11 @@ fn channel_full_cycle_with_vss_store() { node_a.start().unwrap(); println!("\n== Node B =="); - let config_b = common::random_config(); + let config_b = common::random_config(true); let mut builder_b = Builder::from_config(config_b); builder_b.set_esplora_server(esplora_url); let node_b = builder_b.build_with_vss_store(vss_base_url, "node_2_store".to_string()).unwrap(); node_b.start().unwrap(); - common::do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false); + common::do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true); } From 5ba1b35e4ed38a58fad6f5cad38b744100ae21e4 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 20 Feb 2024 10:06:03 +0100 Subject: [PATCH 50/89] Use Anchor channels in CLN integration test .. because they will be the new default. Note the upcoming CLN 24.02 release will make Anchors default, too, but for now we have to set the `experimental-anchors` config option. --- docker-compose-cln.yml | 1 + tests/integration_tests_cln.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/docker-compose-cln.yml b/docker-compose-cln.yml index 6628636b9..5fb1f2dcd 100644 --- a/docker-compose-cln.yml +++ b/docker-compose-cln.yml @@ -63,6 +63,7 @@ services: "--bitcoin-rpcuser=user", "--bitcoin-rpcpassword=pass", "--regtest", + "--experimental-anchors", ] ports: - "19846:19846" diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index 38143ef10..7aea13620 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -88,6 +88,7 @@ fn test_cln() { let funding_txo = common::expect_channel_pending_event!(node, cln_node_id); common::wait_for_tx(&electrs_client, funding_txo.txid); common::generate_blocks_and_wait(&bitcoind_client, &electrs_client, 6); + node.sync_wallets().unwrap(); let user_channel_id = common::expect_channel_ready_event!(node, cln_node_id); // Send a payment to CLN From 1887af8c9220196317860e0dbbf06a2cbaff88ea Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 26 Feb 2024 15:04:09 +0100 Subject: [PATCH 51/89] Add `force_close` method to allow channel force-closure .. which we somehow so far ommitted exposing in the API. We now introduce a `force_close` method and broadcast if the counterparty is not trusted. 
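Before the diff, a rough usage sketch of the new API from a consumer's perspective: attempt a cooperative close first and only force-close if that fails. This is an editor's illustration assuming the crate-level `Node`, `UserChannelId`, and `NodeError` exports used elsewhere in this series:

```rust
use bitcoin::secp256k1::PublicKey;
use ldk_node::{Node, NodeError, UserChannelId};

// Prefer a cooperative close; only force-close if the cooperative attempt fails.
fn close_or_force_close(
    node: &Node, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey,
) -> Result<(), NodeError> {
    match node.close_channel(user_channel_id, counterparty_node_id) {
        Ok(()) => Ok(()),
        Err(e) => {
            eprintln!("Cooperative close failed ({}), force-closing instead", e);
            // Note: this broadcasts our latest commitment transaction unless the peer is
            // listed in `AnchorChannelsConfig::trusted_peers_no_reserve`.
            node.force_close_channel(user_channel_id, counterparty_node_id)
        },
    }
}
```
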
---
 bindings/ldk_node.udl |  2 ++
 src/lib.rs            | 82 +++++++++++++++++++++++++++++++++++--------
 2 files changed, 70 insertions(+), 14 deletions(-)

diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl
index 7b1b3047e..a3023eca1 100644
--- a/bindings/ldk_node.udl
+++ b/bindings/ldk_node.udl
@@ -72,6 +72,8 @@ interface Node {
 	[Throws=NodeError]
 	void close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id);
 	[Throws=NodeError]
+	void force_close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id);
+	[Throws=NodeError]
 	void update_channel_config([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, ChannelConfig channel_config);
 	[Throws=NodeError]
 	void sync_wallets();
diff --git a/src/lib.rs b/src/lib.rs
index 7b2ccaa83..9c3c12342 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1176,30 +1176,84 @@ impl Node {
 	}
 
 	/// Close a previously opened channel.
+	///
+	/// Will attempt to close a channel cooperatively. If this fails, users might need to resort to
+	/// [`Node::force_close_channel`].
 	pub fn close_channel(
 		&self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey,
+	) -> Result<(), Error> {
+		self.close_channel_internal(user_channel_id, counterparty_node_id, false)
+	}
+
+	/// Force-close a previously opened channel.
+	///
+	/// Will force-close the channel, potentially broadcasting our latest state. Note that in
+	/// contrast to cooperative closure, force-closing will have the channel funds time-locked,
+	/// i.e., they will only be available after the counterparty had time to contest our claim.
+	/// Force-closing channels is also more costly in terms of on-chain fees, so cooperative
+	/// closure should always be preferred (and tried first).
+	///
+	/// Broadcasting the closing transactions will be omitted for Anchor channels if we trust the
+	/// counterparty to broadcast for us (see [`AnchorChannelsConfig::trusted_peers_no_reserve`]
+	/// for more information).
+	pub fn force_close_channel(
+		&self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey,
+	) -> Result<(), Error> {
+		self.close_channel_internal(user_channel_id, counterparty_node_id, true)
+	}
+
+	fn close_channel_internal(
+		&self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, force: bool,
 	) -> Result<(), Error> {
 		let open_channels =
 			self.channel_manager.list_channels_with_counterparty(&counterparty_node_id);
 		if let Some(channel_details) =
 			open_channels.iter().find(|c| c.user_channel_id == user_channel_id.0)
 		{
-			match self
-				.channel_manager
-				.close_channel(&channel_details.channel_id, &counterparty_node_id)
-			{
-				Ok(_) => {
-					// Check if this was the last open channel, if so, forget the peer.
- if open_channels.len() == 1 { - self.peer_store.remove_peer(&counterparty_node_id)?; - } - Ok(()) - }, - Err(_) => Err(Error::ChannelClosingFailed), + if force { + if self.config.anchor_channels_config.as_ref().map_or(false, |acc| { + acc.trusted_peers_no_reserve.contains(&counterparty_node_id) + }) { + self.channel_manager + .force_close_without_broadcasting_txn( + &channel_details.channel_id, + &counterparty_node_id, + ) + .map_err(|e| { + log_error!( + self.logger, + "Failed to force-close channel to trusted peer: {:?}", + e + ); + Error::ChannelClosingFailed + })?; + } else { + self.channel_manager + .force_close_broadcasting_latest_txn( + &channel_details.channel_id, + &counterparty_node_id, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to force-close channel: {:?}", e); + Error::ChannelClosingFailed + })?; + } + } else { + self.channel_manager + .close_channel(&channel_details.channel_id, &counterparty_node_id) + .map_err(|e| { + log_error!(self.logger, "Failed to close channel: {:?}", e); + Error::ChannelClosingFailed + })?; + } + + // Check if this was the last open channel, if so, forget the peer. + if open_channels.len() == 1 { + self.peer_store.remove_peer(&counterparty_node_id)?; } - } else { - Ok(()) } + + Ok(()) } /// Update the config for a previously opened channel. From 0aaa8f1cf4f81212973a0f99e08f7fa67c49ad8a Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 17 May 2024 09:44:25 +0200 Subject: [PATCH 52/89] Test force-closure 'happy' case .. i.e., without bumping. --- tests/common/mod.rs | 158 ++++++++++++++++++++++++++++---- tests/integration_tests_rust.rs | 36 +++++--- tests/integration_tests_vss.rs | 10 +- 3 files changed, 174 insertions(+), 30 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 607bb1167..062d14f61 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -3,7 +3,9 @@ use ldk_node::io::sqlite_store::SqliteStore; use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus}; -use ldk_node::{Builder, Config, Event, LogLevel, Node, NodeError}; +use ldk_node::{ + Builder, Config, Event, LightningBalance, LogLevel, Node, NodeError, PendingSweepBalance, +}; use lightning::ln::msgs::SocketAddress; use lightning::util::persist::KVStore; @@ -171,6 +173,8 @@ pub(crate) fn random_config(anchor_channels: bool) -> Config { } config.network = Network::Regtest; + config.onchain_wallet_sync_interval_secs = 100000; + config.wallet_sync_interval_secs = 100000; println!("Setting network: {}", config.network); let rand_dir = random_storage_path(); @@ -203,7 +207,7 @@ macro_rules! 
setup_builder { pub(crate) use setup_builder; pub(crate) fn setup_two_nodes( - electrsd: &ElectrsD, allow_0conf: bool, anchor_channels: bool, + electrsd: &ElectrsD, allow_0conf: bool, anchor_channels: bool, anchors_trusted_no_reserve: bool, ) -> (TestNode, TestNode) { println!("== Node A =="); let config_a = random_config(anchor_channels); @@ -214,6 +218,14 @@ pub(crate) fn setup_two_nodes( if allow_0conf { config_b.trusted_peers_0conf.push(node_a.node_id()); } + if anchor_channels && anchors_trusted_no_reserve { + config_b + .anchor_channels_config + .as_mut() + .unwrap() + .trusted_peers_no_reserve + .push(node_a.node_id()); + } let node_b = setup_node(electrsd, config_b); (node_a, node_b) } @@ -361,7 +373,7 @@ pub fn open_channel( pub(crate) fn do_channel_full_cycle( node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, allow_0conf: bool, - expect_anchor_channel: bool, + expect_anchor_channel: bool, force_close: bool, ) { let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); @@ -413,20 +425,40 @@ pub(crate) fn do_channel_full_cycle( node_b.sync_wallets().unwrap(); let onchain_fee_buffer_sat = 1500; - let anchor_reserve_sat = if expect_anchor_channel { 25_000 } else { 0 }; - let node_a_upper_bound_sat = premine_amount_sat - anchor_reserve_sat - funding_amount_sat; - let node_a_lower_bound_sat = - premine_amount_sat - anchor_reserve_sat - funding_amount_sat - onchain_fee_buffer_sat; + let node_a_anchor_reserve_sat = if expect_anchor_channel { 25_000 } else { 0 }; + let node_a_upper_bound_sat = + premine_amount_sat - node_a_anchor_reserve_sat - funding_amount_sat; + let node_a_lower_bound_sat = premine_amount_sat + - node_a_anchor_reserve_sat + - funding_amount_sat + - onchain_fee_buffer_sat; assert!(node_a.list_balances().spendable_onchain_balance_sats < node_a_upper_bound_sat); assert!(node_a.list_balances().spendable_onchain_balance_sats > node_a_lower_bound_sat); assert_eq!( - node_b.list_balances().spendable_onchain_balance_sats, - premine_amount_sat - anchor_reserve_sat + node_a.list_balances().total_anchor_channels_reserve_sats, + node_a_anchor_reserve_sat ); - expect_channel_ready_event!(node_a, node_b.node_id()); + let node_b_anchor_reserve_sat = if node_b + .config() + .anchor_channels_config + .map_or(true, |acc| acc.trusted_peers_no_reserve.contains(&node_a.node_id())) + { + 0 + } else { + 25_000 + }; + assert_eq!( + node_b.list_balances().spendable_onchain_balance_sats, + premine_amount_sat - node_b_anchor_reserve_sat + ); + assert_eq!( + node_b.list_balances().total_anchor_channels_reserve_sats, + node_b_anchor_reserve_sat + ); - let user_channel_id = expect_channel_ready_event!(node_b, node_a.node_id()); + let user_channel_id = expect_channel_ready_event!(node_a, node_b.node_id()); + expect_channel_ready_event!(node_b, node_a.node_id()); println!("\nB receive"); let invoice_amount_1_msat = 2500_000; @@ -582,8 +614,14 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_a.list_payments().len(), 4); assert_eq!(node_b.list_payments().len(), 5); - println!("\nB close_channel"); - node_b.close_channel(&user_channel_id, node_a.node_id()).unwrap(); + println!("\nB close_channel (force: {})", force_close); + if force_close { + std::thread::sleep(Duration::from_secs(1)); + node_a.force_close_channel(&user_channel_id, node_b.node_id()).unwrap(); + } else { + node_a.close_channel(&user_channel_id, node_b.node_id()).unwrap(); + } + expect_event!(node_a, ChannelClosed); expect_event!(node_b, 
ChannelClosed); @@ -593,6 +631,87 @@ pub(crate) fn do_channel_full_cycle( node_a.sync_wallets().unwrap(); node_b.sync_wallets().unwrap(); + if force_close { + // Check node_b properly sees all balances and sweeps them. + assert_eq!(node_b.list_balances().lightning_balances.len(), 1); + match node_b.list_balances().lightning_balances[0] { + LightningBalance::ClaimableAwaitingConfirmations { + counterparty_node_id, + confirmation_height, + .. + } => { + assert_eq!(counterparty_node_id, node_a.node_id()); + let cur_height = node_b.status().current_best_block.height; + let blocks_to_go = confirmation_height - cur_height; + generate_blocks_and_wait(&bitcoind, electrsd, blocks_to_go as usize); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + }, + _ => panic!("Unexpected balance state!"), + } + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert_eq!(node_b.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_b.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::BroadcastAwaitingConfirmation { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 1); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert_eq!(node_b.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_b.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::AwaitingThresholdConfirmations { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 5); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert!(node_b.list_balances().pending_balances_from_channel_closures.is_empty()); + + // Check node_a properly sees all balances and sweeps them. + assert_eq!(node_a.list_balances().lightning_balances.len(), 1); + match node_a.list_balances().lightning_balances[0] { + LightningBalance::ClaimableAwaitingConfirmations { + counterparty_node_id, + confirmation_height, + .. + } => { + assert_eq!(counterparty_node_id, node_b.node_id()); + let cur_height = node_a.status().current_best_block.height; + let blocks_to_go = confirmation_height - cur_height; + generate_blocks_and_wait(&bitcoind, electrsd, blocks_to_go as usize); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + }, + _ => panic!("Unexpected balance state!"), + } + + assert!(node_a.list_balances().lightning_balances.is_empty()); + assert_eq!(node_a.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_a.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::BroadcastAwaitingConfirmation { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 1); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + + assert!(node_a.list_balances().lightning_balances.is_empty()); + assert_eq!(node_a.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_a.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::AwaitingThresholdConfirmations { .. 
} => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 5); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + } + let sum_of_all_payments_sat = (push_msat + invoice_amount_1_msat + overpaid_amount_msat @@ -604,11 +723,14 @@ pub(crate) fn do_channel_full_cycle( let node_a_lower_bound_sat = node_a_upper_bound_sat - onchain_fee_buffer_sat; assert!(node_a.list_balances().spendable_onchain_balance_sats > node_a_lower_bound_sat); assert!(node_a.list_balances().spendable_onchain_balance_sats < node_a_upper_bound_sat); - let expected_final_amount_node_b_sat = premine_amount_sat + sum_of_all_payments_sat; - assert_eq!( - node_b.list_balances().spendable_onchain_balance_sats, - expected_final_amount_node_b_sat - ); + + let node_b_upper_bound_sat = premine_amount_sat + sum_of_all_payments_sat; + let node_b_lower_bound_sat = node_b_upper_bound_sat - onchain_fee_buffer_sat; + assert!(node_b.list_balances().spendable_onchain_balance_sats > node_b_lower_bound_sat); + assert!(node_b.list_balances().spendable_onchain_balance_sats <= node_b_upper_bound_sat); + + assert_eq!(node_a.list_balances().total_anchor_channels_reserve_sats, 0); + assert_eq!(node_b.list_balances().total_anchor_channels_reserve_sats, 0); // Check we handled all events assert_eq!(node_a.next_event(), None); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 5f64b44e6..37ddeb9a7 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -22,28 +22,42 @@ use crate::common::expect_channel_ready_event; #[test] fn channel_full_cycle() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, false); +} + +#[test] +fn channel_full_cycle_force_close() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, true); +} + +#[test] +fn channel_full_cycle_force_close_trusted_no_reserve() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, true); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, true); } #[test] fn channel_full_cycle_0conf() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, true, true); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true, true) + let (node_a, node_b) = setup_two_nodes(&electrsd, true, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true, true, false) } #[test] fn channel_full_cycle_legacy_staticremotekey() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, false); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, false, false); } #[test] fn channel_open_fails_when_funds_insufficient() { let (bitcoind, 
electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); @@ -234,7 +248,7 @@ fn start_stop_reinit() { #[test] fn onchain_spend_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); @@ -300,7 +314,7 @@ fn connection_restart_behavior() { fn do_connection_restart_behavior(persist: bool) { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false, false); let node_id_a = node_a.node_id(); let node_id_b = node_b.node_id(); @@ -351,7 +365,7 @@ fn do_connection_restart_behavior(persist: bool) { #[test] fn concurrent_connections_succeed() { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); let node_a = Arc::new(node_a); let node_b = Arc::new(node_b); @@ -381,7 +395,7 @@ fn concurrent_connections_succeed() { #[test] fn simple_bolt12_send_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false, true); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); let address_a = node_a.onchain_payment().new_address().unwrap(); let premine_amount_sat = 5_000_000; diff --git a/tests/integration_tests_vss.rs b/tests/integration_tests_vss.rs index 47bae6b94..2a57ccffc 100644 --- a/tests/integration_tests_vss.rs +++ b/tests/integration_tests_vss.rs @@ -24,5 +24,13 @@ fn channel_full_cycle_with_vss_store() { let node_b = builder_b.build_with_vss_store(vss_base_url, "node_2_store".to_string()).unwrap(); node_b.start().unwrap(); - common::do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true); + common::do_channel_full_cycle( + node_a, + node_b, + &bitcoind.client, + &electrsd.client, + false, + true, + false, + ); } From 436f2e429f4b0cb5d001b9812e61447970cfa8a6 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 17 May 2024 14:28:38 +0200 Subject: [PATCH 53/89] Use internal addresses/`LastUnused` for change scripts --- src/wallet.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/src/wallet.rs b/src/wallet.rs index 1e26bba62..d970b52a6 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -166,6 +166,12 @@ where Ok(address_info.address) } + fn get_new_internal_address(&self) -> Result { + let address_info = + self.inner.lock().unwrap().get_internal_address(AddressIndex::LastUnused)?; + Ok(address_info.address) + } + pub(crate) fn get_balances( &self, total_anchor_channels_reserve_sats: u64, ) -> Result<(u64, u64), Error> { @@ -349,9 +355,10 @@ where fn get_change_script(&self) -> Result { let locked_wallet = self.inner.lock().unwrap(); - let address_info = locked_wallet.get_address(AddressIndex::LastUnused).map_err(|e| { - log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); - })?; + let address_info = + 
locked_wallet.get_internal_address(AddressIndex::LastUnused).map_err(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + })?; Ok(address_info.address.script_pubkey()) } @@ -569,7 +576,7 @@ where L::Target: Logger, { fn get_change_destination_script(&self) -> Result { - let address = self.wallet.get_new_address().map_err(|e| { + let address = self.wallet.get_new_internal_address().map_err(|e| { log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); })?; Ok(address.script_pubkey()) From 5418cc8a3a333000d6cccb4f584c6d4ee9ec4c6e Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 20 May 2024 10:46:45 +0200 Subject: [PATCH 54/89] Rename `types::NetworkGraph` to `types::Graph` .. as we're about to expose a new `NetworkGraph` wrapper type. --- src/builder.rs | 4 ++-- src/event.rs | 6 +++--- src/gossip.rs | 6 +++--- src/lib.rs | 4 ++-- src/types.rs | 19 ++++++++----------- 5 files changed, 18 insertions(+), 21 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 2a361396d..5d6243efb 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -15,7 +15,7 @@ use crate::payment::store::PaymentStore; use crate::peer_store::PeerStore; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ - ChainMonitor, ChannelManager, DynStore, GossipSync, KeysManager, MessageRouter, NetworkGraph, + ChainMonitor, ChannelManager, DynStore, GossipSync, Graph, KeysManager, MessageRouter, OnionMessenger, PeerManager, }; use crate::wallet::Wallet; @@ -633,7 +633,7 @@ fn build_with_store_internal( Ok(graph) => Arc::new(graph), Err(e) => { if e.kind() == std::io::ErrorKind::NotFound { - Arc::new(NetworkGraph::new(config.network.into(), Arc::clone(&logger))) + Arc::new(Graph::new(config.network.into(), Arc::clone(&logger))) } else { return Err(BuildError::ReadFailed); } diff --git a/src/event.rs b/src/event.rs index 36769c0ee..5cd9e2603 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1,7 +1,7 @@ use crate::types::{DynStore, Sweeper, Wallet}; use crate::{ - hex_utils, BumpTransactionEventHandler, ChannelManager, Config, Error, NetworkGraph, PeerInfo, + hex_utils, BumpTransactionEventHandler, ChannelManager, Config, Error, Graph, PeerInfo, PeerStore, UserChannelId, }; @@ -323,7 +323,7 @@ where channel_manager: Arc, connection_manager: Arc>, output_sweeper: Arc, - network_graph: Arc, + network_graph: Arc, payment_store: Arc>, peer_store: Arc>, runtime: Arc>>, @@ -339,7 +339,7 @@ where event_queue: Arc>, wallet: Arc, bump_tx_event_handler: Arc, channel_manager: Arc, connection_manager: Arc>, - output_sweeper: Arc, network_graph: Arc, + output_sweeper: Arc, network_graph: Arc, payment_store: Arc>, peer_store: Arc>, runtime: Arc>>, logger: L, config: Arc, ) -> Self { diff --git a/src/gossip.rs b/src/gossip.rs index de98d441e..5a41bf51c 100644 --- a/src/gossip.rs +++ b/src/gossip.rs @@ -1,5 +1,5 @@ use crate::logger::{log_trace, FilesystemLogger, Logger}; -use crate::types::{GossipSync, NetworkGraph, P2PGossipSync, RapidGossipSync}; +use crate::types::{GossipSync, Graph, P2PGossipSync, RapidGossipSync}; use crate::Error; use lightning::routing::utxo::UtxoLookup; @@ -20,7 +20,7 @@ pub(crate) enum GossipSource { } impl GossipSource { - pub fn new_p2p(network_graph: Arc, logger: Arc) -> Self { + pub fn new_p2p(network_graph: Arc, logger: Arc) -> Self { let gossip_sync = Arc::new(P2PGossipSync::new( network_graph, None::>, @@ -30,7 +30,7 @@ impl GossipSource { } pub fn new_rgs( - server_url: String, latest_sync_timestamp: u32, network_graph: Arc, + 
server_url: String, latest_sync_timestamp: u32, network_graph: Arc, logger: Arc, ) -> Self { let gossip_sync = Arc::new(RapidGossipSync::new(network_graph, Arc::clone(&logger))); diff --git a/src/lib.rs b/src/lib.rs index 9c3c12342..9fdaaf793 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -134,7 +134,7 @@ use payment::{Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, Spon use peer_store::{PeerInfo, PeerStore}; use types::{ Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, FeeEstimator, - KeysManager, NetworkGraph, PeerManager, Router, Scorer, Sweeper, Wallet, + Graph, KeysManager, PeerManager, Router, Scorer, Sweeper, Wallet, }; pub use types::{ChannelDetails, PeerDetails, UserChannelId}; @@ -183,7 +183,7 @@ pub struct Node { peer_manager: Arc, connection_manager: Arc>>, keys_manager: Arc, - network_graph: Arc, + network_graph: Arc, gossip_source: Arc, liquidity_source: Option>>>, kv_store: Arc, diff --git a/src/types.rs b/src/types.rs index b7cf4ad44..89a14e163 100644 --- a/src/types.rs +++ b/src/types.rs @@ -80,31 +80,28 @@ pub(crate) type KeysManager = crate::wallet::WalletKeysManager< >; pub(crate) type Router = DefaultRouter< - Arc, + Arc, Arc, Arc, Arc>, ProbabilisticScoringFeeParameters, Scorer, >; -pub(crate) type Scorer = ProbabilisticScorer, Arc>; +pub(crate) type Scorer = ProbabilisticScorer, Arc>; -pub(crate) type NetworkGraph = gossip::NetworkGraph>; +pub(crate) type Graph = gossip::NetworkGraph>; pub(crate) type UtxoLookup = dyn lightning::routing::utxo::UtxoLookup + Send + Sync; -pub(crate) type P2PGossipSync = lightning::routing::gossip::P2PGossipSync< - Arc, - Arc, - Arc, ->; +pub(crate) type P2PGossipSync = + lightning::routing::gossip::P2PGossipSync, Arc, Arc>; pub(crate) type RapidGossipSync = - lightning_rapid_gossip_sync::RapidGossipSync, Arc>; + lightning_rapid_gossip_sync::RapidGossipSync, Arc>; pub(crate) type GossipSync = lightning_background_processor::GossipSync< Arc, Arc, - Arc, + Arc, Arc, Arc, >; @@ -120,7 +117,7 @@ pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMesse >; pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMessageRouter< - Arc, + Arc, Arc, Arc, >; From 35e496809ed85fa4448c62a51686ec79995f7c73 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 20 May 2024 12:19:05 +0200 Subject: [PATCH 55/89] Expose `NetworkGraph` accessors Previously, we omitted exposing details for the sake of simplicity. However, querying the network graph has a lot of utility, and is also very useful for debugging purposes. Here, we therefore give users access to the network graph, and expose slightly simplified versions of `ChannelInfo`/`NodeInfo` in bindings. 
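For illustration, a rough usage sketch of the accessors introduced below, assuming a started `node: ldk_node::Node` in scope in a non-bindings (Rust) build; the printed fields are just examples:

    // Obtain a handle for querying the public network graph of a running node.
    let graph = node.network_graph();

    // List all known channels and print some per-channel routing data.
    for scid in graph.list_channels() {
        if let Some(chan_info) = graph.channel(scid) {
            println!(
                "SCID {}: {} <-> {}, capacity: {:?} sats",
                scid, chan_info.node_one, chan_info.node_two, chan_info.capacity_sats
            );
        }
    }

    // Look up what we know about each node in the graph.
    for node_id in graph.list_nodes() {
        if let Some(node_info) = graph.node(&node_id) {
            println!("{}: {} known channels", node_id, node_info.channels.len());
        }
    }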
--- bindings/ldk_node.udl | 45 ++++++++++++ src/error.rs | 3 + src/graph.rs | 166 ++++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 14 ++++ src/uniffi_types.rs | 18 +++++ 5 files changed, 246 insertions(+) create mode 100644 src/graph.rs diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index a3023eca1..ec63adbe0 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -84,6 +84,7 @@ interface Node { sequence list_payments(); sequence list_peers(); sequence list_channels(); + NetworkGraph network_graph(); [Throws=NodeError] string sign_message([ByRef]sequence msg); boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); @@ -167,6 +168,7 @@ enum NodeError { "InvalidPublicKey", "InvalidSecretKey", "InvalidOfferId", + "InvalidNodeId", "InvalidPaymentId", "InvalidPaymentHash", "InvalidPaymentPreimage", @@ -387,6 +389,46 @@ enum LogLevel { "Error", }; +interface NetworkGraph { + sequence list_channels(); + ChannelInfo? channel(u64 short_channel_id); + sequence list_nodes(); + NodeInfo? node([ByRef]NodeId node_id); +}; + +dictionary ChannelInfo { + NodeId node_one; + ChannelUpdateInfo? one_to_two; + NodeId node_two; + ChannelUpdateInfo? two_to_one; + u64? capacity_sats; +}; + +dictionary ChannelUpdateInfo { + u32 last_update; + boolean enabled; + u16 cltv_expiry_delta; + u64 htlc_minimum_msat; + u64 htlc_maximum_msat; + RoutingFees fees; +}; + +dictionary RoutingFees { + u32 base_msat; + u32 proportional_millionths; +}; + +dictionary NodeInfo { + sequence channels; + NodeAnnouncementInfo? announcement_info; +}; + +dictionary NodeAnnouncementInfo { + u32 last_update; + string alias; + sequence addresses; +}; + [Custom] typedef string Txid; @@ -399,6 +441,9 @@ typedef string SocketAddress; [Custom] typedef string PublicKey; +[Custom] +typedef string NodeId; + [Custom] typedef string Address; diff --git a/src/error.rs b/src/error.rs index 824bde192..63ec0ad84 100644 --- a/src/error.rs +++ b/src/error.rs @@ -55,6 +55,8 @@ pub enum Error { InvalidSecretKey, /// The given offer id is invalid. InvalidOfferId, + /// The given node id is invalid. + InvalidNodeId, /// The given payment id is invalid. InvalidPaymentId, /// The given payment hash is invalid. @@ -120,6 +122,7 @@ impl fmt::Display for Error { Self::InvalidPublicKey => write!(f, "The given public key is invalid."), Self::InvalidSecretKey => write!(f, "The given secret key is invalid."), Self::InvalidOfferId => write!(f, "The given offer id is invalid."), + Self::InvalidNodeId => write!(f, "The given node id is invalid."), Self::InvalidPaymentId => write!(f, "The given payment id is invalid."), Self::InvalidPaymentHash => write!(f, "The given payment hash is invalid."), Self::InvalidPaymentPreimage => write!(f, "The given payment preimage is invalid."), diff --git a/src/graph.rs b/src/graph.rs new file mode 100644 index 000000000..79a21853d --- /dev/null +++ b/src/graph.rs @@ -0,0 +1,166 @@ +//! Objects for querying the network graph. + +use crate::types::Graph; + +use lightning::routing::gossip::NodeId; + +#[cfg(feature = "uniffi")] +use lightning::ln::msgs::SocketAddress; +#[cfg(feature = "uniffi")] +use lightning::routing::gossip::RoutingFees; + +#[cfg(not(feature = "uniffi"))] +use lightning::routing::gossip::{ChannelInfo, NodeInfo}; + +use std::sync::Arc; + +/// Represents the network as nodes and channels between them. 
+pub struct NetworkGraph { + inner: Arc, +} + +impl NetworkGraph { + pub(crate) fn new(inner: Arc) -> Self { + Self { inner } + } + + /// Returns the list of channels in the graph + pub fn list_channels(&self) -> Vec { + self.inner.read_only().channels().unordered_keys().map(|c| *c).collect() + } + + /// Returns information on a channel with the given id. + pub fn channel(&self, short_channel_id: u64) -> Option { + self.inner.read_only().channels().get(&short_channel_id).cloned().map(|c| c.into()) + } + + /// Returns the list of nodes in the graph + pub fn list_nodes(&self) -> Vec { + self.inner.read_only().nodes().unordered_keys().map(|n| *n).collect() + } + + /// Returns information on a node with the given id. + pub fn node(&self, node_id: &NodeId) -> Option { + self.inner.read_only().nodes().get(node_id).cloned().map(|n| n.into()) + } +} + +/// Details about a channel (both directions). +/// +/// Received within a channel announcement. +/// +/// This is a simplified version of LDK's `ChannelInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ChannelInfo { + /// Source node of the first direction of a channel + pub node_one: NodeId, + /// Details about the first direction of a channel + pub one_to_two: Option, + /// Source node of the second direction of a channel + pub node_two: NodeId, + /// Details about the second direction of a channel + pub two_to_one: Option, + /// The channel capacity as seen on-chain, if chain lookup is available. + pub capacity_sats: Option, +} + +#[cfg(feature = "uniffi")] +impl From for ChannelInfo { + fn from(value: lightning::routing::gossip::ChannelInfo) -> Self { + Self { + node_one: value.node_one, + one_to_two: value.one_to_two.map(|u| u.into()), + node_two: value.node_two, + two_to_one: value.two_to_one.map(|u| u.into()), + capacity_sats: value.capacity_sats, + } + } +} + +/// Details about one direction of a channel as received within a `ChannelUpdate`. +/// +/// This is a simplified version of LDK's `ChannelUpdateInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ChannelUpdateInfo { + /// When the last update to the channel direction was issued. + /// Value is opaque, as set in the announcement. + pub last_update: u32, + /// Whether the channel can be currently used for payments (in this one direction). + pub enabled: bool, + /// The difference in CLTV values that you must have when routing through this channel. + pub cltv_expiry_delta: u16, + /// The minimum value, which must be relayed to the next hop via the channel + pub htlc_minimum_msat: u64, + /// The maximum value which may be relayed to the next hop via the channel. + pub htlc_maximum_msat: u64, + /// Fees charged when the channel is used for routing + pub fees: RoutingFees, +} + +#[cfg(feature = "uniffi")] +impl From for ChannelUpdateInfo { + fn from(value: lightning::routing::gossip::ChannelUpdateInfo) -> Self { + Self { + last_update: value.last_update, + enabled: value.enabled, + cltv_expiry_delta: value.cltv_expiry_delta, + htlc_minimum_msat: value.htlc_minimum_msat, + htlc_maximum_msat: value.htlc_maximum_msat, + fees: value.fees, + } + } +} + +/// Details about a node in the network, known from the network announcement. +/// +/// This is a simplified version of LDK's `NodeInfo` for bindings. 
+#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeInfo { + /// All valid channels a node has announced + pub channels: Vec, + /// More information about a node from node_announcement. + /// Optional because we store a Node entry after learning about it from + /// a channel announcement, but before receiving a node announcement. + pub announcement_info: Option, +} + +#[cfg(feature = "uniffi")] +impl From for NodeInfo { + fn from(value: lightning::routing::gossip::NodeInfo) -> Self { + Self { + channels: value.channels, + announcement_info: value.announcement_info.map(|a| a.into()), + } + } +} + +/// Information received in the latest node_announcement from this node. +/// +/// This is a simplified version of LDK's `NodeAnnouncementInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeAnnouncementInfo { + /// When the last known update to the node state was issued. + /// Value is opaque, as set in the announcement. + pub last_update: u32, + /// Moniker assigned to the node. + /// May be invalid or malicious (eg control chars), + /// should not be exposed to the user. + pub alias: String, + /// List of addresses on which this node is reachable + pub addresses: Vec, +} + +#[cfg(feature = "uniffi")] +impl From for NodeAnnouncementInfo { + fn from(value: lightning::routing::gossip::NodeAnnouncementInfo) -> Self { + Self { + last_update: value.last_update, + alias: value.alias.to_string(), + addresses: value.addresses().iter().cloned().collect(), + } + } +} diff --git a/src/lib.rs b/src/lib.rs index 9fdaaf793..b748588a7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -83,6 +83,7 @@ mod error; mod event; mod fee_estimator; mod gossip; +pub mod graph; mod hex_utils; pub mod io; mod liquidity; @@ -128,6 +129,7 @@ use config::{ use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; +use graph::NetworkGraph; use liquidity::LiquiditySource; use payment::store::PaymentStore; use payment::{Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, SpontaneousPayment}; @@ -1407,6 +1409,18 @@ impl Node { peers } + /// Returns a handler allowing to query the network graph. + #[cfg(not(feature = "uniffi"))] + pub fn network_graph(&self) -> NetworkGraph { + NetworkGraph::new(Arc::clone(&self.network_graph)) + } + + /// Returns a handler allowing to query the network graph. + #[cfg(feature = "uniffi")] + pub fn network_graph(&self) -> Arc { + Arc::new(NetworkGraph::new(Arc::clone(&self.network_graph))) + } + /// Creates a digital ECDSA signature of a message with the node's secret key. /// /// A receiver knowing the corresponding `PublicKey` (e.g. 
the node’s id) and the message diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 99e72e31c..9dd7e5699 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -1,3 +1,4 @@ +pub use crate::graph::{ChannelInfo, ChannelUpdateInfo, NodeAnnouncementInfo, NodeInfo}; pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, PaymentStatus}; pub use lightning::events::{ClosureReason, PaymentFailureReason}; @@ -5,6 +6,7 @@ pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; pub use lightning::offers::invoice::Bolt12Invoice; pub use lightning::offers::offer::{Offer, OfferId}; pub use lightning::offers::refund::Refund; +pub use lightning::routing::gossip::{NodeId, RoutingFees}; pub use lightning::util::string::UntrustedString; pub use lightning_invoice::Bolt11Invoice; @@ -45,6 +47,22 @@ impl UniffiCustomTypeConverter for PublicKey { } } +impl UniffiCustomTypeConverter for NodeId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Ok(key) = NodeId::from_str(&val) { + return Ok(key); + } + + Err(Error::InvalidNodeId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + impl UniffiCustomTypeConverter for Address { type Builtin = String; From de73ddcc0e753d7629a553f35e139bf208b82af9 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 11 Jun 2024 11:26:01 +0200 Subject: [PATCH 56/89] Periodically archive fully-resolved channel monitors Previously, LDK was very conservative and kept channel monitors around ~forever or until the user manually decided to prune them. Recently it introduced the `ChainMonitor::archive_fully_resolved_monitors` method, which we now call periodically: every time a wallet sync succeeds, we check whether the latest archival height is 6 blocks in the past and call `archive_fully_resolved_monitors`. As this is not permanently persisted, we will always try to archive any pruned monitors when the first background sync after fresh initialization succeeds, ensuring we call it regularly also on short-lived sessions, e.g, on mobile. --- src/builder.rs | 2 ++ src/config.rs | 3 +++ src/lib.rs | 39 +++++++++++++++++++++++++++++++++++++-- 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 2a361396d..ef4295669 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -979,6 +979,7 @@ fn build_with_store_internal( let latest_fee_rate_cache_update_timestamp = Arc::new(RwLock::new(None)); let latest_rgs_snapshot_timestamp = Arc::new(RwLock::new(None)); let latest_node_announcement_broadcast_timestamp = Arc::new(RwLock::new(None)); + let latest_channel_monitor_archival_height = Arc::new(RwLock::new(None)); Ok(Node { runtime, @@ -1010,6 +1011,7 @@ fn build_with_store_internal( latest_fee_rate_cache_update_timestamp, latest_rgs_snapshot_timestamp, latest_node_announcement_broadcast_timestamp, + latest_channel_monitor_archival_height, }) } diff --git a/src/config.rs b/src/config.rs index aaa205f24..1a8d5d2f0 100644 --- a/src/config.rs +++ b/src/config.rs @@ -30,6 +30,9 @@ pub(crate) const DEFAULT_ESPLORA_SERVER_URL: &str = "https://blockstream.info/ap // The timeout after which we abandon retrying failed payments. pub(crate) const LDK_PAYMENT_RETRY_TIMEOUT: Duration = Duration::from_secs(10); +// The interval (in block height) after which we retry archiving fully resolved channel monitors. +pub(crate) const LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL: u32 = 6; + // The time in-between peer reconnection attempts. 
pub(crate) const PEER_RECONNECTION_INTERVAL: Duration = Duration::from_secs(10); diff --git a/src/lib.rs b/src/lib.rs index 9c3c12342..5f09b1a61 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -122,8 +122,8 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, RGS_SYNC_INTERVAL, - WALLET_SYNC_INTERVAL_MINIMUM_SECS, + LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, + RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; use connection::ConnectionManager; use event::{EventHandler, EventQueue}; @@ -198,6 +198,7 @@ pub struct Node { latest_fee_rate_cache_update_timestamp: Arc>>, latest_rgs_snapshot_timestamp: Arc>>, latest_node_announcement_broadcast_timestamp: Arc>>, + latest_channel_monitor_archival_height: Arc>>, } impl Node { @@ -343,10 +344,13 @@ impl Node { let tx_sync = Arc::clone(&self.tx_sync); let sync_cman = Arc::clone(&self.channel_manager); + let archive_cman = Arc::clone(&self.channel_manager); let sync_cmon = Arc::clone(&self.chain_monitor); + let archive_cmon = Arc::clone(&self.chain_monitor); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); + let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); let mut stop_sync = self.stop_sender.subscribe(); let wallet_sync_interval_secs = self.config.wallet_sync_interval_secs.max(WALLET_SYNC_INTERVAL_MINIMUM_SECS); @@ -376,6 +380,12 @@ impl Node { let unix_time_secs_opt = SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + + periodically_archive_fully_resolved_monitors( + Arc::clone(&archive_cman), + Arc::clone(&archive_cmon), + Arc::clone(&sync_monitor_archival_height) + ); } Err(e) => { log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) @@ -1128,7 +1138,9 @@ impl Node { let wallet = Arc::clone(&self.wallet); let tx_sync = Arc::clone(&self.tx_sync); let sync_cman = Arc::clone(&self.channel_manager); + let archive_cman = Arc::clone(&self.channel_manager); let sync_cmon = Arc::clone(&self.chain_monitor); + let archive_cmon = Arc::clone(&self.chain_monitor); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); let confirmables = vec![ @@ -1136,6 +1148,7 @@ impl Node { &*sync_cmon as &(dyn Confirm + Sync + Send), &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; + let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); tokio::task::block_in_place(move || { tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on( @@ -1163,6 +1176,12 @@ impl Node { "Sync of Lightning wallet finished in {}ms.", now.elapsed().as_millis() ); + + periodically_archive_fully_resolved_monitors( + archive_cman, + archive_cmon, + sync_monitor_archival_height, + ); Ok(()) }, Err(e) => { @@ -1486,3 +1505,19 @@ pub(crate) fn total_anchor_channels_reserve_sats( * anchor_channels_config.per_channel_reserve_sats }) } + +fn periodically_archive_fully_resolved_monitors( + channel_manager: Arc, chain_monitor: Arc, + latest_channel_monitor_archival_height: Arc>>, +) { + let mut latest_archival_height_lock = latest_channel_monitor_archival_height.write().unwrap(); + let cur_height = channel_manager.current_best_block().height; + let should_archive = latest_archival_height_lock 
+ .as_ref() + .map_or(true, |h| cur_height >= h + LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL); + + if should_archive { + chain_monitor.archive_fully_resolved_channel_monitors(); + *latest_archival_height_lock = Some(cur_height); + } +} From a503eb55d8f6fea8547f9c379a1969d0b99672c5 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 11 Jun 2024 13:02:12 +0200 Subject: [PATCH 57/89] f Rename the const to `RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL` --- src/config.rs | 2 +- src/lib.rs | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/config.rs b/src/config.rs index 1a8d5d2f0..37e37bd63 100644 --- a/src/config.rs +++ b/src/config.rs @@ -31,7 +31,7 @@ pub(crate) const DEFAULT_ESPLORA_SERVER_URL: &str = "https://blockstream.info/ap pub(crate) const LDK_PAYMENT_RETRY_TIMEOUT: Duration = Duration::from_secs(10); // The interval (in block height) after which we retry archiving fully resolved channel monitors. -pub(crate) const LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL: u32 = 6; +pub(crate) const RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL: u32 = 6; // The time in-between peer reconnection attempts. pub(crate) const PEER_RECONNECTION_INTERVAL: Duration = Duration::from_secs(10); diff --git a/src/lib.rs b/src/lib.rs index 5f09b1a61..e336540ac 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -122,8 +122,9 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, - RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, + NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, + RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, RGS_SYNC_INTERVAL, + WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; use connection::ConnectionManager; use event::{EventHandler, EventQueue}; @@ -1514,7 +1515,7 @@ fn periodically_archive_fully_resolved_monitors( let cur_height = channel_manager.current_best_block().height; let should_archive = latest_archival_height_lock .as_ref() - .map_or(true, |h| cur_height >= h + LDK_CHANNEL_MONITOR_ARCHIVAL_INTERVAL); + .map_or(true, |h| cur_height >= h + RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL); if should_archive { chain_monitor.archive_fully_resolved_channel_monitors(); From 9dba3ac79089e183351c944baf3702196ccc087d Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 11 Jun 2024 11:31:50 +0200 Subject: [PATCH 58/89] Also update `latest_sync_` timestamps in `sync_wallets` ... which we previously omitted. 
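As a caller-side sketch of what this enables: after a manual sync, the corresponding status timestamps should now be populated as well. The `NodeStatus` field names below are assumptions for illustration and may not match the exact API:

    // Trigger a manual sync and inspect the (assumed) status timestamps afterwards.
    node.sync_wallets()?;
    let status = node.status();
    println!(
        "on-chain wallet last synced: {:?}, Lightning wallet last synced: {:?}",
        status.latest_onchain_wallet_sync_timestamp, status.latest_wallet_sync_timestamp
    );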
--- src/lib.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/lib.rs b/src/lib.rs index e336540ac..abc4f11f9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1149,6 +1149,8 @@ impl Node { &*sync_cmon as &(dyn Confirm + Sync + Send), &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; + let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); + let sync_onchain_wallet_timestamp = Arc::clone(&self.latest_onchain_wallet_sync_timestamp); let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); tokio::task::block_in_place(move || { @@ -1162,6 +1164,11 @@ impl Node { "Sync of on-chain wallet finished in {}ms.", now.elapsed().as_millis() ); + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; }, Err(e) => { log_error!(sync_logger, "Sync of on-chain wallet failed: {}", e); @@ -1178,6 +1185,12 @@ impl Node { now.elapsed().as_millis() ); + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + periodically_archive_fully_resolved_monitors( archive_cman, archive_cmon, From accd3c8060e1af629a002d93b30b78b443417a93 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 11 Jun 2024 11:36:28 +0200 Subject: [PATCH 59/89] Also update the fee rate cache in `sync_wallets` .. which we previously omitted. --- src/lib.rs | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/src/lib.rs b/src/lib.rs index abc4f11f9..2368e5a5a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1124,7 +1124,8 @@ impl Node { } } - /// Manually sync the LDK and BDK wallets with the current chain state. + /// Manually sync the LDK and BDK wallets with the current chain state and update the fee rate + /// cache. /// /// **Note:** The wallets are regularly synced in the background, which is configurable via /// [`Config::onchain_wallet_sync_interval_secs`] and [`Config::wallet_sync_interval_secs`]. 
@@ -1142,6 +1143,7 @@ impl Node { let archive_cman = Arc::clone(&self.channel_manager); let sync_cmon = Arc::clone(&self.chain_monitor); let archive_cmon = Arc::clone(&self.chain_monitor); + let fee_estimator = Arc::clone(&self.fee_estimator); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); let confirmables = vec![ @@ -1150,6 +1152,8 @@ impl Node { &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); + let sync_fee_rate_update_timestamp = + Arc::clone(&self.latest_fee_rate_cache_update_timestamp); let sync_onchain_wallet_timestamp = Arc::clone(&self.latest_onchain_wallet_sync_timestamp); let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); @@ -1176,6 +1180,26 @@ impl Node { }, }; + let now = Instant::now(); + match fee_estimator.update_fee_estimates().await { + Ok(()) => { + log_info!( + sync_logger, + "Fee rate cache update finished in {}ms.", + now.elapsed().as_millis() + ); + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_fee_rate_update_timestamp.write().unwrap() = unix_time_secs_opt; + }, + Err(e) => { + log_error!(sync_logger, "Fee rate cache update failed: {}", e,); + return Err(e); + }, + } + let now = Instant::now(); match tx_sync.sync(confirmables).await { Ok(()) => { From ddc64307bb40bb1053653026b9c9892356bfeaac Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 30 May 2024 09:39:51 +0200 Subject: [PATCH 60/89] Consolidate setting `UserConfig` defaults .. now only via `Config::default_user_config` --- src/builder.rs | 19 +++---------------- src/config.rs | 16 ++++++++++++++++ src/lib.rs | 17 ++++------------- 3 files changed, 23 insertions(+), 29 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 9b2e476df..fa6e573b7 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1,6 +1,6 @@ use crate::config::{ - Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, DEFAULT_ESPLORA_SERVER_URL, - WALLET_KEYS_SEED_LEN, + default_user_config, Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, + DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, }; use crate::connection::ConnectionManager; use crate::event::EventQueue; @@ -31,7 +31,6 @@ use lightning::routing::scoring::{ }; use lightning::sign::EntropySource; -use lightning::util::config::UserConfig; use lightning::util::persist::{ read_channel_monitors, CHANNEL_MANAGER_PERSISTENCE_KEY, CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, @@ -686,19 +685,7 @@ fn build_with_store_internal( }, }; - // Initialize the default config values. - // - // Note that methods such as Node::connect_open_channel might override some of the values set - // here, e.g. the ChannelHandshakeConfig, meaning these default values will mostly be relevant - // for inbound channels. - let mut user_config = UserConfig::default(); - user_config.channel_handshake_limits.force_announced_channel_preference = false; - user_config.manually_accept_inbound_channels = true; - // Note the channel_handshake_config will be overwritten in `connect_open_channel`, but we - // still set a default here. 
- user_config.channel_handshake_config.negotiate_anchors_zero_fee_htlc_tx = - config.anchor_channels_config.is_some(); - + let mut user_config = default_user_config(&config); if liquidity_source_config.and_then(|lsc| lsc.lsps2_service.as_ref()).is_some() { // Generally allow claiming underpaying HTLCs as the LSP will skim off some fee. We'll // check that they don't take too much before claiming. diff --git a/src/config.rs b/src/config.rs index 37e37bd63..7adae2d4b 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,6 +1,7 @@ use std::time::Duration; use lightning::ln::msgs::SocketAddress; +use lightning::util::config::UserConfig; use lightning::util::logger::Level as LogLevel; use bitcoin::secp256k1::PublicKey; @@ -229,3 +230,18 @@ impl Default for AnchorChannelsConfig { pub fn default_config() -> Config { Config::default() } + +pub(crate) fn default_user_config(config: &Config) -> UserConfig { + // Initialize the default config values. + // + // Note that methods such as Node::connect_open_channel might override some of the values set + // here, e.g. the ChannelHandshakeConfig, meaning these default values will mostly be relevant + // for inbound channels. + let mut user_config = UserConfig::default(); + user_config.channel_handshake_limits.force_announced_channel_preference = false; + user_config.manually_accept_inbound_channels = true; + user_config.channel_handshake_config.negotiate_anchors_zero_fee_htlc_tx = + config.anchor_channels_config.is_some(); + + user_config +} diff --git a/src/lib.rs b/src/lib.rs index 938b7ad20..8a617fe9b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -123,7 +123,7 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, + default_user_config, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; @@ -148,7 +148,6 @@ use lightning::events::bump_transaction::Wallet as LdkWallet; use lightning::ln::channelmanager::{ChannelShutdownState, PaymentId}; use lightning::ln::msgs::SocketAddress; -use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; pub use lightning::util::logger::Level as LogLevel; use lightning_background_processor::process_events_async; @@ -1087,17 +1086,9 @@ impl Node { return Err(Error::InsufficientFunds); } - let channel_config = (*(channel_config.unwrap_or_default())).clone().into(); - let user_config = UserConfig { - channel_handshake_limits: Default::default(), - channel_handshake_config: ChannelHandshakeConfig { - announced_channel: announce_channel, - negotiate_anchors_zero_fee_htlc_tx: self.config.anchor_channels_config.is_some(), - ..Default::default() - }, - channel_config, - ..Default::default() - }; + let mut user_config = default_user_config(&self.config); + user_config.channel_handshake_config.announced_channel = announce_channel; + user_config.channel_config = (*(channel_config.unwrap_or_default())).clone().into(); let push_msat = push_to_counterparty_msat.unwrap_or(0); let user_channel_id: u128 = rand::thread_rng().gen::(); From dc5d00ea4fd4fa25312ff9302328bd61e03d8cfb Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 30 May 2024 09:44:43 +0200 Subject: [PATCH 61/89] Bump `max_inbound_htlc_value_in_flight` for priv. outbound channels .. as users are confused why they can't send the full channel amount. LDK will default to this behavior soon, anyways, at which point we'll be able to drop this manual override again. 
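For context, the handshake defaults for an unannounced channel now effectively amount to the following sketch of LDK's `UserConfig` (the actual override is applied in the channel-opening path, as the diff below shows):

    // Sketch of the resulting defaults for a private (unannounced) outbound channel.
    let mut user_config = lightning::util::config::UserConfig::default();
    let announce_channel = false;
    user_config.channel_handshake_config.announced_channel = announce_channel;
    if !announce_channel {
        // Accept up to the full channel value as inbound HTLCs in flight, so the
        // counterparty isn't capped at the default percentage when paying us.
        user_config
            .channel_handshake_config
            .max_inbound_htlc_value_in_flight_percent_of_channel = 100;
    }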
--- src/lib.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/lib.rs b/src/lib.rs index 8a617fe9b..bb6492cb9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1089,6 +1089,14 @@ impl Node { let mut user_config = default_user_config(&self.config); user_config.channel_handshake_config.announced_channel = announce_channel; user_config.channel_config = (*(channel_config.unwrap_or_default())).clone().into(); + // We set the max inflight to 100% for private channels. + // FIXME: LDK will default to this behavior soon, too, at which point we should drop this + // manual override. + if !announce_channel { + user_config + .channel_handshake_config + .max_inbound_htlc_value_in_flight_percent_of_channel = 100; + } let push_msat = push_to_counterparty_msat.unwrap_or(0); let user_channel_id: u128 = rand::thread_rng().gen::(); From bc1fac375fc62827cce30b5c625667c3bcfb1426 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 17 Jun 2024 12:16:31 +0200 Subject: [PATCH 62/89] Skip node ann. broadcast if the public channel isn't ready yet Previously, we'd only skip broadcasting a node announcement if we don't have public channel. However, this could lead to us broadcasting the initial node announcement too early: it would be broadcast after the channel is pending but before it's confirmed and we had a chance to generate and exchange the channel announcement, leading to our counterparty ignoring the node announcement. Here, we just check that the public channel is actually ready (but don't bother if it's useable) before we trigger the broadcast. Note that this wouldn't have been a big issue before as we expect announced nodes to be always-online and we'd have rebroadcast the announcement after an hour anyways. However, it can be annoying in testing. --- src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index bb6492cb9..908bd1750 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -580,8 +580,8 @@ impl Node { continue; } - if !bcast_cm.list_channels().iter().any(|chan| chan.is_public) { - // Skip if we don't have any public channels. + if !bcast_cm.list_channels().iter().any(|chan| chan.is_public && chan.is_channel_ready) { + // Skip if we don't have any public channels that are ready. continue; } From 41a7955f2f1d1965185c0c98f57d0a6d944b3abb Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 17 Jun 2024 11:30:05 +0200 Subject: [PATCH 63/89] Log node ID in `start`/`stop` In scenarios with multiple interconnected nodes, reading user logs can be much easier once we know which log file is for a node with a particular node ID. Here, we simply log the node ID on startup and shutdown to allow to associate any given log file with the node's ID. 
--- src/lib.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index bb6492cb9..d9a697a03 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -217,7 +217,12 @@ impl Node { return Err(Error::AlreadyRunning); } - log_info!(self.logger, "Starting up LDK Node on network: {}", self.config.network); + log_info!( + self.logger, + "Starting up LDK Node with node ID {} on network: {}", + self.node_id(), + self.config.network + ); let runtime = tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap(); @@ -728,7 +733,7 @@ impl Node { pub fn stop(&self) -> Result<(), Error> { let runtime = self.runtime.write().unwrap().take().ok_or(Error::NotRunning)?; - log_info!(self.logger, "Shutting down LDK Node..."); + log_info!(self.logger, "Shutting down LDK Node with node ID {}...", self.node_id()); // Stop the runtime. match self.stop_sender.send(()) { From 018a5b68139fe32230384c8592bec8b5c964f84f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 14 Jun 2024 12:14:07 +0200 Subject: [PATCH 64/89] Allow to generate invoices for custom payment hashes ... requiring users to manually claim them. --- bindings/ldk_node.udl | 8 ++ src/payment/bolt11.rs | 230 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 212 insertions(+), 26 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index ec63adbe0..5f99ec378 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -100,10 +100,18 @@ interface Bolt11Payment { [Throws=NodeError] void send_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); [Throws=NodeError] + void claim_for_hash(PaymentHash payment_hash, u64 claimable_amount_msat, PaymentPreimage preimage); + [Throws=NodeError] + void fail_for_hash(PaymentHash payment_hash); + [Throws=NodeError] Bolt11Invoice receive(u64 amount_msat, [ByRef]string description, u32 expiry_secs); [Throws=NodeError] + Bolt11Invoice receive_for_hash(u64 amount_msat, [ByRef]string description, u32 expiry_secs, PaymentHash payment_hash); + [Throws=NodeError] Bolt11Invoice receive_variable_amount([ByRef]string description, u32 expiry_secs); [Throws=NodeError] + Bolt11Invoice receive_variable_amount_for_hash([ByRef]string description, u32 expiry_secs, PaymentHash payment_hash); + [Throws=NodeError] Bolt11Invoice receive_via_jit_channel(u64 amount_msat, [ByRef]string description, u32 expiry_secs, u64? max_lsp_fee_limit_msat); [Throws=NodeError] Bolt11Invoice receive_variable_amount_via_jit_channel([ByRef]string description, u32 expiry_secs, u64?
max_proportional_lsp_fee_limit_ppm_msat); diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index c23f7b670..e3dd27b9d 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -8,20 +8,23 @@ use crate::error::Error; use crate::liquidity::LiquiditySource; use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::payment::store::{ - LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, + LSPFeeLimits, PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, + PaymentStatus, PaymentStore, }; use crate::peer_store::{PeerInfo, PeerStore}; use crate::types::{ChannelManager, KeysManager}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; -use lightning::ln::PaymentHash; +use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::routing::router::{PaymentParameters, RouteParameters}; use lightning_invoice::{payment, Bolt11Invoice, Currency}; +use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use std::sync::{Arc, RwLock}; +use std::time::SystemTime; /// A payment handler allowing to create and pay [BOLT 11] invoices. /// @@ -254,55 +257,227 @@ impl Bolt11Payment { } } + /// Allows to attempt manually claiming payments with the given preimage that have previously + /// been registered via [`receive_for_hash`] or [`receive_variable_amount_for_hash`]. + /// + /// This should be called in reponse to a [`PaymentClaimable`] event as soon as the preimage is + /// available. + /// + /// Will check that the payment is known, and that the given preimage and claimable amount + /// match our expectations before attempting to claim the payment, and will return an error + /// otherwise. + /// + /// When claiming the payment has succeeded, a [`PaymentReceived`] event will be emitted. + /// + /// [`receive_for_hash`]: Self::receive_for_hash + /// [`receive_variable_amount_for_hash`]: Self::receive_variable_amount_for_hash + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`PaymentReceived`]: crate::Event::PaymentReceived + pub fn claim_for_hash( + &self, payment_hash: PaymentHash, claimable_amount_msat: u64, preimage: PaymentPreimage, + ) -> Result<(), Error> { + let payment_id = PaymentId(payment_hash.0); + + let expected_payment_hash = PaymentHash(Sha256::hash(&preimage.0).to_byte_array()); + + if expected_payment_hash != payment_hash { + log_error!( + self.logger, + "Failed to manually claim payment as the given preimage doesn't match the hash {}", + payment_hash + ); + return Err(Error::InvalidPaymentPreimage); + } + + if let Some(details) = self.payment_store.get(&payment_id) { + if let Some(expected_amount_msat) = details.amount_msat { + if claimable_amount_msat < expected_amount_msat { + log_error!( + self.logger, + "Failed to manually claim payment {} as the claimable amount is less than expected", + payment_id + ); + return Err(Error::InvalidAmount); + } + } + } else { + log_error!( + self.logger, + "Failed to manually claim unknown payment with hash: {}", + payment_hash + ); + return Err(Error::InvalidPaymentHash); + } + + self.channel_manager.claim_funds(preimage); + Ok(()) + } + + /// Allows to manually fail payments with the given hash that have previously + /// been registered via [`receive_for_hash`] or [`receive_variable_amount_for_hash`]. 
+ /// + /// This should be called in reponse to a [`PaymentClaimable`] event if the payment needs to be + /// failed back, e.g., if the correct preimage can't be retrieved in time before the claim + /// deadline has been reached. + /// + /// Will check that the payment is known before failing the payment, and will return an error + /// otherwise. + /// + /// [`receive_for_hash`]: Self::receive_for_hash + /// [`receive_variable_amount_for_hash`]: Self::receive_variable_amount_for_hash + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + pub fn fail_for_hash(&self, payment_hash: PaymentHash) -> Result<(), Error> { + let payment_id = PaymentId(payment_hash.0); + + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + + if !self.payment_store.update(&update)? { + log_error!( + self.logger, + "Failed to manually fail unknown payment with hash: {}", + payment_hash + ); + return Err(Error::InvalidPaymentHash); + } + + self.channel_manager.fail_htlc_backwards(&payment_hash); + Ok(()) + } + /// Returns a payable invoice that can be used to request and receive a payment of the amount /// given. + /// + /// The inbound payment will be automatically claimed upon arrival. pub fn receive( &self, amount_msat: u64, description: &str, expiry_secs: u32, ) -> Result { - self.receive_inner(Some(amount_msat), description, expiry_secs) + self.receive_inner(Some(amount_msat), description, expiry_secs, None) + } + + /// Returns a payable invoice that can be used to request a payment of the amount + /// given for the given payment hash. + /// + /// We will register the given payment hash and emit a [`PaymentClaimable`] event once + /// the inbound payment arrives. + /// + /// **Note:** users *MUST* handle this event and claim the payment manually via + /// [`claim_for_hash`] as soon as they have obtained access to the preimage of the given + /// payment hash. If they're unable to obtain the preimage, they *MUST* immediately fail the payment via + /// [`fail_for_hash`]. + /// + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`claim_for_hash`]: Self::claim_for_hash + /// [`fail_for_hash`]: Self::fail_for_hash + pub fn receive_for_hash( + &self, amount_msat: u64, description: &str, expiry_secs: u32, payment_hash: PaymentHash, + ) -> Result { + self.receive_inner(Some(amount_msat), description, expiry_secs, Some(payment_hash)) } /// Returns a payable invoice that can be used to request and receive a payment for which the /// amount is to be determined by the user, also known as a "zero-amount" invoice. + /// + /// The inbound payment will be automatically claimed upon arrival. pub fn receive_variable_amount( &self, description: &str, expiry_secs: u32, ) -> Result { - self.receive_inner(None, description, expiry_secs) + self.receive_inner(None, description, expiry_secs, None) + } + + /// Returns a payable invoice that can be used to request a payment for the given payment hash + /// and the amount to be determined by the user, also known as a "zero-amount" invoice. + /// + /// We will register the given payment hash and emit a [`PaymentClaimable`] event once + /// the inbound payment arrives. + /// + /// **Note:** users *MUST* handle this event and claim the payment manually via + /// [`claim_for_hash`] as soon as they have obtained access to the preimage of the given + /// payment hash. If they're unable to obtain the preimage, they *MUST* immediately fail the payment via + /// [`fail_for_hash`]. 
+ /// + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`claim_for_hash`]: Self::claim_for_hash + /// [`fail_for_hash`]: Self::fail_for_hash + pub fn receive_variable_amount_for_hash( + &self, description: &str, expiry_secs: u32, payment_hash: PaymentHash, + ) -> Result { + self.receive_inner(None, description, expiry_secs, Some(payment_hash)) } fn receive_inner( &self, amount_msat: Option, description: &str, expiry_secs: u32, + manual_claim_payment_hash: Option, ) -> Result { let currency = Currency::from(self.config.network); let keys_manager = Arc::clone(&self.keys_manager); - let invoice = match lightning_invoice::utils::create_invoice_from_channelmanager( - &self.channel_manager, - keys_manager, - Arc::clone(&self.logger), - currency, - amount_msat, - description.to_string(), - expiry_secs, - None, - ) { - Ok(inv) => { - log_info!(self.logger, "Invoice created: {}", inv); - inv - }, - Err(e) => { - log_error!(self.logger, "Failed to create invoice: {}", e); - return Err(Error::InvoiceCreationFailed); - }, + let duration = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("for the foreseeable future this shouldn't happen"); + + let invoice = { + let invoice_res = if let Some(payment_hash) = manual_claim_payment_hash { + lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch_with_payment_hash( + &self.channel_manager, + keys_manager, + Arc::clone(&self.logger), + currency, + amount_msat, + description.to_string(), + duration, + expiry_secs, + payment_hash, + None, + ) + } else { + lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch( + &self.channel_manager, + keys_manager, + Arc::clone(&self.logger), + currency, + amount_msat, + description.to_string(), + duration, + expiry_secs, + None, + ) + }; + + match invoice_res { + Ok(inv) => { + log_info!(self.logger, "Invoice created: {}", inv); + inv + }, + Err(e) => { + log_error!(self.logger, "Failed to create invoice: {}", e); + return Err(Error::InvoiceCreationFailed); + }, + } }; let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment_secret = invoice.payment_secret(); let id = PaymentId(payment_hash.0); + let preimage = if manual_claim_payment_hash.is_none() { + // If the user hasn't registered a custom payment hash, we're positive ChannelManager + // will know the preimage at this point. + let res = self + .channel_manager + .get_payment_preimage(payment_hash, payment_secret.clone()) + .ok(); + debug_assert!(res.is_some(), "We just let ChannelManager create an inbound payment, it can't have forgotten the preimage by now."); + res + } else { + None + }; let payment = PaymentDetails { id, kind: PaymentKind::Bolt11 { hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), + preimage, + secret: Some(payment_secret.clone()), }, amount_msat, @@ -422,17 +597,20 @@ impl Bolt11Payment { // Register payment in payment store. 
let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment_secret = invoice.payment_secret(); let lsp_fee_limits = LSPFeeLimits { max_total_opening_fee_msat: lsp_total_opening_fee, max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, }; let id = PaymentId(payment_hash.0); + let preimage = + self.channel_manager.get_payment_preimage(payment_hash, payment_secret.clone()).ok(); let payment = PaymentDetails { id, kind: PaymentKind::Bolt11Jit { hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), + preimage, + secret: Some(payment_secret.clone()), lsp_fee_limits, }, amount_msat, From be6e8c2c72019f095566743710b54a5d75abdccf Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 14 Jun 2024 13:38:29 +0200 Subject: [PATCH 65/89] Implement and yield `PaymentClaimable` events --- bindings/ldk_node.udl | 1 + src/event.rs | 62 ++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 62 insertions(+), 1 deletion(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 5f99ec378..d1b509deb 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -230,6 +230,7 @@ interface Event { PaymentSuccessful(PaymentId? payment_id, PaymentHash payment_hash, u64? fee_paid_msat); PaymentFailed(PaymentId? payment_id, PaymentHash payment_hash, PaymentFailureReason? reason); PaymentReceived(PaymentId? payment_id, PaymentHash payment_hash, u64 amount_msat); + PaymentClaimable(PaymentId payment_id, PaymentHash payment_hash, u64 claimable_amount_msat, u32? claim_deadline); ChannelPending(ChannelId channel_id, UserChannelId user_channel_id, ChannelId former_temporary_channel_id, PublicKey counterparty_node_id, OutPoint funding_txo); ChannelReady(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id); ChannelClosed(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id, ClosureReason? reason); diff --git a/src/event.rs b/src/event.rs index 5cd9e2603..80c7477f7 100644 --- a/src/event.rs +++ b/src/event.rs @@ -84,6 +84,28 @@ pub enum Event { /// The value, in thousandths of a satoshi, that has been received. amount_msat: u64, }, + /// A payment for a previously-registered payment hash has been received. + /// + /// This needs to be manually claimed by supplying the correct preimage to [`claim_for_hash`]. + /// + /// If the the provided parameters don't match the expectations or the preimage can't be + /// retrieved in time, should be failed-back via [`fail_for_hash`]. + /// + /// Note claiming will necessarily fail after the `claim_deadline` has been reached. + /// + /// [`claim_for_hash`]: crate::payment::Bolt11Payment::claim_for_hash + /// [`fail_for_hash`]: crate::payment::Bolt11Payment::fail_for_hash + PaymentClaimable { + /// A local identifier used to track the payment. + payment_id: PaymentId, + /// The hash of the payment. + payment_hash: PaymentHash, + /// The value, in thousandths of a satoshi, that is claimable. + claimable_amount_msat: u64, + /// The block height at which this payment will be failed back and will no longer be + /// eligible for claiming. + claim_deadline: Option, + }, /// A channel has been created and is pending confirmation on-chain. ChannelPending { /// The `channel_id` of the channel. 
@@ -156,6 +178,12 @@ impl_writeable_tlv_based_enum!(Event, (1, counterparty_node_id, option), (2, user_channel_id, required), (3, reason, upgradable_option), + }, + (6, PaymentClaimable) => { + (0, payment_hash, required), + (2, payment_id, required), + (4, claimable_amount_msat, required), + (6, claim_deadline, option), }; ); @@ -434,7 +462,7 @@ where receiver_node_id: _, via_channel_id: _, via_user_channel_id: _, - claim_deadline: _, + claim_deadline, onion_fields: _, counterparty_skimmed_fee_msat, } => { @@ -500,6 +528,38 @@ where }); return; } + + // If this is known by the store but ChannelManager doesn't know the preimage, + // the payment has been registered via `_for_hash` variants and needs to be manually claimed via + // user interaction. + match info.kind { + PaymentKind::Bolt11 { preimage, .. } => { + if purpose.preimage().is_none() { + debug_assert!( + preimage.is_none(), + "We would have registered the preimage if we knew" + ); + + self.event_queue + .add_event(Event::PaymentClaimable { + payment_id, + payment_hash, + claimable_amount_msat: amount_msat, + claim_deadline, + }) + .unwrap_or_else(|e| { + log_error!( + self.logger, + "Failed to push to event queue: {}", + e + ); + panic!("Failed to push to event queue"); + }); + return; + } + }, + _ => {}, + } } log_info!( From d5709b70d845d265b148abe8f2a485edad6a93aa Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 14 Jun 2024 14:12:31 +0200 Subject: [PATCH 66/89] Test manual-claiming flow --- tests/common/mod.rs | 120 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 116 insertions(+), 4 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 062d14f61..5959bd58e 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -8,10 +8,13 @@ use ldk_node::{ }; use lightning::ln::msgs::SocketAddress; +use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::util::persist::KVStore; use lightning::util::test_utils::TestStore; use lightning_persister::fs_store::FilesystemStore; +use bitcoin::hashes::sha256::Hash as Sha256; +use bitcoin::hashes::Hash; use bitcoin::{Address, Amount, Network, OutPoint, Txid}; use bitcoincore_rpc::bitcoincore_rpc_json::AddressType; @@ -99,6 +102,31 @@ macro_rules! expect_payment_received_event { pub(crate) use expect_payment_received_event; +macro_rules! expect_payment_claimable_event { + ($node: expr, $payment_id: expr, $payment_hash: expr, $claimable_amount_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentClaimable { + payment_id, + payment_hash, + claimable_amount_msat, + .. + } => { + println!("{} got event {:?}", std::stringify!($node), e); + assert_eq!(payment_hash, $payment_hash); + assert_eq!(payment_id, $payment_id); + assert_eq!(claimable_amount_msat, $claimable_amount_msat); + $node.event_handled(); + claimable_amount_msat + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!($node), e); + }, + } + }}; +} + +pub(crate) use expect_payment_claimable_event; + macro_rules! 
expect_payment_successful_event { ($node: expr, $payment_id: expr, $fee_paid_msat: expr) => {{ match $node.wait_next_event() { @@ -378,7 +406,7 @@ pub(crate) fn do_channel_full_cycle( let addr_a = node_a.onchain_payment().new_address().unwrap(); let addr_b = node_b.onchain_payment().new_address().unwrap(); - let premine_amount_sat = if expect_anchor_channel { 125_000 } else { 100_000 }; + let premine_amount_sat = if expect_anchor_channel { 2_125_000 } else { 2_100_000 }; premine_and_distribute_funds( &bitcoind, @@ -396,7 +424,7 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_b.next_event(), None); println!("\nA -- connect_open_channel -> B"); - let funding_amount_sat = 80_000; + let funding_amount_sat = 2_080_000; let push_msat = (funding_amount_sat / 2) * 1000; // balance the channel node_a .connect_open_channel( @@ -580,6 +608,89 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + // Test claiming manually registered payments. + let invoice_amount_3_msat = 5_532_000; + let manual_preimage = PaymentPreimage([42u8; 32]); + let manual_payment_hash = PaymentHash(Sha256::hash(&manual_preimage.0).to_byte_array()); + let manual_invoice = node_b + .bolt11_payment() + .receive_for_hash(invoice_amount_3_msat, &"asdf", 9217, manual_payment_hash) + .unwrap(); + let manual_payment_id = node_a.bolt11_payment().send(&manual_invoice).unwrap(); + + let claimable_amount_msat = expect_payment_claimable_event!( + node_b, + manual_payment_id, + manual_payment_hash, + invoice_amount_3_msat + ); + node_b + .bolt11_payment() + .claim_for_hash(manual_payment_hash, claimable_amount_msat, manual_preimage) + .unwrap(); + expect_payment_received_event!(node_b, claimable_amount_msat); + expect_payment_successful_event!(node_a, Some(manual_payment_id), None); + assert_eq!(node_a.payment(&manual_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&manual_payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!( + node_a.payment(&manual_payment_id).unwrap().amount_msat, + Some(invoice_amount_3_msat) + ); + assert!(matches!(node_a.payment(&manual_payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + assert_eq!(node_b.payment(&manual_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&manual_payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!( + node_b.payment(&manual_payment_id).unwrap().amount_msat, + Some(invoice_amount_3_msat) + ); + assert!(matches!(node_b.payment(&manual_payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + + // Test failing manually registered payments. 
+ let invoice_amount_4_msat = 5_532_000; + let manual_fail_preimage = PaymentPreimage([43u8; 32]); + let manual_fail_payment_hash = + PaymentHash(Sha256::hash(&manual_fail_preimage.0).to_byte_array()); + let manual_fail_invoice = node_b + .bolt11_payment() + .receive_for_hash(invoice_amount_3_msat, &"asdf", 9217, manual_fail_payment_hash) + .unwrap(); + let manual_fail_payment_id = node_a.bolt11_payment().send(&manual_fail_invoice).unwrap(); + + expect_payment_claimable_event!( + node_b, + manual_fail_payment_id, + manual_fail_payment_hash, + invoice_amount_4_msat + ); + node_b.bolt11_payment().fail_for_hash(manual_fail_payment_hash).unwrap(); + expect_event!(node_a, PaymentFailed); + assert_eq!(node_a.payment(&manual_fail_payment_id).unwrap().status, PaymentStatus::Failed); + assert_eq!( + node_a.payment(&manual_fail_payment_id).unwrap().direction, + PaymentDirection::Outbound + ); + assert_eq!( + node_a.payment(&manual_fail_payment_id).unwrap().amount_msat, + Some(invoice_amount_4_msat) + ); + assert!(matches!( + node_a.payment(&manual_fail_payment_id).unwrap().kind, + PaymentKind::Bolt11 { .. } + )); + assert_eq!(node_b.payment(&manual_fail_payment_id).unwrap().status, PaymentStatus::Failed); + assert_eq!( + node_b.payment(&manual_fail_payment_id).unwrap().direction, + PaymentDirection::Inbound + ); + assert_eq!( + node_b.payment(&manual_fail_payment_id).unwrap().amount_msat, + Some(invoice_amount_4_msat) + ); + assert!(matches!( + node_b.payment(&manual_fail_payment_id).unwrap().kind, + PaymentKind::Bolt11 { .. } + )); + // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; @@ -611,8 +722,8 @@ pub(crate) fn do_channel_full_cycle( node_b.payment(&keysend_payment_id).unwrap().kind, PaymentKind::Spontaneous { .. } )); - assert_eq!(node_a.list_payments().len(), 4); - assert_eq!(node_b.list_payments().len(), 5); + assert_eq!(node_a.list_payments().len(), 6); + assert_eq!(node_b.list_payments().len(), 7); println!("\nB close_channel (force: {})", force_close); if force_close { @@ -715,6 +826,7 @@ pub(crate) fn do_channel_full_cycle( let sum_of_all_payments_sat = (push_msat + invoice_amount_1_msat + overpaid_amount_msat + + invoice_amount_3_msat + determined_amount_msat + keysend_amount_msat) / 1000; From 74943cdc06600203df4acbf8351554df2e1896fe Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 14 Jun 2024 15:32:16 +0200 Subject: [PATCH 67/89] Refuse circular payments .. we previously got reports from users trying to pay their own JIT invoice, which we currently don't support (and possibly never will). In order to avoid entering any weird states, we just reject our own circular payment. 
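For illustration, the check added below boils down to comparing the stored direction of the already-tracked payment; here is a minimal, self-contained sketch of that rule with stand-in types (these are not the actual ldk-node types, and the helper is hypothetical):

    #[derive(PartialEq)]
    enum Direction { Inbound, Outbound }

    struct StoredPayment { direction: Direction }

    // An incoming claimable HTLC whose payment ID we already track as
    // *outbound* can only be us paying our own invoice, so it gets failed back.
    fn is_circular(known: Option<&StoredPayment>) -> bool {
        matches!(known, Some(p) if p.direction == Direction::Outbound)
    }

    fn main() {
        let ours = StoredPayment { direction: Direction::Outbound };
        assert!(is_circular(Some(&ours))); // our own payment: reject
        assert!(!is_circular(None));       // unknown payment: handle as usual
    }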
--- src/event.rs | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/event.rs b/src/event.rs index 80c7477f7..25c79827a 100644 --- a/src/event.rs +++ b/src/event.rs @@ -468,6 +468,25 @@ where } => { let payment_id = PaymentId(payment_hash.0); if let Some(info) = self.payment_store.get(&payment_id) { + if info.direction == PaymentDirection::Outbound { + log_info!( + self.logger, + "Refused inbound payment with ID {}: circular payments are unsupported.", + payment_id + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + return; + } + if info.status == PaymentStatus::Succeeded || matches!(info.kind, PaymentKind::Spontaneous { .. }) { From a678c5ece807fbaef83755c48982c4e2c90a1764 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 14 Jun 2024 15:08:33 +0200 Subject: [PATCH 68/89] Add a `latest_update_timestamp` field to `PaymentDetails` .. which allows to filter and sort payment based on how recent they are. --- bindings/ldk_node.udl | 1 + src/event.rs | 44 +++++++------ src/payment/bolt11.rs | 128 +++++++++++++++++++------------------ src/payment/bolt12.rs | 81 ++++++++++++----------- src/payment/spontaneous.rs | 39 +++++------ src/payment/store.rs | 41 +++++++++--- 6 files changed, 184 insertions(+), 150 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index d1b509deb..26ab55994 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -294,6 +294,7 @@ dictionary PaymentDetails { u64? amount_msat; PaymentDirection direction; PaymentStatus status; + u64 latest_update_timestamp; }; [NonExhaustive] diff --git a/src/event.rs b/src/event.rs index 25c79827a..838df4230 100644 --- a/src/event.rs +++ b/src/event.rs @@ -598,19 +598,21 @@ where .. } => { let offer_id = payment_context.offer_id; - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt12Offer { - hash: Some(payment_hash), - preimage: payment_preimage, - secret: Some(payment_secret), - offer_id, - }, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, + let kind = PaymentKind::Bolt12Offer { + hash: Some(payment_hash), + preimage: payment_preimage, + secret: Some(payment_secret), + offer_id, }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + match self.payment_store.insert(payment) { Ok(false) => (), Ok(true) => { @@ -638,17 +640,19 @@ where }, PaymentPurpose::SpontaneousPayment(preimage) => { // Since it's spontaneous, we insert it now into our store. 
- let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Spontaneous { - hash: payment_hash, - preimage: Some(preimage), - }, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, + let kind = PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(preimage), }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + match self.payment_store.insert(payment) { Ok(false) => (), Ok(true) => { diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index e3dd27b9d..e8d030bc0 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -105,17 +105,18 @@ impl Bolt11Payment { let amt_msat = invoice.amount_milli_satoshis().unwrap(); log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt11 { - hash: payment_hash, - preimage: None, - secret: payment_secret, - }, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, }; + let payment = PaymentDetails::new( + payment_id, + kind, + invoice.amount_milli_satoshis(), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; @@ -126,17 +127,18 @@ impl Bolt11Payment { match e { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt11 { - hash: payment_hash, - preimage: None, - secret: payment_secret, - }, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, }; + let payment = PaymentDetails::new( + payment_id, + kind, + invoice.amount_milli_satoshis(), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); self.payment_store.insert(payment)?; Err(Error::PaymentSendingFailed) @@ -216,17 +218,19 @@ impl Bolt11Payment { payee_pubkey ); - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt11 { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - }, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), }; + + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; Ok(payment_id) @@ -237,17 +241,18 @@ impl Bolt11Payment { match e { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt11 { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - }, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); self.payment_store.insert(payment)?; Err(Error::PaymentSendingFailed) @@ 
-472,19 +477,18 @@ impl Bolt11Payment { } else { None }; - let payment = PaymentDetails { + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage, + secret: Some(payment_secret.clone()), + }; + let payment = PaymentDetails::new( id, - kind: PaymentKind::Bolt11 { - hash: payment_hash, - preimage, - secret: Some(payment_secret.clone()), - }, - + kind, amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - }; - + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; Ok(invoice) @@ -605,19 +609,19 @@ impl Bolt11Payment { let id = PaymentId(payment_hash.0); let preimage = self.channel_manager.get_payment_preimage(payment_hash, payment_secret.clone()).ok(); - let payment = PaymentDetails { + let kind = PaymentKind::Bolt11Jit { + hash: payment_hash, + preimage, + secret: Some(payment_secret.clone()), + lsp_fee_limits, + }; + let payment = PaymentDetails::new( id, - kind: PaymentKind::Bolt11Jit { - hash: payment_hash, - preimage, - secret: Some(payment_secret.clone()), - lsp_fee_limits, - }, + kind, amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - }; - + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; // Persist LSP peer to make sure we reconnect on restart. diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index 35fa3cfb4..5fd1208cc 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -96,13 +96,13 @@ impl Bolt12Payment { secret: None, offer_id: offer.id(), }; - let payment = PaymentDetails { - id: payment_id, + let payment = PaymentDetails::new( + payment_id, kind, - amount_msat: Some(*offer_amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - }; + Some(*offer_amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; Ok(payment_id) @@ -118,13 +118,13 @@ impl Bolt12Payment { secret: None, offer_id: offer.id(), }; - let payment = PaymentDetails { - id: payment_id, + let payment = PaymentDetails::new( + payment_id, kind, - amount_msat: Some(*offer_amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - }; + Some(*offer_amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); self.payment_store.insert(payment)?; Err(Error::InvoiceRequestCreationFailed) }, @@ -197,13 +197,13 @@ impl Bolt12Payment { secret: None, offer_id: offer.id(), }; - let payment = PaymentDetails { - id: payment_id, + let payment = PaymentDetails::new( + payment_id, kind, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - }; + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; Ok(payment_id) @@ -219,13 +219,13 @@ impl Bolt12Payment { secret: None, offer_id: offer.id(), }; - let payment = PaymentDetails { - id: payment_id, + let payment = PaymentDetails::new( + payment_id, kind, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - }; + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); self.payment_store.insert(payment)?; Err(Error::PaymentSendingFailed) }, @@ -281,17 +281,16 @@ impl Bolt12Payment { let payment_hash = invoice.payment_hash(); let payment_id = PaymentId(payment_hash.0); - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Bolt12Refund { - hash: 
Some(payment_hash), - preimage: None, - secret: None, - }, - amount_msat: Some(refund.amount_msats()), - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - }; + let kind = + PaymentKind::Bolt12Refund { hash: Some(payment_hash), preimage: None, secret: None }; + + let payment = PaymentDetails::new( + payment_id, + kind, + Some(refund.amount_msats()), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; @@ -333,13 +332,13 @@ impl Bolt12Payment { let kind = PaymentKind::Bolt12Refund { hash: None, preimage: None, secret: None }; - let payment = PaymentDetails { - id: payment_id, + let payment = PaymentDetails::new( + payment_id, kind, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - }; + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs index fcca8065a..482df42d9 100644 --- a/src/payment/spontaneous.rs +++ b/src/payment/spontaneous.rs @@ -77,16 +77,17 @@ impl SpontaneousPayment { Ok(_hash) => { log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Spontaneous { - hash: payment_hash, - preimage: Some(payment_preimage), - }, - status: PaymentStatus::Pending, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), + let kind = PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); self.payment_store.insert(payment)?; Ok(payment_id) @@ -97,17 +98,17 @@ impl SpontaneousPayment { match e { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { - let payment = PaymentDetails { - id: payment_id, - kind: PaymentKind::Spontaneous { - hash: payment_hash, - preimage: Some(payment_preimage), - }, - - status: PaymentStatus::Failed, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), + let kind = PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); self.payment_store.insert(payment)?; Err(Error::PaymentSendingFailed) diff --git a/src/payment/store.rs b/src/payment/store.rs index f7f4942be..eb3ac091f 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -20,6 +20,7 @@ use std::collections::HashMap; use std::iter::FromIterator; use std::ops::Deref; use std::sync::{Arc, Mutex}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; /// Represents a payment. #[derive(Clone, Debug, PartialEq, Eq)] @@ -34,6 +35,21 @@ pub struct PaymentDetails { pub direction: PaymentDirection, /// The status of the payment. pub status: PaymentStatus, + /// The timestamp, in seconds since start of the UNIX epoch, when this entry was last updated. 
+ pub latest_update_timestamp: u64, +} + +impl PaymentDetails { + pub(crate) fn new( + id: PaymentId, kind: PaymentKind, amount_msat: Option, direction: PaymentDirection, + status: PaymentStatus, + ) -> Self { + let latest_update_timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); + Self { id, kind, amount_msat, direction, status, latest_update_timestamp } + } } impl Writeable for PaymentDetails { @@ -48,6 +64,7 @@ impl Writeable for PaymentDetails { (3, self.kind, required), // 4 used to be `secret` before it was moved to `kind` in v0.3.0 (4, None::>, required), + (5, self.latest_update_timestamp, required), (6, self.amount_msat, required), (8, self.direction, required), (10, self.status, required) @@ -58,12 +75,17 @@ impl Writeable for PaymentDetails { impl Readable for PaymentDetails { fn read(reader: &mut R) -> Result { + let unix_time_secs = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); _init_and_read_len_prefixed_tlv_fields!(reader, { (0, id, required), // Used to be `hash` (1, lsp_fee_limits, option), (2, preimage, required), (3, kind_opt, option), (4, secret, required), + (5, latest_update_timestamp, (default_value, unix_time_secs)), (6, amount_msat, required), (8, direction, required), (10, status, required) @@ -72,6 +94,8 @@ impl Readable for PaymentDetails { let id: PaymentId = id.0.ok_or(DecodeError::InvalidValue)?; let preimage: Option = preimage.0.ok_or(DecodeError::InvalidValue)?; let secret: Option = secret.0.ok_or(DecodeError::InvalidValue)?; + let latest_update_timestamp: u64 = + latest_update_timestamp.0.ok_or(DecodeError::InvalidValue)?; let amount_msat: Option = amount_msat.0.ok_or(DecodeError::InvalidValue)?; let direction: PaymentDirection = direction.0.ok_or(DecodeError::InvalidValue)?; let status: PaymentStatus = status.0.ok_or(DecodeError::InvalidValue)?; @@ -103,7 +127,7 @@ impl Readable for PaymentDetails { } }; - Ok(PaymentDetails { id, kind, amount_msat, direction, status }) + Ok(PaymentDetails { id, kind, amount_msat, direction, status, latest_update_timestamp }) } } @@ -391,6 +415,11 @@ where payment.status = status; } + payment.latest_update_timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); + self.persist_info(&update.id, payment)?; updated = true; } @@ -487,13 +516,9 @@ mod tests { ) .is_err()); - let payment = PaymentDetails { - id, - kind: PaymentKind::Bolt11 { hash, preimage: None, secret: None }, - amount_msat: None, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - }; + let kind = PaymentKind::Bolt11 { hash, preimage: None, secret: None }; + let payment = + PaymentDetails::new(id, kind, None, PaymentDirection::Inbound, PaymentStatus::Pending); assert_eq!(Ok(false), payment_store.insert(payment.clone())); assert!(payment_store.get(&id).is_some()); From 80d24c399dfd54ca5c5fad0cfcf5ef2a50052e87 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 18 Mar 2024 11:41:20 +0100 Subject: [PATCH 69/89] Introduce `balance_cache` to avoid blocking on retrieving balances Unfortunately BDK's current wallet design requires us to have it live in `Mutex` that is locked for long periods of time during syncing. This is especially painful for short-lived operations that just operate locally, such as retrieving the current balance, which we now do in several places to be able to check Anchor channels limitations, e.g., in event handling. 
In order to avoid blocking during balance retrieval, we introduce a `balance` cache that will be refreshed whenever we're done with syncing *or* when we can successfully get the wallet lock. Otherwise, we'll just return the cached value, allowing us to make progress even though a background sync of the wallet might be in-progress. --- src/wallet.rs | 59 ++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 47 insertions(+), 12 deletions(-) diff --git a/src/wallet.rs b/src/wallet.rs index d970b52a6..5ade04481 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -17,7 +17,7 @@ use lightning::util::message_signing; use bdk::blockchain::EsploraBlockchain; use bdk::database::BatchDatabase; use bdk::wallet::AddressIndex; -use bdk::FeeRate; +use bdk::{Balance, FeeRate}; use bdk::{SignOptions, SyncOptions}; use bitcoin::address::{Payload, WitnessVersion}; @@ -34,7 +34,7 @@ use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; use std::ops::Deref; -use std::sync::{Arc, Condvar, Mutex}; +use std::sync::{Arc, Condvar, Mutex, RwLock}; use std::time::Duration; pub struct Wallet @@ -52,6 +52,8 @@ where broadcaster: B, fee_estimator: E, sync_lock: (Mutex<()>, Condvar), + // TODO: Drop this workaround after BDK 1.0 upgrade. + balance_cache: RwLock, logger: L, } @@ -66,9 +68,17 @@ where blockchain: EsploraBlockchain, wallet: bdk::Wallet, broadcaster: B, fee_estimator: E, logger: L, ) -> Self { + let start_balance = wallet.get_balance().unwrap_or(Balance { + immature: 0, + trusted_pending: 0, + untrusted_pending: 0, + confirmed: 0, + }); + let inner = Mutex::new(wallet); let sync_lock = (Mutex::new(()), Condvar::new()); - Self { blockchain, inner, broadcaster, fee_estimator, sync_lock, logger } + let balance_cache = RwLock::new(start_balance); + Self { blockchain, inner, broadcaster, fee_estimator, sync_lock, balance_cache, logger } } pub(crate) async fn sync(&self) -> Result<(), Error> { @@ -88,10 +98,19 @@ where let sync_options = SyncOptions { progress: None }; let wallet_lock = self.inner.lock().unwrap(); let res = match wallet_lock.sync(&self.blockchain, sync_options).await { - Ok(()) => Ok(()), + Ok(()) => { + // TODO: Drop this workaround after BDK 1.0 upgrade. + // Update balance cache after syncing. + if let Ok(balance) = wallet_lock.get_balance() { + *self.balance_cache.write().unwrap() = balance; + } + Ok(()) + }, Err(e) => match e { bdk::Error::Esplora(ref be) => match **be { bdk::blockchain::esplora::EsploraError::Reqwest(_) => { + // Drop lock, sleep for a second, retry. + drop(wallet_lock); tokio::time::sleep(Duration::from_secs(1)).await; log_error!( self.logger, @@ -99,7 +118,12 @@ where e ); let sync_options = SyncOptions { progress: None }; - wallet_lock.sync(&self.blockchain, sync_options).await.map_err(From::from) + self.inner + .lock() + .unwrap() + .sync(&self.blockchain, sync_options) + .await + .map_err(From::from) }, _ => { log_error!(self.logger, "Sync failed due to Esplora error: {}", e); @@ -175,13 +199,24 @@ where pub(crate) fn get_balances( &self, total_anchor_channels_reserve_sats: u64, ) -> Result<(u64, u64), Error> { - let wallet_lock = self.inner.lock().unwrap(); - let (total, spendable) = wallet_lock.get_balance().map(|bal| { - ( - bal.get_total(), - bal.get_spendable().saturating_sub(total_anchor_channels_reserve_sats), - ) - })?; + // TODO: Drop this workaround after BDK 1.0 upgrade. + // We get the balance and update our cache if we can do so without blocking on the wallet + // Mutex. 
Otherwise, we return a cached value. + let balance = match self.inner.try_lock() { + Ok(wallet_lock) => { + // Update balance cache if we can. + let balance = wallet_lock.get_balance()?; + *self.balance_cache.write().unwrap() = balance.clone(); + balance + }, + Err(_) => self.balance_cache.read().unwrap().clone(), + }; + + let (total, spendable) = ( + balance.get_total(), + balance.get_spendable().saturating_sub(total_anchor_channels_reserve_sats), + ); + Ok((total, spendable)) } From e3279d7ee831d89dd3fdeb99ccda81524fc63b0b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Apr 2024 14:00:30 +0200 Subject: [PATCH 70/89] Drop potentially dangerous `sync_lock` `Condvar` and use pub/sub model Using a `Condvar` could be potentially dangerous in async contexts as `wait`ing on it might block the current thread potentially hosting more than one task. Here, we drop the `Condvar` and adopt a pub/sub scheme instead, similar to the one we already implemented in `ConnectionManager`. --- src/wallet.rs | 164 +++++++++++++++++++++++++++++++++----------------- 1 file changed, 110 insertions(+), 54 deletions(-) diff --git a/src/wallet.rs b/src/wallet.rs index 5ade04481..23679e141 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -33,10 +33,15 @@ use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature}; use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; -use std::ops::Deref; -use std::sync::{Arc, Condvar, Mutex, RwLock}; +use std::ops::{Deref, DerefMut}; +use std::sync::{Arc, Mutex, RwLock}; use std::time::Duration; +enum WalletSyncStatus { + Completed, + InProgress { subscribers: tokio::sync::broadcast::Sender> }, +} + pub struct Wallet where D: BatchDatabase, @@ -51,7 +56,8 @@ where // A cache storing the most recently retrieved fee rate estimations. broadcaster: B, fee_estimator: E, - sync_lock: (Mutex<()>, Condvar), + // A Mutex holding the current sync status. + sync_status: Mutex, // TODO: Drop this workaround after BDK 1.0 upgrade. balance_cache: RwLock, logger: L, @@ -76,69 +82,66 @@ where }); let inner = Mutex::new(wallet); - let sync_lock = (Mutex::new(()), Condvar::new()); + let sync_status = Mutex::new(WalletSyncStatus::Completed); let balance_cache = RwLock::new(start_balance); - Self { blockchain, inner, broadcaster, fee_estimator, sync_lock, balance_cache, logger } + Self { blockchain, inner, broadcaster, fee_estimator, sync_status, balance_cache, logger } } pub(crate) async fn sync(&self) -> Result<(), Error> { - let (lock, cvar) = &self.sync_lock; - - let guard = match lock.try_lock() { - Ok(guard) => guard, - Err(_) => { - log_info!(self.logger, "Sync in progress, skipping."); - let guard = cvar.wait(lock.lock().unwrap()); - drop(guard); - cvar.notify_all(); - return Ok(()); - }, - }; + if let Some(mut sync_receiver) = self.register_or_subscribe_pending_sync() { + log_info!(self.logger, "Sync in progress, skipping."); + return sync_receiver.recv().await.map_err(|e| { + debug_assert!(false, "Failed to receive wallet sync result: {:?}", e); + log_error!(self.logger, "Failed to receive wallet sync result: {:?}", e); + Error::WalletOperationFailed + })?; + } - let sync_options = SyncOptions { progress: None }; - let wallet_lock = self.inner.lock().unwrap(); - let res = match wallet_lock.sync(&self.blockchain, sync_options).await { - Ok(()) => { - // TODO: Drop this workaround after BDK 1.0 upgrade. - // Update balance cache after syncing. 
- if let Ok(balance) = wallet_lock.get_balance() { - *self.balance_cache.write().unwrap() = balance; - } - Ok(()) - }, - Err(e) => match e { - bdk::Error::Esplora(ref be) => match **be { - bdk::blockchain::esplora::EsploraError::Reqwest(_) => { - // Drop lock, sleep for a second, retry. - drop(wallet_lock); - tokio::time::sleep(Duration::from_secs(1)).await; - log_error!( - self.logger, - "Sync failed due to HTTP connection error, retrying: {}", - e - ); - let sync_options = SyncOptions { progress: None }; - self.inner - .lock() - .unwrap() - .sync(&self.blockchain, sync_options) - .await - .map_err(From::from) + let res = { + let wallet_lock = self.inner.lock().unwrap(); + match wallet_lock.sync(&self.blockchain, SyncOptions { progress: None }).await { + Ok(()) => { + // TODO: Drop this workaround after BDK 1.0 upgrade. + // Update balance cache after syncing. + if let Ok(balance) = wallet_lock.get_balance() { + *self.balance_cache.write().unwrap() = balance; + } + Ok(()) + }, + Err(e) => match e { + bdk::Error::Esplora(ref be) => match **be { + bdk::blockchain::esplora::EsploraError::Reqwest(_) => { + // Drop lock, sleep for a second, retry. + drop(wallet_lock); + tokio::time::sleep(Duration::from_secs(1)).await; + log_error!( + self.logger, + "Sync failed due to HTTP connection error, retrying: {}", + e + ); + let sync_options = SyncOptions { progress: None }; + self.inner + .lock() + .unwrap() + .sync(&self.blockchain, sync_options) + .await + .map_err(From::from) + }, + _ => { + log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + Err(From::from(e)) + }, }, _ => { - log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + log_error!(self.logger, "Wallet sync error: {}", e); Err(From::from(e)) }, }, - _ => { - log_error!(self.logger, "Wallet sync error: {}", e); - Err(From::from(e)) - }, - }, + } }; - drop(guard); - cvar.notify_all(); + self.propagate_result_to_subscribers(res); + res } @@ -303,6 +306,59 @@ where Ok(txid) } + + fn register_or_subscribe_pending_sync( + &self, + ) -> Option>> { + let mut sync_status_lock = self.sync_status.lock().unwrap(); + match sync_status_lock.deref_mut() { + WalletSyncStatus::Completed => { + // We're first to register for a sync. + let (tx, _) = tokio::sync::broadcast::channel(1); + *sync_status_lock = WalletSyncStatus::InProgress { subscribers: tx }; + None + }, + WalletSyncStatus::InProgress { subscribers } => { + // A sync is in-progress, we subscribe. + let rx = subscribers.subscribe(); + Some(rx) + }, + } + } + + fn propagate_result_to_subscribers(&self, res: Result<(), Error>) { + // Send the notification to any other tasks that might be waiting on it by now. + { + let mut sync_status_lock = self.sync_status.lock().unwrap(); + match sync_status_lock.deref_mut() { + WalletSyncStatus::Completed => { + // No sync in-progress, do nothing. + return; + }, + WalletSyncStatus::InProgress { subscribers } => { + // A sync is in-progress, we notify subscribers. 
+ if subscribers.receiver_count() > 0 { + match subscribers.send(res) { + Ok(_) => (), + Err(e) => { + debug_assert!( + false, + "Failed to send wallet sync result to subscribers: {:?}", + e + ); + log_error!( + self.logger, + "Failed to send wallet sync result to subscribers: {:?}", + e + ); + }, + } + } + *sync_status_lock = WalletSyncStatus::Completed; + }, + } + } + } } impl WalletSource for Wallet From fd4b33f627c71a79777be1a60524d22290631157 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Apr 2024 14:16:53 +0200 Subject: [PATCH 71/89] Drop immediate-retry logic in `wallet` It's not super clear that it achieves much in the face of a rate-limited Esplora server, and having a custom sleep there is just awkward. So we drop it and hope we still get a chance to sync our on-chain wallet now and then. --- src/wallet.rs | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/src/wallet.rs b/src/wallet.rs index 23679e141..95b3d9008 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -35,7 +35,6 @@ use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Mutex, RwLock}; -use std::time::Duration; enum WalletSyncStatus { Completed, @@ -111,21 +110,12 @@ where Err(e) => match e { bdk::Error::Esplora(ref be) => match **be { bdk::blockchain::esplora::EsploraError::Reqwest(_) => { - // Drop lock, sleep for a second, retry. - drop(wallet_lock); - tokio::time::sleep(Duration::from_secs(1)).await; log_error!( self.logger, - "Sync failed due to HTTP connection error, retrying: {}", + "Sync failed due to HTTP connection error: {}", e ); - let sync_options = SyncOptions { progress: None }; - self.inner - .lock() - .unwrap() - .sync(&self.blockchain, sync_options) - .await - .map_err(From::from) + Err(From::from(e)) }, _ => { log_error!(self.logger, "Sync failed due to Esplora error: {}", e); From 82ab9ac425250e2624bc3756bc11d82afc82492f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 18 Jun 2024 10:45:56 +0200 Subject: [PATCH 72/89] Drop immediate-retry logic in `tx_broadcaster` .. as we're not sure it actually increases reliability. We now only log failures, ignoring HTTP 400 as this is bitcoind's error code for "transaction already in mempool". 
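In other words, the only status special-cased below is 400; a minimal sketch of that rule (the helper name is illustrative, not part of the codebase):

    // Returns true if a failed broadcast should be logged as an error.
    // HTTP 400 is bitcoind's "transaction already in mempool" answer, so it is
    // not treated as a failure.
    fn should_log_broadcast_failure(http_status: Option<u16>) -> bool {
        http_status != Some(400)
    }

    // should_log_broadcast_failure(Some(400)) == false
    // should_log_broadcast_failure(Some(429)) == true
    // should_log_broadcast_failure(None)      == true (non-HTTP error)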
--- src/tx_broadcaster.rs | 52 +++++++++++++++++-------------------------- 1 file changed, 20 insertions(+), 32 deletions(-) diff --git a/src/tx_broadcaster.rs b/src/tx_broadcaster.rs index 40483f578..2019db28c 100644 --- a/src/tx_broadcaster.rs +++ b/src/tx_broadcaster.rs @@ -1,4 +1,4 @@ -use crate::logger::{log_bytes, log_debug, log_error, log_trace, Logger}; +use crate::logger::{log_bytes, log_error, log_trace, Logger}; use lightning::chain::chaininterface::BroadcasterInterface; use lightning::util::ser::Writeable; @@ -7,11 +7,11 @@ use esplora_client::AsyncClient as EsploraClient; use bitcoin::Transaction; +use reqwest::StatusCode; use tokio::sync::mpsc; use tokio::sync::Mutex; use std::ops::Deref; -use std::time::Duration; const BCAST_PACKAGE_QUEUE_SIZE: usize = 50; @@ -43,36 +43,24 @@ where log_trace!(self.logger, "Successfully broadcast transaction {}", tx.txid()); }, Err(e) => match e { - esplora_client::Error::Reqwest(_) => { - // Wait 500 ms and retry in case we get a `Reqwest` error (typically - // 429) - tokio::time::sleep(Duration::from_millis(500)).await; - log_error!( - self.logger, - "Sync failed due to HTTP connection error, retrying: {}", - e - ); - match self.esplora_client.broadcast(tx).await { - Ok(()) => { - log_debug!( - self.logger, - "Successfully broadcast transaction {}", - tx.txid() - ); - }, - Err(e) => { - log_error!( - self.logger, - "Failed to broadcast transaction {}: {}", - tx.txid(), - e - ); - log_trace!( - self.logger, - "Failed broadcast transaction bytes: {}", - log_bytes!(tx.encode()) - ); - }, + esplora_client::Error::Reqwest(err) => { + if err.status() == StatusCode::from_u16(400).ok() { + // Ignore 400, as this just means bitcoind already knows the + // transaction. + // FIXME: We can further differentiate here based on the error + // message which will be available with rust-esplora-client 0.7 and + // later. + } else { + log_error!( + self.logger, + "Failed to broadcast due to HTTP connection error: {}", + err + ); + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); } }, _ => { From f58f00f13b10ff3897d5a22ed26d60e35887b400 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 11:24:06 +0200 Subject: [PATCH 73/89] Add timeout for on-chain syncing .. to make progress and unblock the `Mutex` even if BDK's wallet `sync` would never return. --- bindings/ldk_node.udl | 1 + src/config.rs | 3 +++ src/error.rs | 3 +++ src/wallet.rs | 60 ++++++++++++++++++++++++++----------------- 4 files changed, 44 insertions(+), 23 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 26ab55994..188fefd09 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -166,6 +166,7 @@ enum NodeError { "PersistenceFailed", "FeerateEstimationUpdateFailed", "WalletOperationFailed", + "WalletOperationTimeout", "OnchainTxSigningFailed", "MessageSigningFailed", "TxSyncFailed", diff --git a/src/config.rs b/src/config.rs index 7adae2d4b..4b4ecc272 100644 --- a/src/config.rs +++ b/src/config.rs @@ -46,6 +46,9 @@ pub(crate) const NODE_ANN_BCAST_INTERVAL: Duration = Duration::from_secs(60 * 60 // The lower limit which we apply to any configured wallet sync intervals. pub(crate) const WALLET_SYNC_INTERVAL_MINIMUM_SECS: u64 = 10; +// The timeout after which we abort a wallet syncing operation. +pub(crate) const BDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 90; + // The length in bytes of our wallets' keys seed. 
pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; diff --git a/src/error.rs b/src/error.rs index 63ec0ad84..8cc3950ec 100644 --- a/src/error.rs +++ b/src/error.rs @@ -35,6 +35,8 @@ pub enum Error { FeerateEstimationUpdateFailed, /// A wallet operation failed. WalletOperationFailed, + /// A wallet operation timed out. + WalletOperationTimeout, /// A signing operation for transaction failed. OnchainTxSigningFailed, /// A signing operation for message failed. @@ -112,6 +114,7 @@ impl fmt::Display for Error { write!(f, "Failed to update fee rate estimates.") }, Self::WalletOperationFailed => write!(f, "Failed to conduct wallet operation."), + Self::WalletOperationTimeout => write!(f, "A wallet operation timed out."), Self::OnchainTxSigningFailed => write!(f, "Failed to sign given transaction."), Self::MessageSigningFailed => write!(f, "Failed to sign given message."), Self::TxSyncFailed => write!(f, "Failed to sync transactions."), diff --git a/src/wallet.rs b/src/wallet.rs index 95b3d9008..0da3f6db8 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -1,5 +1,6 @@ use crate::logger::{log_error, log_info, log_trace, Logger}; +use crate::config::BDK_WALLET_SYNC_TIMEOUT_SECS; use crate::Error; use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator}; @@ -35,6 +36,7 @@ use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Mutex, RwLock}; +use std::time::Duration; enum WalletSyncStatus { Completed, @@ -98,34 +100,46 @@ where let res = { let wallet_lock = self.inner.lock().unwrap(); - match wallet_lock.sync(&self.blockchain, SyncOptions { progress: None }).await { - Ok(()) => { - // TODO: Drop this workaround after BDK 1.0 upgrade. - // Update balance cache after syncing. - if let Ok(balance) = wallet_lock.get_balance() { - *self.balance_cache.write().unwrap() = balance; - } - Ok(()) - }, - Err(e) => match e { - bdk::Error::Esplora(ref be) => match **be { - bdk::blockchain::esplora::EsploraError::Reqwest(_) => { - log_error!( - self.logger, - "Sync failed due to HTTP connection error: {}", - e - ); - Err(From::from(e)) + + let wallet_sync_timeout_fut = tokio::time::timeout( + Duration::from_secs(BDK_WALLET_SYNC_TIMEOUT_SECS), + wallet_lock.sync(&self.blockchain, SyncOptions { progress: None }), + ); + + match wallet_sync_timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + // TODO: Drop this workaround after BDK 1.0 upgrade. + // Update balance cache after syncing. + if let Ok(balance) = wallet_lock.get_balance() { + *self.balance_cache.write().unwrap() = balance; + } + Ok(()) + }, + Err(e) => match e { + bdk::Error::Esplora(ref be) => match **be { + bdk::blockchain::esplora::EsploraError::Reqwest(_) => { + log_error!( + self.logger, + "Sync failed due to HTTP connection error: {}", + e + ); + Err(From::from(e)) + }, + _ => { + log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + Err(From::from(e)) + }, }, _ => { - log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + log_error!(self.logger, "Wallet sync error: {}", e); Err(From::from(e)) }, }, - _ => { - log_error!(self.logger, "Wallet sync error: {}", e); - Err(From::from(e)) - }, + }, + Err(e) => { + log_error!(self.logger, "On-chain wallet sync timed out: {}", e); + Err(Error::WalletOperationTimeout) }, } }; From 746014c4bce62e359a106d656da11f138d3ab210 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 11:38:48 +0200 Subject: [PATCH 74/89] Add timeout for Lightning syncing .. 
even though we don't expect this to block, we're better safe than sorry and start to introduce timeouts for any calls we make to remote servers. --- bindings/ldk_node.udl | 1 + src/config.rs | 3 ++ src/error.rs | 3 ++ src/lib.rs | 98 ++++++++++++++++++++++++++----------------- 4 files changed, 66 insertions(+), 39 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 188fefd09..3bd2f8b08 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -170,6 +170,7 @@ enum NodeError { "OnchainTxSigningFailed", "MessageSigningFailed", "TxSyncFailed", + "TxSyncTimeout", "GossipUpdateFailed", "LiquidityRequestFailed", "InvalidAddress", diff --git a/src/config.rs b/src/config.rs index 4b4ecc272..dd7372528 100644 --- a/src/config.rs +++ b/src/config.rs @@ -49,6 +49,9 @@ pub(crate) const WALLET_SYNC_INTERVAL_MINIMUM_SECS: u64 = 10; // The timeout after which we abort a wallet syncing operation. pub(crate) const BDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 90; +// The timeout after which we abort a wallet syncing operation. +pub(crate) const LDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 30; + // The length in bytes of our wallets' keys seed. pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; diff --git a/src/error.rs b/src/error.rs index 8cc3950ec..3d427279c 100644 --- a/src/error.rs +++ b/src/error.rs @@ -43,6 +43,8 @@ pub enum Error { MessageSigningFailed, /// A transaction sync operation failed. TxSyncFailed, + /// A transaction sync operation timed out. + TxSyncTimeout, /// A gossip updating operation failed. GossipUpdateFailed, /// A liquidity request operation failed. @@ -118,6 +120,7 @@ impl fmt::Display for Error { Self::OnchainTxSigningFailed => write!(f, "Failed to sign given transaction."), Self::MessageSigningFailed => write!(f, "Failed to sign given message."), Self::TxSyncFailed => write!(f, "Failed to sync transactions."), + Self::TxSyncTimeout => write!(f, "Syncing transactions timed out."), Self::GossipUpdateFailed => write!(f, "Failed to update gossip data."), Self::LiquidityRequestFailed => write!(f, "Failed to request inbound liquidity."), Self::InvalidAddress => write!(f, "The given address is invalid."), diff --git a/src/lib.rs b/src/lib.rs index 8c504ed51..45bb11059 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -123,8 +123,8 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - default_user_config, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, - RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, RGS_SYNC_INTERVAL, + default_user_config, LDK_WALLET_SYNC_TIMEOUT_SECS, NODE_ANN_BCAST_INTERVAL, + PEER_RECONNECTION_INTERVAL, RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; use connection::ConnectionManager; @@ -377,25 +377,31 @@ impl Node { &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; let now = Instant::now(); - match tx_sync.sync(confirmables).await { - Ok(()) => { - log_trace!( - sync_logger, - "Background sync of Lightning wallet finished in {}ms.", - now.elapsed().as_millis() - ); - let unix_time_secs_opt = - SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); - *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; - - periodically_archive_fully_resolved_monitors( - Arc::clone(&archive_cman), - Arc::clone(&archive_cmon), - Arc::clone(&sync_monitor_archival_height) - ); + let timeout_fut = tokio::time::timeout(Duration::from_secs(LDK_WALLET_SYNC_TIMEOUT_SECS), tx_sync.sync(confirmables)); + match timeout_fut.await { + Ok(res) => match res { + Ok(()) => 
{ + log_trace!( + sync_logger, + "Background sync of Lightning wallet finished in {}ms.", + now.elapsed().as_millis() + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + + periodically_archive_fully_resolved_monitors( + Arc::clone(&archive_cman), + Arc::clone(&archive_cmon), + Arc::clone(&sync_monitor_archival_height) + ); + } + Err(e) => { + log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) + } } Err(e) => { - log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) + log_error!(sync_logger, "Background sync of Lightning wallet timed out: {}", e) } } } @@ -1167,6 +1173,8 @@ impl Node { tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on( async move { let now = Instant::now(); + // We don't add an additional timeout here, as `Wallet::sync` already returns + // after a timeout. match wallet.sync().await { Ok(()) => { log_info!( @@ -1187,6 +1195,8 @@ impl Node { }; let now = Instant::now(); + // We don't add an additional timeout here, as + // `FeeEstimator::update_fee_estimates` already returns after a timeout. match fee_estimator.update_fee_estimates().await { Ok(()) => { log_info!( @@ -1207,30 +1217,40 @@ impl Node { } let now = Instant::now(); - match tx_sync.sync(confirmables).await { - Ok(()) => { - log_info!( - sync_logger, - "Sync of Lightning wallet finished in {}ms.", - now.elapsed().as_millis() - ); + let tx_sync_timeout_fut = tokio::time::timeout( + Duration::from_secs(LDK_WALLET_SYNC_TIMEOUT_SECS), + tx_sync.sync(confirmables), + ); + match tx_sync_timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + log_info!( + sync_logger, + "Sync of Lightning wallet finished in {}ms.", + now.elapsed().as_millis() + ); - let unix_time_secs_opt = SystemTime::now() - .duration_since(UNIX_EPOCH) - .ok() - .map(|d| d.as_secs()); - *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; - periodically_archive_fully_resolved_monitors( - archive_cman, - archive_cmon, - sync_monitor_archival_height, - ); - Ok(()) + periodically_archive_fully_resolved_monitors( + archive_cman, + archive_cmon, + sync_monitor_archival_height, + ); + Ok(()) + }, + Err(e) => { + log_error!(sync_logger, "Sync of Lightning wallet failed: {}", e); + Err(e.into()) + }, }, Err(e) => { - log_error!(sync_logger, "Sync of Lightning wallet failed: {}", e); - Err(e.into()) + log_error!(sync_logger, "Sync of Lightning wallet timed out: {}", e); + Err(Error::TxSyncTimeout) }, } }, From 02e4b3f69af410c171b8645fce3e4b205574a7cd Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 11:46:21 +0200 Subject: [PATCH 75/89] Add timeout for fee rate cache updates .. even though we don't expect this to block, we're better safe than sorry and start to introduce timeouts for any calls we make to remote servers. 
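The shape of the change is the same `tokio::time::timeout` wrapping used in the previous two patches; a condensed, generic sketch of the pattern with illustrative names (not the actual fee estimator code):

    use std::time::Duration;

    enum FetchError { Timeout, Remote(String) }

    async fn fetch_with_timeout<T, F>(fut: F, secs: u64) -> Result<T, FetchError>
    where
        F: std::future::Future<Output = Result<T, String>>,
    {
        tokio::time::timeout(Duration::from_secs(secs), fut)
            .await
            // Outer error: the timer elapsed before the remote call returned.
            .map_err(|_elapsed| FetchError::Timeout)?
            // Inner error: the remote call itself failed.
            .map_err(FetchError::Remote)
    }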
--- bindings/ldk_node.udl | 1 + src/config.rs | 3 +++ src/error.rs | 5 +++++ src/fee_estimator.rs | 18 +++++++++++++++++- 4 files changed, 26 insertions(+), 1 deletion(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 3bd2f8b08..fff7ced89 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -165,6 +165,7 @@ enum NodeError { "ChannelConfigUpdateFailed", "PersistenceFailed", "FeerateEstimationUpdateFailed", + "FeerateEstimationUpdateTimeout", "WalletOperationFailed", "WalletOperationTimeout", "OnchainTxSigningFailed", diff --git a/src/config.rs b/src/config.rs index dd7372528..365795175 100644 --- a/src/config.rs +++ b/src/config.rs @@ -52,6 +52,9 @@ pub(crate) const BDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 90; // The timeout after which we abort a wallet syncing operation. pub(crate) const LDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 30; +// The timeout after which we abort a fee rate cache update operation. +pub(crate) const FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS: u64 = 5; + // The length in bytes of our wallets' keys seed. pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; diff --git a/src/error.rs b/src/error.rs index 3d427279c..8ffdc53ec 100644 --- a/src/error.rs +++ b/src/error.rs @@ -33,6 +33,8 @@ pub enum Error { PersistenceFailed, /// A fee rate estimation update failed. FeerateEstimationUpdateFailed, + /// A fee rate estimation update timed out. + FeerateEstimationUpdateTimeout, /// A wallet operation failed. WalletOperationFailed, /// A wallet operation timed out. @@ -115,6 +117,9 @@ impl fmt::Display for Error { Self::FeerateEstimationUpdateFailed => { write!(f, "Failed to update fee rate estimates.") }, + Self::FeerateEstimationUpdateTimeout => { + write!(f, "Updating fee rate estimates timed out.") + }, Self::WalletOperationFailed => write!(f, "Failed to conduct wallet operation."), Self::WalletOperationTimeout => write!(f, "A wallet operation timed out."), Self::OnchainTxSigningFailed => write!(f, "Failed to sign given transaction."), diff --git a/src/fee_estimator.rs b/src/fee_estimator.rs index 74518227f..f1fa7e43b 100644 --- a/src/fee_estimator.rs +++ b/src/fee_estimator.rs @@ -1,3 +1,4 @@ +use crate::config::FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS; use crate::logger::{log_error, log_trace, Logger}; use crate::{Config, Error}; @@ -14,6 +15,7 @@ use bitcoin::Network; use std::collections::HashMap; use std::ops::Deref; use std::sync::{Arc, RwLock}; +use std::time::Duration; pub(crate) struct OnchainFeeEstimator where @@ -55,7 +57,21 @@ where ConfirmationTarget::OutputSpendingFee => 12, }; - let estimates = self.esplora_client.get_fee_estimates().await.map_err(|e| { + let estimates = tokio::time::timeout( + Duration::from_secs(FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS), + self.esplora_client.get_fee_estimates(), + ) + .await + .map_err(|e| { + log_error!( + self.logger, + "Updating fee rate estimates for {:?} timed out: {}", + target, + e + ); + Error::FeerateEstimationUpdateTimeout + })? + .map_err(|e| { log_error!( self.logger, "Failed to retrieve fee rate estimates for {:?}: {}", From b0a1dfcb7663582f95a817243f0661a5c19ea8d2 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 11:53:06 +0200 Subject: [PATCH 76/89] Add timeout for RGS updates .. even though we don't expect this to block, we're better safe than sorry and start to introduce timeouts for any calls we make to remote servers. 
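For context, the query below is assembled from the last successful sync timestamp; a minimal sketch of that construction (the example server URL and the snapshot semantics noted in the comment are assumptions, not taken from this patch):

    // `<server_url>/<unix_timestamp>` asks the RGS server for gossip data newer
    // than the given timestamp; 0 effectively requests a full snapshot.
    fn rgs_query_url(server_url: &str, latest_sync_timestamp: u32) -> String {
        format!("{}/{}", server_url, latest_sync_timestamp)
    }

    // e.g. rgs_query_url("https://rapidsync.lightningdevkit.org/snapshot", 0)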
--- bindings/ldk_node.udl | 1 + src/config.rs | 3 +++ src/error.rs | 3 +++ src/gossip.rs | 14 +++++++++++++- 4 files changed, 20 insertions(+), 1 deletion(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index fff7ced89..2723db573 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -173,6 +173,7 @@ enum NodeError { "TxSyncFailed", "TxSyncTimeout", "GossipUpdateFailed", + "GossipUpdateTimeout", "LiquidityRequestFailed", "InvalidAddress", "InvalidSocketAddress", diff --git a/src/config.rs b/src/config.rs index 365795175..91543ed87 100644 --- a/src/config.rs +++ b/src/config.rs @@ -55,6 +55,9 @@ pub(crate) const LDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 30; // The timeout after which we abort a fee rate cache update operation. pub(crate) const FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS: u64 = 5; +// The timeout after which we abort a RGS sync operation. +pub(crate) const RGS_SYNC_TIMEOUT_SECS: u64 = 5; + // The length in bytes of our wallets' keys seed. pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; diff --git a/src/error.rs b/src/error.rs index 8ffdc53ec..a8671d9a7 100644 --- a/src/error.rs +++ b/src/error.rs @@ -49,6 +49,8 @@ pub enum Error { TxSyncTimeout, /// A gossip updating operation failed. GossipUpdateFailed, + /// A gossip updating operation timed out. + GossipUpdateTimeout, /// A liquidity request operation failed. LiquidityRequestFailed, /// The given address is invalid. @@ -127,6 +129,7 @@ impl fmt::Display for Error { Self::TxSyncFailed => write!(f, "Failed to sync transactions."), Self::TxSyncTimeout => write!(f, "Syncing transactions timed out."), Self::GossipUpdateFailed => write!(f, "Failed to update gossip data."), + Self::GossipUpdateTimeout => write!(f, "Updating gossip data timed out."), Self::LiquidityRequestFailed => write!(f, "Failed to request inbound liquidity."), Self::InvalidAddress => write!(f, "The given address is invalid."), Self::InvalidSocketAddress => write!(f, "The given network address is invalid."), diff --git a/src/gossip.rs b/src/gossip.rs index 5a41bf51c..1241b0cdc 100644 --- a/src/gossip.rs +++ b/src/gossip.rs @@ -1,3 +1,4 @@ +use crate::config::RGS_SYNC_TIMEOUT_SECS; use crate::logger::{log_trace, FilesystemLogger, Logger}; use crate::types::{GossipSync, Graph, P2PGossipSync, RapidGossipSync}; use crate::Error; @@ -6,6 +7,7 @@ use lightning::routing::utxo::UtxoLookup; use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::Arc; +use std::time::Duration; pub(crate) enum GossipSource { P2PNetwork { @@ -55,7 +57,17 @@ impl GossipSource { Self::RapidGossipSync { gossip_sync, server_url, latest_sync_timestamp, logger } => { let query_timestamp = latest_sync_timestamp.load(Ordering::Acquire); let query_url = format!("{}/{}", server_url, query_timestamp); - let response = reqwest::get(query_url).await.map_err(|e| { + + let response = tokio::time::timeout( + Duration::from_secs(RGS_SYNC_TIMEOUT_SECS), + reqwest::get(query_url), + ) + .await + .map_err(|e| { + log_trace!(logger, "Retrieving RGS gossip update timed out: {}", e); + Error::GossipUpdateTimeout + })? 
+ .map_err(|e| { log_trace!(logger, "Failed to retrieve RGS gossip update: {}", e); Error::GossipUpdateFailed })?; From d67a3af47d81502c9c1044673c928788d575c80e Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 18 Jun 2024 12:01:42 +0200 Subject: [PATCH 77/89] Add timeout for broadcasting transactions --- src/config.rs | 3 ++ src/tx_broadcaster.rs | 83 ++++++++++++++++++++++++++++--------------- 2 files changed, 57 insertions(+), 29 deletions(-) diff --git a/src/config.rs b/src/config.rs index 91543ed87..12f3bd5ea 100644 --- a/src/config.rs +++ b/src/config.rs @@ -55,6 +55,9 @@ pub(crate) const LDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 30; // The timeout after which we abort a fee rate cache update operation. pub(crate) const FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS: u64 = 5; +// The timeout after which we abort a transaction broadcast operation. +pub(crate) const TX_BROADCAST_TIMEOUT_SECS: u64 = 5; + // The timeout after which we abort a RGS sync operation. pub(crate) const RGS_SYNC_TIMEOUT_SECS: u64 = 5; diff --git a/src/tx_broadcaster.rs b/src/tx_broadcaster.rs index 2019db28c..4492bcfc6 100644 --- a/src/tx_broadcaster.rs +++ b/src/tx_broadcaster.rs @@ -1,3 +1,4 @@ +use crate::config::TX_BROADCAST_TIMEOUT_SECS; use crate::logger::{log_bytes, log_error, log_trace, Logger}; use lightning::chain::chaininterface::BroadcasterInterface; @@ -12,6 +13,7 @@ use tokio::sync::mpsc; use tokio::sync::Mutex; use std::ops::Deref; +use std::time::Duration; const BCAST_PACKAGE_QUEUE_SIZE: usize = 50; @@ -38,45 +40,68 @@ where let mut receiver = self.queue_receiver.lock().await; while let Some(next_package) = receiver.recv().await { for tx in &next_package { - match self.esplora_client.broadcast(tx).await { - Ok(()) => { - log_trace!(self.logger, "Successfully broadcast transaction {}", tx.txid()); - }, - Err(e) => match e { - esplora_client::Error::Reqwest(err) => { - if err.status() == StatusCode::from_u16(400).ok() { - // Ignore 400, as this just means bitcoind already knows the - // transaction. - // FIXME: We can further differentiate here based on the error - // message which will be available with rust-esplora-client 0.7 and - // later. - } else { + let timeout_fut = tokio::time::timeout( + Duration::from_secs(TX_BROADCAST_TIMEOUT_SECS), + self.esplora_client.broadcast(tx), + ); + match timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + log_trace!( + self.logger, + "Successfully broadcast transaction {}", + tx.txid() + ); + }, + Err(e) => match e { + esplora_client::Error::Reqwest(err) => { + if err.status() == StatusCode::from_u16(400).ok() { + // Ignore 400, as this just means bitcoind already knows the + // transaction. + // FIXME: We can further differentiate here based on the error + // message which will be available with rust-esplora-client 0.7 and + // later. 
+ } else { + log_error!( + self.logger, + "Failed to broadcast due to HTTP connection error: {}", + err + ); + } + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); + }, + _ => { log_error!( self.logger, - "Failed to broadcast due to HTTP connection error: {}", - err + "Failed to broadcast transaction {}: {}", + tx.txid(), + e ); log_trace!( self.logger, "Failed broadcast transaction bytes: {}", log_bytes!(tx.encode()) ); - } - }, - _ => { - log_error!( - self.logger, - "Failed to broadcast transaction {}: {}", - tx.txid(), - e - ); - log_trace!( - self.logger, - "Failed broadcast transaction bytes: {}", - log_bytes!(tx.encode()) - ); + }, }, }, + Err(e) => { + log_error!( + self.logger, + "Failed to broadcast transaction due to timeout {}: {}", + tx.txid(), + e + ); + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); + }, } } } From de69c7588951c53ea30384086d1ebcc90bd97342 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 12:20:02 +0200 Subject: [PATCH 78/89] Log shutdowns of background tasks --- src/lib.rs | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/src/lib.rs b/src/lib.rs index 45bb11059..369072d48 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -275,6 +275,10 @@ impl Node { loop { tokio::select! { _ = stop_sync.changed() => { + log_trace!( + sync_logger, + "Stopping background syncing on-chain wallet.", + ); return; } _ = onchain_wallet_sync_interval.tick() => { @@ -321,6 +325,10 @@ impl Node { loop { tokio::select! { _ = stop_fee_updates.changed() => { + log_trace!( + fee_update_logger, + "Stopping background updates of fee rate cache.", + ); return; } _ = fee_rate_update_interval.tick() => { @@ -368,6 +376,10 @@ impl Node { loop { tokio::select! { _ = stop_sync.changed() => { + log_trace!( + sync_logger, + "Stopping background syncing Lightning wallet.", + ); return; } _ = wallet_sync_interval.tick() => { @@ -420,6 +432,10 @@ impl Node { loop { tokio::select! { _ = stop_gossip_sync.changed() => { + log_trace!( + gossip_sync_logger, + "Stopping background syncing RGS gossip data.", + ); return; } _ = interval.tick() => { @@ -495,6 +511,10 @@ impl Node { let peer_mgr = Arc::clone(&peer_manager_connection_handler); tokio::select! { _ = stop_listen.changed() => { + log_trace!( + listening_logger, + "Stopping listening to inbound connections.", + ); break; } res = listener.accept() => { @@ -527,6 +547,10 @@ impl Node { loop { tokio::select! { _ = stop_connect.changed() => { + log_trace!( + connect_logger, + "Stopping reconnecting known peers.", + ); return; } _ = interval.tick() => { @@ -572,6 +596,10 @@ impl Node { loop { tokio::select! { _ = stop_bcast.changed() => { + log_trace!( + bcast_logger, + "Stopping broadcasting node announcements.", + ); return; } _ = interval.tick() => { @@ -628,6 +656,7 @@ impl Node { let mut stop_tx_bcast = self.stop_sender.subscribe(); let tx_bcaster = Arc::clone(&self.tx_broadcaster); + let tx_bcast_logger = Arc::clone(&self.logger); runtime.spawn(async move { // Every second we try to clear our broadcasting queue. let mut interval = tokio::time::interval(Duration::from_secs(1)); @@ -635,6 +664,10 @@ impl Node { loop { tokio::select! 
{ _ = stop_tx_bcast.changed() => { + log_trace!( + tx_bcast_logger, + "Stopping broadcasting transactions.", + ); return; } _ = interval.tick() => { @@ -677,11 +710,17 @@ impl Node { let background_error_logger = Arc::clone(&self.logger); let background_scorer = Arc::clone(&self.scorer); let stop_bp = self.stop_sender.subscribe(); + let sleeper_logger = Arc::clone(&self.logger); let sleeper = move |d| { let mut stop = stop_bp.clone(); + let sleeper_logger = Arc::clone(&sleeper_logger); Box::pin(async move { tokio::select! { _ = stop.changed() => { + log_trace!( + sleeper_logger, + "Stopping processing events.", + ); true } _ = tokio::time::sleep(d) => { @@ -691,6 +730,7 @@ impl Node { }) }; + let background_stop_logger = Arc::clone(&self.logger); runtime.spawn(async move { process_events_async( background_persister, @@ -710,15 +750,21 @@ impl Node { log_error!(background_error_logger, "Failed to process events: {}", e); panic!("Failed to process events"); }); + log_trace!(background_stop_logger, "Events processing stopped.",); }); if let Some(liquidity_source) = self.liquidity_source.as_ref() { let mut stop_liquidity_handler = self.stop_sender.subscribe(); let liquidity_handler = Arc::clone(&liquidity_source); + let liquidity_logger = Arc::clone(&self.logger); runtime.spawn(async move { loop { tokio::select! { _ = stop_liquidity_handler.changed() => { + log_trace!( + liquidity_logger, + "Stopping processing liquidity events.", + ); return; } _ = liquidity_handler.handle_next_event() => {} From 0a0ccb10edd780a00e49fff9112af9702213b9b7 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 13:12:42 +0200 Subject: [PATCH 79/89] Shutdown: Wait for event processing to fully stop .. before initiating the Runtime shutdown. --- src/builder.rs | 2 ++ src/lib.rs | 62 +++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/src/builder.rs b/src/builder.rs index fa6e573b7..6aece0a96 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -959,6 +959,7 @@ fn build_with_store_internal( }; let (stop_sender, _) = tokio::sync::watch::channel(()); + let (event_handling_stopped_sender, _) = tokio::sync::watch::channel(()); let is_listening = Arc::new(AtomicBool::new(false)); let latest_wallet_sync_timestamp = Arc::new(RwLock::new(None)); @@ -971,6 +972,7 @@ fn build_with_store_internal( Ok(Node { runtime, stop_sender, + event_handling_stopped_sender, config, wallet, tx_sync, diff --git a/src/lib.rs b/src/lib.rs index 369072d48..de2a0badf 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -173,6 +173,7 @@ uniffi::include_scaffolding!("ldk_node"); pub struct Node { runtime: Arc>>, stop_sender: tokio::sync::watch::Sender<()>, + event_handling_stopped_sender: tokio::sync::watch::Sender<()>, config: Arc, wallet: Arc, tx_sync: Arc>>, @@ -731,6 +732,7 @@ impl Node { }; let background_stop_logger = Arc::clone(&self.logger); + let event_handling_stopped_sender = self.event_handling_stopped_sender.clone(); runtime.spawn(async move { process_events_async( background_persister, @@ -751,6 +753,18 @@ impl Node { panic!("Failed to process events"); }); log_trace!(background_stop_logger, "Events processing stopped.",); + + match event_handling_stopped_sender.send(()) { + Ok(_) => (), + Err(e) => { + log_error!( + background_stop_logger, + "Failed to send 'events handling stopped' signal. 
This should never happen: {}", + e + ); + debug_assert!(false); + }, + } }); if let Some(liquidity_source) = self.liquidity_source.as_ref() { @@ -800,9 +814,55 @@ impl Node { }, } - // Stop disconnect peers. + // Disconnect all peers. self.peer_manager.disconnect_all_peers(); + // Wait until event handling stopped, at least until a timeout is reached. + let event_handling_stopped_logger = Arc::clone(&self.logger); + let mut event_handling_stopped_receiver = self.event_handling_stopped_sender.subscribe(); + + // FIXME: For now, we wait up to 100 secs (BDK_WALLET_SYNC_TIMEOUT_SECS + 10) to allow + // event handling to exit gracefully even if it was blocked on the BDK wallet syncing. We + // should drop this considerably post upgrading to BDK 1.0. + let timeout_res = runtime.block_on(async { + tokio::time::timeout( + Duration::from_secs(100), + event_handling_stopped_receiver.changed(), + ) + .await + }); + + match timeout_res { + Ok(stop_res) => match stop_res { + Ok(()) => {}, + Err(e) => { + log_error!( + event_handling_stopped_logger, + "Stopping event handling failed. This should never happen: {}", + e + ); + panic!("Stopping event handling failed. This should never happen."); + }, + }, + Err(e) => { + log_error!( + event_handling_stopped_logger, + "Stopping event handling timed out: {}", + e + ); + }, + } + + #[cfg(tokio_unstable)] + { + log_trace!( + self.logger, + "Active runtime tasks left prior to shutdown: {}", + runtime.metrics().active_tasks_count() + ); + } + + // Shutdown our runtime. By now ~no or only very few tasks should be left. runtime.shutdown_timeout(Duration::from_secs(10)); log_info!(self.logger, "Shutdown complete."); From 5095d4237c3da7258f8237ed61c2d56855a38f88 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 15 May 2024 15:15:59 +0200 Subject: [PATCH 80/89] Bump `tokio` version to 1.37 .. as we use `Clone` for `tokio::sync::watch::Sender`, which was only introduced with 1.37. 
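For context, a minimal sketch of the usage motivating this bump (illustrative only, not part of the diff below; the local variable names are made up): the `event_handling_stopped_sender` added in the previous patch is a `tokio::sync::watch::Sender` that is cloned into the background event-processing task, while the `Node` keeps its own copy to subscribe on during shutdown.

    use tokio::sync::watch;

    fn main() {
        let (event_handling_stopped_sender, _) = watch::channel(());
        // Requires tokio >= 1.37: `watch::Sender` only implements `Clone`
        // from that release onwards.
        let sender_for_task = event_handling_stopped_sender.clone();
        let mut receiver = event_handling_stopped_sender.subscribe();
        sender_for_task.send(()).expect("a receiver is still alive");
        assert!(receiver.has_changed().expect("channel not closed"));
    }
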
--- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 4c4422461..6661497c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -64,7 +64,7 @@ bip39 = "2.0.0" rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } -tokio = { version = "1", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } +tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } esplora-client = { version = "0.6", default-features = false } libc = "0.2" uniffi = { version = "0.26.0", features = ["build"], optional = true } From f839015caa41d6d0331725d1671712a7bddd1b4f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Tue, 18 Jun 2024 12:26:25 +0200 Subject: [PATCH 81/89] Also apply a general 10 second socket timeout for the Esplora client --- src/builder.rs | 15 ++++++++++----- src/config.rs | 3 +++ 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index 6aece0a96..a2a93aa79 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1,6 +1,6 @@ use crate::config::{ default_user_config, Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, - DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, + DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS, DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, }; use crate::connection::ConnectionManager; use crate::event::EventQueue; @@ -558,10 +558,15 @@ fn build_with_store_internal( let (blockchain, tx_sync, tx_broadcaster, fee_estimator) = match chain_data_source_config { Some(ChainDataSourceConfig::Esplora(server_url)) => { - let tx_sync = Arc::new(EsploraSyncClient::new(server_url.clone(), Arc::clone(&logger))); - let blockchain = - EsploraBlockchain::from_client(tx_sync.client().clone(), BDK_CLIENT_STOP_GAP) - .with_concurrency(BDK_CLIENT_CONCURRENCY); + let mut client_builder = esplora_client::Builder::new(&server_url.clone()); + client_builder = client_builder.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); + let esplora_client = client_builder.build_async().unwrap(); + let tx_sync = Arc::new(EsploraSyncClient::from_client( + esplora_client.clone(), + Arc::clone(&logger), + )); + let blockchain = EsploraBlockchain::from_client(esplora_client, BDK_CLIENT_STOP_GAP) + .with_concurrency(BDK_CLIENT_CONCURRENCY); let tx_broadcaster = Arc::new(TransactionBroadcaster::new( tx_sync.client().clone(), Arc::clone(&logger), diff --git a/src/config.rs b/src/config.rs index 12f3bd5ea..d0e72080f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -28,6 +28,9 @@ pub(crate) const BDK_CLIENT_CONCURRENCY: u8 = 4; // The default Esplora server we're using. pub(crate) const DEFAULT_ESPLORA_SERVER_URL: &str = "https://blockstream.info/api"; +// The default Esplora client timeout we're using. +pub(crate) const DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS: u64 = 10; + // The timeout after which we abandon retrying failed payments. 
pub(crate) const LDK_PAYMENT_RETRY_TIMEOUT: Duration = Duration::from_secs(10); From 2b8ca85e983f1a8b4709e444eca4103c80c1c792 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 21 Jun 2024 10:13:40 +0200 Subject: [PATCH 82/89] Cut v0.3.0 release --- CHANGELOG.md | 38 +++++++++++++++++++ Cargo.toml | 2 +- Package.swift | 2 +- .../kotlin/ldk-node-android/gradle.properties | 2 +- .../kotlin/ldk-node-jvm/gradle.properties | 2 +- bindings/python/pyproject.toml | 2 +- 6 files changed, 43 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e2fb6f1c5..3a3894899 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,41 @@ +# 0.3.0 - June 21, 2024 + +This third minor release notably adds support for BOLT12 payments, Anchor +channels, and sourcing inbound liquidity via LSPS2 just-in-time channels. + +## Feature and API updates +- Support for creating and paying BOLT12 offers and refunds has been added (#265). +- Support for Anchor channels has been added (#141). +- Support for sourcing inbound liquidity via LSPS2 just-in-time (JIT) channels has been added (#223). +- The node's local view of the network graph can now be accessed via interface methods (#293). +- A new `next_event_async` method was added that allows polling the event queue asynchronously (#224). +- A `default_config` method was introduced that allows to retrieve sane default values, also in bindings (#242). +- The `PaymentFailed` and `ChannelClosed` events now include `reason` fields (#260). +- All available balances outside of channel balances are now exposed via a unified `list_balances` interface method (#250). +- The maximum in-flight HTLC value has been bumped to 100% of the channel capacity for private outbound channels (#303) and, if JIT channel support is enabled, for inbound channels (#262). +- The fee paid is now exposed via the `PaymentSuccessful` event (#271). +- A `status` method has been added allowing to retrieve information about the `Node`'s status (#272). +- `Node` no longer takes a `KVStore` type parameter, allowing to use the filesystem storage backend in bindings (#244). +- The payment APIs have been restructured to use per-type (`bolt11`, `onchain`, `bolt12`, ..) payment handlers which can be accessed via corresponding `Node::{type}_payment` methods (#270). +- Fully resolved channel monitors are now eventually moved to an archive location (#307). +- The ability to register and claim from custom payment hashes generated outside of LDK Node has been added (#308). + +## Bug Fixes +- Node announcements are now correctly only broadcast if we have any public, sufficiently confirmed channels (#248, #314). +- Falling back to default fee values is now disallowed on mainnet, ensuring we won't startup without a successful fee cache update (#249). +- Persisted peers are now correctly reconnected after startup (#265). +- Concurrent connection attempts to the same peer are no longer overriding each other (#266). +- Several steps have been taken to reduce the risk of blocking node operation on wallet syncing in the face of unresponsive Esplora services (#281). + +## Compatibility Notes +- LDK has been updated to version 0.0.123 (#291). 
+ +In total, this release features 54 files changed, 7282 insertions, 2410 deletions in 165 commits from 3 authors, in alphabetical order: + +- Elias Rohrer +- jbesraa +- Srikanth Iyengar + # 0.2.2 - May 21, 2024 This is a bugfix release that reestablishes compatibility of Swift packages diff --git a/Cargo.toml b/Cargo.toml index 6661497c7..d4a87b2a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ldk-node" -version = "0.2.1" +version = "0.3.0" authors = ["Elias Rohrer "] homepage = "https://lightningdevkit.org/" license = "MIT OR Apache-2.0" diff --git a/Package.swift b/Package.swift index c7d203713..b8efab014 100644 --- a/Package.swift +++ b/Package.swift @@ -3,7 +3,7 @@ import PackageDescription -let tag = "v0.2.1" +let tag = "v0.3.0" let checksum = "cca3d5f380c3c216c22ac892cb04a792f3982730e570df71d824462f14c1350e" let url = "https://github.com/lightningdevkit/ldk-node/releases/download/\(tag)/LDKNodeFFI.xcframework.zip" diff --git a/bindings/kotlin/ldk-node-android/gradle.properties b/bindings/kotlin/ldk-node-android/gradle.properties index f4f8cd571..70f5823b6 100644 --- a/bindings/kotlin/ldk-node-android/gradle.properties +++ b/bindings/kotlin/ldk-node-android/gradle.properties @@ -2,4 +2,4 @@ org.gradle.jvmargs=-Xmx1536m android.useAndroidX=true android.enableJetifier=true kotlin.code.style=official -libraryVersion=0.2.1 +libraryVersion=0.3.0 diff --git a/bindings/kotlin/ldk-node-jvm/gradle.properties b/bindings/kotlin/ldk-node-jvm/gradle.properties index 46f202595..4ed588117 100644 --- a/bindings/kotlin/ldk-node-jvm/gradle.properties +++ b/bindings/kotlin/ldk-node-jvm/gradle.properties @@ -1,3 +1,3 @@ org.gradle.jvmargs=-Xmx1536m kotlin.code.style=official -libraryVersion=0.2.1 +libraryVersion=0.3.0 diff --git a/bindings/python/pyproject.toml b/bindings/python/pyproject.toml index 13ad46ce6..c8ff0a79d 100644 --- a/bindings/python/pyproject.toml +++ b/bindings/python/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "ldk_node" -version = "0.2.1" +version = "0.3.0" authors = [ { name="Elias Rohrer", email="dev@tnull.de" }, ] From 8680d07df13f8edf0e85213f7bb812cfe0d79cce Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 21 Jun 2024 10:32:50 +0200 Subject: [PATCH 83/89] Update Swift files --- Package.swift | 2 +- bindings/swift/Sources/LDKNode/LDKNode.swift | 4505 +++++++++++++++--- 2 files changed, 3756 insertions(+), 751 deletions(-) diff --git a/Package.swift b/Package.swift index b8efab014..67c02dd8b 100644 --- a/Package.swift +++ b/Package.swift @@ -4,7 +4,7 @@ import PackageDescription let tag = "v0.3.0" -let checksum = "cca3d5f380c3c216c22ac892cb04a792f3982730e570df71d824462f14c1350e" +let checksum = "07c8741768956bf1a51d1c25f751b5e29d1ae9ee2fd786c4282031c9a8a92f0c" let url = "https://github.com/lightningdevkit/ldk-node/releases/download/\(tag)/LDKNodeFFI.xcframework.zip" let package = Package( diff --git a/bindings/swift/Sources/LDKNode/LDKNode.swift b/bindings/swift/Sources/LDKNode/LDKNode.swift index 18551648c..5937c9050 100644 --- a/bindings/swift/Sources/LDKNode/LDKNode.swift +++ b/bindings/swift/Sources/LDKNode/LDKNode.swift @@ -288,7 +288,7 @@ private func uniffiCheckCallStatus( } case CALL_CANCELLED: - throw CancellationError() + fatalError("Cancellation not supported yet") default: throw UniffiInternalError.unexpectedRustCallStatusCode @@ -410,21 +410,38 @@ fileprivate struct FfiConverterString: FfiConverter { } -public protocol BuilderProtocol { - func build() throws -> LdkNode - func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: 
String?) - func setEntropySeedBytes(seedBytes: [UInt8]) throws - func setEntropySeedPath(seedPath: String) - func setEsploraServer(esploraServerUrl: String) - func setGossipSourceP2p() - func setGossipSourceRgs(rgsServerUrl: String) - func setListeningAddresses(listeningAddresses: [SocketAddress]) throws - func setNetwork(network: Network) - func setStorageDirPath(storageDirPath: String) + + +public protocol Bolt11PaymentProtocol : AnyObject { + + func claimForHash(paymentHash: PaymentHash, claimableAmountMsat: UInt64, preimage: PaymentPreimage) throws + + func failForHash(paymentHash: PaymentHash) throws + + func receive(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice + + func receiveForHash(amountMsat: UInt64, description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice + + func receiveVariableAmount(description: String, expirySecs: UInt32) throws -> Bolt11Invoice + + func receiveVariableAmountForHash(description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice + + func receiveVariableAmountViaJitChannel(description: String, expirySecs: UInt32, maxProportionalLspFeeLimitPpmMsat: UInt64?) throws -> Bolt11Invoice + + func receiveViaJitChannel(amountMsat: UInt64, description: String, expirySecs: UInt32, maxLspFeeLimitMsat: UInt64?) throws -> Bolt11Invoice + + func send(invoice: Bolt11Invoice) throws -> PaymentId + + func sendProbes(invoice: Bolt11Invoice) throws + + func sendProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws + + func sendUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentId } -public class Builder: BuilderProtocol { +public class Bolt11Payment: + Bolt11PaymentProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -433,134 +450,165 @@ public class Builder: BuilderProtocol { required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { self.pointer = pointer } - public convenience init() { - self.init(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_builder_new($0) -}) - } - deinit { - try! rustCall { uniffi_ldk_node_fn_free_builder(pointer, $0) } + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_bolt11payment(self.pointer, $0) } } - - - public static func fromConfig(config: Config) -> Builder { - return Builder(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_builder_from_config( - FfiConverterTypeConfig.lower(config),$0) -}) + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_bolt11payment(pointer, $0) } } - - public func build() throws -> LdkNode { - return try FfiConverterTypeLDKNode.lift( + public func claimForHash(paymentHash: PaymentHash, claimableAmountMsat: UInt64, preimage: PaymentPreimage) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_claim_for_hash(self.uniffiClonePointer(), + FfiConverterTypePaymentHash.lower(paymentHash), + FfiConverterUInt64.lower(claimableAmountMsat), + FfiConverterTypePaymentPreimage.lower(preimage),$0 + ) +} + } + public func failForHash(paymentHash: PaymentHash) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_fail_for_hash(self.uniffiClonePointer(), + FfiConverterTypePaymentHash.lower(paymentHash),$0 + ) +} + } + public func receive(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_build(self.pointer, $0 + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs),$0 ) } ) } - - public func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_entropy_bip39_mnemonic(self.pointer, - FfiConverterTypeMnemonic.lower(mnemonic), - FfiConverterOptionString.lower(passphrase),$0 + public func receiveForHash(amountMsat: UInt64, description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_for_hash(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterTypePaymentHash.lower(paymentHash),$0 ) } + ) } - - public func setEntropySeedBytes(seedBytes: [UInt8]) throws { - try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_set_entropy_seed_bytes(self.pointer, - FfiConverterSequenceUInt8.lower(seedBytes),$0 + public func receiveVariableAmount(description: String, expirySecs: UInt32) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs),$0 ) } + ) } - - public func setEntropySeedPath(seedPath: String) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_builder_set_entropy_seed_path(self.pointer, - FfiConverterString.lower(seedPath),$0 + public func receiveVariableAmountForHash(description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount_for_hash(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterTypePaymentHash.lower(paymentHash),$0 ) } + ) } - - public func setEsploraServer(esploraServerUrl: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_esplora_server(self.pointer, - FfiConverterString.lower(esploraServerUrl),$0 + public func receiveVariableAmountViaJitChannel(description: String, expirySecs: UInt32, maxProportionalLspFeeLimitPpmMsat: UInt64?) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount_via_jit_channel(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterOptionUInt64.lower(maxProportionalLspFeeLimitPpmMsat),$0 ) } + ) } - - public func setGossipSourceP2p() { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_gossip_source_p2p(self.pointer, $0 + public func receiveViaJitChannel(amountMsat: UInt64, description: String, expirySecs: UInt32, maxLspFeeLimitMsat: UInt64?) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_via_jit_channel(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterOptionUInt64.lower(maxLspFeeLimitMsat),$0 ) } + ) } - - public func setGossipSourceRgs(rgsServerUrl: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_gossip_source_rgs(self.pointer, - FfiConverterString.lower(rgsServerUrl),$0 + public func send(invoice: Bolt11Invoice) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice),$0 ) } + ) } - - public func setListeningAddresses(listeningAddresses: [SocketAddress]) throws { + public func sendProbes(invoice: Bolt11Invoice) throws { try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_set_listening_addresses(self.pointer, - FfiConverterSequenceTypeSocketAddress.lower(listeningAddresses),$0 + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_probes(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice),$0 ) } } - - public func setNetwork(network: Network) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_builder_set_network(self.pointer, - FfiConverterTypeNetwork.lower(network),$0 + public func sendProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_probes_using_amount(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice), + FfiConverterUInt64.lower(amountMsat),$0 ) } } - - public func setStorageDirPath(storageDirPath: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_storage_dir_path(self.pointer, - FfiConverterString.lower(storageDirPath),$0 + public func sendUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_using_amount(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice), + FfiConverterUInt64.lower(amountMsat),$0 ) } + ) } + } -public struct FfiConverterTypeBuilder: FfiConverter { +public struct FfiConverterTypeBolt11Payment: FfiConverter { + typealias FfiType = UnsafeMutableRawPointer - typealias SwiftType = Builder + typealias SwiftType = Bolt11Payment - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Builder { + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt11Payment { + return Bolt11Payment(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Bolt11Payment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt11Payment { let v: UInt64 = try readInt(&buf) // The Rust code won't compile if a pointer won't fit in a UInt64. // We have to go via `UInt` because that's the thing that's the size of a pointer. @@ -571,48 +619,43 @@ public struct FfiConverterTypeBuilder: FfiConverter { return try lift(ptr!) } - public static func write(_ value: Builder, into buf: inout [UInt8]) { + public static func write(_ value: Bolt11Payment, into buf: inout [UInt8]) { // This fiddling is because `Int` is the thing that's the same size as a pointer. // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } - - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { - return Builder(unsafeFromRawPointer: pointer) - } - - public static func lower(_ value: Builder) -> UnsafeMutableRawPointer { - return value.pointer - } } -public func FfiConverterTypeBuilder_lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { - return try FfiConverterTypeBuilder.lift(pointer) +public func FfiConverterTypeBolt11Payment_lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt11Payment { + return try FfiConverterTypeBolt11Payment.lift(pointer) } -public func FfiConverterTypeBuilder_lower(_ value: Builder) -> UnsafeMutableRawPointer { - return FfiConverterTypeBuilder.lower(value) +public func FfiConverterTypeBolt11Payment_lower(_ value: Bolt11Payment) -> UnsafeMutableRawPointer { + return FfiConverterTypeBolt11Payment.lower(value) } -public protocol ChannelConfigProtocol { - func acceptUnderpayingHtlcs() -> Bool - func cltvExpiryDelta() -> UInt16 - func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 - func forwardingFeeBaseMsat() -> UInt32 - func forwardingFeeProportionalMillionths() -> UInt32 - func setAcceptUnderpayingHtlcs(value: Bool) - func setCltvExpiryDelta(value: UInt16) - func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) - func setForwardingFeeBaseMsat(feeMsat: UInt32) - func setForwardingFeeProportionalMillionths(value: UInt32) - func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) - func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) + + +public protocol Bolt12PaymentProtocol : AnyObject { + + func initiateRefund(amountMsat: UInt64, expirySecs: UInt32) throws -> Refund + + func receive(amountMsat: UInt64, description: String) throws -> Offer + + func receiveVariableAmount(description: String) throws -> Offer + + func requestRefundPayment(refund: Refund) throws -> Bolt12Invoice + + func send(offer: Offer, payerNote: String?) throws -> PaymentId + + func sendUsingAmount(offer: Offer, payerNote: String?, amountMsat: UInt64) throws -> PaymentId } -public class ChannelConfig: ChannelConfigProtocol { +public class Bolt12Payment: + Bolt12PaymentProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -621,151 +664,533 @@ public class ChannelConfig: ChannelConfigProtocol { required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { self.pointer = pointer } - public convenience init() { - self.init(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_channelconfig_new($0) -}) + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_bolt12payment(self.pointer, $0) } } deinit { - try! rustCall { uniffi_ldk_node_fn_free_channelconfig(pointer, $0) } + try! rustCall { uniffi_ldk_node_fn_free_bolt12payment(pointer, $0) } } - - public func acceptUnderpayingHtlcs() -> Bool { - return try! FfiConverterBool.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_accept_underpaying_htlcs(self.pointer, $0 + public func initiateRefund(amountMsat: UInt64, expirySecs: UInt32) throws -> Refund { + return try FfiConverterTypeRefund.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_initiate_refund(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterUInt32.lower(expirySecs),$0 ) } ) } - - public func cltvExpiryDelta() -> UInt16 { - return try! FfiConverterUInt16.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_cltv_expiry_delta(self.pointer, $0 + public func receive(amountMsat: UInt64, description: String) throws -> Offer { + return try FfiConverterTypeOffer.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_receive(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description),$0 ) } ) } - - public func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 { - return try! FfiConverterUInt64.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_force_close_avoidance_max_fee_satoshis(self.pointer, $0 + public func receiveVariableAmount(description: String) throws -> Offer { + return try FfiConverterTypeOffer.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_receive_variable_amount(self.uniffiClonePointer(), + FfiConverterString.lower(description),$0 ) } ) } - - public func forwardingFeeBaseMsat() -> UInt32 { - return try! FfiConverterUInt32.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_base_msat(self.pointer, $0 + public func requestRefundPayment(refund: Refund) throws -> Bolt12Invoice { + return try FfiConverterTypeBolt12Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_request_refund_payment(self.uniffiClonePointer(), + FfiConverterTypeRefund.lower(refund),$0 ) } ) } - - public func forwardingFeeProportionalMillionths() -> UInt32 { - return try! FfiConverterUInt32.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_proportional_millionths(self.pointer, $0 + public func send(offer: Offer, payerNote: String?) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_send(self.uniffiClonePointer(), + FfiConverterTypeOffer.lower(offer), + FfiConverterOptionString.lower(payerNote),$0 ) } ) } - - public func setAcceptUnderpayingHtlcs(value: Bool) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_accept_underpaying_htlcs(self.pointer, - FfiConverterBool.lower(value),$0 + public func sendUsingAmount(offer: Offer, payerNote: String?, amountMsat: UInt64) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_send_using_amount(self.uniffiClonePointer(), + FfiConverterTypeOffer.lower(offer), + FfiConverterOptionString.lower(payerNote), + FfiConverterUInt64.lower(amountMsat),$0 ) } + ) } - public func setCltvExpiryDelta(value: UInt16) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_cltv_expiry_delta(self.pointer, - FfiConverterUInt16.lower(value),$0 - ) } - } - public func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_force_close_avoidance_max_fee_satoshis(self.pointer, - FfiConverterUInt64.lower(valueSat),$0 - ) -} +public struct FfiConverterTypeBolt12Payment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Bolt12Payment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt12Payment { + return Bolt12Payment(unsafeFromRawPointer: pointer) } - public func setForwardingFeeBaseMsat(feeMsat: UInt32) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_base_msat(self.pointer, - FfiConverterUInt32.lower(feeMsat),$0 - ) -} + public static func lower(_ value: Bolt12Payment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() } - public func setForwardingFeeProportionalMillionths(value: UInt32) { - try! - rustCall() { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt12Payment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Bolt12Payment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeBolt12Payment_lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt12Payment { + return try FfiConverterTypeBolt12Payment.lift(pointer) +} + +public func FfiConverterTypeBolt12Payment_lower(_ value: Bolt12Payment) -> UnsafeMutableRawPointer { + return FfiConverterTypeBolt12Payment.lower(value) +} + + + + +public protocol BuilderProtocol : AnyObject { - uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_proportional_millionths(self.pointer, - FfiConverterUInt32.lower(value),$0 + func build() throws -> Node + + func buildWithFsStore() throws -> Node + + func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) + + func setEntropySeedBytes(seedBytes: [UInt8]) throws + + func setEntropySeedPath(seedPath: String) + + func setEsploraServer(esploraServerUrl: String) + + func setGossipSourceP2p() + + func setGossipSourceRgs(rgsServerUrl: String) + + func setLiquiditySourceLsps2(address: SocketAddress, nodeId: PublicKey, token: String?) + + func setListeningAddresses(listeningAddresses: [SocketAddress]) throws + + func setNetwork(network: Network) + + func setStorageDirPath(storageDirPath: String) + +} + +public class Builder: + BuilderProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! 
rustCall { uniffi_ldk_node_fn_clone_builder(self.pointer, $0) } + } + public convenience init() { + self.init(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_builder_new($0) +}) + } + + deinit { + try! rustCall { uniffi_ldk_node_fn_free_builder(pointer, $0) } + } + + + public static func fromConfig(config: Config) -> Builder { + return Builder(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_builder_from_config( + FfiConverterTypeConfig.lower(config),$0) +}) + } + + + + + + public func build() throws -> Node { + return try FfiConverterTypeNode.lift( + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_build(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func buildWithFsStore() throws -> Node { + return try FfiConverterTypeNode.lift( + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_build_with_fs_store(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_entropy_bip39_mnemonic(self.uniffiClonePointer(), + FfiConverterTypeMnemonic.lower(mnemonic), + FfiConverterOptionString.lower(passphrase),$0 + ) +} + } + public func setEntropySeedBytes(seedBytes: [UInt8]) throws { + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_set_entropy_seed_bytes(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(seedBytes),$0 + ) +} + } + public func setEntropySeedPath(seedPath: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_entropy_seed_path(self.uniffiClonePointer(), + FfiConverterString.lower(seedPath),$0 + ) +} + } + public func setEsploraServer(esploraServerUrl: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_esplora_server(self.uniffiClonePointer(), + FfiConverterString.lower(esploraServerUrl),$0 + ) +} + } + public func setGossipSourceP2p() { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_gossip_source_p2p(self.uniffiClonePointer(), $0 + ) +} + } + public func setGossipSourceRgs(rgsServerUrl: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_gossip_source_rgs(self.uniffiClonePointer(), + FfiConverterString.lower(rgsServerUrl),$0 + ) +} + } + public func setLiquiditySourceLsps2(address: SocketAddress, nodeId: PublicKey, token: String?) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_liquidity_source_lsps2(self.uniffiClonePointer(), + FfiConverterTypeSocketAddress.lower(address), + FfiConverterTypePublicKey.lower(nodeId), + FfiConverterOptionString.lower(token),$0 ) } } + public func setListeningAddresses(listeningAddresses: [SocketAddress]) throws { + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_set_listening_addresses(self.uniffiClonePointer(), + FfiConverterSequenceTypeSocketAddress.lower(listeningAddresses),$0 + ) +} + } + public func setNetwork(network: Network) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_network(self.uniffiClonePointer(), + FfiConverterTypeNetwork.lower(network),$0 + ) +} + } + public func setStorageDirPath(storageDirPath: String) { + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_builder_set_storage_dir_path(self.uniffiClonePointer(), + FfiConverterString.lower(storageDirPath),$0 + ) +} + } + +} + +public struct FfiConverterTypeBuilder: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Builder + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { + return Builder(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Builder) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Builder { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Builder, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeBuilder_lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { + return try FfiConverterTypeBuilder.lift(pointer) +} +public func FfiConverterTypeBuilder_lower(_ value: Builder) -> UnsafeMutableRawPointer { + return FfiConverterTypeBuilder.lower(value) +} + + + + +public protocol ChannelConfigProtocol : AnyObject { + + func acceptUnderpayingHtlcs() -> Bool + + func cltvExpiryDelta() -> UInt16 + + func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 + + func forwardingFeeBaseMsat() -> UInt32 + + func forwardingFeeProportionalMillionths() -> UInt32 + + func setAcceptUnderpayingHtlcs(value: Bool) + + func setCltvExpiryDelta(value: UInt16) + + func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) + + func setForwardingFeeBaseMsat(feeMsat: UInt32) + + func setForwardingFeeProportionalMillionths(value: UInt32) + + func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) + + func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) + +} + +public class ChannelConfig: + ChannelConfigProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_channelconfig(self.pointer, $0) } + } + public convenience init() { + self.init(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_channelconfig_new($0) +}) + } + + deinit { + try! rustCall { uniffi_ldk_node_fn_free_channelconfig(pointer, $0) } + } + + + + + + public func acceptUnderpayingHtlcs() -> Bool { + return try! FfiConverterBool.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_accept_underpaying_htlcs(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func cltvExpiryDelta() -> UInt16 { + return try! FfiConverterUInt16.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_cltv_expiry_delta(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 { + return try! FfiConverterUInt64.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_force_close_avoidance_max_fee_satoshis(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forwardingFeeBaseMsat() -> UInt32 { + return try! FfiConverterUInt32.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_base_msat(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forwardingFeeProportionalMillionths() -> UInt32 { + return try! FfiConverterUInt32.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_proportional_millionths(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func setAcceptUnderpayingHtlcs(value: Bool) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_accept_underpaying_htlcs(self.uniffiClonePointer(), + FfiConverterBool.lower(value),$0 + ) +} + } + public func setCltvExpiryDelta(value: UInt16) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_cltv_expiry_delta(self.uniffiClonePointer(), + FfiConverterUInt16.lower(value),$0 + ) +} + } + public func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_force_close_avoidance_max_fee_satoshis(self.uniffiClonePointer(), + FfiConverterUInt64.lower(valueSat),$0 + ) +} + } + public func setForwardingFeeBaseMsat(feeMsat: UInt32) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_base_msat(self.uniffiClonePointer(), + FfiConverterUInt32.lower(feeMsat),$0 + ) +} + } + public func setForwardingFeeProportionalMillionths(value: UInt32) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_proportional_millionths(self.uniffiClonePointer(), + FfiConverterUInt32.lower(value),$0 + ) +} + } public func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) { try! rustCall() { - uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fee_rate_multiplier(self.pointer, + uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fee_rate_multiplier(self.uniffiClonePointer(), FfiConverterUInt64.lower(multiplier),$0 ) } } - public func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) { try! rustCall() { - uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit(self.pointer, + uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit(self.uniffiClonePointer(), FfiConverterUInt64.lower(limitMsat),$0 ) } } + } public struct FfiConverterTypeChannelConfig: FfiConverter { + typealias FfiType = UnsafeMutableRawPointer typealias SwiftType = ChannelConfig + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> ChannelConfig { + return ChannelConfig(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: ChannelConfig) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelConfig { let v: UInt64 = try readInt(&buf) // The Rust code won't compile if a pointer won't fit in a UInt64. @@ -782,14 +1207,6 @@ public struct FfiConverterTypeChannelConfig: FfiConverter { // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } - - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> ChannelConfig { - return ChannelConfig(unsafeFromRawPointer: pointer) - } - - public static func lower(_ value: ChannelConfig) -> UnsafeMutableRawPointer { - return value.pointer - } } @@ -801,46 +1218,198 @@ public func FfiConverterTypeChannelConfig_lower(_ value: ChannelConfig) -> Unsaf return FfiConverterTypeChannelConfig.lower(value) } - -public protocol LDKNodeProtocol { - func closeChannel(channelId: ChannelId, counterpartyNodeId: PublicKey) throws - func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws - func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws - func disconnect(nodeId: PublicKey) throws - func eventHandled() - func isRunning() -> Bool - func listChannels() -> [ChannelDetails] - func listPayments() -> [PaymentDetails] - func listPeers() -> [PeerDetails] - func listeningAddresses() -> [SocketAddress]? - func newOnchainAddress() throws -> Address - func nextEvent() -> Event? - func nodeId() -> PublicKey - func payment(paymentHash: PaymentHash) -> PaymentDetails? - func receivePayment(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice - func receiveVariableAmountPayment(description: String, expirySecs: UInt32) throws -> Bolt11Invoice - func removePayment(paymentHash: PaymentHash) throws - func sendAllToOnchainAddress(address: Address) throws -> Txid - func sendPayment(invoice: Bolt11Invoice) throws -> PaymentHash - func sendPaymentProbes(invoice: Bolt11Invoice) throws - func sendPaymentProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws - func sendPaymentUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentHash - func sendSpontaneousPayment(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentHash - func sendSpontaneousPaymentProbes(amountMsat: UInt64, nodeId: PublicKey) throws - func sendToOnchainAddress(address: Address, amountMsat: UInt64) throws -> Txid - func signMessage(msg: [UInt8]) throws -> String - func spendableOnchainBalanceSats() throws -> UInt64 - func start() throws - func stop() throws - func syncWallets() throws - func totalOnchainBalanceSats() throws -> UInt64 - func updateChannelConfig(channelId: ChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws - func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool - func waitNextEvent() -> Event - -} - -public class LdkNode: LDKNodeProtocol { + + + +public protocol NetworkGraphProtocol : AnyObject { + + func channel(shortChannelId: UInt64) -> ChannelInfo? + + func listChannels() -> [UInt64] + + func listNodes() -> [NodeId] + + func node(nodeId: NodeId) -> NodeInfo? + +} + +public class NetworkGraph: + NetworkGraphProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_networkgraph(self.pointer, $0) } + } + + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_networkgraph(pointer, $0) } + } + + + + + + public func channel(shortChannelId: UInt64) -> ChannelInfo? { + return try! FfiConverterOptionTypeChannelInfo.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_channel(self.uniffiClonePointer(), + FfiConverterUInt64.lower(shortChannelId),$0 + ) +} + ) + } + public func listChannels() -> [UInt64] { + return try! FfiConverterSequenceUInt64.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_list_channels(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func listNodes() -> [NodeId] { + return try! FfiConverterSequenceTypeNodeId.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_list_nodes(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func node(nodeId: NodeId) -> NodeInfo? { + return try! FfiConverterOptionTypeNodeInfo.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_node(self.uniffiClonePointer(), + FfiConverterTypeNodeId.lower(nodeId),$0 + ) +} + ) + } + +} + +public struct FfiConverterTypeNetworkGraph: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = NetworkGraph + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> NetworkGraph { + return NetworkGraph(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: NetworkGraph) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NetworkGraph { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: NetworkGraph, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeNetworkGraph_lift(_ pointer: UnsafeMutableRawPointer) throws -> NetworkGraph { + return try FfiConverterTypeNetworkGraph.lift(pointer) +} + +public func FfiConverterTypeNetworkGraph_lower(_ value: NetworkGraph) -> UnsafeMutableRawPointer { + return FfiConverterTypeNetworkGraph.lower(value) +} + + + + +public protocol NodeProtocol : AnyObject { + + func bolt11Payment() -> Bolt11Payment + + func bolt12Payment() -> Bolt12Payment + + func closeChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws + + func config() -> Config + + func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws + + func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws -> UserChannelId + + func disconnect(nodeId: PublicKey) throws + + func eventHandled() + + func forceCloseChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws + + func listBalances() -> BalanceDetails + + func listChannels() -> [ChannelDetails] + + func listPayments() -> [PaymentDetails] + + func listPeers() -> [PeerDetails] + + func listeningAddresses() -> [SocketAddress]? 
+ + func networkGraph() -> NetworkGraph + + func nextEvent() -> Event? + + func nextEventAsync() async -> Event + + func nodeId() -> PublicKey + + func onchainPayment() -> OnchainPayment + + func payment(paymentId: PaymentId) -> PaymentDetails? + + func removePayment(paymentId: PaymentId) throws + + func signMessage(msg: [UInt8]) throws -> String + + func spontaneousPayment() -> SpontaneousPayment + + func start() throws + + func status() -> NodeStatus + + func stop() throws + + func syncWallets() throws + + func updateChannelConfig(userChannelId: UserChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws + + func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool + + func waitNextEvent() -> Event + +} + +public class Node: + NodeProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -850,40 +1419,72 @@ public class LdkNode: LDKNodeProtocol { self.pointer = pointer } + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_node(self.pointer, $0) } + } + deinit { - try! rustCall { uniffi_ldk_node_fn_free_ldknode(pointer, $0) } + try! rustCall { uniffi_ldk_node_fn_free_node(pointer, $0) } } - - public func closeChannel(channelId: ChannelId, counterpartyNodeId: PublicKey) throws { + public func bolt11Payment() -> Bolt11Payment { + return try! FfiConverterTypeBolt11Payment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_bolt11_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func bolt12Payment() -> Bolt12Payment { + return try! FfiConverterTypeBolt12Payment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_bolt12_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func closeChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_close_channel(self.pointer, - FfiConverterTypeChannelId.lower(channelId), + uniffi_ldk_node_fn_method_node_close_channel(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), FfiConverterTypePublicKey.lower(counterpartyNodeId),$0 ) } } - - public func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws { + public func config() -> Config { + return try! FfiConverterTypeConfig.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_node_config(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_connect(self.pointer, + uniffi_ldk_node_fn_method_node_connect(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId), FfiConverterTypeSocketAddress.lower(address), FfiConverterBool.lower(persist),$0 ) } } - - public func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws { - try + public func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws -> UserChannelId { + return try FfiConverterTypeUserChannelId.lift( + try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_connect_open_channel(self.pointer, + uniffi_ldk_node_fn_method_node_connect_open_channel(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId), FfiConverterTypeSocketAddress.lower(address), FfiConverterUInt64.lower(channelAmountSats), @@ -892,375 +1493,700 @@ public class LdkNode: LDKNodeProtocol { FfiConverterBool.lower(announceChannel),$0 ) } + ) } - - public func disconnect(nodeId: PublicKey) throws { + public func disconnect(nodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_disconnect(self.pointer, + uniffi_ldk_node_fn_method_node_disconnect(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId),$0 ) } } - public func eventHandled() { try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_event_handled(self.pointer, $0 + uniffi_ldk_node_fn_method_node_event_handled(self.uniffiClonePointer(), $0 ) } } - - public func isRunning() -> Bool { - return try! FfiConverterBool.lift( + public func forceCloseChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_force_close_channel(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), + FfiConverterTypePublicKey.lower(counterpartyNodeId),$0 + ) +} + } + public func listBalances() -> BalanceDetails { + return try! FfiConverterTypeBalanceDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_is_running(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_balances(self.uniffiClonePointer(), $0 ) } ) } - public func listChannels() -> [ChannelDetails] { return try! FfiConverterSequenceTypeChannelDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_channels(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_channels(self.uniffiClonePointer(), $0 ) } ) } - public func listPayments() -> [PaymentDetails] { return try! FfiConverterSequenceTypePaymentDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_payments(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_payments(self.uniffiClonePointer(), $0 ) } ) } - public func listPeers() -> [PeerDetails] { return try! FfiConverterSequenceTypePeerDetails.lift( try! 
rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_peers(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_peers(self.uniffiClonePointer(), $0 ) } ) } - public func listeningAddresses() -> [SocketAddress]? { return try! FfiConverterOptionSequenceTypeSocketAddress.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_listening_addresses(self.pointer, $0 + uniffi_ldk_node_fn_method_node_listening_addresses(self.uniffiClonePointer(), $0 ) } ) } - - public func newOnchainAddress() throws -> Address { - return try FfiConverterTypeAddress.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_new_onchain_address(self.pointer, $0 + public func networkGraph() -> NetworkGraph { + return try! FfiConverterTypeNetworkGraph.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_network_graph(self.uniffiClonePointer(), $0 ) } ) } - public func nextEvent() -> Event? { return try! FfiConverterOptionTypeEvent.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_next_event(self.pointer, $0 + uniffi_ldk_node_fn_method_node_next_event(self.uniffiClonePointer(), $0 ) } ) } + public func nextEventAsync() async -> Event { + return try! await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_ldk_node_fn_method_node_next_event_async( + self.uniffiClonePointer() + ) + }, + pollFunc: ffi_ldk_node_rust_future_poll_rust_buffer, + completeFunc: ffi_ldk_node_rust_future_complete_rust_buffer, + freeFunc: ffi_ldk_node_rust_future_free_rust_buffer, + liftFunc: FfiConverterTypeEvent.lift, + errorHandler: nil + + ) + } + public func nodeId() -> PublicKey { return try! FfiConverterTypePublicKey.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_node_id(self.pointer, $0 + uniffi_ldk_node_fn_method_node_node_id(self.uniffiClonePointer(), $0 ) } ) } - - public func payment(paymentHash: PaymentHash) -> PaymentDetails? { + public func onchainPayment() -> OnchainPayment { + return try! FfiConverterTypeOnchainPayment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_onchain_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func payment(paymentId: PaymentId) -> PaymentDetails? { return try! FfiConverterOptionTypePaymentDetails.lift( try! 
rustCall() { - uniffi_ldk_node_fn_method_ldknode_payment(self.pointer, - FfiConverterTypePaymentHash.lower(paymentHash),$0 + uniffi_ldk_node_fn_method_node_payment(self.uniffiClonePointer(), + FfiConverterTypePaymentId.lower(paymentId),$0 ) } ) } - - public func receivePayment(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice { - return try FfiConverterTypeBolt11Invoice.lift( + public func removePayment(paymentId: PaymentId) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_remove_payment(self.uniffiClonePointer(), + FfiConverterTypePaymentId.lower(paymentId),$0 + ) +} + } + public func signMessage(msg: [UInt8]) throws -> String { + return try FfiConverterString.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_receive_payment(self.pointer, - FfiConverterUInt64.lower(amountMsat), - FfiConverterString.lower(description), - FfiConverterUInt32.lower(expirySecs),$0 + uniffi_ldk_node_fn_method_node_sign_message(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(msg),$0 ) } ) } - - public func receiveVariableAmountPayment(description: String, expirySecs: UInt32) throws -> Bolt11Invoice { - return try FfiConverterTypeBolt11Invoice.lift( - try + public func spontaneousPayment() -> SpontaneousPayment { + return try! FfiConverterTypeSpontaneousPayment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_spontaneous_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func start() throws { + try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_receive_variable_amount_payment(self.pointer, - FfiConverterString.lower(description), - FfiConverterUInt32.lower(expirySecs),$0 + uniffi_ldk_node_fn_method_node_start(self.uniffiClonePointer(), $0 + ) +} + } + public func status() -> NodeStatus { + return try! FfiConverterTypeNodeStatus.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_status(self.uniffiClonePointer(), $0 ) } ) } - - public func removePayment(paymentHash: PaymentHash) throws { + public func stop() throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_remove_payment(self.pointer, - FfiConverterTypePaymentHash.lower(paymentHash),$0 + uniffi_ldk_node_fn_method_node_stop(self.uniffiClonePointer(), $0 + ) +} + } + public func syncWallets() throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_sync_wallets(self.uniffiClonePointer(), $0 ) } } + public func updateChannelConfig(userChannelId: UserChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_update_channel_config(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), + FfiConverterTypePublicKey.lower(counterpartyNodeId), + FfiConverterTypeChannelConfig.lower(channelConfig),$0 + ) +} + } + public func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool { + return try! FfiConverterBool.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_verify_signature(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(msg), + FfiConverterString.lower(sig), + FfiConverterTypePublicKey.lower(pkey),$0 + ) +} + ) + } + public func waitNextEvent() -> Event { + return try! FfiConverterTypeEvent.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_node_wait_next_event(self.uniffiClonePointer(), $0 + ) +} + ) + } + +} + +public struct FfiConverterTypeNode: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Node + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Node { + return Node(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Node) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Node { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Node, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeNode_lift(_ pointer: UnsafeMutableRawPointer) throws -> Node { + return try FfiConverterTypeNode.lift(pointer) +} + +public func FfiConverterTypeNode_lower(_ value: Node) -> UnsafeMutableRawPointer { + return FfiConverterTypeNode.lower(value) +} + + + + +public protocol OnchainPaymentProtocol : AnyObject { + + func newAddress() throws -> Address + + func sendAllToAddress(address: Address) throws -> Txid + + func sendToAddress(address: Address, amountMsat: UInt64) throws -> Txid + +} + +public class OnchainPayment: + OnchainPaymentProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_onchainpayment(self.pointer, $0) } + } + + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_onchainpayment(pointer, $0) } + } + + - public func sendAllToOnchainAddress(address: Address) throws -> Txid { + + + public func newAddress() throws -> Address { + return try FfiConverterTypeAddress.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_onchainpayment_new_address(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func sendAllToAddress(address: Address) throws -> Txid { return try FfiConverterTypeTxid.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_all_to_onchain_address(self.pointer, + uniffi_ldk_node_fn_method_onchainpayment_send_all_to_address(self.uniffiClonePointer(), FfiConverterTypeAddress.lower(address),$0 ) } ) } + public func sendToAddress(address: Address, amountMsat: UInt64) throws -> Txid { + return try FfiConverterTypeTxid.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_onchainpayment_send_to_address(self.uniffiClonePointer(), + FfiConverterTypeAddress.lower(address), + FfiConverterUInt64.lower(amountMsat),$0 + ) +} + ) + } + +} + +public struct FfiConverterTypeOnchainPayment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = OnchainPayment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> OnchainPayment { + return OnchainPayment(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: OnchainPayment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OnchainPayment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: OnchainPayment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
+ writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeOnchainPayment_lift(_ pointer: UnsafeMutableRawPointer) throws -> OnchainPayment { + return try FfiConverterTypeOnchainPayment.lift(pointer) +} - public func sendPayment(invoice: Bolt11Invoice) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice),$0 - ) +public func FfiConverterTypeOnchainPayment_lower(_ value: OnchainPayment) -> UnsafeMutableRawPointer { + return FfiConverterTypeOnchainPayment.lower(value) } - ) - } - public func sendPaymentProbes(invoice: Bolt11Invoice) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_probes(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice),$0 - ) + + + +public protocol SpontaneousPaymentProtocol : AnyObject { + + func send(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentId + + func sendProbes(amountMsat: UInt64, nodeId: PublicKey) throws + } + +public class SpontaneousPayment: + SpontaneousPaymentProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer } - public func sendPaymentProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_probes_using_amount(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice), - FfiConverterUInt64.lower(amountMsat),$0 - ) -} + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_spontaneouspayment(self.pointer, $0) } } - public func sendPaymentUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_using_amount(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice), - FfiConverterUInt64.lower(amountMsat),$0 - ) -} - ) + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_spontaneouspayment(pointer, $0) } } - public func sendSpontaneousPayment(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( + + + + + public func send(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_spontaneous_payment(self.pointer, + uniffi_ldk_node_fn_method_spontaneouspayment_send(self.uniffiClonePointer(), FfiConverterUInt64.lower(amountMsat), FfiConverterTypePublicKey.lower(nodeId),$0 ) } ) } - - public func sendSpontaneousPaymentProbes(amountMsat: UInt64, nodeId: PublicKey) throws { + public func sendProbes(amountMsat: UInt64, nodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_spontaneous_payment_probes(self.pointer, + uniffi_ldk_node_fn_method_spontaneouspayment_send_probes(self.uniffiClonePointer(), FfiConverterUInt64.lower(amountMsat), FfiConverterTypePublicKey.lower(nodeId),$0 ) } } - public func sendToOnchainAddress(address: Address, amountMsat: UInt64) throws -> Txid { - return try FfiConverterTypeTxid.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_to_onchain_address(self.pointer, - FfiConverterTypeAddress.lower(address), - FfiConverterUInt64.lower(amountMsat),$0 - ) } - ) + +public struct FfiConverterTypeSpontaneousPayment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = SpontaneousPayment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> SpontaneousPayment { + return SpontaneousPayment(unsafeFromRawPointer: pointer) } - public func signMessage(msg: [UInt8]) throws -> String { - return try FfiConverterString.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_sign_message(self.pointer, - FfiConverterSequenceUInt8.lower(msg),$0 - ) -} - ) + public static func lower(_ value: SpontaneousPayment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() } - public func spendableOnchainBalanceSats() throws -> UInt64 { - return try FfiConverterUInt64.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_spendable_onchain_balance_sats(self.pointer, $0 - ) -} - ) + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SpontaneousPayment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) } - public func start() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_start(self.pointer, $0 - ) -} + public static func write(_ value: SpontaneousPayment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
+ writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } +} - public func stop() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_stop(self.pointer, $0 - ) + +public func FfiConverterTypeSpontaneousPayment_lift(_ pointer: UnsafeMutableRawPointer) throws -> SpontaneousPayment { + return try FfiConverterTypeSpontaneousPayment.lift(pointer) } - } - public func syncWallets() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_sync_wallets(self.pointer, $0 - ) +public func FfiConverterTypeSpontaneousPayment_lower(_ value: SpontaneousPayment) -> UnsafeMutableRawPointer { + return FfiConverterTypeSpontaneousPayment.lower(value) +} + + +public struct AnchorChannelsConfig { + public var trustedPeersNoReserve: [PublicKey] + public var perChannelReserveSats: UInt64 + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + trustedPeersNoReserve: [PublicKey], + perChannelReserveSats: UInt64) { + self.trustedPeersNoReserve = trustedPeersNoReserve + self.perChannelReserveSats = perChannelReserveSats + } } + + +extension AnchorChannelsConfig: Equatable, Hashable { + public static func ==(lhs: AnchorChannelsConfig, rhs: AnchorChannelsConfig) -> Bool { + if lhs.trustedPeersNoReserve != rhs.trustedPeersNoReserve { + return false + } + if lhs.perChannelReserveSats != rhs.perChannelReserveSats { + return false + } + return true } - public func totalOnchainBalanceSats() throws -> UInt64 { - return try FfiConverterUInt64.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_total_onchain_balance_sats(self.pointer, $0 - ) + public func hash(into hasher: inout Hasher) { + hasher.combine(trustedPeersNoReserve) + hasher.combine(perChannelReserveSats) + } } + + +public struct FfiConverterTypeAnchorChannelsConfig: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> AnchorChannelsConfig { + return + try AnchorChannelsConfig( + trustedPeersNoReserve: FfiConverterSequenceTypePublicKey.read(from: &buf), + perChannelReserveSats: FfiConverterUInt64.read(from: &buf) ) } - public func updateChannelConfig(channelId: ChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_update_channel_config(self.pointer, - FfiConverterTypeChannelId.lower(channelId), - FfiConverterTypePublicKey.lower(counterpartyNodeId), - FfiConverterTypeChannelConfig.lower(channelConfig),$0 - ) -} + public static func write(_ value: AnchorChannelsConfig, into buf: inout [UInt8]) { + FfiConverterSequenceTypePublicKey.write(value.trustedPeersNoReserve, into: &buf) + FfiConverterUInt64.write(value.perChannelReserveSats, into: &buf) } +} - public func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool { - return try! FfiConverterBool.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_ldknode_verify_signature(self.pointer, - FfiConverterSequenceUInt8.lower(msg), - FfiConverterString.lower(sig), - FfiConverterTypePublicKey.lower(pkey),$0 - ) + +public func FfiConverterTypeAnchorChannelsConfig_lift(_ buf: RustBuffer) throws -> AnchorChannelsConfig { + return try FfiConverterTypeAnchorChannelsConfig.lift(buf) +} + +public func FfiConverterTypeAnchorChannelsConfig_lower(_ value: AnchorChannelsConfig) -> RustBuffer { + return FfiConverterTypeAnchorChannelsConfig.lower(value) } + + +public struct BalanceDetails { + public var totalOnchainBalanceSats: UInt64 + public var spendableOnchainBalanceSats: UInt64 + public var totalAnchorChannelsReserveSats: UInt64 + public var totalLightningBalanceSats: UInt64 + public var lightningBalances: [LightningBalance] + public var pendingBalancesFromChannelClosures: [PendingSweepBalance] + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + totalOnchainBalanceSats: UInt64, + spendableOnchainBalanceSats: UInt64, + totalAnchorChannelsReserveSats: UInt64, + totalLightningBalanceSats: UInt64, + lightningBalances: [LightningBalance], + pendingBalancesFromChannelClosures: [PendingSweepBalance]) { + self.totalOnchainBalanceSats = totalOnchainBalanceSats + self.spendableOnchainBalanceSats = spendableOnchainBalanceSats + self.totalAnchorChannelsReserveSats = totalAnchorChannelsReserveSats + self.totalLightningBalanceSats = totalLightningBalanceSats + self.lightningBalances = lightningBalances + self.pendingBalancesFromChannelClosures = pendingBalancesFromChannelClosures + } +} + + +extension BalanceDetails: Equatable, Hashable { + public static func ==(lhs: BalanceDetails, rhs: BalanceDetails) -> Bool { + if lhs.totalOnchainBalanceSats != rhs.totalOnchainBalanceSats { + return false + } + if lhs.spendableOnchainBalanceSats != rhs.spendableOnchainBalanceSats { + return false + } + if lhs.totalAnchorChannelsReserveSats != rhs.totalAnchorChannelsReserveSats { + return false + } + if lhs.totalLightningBalanceSats != rhs.totalLightningBalanceSats { + return false + } + if lhs.lightningBalances != rhs.lightningBalances { + return false + } + if lhs.pendingBalancesFromChannelClosures != rhs.pendingBalancesFromChannelClosures { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(totalOnchainBalanceSats) + hasher.combine(spendableOnchainBalanceSats) + hasher.combine(totalAnchorChannelsReserveSats) + hasher.combine(totalLightningBalanceSats) + hasher.combine(lightningBalances) + hasher.combine(pendingBalancesFromChannelClosures) + } +} + + +public struct FfiConverterTypeBalanceDetails: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BalanceDetails { + return + try BalanceDetails( + totalOnchainBalanceSats: FfiConverterUInt64.read(from: &buf), + spendableOnchainBalanceSats: FfiConverterUInt64.read(from: &buf), + totalAnchorChannelsReserveSats: FfiConverterUInt64.read(from: &buf), + totalLightningBalanceSats: FfiConverterUInt64.read(from: &buf), + lightningBalances: FfiConverterSequenceTypeLightningBalance.read(from: &buf), + pendingBalancesFromChannelClosures: FfiConverterSequenceTypePendingSweepBalance.read(from: &buf) ) } - public func waitNextEvent() -> Event { - return try! FfiConverterTypeEvent.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_ldknode_wait_next_event(self.pointer, $0 - ) + public static func write(_ value: BalanceDetails, into buf: inout [UInt8]) { + FfiConverterUInt64.write(value.totalOnchainBalanceSats, into: &buf) + FfiConverterUInt64.write(value.spendableOnchainBalanceSats, into: &buf) + FfiConverterUInt64.write(value.totalAnchorChannelsReserveSats, into: &buf) + FfiConverterUInt64.write(value.totalLightningBalanceSats, into: &buf) + FfiConverterSequenceTypeLightningBalance.write(value.lightningBalances, into: &buf) + FfiConverterSequenceTypePendingSweepBalance.write(value.pendingBalancesFromChannelClosures, into: &buf) + } } - ) + + +public func FfiConverterTypeBalanceDetails_lift(_ buf: RustBuffer) throws -> BalanceDetails { + return try FfiConverterTypeBalanceDetails.lift(buf) +} + +public func FfiConverterTypeBalanceDetails_lower(_ value: BalanceDetails) -> RustBuffer { + return FfiConverterTypeBalanceDetails.lower(value) +} + + +public struct BestBlock { + public var blockHash: BlockHash + public var height: UInt32 + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + blockHash: BlockHash, + height: UInt32) { + self.blockHash = blockHash + self.height = height } } -public struct FfiConverterTypeLDKNode: FfiConverter { - typealias FfiType = UnsafeMutableRawPointer - typealias SwiftType = LdkNode - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LdkNode { - let v: UInt64 = try readInt(&buf) - // The Rust code won't compile if a pointer won't fit in a UInt64. - // We have to go via `UInt` because that's the thing that's the size of a pointer. - let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) - if (ptr == nil) { - throw UniffiInternalError.unexpectedNullPointer +extension BestBlock: Equatable, Hashable { + public static func ==(lhs: BestBlock, rhs: BestBlock) -> Bool { + if lhs.blockHash != rhs.blockHash { + return false } - return try lift(ptr!) + if lhs.height != rhs.height { + return false + } + return true } - public static func write(_ value: LdkNode, into buf: inout [UInt8]) { - // This fiddling is because `Int` is the thing that's the same size as a pointer. - // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
- writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + public func hash(into hasher: inout Hasher) { + hasher.combine(blockHash) + hasher.combine(height) } +} + - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> LdkNode { - return LdkNode(unsafeFromRawPointer: pointer) +public struct FfiConverterTypeBestBlock: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BestBlock { + return + try BestBlock( + blockHash: FfiConverterTypeBlockHash.read(from: &buf), + height: FfiConverterUInt32.read(from: &buf) + ) } - public static func lower(_ value: LdkNode) -> UnsafeMutableRawPointer { - return value.pointer + public static func write(_ value: BestBlock, into buf: inout [UInt8]) { + FfiConverterTypeBlockHash.write(value.blockHash, into: &buf) + FfiConverterUInt32.write(value.height, into: &buf) } } -public func FfiConverterTypeLDKNode_lift(_ pointer: UnsafeMutableRawPointer) throws -> LdkNode { - return try FfiConverterTypeLDKNode.lift(pointer) +public func FfiConverterTypeBestBlock_lift(_ buf: RustBuffer) throws -> BestBlock { + return try FfiConverterTypeBestBlock.lift(buf) } -public func FfiConverterTypeLDKNode_lower(_ value: LdkNode) -> UnsafeMutableRawPointer { - return FfiConverterTypeLDKNode.lower(value) +public func FfiConverterTypeBestBlock_lower(_ value: BestBlock) -> RustBuffer { + return FfiConverterTypeBestBlock.lower(value) } @@ -1272,7 +2198,6 @@ public struct ChannelDetails { public var unspendablePunishmentReserve: UInt64? public var userChannelId: UserChannelId public var feerateSatPer1000Weight: UInt32 - public var balanceMsat: UInt64 public var outboundCapacityMsat: UInt64 public var inboundCapacityMsat: UInt64 public var confirmationsRequired: UInt32? @@ -1297,7 +2222,35 @@ public struct ChannelDetails { // Default memberwise initializers are never public by default, so we // declare one manually. 
- public init(channelId: ChannelId, counterpartyNodeId: PublicKey, fundingTxo: OutPoint?, channelValueSats: UInt64, unspendablePunishmentReserve: UInt64?, userChannelId: UserChannelId, feerateSatPer1000Weight: UInt32, balanceMsat: UInt64, outboundCapacityMsat: UInt64, inboundCapacityMsat: UInt64, confirmationsRequired: UInt32?, confirmations: UInt32?, isOutbound: Bool, isChannelReady: Bool, isUsable: Bool, isPublic: Bool, cltvExpiryDelta: UInt16?, counterpartyUnspendablePunishmentReserve: UInt64, counterpartyOutboundHtlcMinimumMsat: UInt64?, counterpartyOutboundHtlcMaximumMsat: UInt64?, counterpartyForwardingInfoFeeBaseMsat: UInt32?, counterpartyForwardingInfoFeeProportionalMillionths: UInt32?, counterpartyForwardingInfoCltvExpiryDelta: UInt16?, nextOutboundHtlcLimitMsat: UInt64, nextOutboundHtlcMinimumMsat: UInt64, forceCloseSpendDelay: UInt16?, inboundHtlcMinimumMsat: UInt64, inboundHtlcMaximumMsat: UInt64?, config: ChannelConfig) { + public init( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + fundingTxo: OutPoint?, + channelValueSats: UInt64, + unspendablePunishmentReserve: UInt64?, + userChannelId: UserChannelId, + feerateSatPer1000Weight: UInt32, + outboundCapacityMsat: UInt64, + inboundCapacityMsat: UInt64, + confirmationsRequired: UInt32?, + confirmations: UInt32?, + isOutbound: Bool, + isChannelReady: Bool, + isUsable: Bool, + isPublic: Bool, + cltvExpiryDelta: UInt16?, + counterpartyUnspendablePunishmentReserve: UInt64, + counterpartyOutboundHtlcMinimumMsat: UInt64?, + counterpartyOutboundHtlcMaximumMsat: UInt64?, + counterpartyForwardingInfoFeeBaseMsat: UInt32?, + counterpartyForwardingInfoFeeProportionalMillionths: UInt32?, + counterpartyForwardingInfoCltvExpiryDelta: UInt16?, + nextOutboundHtlcLimitMsat: UInt64, + nextOutboundHtlcMinimumMsat: UInt64, + forceCloseSpendDelay: UInt16?, + inboundHtlcMinimumMsat: UInt64, + inboundHtlcMaximumMsat: UInt64?, + config: ChannelConfig) { self.channelId = channelId self.counterpartyNodeId = counterpartyNodeId self.fundingTxo = fundingTxo @@ -1305,7 +2258,6 @@ public struct ChannelDetails { self.unspendablePunishmentReserve = unspendablePunishmentReserve self.userChannelId = userChannelId self.feerateSatPer1000Weight = feerateSatPer1000Weight - self.balanceMsat = balanceMsat self.outboundCapacityMsat = outboundCapacityMsat self.inboundCapacityMsat = inboundCapacityMsat self.confirmationsRequired = confirmationsRequired @@ -1334,36 +2286,36 @@ public struct ChannelDetails { public struct FfiConverterTypeChannelDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelDetails { - return try ChannelDetails( - channelId: FfiConverterTypeChannelId.read(from: &buf), - counterpartyNodeId: FfiConverterTypePublicKey.read(from: &buf), - fundingTxo: FfiConverterOptionTypeOutPoint.read(from: &buf), - channelValueSats: FfiConverterUInt64.read(from: &buf), - unspendablePunishmentReserve: FfiConverterOptionUInt64.read(from: &buf), - userChannelId: FfiConverterTypeUserChannelId.read(from: &buf), - feerateSatPer1000Weight: FfiConverterUInt32.read(from: &buf), - balanceMsat: FfiConverterUInt64.read(from: &buf), - outboundCapacityMsat: FfiConverterUInt64.read(from: &buf), - inboundCapacityMsat: FfiConverterUInt64.read(from: &buf), - confirmationsRequired: FfiConverterOptionUInt32.read(from: &buf), - confirmations: FfiConverterOptionUInt32.read(from: &buf), - isOutbound: FfiConverterBool.read(from: &buf), - isChannelReady: FfiConverterBool.read(from: &buf), - isUsable: 
FfiConverterBool.read(from: &buf), - isPublic: FfiConverterBool.read(from: &buf), - cltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), - counterpartyUnspendablePunishmentReserve: FfiConverterUInt64.read(from: &buf), - counterpartyOutboundHtlcMinimumMsat: FfiConverterOptionUInt64.read(from: &buf), - counterpartyOutboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), - counterpartyForwardingInfoFeeBaseMsat: FfiConverterOptionUInt32.read(from: &buf), - counterpartyForwardingInfoFeeProportionalMillionths: FfiConverterOptionUInt32.read(from: &buf), - counterpartyForwardingInfoCltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), - nextOutboundHtlcLimitMsat: FfiConverterUInt64.read(from: &buf), - nextOutboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), - forceCloseSpendDelay: FfiConverterOptionUInt16.read(from: &buf), - inboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), - inboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), - config: FfiConverterTypeChannelConfig.read(from: &buf) + return + try ChannelDetails( + channelId: FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: FfiConverterTypePublicKey.read(from: &buf), + fundingTxo: FfiConverterOptionTypeOutPoint.read(from: &buf), + channelValueSats: FfiConverterUInt64.read(from: &buf), + unspendablePunishmentReserve: FfiConverterOptionUInt64.read(from: &buf), + userChannelId: FfiConverterTypeUserChannelId.read(from: &buf), + feerateSatPer1000Weight: FfiConverterUInt32.read(from: &buf), + outboundCapacityMsat: FfiConverterUInt64.read(from: &buf), + inboundCapacityMsat: FfiConverterUInt64.read(from: &buf), + confirmationsRequired: FfiConverterOptionUInt32.read(from: &buf), + confirmations: FfiConverterOptionUInt32.read(from: &buf), + isOutbound: FfiConverterBool.read(from: &buf), + isChannelReady: FfiConverterBool.read(from: &buf), + isUsable: FfiConverterBool.read(from: &buf), + isPublic: FfiConverterBool.read(from: &buf), + cltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), + counterpartyUnspendablePunishmentReserve: FfiConverterUInt64.read(from: &buf), + counterpartyOutboundHtlcMinimumMsat: FfiConverterOptionUInt64.read(from: &buf), + counterpartyOutboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), + counterpartyForwardingInfoFeeBaseMsat: FfiConverterOptionUInt32.read(from: &buf), + counterpartyForwardingInfoFeeProportionalMillionths: FfiConverterOptionUInt32.read(from: &buf), + counterpartyForwardingInfoCltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), + nextOutboundHtlcLimitMsat: FfiConverterUInt64.read(from: &buf), + nextOutboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + forceCloseSpendDelay: FfiConverterOptionUInt16.read(from: &buf), + inboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + inboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), + config: FfiConverterTypeChannelConfig.read(from: &buf) ) } @@ -1375,7 +2327,6 @@ public struct FfiConverterTypeChannelDetails: FfiConverterRustBuffer { FfiConverterOptionUInt64.write(value.unspendablePunishmentReserve, into: &buf) FfiConverterTypeUserChannelId.write(value.userChannelId, into: &buf) FfiConverterUInt32.write(value.feerateSatPer1000Weight, into: &buf) - FfiConverterUInt64.write(value.balanceMsat, into: &buf) FfiConverterUInt64.write(value.outboundCapacityMsat, into: &buf) FfiConverterUInt64.write(value.inboundCapacityMsat, into: &buf) FfiConverterOptionUInt32.write(value.confirmationsRequired, into: &buf) @@ -1401,12 +2352,191 @@ public struct 
FfiConverterTypeChannelDetails: FfiConverterRustBuffer { } -public func FfiConverterTypeChannelDetails_lift(_ buf: RustBuffer) throws -> ChannelDetails { - return try FfiConverterTypeChannelDetails.lift(buf) +public func FfiConverterTypeChannelDetails_lift(_ buf: RustBuffer) throws -> ChannelDetails { + return try FfiConverterTypeChannelDetails.lift(buf) +} + +public func FfiConverterTypeChannelDetails_lower(_ value: ChannelDetails) -> RustBuffer { + return FfiConverterTypeChannelDetails.lower(value) +} + + +public struct ChannelInfo { + public var nodeOne: NodeId + public var oneToTwo: ChannelUpdateInfo? + public var nodeTwo: NodeId + public var twoToOne: ChannelUpdateInfo? + public var capacitySats: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + nodeOne: NodeId, + oneToTwo: ChannelUpdateInfo?, + nodeTwo: NodeId, + twoToOne: ChannelUpdateInfo?, + capacitySats: UInt64?) { + self.nodeOne = nodeOne + self.oneToTwo = oneToTwo + self.nodeTwo = nodeTwo + self.twoToOne = twoToOne + self.capacitySats = capacitySats + } +} + + +extension ChannelInfo: Equatable, Hashable { + public static func ==(lhs: ChannelInfo, rhs: ChannelInfo) -> Bool { + if lhs.nodeOne != rhs.nodeOne { + return false + } + if lhs.oneToTwo != rhs.oneToTwo { + return false + } + if lhs.nodeTwo != rhs.nodeTwo { + return false + } + if lhs.twoToOne != rhs.twoToOne { + return false + } + if lhs.capacitySats != rhs.capacitySats { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(nodeOne) + hasher.combine(oneToTwo) + hasher.combine(nodeTwo) + hasher.combine(twoToOne) + hasher.combine(capacitySats) + } +} + + +public struct FfiConverterTypeChannelInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelInfo { + return + try ChannelInfo( + nodeOne: FfiConverterTypeNodeId.read(from: &buf), + oneToTwo: FfiConverterOptionTypeChannelUpdateInfo.read(from: &buf), + nodeTwo: FfiConverterTypeNodeId.read(from: &buf), + twoToOne: FfiConverterOptionTypeChannelUpdateInfo.read(from: &buf), + capacitySats: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: ChannelInfo, into buf: inout [UInt8]) { + FfiConverterTypeNodeId.write(value.nodeOne, into: &buf) + FfiConverterOptionTypeChannelUpdateInfo.write(value.oneToTwo, into: &buf) + FfiConverterTypeNodeId.write(value.nodeTwo, into: &buf) + FfiConverterOptionTypeChannelUpdateInfo.write(value.twoToOne, into: &buf) + FfiConverterOptionUInt64.write(value.capacitySats, into: &buf) + } +} + + +public func FfiConverterTypeChannelInfo_lift(_ buf: RustBuffer) throws -> ChannelInfo { + return try FfiConverterTypeChannelInfo.lift(buf) +} + +public func FfiConverterTypeChannelInfo_lower(_ value: ChannelInfo) -> RustBuffer { + return FfiConverterTypeChannelInfo.lower(value) +} + + +public struct ChannelUpdateInfo { + public var lastUpdate: UInt32 + public var enabled: Bool + public var cltvExpiryDelta: UInt16 + public var htlcMinimumMsat: UInt64 + public var htlcMaximumMsat: UInt64 + public var fees: RoutingFees + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + lastUpdate: UInt32, + enabled: Bool, + cltvExpiryDelta: UInt16, + htlcMinimumMsat: UInt64, + htlcMaximumMsat: UInt64, + fees: RoutingFees) { + self.lastUpdate = lastUpdate + self.enabled = enabled + self.cltvExpiryDelta = cltvExpiryDelta + self.htlcMinimumMsat = htlcMinimumMsat + self.htlcMaximumMsat = htlcMaximumMsat + self.fees = fees + } +} + + +extension ChannelUpdateInfo: Equatable, Hashable { + public static func ==(lhs: ChannelUpdateInfo, rhs: ChannelUpdateInfo) -> Bool { + if lhs.lastUpdate != rhs.lastUpdate { + return false + } + if lhs.enabled != rhs.enabled { + return false + } + if lhs.cltvExpiryDelta != rhs.cltvExpiryDelta { + return false + } + if lhs.htlcMinimumMsat != rhs.htlcMinimumMsat { + return false + } + if lhs.htlcMaximumMsat != rhs.htlcMaximumMsat { + return false + } + if lhs.fees != rhs.fees { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(lastUpdate) + hasher.combine(enabled) + hasher.combine(cltvExpiryDelta) + hasher.combine(htlcMinimumMsat) + hasher.combine(htlcMaximumMsat) + hasher.combine(fees) + } +} + + +public struct FfiConverterTypeChannelUpdateInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelUpdateInfo { + return + try ChannelUpdateInfo( + lastUpdate: FfiConverterUInt32.read(from: &buf), + enabled: FfiConverterBool.read(from: &buf), + cltvExpiryDelta: FfiConverterUInt16.read(from: &buf), + htlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + htlcMaximumMsat: FfiConverterUInt64.read(from: &buf), + fees: FfiConverterTypeRoutingFees.read(from: &buf) + ) + } + + public static func write(_ value: ChannelUpdateInfo, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.lastUpdate, into: &buf) + FfiConverterBool.write(value.enabled, into: &buf) + FfiConverterUInt16.write(value.cltvExpiryDelta, into: &buf) + FfiConverterUInt64.write(value.htlcMinimumMsat, into: &buf) + FfiConverterUInt64.write(value.htlcMaximumMsat, into: &buf) + FfiConverterTypeRoutingFees.write(value.fees, into: &buf) + } +} + + +public func FfiConverterTypeChannelUpdateInfo_lift(_ buf: RustBuffer) throws -> ChannelUpdateInfo { + return try FfiConverterTypeChannelUpdateInfo.lift(buf) } -public func FfiConverterTypeChannelDetails_lower(_ value: ChannelDetails) -> RustBuffer { - return FfiConverterTypeChannelDetails.lower(value) +public func FfiConverterTypeChannelUpdateInfo_lower(_ value: ChannelUpdateInfo) -> RustBuffer { + return FfiConverterTypeChannelUpdateInfo.lower(value) } @@ -1422,10 +2552,23 @@ public struct Config { public var trustedPeers0conf: [PublicKey] public var probingLiquidityLimitMultiplier: UInt64 public var logLevel: LogLevel + public var anchorChannelsConfig: AnchorChannelsConfig? // Default memberwise initializers are never public by default, so we // declare one manually. - public init(storageDirPath: String = "/tmp/ldk_node/", logDirPath: String? = nil, network: Network = .bitcoin, listeningAddresses: [SocketAddress]? 
= nil, defaultCltvExpiryDelta: UInt32 = UInt32(144), onchainWalletSyncIntervalSecs: UInt64 = UInt64(80), walletSyncIntervalSecs: UInt64 = UInt64(30), feeRateCacheUpdateIntervalSecs: UInt64 = UInt64(600), trustedPeers0conf: [PublicKey] = [], probingLiquidityLimitMultiplier: UInt64 = UInt64(3), logLevel: LogLevel = .debug) { + public init( + storageDirPath: String, + logDirPath: String?, + network: Network, + listeningAddresses: [SocketAddress]?, + defaultCltvExpiryDelta: UInt32, + onchainWalletSyncIntervalSecs: UInt64, + walletSyncIntervalSecs: UInt64, + feeRateCacheUpdateIntervalSecs: UInt64, + trustedPeers0conf: [PublicKey], + probingLiquidityLimitMultiplier: UInt64, + logLevel: LogLevel, + anchorChannelsConfig: AnchorChannelsConfig?) { self.storageDirPath = storageDirPath self.logDirPath = logDirPath self.network = network @@ -1437,6 +2580,7 @@ public struct Config { self.trustedPeers0conf = trustedPeers0conf self.probingLiquidityLimitMultiplier = probingLiquidityLimitMultiplier self.logLevel = logLevel + self.anchorChannelsConfig = anchorChannelsConfig } } @@ -1476,6 +2620,9 @@ extension Config: Equatable, Hashable { if lhs.logLevel != rhs.logLevel { return false } + if lhs.anchorChannelsConfig != rhs.anchorChannelsConfig { + return false + } return true } @@ -1491,24 +2638,27 @@ extension Config: Equatable, Hashable { hasher.combine(trustedPeers0conf) hasher.combine(probingLiquidityLimitMultiplier) hasher.combine(logLevel) + hasher.combine(anchorChannelsConfig) } } public struct FfiConverterTypeConfig: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Config { - return try Config( - storageDirPath: FfiConverterString.read(from: &buf), - logDirPath: FfiConverterOptionString.read(from: &buf), - network: FfiConverterTypeNetwork.read(from: &buf), - listeningAddresses: FfiConverterOptionSequenceTypeSocketAddress.read(from: &buf), - defaultCltvExpiryDelta: FfiConverterUInt32.read(from: &buf), - onchainWalletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), - walletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), - feeRateCacheUpdateIntervalSecs: FfiConverterUInt64.read(from: &buf), - trustedPeers0conf: FfiConverterSequenceTypePublicKey.read(from: &buf), - probingLiquidityLimitMultiplier: FfiConverterUInt64.read(from: &buf), - logLevel: FfiConverterTypeLogLevel.read(from: &buf) + return + try Config( + storageDirPath: FfiConverterString.read(from: &buf), + logDirPath: FfiConverterOptionString.read(from: &buf), + network: FfiConverterTypeNetwork.read(from: &buf), + listeningAddresses: FfiConverterOptionSequenceTypeSocketAddress.read(from: &buf), + defaultCltvExpiryDelta: FfiConverterUInt32.read(from: &buf), + onchainWalletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), + walletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), + feeRateCacheUpdateIntervalSecs: FfiConverterUInt64.read(from: &buf), + trustedPeers0conf: FfiConverterSequenceTypePublicKey.read(from: &buf), + probingLiquidityLimitMultiplier: FfiConverterUInt64.read(from: &buf), + logLevel: FfiConverterTypeLogLevel.read(from: &buf), + anchorChannelsConfig: FfiConverterOptionTypeAnchorChannelsConfig.read(from: &buf) ) } @@ -1524,6 +2674,7 @@ public struct FfiConverterTypeConfig: FfiConverterRustBuffer { FfiConverterSequenceTypePublicKey.write(value.trustedPeers0conf, into: &buf) FfiConverterUInt64.write(value.probingLiquidityLimitMultiplier, into: &buf) FfiConverterTypeLogLevel.write(value.logLevel, into: &buf) + 
FfiConverterOptionTypeAnchorChannelsConfig.write(value.anchorChannelsConfig, into: &buf) } } @@ -1537,13 +2688,310 @@ public func FfiConverterTypeConfig_lower(_ value: Config) -> RustBuffer { } +public struct LspFeeLimits { + public var maxTotalOpeningFeeMsat: UInt64? + public var maxProportionalOpeningFeePpmMsat: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + maxTotalOpeningFeeMsat: UInt64?, + maxProportionalOpeningFeePpmMsat: UInt64?) { + self.maxTotalOpeningFeeMsat = maxTotalOpeningFeeMsat + self.maxProportionalOpeningFeePpmMsat = maxProportionalOpeningFeePpmMsat + } +} + + +extension LspFeeLimits: Equatable, Hashable { + public static func ==(lhs: LspFeeLimits, rhs: LspFeeLimits) -> Bool { + if lhs.maxTotalOpeningFeeMsat != rhs.maxTotalOpeningFeeMsat { + return false + } + if lhs.maxProportionalOpeningFeePpmMsat != rhs.maxProportionalOpeningFeePpmMsat { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(maxTotalOpeningFeeMsat) + hasher.combine(maxProportionalOpeningFeePpmMsat) + } +} + + +public struct FfiConverterTypeLSPFeeLimits: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LspFeeLimits { + return + try LspFeeLimits( + maxTotalOpeningFeeMsat: FfiConverterOptionUInt64.read(from: &buf), + maxProportionalOpeningFeePpmMsat: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: LspFeeLimits, into buf: inout [UInt8]) { + FfiConverterOptionUInt64.write(value.maxTotalOpeningFeeMsat, into: &buf) + FfiConverterOptionUInt64.write(value.maxProportionalOpeningFeePpmMsat, into: &buf) + } +} + + +public func FfiConverterTypeLSPFeeLimits_lift(_ buf: RustBuffer) throws -> LspFeeLimits { + return try FfiConverterTypeLSPFeeLimits.lift(buf) +} + +public func FfiConverterTypeLSPFeeLimits_lower(_ value: LspFeeLimits) -> RustBuffer { + return FfiConverterTypeLSPFeeLimits.lower(value) +} + + +public struct NodeAnnouncementInfo { + public var lastUpdate: UInt32 + public var alias: String + public var addresses: [SocketAddress] + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + lastUpdate: UInt32, + alias: String, + addresses: [SocketAddress]) { + self.lastUpdate = lastUpdate + self.alias = alias + self.addresses = addresses + } +} + + +extension NodeAnnouncementInfo: Equatable, Hashable { + public static func ==(lhs: NodeAnnouncementInfo, rhs: NodeAnnouncementInfo) -> Bool { + if lhs.lastUpdate != rhs.lastUpdate { + return false + } + if lhs.alias != rhs.alias { + return false + } + if lhs.addresses != rhs.addresses { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(lastUpdate) + hasher.combine(alias) + hasher.combine(addresses) + } +} + + +public struct FfiConverterTypeNodeAnnouncementInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeAnnouncementInfo { + return + try NodeAnnouncementInfo( + lastUpdate: FfiConverterUInt32.read(from: &buf), + alias: FfiConverterString.read(from: &buf), + addresses: FfiConverterSequenceTypeSocketAddress.read(from: &buf) + ) + } + + public static func write(_ value: NodeAnnouncementInfo, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.lastUpdate, into: &buf) + FfiConverterString.write(value.alias, into: &buf) + FfiConverterSequenceTypeSocketAddress.write(value.addresses, into: &buf) + } +} + + +public func FfiConverterTypeNodeAnnouncementInfo_lift(_ buf: RustBuffer) throws -> NodeAnnouncementInfo { + return try FfiConverterTypeNodeAnnouncementInfo.lift(buf) +} + +public func FfiConverterTypeNodeAnnouncementInfo_lower(_ value: NodeAnnouncementInfo) -> RustBuffer { + return FfiConverterTypeNodeAnnouncementInfo.lower(value) +} + + +public struct NodeInfo { + public var channels: [UInt64] + public var announcementInfo: NodeAnnouncementInfo? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + channels: [UInt64], + announcementInfo: NodeAnnouncementInfo?) { + self.channels = channels + self.announcementInfo = announcementInfo + } +} + + +extension NodeInfo: Equatable, Hashable { + public static func ==(lhs: NodeInfo, rhs: NodeInfo) -> Bool { + if lhs.channels != rhs.channels { + return false + } + if lhs.announcementInfo != rhs.announcementInfo { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(channels) + hasher.combine(announcementInfo) + } +} + + +public struct FfiConverterTypeNodeInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeInfo { + return + try NodeInfo( + channels: FfiConverterSequenceUInt64.read(from: &buf), + announcementInfo: FfiConverterOptionTypeNodeAnnouncementInfo.read(from: &buf) + ) + } + + public static func write(_ value: NodeInfo, into buf: inout [UInt8]) { + FfiConverterSequenceUInt64.write(value.channels, into: &buf) + FfiConverterOptionTypeNodeAnnouncementInfo.write(value.announcementInfo, into: &buf) + } +} + + +public func FfiConverterTypeNodeInfo_lift(_ buf: RustBuffer) throws -> NodeInfo { + return try FfiConverterTypeNodeInfo.lift(buf) +} + +public func FfiConverterTypeNodeInfo_lower(_ value: NodeInfo) -> RustBuffer { + return FfiConverterTypeNodeInfo.lower(value) +} + + +public struct NodeStatus { + public var isRunning: Bool + public var isListening: Bool + public var currentBestBlock: BestBlock + public var latestWalletSyncTimestamp: UInt64? + public var latestOnchainWalletSyncTimestamp: UInt64? + public var latestFeeRateCacheUpdateTimestamp: UInt64? 
+ public var latestRgsSnapshotTimestamp: UInt64? + public var latestNodeAnnouncementBroadcastTimestamp: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + isRunning: Bool, + isListening: Bool, + currentBestBlock: BestBlock, + latestWalletSyncTimestamp: UInt64?, + latestOnchainWalletSyncTimestamp: UInt64?, + latestFeeRateCacheUpdateTimestamp: UInt64?, + latestRgsSnapshotTimestamp: UInt64?, + latestNodeAnnouncementBroadcastTimestamp: UInt64?) { + self.isRunning = isRunning + self.isListening = isListening + self.currentBestBlock = currentBestBlock + self.latestWalletSyncTimestamp = latestWalletSyncTimestamp + self.latestOnchainWalletSyncTimestamp = latestOnchainWalletSyncTimestamp + self.latestFeeRateCacheUpdateTimestamp = latestFeeRateCacheUpdateTimestamp + self.latestRgsSnapshotTimestamp = latestRgsSnapshotTimestamp + self.latestNodeAnnouncementBroadcastTimestamp = latestNodeAnnouncementBroadcastTimestamp + } +} + + +extension NodeStatus: Equatable, Hashable { + public static func ==(lhs: NodeStatus, rhs: NodeStatus) -> Bool { + if lhs.isRunning != rhs.isRunning { + return false + } + if lhs.isListening != rhs.isListening { + return false + } + if lhs.currentBestBlock != rhs.currentBestBlock { + return false + } + if lhs.latestWalletSyncTimestamp != rhs.latestWalletSyncTimestamp { + return false + } + if lhs.latestOnchainWalletSyncTimestamp != rhs.latestOnchainWalletSyncTimestamp { + return false + } + if lhs.latestFeeRateCacheUpdateTimestamp != rhs.latestFeeRateCacheUpdateTimestamp { + return false + } + if lhs.latestRgsSnapshotTimestamp != rhs.latestRgsSnapshotTimestamp { + return false + } + if lhs.latestNodeAnnouncementBroadcastTimestamp != rhs.latestNodeAnnouncementBroadcastTimestamp { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(isRunning) + hasher.combine(isListening) + hasher.combine(currentBestBlock) + hasher.combine(latestWalletSyncTimestamp) + hasher.combine(latestOnchainWalletSyncTimestamp) + hasher.combine(latestFeeRateCacheUpdateTimestamp) + hasher.combine(latestRgsSnapshotTimestamp) + hasher.combine(latestNodeAnnouncementBroadcastTimestamp) + } +} + + +public struct FfiConverterTypeNodeStatus: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeStatus { + return + try NodeStatus( + isRunning: FfiConverterBool.read(from: &buf), + isListening: FfiConverterBool.read(from: &buf), + currentBestBlock: FfiConverterTypeBestBlock.read(from: &buf), + latestWalletSyncTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestOnchainWalletSyncTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestFeeRateCacheUpdateTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestRgsSnapshotTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestNodeAnnouncementBroadcastTimestamp: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: NodeStatus, into buf: inout [UInt8]) { + FfiConverterBool.write(value.isRunning, into: &buf) + FfiConverterBool.write(value.isListening, into: &buf) + FfiConverterTypeBestBlock.write(value.currentBestBlock, into: &buf) + FfiConverterOptionUInt64.write(value.latestWalletSyncTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestOnchainWalletSyncTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestFeeRateCacheUpdateTimestamp, into: &buf) + 
FfiConverterOptionUInt64.write(value.latestRgsSnapshotTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestNodeAnnouncementBroadcastTimestamp, into: &buf) + } +} + + +public func FfiConverterTypeNodeStatus_lift(_ buf: RustBuffer) throws -> NodeStatus { + return try FfiConverterTypeNodeStatus.lift(buf) +} + +public func FfiConverterTypeNodeStatus_lower(_ value: NodeStatus) -> RustBuffer { + return FfiConverterTypeNodeStatus.lower(value) +} + + public struct OutPoint { public var txid: Txid public var vout: UInt32 // Default memberwise initializers are never public by default, so we // declare one manually. - public init(txid: Txid, vout: UInt32) { + public init( + txid: Txid, + vout: UInt32) { self.txid = txid self.vout = vout } @@ -1570,9 +3018,10 @@ extension OutPoint: Equatable, Hashable { public struct FfiConverterTypeOutPoint: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OutPoint { - return try OutPoint( - txid: FfiConverterTypeTxid.read(from: &buf), - vout: FfiConverterUInt32.read(from: &buf) + return + try OutPoint( + txid: FfiConverterTypeTxid.read(from: &buf), + vout: FfiConverterUInt32.read(from: &buf) ) } @@ -1593,35 +3042,38 @@ public func FfiConverterTypeOutPoint_lower(_ value: OutPoint) -> RustBuffer { public struct PaymentDetails { - public var hash: PaymentHash - public var preimage: PaymentPreimage? - public var secret: PaymentSecret? + public var id: PaymentId + public var kind: PaymentKind public var amountMsat: UInt64? public var direction: PaymentDirection public var status: PaymentStatus + public var latestUpdateTimestamp: UInt64 // Default memberwise initializers are never public by default, so we // declare one manually. - public init(hash: PaymentHash, preimage: PaymentPreimage?, secret: PaymentSecret?, amountMsat: UInt64?, direction: PaymentDirection, status: PaymentStatus) { - self.hash = hash - self.preimage = preimage - self.secret = secret + public init( + id: PaymentId, + kind: PaymentKind, + amountMsat: UInt64?, + direction: PaymentDirection, + status: PaymentStatus, + latestUpdateTimestamp: UInt64) { + self.id = id + self.kind = kind self.amountMsat = amountMsat self.direction = direction self.status = status + self.latestUpdateTimestamp = latestUpdateTimestamp } } extension PaymentDetails: Equatable, Hashable { public static func ==(lhs: PaymentDetails, rhs: PaymentDetails) -> Bool { - if lhs.hash != rhs.hash { - return false - } - if lhs.preimage != rhs.preimage { + if lhs.id != rhs.id { return false } - if lhs.secret != rhs.secret { + if lhs.kind != rhs.kind { return false } if lhs.amountMsat != rhs.amountMsat { @@ -1633,39 +3085,43 @@ extension PaymentDetails: Equatable, Hashable { if lhs.status != rhs.status { return false } + if lhs.latestUpdateTimestamp != rhs.latestUpdateTimestamp { + return false + } return true } public func hash(into hasher: inout Hasher) { - hasher.combine(hash) - hasher.combine(preimage) - hasher.combine(secret) + hasher.combine(id) + hasher.combine(kind) hasher.combine(amountMsat) hasher.combine(direction) hasher.combine(status) + hasher.combine(latestUpdateTimestamp) } } public struct FfiConverterTypePaymentDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentDetails { - return try PaymentDetails( - hash: FfiConverterTypePaymentHash.read(from: &buf), - preimage: FfiConverterOptionTypePaymentPreimage.read(from: &buf), - secret: FfiConverterOptionTypePaymentSecret.read(from: 
&buf), - amountMsat: FfiConverterOptionUInt64.read(from: &buf), - direction: FfiConverterTypePaymentDirection.read(from: &buf), - status: FfiConverterTypePaymentStatus.read(from: &buf) + return + try PaymentDetails( + id: FfiConverterTypePaymentId.read(from: &buf), + kind: FfiConverterTypePaymentKind.read(from: &buf), + amountMsat: FfiConverterOptionUInt64.read(from: &buf), + direction: FfiConverterTypePaymentDirection.read(from: &buf), + status: FfiConverterTypePaymentStatus.read(from: &buf), + latestUpdateTimestamp: FfiConverterUInt64.read(from: &buf) ) } public static func write(_ value: PaymentDetails, into buf: inout [UInt8]) { - FfiConverterTypePaymentHash.write(value.hash, into: &buf) - FfiConverterOptionTypePaymentPreimage.write(value.preimage, into: &buf) - FfiConverterOptionTypePaymentSecret.write(value.secret, into: &buf) + FfiConverterTypePaymentId.write(value.id, into: &buf) + FfiConverterTypePaymentKind.write(value.kind, into: &buf) FfiConverterOptionUInt64.write(value.amountMsat, into: &buf) FfiConverterTypePaymentDirection.write(value.direction, into: &buf) FfiConverterTypePaymentStatus.write(value.status, into: &buf) + FfiConverterUInt64.write(value.latestUpdateTimestamp, into: &buf) } } @@ -1687,7 +3143,11 @@ public struct PeerDetails { // Default memberwise initializers are never public by default, so we // declare one manually. - public init(nodeId: PublicKey, address: SocketAddress, isPersisted: Bool, isConnected: Bool) { + public init( + nodeId: PublicKey, + address: SocketAddress, + isPersisted: Bool, + isConnected: Bool) { self.nodeId = nodeId self.address = address self.isPersisted = isPersisted @@ -1724,11 +3184,12 @@ extension PeerDetails: Equatable, Hashable { public struct FfiConverterTypePeerDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PeerDetails { - return try PeerDetails( - nodeId: FfiConverterTypePublicKey.read(from: &buf), - address: FfiConverterTypeSocketAddress.read(from: &buf), - isPersisted: FfiConverterBool.read(from: &buf), - isConnected: FfiConverterBool.read(from: &buf) + return + try PeerDetails( + nodeId: FfiConverterTypePublicKey.read(from: &buf), + address: FfiConverterTypeSocketAddress.read(from: &buf), + isPersisted: FfiConverterBool.read(from: &buf), + isConnected: FfiConverterBool.read(from: &buf) ) } @@ -1749,41 +3210,89 @@ public func FfiConverterTypePeerDetails_lower(_ value: PeerDetails) -> RustBuffe return FfiConverterTypePeerDetails.lower(value) } + +public struct RoutingFees { + public var baseMsat: UInt32 + public var proportionalMillionths: UInt32 + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + baseMsat: UInt32, + proportionalMillionths: UInt32) { + self.baseMsat = baseMsat + self.proportionalMillionths = proportionalMillionths + } +} + + +extension RoutingFees: Equatable, Hashable { + public static func ==(lhs: RoutingFees, rhs: RoutingFees) -> Bool { + if lhs.baseMsat != rhs.baseMsat { + return false + } + if lhs.proportionalMillionths != rhs.proportionalMillionths { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(baseMsat) + hasher.combine(proportionalMillionths) + } +} + + +public struct FfiConverterTypeRoutingFees: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> RoutingFees { + return + try RoutingFees( + baseMsat: FfiConverterUInt32.read(from: &buf), + proportionalMillionths: FfiConverterUInt32.read(from: &buf) + ) + } + + public static func write(_ value: RoutingFees, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.baseMsat, into: &buf) + FfiConverterUInt32.write(value.proportionalMillionths, into: &buf) + } +} + + +public func FfiConverterTypeRoutingFees_lift(_ buf: RustBuffer) throws -> RoutingFees { + return try FfiConverterTypeRoutingFees.lift(buf) +} + +public func FfiConverterTypeRoutingFees_lower(_ value: RoutingFees) -> RustBuffer { + return FfiConverterTypeRoutingFees.lower(value) +} + + public enum BuildError { - // Simple error enums only carry a message case InvalidSeedBytes(message: String) - // Simple error enums only carry a message case InvalidSeedFile(message: String) - // Simple error enums only carry a message case InvalidSystemTime(message: String) - // Simple error enums only carry a message case InvalidChannelMonitor(message: String) - // Simple error enums only carry a message case InvalidListeningAddresses(message: String) - // Simple error enums only carry a message case ReadFailed(message: String) - // Simple error enums only carry a message case WriteFailed(message: String) - // Simple error enums only carry a message case StoragePathAccessFailed(message: String) - // Simple error enums only carry a message case KvStoreSetupFailed(message: String) - // Simple error enums only carry a message case WalletSetupFailed(message: String) - // Simple error enums only carry a message case LoggerSetupFailed(message: String) @@ -1835,72 +3344,244 @@ public struct FfiConverterTypeBuildError: FfiConverterRustBuffer { message: try FfiConverterString.read(from: &buf) ) - case 9: return .KvStoreSetupFailed( - message: try FfiConverterString.read(from: &buf) - ) + case 9: return .KvStoreSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 10: return .WalletSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 11: return .LoggerSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: BuildError, into buf: inout [UInt8]) { + switch value { + + + + + case .InvalidSeedBytes(_ /* message is ignored*/): + writeInt(&buf, Int32(1)) + case .InvalidSeedFile(_ /* message is ignored*/): + writeInt(&buf, Int32(2)) + case .InvalidSystemTime(_ /* message is ignored*/): + writeInt(&buf, Int32(3)) + case .InvalidChannelMonitor(_ /* message is ignored*/): + writeInt(&buf, Int32(4)) + case .InvalidListeningAddresses(_ /* message is ignored*/): + writeInt(&buf, Int32(5)) + case .ReadFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(6)) + case .WriteFailed(_ /* message 
is ignored*/): + writeInt(&buf, Int32(7)) + case .StoragePathAccessFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(8)) + case .KvStoreSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(9)) + case .WalletSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(10)) + case .LoggerSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(11)) + + + } + } +} + + +extension BuildError: Equatable, Hashable {} + +extension BuildError: Error { } + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum ClosureReason { + + case counterpartyForceClosed( + peerMsg: UntrustedString + ) + case holderForceClosed + case legacyCooperativeClosure + case counterpartyInitiatedCooperativeClosure + case locallyInitiatedCooperativeClosure + case commitmentTxConfirmed + case fundingTimedOut + case processingError( + err: String + ) + case disconnectedPeer + case outdatedChannelManager + case counterpartyCoopClosedUnfundedChannel + case fundingBatchClosure + case htlCsTimedOut +} + +public struct FfiConverterTypeClosureReason: FfiConverterRustBuffer { + typealias SwiftType = ClosureReason + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ClosureReason { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .counterpartyForceClosed( + peerMsg: try FfiConverterTypeUntrustedString.read(from: &buf) + ) + + case 2: return .holderForceClosed + + case 3: return .legacyCooperativeClosure + + case 4: return .counterpartyInitiatedCooperativeClosure + + case 5: return .locallyInitiatedCooperativeClosure + + case 6: return .commitmentTxConfirmed + + case 7: return .fundingTimedOut - case 10: return .WalletSetupFailed( - message: try FfiConverterString.read(from: &buf) + case 8: return .processingError( + err: try FfiConverterString.read(from: &buf) ) - case 11: return .LoggerSetupFailed( - message: try FfiConverterString.read(from: &buf) - ) + case 9: return .disconnectedPeer + + case 10: return .outdatedChannelManager + + case 11: return .counterpartyCoopClosedUnfundedChannel + + case 12: return .fundingBatchClosure + + case 13: return .htlCsTimedOut - default: throw UniffiInternalError.unexpectedEnumCase } } - public static func write(_ value: BuildError, into buf: inout [UInt8]) { + public static func write(_ value: ClosureReason, into buf: inout [UInt8]) { switch value { - - - case .InvalidSeedBytes(_ /* message is ignored*/): + case let .counterpartyForceClosed(peerMsg): writeInt(&buf, Int32(1)) - case .InvalidSeedFile(_ /* message is ignored*/): + FfiConverterTypeUntrustedString.write(peerMsg, into: &buf) + + + case .holderForceClosed: writeInt(&buf, Int32(2)) - case .InvalidSystemTime(_ /* message is ignored*/): + + + case .legacyCooperativeClosure: writeInt(&buf, Int32(3)) - case .InvalidChannelMonitor(_ /* message is ignored*/): + + + case .counterpartyInitiatedCooperativeClosure: writeInt(&buf, Int32(4)) - case .InvalidListeningAddresses(_ /* message is ignored*/): + + + case .locallyInitiatedCooperativeClosure: writeInt(&buf, Int32(5)) - case .ReadFailed(_ /* message is ignored*/): + + + case .commitmentTxConfirmed: writeInt(&buf, Int32(6)) - case .WriteFailed(_ /* message is ignored*/): + + + case .fundingTimedOut: writeInt(&buf, Int32(7)) - case .StoragePathAccessFailed(_ /* message is ignored*/): + + + case let .processingError(err): writeInt(&buf, Int32(8)) - case .KvStoreSetupFailed(_ /* message is ignored*/): + 
FfiConverterString.write(err, into: &buf) + + + case .disconnectedPeer: writeInt(&buf, Int32(9)) - case .WalletSetupFailed(_ /* message is ignored*/): + + + case .outdatedChannelManager: writeInt(&buf, Int32(10)) - case .LoggerSetupFailed(_ /* message is ignored*/): + + + case .counterpartyCoopClosedUnfundedChannel: writeInt(&buf, Int32(11)) - + + + case .fundingBatchClosure: + writeInt(&buf, Int32(12)) + + + case .htlCsTimedOut: + writeInt(&buf, Int32(13)) } } } -extension BuildError: Equatable, Hashable {} +public func FfiConverterTypeClosureReason_lift(_ buf: RustBuffer) throws -> ClosureReason { + return try FfiConverterTypeClosureReason.lift(buf) +} + +public func FfiConverterTypeClosureReason_lower(_ value: ClosureReason) -> RustBuffer { + return FfiConverterTypeClosureReason.lower(value) +} + + +extension ClosureReason: Equatable, Hashable {} + -extension BuildError: Error { } // Note that we don't yet support `indirect` for enums. // See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. public enum Event { - case paymentSuccessful(paymentHash: PaymentHash) - case paymentFailed(paymentHash: PaymentHash) - case paymentReceived(paymentHash: PaymentHash, amountMsat: UInt64) - case channelPending(channelId: ChannelId, userChannelId: UserChannelId, formerTemporaryChannelId: ChannelId, counterpartyNodeId: PublicKey, fundingTxo: OutPoint) - case channelReady(channelId: ChannelId, userChannelId: UserChannelId, counterpartyNodeId: PublicKey?) - case channelClosed(channelId: ChannelId, userChannelId: UserChannelId, counterpartyNodeId: PublicKey?) + case paymentSuccessful( + paymentId: PaymentId?, + paymentHash: PaymentHash, + feePaidMsat: UInt64? + ) + case paymentFailed( + paymentId: PaymentId?, + paymentHash: PaymentHash, + reason: PaymentFailureReason? + ) + case paymentReceived( + paymentId: PaymentId?, + paymentHash: PaymentHash, + amountMsat: UInt64 + ) + case paymentClaimable( + paymentId: PaymentId, + paymentHash: PaymentHash, + claimableAmountMsat: UInt64, + claimDeadline: UInt32? + ) + case channelPending( + channelId: ChannelId, + userChannelId: UserChannelId, + formerTemporaryChannelId: ChannelId, + counterpartyNodeId: PublicKey, + fundingTxo: OutPoint + ) + case channelReady( + channelId: ChannelId, + userChannelId: UserChannelId, + counterpartyNodeId: PublicKey? + ) + case channelClosed( + channelId: ChannelId, + userChannelId: UserChannelId, + counterpartyNodeId: PublicKey?, + reason: ClosureReason? 
+ ) } public struct FfiConverterTypeEvent: FfiConverterRustBuffer { @@ -1911,19 +3592,31 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { switch variant { case 1: return .paymentSuccessful( - paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + feePaidMsat: try FfiConverterOptionUInt64.read(from: &buf) ) case 2: return .paymentFailed( - paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + reason: try FfiConverterOptionTypePaymentFailureReason.read(from: &buf) ) case 3: return .paymentReceived( + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), amountMsat: try FfiConverterUInt64.read(from: &buf) ) - case 4: return .channelPending( + case 4: return .paymentClaimable( + paymentId: try FfiConverterTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + claimableAmountMsat: try FfiConverterUInt64.read(from: &buf), + claimDeadline: try FfiConverterOptionUInt32.read(from: &buf) + ) + + case 5: return .channelPending( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), formerTemporaryChannelId: try FfiConverterTypeChannelId.read(from: &buf), @@ -1931,16 +3624,17 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { fundingTxo: try FfiConverterTypeOutPoint.read(from: &buf) ) - case 5: return .channelReady( + case 6: return .channelReady( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf) ) - case 6: return .channelClosed( + case 7: return .channelClosed( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), - counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf) + counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf), + reason: try FfiConverterOptionTypeClosureReason.read(from: &buf) ) default: throw UniffiInternalError.unexpectedEnumCase @@ -1951,24 +3645,37 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { switch value { - case let .paymentSuccessful(paymentHash): + case let .paymentSuccessful(paymentId,paymentHash,feePaidMsat): writeInt(&buf, Int32(1)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterOptionUInt64.write(feePaidMsat, into: &buf) - case let .paymentFailed(paymentHash): + case let .paymentFailed(paymentId,paymentHash,reason): writeInt(&buf, Int32(2)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterOptionTypePaymentFailureReason.write(reason, into: &buf) - case let .paymentReceived(paymentHash,amountMsat): + case let .paymentReceived(paymentId,paymentHash,amountMsat): writeInt(&buf, Int32(3)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) FfiConverterUInt64.write(amountMsat, into: &buf) - case let 
.channelPending(channelId,userChannelId,formerTemporaryChannelId,counterpartyNodeId,fundingTxo): + case let .paymentClaimable(paymentId,paymentHash,claimableAmountMsat,claimDeadline): writeInt(&buf, Int32(4)) + FfiConverterTypePaymentId.write(paymentId, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterUInt64.write(claimableAmountMsat, into: &buf) + FfiConverterOptionUInt32.write(claimDeadline, into: &buf) + + + case let .channelPending(channelId,userChannelId,formerTemporaryChannelId,counterpartyNodeId,fundingTxo): + writeInt(&buf, Int32(5)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterTypeChannelId.write(formerTemporaryChannelId, into: &buf) @@ -1977,17 +3684,18 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { case let .channelReady(channelId,userChannelId,counterpartyNodeId): - writeInt(&buf, Int32(5)) + writeInt(&buf, Int32(6)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterOptionTypePublicKey.write(counterpartyNodeId, into: &buf) - case let .channelClosed(channelId,userChannelId,counterpartyNodeId): - writeInt(&buf, Int32(6)) + case let .channelClosed(channelId,userChannelId,counterpartyNodeId,reason): + writeInt(&buf, Int32(7)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterOptionTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterOptionTypeClosureReason.write(reason, into: &buf) } } @@ -2007,6 +3715,176 @@ extension Event: Equatable, Hashable {} +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
+public enum LightningBalance { + + case claimableOnChannelClose( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64 + ) + case claimableAwaitingConfirmations( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + confirmationHeight: UInt32 + ) + case contentiousClaimable( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + timeoutHeight: UInt32, + paymentHash: PaymentHash, + paymentPreimage: PaymentPreimage + ) + case maybeTimeoutClaimableHtlc( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + claimableHeight: UInt32, + paymentHash: PaymentHash + ) + case maybePreimageClaimableHtlc( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + expiryHeight: UInt32, + paymentHash: PaymentHash + ) + case counterpartyRevokedOutputClaimable( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64 + ) +} + +public struct FfiConverterTypeLightningBalance: FfiConverterRustBuffer { + typealias SwiftType = LightningBalance + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LightningBalance { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .claimableOnChannelClose( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 2: return .claimableAwaitingConfirmations( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + confirmationHeight: try FfiConverterUInt32.read(from: &buf) + ) + + case 3: return .contentiousClaimable( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + timeoutHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + paymentPreimage: try FfiConverterTypePaymentPreimage.read(from: &buf) + ) + + case 4: return .maybeTimeoutClaimableHtlc( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + claimableHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + ) + + case 5: return .maybePreimageClaimableHtlc( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + expiryHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + ) + + case 6: return .counterpartyRevokedOutputClaimable( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: LightningBalance, into buf: inout [UInt8]) { + switch value { + + + case let .claimableOnChannelClose(channelId,counterpartyNodeId,amountSatoshis): + writeInt(&buf, Int32(1)) + FfiConverterTypeChannelId.write(channelId, 
into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .claimableAwaitingConfirmations(channelId,counterpartyNodeId,amountSatoshis,confirmationHeight): + writeInt(&buf, Int32(2)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(confirmationHeight, into: &buf) + + + case let .contentiousClaimable(channelId,counterpartyNodeId,amountSatoshis,timeoutHeight,paymentHash,paymentPreimage): + writeInt(&buf, Int32(3)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(timeoutHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterTypePaymentPreimage.write(paymentPreimage, into: &buf) + + + case let .maybeTimeoutClaimableHtlc(channelId,counterpartyNodeId,amountSatoshis,claimableHeight,paymentHash): + writeInt(&buf, Int32(4)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(claimableHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + + + case let .maybePreimageClaimableHtlc(channelId,counterpartyNodeId,amountSatoshis,expiryHeight,paymentHash): + writeInt(&buf, Int32(5)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(expiryHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + + + case let .counterpartyRevokedOutputClaimable(channelId,counterpartyNodeId,amountSatoshis): + writeInt(&buf, Int32(6)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + } + } +} + + +public func FfiConverterTypeLightningBalance_lift(_ buf: RustBuffer) throws -> LightningBalance { + return try FfiConverterTypeLightningBalance.lift(buf) +} + +public func FfiConverterTypeLightningBalance_lower(_ value: LightningBalance) -> RustBuffer { + return FfiConverterTypeLightningBalance.lower(value) +} + + +extension LightningBalance: Equatable, Hashable {} + + + // Note that we don't yet support `indirect` for enums. // See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
public enum LogLevel { @@ -2153,100 +4031,103 @@ extension Network: Equatable, Hashable {} + public enum NodeError { - // Simple error enums only carry a message case AlreadyRunning(message: String) - // Simple error enums only carry a message case NotRunning(message: String) - // Simple error enums only carry a message case OnchainTxCreationFailed(message: String) - // Simple error enums only carry a message case ConnectionFailed(message: String) - // Simple error enums only carry a message case InvoiceCreationFailed(message: String) - // Simple error enums only carry a message + case InvoiceRequestCreationFailed(message: String) + + case OfferCreationFailed(message: String) + + case RefundCreationFailed(message: String) + case PaymentSendingFailed(message: String) - // Simple error enums only carry a message case ProbeSendingFailed(message: String) - // Simple error enums only carry a message case ChannelCreationFailed(message: String) - // Simple error enums only carry a message case ChannelClosingFailed(message: String) - // Simple error enums only carry a message case ChannelConfigUpdateFailed(message: String) - // Simple error enums only carry a message case PersistenceFailed(message: String) - // Simple error enums only carry a message case FeerateEstimationUpdateFailed(message: String) - // Simple error enums only carry a message + case FeerateEstimationUpdateTimeout(message: String) + case WalletOperationFailed(message: String) - // Simple error enums only carry a message + case WalletOperationTimeout(message: String) + case OnchainTxSigningFailed(message: String) - // Simple error enums only carry a message case MessageSigningFailed(message: String) - // Simple error enums only carry a message case TxSyncFailed(message: String) - // Simple error enums only carry a message + case TxSyncTimeout(message: String) + case GossipUpdateFailed(message: String) - // Simple error enums only carry a message + case GossipUpdateTimeout(message: String) + + case LiquidityRequestFailed(message: String) + case InvalidAddress(message: String) - // Simple error enums only carry a message case InvalidSocketAddress(message: String) - // Simple error enums only carry a message case InvalidPublicKey(message: String) - // Simple error enums only carry a message case InvalidSecretKey(message: String) - // Simple error enums only carry a message + case InvalidOfferId(message: String) + + case InvalidNodeId(message: String) + + case InvalidPaymentId(message: String) + case InvalidPaymentHash(message: String) - // Simple error enums only carry a message case InvalidPaymentPreimage(message: String) - // Simple error enums only carry a message case InvalidPaymentSecret(message: String) - // Simple error enums only carry a message case InvalidAmount(message: String) - // Simple error enums only carry a message case InvalidInvoice(message: String) - // Simple error enums only carry a message + case InvalidOffer(message: String) + + case InvalidRefund(message: String) + case InvalidChannelId(message: String) - // Simple error enums only carry a message case InvalidNetwork(message: String) - // Simple error enums only carry a message case DuplicatePayment(message: String) - // Simple error enums only carry a message + case UnsupportedCurrency(message: String) + case InsufficientFunds(message: String) + case LiquiditySourceUnavailable(message: String) + + case LiquidityFeeTooHigh(message: String) + fileprivate static func uniffiErrorHandler(_ error: RustBuffer) throws -> Error { return try 
FfiConverterTypeNodeError.lift(error) @@ -2284,103 +4165,167 @@ public struct FfiConverterTypeNodeError: FfiConverterRustBuffer { message: try FfiConverterString.read(from: &buf) ) - case 6: return .PaymentSendingFailed( + case 6: return .InvoiceRequestCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 7: return .OfferCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 8: return .RefundCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 9: return .PaymentSendingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 10: return .ProbeSendingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 11: return .ChannelCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 12: return .ChannelClosingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 13: return .ChannelConfigUpdateFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 14: return .PersistenceFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 15: return .FeerateEstimationUpdateFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 16: return .FeerateEstimationUpdateTimeout( + message: try FfiConverterString.read(from: &buf) + ) + + case 17: return .WalletOperationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 18: return .WalletOperationTimeout( + message: try FfiConverterString.read(from: &buf) + ) + + case 19: return .OnchainTxSigningFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 20: return .MessageSigningFailed( message: try FfiConverterString.read(from: &buf) ) - case 7: return .ProbeSendingFailed( + case 21: return .TxSyncFailed( message: try FfiConverterString.read(from: &buf) ) - case 8: return .ChannelCreationFailed( + case 22: return .TxSyncTimeout( message: try FfiConverterString.read(from: &buf) ) - case 9: return .ChannelClosingFailed( + case 23: return .GossipUpdateFailed( message: try FfiConverterString.read(from: &buf) ) - case 10: return .ChannelConfigUpdateFailed( + case 24: return .GossipUpdateTimeout( message: try FfiConverterString.read(from: &buf) ) - case 11: return .PersistenceFailed( + case 25: return .LiquidityRequestFailed( message: try FfiConverterString.read(from: &buf) ) - case 12: return .FeerateEstimationUpdateFailed( + case 26: return .InvalidAddress( message: try FfiConverterString.read(from: &buf) ) - case 13: return .WalletOperationFailed( + case 27: return .InvalidSocketAddress( message: try FfiConverterString.read(from: &buf) ) - case 14: return .OnchainTxSigningFailed( + case 28: return .InvalidPublicKey( message: try FfiConverterString.read(from: &buf) ) - case 15: return .MessageSigningFailed( + case 29: return .InvalidSecretKey( message: try FfiConverterString.read(from: &buf) ) - case 16: return .TxSyncFailed( + case 30: return .InvalidOfferId( message: try FfiConverterString.read(from: &buf) ) - case 17: return .GossipUpdateFailed( + case 31: return .InvalidNodeId( message: try FfiConverterString.read(from: &buf) ) - case 18: return .InvalidAddress( + case 32: return .InvalidPaymentId( message: try FfiConverterString.read(from: &buf) ) - case 19: return .InvalidSocketAddress( + case 33: return .InvalidPaymentHash( message: try FfiConverterString.read(from: &buf) ) - case 20: return .InvalidPublicKey( + case 34: return .InvalidPaymentPreimage( message: try FfiConverterString.read(from: &buf) ) - case 21: return .InvalidSecretKey( 
+ case 35: return .InvalidPaymentSecret( message: try FfiConverterString.read(from: &buf) ) - case 22: return .InvalidPaymentHash( + case 36: return .InvalidAmount( message: try FfiConverterString.read(from: &buf) ) - case 23: return .InvalidPaymentPreimage( + case 37: return .InvalidInvoice( message: try FfiConverterString.read(from: &buf) ) - case 24: return .InvalidPaymentSecret( + case 38: return .InvalidOffer( message: try FfiConverterString.read(from: &buf) ) - case 25: return .InvalidAmount( + case 39: return .InvalidRefund( message: try FfiConverterString.read(from: &buf) ) - case 26: return .InvalidInvoice( + case 40: return .InvalidChannelId( message: try FfiConverterString.read(from: &buf) ) - case 27: return .InvalidChannelId( + case 41: return .InvalidNetwork( message: try FfiConverterString.read(from: &buf) ) - case 28: return .InvalidNetwork( + case 42: return .DuplicatePayment( message: try FfiConverterString.read(from: &buf) ) - case 29: return .DuplicatePayment( + case 43: return .UnsupportedCurrency( message: try FfiConverterString.read(from: &buf) ) - case 30: return .InsufficientFunds( + case 44: return .InsufficientFunds( + message: try FfiConverterString.read(from: &buf) + ) + + case 45: return .LiquiditySourceUnavailable( + message: try FfiConverterString.read(from: &buf) + ) + + case 46: return .LiquidityFeeTooHigh( message: try FfiConverterString.read(from: &buf) ) @@ -2405,56 +4350,88 @@ public struct FfiConverterTypeNodeError: FfiConverterRustBuffer { writeInt(&buf, Int32(4)) case .InvoiceCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(5)) - case .PaymentSendingFailed(_ /* message is ignored*/): + case .InvoiceRequestCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(6)) - case .ProbeSendingFailed(_ /* message is ignored*/): + case .OfferCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(7)) - case .ChannelCreationFailed(_ /* message is ignored*/): + case .RefundCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(8)) - case .ChannelClosingFailed(_ /* message is ignored*/): + case .PaymentSendingFailed(_ /* message is ignored*/): writeInt(&buf, Int32(9)) + case .ProbeSendingFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(10)) + case .ChannelCreationFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(11)) + case .ChannelClosingFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(12)) case .ChannelConfigUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(10)) + writeInt(&buf, Int32(13)) case .PersistenceFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(11)) + writeInt(&buf, Int32(14)) case .FeerateEstimationUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(12)) + writeInt(&buf, Int32(15)) + case .FeerateEstimationUpdateTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(16)) case .WalletOperationFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(13)) + writeInt(&buf, Int32(17)) + case .WalletOperationTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(18)) case .OnchainTxSigningFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(14)) + writeInt(&buf, Int32(19)) case .MessageSigningFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(15)) + writeInt(&buf, Int32(20)) case .TxSyncFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(16)) + writeInt(&buf, Int32(21)) + case .TxSyncTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(22)) case .GossipUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(17)) + 
writeInt(&buf, Int32(23)) + case .GossipUpdateTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(24)) + case .LiquidityRequestFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(25)) case .InvalidAddress(_ /* message is ignored*/): - writeInt(&buf, Int32(18)) + writeInt(&buf, Int32(26)) case .InvalidSocketAddress(_ /* message is ignored*/): - writeInt(&buf, Int32(19)) + writeInt(&buf, Int32(27)) case .InvalidPublicKey(_ /* message is ignored*/): - writeInt(&buf, Int32(20)) + writeInt(&buf, Int32(28)) case .InvalidSecretKey(_ /* message is ignored*/): - writeInt(&buf, Int32(21)) + writeInt(&buf, Int32(29)) + case .InvalidOfferId(_ /* message is ignored*/): + writeInt(&buf, Int32(30)) + case .InvalidNodeId(_ /* message is ignored*/): + writeInt(&buf, Int32(31)) + case .InvalidPaymentId(_ /* message is ignored*/): + writeInt(&buf, Int32(32)) case .InvalidPaymentHash(_ /* message is ignored*/): - writeInt(&buf, Int32(22)) + writeInt(&buf, Int32(33)) case .InvalidPaymentPreimage(_ /* message is ignored*/): - writeInt(&buf, Int32(23)) + writeInt(&buf, Int32(34)) case .InvalidPaymentSecret(_ /* message is ignored*/): - writeInt(&buf, Int32(24)) + writeInt(&buf, Int32(35)) case .InvalidAmount(_ /* message is ignored*/): - writeInt(&buf, Int32(25)) + writeInt(&buf, Int32(36)) case .InvalidInvoice(_ /* message is ignored*/): - writeInt(&buf, Int32(26)) + writeInt(&buf, Int32(37)) + case .InvalidOffer(_ /* message is ignored*/): + writeInt(&buf, Int32(38)) + case .InvalidRefund(_ /* message is ignored*/): + writeInt(&buf, Int32(39)) case .InvalidChannelId(_ /* message is ignored*/): - writeInt(&buf, Int32(27)) + writeInt(&buf, Int32(40)) case .InvalidNetwork(_ /* message is ignored*/): - writeInt(&buf, Int32(28)) + writeInt(&buf, Int32(41)) case .DuplicatePayment(_ /* message is ignored*/): - writeInt(&buf, Int32(29)) + writeInt(&buf, Int32(42)) + case .UnsupportedCurrency(_ /* message is ignored*/): + writeInt(&buf, Int32(43)) case .InsufficientFunds(_ /* message is ignored*/): - writeInt(&buf, Int32(30)) + writeInt(&buf, Int32(44)) + case .LiquiditySourceUnavailable(_ /* message is ignored*/): + writeInt(&buf, Int32(45)) + case .LiquidityFeeTooHigh(_ /* message is ignored*/): + writeInt(&buf, Int32(46)) } @@ -2518,6 +4495,224 @@ extension PaymentDirection: Equatable, Hashable {} +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
+public enum PaymentFailureReason { + + case recipientRejected + case userAbandoned + case retriesExhausted + case paymentExpired + case routeNotFound + case unexpectedError +} + +public struct FfiConverterTypePaymentFailureReason: FfiConverterRustBuffer { + typealias SwiftType = PaymentFailureReason + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentFailureReason { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .recipientRejected + + case 2: return .userAbandoned + + case 3: return .retriesExhausted + + case 4: return .paymentExpired + + case 5: return .routeNotFound + + case 6: return .unexpectedError + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PaymentFailureReason, into buf: inout [UInt8]) { + switch value { + + + case .recipientRejected: + writeInt(&buf, Int32(1)) + + + case .userAbandoned: + writeInt(&buf, Int32(2)) + + + case .retriesExhausted: + writeInt(&buf, Int32(3)) + + + case .paymentExpired: + writeInt(&buf, Int32(4)) + + + case .routeNotFound: + writeInt(&buf, Int32(5)) + + + case .unexpectedError: + writeInt(&buf, Int32(6)) + + } + } +} + + +public func FfiConverterTypePaymentFailureReason_lift(_ buf: RustBuffer) throws -> PaymentFailureReason { + return try FfiConverterTypePaymentFailureReason.lift(buf) +} + +public func FfiConverterTypePaymentFailureReason_lower(_ value: PaymentFailureReason) -> RustBuffer { + return FfiConverterTypePaymentFailureReason.lower(value) +} + + +extension PaymentFailureReason: Equatable, Hashable {} + + + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum PaymentKind { + + case onchain + case bolt11( + hash: PaymentHash, + preimage: PaymentPreimage?, + secret: PaymentSecret? + ) + case bolt11Jit( + hash: PaymentHash, + preimage: PaymentPreimage?, + secret: PaymentSecret?, + lspFeeLimits: LspFeeLimits + ) + case bolt12Offer( + hash: PaymentHash?, + preimage: PaymentPreimage?, + secret: PaymentSecret?, + offerId: OfferId + ) + case bolt12Refund( + hash: PaymentHash?, + preimage: PaymentPreimage?, + secret: PaymentSecret? + ) + case spontaneous( + hash: PaymentHash, + preimage: PaymentPreimage? 
+ ) +} + +public struct FfiConverterTypePaymentKind: FfiConverterRustBuffer { + typealias SwiftType = PaymentKind + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentKind { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .onchain + + case 2: return .bolt11( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf) + ) + + case 3: return .bolt11Jit( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf), + lspFeeLimits: try FfiConverterTypeLSPFeeLimits.read(from: &buf) + ) + + case 4: return .bolt12Offer( + hash: try FfiConverterOptionTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf), + offerId: try FfiConverterTypeOfferId.read(from: &buf) + ) + + case 5: return .bolt12Refund( + hash: try FfiConverterOptionTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf) + ) + + case 6: return .spontaneous( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PaymentKind, into buf: inout [UInt8]) { + switch value { + + + case .onchain: + writeInt(&buf, Int32(1)) + + + case let .bolt11(hash,preimage,secret): + writeInt(&buf, Int32(2)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + + + case let .bolt11Jit(hash,preimage,secret,lspFeeLimits): + writeInt(&buf, Int32(3)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + FfiConverterTypeLSPFeeLimits.write(lspFeeLimits, into: &buf) + + + case let .bolt12Offer(hash,preimage,secret,offerId): + writeInt(&buf, Int32(4)) + FfiConverterOptionTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + FfiConverterTypeOfferId.write(offerId, into: &buf) + + + case let .bolt12Refund(hash,preimage,secret): + writeInt(&buf, Int32(5)) + FfiConverterOptionTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + + + case let .spontaneous(hash,preimage): + writeInt(&buf, Int32(6)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + + } + } +} + + +public func FfiConverterTypePaymentKind_lift(_ buf: RustBuffer) throws -> PaymentKind { + return try FfiConverterTypePaymentKind.lift(buf) +} + +public func FfiConverterTypePaymentKind_lower(_ value: PaymentKind) -> RustBuffer { + return FfiConverterTypePaymentKind.lower(value) +} + + +extension PaymentKind: Equatable, Hashable {} + + + // Note that we don't yet support `indirect` for enums. 
// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. public enum PaymentStatus { @@ -2564,21 +4759,224 @@ public struct FfiConverterTypePaymentStatus: FfiConverterRustBuffer { } -public func FfiConverterTypePaymentStatus_lift(_ buf: RustBuffer) throws -> PaymentStatus { - return try FfiConverterTypePaymentStatus.lift(buf) -} +public func FfiConverterTypePaymentStatus_lift(_ buf: RustBuffer) throws -> PaymentStatus { + return try FfiConverterTypePaymentStatus.lift(buf) +} + +public func FfiConverterTypePaymentStatus_lower(_ value: PaymentStatus) -> RustBuffer { + return FfiConverterTypePaymentStatus.lower(value) +} + + +extension PaymentStatus: Equatable, Hashable {} + + + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum PendingSweepBalance { + + case pendingBroadcast( + channelId: ChannelId?, + amountSatoshis: UInt64 + ) + case broadcastAwaitingConfirmation( + channelId: ChannelId?, + latestBroadcastHeight: UInt32, + latestSpendingTxid: Txid, + amountSatoshis: UInt64 + ) + case awaitingThresholdConfirmations( + channelId: ChannelId?, + latestSpendingTxid: Txid, + confirmationHash: BlockHash, + confirmationHeight: UInt32, + amountSatoshis: UInt64 + ) +} + +public struct FfiConverterTypePendingSweepBalance: FfiConverterRustBuffer { + typealias SwiftType = PendingSweepBalance + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PendingSweepBalance { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .pendingBroadcast( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 2: return .broadcastAwaitingConfirmation( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + latestBroadcastHeight: try FfiConverterUInt32.read(from: &buf), + latestSpendingTxid: try FfiConverterTypeTxid.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 3: return .awaitingThresholdConfirmations( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + latestSpendingTxid: try FfiConverterTypeTxid.read(from: &buf), + confirmationHash: try FfiConverterTypeBlockHash.read(from: &buf), + confirmationHeight: try FfiConverterUInt32.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PendingSweepBalance, into buf: inout [UInt8]) { + switch value { + + + case let .pendingBroadcast(channelId,amountSatoshis): + writeInt(&buf, Int32(1)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .broadcastAwaitingConfirmation(channelId,latestBroadcastHeight,latestSpendingTxid,amountSatoshis): + writeInt(&buf, Int32(2)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterUInt32.write(latestBroadcastHeight, into: &buf) + FfiConverterTypeTxid.write(latestSpendingTxid, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .awaitingThresholdConfirmations(channelId,latestSpendingTxid,confirmationHash,confirmationHeight,amountSatoshis): + writeInt(&buf, Int32(3)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterTypeTxid.write(latestSpendingTxid, into: &buf) + FfiConverterTypeBlockHash.write(confirmationHash, into: &buf) + 
FfiConverterUInt32.write(confirmationHeight, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + } + } +} + + +public func FfiConverterTypePendingSweepBalance_lift(_ buf: RustBuffer) throws -> PendingSweepBalance { + return try FfiConverterTypePendingSweepBalance.lift(buf) +} + +public func FfiConverterTypePendingSweepBalance_lower(_ value: PendingSweepBalance) -> RustBuffer { + return FfiConverterTypePendingSweepBalance.lower(value) +} + + +extension PendingSweepBalance: Equatable, Hashable {} + + + +fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { + typealias SwiftType = UInt16? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt16.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt16.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { + typealias SwiftType = UInt32? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt32.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt32.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { + typealias SwiftType = UInt64? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt64.write(value, into: &buf) + } -public func FfiConverterTypePaymentStatus_lower(_ value: PaymentStatus) -> RustBuffer { - return FfiConverterTypePaymentStatus.lower(value) + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt64.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } } +fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { + typealias SwiftType = String? -extension PaymentStatus: Equatable, Hashable {} + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterString.write(value, into: &buf) + } + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterString.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} +fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { + typealias SwiftType = ChannelConfig? -fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { - typealias SwiftType = UInt16? 
+ public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeChannelConfig.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeChannelConfig.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypeAnchorChannelsConfig: FfiConverterRustBuffer { + typealias SwiftType = AnchorChannelsConfig? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2586,20 +4984,20 @@ fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt16.write(value, into: &buf) + FfiConverterTypeAnchorChannelsConfig.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt16.read(from: &buf) + case 1: return try FfiConverterTypeAnchorChannelsConfig.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { - typealias SwiftType = UInt32? +fileprivate struct FfiConverterOptionTypeChannelInfo: FfiConverterRustBuffer { + typealias SwiftType = ChannelInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2607,20 +5005,20 @@ fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt32.write(value, into: &buf) + FfiConverterTypeChannelInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt32.read(from: &buf) + case 1: return try FfiConverterTypeChannelInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { - typealias SwiftType = UInt64? +fileprivate struct FfiConverterOptionTypeChannelUpdateInfo: FfiConverterRustBuffer { + typealias SwiftType = ChannelUpdateInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2628,20 +5026,20 @@ fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt64.write(value, into: &buf) + FfiConverterTypeChannelUpdateInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt64.read(from: &buf) + case 1: return try FfiConverterTypeChannelUpdateInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { - typealias SwiftType = String? +fileprivate struct FfiConverterOptionTypeNodeAnnouncementInfo: FfiConverterRustBuffer { + typealias SwiftType = NodeAnnouncementInfo? 
public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2649,20 +5047,20 @@ fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterString.write(value, into: &buf) + FfiConverterTypeNodeAnnouncementInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterString.read(from: &buf) + case 1: return try FfiConverterTypeNodeAnnouncementInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { - typealias SwiftType = ChannelConfig? +fileprivate struct FfiConverterOptionTypeNodeInfo: FfiConverterRustBuffer { + typealias SwiftType = NodeInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2670,13 +5068,13 @@ fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterTypeChannelConfig.write(value, into: &buf) + FfiConverterTypeNodeInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterTypeChannelConfig.read(from: &buf) + case 1: return try FfiConverterTypeNodeInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } @@ -2724,6 +5122,27 @@ fileprivate struct FfiConverterOptionTypePaymentDetails: FfiConverterRustBuffer } } +fileprivate struct FfiConverterOptionTypeClosureReason: FfiConverterRustBuffer { + typealias SwiftType = ClosureReason? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeClosureReason.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeClosureReason.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionTypeEvent: FfiConverterRustBuffer { typealias SwiftType = Event? @@ -2745,6 +5164,27 @@ fileprivate struct FfiConverterOptionTypeEvent: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterOptionTypePaymentFailureReason: FfiConverterRustBuffer { + typealias SwiftType = PaymentFailureReason? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentFailureReason.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentFailureReason.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionSequenceTypeSocketAddress: FfiConverterRustBuffer { typealias SwiftType = [SocketAddress]? 
@@ -2766,6 +5206,69 @@ fileprivate struct FfiConverterOptionSequenceTypeSocketAddress: FfiConverterRust } } +fileprivate struct FfiConverterOptionTypeChannelId: FfiConverterRustBuffer { + typealias SwiftType = ChannelId? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeChannelId.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeChannelId.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypePaymentHash: FfiConverterRustBuffer { + typealias SwiftType = PaymentHash? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentHash.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentHash.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypePaymentId: FfiConverterRustBuffer { + typealias SwiftType = PaymentId? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentId.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentId.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionTypePaymentPreimage: FfiConverterRustBuffer { typealias SwiftType = PaymentPreimage? 
@@ -2851,6 +5354,28 @@ fileprivate struct FfiConverterSequenceUInt8: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterSequenceUInt64: FfiConverterRustBuffer { + typealias SwiftType = [UInt64] + + public static func write(_ value: [UInt64], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterUInt64.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [UInt64] { + let len: Int32 = try readInt(&buf) + var seq = [UInt64]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterUInt64.read(from: &buf)) + } + return seq + } +} + fileprivate struct FfiConverterSequenceTypeChannelDetails: FfiConverterRustBuffer { typealias SwiftType = [ChannelDetails] @@ -2917,6 +5442,72 @@ fileprivate struct FfiConverterSequenceTypePeerDetails: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterSequenceTypeLightningBalance: FfiConverterRustBuffer { + typealias SwiftType = [LightningBalance] + + public static func write(_ value: [LightningBalance], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypeLightningBalance.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [LightningBalance] { + let len: Int32 = try readInt(&buf) + var seq = [LightningBalance]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypeLightningBalance.read(from: &buf)) + } + return seq + } +} + +fileprivate struct FfiConverterSequenceTypePendingSweepBalance: FfiConverterRustBuffer { + typealias SwiftType = [PendingSweepBalance] + + public static func write(_ value: [PendingSweepBalance], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypePendingSweepBalance.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [PendingSweepBalance] { + let len: Int32 = try readInt(&buf) + var seq = [PendingSweepBalance]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypePendingSweepBalance.read(from: &buf)) + } + return seq + } +} + +fileprivate struct FfiConverterSequenceTypeNodeId: FfiConverterRustBuffer { + typealias SwiftType = [NodeId] + + public static func write(_ value: [NodeId], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypeNodeId.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [NodeId] { + let len: Int32 = try readInt(&buf) + var seq = [NodeId]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypeNodeId.read(from: &buf)) + } + return seq + } +} + fileprivate struct FfiConverterSequenceTypePublicKey: FfiConverterRustBuffer { typealias SwiftType = [PublicKey] @@ -2966,32 +5557,66 @@ fileprivate struct FfiConverterSequenceTypeSocketAddress: FfiConverterRustBuffer * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. 
*/ -public typealias Address = String -public struct FfiConverterTypeAddress: FfiConverter { - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Address { +public typealias Address = String +public struct FfiConverterTypeAddress: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Address { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Address, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Address { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Address) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeAddress_lift(_ value: RustBuffer) throws -> Address { + return try FfiConverterTypeAddress.lift(value) +} + +public func FfiConverterTypeAddress_lower(_ value: Address) -> RustBuffer { + return FfiConverterTypeAddress.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias BlockHash = String +public struct FfiConverterTypeBlockHash: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BlockHash { return try FfiConverterString.read(from: &buf) } - public static func write(_ value: Address, into buf: inout [UInt8]) { + public static func write(_ value: BlockHash, into buf: inout [UInt8]) { return FfiConverterString.write(value, into: &buf) } - public static func lift(_ value: RustBuffer) throws -> Address { + public static func lift(_ value: RustBuffer) throws -> BlockHash { return try FfiConverterString.lift(value) } - public static func lower(_ value: Address) -> RustBuffer { + public static func lower(_ value: BlockHash) -> RustBuffer { return FfiConverterString.lower(value) } } -public func FfiConverterTypeAddress_lift(_ value: RustBuffer) throws -> Address { - return try FfiConverterTypeAddress.lift(value) +public func FfiConverterTypeBlockHash_lift(_ value: RustBuffer) throws -> BlockHash { + return try FfiConverterTypeBlockHash.lift(value) } -public func FfiConverterTypeAddress_lower(_ value: Address) -> RustBuffer { - return FfiConverterTypeAddress.lower(value) +public func FfiConverterTypeBlockHash_lower(_ value: BlockHash) -> RustBuffer { + return FfiConverterTypeBlockHash.lower(value) } @@ -3030,6 +5655,40 @@ public func FfiConverterTypeBolt11Invoice_lower(_ value: Bolt11Invoice) -> RustB +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias Bolt12Invoice = String +public struct FfiConverterTypeBolt12Invoice: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt12Invoice { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Bolt12Invoice, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Bolt12Invoice { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Bolt12Invoice) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeBolt12Invoice_lift(_ value: RustBuffer) throws -> Bolt12Invoice { + return try FfiConverterTypeBolt12Invoice.lift(value) +} + +public func FfiConverterTypeBolt12Invoice_lower(_ value: Bolt12Invoice) -> RustBuffer { + return FfiConverterTypeBolt12Invoice.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3098,6 +5757,108 @@ public func FfiConverterTypeMnemonic_lower(_ value: Mnemonic) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias NodeId = String +public struct FfiConverterTypeNodeId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: NodeId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> NodeId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: NodeId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeNodeId_lift(_ value: RustBuffer) throws -> NodeId { + return try FfiConverterTypeNodeId.lift(value) +} + +public func FfiConverterTypeNodeId_lower(_ value: NodeId) -> RustBuffer { + return FfiConverterTypeNodeId.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias Offer = String +public struct FfiConverterTypeOffer: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Offer { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Offer, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Offer { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Offer) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeOffer_lift(_ value: RustBuffer) throws -> Offer { + return try FfiConverterTypeOffer.lift(value) +} + +public func FfiConverterTypeOffer_lower(_ value: Offer) -> RustBuffer { + return FfiConverterTypeOffer.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias OfferId = String +public struct FfiConverterTypeOfferId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OfferId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: OfferId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> OfferId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: OfferId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeOfferId_lift(_ value: RustBuffer) throws -> OfferId { + return try FfiConverterTypeOfferId.lift(value) +} + +public func FfiConverterTypeOfferId_lower(_ value: OfferId) -> RustBuffer { + return FfiConverterTypeOfferId.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3132,6 +5893,40 @@ public func FfiConverterTypePaymentHash_lower(_ value: PaymentHash) -> RustBuffe +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias PaymentId = String +public struct FfiConverterTypePaymentId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: PaymentId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> PaymentId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: PaymentId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypePaymentId_lift(_ value: RustBuffer) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift(value) +} + +public func FfiConverterTypePaymentId_lower(_ value: PaymentId) -> RustBuffer { + return FfiConverterTypePaymentId.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3234,6 +6029,40 @@ public func FfiConverterTypePublicKey_lower(_ value: PublicKey) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias Refund = String +public struct FfiConverterTypeRefund: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Refund { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Refund, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Refund { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Refund) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeRefund_lift(_ value: RustBuffer) throws -> Refund { + return try FfiConverterTypeRefund.lift(value) +} + +public func FfiConverterTypeRefund_lower(_ value: Refund) -> RustBuffer { + return FfiConverterTypeRefund.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3302,6 +6131,40 @@ public func FfiConverterTypeTxid_lower(_ value: Txid) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias UntrustedString = String +public struct FfiConverterTypeUntrustedString: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> UntrustedString { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: UntrustedString, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> UntrustedString { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: UntrustedString) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeUntrustedString_lift(_ value: RustBuffer) throws -> UntrustedString { + return try FfiConverterTypeUntrustedString.lift(value) +} + +public func FfiConverterTypeUntrustedString_lower(_ value: UntrustedString) -> RustBuffer { + return FfiConverterTypeUntrustedString.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3334,7 +6197,71 @@ public func FfiConverterTypeUserChannelId_lower(_ value: UserChannelId) -> RustB return FfiConverterTypeUserChannelId.lower(value) } +private let UNIFFI_RUST_FUTURE_POLL_READY: Int8 = 0 +private let UNIFFI_RUST_FUTURE_POLL_MAYBE_READY: Int8 = 1 + +fileprivate func uniffiRustCallAsync( + rustFutureFunc: () -> UnsafeMutableRawPointer, + pollFunc: (UnsafeMutableRawPointer, @escaping UniFfiRustFutureContinuation, UnsafeMutableRawPointer) -> (), + completeFunc: (UnsafeMutableRawPointer, UnsafeMutablePointer) -> F, + freeFunc: (UnsafeMutableRawPointer) -> (), + liftFunc: (F) throws -> T, + errorHandler: ((RustBuffer) throws -> Error)? 
+) async throws -> T { + // Make sure to call uniffiEnsureInitialized() since future creation doesn't have a + // RustCallStatus param, so doesn't use makeRustCall() + uniffiEnsureInitialized() + let rustFuture = rustFutureFunc() + defer { + freeFunc(rustFuture) + } + var pollResult: Int8; + repeat { + pollResult = await withUnsafeContinuation { + pollFunc(rustFuture, uniffiFutureContinuationCallback, ContinuationHolder($0).toOpaque()) + } + } while pollResult != UNIFFI_RUST_FUTURE_POLL_READY + + return try liftFunc(makeRustCall( + { completeFunc(rustFuture, $0) }, + errorHandler: errorHandler + )) +} + +// Callback handlers for an async calls. These are invoked by Rust when the future is ready. They +// lift the return value or error and resume the suspended function. +fileprivate func uniffiFutureContinuationCallback(ptr: UnsafeMutableRawPointer, pollResult: Int8) { + ContinuationHolder.fromOpaque(ptr).resume(pollResult) +} + +// Wraps UnsafeContinuation in a class so that we can use reference counting when passing it across +// the FFI +fileprivate class ContinuationHolder { + let continuation: UnsafeContinuation + + init(_ continuation: UnsafeContinuation) { + self.continuation = continuation + } + + func resume(_ pollResult: Int8) { + self.continuation.resume(returning: pollResult) + } + + func toOpaque() -> UnsafeMutableRawPointer { + return Unmanaged.passRetained(self).toOpaque() + } + static func fromOpaque(_ ptr: UnsafeRawPointer) -> ContinuationHolder { + return Unmanaged.fromOpaque(ptr).takeRetainedValue() + } +} +public func defaultConfig() -> Config { + return try! FfiConverterTypeConfig.lift( + try! rustCall() { + uniffi_ldk_node_fn_func_default_config($0) +} + ) +} public func generateEntropyMnemonic() -> Mnemonic { return try! FfiConverterTypeMnemonic.lift( try! rustCall() { @@ -3352,22 +6279,82 @@ private enum InitializationResult { // the code inside is only computed once. 
private var initializationResult: InitializationResult { // Get the bindings contract version from our ComponentInterface - let bindings_contract_version = 24 + let bindings_contract_version = 25 // Get the scaffolding contract version by calling the into the dylib let scaffolding_contract_version = ffi_ldk_node_uniffi_contract_version() if bindings_contract_version != scaffolding_contract_version { return InitializationResult.contractVersionMismatch } - if (uniffi_ldk_node_checksum_func_generate_entropy_mnemonic() != 7251) { + if (uniffi_ldk_node_checksum_func_default_config() != 55381) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_func_generate_entropy_mnemonic() != 59926) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_claim_for_hash() != 52848) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_fail_for_hash() != 24516) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive() != 28084) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_for_hash() != 3869) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount() != 51453) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount_for_hash() != 21975) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount_via_jit_channel() != 58617) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_via_jit_channel() != 50555) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send() != 35346) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_probes() != 39625) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_probes_using_amount() != 25010) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_using_amount() != 15471) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_initiate_refund() != 15379) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_receive() != 20864) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_receive_variable_amount() != 10863) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_request_refund_payment() != 61945) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_send() != 15282) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_send_using_amount() != 21384) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_builder_build() != 785) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_build() != 48294) { + if (uniffi_ldk_node_checksum_method_builder_build_with_fs_store() != 61304) { return 
InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_entropy_bip39_mnemonic() != 35659) { + if (uniffi_ldk_node_checksum_method_builder_set_entropy_bip39_mnemonic() != 827) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_bytes() != 26795) { + if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_bytes() != 44799) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_path() != 64056) { @@ -3382,10 +6369,13 @@ private var initializationResult: InitializationResult { if (uniffi_ldk_node_checksum_method_builder_set_gossip_source_rgs() != 64312) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_listening_addresses() != 18689) { + if (uniffi_ldk_node_checksum_method_builder_set_liquidity_source_lsps2() != 2667) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_builder_set_listening_addresses() != 14051) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_network() != 23321) { + if (uniffi_ldk_node_checksum_method_builder_set_network() != 27539) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_method_builder_set_storage_dir_path() != 59019) { @@ -3427,109 +6417,124 @@ private var initializationResult: InitializationResult { if (uniffi_ldk_node_checksum_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit() != 16864) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_close_channel() != 7103) { + if (uniffi_ldk_node_checksum_method_networkgraph_channel() != 38070) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_list_channels() != 4693) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_list_nodes() != 36715) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_node() != 48925) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_node_bolt11_payment() != 41402) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_node_bolt12_payment() != 49254) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_connect() != 5558) { + if (uniffi_ldk_node_checksum_method_node_close_channel() != 62479) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_connect_open_channel() != 59688) { + if (uniffi_ldk_node_checksum_method_node_config() != 7511) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_disconnect() != 43777) { + if (uniffi_ldk_node_checksum_method_node_connect() != 34120) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_event_handled() != 28838) { + if (uniffi_ldk_node_checksum_method_node_connect_open_channel() != 64763) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_is_running() != 18666) { + if (uniffi_ldk_node_checksum_method_node_disconnect() != 43538) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_channels() != 43935) { + if 
(uniffi_ldk_node_checksum_method_node_event_handled() != 47939) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_payments() != 58666) { + if (uniffi_ldk_node_checksum_method_node_force_close_channel() != 44813) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_peers() != 22735) { + if (uniffi_ldk_node_checksum_method_node_list_balances() != 57528) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_listening_addresses() != 49178) { + if (uniffi_ldk_node_checksum_method_node_list_channels() != 7954) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_new_onchain_address() != 34077) { + if (uniffi_ldk_node_checksum_method_node_list_payments() != 35002) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_next_event() != 41150) { + if (uniffi_ldk_node_checksum_method_node_list_peers() != 14889) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_node_id() != 39688) { + if (uniffi_ldk_node_checksum_method_node_listening_addresses() != 2665) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_payment() != 35034) { + if (uniffi_ldk_node_checksum_method_node_network_graph() != 2695) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_receive_payment() != 4148) { + if (uniffi_ldk_node_checksum_method_node_next_event() != 7682) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_receive_variable_amount_payment() != 25209) { + if (uniffi_ldk_node_checksum_method_node_next_event_async() != 25426) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_remove_payment() != 12673) { + if (uniffi_ldk_node_checksum_method_node_node_id() != 51489) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_all_to_onchain_address() != 24019) { + if (uniffi_ldk_node_checksum_method_node_onchain_payment() != 6092) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment() != 56244) { + if (uniffi_ldk_node_checksum_method_node_payment() != 60296) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_probes() != 38405) { + if (uniffi_ldk_node_checksum_method_node_remove_payment() != 47952) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_probes_using_amount() != 340) { + if (uniffi_ldk_node_checksum_method_node_sign_message() != 51392) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_using_amount() != 42148) { + if (uniffi_ldk_node_checksum_method_node_spontaneous_payment() != 37403) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_spontaneous_payment() != 39235) { + if (uniffi_ldk_node_checksum_method_node_start() != 58480) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_spontaneous_payment_probes() != 52786) { + if (uniffi_ldk_node_checksum_method_node_status() != 55952) { return InitializationResult.apiChecksumMismatch } - if 
(uniffi_ldk_node_checksum_method_ldknode_send_to_onchain_address() != 43948) { + if (uniffi_ldk_node_checksum_method_node_stop() != 42188) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_sign_message() != 40383) { + if (uniffi_ldk_node_checksum_method_node_sync_wallets() != 32474) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_spendable_onchain_balance_sats() != 1454) { + if (uniffi_ldk_node_checksum_method_node_update_channel_config() != 38109) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_start() != 44334) { + if (uniffi_ldk_node_checksum_method_node_verify_signature() != 20486) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_stop() != 16494) { + if (uniffi_ldk_node_checksum_method_node_wait_next_event() != 55101) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_sync_wallets() != 2447) { + if (uniffi_ldk_node_checksum_method_onchainpayment_new_address() != 37251) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_total_onchain_balance_sats() != 44607) { + if (uniffi_ldk_node_checksum_method_onchainpayment_send_all_to_address() != 20046) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_update_channel_config() != 13742) { + if (uniffi_ldk_node_checksum_method_onchainpayment_send_to_address() != 34782) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_verify_signature() != 35778) { + if (uniffi_ldk_node_checksum_method_spontaneouspayment_send() != 16613) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_wait_next_event() != 34319) { + if (uniffi_ldk_node_checksum_method_spontaneouspayment_send_probes() != 25937) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_constructor_builder_from_config() != 56443) { + if (uniffi_ldk_node_checksum_constructor_builder_from_config() != 64393) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_constructor_builder_new() != 48442) { @@ -3551,4 +6556,4 @@ private func uniffiEnsureInitialized() { case .apiChecksumMismatch: fatalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") } -} +} \ No newline at end of file From 182bc214568e44399609d32a738ccfed2d998ad2 Mon Sep 17 00:00:00 2001 From: elnosh Date: Fri, 21 Jun 2024 15:02:03 -0500 Subject: [PATCH 84/89] Fix inbound capacity comment --- src/types.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/types.rs b/src/types.rs index 89a14e163..0c2faeb78 100644 --- a/src/types.rs +++ b/src/types.rs @@ -200,11 +200,11 @@ pub struct ChannelDetails { /// balance is not available for inclusion in new outbound HTLCs). This further does not include /// any pending outgoing HTLCs which are awaiting some other resolution to be sent. pub outbound_capacity_msat: u64, - /// The available outbound capacity for sending HTLCs to the remote peer. + /// The available inbound capacity for receiving HTLCs from the remote peer. /// /// The amount does not include any pending HTLCs which are not yet resolved /// (and, thus, whose balance is not available for inclusion in new inbound HTLCs). 
This further - /// does not include any pending outgoing HTLCs which are awaiting some other resolution to be + /// does not include any pending incoming HTLCs which are awaiting some other resolution to be /// sent. pub inbound_capacity_msat: u64, /// The number of required confirmations on the funding transactions before the funding is From 28d2d7740113ccf520bccf1aa370ae5d38b57475 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Tue, 27 Feb 2024 22:09:57 +0100 Subject: [PATCH 85/89] Pass custom TLVs with keysend requests. --- src/types.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/types.rs b/src/types.rs index 0c2faeb78..37371ac84 100644 --- a/src/types.rs +++ b/src/types.rs @@ -450,3 +450,12 @@ impl Default for ChannelConfig { LdkChannelConfig::default().into() } } + +/// Custom TLV entry. +pub struct TlvEntry { + /// Type number. + pub r#type: u64, + + /// Serialized value. + pub value: Vec, +} From 37db97dd45a958eb9457fb6c77d9c69b11c3935d Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:11:20 +0100 Subject: [PATCH 86/89] Add specific error code for invalid custom TLVs. --- src/error.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/error.rs b/src/error.rs index a8671d9a7..b98df2f49 100644 --- a/src/error.rs +++ b/src/error.rs @@ -85,6 +85,8 @@ pub enum Error { InvalidChannelId, /// The given network is invalid. InvalidNetwork, + /// The custom TLVs are invalid. + InvalidCustomTlv, /// A payment with the given hash has already been initiated. DuplicatePayment, /// The provided offer was denonminated in an unsupported currency. From caa08d7dac09fb38a13bd0ae31a7a1b75404706a Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:29:55 +0100 Subject: [PATCH 87/89] Fix Display trait implementation for Error. --- src/error.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/error.rs b/src/error.rs index b98df2f49..15aa5a960 100644 --- a/src/error.rs +++ b/src/error.rs @@ -149,6 +149,7 @@ impl fmt::Display for Error { Self::InvalidRefund => write!(f, "The given refund is invalid."), Self::InvalidChannelId => write!(f, "The given channel ID is invalid."), Self::InvalidNetwork => write!(f, "The given network is invalid."), + Self::InvalidCustomTlv => write!(f, "The given custom TLVs are invalid."), Self::DuplicatePayment => { write!(f, "A payment with the given hash has already been initiated.") }, From 047b167fc5b10b80610d762660351040006bbe08 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Fri, 1 Mar 2024 21:36:03 +0100 Subject: [PATCH 88/89] Add the new error code to UDL. --- bindings/ldk_node.udl | 1 + 1 file changed, 1 insertion(+) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 2723db573..7c84dab96 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -191,6 +191,7 @@ enum NodeError { "InvalidRefund", "InvalidChannelId", "InvalidNetwork", + "InvalidCustomTlv", "DuplicatePayment", "UnsupportedCurrency", "InsufficientFunds", From f70873759cc0fbd0167d7fe1b669ddb1f54ff849 Mon Sep 17 00:00:00 2001 From: Roman Dmitrienko Date: Tue, 25 Jun 2024 14:28:02 +0200 Subject: [PATCH 89/89] Merge main and re-apply changes. 
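This re-applies the custom TLV keysend changes on top of the new payment
handler API: SpontaneousPayment::send now takes a list of TlvEntry values
that are attached to the keysend onion as custom TLV records, and incoming
keysend payments surface them again via PaymentKind::Spontaneous.

A minimal caller-side sketch of the new API follows. It assumes an already
built and started ldk_node::Node bound to `node` and a reachable peer's
public key in `peer_node_id`; both names are placeholders for illustration
and are not part of this patch:

    use ldk_node::TlvEntry;

    // TLV type numbers mirror the ones used in the updated test below.
    let custom_tlvs = vec![
        TlvEntry { r#type: 131073, value: vec![0x00, 0x11, 0x22, 0x33] },
        TlvEntry { r#type: 131075, value: vec![0xaa, 0xbb] },
    ];

    // Returns the PaymentId on success; TLVs rejected by LDK are reported
    // as Error::InvalidCustomTlv.
    let payment_id = node
        .spontaneous_payment()
        .send(2_500_000, peer_node_id, custom_tlvs)
        .unwrap();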
--- bindings/ldk_node.udl | 9 +++++++-- src/event.rs | 14 ++++++++++++-- src/lib.rs | 2 +- src/payment/spontaneous.rs | 17 ++++++++++++++--- src/payment/store.rs | 9 ++++++--- src/types.rs | 7 +++++++ tests/common/mod.rs | 9 +++++++-- 7 files changed, 54 insertions(+), 13 deletions(-) diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 7c84dab96..9fdc794fa 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -134,7 +134,7 @@ interface Bolt12Payment { interface SpontaneousPayment { [Throws=NodeError] - PaymentId send(u64 amount_msat, PublicKey node_id); + PaymentId send(u64 amount_msat, PublicKey node_id, sequence custom_tlvs); [Throws=NodeError] void send_probes(u64 amount_msat, PublicKey node_id); }; @@ -274,7 +274,7 @@ interface PaymentKind { Bolt11Jit(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret, LSPFeeLimits lsp_fee_limits); Bolt12Offer(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret, OfferId offer_id); Bolt12Refund(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret); - Spontaneous(PaymentHash hash, PaymentPreimage? preimage); + Spontaneous(PaymentHash hash, PaymentPreimage? preimage, sequence custom_tlvs); }; enum PaymentDirection { @@ -404,6 +404,11 @@ enum LogLevel { "Error", }; +dictionary TlvEntry { + u64 type; + sequence value; +}; + interface NetworkGraph { sequence list_channels(); ChannelInfo? channel(u64 short_channel_id); diff --git a/src/event.rs b/src/event.rs index 838df4230..1c192547e 100644 --- a/src/event.rs +++ b/src/event.rs @@ -2,7 +2,7 @@ use crate::types::{DynStore, Sweeper, Wallet}; use crate::{ hex_utils, BumpTransactionEventHandler, ChannelManager, Config, Error, Graph, PeerInfo, - PeerStore, UserChannelId, + PeerStore, TlvEntry, UserChannelId, }; use crate::connection::ConnectionManager; @@ -463,7 +463,7 @@ where via_channel_id: _, via_user_channel_id: _, claim_deadline, - onion_fields: _, + onion_fields, counterparty_skimmed_fee_msat, } => { let payment_id = PaymentId(payment_hash.0); @@ -639,10 +639,20 @@ where payment_preimage }, PaymentPurpose::SpontaneousPayment(preimage) => { + let custom_tlvs = onion_fields + .map(|of| { + of.custom_tlvs() + .iter() + .map(|(t, v)| TlvEntry { r#type: *t, value: v.clone() }) + .collect() + }) + .unwrap_or_default(); + // Since it's spontaneous, we insert it now into our store. 
let kind = PaymentKind::Spontaneous { hash: payment_hash, preimage: Some(preimage), + custom_tlvs, }; let payment = PaymentDetails::new( diff --git a/src/lib.rs b/src/lib.rs index de2a0badf..e594a3a80 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -139,7 +139,7 @@ use types::{ Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, FeeEstimator, Graph, KeysManager, PeerManager, Router, Scorer, Sweeper, Wallet, }; -pub use types::{ChannelDetails, PeerDetails, UserChannelId}; +pub use types::{ChannelDetails, PeerDetails, TlvEntry, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs index 482df42d9..4c904c80d 100644 --- a/src/payment/spontaneous.rs +++ b/src/payment/spontaneous.rs @@ -6,7 +6,7 @@ use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::payment::store::{ PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, }; -use crate::types::{ChannelManager, KeysManager}; +use crate::types::{ChannelManager, KeysManager, TlvEntry}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; use lightning::ln::{PaymentHash, PaymentPreimage}; @@ -42,7 +42,9 @@ impl SpontaneousPayment { } /// Send a spontaneous, aka. "keysend", payment - pub fn send(&self, amount_msat: u64, node_id: PublicKey) -> Result { + pub fn send( + &self, amount_msat: u64, node_id: PublicKey, custom_tlvs: Vec, + ) -> Result { let rt_lock = self.runtime.read().unwrap(); if rt_lock.is_none() { return Err(Error::NotRunning); @@ -65,7 +67,14 @@ impl SpontaneousPayment { PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta), amount_msat, ); - let recipient_fields = RecipientOnionFields::spontaneous_empty(); + let recipient_fields = RecipientOnionFields::spontaneous_empty() + .with_custom_tlvs( + custom_tlvs.iter().map(|tlv| (tlv.r#type, tlv.value.clone())).collect(), + ) + .map_err(|_| { + log_error!(self.logger, "Payment error: invalid custom TLVs."); + Error::InvalidCustomTlv + })?; match self.channel_manager.send_spontaneous_payment_with_retry( Some(payment_preimage), @@ -80,6 +89,7 @@ impl SpontaneousPayment { let kind = PaymentKind::Spontaneous { hash: payment_hash, preimage: Some(payment_preimage), + custom_tlvs, }; let payment = PaymentDetails::new( payment_id, @@ -101,6 +111,7 @@ impl SpontaneousPayment { let kind = PaymentKind::Spontaneous { hash: payment_hash, preimage: Some(payment_preimage), + custom_tlvs, }; let payment = PaymentDetails::new( payment_id, diff --git a/src/payment/store.rs b/src/payment/store.rs index eb3ac091f..983c90f20 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -3,7 +3,7 @@ use crate::io::{ PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, }; use crate::logger::{log_error, Logger}; -use crate::types::DynStore; +use crate::types::{DynStore, TlvEntry}; use crate::Error; use lightning::ln::channelmanager::PaymentId; @@ -123,7 +123,7 @@ impl Readable for PaymentDetails { PaymentKind::Bolt11 { hash, preimage, secret } } } else { - PaymentKind::Spontaneous { hash, preimage } + PaymentKind::Spontaneous { hash, preimage, custom_tlvs: Vec::new() } } }; @@ -231,6 +231,8 @@ pub enum PaymentKind { hash: PaymentHash, /// The pre-image used by the payment. preimage: Option, + /// Custom TLVs. 
+ custom_tlvs: Vec, }, } @@ -256,6 +258,7 @@ impl_writeable_tlv_based_enum!(PaymentKind, (8, Spontaneous) => { (0, hash, required), (2, preimage, option), + (131072, custom_tlvs, optional_vec), }, (10, Bolt12Refund) => { (0, hash, option), @@ -659,7 +662,7 @@ mod tests { ); match spontaneous_decoded.kind { - PaymentKind::Spontaneous { hash: h, preimage: p } => { + PaymentKind::Spontaneous { hash: h, preimage: p, custom_tlvs: _ } => { assert_eq!(hash, h); assert_eq!(preimage, p); }, diff --git a/src/types.rs b/src/types.rs index 37371ac84..78cb00a82 100644 --- a/src/types.rs +++ b/src/types.rs @@ -2,6 +2,7 @@ use crate::logger::FilesystemLogger; use crate::message_handler::NodeCustomMessageHandler; use lightning::chain::chainmonitor; +use lightning::impl_writeable_tlv_based; use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails; use lightning::ln::msgs::RoutingMessageHandler; use lightning::ln::msgs::SocketAddress; @@ -452,6 +453,7 @@ impl Default for ChannelConfig { } /// Custom TLV entry. +#[derive(Debug, Clone, PartialEq, Eq)] pub struct TlvEntry { /// Type number. pub r#type: u64, @@ -459,3 +461,8 @@ pub struct TlvEntry { /// Serialized value. pub value: Vec, } + +impl_writeable_tlv_based!(TlvEntry, { + (0, r#type, required), + (1, value, required), +}); diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 5959bd58e..a85aa3032 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -5,6 +5,7 @@ use ldk_node::io::sqlite_store::SqliteStore; use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus}; use ldk_node::{ Builder, Config, Event, LightningBalance, LogLevel, Node, NodeError, PendingSweepBalance, + TlvEntry, }; use lightning::ln::msgs::SocketAddress; @@ -694,8 +695,12 @@ pub(crate) fn do_channel_full_cycle( // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; - let keysend_payment_id = - node_a.spontaneous_payment().send(keysend_amount_msat, node_b.node_id()).unwrap(); + let tlv1 = TlvEntry { r#type: 131073, value: vec![0x00, 0x11, 0x22, 0x33] }; + let tlv2 = TlvEntry { r#type: 131075, value: vec![0xaa, 0xbb] }; + let keysend_payment_id = node_a + .spontaneous_payment() + .send(keysend_amount_msat, node_b.node_id(), vec![tlv1, tlv2]) + .unwrap(); expect_event!(node_a, PaymentSuccessful); let received_keysend_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => {