diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 8d1266cc7..4837063b7 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -48,7 +48,8 @@ jobs: cargo update -p hashlink --precise "0.8.2" --verbose # hashlink 0.8.3 requires hashbrown 0.14, requiring 1.64.0 cargo update -p proptest --precise "1.2.0" --verbose # proptest 1.3.0 requires rustc 1.64.0 cargo update -p regex --precise "1.9.6" --verbose # regex 1.10.0 requires rustc 1.65.0 - cargo update -p home --precise "0.5.5" --verbose # home v0.5.9, requires rustc 1.70 or newer + cargo update -p home --precise "0.5.5" --verbose # home v0.5.9 requires rustc 1.70 or newer + cargo update -p url --precise "2.5.0" --verbose # url v2.5.1 requires rustc 1.67 or newer - name: Set RUSTFLAGS to deny warnings if: "matrix.toolchain == 'stable'" run: echo "RUSTFLAGS=-D warnings" >> "$GITHUB_ENV" diff --git a/CHANGELOG.md b/CHANGELOG.md index e92a7c9e8..3a3894899 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,55 @@ +# 0.3.0 - June 21, 2024 + +This third minor release notably adds support for BOLT12 payments, Anchor +channels, and sourcing inbound liquidity via LSPS2 just-in-time channels. + +## Feature and API updates +- Support for creating and paying BOLT12 offers and refunds has been added (#265). +- Support for Anchor channels has been added (#141). +- Support for sourcing inbound liquidity via LSPS2 just-in-time (JIT) channels has been added (#223). +- The node's local view of the network graph can now be accessed via interface methods (#293). +- A new `next_event_async` method was added that allows polling the event queue asynchronously (#224). +- A `default_config` method was introduced that allows retrieving sane default values, also in bindings (#242). +- The `PaymentFailed` and `ChannelClosed` events now include `reason` fields (#260). +- All available balances outside of channel balances are now exposed via a unified `list_balances` interface method (#250). +- The maximum in-flight HTLC value has been bumped to 100% of the channel capacity for private outbound channels (#303) and, if JIT channel support is enabled, for inbound channels (#262). +- The fee paid is now exposed via the `PaymentSuccessful` event (#271). +- A `status` method has been added allowing retrieval of information about the `Node`'s status (#272). +- `Node` no longer takes a `KVStore` type parameter, allowing use of the filesystem storage backend in bindings (#244). +- The payment APIs have been restructured to use per-type (`bolt11`, `onchain`, `bolt12`, ..) payment handlers, which can be accessed via corresponding `Node::{type}_payment` methods (#270); see the usage sketch below. +- Fully resolved channel monitors are now eventually moved to an archive location (#307). +- The ability to register and claim from custom payment hashes generated outside of LDK Node has been added (#308). + +## Bug Fixes +- Node announcements are now correctly only broadcast if we have any public, sufficiently confirmed channels (#248, #314). +- Falling back to default fee values is now disallowed on mainnet, ensuring we won't start up without a successful fee cache update (#249). +- Persisted peers are now correctly reconnected after startup (#265). +- Concurrent connection attempts to the same peer no longer override each other (#266). +- Several steps have been taken to reduce the risk of blocking node operation on wallet syncing in the face of unresponsive Esplora services (#281). + +## Compatibility Notes +- LDK has been updated to version 0.0.123 (#291).
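For orientation, here is a minimal sketch of the restructured per-type payment API, mirroring the updated README example further down in this diff. The builder configuration (network and Esplora endpoint), the re-export import paths, and the `INVOICE_STR` placeholder are illustrative assumptions rather than prescribed values:

```rust
use std::str::FromStr;

use ldk_node::bitcoin::Network;
use ldk_node::lightning_invoice::Bolt11Invoice;
use ldk_node::Builder;

fn main() {
    // Assumed setup: network and Esplora endpoint are illustrative values.
    let mut builder = Builder::new();
    builder.set_network(Network::Testnet);
    builder.set_esplora_server("https://blockstream.info/testnet/api".to_string());

    let node = builder.build().unwrap();
    node.start().unwrap();

    // On-chain operations now live on the `OnchainPayment` handler.
    let funding_address = node.onchain_payment().new_address().unwrap();
    println!("Funding address: {}", funding_address);

    // .. fund the address, open a channel, handle events ..

    // BOLT11 operations now live on the `Bolt11Payment` handler.
    let invoice = Bolt11Invoice::from_str("INVOICE_STR").unwrap();
    node.bolt11_payment().send(&invoice).unwrap();

    node.stop().unwrap();
}
```

The same handler-based calls show up in the updated README and in the Kotlin and Python binding tests later in this diff (e.g. `onchainPayment().newAddress()` and `bolt11_payment().receive(...)`).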
+ +In total, this release features 54 files changed, 7282 insertions, 2410 deletions in 165 commits from 3 authors, in alphabetical order: + +- Elias Rohrer +- jbesraa +- Srikanth Iyengar + +# 0.2.2 - May 21, 2024 + +This is a bugfix release that reestablishes compatibility of Swift packages +with Xcode 15.3 and later. + +## Bug Fixes + +- Swift bindings can now again be built using Xcode 15.3 and later (#294) + +In total, this release features 5 files changed, 66 insertions, 2 deletions +in 2 commits from 1 author, in alphabetical order: + +- Elias Rohrer + # 0.2.1 - Jan 26, 2024 This is a bugfix release bumping the used LDK and BDK dependencies to the diff --git a/Cargo.toml b/Cargo.toml index 7e4a3b348..d4a87b2a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ldk-node" -version = "0.2.1" +version = "0.3.0" authors = ["Elias Rohrer <dev@tnull.de>"] homepage = "https://lightningdevkit.org/" license = "MIT OR Apache-2.0" @@ -28,14 +28,14 @@ panic = 'abort' # Abort on panic default = [] [dependencies] -lightning = { version = "0.0.121", features = ["std"] } -lightning-invoice = { version = "0.29.0" } -lightning-net-tokio = { version = "0.0.121" } -lightning-persister = { version = "0.0.121" } -lightning-background-processor = { version = "0.0.121", features = ["futures"] } -lightning-rapid-gossip-sync = { version = "0.0.121" } -lightning-transaction-sync = { version = "0.0.121", features = ["esplora-async-https", "time"] } -lightning-liquidity = { version = "0.1.0-alpha.1", features = ["std"] } +lightning = { version = "0.0.123", features = ["std"] } +lightning-invoice = { version = "0.31.0" } +lightning-net-tokio = { version = "0.0.123" } +lightning-persister = { version = "0.0.123" } +lightning-background-processor = { version = "0.0.123", features = ["futures"] } +lightning-rapid-gossip-sync = { version = "0.0.123" } +lightning-transaction-sync = { version = "0.0.123", features = ["esplora-async-https", "time"] } +lightning-liquidity = { version = "0.1.0-alpha.4", features = ["std"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std"] } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main" } @@ -64,8 +64,7 @@ bip39 = "2.0.0" rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } -futures = "0.3" -tokio = { version = "1", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } +tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync" ] } esplora-client = { version = "0.6", default-features = false } libc = "0.2" uniffi = { version = "0.26.0", features = ["build"], optional = true } @@ -78,7 +77,7 @@ prost = { version = "0.11.6", default-features = false} winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] -lightning = { version = "0.0.121", features = ["std", "_test_utils"] } +lightning = { version = "0.0.123", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } electrum-client = { version = "0.15.1", default-features = true } bitcoincore-rpc = { version = "0.17.0", default-features = false } diff --git a/Package.swift b/Package.swift index c7d203713..67c02dd8b 100644 --- a/Package.swift +++ b/Package.swift @@ -3,8 +3,8 @@ import PackageDescription -let tag = "v0.2.1" -let checksum = 
"cca3d5f380c3c216c22ac892cb04a792f3982730e570df71d824462f14c1350e" +let tag = "v0.3.0" +let checksum = "07c8741768956bf1a51d1c25f751b5e29d1ae9ee2fd786c4282031c9a8a92f0c" let url = "https://github.com/lightningdevkit/ldk-node/releases/download/\(tag)/LDKNodeFFI.xcframework.zip" let package = Package( diff --git a/README.md b/README.md index 270bf25a7..4078ce67b 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ A ready-to-go Lightning node library built using [LDK][ldk] and [BDK][bdk]. LDK Node is a self-custodial Lightning node in library form. Its central goal is to provide a small, simple, and straightforward interface that enables users to easily set up and run a Lightning node with an integrated on-chain wallet. While minimalism is at its core, LDK Node aims to be sufficiently modular and configurable to be useful for a variety of use cases. ## Getting Started -The primary abstraction of the library is the [`Node`][api_docs_node], which can be retrieved by setting up and configuring a [`Builder`][api_docs_builder] to your liking and calling one of the `build` methods. `Node` can then be controlled via commands such as `start`, `stop`, `connect_open_channel`, `send_payment`, etc. +The primary abstraction of the library is the [`Node`][api_docs_node], which can be retrieved by setting up and configuring a [`Builder`][api_docs_builder] to your liking and calling one of the `build` methods. `Node` can then be controlled via commands such as `start`, `stop`, `connect_open_channel`, `send`, etc. ```rust use ldk_node::Builder; @@ -31,7 +31,7 @@ fn main() { node.start().unwrap(); - let funding_address = node.new_onchain_address(); + let funding_address = node.onchain_payment().new_address(); // .. fund address .. @@ -44,7 +44,7 @@ fn main() { node.event_handled(); let invoice = Bolt11Invoice::from_str("INVOICE_STR").unwrap(); - node.send_payment(&invoice).unwrap(); + node.bolt11_payment().send(&invoice).unwrap(); node.stop().unwrap(); } diff --git a/bindings/kotlin/ldk-node-android/gradle.properties b/bindings/kotlin/ldk-node-android/gradle.properties index f4f8cd571..70f5823b6 100644 --- a/bindings/kotlin/ldk-node-android/gradle.properties +++ b/bindings/kotlin/ldk-node-android/gradle.properties @@ -2,4 +2,4 @@ org.gradle.jvmargs=-Xmx1536m android.useAndroidX=true android.enableJetifier=true kotlin.code.style=official -libraryVersion=0.2.1 +libraryVersion=0.3.0 diff --git a/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt b/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt index a5ca6eac0..763862a33 100644 --- a/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt +++ b/bindings/kotlin/ldk-node-android/lib/src/androidTest/kotlin/org/lightningdevkit/ldknode/AndroidLibTest.kt @@ -51,10 +51,10 @@ class AndroidLibTest { val nodeId2 = node2.nodeId() println("Node Id 2: $nodeId2") - val address1 = node1.newOnchainAddress() + val address1 = node1.onchain_payment().newOnchainAddress() println("Funding address 1: $address1") - val address2 = node2.newOnchainAddress() + val address2 = node2.onchain_payment().newOnchainAddress() println("Funding address 2: $address2") node1.stop() diff --git a/bindings/kotlin/ldk-node-jvm/gradle.properties b/bindings/kotlin/ldk-node-jvm/gradle.properties index 46f202595..4ed588117 100644 --- a/bindings/kotlin/ldk-node-jvm/gradle.properties +++ b/bindings/kotlin/ldk-node-jvm/gradle.properties @@ -1,3 
+1,3 @@ org.gradle.jvmargs=-Xmx1536m kotlin.code.style=official -libraryVersion=0.2.1 +libraryVersion=0.3.0 diff --git a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt index 39c19821d..6f863e637 100644 --- a/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt +++ b/bindings/kotlin/ldk-node-jvm/lib/src/test/kotlin/org/lightningdevkit/ldknode/LibraryTest.kt @@ -146,10 +146,10 @@ class LibraryTest { val nodeId2 = node2.nodeId() println("Node Id 2: $nodeId2") - val address1 = node1.newOnchainAddress() + val address1 = node1.onchainPayment().newAddress() println("Funding address 1: $address1") - val address2 = node2.newOnchainAddress() + val address2 = node2.onchainPayment().newAddress() println("Funding address 2: $address2") val txid1 = sendToAddress(address1, 100000u) @@ -203,9 +203,9 @@ class LibraryTest { val spendableBalance2AfterOpen = node2.listBalances().spendableOnchainBalanceSats println("Spendable balance 1 after open: $spendableBalance1AfterOpen") println("Spendable balance 2 after open: $spendableBalance2AfterOpen") - assert(spendableBalance1AfterOpen > 49000u) - assert(spendableBalance1AfterOpen < 50000u) - assertEquals(100000uL, spendableBalance2AfterOpen) + assert(spendableBalance1AfterOpen > 24000u) + assert(spendableBalance1AfterOpen < 25000u) + assertEquals(75000uL, spendableBalance2AfterOpen) val channelReadyEvent1 = node1.waitNextEvent() println("Got event: $channelReadyEvent1") @@ -222,9 +222,9 @@ class LibraryTest { else -> return } - val invoice = node2.receivePayment(2500000u, "asdf", 9217u) + val invoice = node2.bolt11Payment().receive(2500000u, "asdf", 9217u) - node1.sendPayment(invoice) + node1.bolt11Payment().send(invoice) val paymentSuccessfulEvent = node1.waitNextEvent() println("Got event: $paymentSuccessfulEvent") diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 30b2d4a1b..9fdc794fa 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -15,6 +15,12 @@ dictionary Config { sequence trusted_peers_0conf; u64 probing_liquidity_limit_multiplier; LogLevel log_level; + AnchorChannelsConfig? anchor_channels_config; +}; + +dictionary AnchorChannelsConfig { + sequence trusted_peers_no_reserve; + u64 per_channel_reserve_sats; }; interface Builder { @@ -34,14 +40,18 @@ interface Builder { [Throws=BuildError] void set_listening_addresses(sequence listening_addresses); [Throws=BuildError] - LDKNode build(); + Node build(); + [Throws=BuildError] + Node build_with_fs_store(); }; -interface LDKNode { +interface Node { [Throws=NodeError] void start(); [Throws=NodeError] void stop(); + NodeStatus status(); + Config config(); Event? next_event(); Event wait_next_event(); [Async] @@ -49,12 +59,10 @@ interface LDKNode { void event_handled(); PublicKey node_id(); sequence? 
listening_addresses(); - [Throws=NodeError] - Address new_onchain_address(); - [Throws=NodeError] - Txid send_to_onchain_address([ByRef]Address address, u64 amount_msat); - [Throws=NodeError] - Txid send_all_to_onchain_address([ByRef]Address address); + Bolt11Payment bolt11_payment(); + Bolt12Payment bolt12_payment(); + SpontaneousPayment spontaneous_payment(); + OnchainPayment onchain_payment(); [Throws=NodeError] void connect(PublicKey node_id, SocketAddress address, boolean persist); [Throws=NodeError] @@ -64,40 +72,80 @@ interface LDKNode { [Throws=NodeError] void close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id); [Throws=NodeError] + void force_close_channel([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id); + [Throws=NodeError] void update_channel_config([ByRef]UserChannelId user_channel_id, PublicKey counterparty_node_id, ChannelConfig channel_config); [Throws=NodeError] void sync_wallets(); + PaymentDetails? payment([ByRef]PaymentId payment_id); [Throws=NodeError] - PaymentHash send_payment([ByRef]Bolt11Invoice invoice); + void remove_payment([ByRef]PaymentId payment_id); + BalanceDetails list_balances(); + sequence list_payments(); + sequence list_peers(); + sequence list_channels(); + NetworkGraph network_graph(); [Throws=NodeError] - PaymentHash send_payment_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + string sign_message([ByRef]sequence msg); + boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); +}; + +interface Bolt11Payment { [Throws=NodeError] - PaymentHash send_spontaneous_payment(u64 amount_msat, PublicKey node_id); + PaymentId send([ByRef]Bolt11Invoice invoice); [Throws=NodeError] - void send_payment_probes([ByRef]Bolt11Invoice invoice); + PaymentId send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); [Throws=NodeError] - void send_spontaneous_payment_probes(u64 amount_msat, PublicKey node_id); + void send_probes([ByRef]Bolt11Invoice invoice); [Throws=NodeError] - void send_payment_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + void send_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); [Throws=NodeError] - Bolt11Invoice receive_payment(u64 amount_msat, [ByRef]string description, u32 expiry_secs); + void claim_for_hash(PaymentHash payment_hash, u64 claimable_amount_msat, PaymentPreimage preimage); [Throws=NodeError] - Bolt11Invoice receive_variable_amount_payment([ByRef]string description, u32 expiry_secs); + void fail_for_hash(PaymentHash payment_hash); [Throws=NodeError] - Bolt11Invoice receive_payment_via_jit_channel(u64 amount_msat, [ByRef]string description, u32 expiry_secs, u64? max_lsp_fee_limit_msat); + Bolt11Invoice receive(u64 amount_msat, [ByRef]string description, u32 expiry_secs); [Throws=NodeError] - Bolt11Invoice receive_variable_amount_payment_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? max_proportional_lsp_fee_limit_ppm_msat); - PaymentDetails? 
payment([ByRef]PaymentHash payment_hash); + Bolt11Invoice receive_for_hash(u64 amount_msat, [ByRef]string description, u32 expiry_secs, PaymentHash payment_hash); [Throws=NodeError] - void remove_payment([ByRef]PaymentHash payment_hash); - BalanceDetails list_balances(); - sequence list_payments(); - sequence list_peers(); - sequence list_channels(); + Bolt11Invoice receive_variable_amount([ByRef]string description, u32 expiry_secs); [Throws=NodeError] - string sign_message([ByRef]sequence msg); - boolean verify_signature([ByRef]sequence msg, [ByRef]string sig, [ByRef]PublicKey pkey); - boolean is_running(); + Bolt11Invoice receive_variable_amount_for_hash([ByRef]string description, u32 expiry_secs, PaymentHash payment_hash); + [Throws=NodeError] + Bolt11Invoice receive_via_jit_channel(u64 amount_msat, [ByRef]string description, u32 expiry_secs, u64? max_lsp_fee_limit_msat); + [Throws=NodeError] + Bolt11Invoice receive_variable_amount_via_jit_channel([ByRef]string description, u32 expiry_secs, u64? max_proportional_lsp_fee_limit_ppm_msat); +}; + +interface Bolt12Payment { + [Throws=NodeError] + PaymentId send([ByRef]Offer offer, string? payer_note); + [Throws=NodeError] + PaymentId send_using_amount([ByRef]Offer offer, string? payer_note, u64 amount_msat); + [Throws=NodeError] + Offer receive(u64 amount_msat, [ByRef]string description); + [Throws=NodeError] + Offer receive_variable_amount([ByRef]string description); + [Throws=NodeError] + Bolt12Invoice request_refund_payment([ByRef]Refund refund); + [Throws=NodeError] + Refund initiate_refund(u64 amount_msat, u32 expiry_secs); +}; + +interface SpontaneousPayment { + [Throws=NodeError] + PaymentId send(u64 amount_msat, PublicKey node_id, sequence custom_tlvs); + [Throws=NodeError] + void send_probes(u64 amount_msat, PublicKey node_id); +}; + +interface OnchainPayment { + [Throws=NodeError] + Address new_address(); + [Throws=NodeError] + Txid send_to_address([ByRef]Address address, u64 amount_msat); + [Throws=NodeError] + Txid send_all_to_address([ByRef]Address address); }; [Error] @@ -107,6 +155,9 @@ enum NodeError { "OnchainTxCreationFailed", "ConnectionFailed", "InvoiceCreationFailed", + "InvoiceRequestCreationFailed", + "OfferCreationFailed", + "RefundCreationFailed", "PaymentSendingFailed", "ProbeSendingFailed", "ChannelCreationFailed", @@ -114,29 +165,56 @@ enum NodeError { "ChannelConfigUpdateFailed", "PersistenceFailed", "FeerateEstimationUpdateFailed", + "FeerateEstimationUpdateTimeout", "WalletOperationFailed", + "WalletOperationTimeout", "OnchainTxSigningFailed", "MessageSigningFailed", "TxSyncFailed", + "TxSyncTimeout", "GossipUpdateFailed", + "GossipUpdateTimeout", "LiquidityRequestFailed", "InvalidAddress", "InvalidSocketAddress", "InvalidPublicKey", "InvalidSecretKey", + "InvalidOfferId", + "InvalidNodeId", + "InvalidPaymentId", "InvalidPaymentHash", "InvalidPaymentPreimage", "InvalidPaymentSecret", "InvalidAmount", "InvalidInvoice", + "InvalidOffer", + "InvalidRefund", "InvalidChannelId", "InvalidNetwork", + "InvalidCustomTlv", "DuplicatePayment", + "UnsupportedCurrency", "InsufficientFunds", "LiquiditySourceUnavailable", "LiquidityFeeTooHigh", }; +dictionary NodeStatus { + boolean is_running; + boolean is_listening; + BestBlock current_best_block; + u64? latest_wallet_sync_timestamp; + u64? latest_onchain_wallet_sync_timestamp; + u64? latest_fee_rate_cache_update_timestamp; + u64? latest_rgs_snapshot_timestamp; + u64? 
latest_node_announcement_broadcast_timestamp; +}; + +dictionary BestBlock { + BlockHash block_hash; + u32 height; +}; + [Error] enum BuildError { "InvalidSeedBytes", @@ -154,9 +232,10 @@ enum BuildError { [Enum] interface Event { - PaymentSuccessful(PaymentHash payment_hash); - PaymentFailed(PaymentHash payment_hash, PaymentFailureReason? reason); - PaymentReceived(PaymentHash payment_hash, u64 amount_msat); + PaymentSuccessful(PaymentId? payment_id, PaymentHash payment_hash, u64? fee_paid_msat); + PaymentFailed(PaymentId? payment_id, PaymentHash payment_hash, PaymentFailureReason? reason); + PaymentReceived(PaymentId? payment_id, PaymentHash payment_hash, u64 amount_msat); + PaymentClaimable(PaymentId payment_id, PaymentHash payment_hash, u64 claimable_amount_msat, u32? claim_deadline); ChannelPending(ChannelId channel_id, UserChannelId user_channel_id, ChannelId former_temporary_channel_id, PublicKey counterparty_node_id, OutPoint funding_txo); ChannelReady(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id); ChannelClosed(ChannelId channel_id, UserChannelId user_channel_id, PublicKey? counterparty_node_id, ClosureReason? reason); @@ -173,16 +252,29 @@ enum PaymentFailureReason { [Enum] interface ClosureReason { - CounterpartyForceClosed ( UntrustedString peer_msg ); - HolderForceClosed (); - CooperativeClosure (); - CommitmentTxConfirmed (); - FundingTimedOut (); - ProcessingError ( string err ); - DisconnectedPeer (); - OutdatedChannelManager (); - CounterpartyCoopClosedUnfundedChannel (); - FundingBatchClosure (); + CounterpartyForceClosed(UntrustedString peer_msg); + HolderForceClosed(); + LegacyCooperativeClosure(); + CounterpartyInitiatedCooperativeClosure(); + LocallyInitiatedCooperativeClosure(); + CommitmentTxConfirmed(); + FundingTimedOut(); + ProcessingError(string err); + DisconnectedPeer(); + OutdatedChannelManager(); + CounterpartyCoopClosedUnfundedChannel(); + FundingBatchClosure(); + HTLCsTimedOut(); +}; + +[Enum] +interface PaymentKind { + Onchain(); + Bolt11(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret); + Bolt11Jit(PaymentHash hash, PaymentPreimage? preimage, PaymentSecret? secret, LSPFeeLimits lsp_fee_limits); + Bolt12Offer(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret, OfferId offer_id); + Bolt12Refund(PaymentHash? hash, PaymentPreimage? preimage, PaymentSecret? secret); + Spontaneous(PaymentHash hash, PaymentPreimage? preimage, sequence custom_tlvs); }; enum PaymentDirection { @@ -202,13 +294,12 @@ dictionary LSPFeeLimits { }; dictionary PaymentDetails { - PaymentHash hash; - PaymentPreimage? preimage; - PaymentSecret? secret; + PaymentId id; + PaymentKind kind; u64? amount_msat; PaymentDirection direction; PaymentStatus status; - LSPFeeLimits? lsp_fee_limits; + u64 latest_update_timestamp; }; [NonExhaustive] @@ -282,6 +373,7 @@ interface PendingSweepBalance { dictionary BalanceDetails { u64 total_onchain_balance_sats; u64 spendable_onchain_balance_sats; + u64 total_anchor_channels_reserve_sats; u64 total_lightning_balance_sats; sequence lightning_balances; sequence pending_balances_from_channel_closures; @@ -312,6 +404,51 @@ enum LogLevel { "Error", }; +dictionary TlvEntry { + u64 type; + sequence value; +}; + +interface NetworkGraph { + sequence list_channels(); + ChannelInfo? channel(u64 short_channel_id); + sequence list_nodes(); + NodeInfo? node([ByRef]NodeId node_id); +}; + +dictionary ChannelInfo { + NodeId node_one; + ChannelUpdateInfo? 
one_to_two; + NodeId node_two; + ChannelUpdateInfo? two_to_one; + u64? capacity_sats; +}; + +dictionary ChannelUpdateInfo { + u32 last_update; + boolean enabled; + u16 cltv_expiry_delta; + u64 htlc_minimum_msat; + u64 htlc_maximum_msat; + RoutingFees fees; +}; + +dictionary RoutingFees { + u32 base_msat; + u32 proportional_millionths; +}; + +dictionary NodeInfo { + sequence channels; + NodeAnnouncementInfo? announcement_info; +}; + +dictionary NodeAnnouncementInfo { + u32 last_update; + string alias; + sequence addresses; +}; + [Custom] typedef string Txid; @@ -324,12 +461,30 @@ typedef string SocketAddress; [Custom] typedef string PublicKey; +[Custom] +typedef string NodeId; + [Custom] typedef string Address; [Custom] typedef string Bolt11Invoice; +[Custom] +typedef string Offer; + +[Custom] +typedef string Refund; + +[Custom] +typedef string Bolt12Invoice; + +[Custom] +typedef string OfferId; + +[Custom] +typedef string PaymentId; + [Custom] typedef string PaymentHash; diff --git a/bindings/python/pyproject.toml b/bindings/python/pyproject.toml index 13ad46ce6..c8ff0a79d 100644 --- a/bindings/python/pyproject.toml +++ b/bindings/python/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "ldk_node" -version = "0.2.1" +version = "0.3.0" authors = [ { name="Elias Rohrer", email="dev@tnull.de" }, ] diff --git a/bindings/python/src/ldk_node/test_ldk_node.py b/bindings/python/src/ldk_node/test_ldk_node.py index 864ef7b43..a593078c1 100644 --- a/bindings/python/src/ldk_node/test_ldk_node.py +++ b/bindings/python/src/ldk_node/test_ldk_node.py @@ -125,9 +125,9 @@ def test_channel_full_cycle(self): node_id_2 = node_2.node_id() print("Node ID 2:", node_id_2) - address_1 = node_1.new_onchain_address() + address_1 = node_1.onchain_payment().new_address() txid_1 = send_to_address(address_1, 100000) - address_2 = node_2.new_onchain_address() + address_2 = node_2.onchain_payment().new_address() txid_2 = send_to_address(address_2, 100000) wait_for_tx(esplora_endpoint, txid_1) @@ -185,8 +185,8 @@ def test_channel_full_cycle(self): print("EVENT:", channel_ready_event_2) node_2.event_handled() - invoice = node_2.receive_payment(2500000, "asdf", 9217) - node_1.send_payment(invoice) + invoice = node_2.bolt11_payment().receive(2500000, "asdf", 9217) + node_1.bolt11_payment().send(invoice) payment_successful_event_1 = node_1.wait_next_event() assert isinstance(payment_successful_event_1, Event.PAYMENT_SUCCESSFUL) diff --git a/bindings/swift/LDKNodeFFI.xcframework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/Info.plist index ee11d9a89..9b1d666c7 100644 --- a/bindings/swift/LDKNodeFFI.xcframework/Info.plist +++ b/bindings/swift/LDKNodeFFI.xcframework/Info.plist @@ -16,6 +16,8 @@ SupportedPlatform macos + LSMinimumSystemVersion + 12.0 LibraryIdentifier @@ -31,6 +33,8 @@ ios SupportedPlatformVariant simulator + MinimumOSVersion + 15.0 LibraryIdentifier @@ -43,6 +47,8 @@ SupportedPlatform ios + MinimumOSVersion + 15.0 CFBundlePackageType diff --git a/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..8d0bf0f09 --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + MinimumOSVersion + 100.0 + + diff --git 
a/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..92e0f395d --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + MinimumOSVersion + 15.0 + + diff --git a/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist b/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist new file mode 100644 index 000000000..d3536e8b1 --- /dev/null +++ b/bindings/swift/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/Info.plist @@ -0,0 +1,18 @@ + + + + + CFBundleIdentifier + org.lightningdevkit.LDKNodeFFI + CFBundleName + LDKNodeFFI + CFBundleVersion + 0.3.0 + CFBundleShortVersionString + 0.3.0 + CFBundleExecutable + LDKNodeFFI + LSMinimumSystemVersion + 12.0 + + diff --git a/bindings/swift/Sources/LDKNode/LDKNode.swift b/bindings/swift/Sources/LDKNode/LDKNode.swift index 18551648c..5937c9050 100644 --- a/bindings/swift/Sources/LDKNode/LDKNode.swift +++ b/bindings/swift/Sources/LDKNode/LDKNode.swift @@ -288,7 +288,7 @@ private func uniffiCheckCallStatus( } case CALL_CANCELLED: - throw CancellationError() + fatalError("Cancellation not supported yet") default: throw UniffiInternalError.unexpectedRustCallStatusCode @@ -410,21 +410,38 @@ fileprivate struct FfiConverterString: FfiConverter { } -public protocol BuilderProtocol { - func build() throws -> LdkNode - func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) - func setEntropySeedBytes(seedBytes: [UInt8]) throws - func setEntropySeedPath(seedPath: String) - func setEsploraServer(esploraServerUrl: String) - func setGossipSourceP2p() - func setGossipSourceRgs(rgsServerUrl: String) - func setListeningAddresses(listeningAddresses: [SocketAddress]) throws - func setNetwork(network: Network) - func setStorageDirPath(storageDirPath: String) + + +public protocol Bolt11PaymentProtocol : AnyObject { + + func claimForHash(paymentHash: PaymentHash, claimableAmountMsat: UInt64, preimage: PaymentPreimage) throws + + func failForHash(paymentHash: PaymentHash) throws + + func receive(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice + + func receiveForHash(amountMsat: UInt64, description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice + + func receiveVariableAmount(description: String, expirySecs: UInt32) throws -> Bolt11Invoice + + func receiveVariableAmountForHash(description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice + + func receiveVariableAmountViaJitChannel(description: String, expirySecs: UInt32, maxProportionalLspFeeLimitPpmMsat: UInt64?) throws -> Bolt11Invoice + + func receiveViaJitChannel(amountMsat: UInt64, description: String, expirySecs: UInt32, maxLspFeeLimitMsat: UInt64?) 
throws -> Bolt11Invoice + + func send(invoice: Bolt11Invoice) throws -> PaymentId + + func sendProbes(invoice: Bolt11Invoice) throws + + func sendProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws + + func sendUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentId } -public class Builder: BuilderProtocol { +public class Bolt11Payment: + Bolt11PaymentProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -433,134 +450,165 @@ public class Builder: BuilderProtocol { required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { self.pointer = pointer } - public convenience init() { - self.init(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_builder_new($0) -}) - } - deinit { - try! rustCall { uniffi_ldk_node_fn_free_builder(pointer, $0) } + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_bolt11payment(self.pointer, $0) } } - - - public static func fromConfig(config: Config) -> Builder { - return Builder(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_builder_from_config( - FfiConverterTypeConfig.lower(config),$0) -}) + deinit { + try! rustCall { uniffi_ldk_node_fn_free_bolt11payment(pointer, $0) } } - - public func build() throws -> LdkNode { - return try FfiConverterTypeLDKNode.lift( + public func claimForHash(paymentHash: PaymentHash, claimableAmountMsat: UInt64, preimage: PaymentPreimage) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_claim_for_hash(self.uniffiClonePointer(), + FfiConverterTypePaymentHash.lower(paymentHash), + FfiConverterUInt64.lower(claimableAmountMsat), + FfiConverterTypePaymentPreimage.lower(preimage),$0 + ) +} + } + public func failForHash(paymentHash: PaymentHash) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_fail_for_hash(self.uniffiClonePointer(), + FfiConverterTypePaymentHash.lower(paymentHash),$0 + ) +} + } + public func receive(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_build(self.pointer, $0 + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs),$0 ) } ) } - - public func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_builder_set_entropy_bip39_mnemonic(self.pointer, - FfiConverterTypeMnemonic.lower(mnemonic), - FfiConverterOptionString.lower(passphrase),$0 + public func receiveForHash(amountMsat: UInt64, description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_for_hash(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterTypePaymentHash.lower(paymentHash),$0 ) } + ) } - - public func setEntropySeedBytes(seedBytes: [UInt8]) throws { - try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_set_entropy_seed_bytes(self.pointer, - FfiConverterSequenceUInt8.lower(seedBytes),$0 + public func receiveVariableAmount(description: String, expirySecs: UInt32) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs),$0 ) } + ) } - - public func setEntropySeedPath(seedPath: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_entropy_seed_path(self.pointer, - FfiConverterString.lower(seedPath),$0 + public func receiveVariableAmountForHash(description: String, expirySecs: UInt32, paymentHash: PaymentHash) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount_for_hash(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterTypePaymentHash.lower(paymentHash),$0 ) } + ) } - - public func setEsploraServer(esploraServerUrl: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_esplora_server(self.pointer, - FfiConverterString.lower(esploraServerUrl),$0 + public func receiveVariableAmountViaJitChannel(description: String, expirySecs: UInt32, maxProportionalLspFeeLimitPpmMsat: UInt64?) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_variable_amount_via_jit_channel(self.uniffiClonePointer(), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterOptionUInt64.lower(maxProportionalLspFeeLimitPpmMsat),$0 ) } + ) } - - public func setGossipSourceP2p() { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_gossip_source_p2p(self.pointer, $0 + public func receiveViaJitChannel(amountMsat: UInt64, description: String, expirySecs: UInt32, maxLspFeeLimitMsat: UInt64?) throws -> Bolt11Invoice { + return try FfiConverterTypeBolt11Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_receive_via_jit_channel(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description), + FfiConverterUInt32.lower(expirySecs), + FfiConverterOptionUInt64.lower(maxLspFeeLimitMsat),$0 ) } + ) } - - public func setGossipSourceRgs(rgsServerUrl: String) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_builder_set_gossip_source_rgs(self.pointer, - FfiConverterString.lower(rgsServerUrl),$0 + public func send(invoice: Bolt11Invoice) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice),$0 ) } + ) } - - public func setListeningAddresses(listeningAddresses: [SocketAddress]) throws { + public func sendProbes(invoice: Bolt11Invoice) throws { try - rustCallWithError(FfiConverterTypeBuildError.lift) { - uniffi_ldk_node_fn_method_builder_set_listening_addresses(self.pointer, - FfiConverterSequenceTypeSocketAddress.lower(listeningAddresses),$0 + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_probes(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice),$0 ) } } - - public func setNetwork(network: Network) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_network(self.pointer, - FfiConverterTypeNetwork.lower(network),$0 + public func sendProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_probes_using_amount(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice), + FfiConverterUInt64.lower(amountMsat),$0 ) } } - - public func setStorageDirPath(storageDirPath: String) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_builder_set_storage_dir_path(self.pointer, - FfiConverterString.lower(storageDirPath),$0 + public func sendUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt11payment_send_using_amount(self.uniffiClonePointer(), + FfiConverterTypeBolt11Invoice.lower(invoice), + FfiConverterUInt64.lower(amountMsat),$0 ) } + ) } + } -public struct FfiConverterTypeBuilder: FfiConverter { +public struct FfiConverterTypeBolt11Payment: FfiConverter { + typealias FfiType = UnsafeMutableRawPointer - typealias SwiftType = Builder + typealias SwiftType = Bolt11Payment - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Builder { + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt11Payment { + return Bolt11Payment(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Bolt11Payment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt11Payment { let v: UInt64 = try readInt(&buf) // The Rust code won't compile if a pointer won't fit in a UInt64. // We have to go via `UInt` because that's the thing that's the size of a pointer. @@ -571,48 +619,43 @@ public struct FfiConverterTypeBuilder: FfiConverter { return try lift(ptr!) } - public static func write(_ value: Builder, into buf: inout [UInt8]) { + public static func write(_ value: Bolt11Payment, into buf: inout [UInt8]) { // This fiddling is because `Int` is the thing that's the same size as a pointer. // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } - - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { - return Builder(unsafeFromRawPointer: pointer) - } - - public static func lower(_ value: Builder) -> UnsafeMutableRawPointer { - return value.pointer - } } -public func FfiConverterTypeBuilder_lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { - return try FfiConverterTypeBuilder.lift(pointer) +public func FfiConverterTypeBolt11Payment_lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt11Payment { + return try FfiConverterTypeBolt11Payment.lift(pointer) } -public func FfiConverterTypeBuilder_lower(_ value: Builder) -> UnsafeMutableRawPointer { - return FfiConverterTypeBuilder.lower(value) +public func FfiConverterTypeBolt11Payment_lower(_ value: Bolt11Payment) -> UnsafeMutableRawPointer { + return FfiConverterTypeBolt11Payment.lower(value) } -public protocol ChannelConfigProtocol { - func acceptUnderpayingHtlcs() -> Bool - func cltvExpiryDelta() -> UInt16 - func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 - func forwardingFeeBaseMsat() -> UInt32 - func forwardingFeeProportionalMillionths() -> UInt32 - func setAcceptUnderpayingHtlcs(value: Bool) - func setCltvExpiryDelta(value: UInt16) - func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) - func setForwardingFeeBaseMsat(feeMsat: UInt32) - func setForwardingFeeProportionalMillionths(value: UInt32) - func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) - func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) + + +public protocol Bolt12PaymentProtocol : AnyObject { + + func initiateRefund(amountMsat: UInt64, expirySecs: UInt32) throws -> Refund + + func receive(amountMsat: UInt64, description: String) throws -> Offer + + func receiveVariableAmount(description: String) throws -> Offer + + func requestRefundPayment(refund: Refund) throws -> Bolt12Invoice + + func send(offer: Offer, payerNote: String?) throws -> PaymentId + + func sendUsingAmount(offer: Offer, payerNote: String?, amountMsat: UInt64) throws -> PaymentId } -public class ChannelConfig: ChannelConfigProtocol { +public class Bolt12Payment: + Bolt12PaymentProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -621,151 +664,533 @@ public class ChannelConfig: ChannelConfigProtocol { required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { self.pointer = pointer } - public convenience init() { - self.init(unsafeFromRawPointer: try! rustCall() { - uniffi_ldk_node_fn_constructor_channelconfig_new($0) -}) + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_bolt12payment(self.pointer, $0) } } deinit { - try! rustCall { uniffi_ldk_node_fn_free_channelconfig(pointer, $0) } + try! rustCall { uniffi_ldk_node_fn_free_bolt12payment(pointer, $0) } } - - public func acceptUnderpayingHtlcs() -> Bool { - return try! FfiConverterBool.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_accept_underpaying_htlcs(self.pointer, $0 + public func initiateRefund(amountMsat: UInt64, expirySecs: UInt32) throws -> Refund { + return try FfiConverterTypeRefund.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_initiate_refund(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterUInt32.lower(expirySecs),$0 ) } ) } - - public func cltvExpiryDelta() -> UInt16 { - return try! FfiConverterUInt16.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_cltv_expiry_delta(self.pointer, $0 + public func receive(amountMsat: UInt64, description: String) throws -> Offer { + return try FfiConverterTypeOffer.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_receive(self.uniffiClonePointer(), + FfiConverterUInt64.lower(amountMsat), + FfiConverterString.lower(description),$0 ) } ) } - - public func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 { - return try! FfiConverterUInt64.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_force_close_avoidance_max_fee_satoshis(self.pointer, $0 + public func receiveVariableAmount(description: String) throws -> Offer { + return try FfiConverterTypeOffer.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_receive_variable_amount(self.uniffiClonePointer(), + FfiConverterString.lower(description),$0 ) } ) } - - public func forwardingFeeBaseMsat() -> UInt32 { - return try! FfiConverterUInt32.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_base_msat(self.pointer, $0 + public func requestRefundPayment(refund: Refund) throws -> Bolt12Invoice { + return try FfiConverterTypeBolt12Invoice.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_request_refund_payment(self.uniffiClonePointer(), + FfiConverterTypeRefund.lower(refund),$0 ) } ) } - - public func forwardingFeeProportionalMillionths() -> UInt32 { - return try! FfiConverterUInt32.lift( - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_proportional_millionths(self.pointer, $0 + public func send(offer: Offer, payerNote: String?) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_send(self.uniffiClonePointer(), + FfiConverterTypeOffer.lower(offer), + FfiConverterOptionString.lower(payerNote),$0 ) } ) } - - public func setAcceptUnderpayingHtlcs(value: Bool) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_accept_underpaying_htlcs(self.pointer, - FfiConverterBool.lower(value),$0 + public func sendUsingAmount(offer: Offer, payerNote: String?, amountMsat: UInt64) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_bolt12payment_send_using_amount(self.uniffiClonePointer(), + FfiConverterTypeOffer.lower(offer), + FfiConverterOptionString.lower(payerNote), + FfiConverterUInt64.lower(amountMsat),$0 ) } + ) } - public func setCltvExpiryDelta(value: UInt16) { - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_cltv_expiry_delta(self.pointer, - FfiConverterUInt16.lower(value),$0 - ) } - } - public func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_force_close_avoidance_max_fee_satoshis(self.pointer, - FfiConverterUInt64.lower(valueSat),$0 - ) -} +public struct FfiConverterTypeBolt12Payment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Bolt12Payment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt12Payment { + return Bolt12Payment(unsafeFromRawPointer: pointer) } - public func setForwardingFeeBaseMsat(feeMsat: UInt32) { - try! - rustCall() { - - uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_base_msat(self.pointer, - FfiConverterUInt32.lower(feeMsat),$0 - ) -} + public static func lower(_ value: Bolt12Payment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() } - public func setForwardingFeeProportionalMillionths(value: UInt32) { - try! - rustCall() { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt12Payment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Bolt12Payment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeBolt12Payment_lift(_ pointer: UnsafeMutableRawPointer) throws -> Bolt12Payment { + return try FfiConverterTypeBolt12Payment.lift(pointer) +} + +public func FfiConverterTypeBolt12Payment_lower(_ value: Bolt12Payment) -> UnsafeMutableRawPointer { + return FfiConverterTypeBolt12Payment.lower(value) +} + + + + +public protocol BuilderProtocol : AnyObject { - uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_proportional_millionths(self.pointer, - FfiConverterUInt32.lower(value),$0 + func build() throws -> Node + + func buildWithFsStore() throws -> Node + + func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) + + func setEntropySeedBytes(seedBytes: [UInt8]) throws + + func setEntropySeedPath(seedPath: String) + + func setEsploraServer(esploraServerUrl: String) + + func setGossipSourceP2p() + + func setGossipSourceRgs(rgsServerUrl: String) + + func setLiquiditySourceLsps2(address: SocketAddress, nodeId: PublicKey, token: String?) + + func setListeningAddresses(listeningAddresses: [SocketAddress]) throws + + func setNetwork(network: Network) + + func setStorageDirPath(storageDirPath: String) + +} + +public class Builder: + BuilderProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! 
rustCall { uniffi_ldk_node_fn_clone_builder(self.pointer, $0) } + } + public convenience init() { + self.init(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_builder_new($0) +}) + } + + deinit { + try! rustCall { uniffi_ldk_node_fn_free_builder(pointer, $0) } + } + + + public static func fromConfig(config: Config) -> Builder { + return Builder(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_builder_from_config( + FfiConverterTypeConfig.lower(config),$0) +}) + } + + + + + + public func build() throws -> Node { + return try FfiConverterTypeNode.lift( + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_build(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func buildWithFsStore() throws -> Node { + return try FfiConverterTypeNode.lift( + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_build_with_fs_store(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func setEntropyBip39Mnemonic(mnemonic: Mnemonic, passphrase: String?) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_entropy_bip39_mnemonic(self.uniffiClonePointer(), + FfiConverterTypeMnemonic.lower(mnemonic), + FfiConverterOptionString.lower(passphrase),$0 + ) +} + } + public func setEntropySeedBytes(seedBytes: [UInt8]) throws { + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_set_entropy_seed_bytes(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(seedBytes),$0 + ) +} + } + public func setEntropySeedPath(seedPath: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_entropy_seed_path(self.uniffiClonePointer(), + FfiConverterString.lower(seedPath),$0 + ) +} + } + public func setEsploraServer(esploraServerUrl: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_esplora_server(self.uniffiClonePointer(), + FfiConverterString.lower(esploraServerUrl),$0 + ) +} + } + public func setGossipSourceP2p() { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_gossip_source_p2p(self.uniffiClonePointer(), $0 + ) +} + } + public func setGossipSourceRgs(rgsServerUrl: String) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_gossip_source_rgs(self.uniffiClonePointer(), + FfiConverterString.lower(rgsServerUrl),$0 + ) +} + } + public func setLiquiditySourceLsps2(address: SocketAddress, nodeId: PublicKey, token: String?) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_liquidity_source_lsps2(self.uniffiClonePointer(), + FfiConverterTypeSocketAddress.lower(address), + FfiConverterTypePublicKey.lower(nodeId), + FfiConverterOptionString.lower(token),$0 ) } } + public func setListeningAddresses(listeningAddresses: [SocketAddress]) throws { + try + rustCallWithError(FfiConverterTypeBuildError.lift) { + uniffi_ldk_node_fn_method_builder_set_listening_addresses(self.uniffiClonePointer(), + FfiConverterSequenceTypeSocketAddress.lower(listeningAddresses),$0 + ) +} + } + public func setNetwork(network: Network) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_builder_set_network(self.uniffiClonePointer(), + FfiConverterTypeNetwork.lower(network),$0 + ) +} + } + public func setStorageDirPath(storageDirPath: String) { + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_builder_set_storage_dir_path(self.uniffiClonePointer(), + FfiConverterString.lower(storageDirPath),$0 + ) +} + } + +} + +public struct FfiConverterTypeBuilder: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Builder + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { + return Builder(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Builder) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Builder { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Builder, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeBuilder_lift(_ pointer: UnsafeMutableRawPointer) throws -> Builder { + return try FfiConverterTypeBuilder.lift(pointer) +} +public func FfiConverterTypeBuilder_lower(_ value: Builder) -> UnsafeMutableRawPointer { + return FfiConverterTypeBuilder.lower(value) +} + + + + +public protocol ChannelConfigProtocol : AnyObject { + + func acceptUnderpayingHtlcs() -> Bool + + func cltvExpiryDelta() -> UInt16 + + func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 + + func forwardingFeeBaseMsat() -> UInt32 + + func forwardingFeeProportionalMillionths() -> UInt32 + + func setAcceptUnderpayingHtlcs(value: Bool) + + func setCltvExpiryDelta(value: UInt16) + + func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) + + func setForwardingFeeBaseMsat(feeMsat: UInt32) + + func setForwardingFeeProportionalMillionths(value: UInt32) + + func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) + + func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) + +} + +public class ChannelConfig: + ChannelConfigProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_channelconfig(self.pointer, $0) } + } + public convenience init() { + self.init(unsafeFromRawPointer: try! rustCall() { + uniffi_ldk_node_fn_constructor_channelconfig_new($0) +}) + } + + deinit { + try! rustCall { uniffi_ldk_node_fn_free_channelconfig(pointer, $0) } + } + + + + + + public func acceptUnderpayingHtlcs() -> Bool { + return try! FfiConverterBool.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_accept_underpaying_htlcs(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func cltvExpiryDelta() -> UInt16 { + return try! FfiConverterUInt16.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_cltv_expiry_delta(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forceCloseAvoidanceMaxFeeSatoshis() -> UInt64 { + return try! FfiConverterUInt64.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_force_close_avoidance_max_fee_satoshis(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forwardingFeeBaseMsat() -> UInt32 { + return try! FfiConverterUInt32.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_base_msat(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func forwardingFeeProportionalMillionths() -> UInt32 { + return try! FfiConverterUInt32.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_forwarding_fee_proportional_millionths(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func setAcceptUnderpayingHtlcs(value: Bool) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_accept_underpaying_htlcs(self.uniffiClonePointer(), + FfiConverterBool.lower(value),$0 + ) +} + } + public func setCltvExpiryDelta(value: UInt16) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_cltv_expiry_delta(self.uniffiClonePointer(), + FfiConverterUInt16.lower(value),$0 + ) +} + } + public func setForceCloseAvoidanceMaxFeeSatoshis(valueSat: UInt64) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_force_close_avoidance_max_fee_satoshis(self.uniffiClonePointer(), + FfiConverterUInt64.lower(valueSat),$0 + ) +} + } + public func setForwardingFeeBaseMsat(feeMsat: UInt32) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_base_msat(self.uniffiClonePointer(), + FfiConverterUInt32.lower(feeMsat),$0 + ) +} + } + public func setForwardingFeeProportionalMillionths(value: UInt32) { + try! + rustCall() { + + uniffi_ldk_node_fn_method_channelconfig_set_forwarding_fee_proportional_millionths(self.uniffiClonePointer(), + FfiConverterUInt32.lower(value),$0 + ) +} + } public func setMaxDustHtlcExposureFromFeeRateMultiplier(multiplier: UInt64) { try! rustCall() { - uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fee_rate_multiplier(self.pointer, + uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fee_rate_multiplier(self.uniffiClonePointer(), FfiConverterUInt64.lower(multiplier),$0 ) } } - public func setMaxDustHtlcExposureFromFixedLimit(limitMsat: UInt64) { try! rustCall() { - uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit(self.pointer, + uniffi_ldk_node_fn_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit(self.uniffiClonePointer(), FfiConverterUInt64.lower(limitMsat),$0 ) } } + } public struct FfiConverterTypeChannelConfig: FfiConverter { + typealias FfiType = UnsafeMutableRawPointer typealias SwiftType = ChannelConfig + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> ChannelConfig { + return ChannelConfig(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: ChannelConfig) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelConfig { let v: UInt64 = try readInt(&buf) // The Rust code won't compile if a pointer won't fit in a UInt64. @@ -782,14 +1207,6 @@ public struct FfiConverterTypeChannelConfig: FfiConverter { // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } - - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> ChannelConfig { - return ChannelConfig(unsafeFromRawPointer: pointer) - } - - public static func lower(_ value: ChannelConfig) -> UnsafeMutableRawPointer { - return value.pointer - } } @@ -801,46 +1218,198 @@ public func FfiConverterTypeChannelConfig_lower(_ value: ChannelConfig) -> Unsaf return FfiConverterTypeChannelConfig.lower(value) } - -public protocol LDKNodeProtocol { - func closeChannel(channelId: ChannelId, counterpartyNodeId: PublicKey) throws - func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws - func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws - func disconnect(nodeId: PublicKey) throws - func eventHandled() - func isRunning() -> Bool - func listChannels() -> [ChannelDetails] - func listPayments() -> [PaymentDetails] - func listPeers() -> [PeerDetails] - func listeningAddresses() -> [SocketAddress]? - func newOnchainAddress() throws -> Address - func nextEvent() -> Event? - func nodeId() -> PublicKey - func payment(paymentHash: PaymentHash) -> PaymentDetails? - func receivePayment(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice - func receiveVariableAmountPayment(description: String, expirySecs: UInt32) throws -> Bolt11Invoice - func removePayment(paymentHash: PaymentHash) throws - func sendAllToOnchainAddress(address: Address) throws -> Txid - func sendPayment(invoice: Bolt11Invoice) throws -> PaymentHash - func sendPaymentProbes(invoice: Bolt11Invoice) throws - func sendPaymentProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws - func sendPaymentUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentHash - func sendSpontaneousPayment(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentHash - func sendSpontaneousPaymentProbes(amountMsat: UInt64, nodeId: PublicKey) throws - func sendToOnchainAddress(address: Address, amountMsat: UInt64) throws -> Txid - func signMessage(msg: [UInt8]) throws -> String - func spendableOnchainBalanceSats() throws -> UInt64 - func start() throws - func stop() throws - func syncWallets() throws - func totalOnchainBalanceSats() throws -> UInt64 - func updateChannelConfig(channelId: ChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws - func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool - func waitNextEvent() -> Event - -} - -public class LdkNode: LDKNodeProtocol { + + + +public protocol NetworkGraphProtocol : AnyObject { + + func channel(shortChannelId: UInt64) -> ChannelInfo? + + func listChannels() -> [UInt64] + + func listNodes() -> [NodeId] + + func node(nodeId: NodeId) -> NodeInfo? + +} + +public class NetworkGraph: + NetworkGraphProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_networkgraph(self.pointer, $0) } + } + + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_networkgraph(pointer, $0) } + } + + + + + + public func channel(shortChannelId: UInt64) -> ChannelInfo? { + return try! FfiConverterOptionTypeChannelInfo.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_channel(self.uniffiClonePointer(), + FfiConverterUInt64.lower(shortChannelId),$0 + ) +} + ) + } + public func listChannels() -> [UInt64] { + return try! FfiConverterSequenceUInt64.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_list_channels(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func listNodes() -> [NodeId] { + return try! FfiConverterSequenceTypeNodeId.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_list_nodes(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func node(nodeId: NodeId) -> NodeInfo? { + return try! FfiConverterOptionTypeNodeInfo.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_networkgraph_node(self.uniffiClonePointer(), + FfiConverterTypeNodeId.lower(nodeId),$0 + ) +} + ) + } + +} + +public struct FfiConverterTypeNetworkGraph: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = NetworkGraph + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> NetworkGraph { + return NetworkGraph(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: NetworkGraph) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NetworkGraph { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: NetworkGraph, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeNetworkGraph_lift(_ pointer: UnsafeMutableRawPointer) throws -> NetworkGraph { + return try FfiConverterTypeNetworkGraph.lift(pointer) +} + +public func FfiConverterTypeNetworkGraph_lower(_ value: NetworkGraph) -> UnsafeMutableRawPointer { + return FfiConverterTypeNetworkGraph.lower(value) +} + + + + +public protocol NodeProtocol : AnyObject { + + func bolt11Payment() -> Bolt11Payment + + func bolt12Payment() -> Bolt12Payment + + func closeChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws + + func config() -> Config + + func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws + + func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws -> UserChannelId + + func disconnect(nodeId: PublicKey) throws + + func eventHandled() + + func forceCloseChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws + + func listBalances() -> BalanceDetails + + func listChannels() -> [ChannelDetails] + + func listPayments() -> [PaymentDetails] + + func listPeers() -> [PeerDetails] + + func listeningAddresses() -> [SocketAddress]? 
+ + func networkGraph() -> NetworkGraph + + func nextEvent() -> Event? + + func nextEventAsync() async -> Event + + func nodeId() -> PublicKey + + func onchainPayment() -> OnchainPayment + + func payment(paymentId: PaymentId) -> PaymentDetails? + + func removePayment(paymentId: PaymentId) throws + + func signMessage(msg: [UInt8]) throws -> String + + func spontaneousPayment() -> SpontaneousPayment + + func start() throws + + func status() -> NodeStatus + + func stop() throws + + func syncWallets() throws + + func updateChannelConfig(userChannelId: UserChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws + + func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool + + func waitNextEvent() -> Event + +} + +public class Node: + NodeProtocol { fileprivate let pointer: UnsafeMutableRawPointer // TODO: We'd like this to be `private` but for Swifty reasons, @@ -850,40 +1419,72 @@ public class LdkNode: LDKNodeProtocol { self.pointer = pointer } + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_node(self.pointer, $0) } + } + deinit { - try! rustCall { uniffi_ldk_node_fn_free_ldknode(pointer, $0) } + try! rustCall { uniffi_ldk_node_fn_free_node(pointer, $0) } } - - public func closeChannel(channelId: ChannelId, counterpartyNodeId: PublicKey) throws { + public func bolt11Payment() -> Bolt11Payment { + return try! FfiConverterTypeBolt11Payment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_bolt11_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func bolt12Payment() -> Bolt12Payment { + return try! FfiConverterTypeBolt12Payment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_bolt12_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func closeChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_close_channel(self.pointer, - FfiConverterTypeChannelId.lower(channelId), + uniffi_ldk_node_fn_method_node_close_channel(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), FfiConverterTypePublicKey.lower(counterpartyNodeId),$0 ) } } - - public func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws { + public func config() -> Config { + return try! FfiConverterTypeConfig.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_node_config(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func connect(nodeId: PublicKey, address: SocketAddress, persist: Bool) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_connect(self.pointer, + uniffi_ldk_node_fn_method_node_connect(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId), FfiConverterTypeSocketAddress.lower(address), FfiConverterBool.lower(persist),$0 ) } } - - public func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws { - try + public func connectOpenChannel(nodeId: PublicKey, address: SocketAddress, channelAmountSats: UInt64, pushToCounterpartyMsat: UInt64?, channelConfig: ChannelConfig?, announceChannel: Bool) throws -> UserChannelId { + return try FfiConverterTypeUserChannelId.lift( + try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_connect_open_channel(self.pointer, + uniffi_ldk_node_fn_method_node_connect_open_channel(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId), FfiConverterTypeSocketAddress.lower(address), FfiConverterUInt64.lower(channelAmountSats), @@ -892,375 +1493,700 @@ public class LdkNode: LDKNodeProtocol { FfiConverterBool.lower(announceChannel),$0 ) } + ) } - - public func disconnect(nodeId: PublicKey) throws { + public func disconnect(nodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_disconnect(self.pointer, + uniffi_ldk_node_fn_method_node_disconnect(self.uniffiClonePointer(), FfiConverterTypePublicKey.lower(nodeId),$0 ) } } - public func eventHandled() { try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_event_handled(self.pointer, $0 + uniffi_ldk_node_fn_method_node_event_handled(self.uniffiClonePointer(), $0 ) } } - - public func isRunning() -> Bool { - return try! FfiConverterBool.lift( + public func forceCloseChannel(userChannelId: UserChannelId, counterpartyNodeId: PublicKey) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_force_close_channel(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), + FfiConverterTypePublicKey.lower(counterpartyNodeId),$0 + ) +} + } + public func listBalances() -> BalanceDetails { + return try! FfiConverterTypeBalanceDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_is_running(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_balances(self.uniffiClonePointer(), $0 ) } ) } - public func listChannels() -> [ChannelDetails] { return try! FfiConverterSequenceTypeChannelDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_channels(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_channels(self.uniffiClonePointer(), $0 ) } ) } - public func listPayments() -> [PaymentDetails] { return try! FfiConverterSequenceTypePaymentDetails.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_payments(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_payments(self.uniffiClonePointer(), $0 ) } ) } - public func listPeers() -> [PeerDetails] { return try! FfiConverterSequenceTypePeerDetails.lift( try! 
rustCall() { - uniffi_ldk_node_fn_method_ldknode_list_peers(self.pointer, $0 + uniffi_ldk_node_fn_method_node_list_peers(self.uniffiClonePointer(), $0 ) } ) } - public func listeningAddresses() -> [SocketAddress]? { return try! FfiConverterOptionSequenceTypeSocketAddress.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_listening_addresses(self.pointer, $0 + uniffi_ldk_node_fn_method_node_listening_addresses(self.uniffiClonePointer(), $0 ) } ) } - - public func newOnchainAddress() throws -> Address { - return try FfiConverterTypeAddress.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_new_onchain_address(self.pointer, $0 + public func networkGraph() -> NetworkGraph { + return try! FfiConverterTypeNetworkGraph.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_network_graph(self.uniffiClonePointer(), $0 ) } ) } - public func nextEvent() -> Event? { return try! FfiConverterOptionTypeEvent.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_next_event(self.pointer, $0 + uniffi_ldk_node_fn_method_node_next_event(self.uniffiClonePointer(), $0 ) } ) } + public func nextEventAsync() async -> Event { + return try! await uniffiRustCallAsync( + rustFutureFunc: { + uniffi_ldk_node_fn_method_node_next_event_async( + self.uniffiClonePointer() + ) + }, + pollFunc: ffi_ldk_node_rust_future_poll_rust_buffer, + completeFunc: ffi_ldk_node_rust_future_complete_rust_buffer, + freeFunc: ffi_ldk_node_rust_future_free_rust_buffer, + liftFunc: FfiConverterTypeEvent.lift, + errorHandler: nil + + ) + } + public func nodeId() -> PublicKey { return try! FfiConverterTypePublicKey.lift( try! rustCall() { - uniffi_ldk_node_fn_method_ldknode_node_id(self.pointer, $0 + uniffi_ldk_node_fn_method_node_node_id(self.uniffiClonePointer(), $0 ) } ) } - - public func payment(paymentHash: PaymentHash) -> PaymentDetails? { + public func onchainPayment() -> OnchainPayment { + return try! FfiConverterTypeOnchainPayment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_onchain_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func payment(paymentId: PaymentId) -> PaymentDetails? { return try! FfiConverterOptionTypePaymentDetails.lift( try! 
rustCall() { - uniffi_ldk_node_fn_method_ldknode_payment(self.pointer, - FfiConverterTypePaymentHash.lower(paymentHash),$0 + uniffi_ldk_node_fn_method_node_payment(self.uniffiClonePointer(), + FfiConverterTypePaymentId.lower(paymentId),$0 ) } ) } - - public func receivePayment(amountMsat: UInt64, description: String, expirySecs: UInt32) throws -> Bolt11Invoice { - return try FfiConverterTypeBolt11Invoice.lift( + public func removePayment(paymentId: PaymentId) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_remove_payment(self.uniffiClonePointer(), + FfiConverterTypePaymentId.lower(paymentId),$0 + ) +} + } + public func signMessage(msg: [UInt8]) throws -> String { + return try FfiConverterString.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_receive_payment(self.pointer, - FfiConverterUInt64.lower(amountMsat), - FfiConverterString.lower(description), - FfiConverterUInt32.lower(expirySecs),$0 + uniffi_ldk_node_fn_method_node_sign_message(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(msg),$0 ) } ) } - - public func receiveVariableAmountPayment(description: String, expirySecs: UInt32) throws -> Bolt11Invoice { - return try FfiConverterTypeBolt11Invoice.lift( - try + public func spontaneousPayment() -> SpontaneousPayment { + return try! FfiConverterTypeSpontaneousPayment.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_spontaneous_payment(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func start() throws { + try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_receive_variable_amount_payment(self.pointer, - FfiConverterString.lower(description), - FfiConverterUInt32.lower(expirySecs),$0 + uniffi_ldk_node_fn_method_node_start(self.uniffiClonePointer(), $0 + ) +} + } + public func status() -> NodeStatus { + return try! FfiConverterTypeNodeStatus.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_status(self.uniffiClonePointer(), $0 ) } ) } - - public func removePayment(paymentHash: PaymentHash) throws { + public func stop() throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_remove_payment(self.pointer, - FfiConverterTypePaymentHash.lower(paymentHash),$0 + uniffi_ldk_node_fn_method_node_stop(self.uniffiClonePointer(), $0 + ) +} + } + public func syncWallets() throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_sync_wallets(self.uniffiClonePointer(), $0 ) } } + public func updateChannelConfig(userChannelId: UserChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws { + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_node_update_channel_config(self.uniffiClonePointer(), + FfiConverterTypeUserChannelId.lower(userChannelId), + FfiConverterTypePublicKey.lower(counterpartyNodeId), + FfiConverterTypeChannelConfig.lower(channelConfig),$0 + ) +} + } + public func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool { + return try! FfiConverterBool.lift( + try! + rustCall() { + + uniffi_ldk_node_fn_method_node_verify_signature(self.uniffiClonePointer(), + FfiConverterSequenceUInt8.lower(msg), + FfiConverterString.lower(sig), + FfiConverterTypePublicKey.lower(pkey),$0 + ) +} + ) + } + public func waitNextEvent() -> Event { + return try! FfiConverterTypeEvent.lift( + try! 
+ rustCall() { + + uniffi_ldk_node_fn_method_node_wait_next_event(self.uniffiClonePointer(), $0 + ) +} + ) + } + +} + +public struct FfiConverterTypeNode: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = Node + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> Node { + return Node(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: Node) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Node { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: Node, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. + writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeNode_lift(_ pointer: UnsafeMutableRawPointer) throws -> Node { + return try FfiConverterTypeNode.lift(pointer) +} + +public func FfiConverterTypeNode_lower(_ value: Node) -> UnsafeMutableRawPointer { + return FfiConverterTypeNode.lower(value) +} + + + + +public protocol OnchainPaymentProtocol : AnyObject { + + func newAddress() throws -> Address + + func sendAllToAddress(address: Address) throws -> Txid + + func sendToAddress(address: Address, amountMsat: UInt64) throws -> Txid + +} + +public class OnchainPayment: + OnchainPaymentProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer + } + + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_onchainpayment(self.pointer, $0) } + } + + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_onchainpayment(pointer, $0) } + } + + - public func sendAllToOnchainAddress(address: Address) throws -> Txid { + + + public func newAddress() throws -> Address { + return try FfiConverterTypeAddress.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_onchainpayment_new_address(self.uniffiClonePointer(), $0 + ) +} + ) + } + public func sendAllToAddress(address: Address) throws -> Txid { return try FfiConverterTypeTxid.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_all_to_onchain_address(self.pointer, + uniffi_ldk_node_fn_method_onchainpayment_send_all_to_address(self.uniffiClonePointer(), FfiConverterTypeAddress.lower(address),$0 ) } ) } + public func sendToAddress(address: Address, amountMsat: UInt64) throws -> Txid { + return try FfiConverterTypeTxid.lift( + try + rustCallWithError(FfiConverterTypeNodeError.lift) { + uniffi_ldk_node_fn_method_onchainpayment_send_to_address(self.uniffiClonePointer(), + FfiConverterTypeAddress.lower(address), + FfiConverterUInt64.lower(amountMsat),$0 + ) +} + ) + } + +} + +public struct FfiConverterTypeOnchainPayment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = OnchainPayment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> OnchainPayment { + return OnchainPayment(unsafeFromRawPointer: pointer) + } + + public static func lower(_ value: OnchainPayment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OnchainPayment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) + } + + public static func write(_ value: OnchainPayment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
+ writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + } +} + + +public func FfiConverterTypeOnchainPayment_lift(_ pointer: UnsafeMutableRawPointer) throws -> OnchainPayment { + return try FfiConverterTypeOnchainPayment.lift(pointer) +} - public func sendPayment(invoice: Bolt11Invoice) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice),$0 - ) +public func FfiConverterTypeOnchainPayment_lower(_ value: OnchainPayment) -> UnsafeMutableRawPointer { + return FfiConverterTypeOnchainPayment.lower(value) } - ) - } - public func sendPaymentProbes(invoice: Bolt11Invoice) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_probes(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice),$0 - ) + + + +public protocol SpontaneousPaymentProtocol : AnyObject { + + func send(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentId + + func sendProbes(amountMsat: UInt64, nodeId: PublicKey) throws + } + +public class SpontaneousPayment: + SpontaneousPaymentProtocol { + fileprivate let pointer: UnsafeMutableRawPointer + + // TODO: We'd like this to be `private` but for Swifty reasons, + // we can't implement `FfiConverter` without making this `required` and we can't + // make it `required` without making it `public`. + required init(unsafeFromRawPointer pointer: UnsafeMutableRawPointer) { + self.pointer = pointer } - public func sendPaymentProbesUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_probes_using_amount(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice), - FfiConverterUInt64.lower(amountMsat),$0 - ) -} + public func uniffiClonePointer() -> UnsafeMutableRawPointer { + return try! rustCall { uniffi_ldk_node_fn_clone_spontaneouspayment(self.pointer, $0) } } - public func sendPaymentUsingAmount(invoice: Bolt11Invoice, amountMsat: UInt64) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_payment_using_amount(self.pointer, - FfiConverterTypeBolt11Invoice.lower(invoice), - FfiConverterUInt64.lower(amountMsat),$0 - ) -} - ) + deinit { + try! 
rustCall { uniffi_ldk_node_fn_free_spontaneouspayment(pointer, $0) } } - public func sendSpontaneousPayment(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentHash { - return try FfiConverterTypePaymentHash.lift( + + + + + public func send(amountMsat: UInt64, nodeId: PublicKey) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift( try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_spontaneous_payment(self.pointer, + uniffi_ldk_node_fn_method_spontaneouspayment_send(self.uniffiClonePointer(), FfiConverterUInt64.lower(amountMsat), FfiConverterTypePublicKey.lower(nodeId),$0 ) } ) } - - public func sendSpontaneousPaymentProbes(amountMsat: UInt64, nodeId: PublicKey) throws { + public func sendProbes(amountMsat: UInt64, nodeId: PublicKey) throws { try rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_spontaneous_payment_probes(self.pointer, + uniffi_ldk_node_fn_method_spontaneouspayment_send_probes(self.uniffiClonePointer(), FfiConverterUInt64.lower(amountMsat), FfiConverterTypePublicKey.lower(nodeId),$0 ) } } - public func sendToOnchainAddress(address: Address, amountMsat: UInt64) throws -> Txid { - return try FfiConverterTypeTxid.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_send_to_onchain_address(self.pointer, - FfiConverterTypeAddress.lower(address), - FfiConverterUInt64.lower(amountMsat),$0 - ) } - ) + +public struct FfiConverterTypeSpontaneousPayment: FfiConverter { + + typealias FfiType = UnsafeMutableRawPointer + typealias SwiftType = SpontaneousPayment + + public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> SpontaneousPayment { + return SpontaneousPayment(unsafeFromRawPointer: pointer) } - public func signMessage(msg: [UInt8]) throws -> String { - return try FfiConverterString.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_sign_message(self.pointer, - FfiConverterSequenceUInt8.lower(msg),$0 - ) -} - ) + public static func lower(_ value: SpontaneousPayment) -> UnsafeMutableRawPointer { + return value.uniffiClonePointer() } - public func spendableOnchainBalanceSats() throws -> UInt64 { - return try FfiConverterUInt64.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_spendable_onchain_balance_sats(self.pointer, $0 - ) -} - ) + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SpontaneousPayment { + let v: UInt64 = try readInt(&buf) + // The Rust code won't compile if a pointer won't fit in a UInt64. + // We have to go via `UInt` because that's the thing that's the size of a pointer. + let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) + if (ptr == nil) { + throw UniffiInternalError.unexpectedNullPointer + } + return try lift(ptr!) } - public func start() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_start(self.pointer, $0 - ) -} + public static func write(_ value: SpontaneousPayment, into buf: inout [UInt8]) { + // This fiddling is because `Int` is the thing that's the same size as a pointer. + // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
+ writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) } +} - public func stop() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_stop(self.pointer, $0 - ) + +public func FfiConverterTypeSpontaneousPayment_lift(_ pointer: UnsafeMutableRawPointer) throws -> SpontaneousPayment { + return try FfiConverterTypeSpontaneousPayment.lift(pointer) } - } - public func syncWallets() throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_sync_wallets(self.pointer, $0 - ) +public func FfiConverterTypeSpontaneousPayment_lower(_ value: SpontaneousPayment) -> UnsafeMutableRawPointer { + return FfiConverterTypeSpontaneousPayment.lower(value) +} + + +public struct AnchorChannelsConfig { + public var trustedPeersNoReserve: [PublicKey] + public var perChannelReserveSats: UInt64 + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + trustedPeersNoReserve: [PublicKey], + perChannelReserveSats: UInt64) { + self.trustedPeersNoReserve = trustedPeersNoReserve + self.perChannelReserveSats = perChannelReserveSats + } } + + +extension AnchorChannelsConfig: Equatable, Hashable { + public static func ==(lhs: AnchorChannelsConfig, rhs: AnchorChannelsConfig) -> Bool { + if lhs.trustedPeersNoReserve != rhs.trustedPeersNoReserve { + return false + } + if lhs.perChannelReserveSats != rhs.perChannelReserveSats { + return false + } + return true } - public func totalOnchainBalanceSats() throws -> UInt64 { - return try FfiConverterUInt64.lift( - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_total_onchain_balance_sats(self.pointer, $0 - ) + public func hash(into hasher: inout Hasher) { + hasher.combine(trustedPeersNoReserve) + hasher.combine(perChannelReserveSats) + } } + + +public struct FfiConverterTypeAnchorChannelsConfig: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> AnchorChannelsConfig { + return + try AnchorChannelsConfig( + trustedPeersNoReserve: FfiConverterSequenceTypePublicKey.read(from: &buf), + perChannelReserveSats: FfiConverterUInt64.read(from: &buf) ) } - public func updateChannelConfig(channelId: ChannelId, counterpartyNodeId: PublicKey, channelConfig: ChannelConfig) throws { - try - rustCallWithError(FfiConverterTypeNodeError.lift) { - uniffi_ldk_node_fn_method_ldknode_update_channel_config(self.pointer, - FfiConverterTypeChannelId.lower(channelId), - FfiConverterTypePublicKey.lower(counterpartyNodeId), - FfiConverterTypeChannelConfig.lower(channelConfig),$0 - ) -} + public static func write(_ value: AnchorChannelsConfig, into buf: inout [UInt8]) { + FfiConverterSequenceTypePublicKey.write(value.trustedPeersNoReserve, into: &buf) + FfiConverterUInt64.write(value.perChannelReserveSats, into: &buf) } +} - public func verifySignature(msg: [UInt8], sig: String, pkey: PublicKey) -> Bool { - return try! FfiConverterBool.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_ldknode_verify_signature(self.pointer, - FfiConverterSequenceUInt8.lower(msg), - FfiConverterString.lower(sig), - FfiConverterTypePublicKey.lower(pkey),$0 - ) + +public func FfiConverterTypeAnchorChannelsConfig_lift(_ buf: RustBuffer) throws -> AnchorChannelsConfig { + return try FfiConverterTypeAnchorChannelsConfig.lift(buf) +} + +public func FfiConverterTypeAnchorChannelsConfig_lower(_ value: AnchorChannelsConfig) -> RustBuffer { + return FfiConverterTypeAnchorChannelsConfig.lower(value) } + + +public struct BalanceDetails { + public var totalOnchainBalanceSats: UInt64 + public var spendableOnchainBalanceSats: UInt64 + public var totalAnchorChannelsReserveSats: UInt64 + public var totalLightningBalanceSats: UInt64 + public var lightningBalances: [LightningBalance] + public var pendingBalancesFromChannelClosures: [PendingSweepBalance] + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + totalOnchainBalanceSats: UInt64, + spendableOnchainBalanceSats: UInt64, + totalAnchorChannelsReserveSats: UInt64, + totalLightningBalanceSats: UInt64, + lightningBalances: [LightningBalance], + pendingBalancesFromChannelClosures: [PendingSweepBalance]) { + self.totalOnchainBalanceSats = totalOnchainBalanceSats + self.spendableOnchainBalanceSats = spendableOnchainBalanceSats + self.totalAnchorChannelsReserveSats = totalAnchorChannelsReserveSats + self.totalLightningBalanceSats = totalLightningBalanceSats + self.lightningBalances = lightningBalances + self.pendingBalancesFromChannelClosures = pendingBalancesFromChannelClosures + } +} + + +extension BalanceDetails: Equatable, Hashable { + public static func ==(lhs: BalanceDetails, rhs: BalanceDetails) -> Bool { + if lhs.totalOnchainBalanceSats != rhs.totalOnchainBalanceSats { + return false + } + if lhs.spendableOnchainBalanceSats != rhs.spendableOnchainBalanceSats { + return false + } + if lhs.totalAnchorChannelsReserveSats != rhs.totalAnchorChannelsReserveSats { + return false + } + if lhs.totalLightningBalanceSats != rhs.totalLightningBalanceSats { + return false + } + if lhs.lightningBalances != rhs.lightningBalances { + return false + } + if lhs.pendingBalancesFromChannelClosures != rhs.pendingBalancesFromChannelClosures { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(totalOnchainBalanceSats) + hasher.combine(spendableOnchainBalanceSats) + hasher.combine(totalAnchorChannelsReserveSats) + hasher.combine(totalLightningBalanceSats) + hasher.combine(lightningBalances) + hasher.combine(pendingBalancesFromChannelClosures) + } +} + + +public struct FfiConverterTypeBalanceDetails: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BalanceDetails { + return + try BalanceDetails( + totalOnchainBalanceSats: FfiConverterUInt64.read(from: &buf), + spendableOnchainBalanceSats: FfiConverterUInt64.read(from: &buf), + totalAnchorChannelsReserveSats: FfiConverterUInt64.read(from: &buf), + totalLightningBalanceSats: FfiConverterUInt64.read(from: &buf), + lightningBalances: FfiConverterSequenceTypeLightningBalance.read(from: &buf), + pendingBalancesFromChannelClosures: FfiConverterSequenceTypePendingSweepBalance.read(from: &buf) ) } - public func waitNextEvent() -> Event { - return try! FfiConverterTypeEvent.lift( - try! 
- rustCall() { - - uniffi_ldk_node_fn_method_ldknode_wait_next_event(self.pointer, $0 - ) + public static func write(_ value: BalanceDetails, into buf: inout [UInt8]) { + FfiConverterUInt64.write(value.totalOnchainBalanceSats, into: &buf) + FfiConverterUInt64.write(value.spendableOnchainBalanceSats, into: &buf) + FfiConverterUInt64.write(value.totalAnchorChannelsReserveSats, into: &buf) + FfiConverterUInt64.write(value.totalLightningBalanceSats, into: &buf) + FfiConverterSequenceTypeLightningBalance.write(value.lightningBalances, into: &buf) + FfiConverterSequenceTypePendingSweepBalance.write(value.pendingBalancesFromChannelClosures, into: &buf) + } } - ) + + +public func FfiConverterTypeBalanceDetails_lift(_ buf: RustBuffer) throws -> BalanceDetails { + return try FfiConverterTypeBalanceDetails.lift(buf) +} + +public func FfiConverterTypeBalanceDetails_lower(_ value: BalanceDetails) -> RustBuffer { + return FfiConverterTypeBalanceDetails.lower(value) +} + + +public struct BestBlock { + public var blockHash: BlockHash + public var height: UInt32 + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + blockHash: BlockHash, + height: UInt32) { + self.blockHash = blockHash + self.height = height } } -public struct FfiConverterTypeLDKNode: FfiConverter { - typealias FfiType = UnsafeMutableRawPointer - typealias SwiftType = LdkNode - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LdkNode { - let v: UInt64 = try readInt(&buf) - // The Rust code won't compile if a pointer won't fit in a UInt64. - // We have to go via `UInt` because that's the thing that's the size of a pointer. - let ptr = UnsafeMutableRawPointer(bitPattern: UInt(truncatingIfNeeded: v)) - if (ptr == nil) { - throw UniffiInternalError.unexpectedNullPointer +extension BestBlock: Equatable, Hashable { + public static func ==(lhs: BestBlock, rhs: BestBlock) -> Bool { + if lhs.blockHash != rhs.blockHash { + return false } - return try lift(ptr!) + if lhs.height != rhs.height { + return false + } + return true } - public static func write(_ value: LdkNode, into buf: inout [UInt8]) { - // This fiddling is because `Int` is the thing that's the same size as a pointer. - // The Rust code won't compile if a pointer won't fit in a `UInt64`. 
- writeInt(&buf, UInt64(bitPattern: Int64(Int(bitPattern: lower(value))))) + public func hash(into hasher: inout Hasher) { + hasher.combine(blockHash) + hasher.combine(height) } +} + - public static func lift(_ pointer: UnsafeMutableRawPointer) throws -> LdkNode { - return LdkNode(unsafeFromRawPointer: pointer) +public struct FfiConverterTypeBestBlock: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BestBlock { + return + try BestBlock( + blockHash: FfiConverterTypeBlockHash.read(from: &buf), + height: FfiConverterUInt32.read(from: &buf) + ) } - public static func lower(_ value: LdkNode) -> UnsafeMutableRawPointer { - return value.pointer + public static func write(_ value: BestBlock, into buf: inout [UInt8]) { + FfiConverterTypeBlockHash.write(value.blockHash, into: &buf) + FfiConverterUInt32.write(value.height, into: &buf) } } -public func FfiConverterTypeLDKNode_lift(_ pointer: UnsafeMutableRawPointer) throws -> LdkNode { - return try FfiConverterTypeLDKNode.lift(pointer) +public func FfiConverterTypeBestBlock_lift(_ buf: RustBuffer) throws -> BestBlock { + return try FfiConverterTypeBestBlock.lift(buf) } -public func FfiConverterTypeLDKNode_lower(_ value: LdkNode) -> UnsafeMutableRawPointer { - return FfiConverterTypeLDKNode.lower(value) +public func FfiConverterTypeBestBlock_lower(_ value: BestBlock) -> RustBuffer { + return FfiConverterTypeBestBlock.lower(value) } @@ -1272,7 +2198,6 @@ public struct ChannelDetails { public var unspendablePunishmentReserve: UInt64? public var userChannelId: UserChannelId public var feerateSatPer1000Weight: UInt32 - public var balanceMsat: UInt64 public var outboundCapacityMsat: UInt64 public var inboundCapacityMsat: UInt64 public var confirmationsRequired: UInt32? @@ -1297,7 +2222,35 @@ public struct ChannelDetails { // Default memberwise initializers are never public by default, so we // declare one manually. 
- public init(channelId: ChannelId, counterpartyNodeId: PublicKey, fundingTxo: OutPoint?, channelValueSats: UInt64, unspendablePunishmentReserve: UInt64?, userChannelId: UserChannelId, feerateSatPer1000Weight: UInt32, balanceMsat: UInt64, outboundCapacityMsat: UInt64, inboundCapacityMsat: UInt64, confirmationsRequired: UInt32?, confirmations: UInt32?, isOutbound: Bool, isChannelReady: Bool, isUsable: Bool, isPublic: Bool, cltvExpiryDelta: UInt16?, counterpartyUnspendablePunishmentReserve: UInt64, counterpartyOutboundHtlcMinimumMsat: UInt64?, counterpartyOutboundHtlcMaximumMsat: UInt64?, counterpartyForwardingInfoFeeBaseMsat: UInt32?, counterpartyForwardingInfoFeeProportionalMillionths: UInt32?, counterpartyForwardingInfoCltvExpiryDelta: UInt16?, nextOutboundHtlcLimitMsat: UInt64, nextOutboundHtlcMinimumMsat: UInt64, forceCloseSpendDelay: UInt16?, inboundHtlcMinimumMsat: UInt64, inboundHtlcMaximumMsat: UInt64?, config: ChannelConfig) { + public init( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + fundingTxo: OutPoint?, + channelValueSats: UInt64, + unspendablePunishmentReserve: UInt64?, + userChannelId: UserChannelId, + feerateSatPer1000Weight: UInt32, + outboundCapacityMsat: UInt64, + inboundCapacityMsat: UInt64, + confirmationsRequired: UInt32?, + confirmations: UInt32?, + isOutbound: Bool, + isChannelReady: Bool, + isUsable: Bool, + isPublic: Bool, + cltvExpiryDelta: UInt16?, + counterpartyUnspendablePunishmentReserve: UInt64, + counterpartyOutboundHtlcMinimumMsat: UInt64?, + counterpartyOutboundHtlcMaximumMsat: UInt64?, + counterpartyForwardingInfoFeeBaseMsat: UInt32?, + counterpartyForwardingInfoFeeProportionalMillionths: UInt32?, + counterpartyForwardingInfoCltvExpiryDelta: UInt16?, + nextOutboundHtlcLimitMsat: UInt64, + nextOutboundHtlcMinimumMsat: UInt64, + forceCloseSpendDelay: UInt16?, + inboundHtlcMinimumMsat: UInt64, + inboundHtlcMaximumMsat: UInt64?, + config: ChannelConfig) { self.channelId = channelId self.counterpartyNodeId = counterpartyNodeId self.fundingTxo = fundingTxo @@ -1305,7 +2258,6 @@ public struct ChannelDetails { self.unspendablePunishmentReserve = unspendablePunishmentReserve self.userChannelId = userChannelId self.feerateSatPer1000Weight = feerateSatPer1000Weight - self.balanceMsat = balanceMsat self.outboundCapacityMsat = outboundCapacityMsat self.inboundCapacityMsat = inboundCapacityMsat self.confirmationsRequired = confirmationsRequired @@ -1334,36 +2286,36 @@ public struct ChannelDetails { public struct FfiConverterTypeChannelDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelDetails { - return try ChannelDetails( - channelId: FfiConverterTypeChannelId.read(from: &buf), - counterpartyNodeId: FfiConverterTypePublicKey.read(from: &buf), - fundingTxo: FfiConverterOptionTypeOutPoint.read(from: &buf), - channelValueSats: FfiConverterUInt64.read(from: &buf), - unspendablePunishmentReserve: FfiConverterOptionUInt64.read(from: &buf), - userChannelId: FfiConverterTypeUserChannelId.read(from: &buf), - feerateSatPer1000Weight: FfiConverterUInt32.read(from: &buf), - balanceMsat: FfiConverterUInt64.read(from: &buf), - outboundCapacityMsat: FfiConverterUInt64.read(from: &buf), - inboundCapacityMsat: FfiConverterUInt64.read(from: &buf), - confirmationsRequired: FfiConverterOptionUInt32.read(from: &buf), - confirmations: FfiConverterOptionUInt32.read(from: &buf), - isOutbound: FfiConverterBool.read(from: &buf), - isChannelReady: FfiConverterBool.read(from: &buf), - isUsable: 
FfiConverterBool.read(from: &buf), - isPublic: FfiConverterBool.read(from: &buf), - cltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), - counterpartyUnspendablePunishmentReserve: FfiConverterUInt64.read(from: &buf), - counterpartyOutboundHtlcMinimumMsat: FfiConverterOptionUInt64.read(from: &buf), - counterpartyOutboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), - counterpartyForwardingInfoFeeBaseMsat: FfiConverterOptionUInt32.read(from: &buf), - counterpartyForwardingInfoFeeProportionalMillionths: FfiConverterOptionUInt32.read(from: &buf), - counterpartyForwardingInfoCltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), - nextOutboundHtlcLimitMsat: FfiConverterUInt64.read(from: &buf), - nextOutboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), - forceCloseSpendDelay: FfiConverterOptionUInt16.read(from: &buf), - inboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), - inboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), - config: FfiConverterTypeChannelConfig.read(from: &buf) + return + try ChannelDetails( + channelId: FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: FfiConverterTypePublicKey.read(from: &buf), + fundingTxo: FfiConverterOptionTypeOutPoint.read(from: &buf), + channelValueSats: FfiConverterUInt64.read(from: &buf), + unspendablePunishmentReserve: FfiConverterOptionUInt64.read(from: &buf), + userChannelId: FfiConverterTypeUserChannelId.read(from: &buf), + feerateSatPer1000Weight: FfiConverterUInt32.read(from: &buf), + outboundCapacityMsat: FfiConverterUInt64.read(from: &buf), + inboundCapacityMsat: FfiConverterUInt64.read(from: &buf), + confirmationsRequired: FfiConverterOptionUInt32.read(from: &buf), + confirmations: FfiConverterOptionUInt32.read(from: &buf), + isOutbound: FfiConverterBool.read(from: &buf), + isChannelReady: FfiConverterBool.read(from: &buf), + isUsable: FfiConverterBool.read(from: &buf), + isPublic: FfiConverterBool.read(from: &buf), + cltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), + counterpartyUnspendablePunishmentReserve: FfiConverterUInt64.read(from: &buf), + counterpartyOutboundHtlcMinimumMsat: FfiConverterOptionUInt64.read(from: &buf), + counterpartyOutboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), + counterpartyForwardingInfoFeeBaseMsat: FfiConverterOptionUInt32.read(from: &buf), + counterpartyForwardingInfoFeeProportionalMillionths: FfiConverterOptionUInt32.read(from: &buf), + counterpartyForwardingInfoCltvExpiryDelta: FfiConverterOptionUInt16.read(from: &buf), + nextOutboundHtlcLimitMsat: FfiConverterUInt64.read(from: &buf), + nextOutboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + forceCloseSpendDelay: FfiConverterOptionUInt16.read(from: &buf), + inboundHtlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + inboundHtlcMaximumMsat: FfiConverterOptionUInt64.read(from: &buf), + config: FfiConverterTypeChannelConfig.read(from: &buf) ) } @@ -1375,7 +2327,6 @@ public struct FfiConverterTypeChannelDetails: FfiConverterRustBuffer { FfiConverterOptionUInt64.write(value.unspendablePunishmentReserve, into: &buf) FfiConverterTypeUserChannelId.write(value.userChannelId, into: &buf) FfiConverterUInt32.write(value.feerateSatPer1000Weight, into: &buf) - FfiConverterUInt64.write(value.balanceMsat, into: &buf) FfiConverterUInt64.write(value.outboundCapacityMsat, into: &buf) FfiConverterUInt64.write(value.inboundCapacityMsat, into: &buf) FfiConverterOptionUInt32.write(value.confirmationsRequired, into: &buf) @@ -1401,12 +2352,191 @@ public struct 
FfiConverterTypeChannelDetails: FfiConverterRustBuffer { } -public func FfiConverterTypeChannelDetails_lift(_ buf: RustBuffer) throws -> ChannelDetails { - return try FfiConverterTypeChannelDetails.lift(buf) +public func FfiConverterTypeChannelDetails_lift(_ buf: RustBuffer) throws -> ChannelDetails { + return try FfiConverterTypeChannelDetails.lift(buf) +} + +public func FfiConverterTypeChannelDetails_lower(_ value: ChannelDetails) -> RustBuffer { + return FfiConverterTypeChannelDetails.lower(value) +} + + +public struct ChannelInfo { + public var nodeOne: NodeId + public var oneToTwo: ChannelUpdateInfo? + public var nodeTwo: NodeId + public var twoToOne: ChannelUpdateInfo? + public var capacitySats: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + nodeOne: NodeId, + oneToTwo: ChannelUpdateInfo?, + nodeTwo: NodeId, + twoToOne: ChannelUpdateInfo?, + capacitySats: UInt64?) { + self.nodeOne = nodeOne + self.oneToTwo = oneToTwo + self.nodeTwo = nodeTwo + self.twoToOne = twoToOne + self.capacitySats = capacitySats + } +} + + +extension ChannelInfo: Equatable, Hashable { + public static func ==(lhs: ChannelInfo, rhs: ChannelInfo) -> Bool { + if lhs.nodeOne != rhs.nodeOne { + return false + } + if lhs.oneToTwo != rhs.oneToTwo { + return false + } + if lhs.nodeTwo != rhs.nodeTwo { + return false + } + if lhs.twoToOne != rhs.twoToOne { + return false + } + if lhs.capacitySats != rhs.capacitySats { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(nodeOne) + hasher.combine(oneToTwo) + hasher.combine(nodeTwo) + hasher.combine(twoToOne) + hasher.combine(capacitySats) + } +} + + +public struct FfiConverterTypeChannelInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelInfo { + return + try ChannelInfo( + nodeOne: FfiConverterTypeNodeId.read(from: &buf), + oneToTwo: FfiConverterOptionTypeChannelUpdateInfo.read(from: &buf), + nodeTwo: FfiConverterTypeNodeId.read(from: &buf), + twoToOne: FfiConverterOptionTypeChannelUpdateInfo.read(from: &buf), + capacitySats: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: ChannelInfo, into buf: inout [UInt8]) { + FfiConverterTypeNodeId.write(value.nodeOne, into: &buf) + FfiConverterOptionTypeChannelUpdateInfo.write(value.oneToTwo, into: &buf) + FfiConverterTypeNodeId.write(value.nodeTwo, into: &buf) + FfiConverterOptionTypeChannelUpdateInfo.write(value.twoToOne, into: &buf) + FfiConverterOptionUInt64.write(value.capacitySats, into: &buf) + } +} + + +public func FfiConverterTypeChannelInfo_lift(_ buf: RustBuffer) throws -> ChannelInfo { + return try FfiConverterTypeChannelInfo.lift(buf) +} + +public func FfiConverterTypeChannelInfo_lower(_ value: ChannelInfo) -> RustBuffer { + return FfiConverterTypeChannelInfo.lower(value) +} + + +public struct ChannelUpdateInfo { + public var lastUpdate: UInt32 + public var enabled: Bool + public var cltvExpiryDelta: UInt16 + public var htlcMinimumMsat: UInt64 + public var htlcMaximumMsat: UInt64 + public var fees: RoutingFees + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + lastUpdate: UInt32, + enabled: Bool, + cltvExpiryDelta: UInt16, + htlcMinimumMsat: UInt64, + htlcMaximumMsat: UInt64, + fees: RoutingFees) { + self.lastUpdate = lastUpdate + self.enabled = enabled + self.cltvExpiryDelta = cltvExpiryDelta + self.htlcMinimumMsat = htlcMinimumMsat + self.htlcMaximumMsat = htlcMaximumMsat + self.fees = fees + } +} + + +extension ChannelUpdateInfo: Equatable, Hashable { + public static func ==(lhs: ChannelUpdateInfo, rhs: ChannelUpdateInfo) -> Bool { + if lhs.lastUpdate != rhs.lastUpdate { + return false + } + if lhs.enabled != rhs.enabled { + return false + } + if lhs.cltvExpiryDelta != rhs.cltvExpiryDelta { + return false + } + if lhs.htlcMinimumMsat != rhs.htlcMinimumMsat { + return false + } + if lhs.htlcMaximumMsat != rhs.htlcMaximumMsat { + return false + } + if lhs.fees != rhs.fees { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(lastUpdate) + hasher.combine(enabled) + hasher.combine(cltvExpiryDelta) + hasher.combine(htlcMinimumMsat) + hasher.combine(htlcMaximumMsat) + hasher.combine(fees) + } +} + + +public struct FfiConverterTypeChannelUpdateInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ChannelUpdateInfo { + return + try ChannelUpdateInfo( + lastUpdate: FfiConverterUInt32.read(from: &buf), + enabled: FfiConverterBool.read(from: &buf), + cltvExpiryDelta: FfiConverterUInt16.read(from: &buf), + htlcMinimumMsat: FfiConverterUInt64.read(from: &buf), + htlcMaximumMsat: FfiConverterUInt64.read(from: &buf), + fees: FfiConverterTypeRoutingFees.read(from: &buf) + ) + } + + public static func write(_ value: ChannelUpdateInfo, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.lastUpdate, into: &buf) + FfiConverterBool.write(value.enabled, into: &buf) + FfiConverterUInt16.write(value.cltvExpiryDelta, into: &buf) + FfiConverterUInt64.write(value.htlcMinimumMsat, into: &buf) + FfiConverterUInt64.write(value.htlcMaximumMsat, into: &buf) + FfiConverterTypeRoutingFees.write(value.fees, into: &buf) + } +} + + +public func FfiConverterTypeChannelUpdateInfo_lift(_ buf: RustBuffer) throws -> ChannelUpdateInfo { + return try FfiConverterTypeChannelUpdateInfo.lift(buf) } -public func FfiConverterTypeChannelDetails_lower(_ value: ChannelDetails) -> RustBuffer { - return FfiConverterTypeChannelDetails.lower(value) +public func FfiConverterTypeChannelUpdateInfo_lower(_ value: ChannelUpdateInfo) -> RustBuffer { + return FfiConverterTypeChannelUpdateInfo.lower(value) } @@ -1422,10 +2552,23 @@ public struct Config { public var trustedPeers0conf: [PublicKey] public var probingLiquidityLimitMultiplier: UInt64 public var logLevel: LogLevel + public var anchorChannelsConfig: AnchorChannelsConfig? // Default memberwise initializers are never public by default, so we // declare one manually. - public init(storageDirPath: String = "/tmp/ldk_node/", logDirPath: String? = nil, network: Network = .bitcoin, listeningAddresses: [SocketAddress]? 
= nil, defaultCltvExpiryDelta: UInt32 = UInt32(144), onchainWalletSyncIntervalSecs: UInt64 = UInt64(80), walletSyncIntervalSecs: UInt64 = UInt64(30), feeRateCacheUpdateIntervalSecs: UInt64 = UInt64(600), trustedPeers0conf: [PublicKey] = [], probingLiquidityLimitMultiplier: UInt64 = UInt64(3), logLevel: LogLevel = .debug) { + public init( + storageDirPath: String, + logDirPath: String?, + network: Network, + listeningAddresses: [SocketAddress]?, + defaultCltvExpiryDelta: UInt32, + onchainWalletSyncIntervalSecs: UInt64, + walletSyncIntervalSecs: UInt64, + feeRateCacheUpdateIntervalSecs: UInt64, + trustedPeers0conf: [PublicKey], + probingLiquidityLimitMultiplier: UInt64, + logLevel: LogLevel, + anchorChannelsConfig: AnchorChannelsConfig?) { self.storageDirPath = storageDirPath self.logDirPath = logDirPath self.network = network @@ -1437,6 +2580,7 @@ public struct Config { self.trustedPeers0conf = trustedPeers0conf self.probingLiquidityLimitMultiplier = probingLiquidityLimitMultiplier self.logLevel = logLevel + self.anchorChannelsConfig = anchorChannelsConfig } } @@ -1476,6 +2620,9 @@ extension Config: Equatable, Hashable { if lhs.logLevel != rhs.logLevel { return false } + if lhs.anchorChannelsConfig != rhs.anchorChannelsConfig { + return false + } return true } @@ -1491,24 +2638,27 @@ extension Config: Equatable, Hashable { hasher.combine(trustedPeers0conf) hasher.combine(probingLiquidityLimitMultiplier) hasher.combine(logLevel) + hasher.combine(anchorChannelsConfig) } } public struct FfiConverterTypeConfig: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Config { - return try Config( - storageDirPath: FfiConverterString.read(from: &buf), - logDirPath: FfiConverterOptionString.read(from: &buf), - network: FfiConverterTypeNetwork.read(from: &buf), - listeningAddresses: FfiConverterOptionSequenceTypeSocketAddress.read(from: &buf), - defaultCltvExpiryDelta: FfiConverterUInt32.read(from: &buf), - onchainWalletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), - walletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), - feeRateCacheUpdateIntervalSecs: FfiConverterUInt64.read(from: &buf), - trustedPeers0conf: FfiConverterSequenceTypePublicKey.read(from: &buf), - probingLiquidityLimitMultiplier: FfiConverterUInt64.read(from: &buf), - logLevel: FfiConverterTypeLogLevel.read(from: &buf) + return + try Config( + storageDirPath: FfiConverterString.read(from: &buf), + logDirPath: FfiConverterOptionString.read(from: &buf), + network: FfiConverterTypeNetwork.read(from: &buf), + listeningAddresses: FfiConverterOptionSequenceTypeSocketAddress.read(from: &buf), + defaultCltvExpiryDelta: FfiConverterUInt32.read(from: &buf), + onchainWalletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), + walletSyncIntervalSecs: FfiConverterUInt64.read(from: &buf), + feeRateCacheUpdateIntervalSecs: FfiConverterUInt64.read(from: &buf), + trustedPeers0conf: FfiConverterSequenceTypePublicKey.read(from: &buf), + probingLiquidityLimitMultiplier: FfiConverterUInt64.read(from: &buf), + logLevel: FfiConverterTypeLogLevel.read(from: &buf), + anchorChannelsConfig: FfiConverterOptionTypeAnchorChannelsConfig.read(from: &buf) ) } @@ -1524,6 +2674,7 @@ public struct FfiConverterTypeConfig: FfiConverterRustBuffer { FfiConverterSequenceTypePublicKey.write(value.trustedPeers0conf, into: &buf) FfiConverterUInt64.write(value.probingLiquidityLimitMultiplier, into: &buf) FfiConverterTypeLogLevel.write(value.logLevel, into: &buf) + 
FfiConverterOptionTypeAnchorChannelsConfig.write(value.anchorChannelsConfig, into: &buf) } } @@ -1537,13 +2688,310 @@ public func FfiConverterTypeConfig_lower(_ value: Config) -> RustBuffer { } +public struct LspFeeLimits { + public var maxTotalOpeningFeeMsat: UInt64? + public var maxProportionalOpeningFeePpmMsat: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + maxTotalOpeningFeeMsat: UInt64?, + maxProportionalOpeningFeePpmMsat: UInt64?) { + self.maxTotalOpeningFeeMsat = maxTotalOpeningFeeMsat + self.maxProportionalOpeningFeePpmMsat = maxProportionalOpeningFeePpmMsat + } +} + + +extension LspFeeLimits: Equatable, Hashable { + public static func ==(lhs: LspFeeLimits, rhs: LspFeeLimits) -> Bool { + if lhs.maxTotalOpeningFeeMsat != rhs.maxTotalOpeningFeeMsat { + return false + } + if lhs.maxProportionalOpeningFeePpmMsat != rhs.maxProportionalOpeningFeePpmMsat { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(maxTotalOpeningFeeMsat) + hasher.combine(maxProportionalOpeningFeePpmMsat) + } +} + + +public struct FfiConverterTypeLSPFeeLimits: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LspFeeLimits { + return + try LspFeeLimits( + maxTotalOpeningFeeMsat: FfiConverterOptionUInt64.read(from: &buf), + maxProportionalOpeningFeePpmMsat: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: LspFeeLimits, into buf: inout [UInt8]) { + FfiConverterOptionUInt64.write(value.maxTotalOpeningFeeMsat, into: &buf) + FfiConverterOptionUInt64.write(value.maxProportionalOpeningFeePpmMsat, into: &buf) + } +} + + +public func FfiConverterTypeLSPFeeLimits_lift(_ buf: RustBuffer) throws -> LspFeeLimits { + return try FfiConverterTypeLSPFeeLimits.lift(buf) +} + +public func FfiConverterTypeLSPFeeLimits_lower(_ value: LspFeeLimits) -> RustBuffer { + return FfiConverterTypeLSPFeeLimits.lower(value) +} + + +public struct NodeAnnouncementInfo { + public var lastUpdate: UInt32 + public var alias: String + public var addresses: [SocketAddress] + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + lastUpdate: UInt32, + alias: String, + addresses: [SocketAddress]) { + self.lastUpdate = lastUpdate + self.alias = alias + self.addresses = addresses + } +} + + +extension NodeAnnouncementInfo: Equatable, Hashable { + public static func ==(lhs: NodeAnnouncementInfo, rhs: NodeAnnouncementInfo) -> Bool { + if lhs.lastUpdate != rhs.lastUpdate { + return false + } + if lhs.alias != rhs.alias { + return false + } + if lhs.addresses != rhs.addresses { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(lastUpdate) + hasher.combine(alias) + hasher.combine(addresses) + } +} + + +public struct FfiConverterTypeNodeAnnouncementInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeAnnouncementInfo { + return + try NodeAnnouncementInfo( + lastUpdate: FfiConverterUInt32.read(from: &buf), + alias: FfiConverterString.read(from: &buf), + addresses: FfiConverterSequenceTypeSocketAddress.read(from: &buf) + ) + } + + public static func write(_ value: NodeAnnouncementInfo, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.lastUpdate, into: &buf) + FfiConverterString.write(value.alias, into: &buf) + FfiConverterSequenceTypeSocketAddress.write(value.addresses, into: &buf) + } +} + + +public func FfiConverterTypeNodeAnnouncementInfo_lift(_ buf: RustBuffer) throws -> NodeAnnouncementInfo { + return try FfiConverterTypeNodeAnnouncementInfo.lift(buf) +} + +public func FfiConverterTypeNodeAnnouncementInfo_lower(_ value: NodeAnnouncementInfo) -> RustBuffer { + return FfiConverterTypeNodeAnnouncementInfo.lower(value) +} + + +public struct NodeInfo { + public var channels: [UInt64] + public var announcementInfo: NodeAnnouncementInfo? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + channels: [UInt64], + announcementInfo: NodeAnnouncementInfo?) { + self.channels = channels + self.announcementInfo = announcementInfo + } +} + + +extension NodeInfo: Equatable, Hashable { + public static func ==(lhs: NodeInfo, rhs: NodeInfo) -> Bool { + if lhs.channels != rhs.channels { + return false + } + if lhs.announcementInfo != rhs.announcementInfo { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(channels) + hasher.combine(announcementInfo) + } +} + + +public struct FfiConverterTypeNodeInfo: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeInfo { + return + try NodeInfo( + channels: FfiConverterSequenceUInt64.read(from: &buf), + announcementInfo: FfiConverterOptionTypeNodeAnnouncementInfo.read(from: &buf) + ) + } + + public static func write(_ value: NodeInfo, into buf: inout [UInt8]) { + FfiConverterSequenceUInt64.write(value.channels, into: &buf) + FfiConverterOptionTypeNodeAnnouncementInfo.write(value.announcementInfo, into: &buf) + } +} + + +public func FfiConverterTypeNodeInfo_lift(_ buf: RustBuffer) throws -> NodeInfo { + return try FfiConverterTypeNodeInfo.lift(buf) +} + +public func FfiConverterTypeNodeInfo_lower(_ value: NodeInfo) -> RustBuffer { + return FfiConverterTypeNodeInfo.lower(value) +} + + +public struct NodeStatus { + public var isRunning: Bool + public var isListening: Bool + public var currentBestBlock: BestBlock + public var latestWalletSyncTimestamp: UInt64? + public var latestOnchainWalletSyncTimestamp: UInt64? + public var latestFeeRateCacheUpdateTimestamp: UInt64? 
+ public var latestRgsSnapshotTimestamp: UInt64? + public var latestNodeAnnouncementBroadcastTimestamp: UInt64? + + // Default memberwise initializers are never public by default, so we + // declare one manually. + public init( + isRunning: Bool, + isListening: Bool, + currentBestBlock: BestBlock, + latestWalletSyncTimestamp: UInt64?, + latestOnchainWalletSyncTimestamp: UInt64?, + latestFeeRateCacheUpdateTimestamp: UInt64?, + latestRgsSnapshotTimestamp: UInt64?, + latestNodeAnnouncementBroadcastTimestamp: UInt64?) { + self.isRunning = isRunning + self.isListening = isListening + self.currentBestBlock = currentBestBlock + self.latestWalletSyncTimestamp = latestWalletSyncTimestamp + self.latestOnchainWalletSyncTimestamp = latestOnchainWalletSyncTimestamp + self.latestFeeRateCacheUpdateTimestamp = latestFeeRateCacheUpdateTimestamp + self.latestRgsSnapshotTimestamp = latestRgsSnapshotTimestamp + self.latestNodeAnnouncementBroadcastTimestamp = latestNodeAnnouncementBroadcastTimestamp + } +} + + +extension NodeStatus: Equatable, Hashable { + public static func ==(lhs: NodeStatus, rhs: NodeStatus) -> Bool { + if lhs.isRunning != rhs.isRunning { + return false + } + if lhs.isListening != rhs.isListening { + return false + } + if lhs.currentBestBlock != rhs.currentBestBlock { + return false + } + if lhs.latestWalletSyncTimestamp != rhs.latestWalletSyncTimestamp { + return false + } + if lhs.latestOnchainWalletSyncTimestamp != rhs.latestOnchainWalletSyncTimestamp { + return false + } + if lhs.latestFeeRateCacheUpdateTimestamp != rhs.latestFeeRateCacheUpdateTimestamp { + return false + } + if lhs.latestRgsSnapshotTimestamp != rhs.latestRgsSnapshotTimestamp { + return false + } + if lhs.latestNodeAnnouncementBroadcastTimestamp != rhs.latestNodeAnnouncementBroadcastTimestamp { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(isRunning) + hasher.combine(isListening) + hasher.combine(currentBestBlock) + hasher.combine(latestWalletSyncTimestamp) + hasher.combine(latestOnchainWalletSyncTimestamp) + hasher.combine(latestFeeRateCacheUpdateTimestamp) + hasher.combine(latestRgsSnapshotTimestamp) + hasher.combine(latestNodeAnnouncementBroadcastTimestamp) + } +} + + +public struct FfiConverterTypeNodeStatus: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeStatus { + return + try NodeStatus( + isRunning: FfiConverterBool.read(from: &buf), + isListening: FfiConverterBool.read(from: &buf), + currentBestBlock: FfiConverterTypeBestBlock.read(from: &buf), + latestWalletSyncTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestOnchainWalletSyncTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestFeeRateCacheUpdateTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestRgsSnapshotTimestamp: FfiConverterOptionUInt64.read(from: &buf), + latestNodeAnnouncementBroadcastTimestamp: FfiConverterOptionUInt64.read(from: &buf) + ) + } + + public static func write(_ value: NodeStatus, into buf: inout [UInt8]) { + FfiConverterBool.write(value.isRunning, into: &buf) + FfiConverterBool.write(value.isListening, into: &buf) + FfiConverterTypeBestBlock.write(value.currentBestBlock, into: &buf) + FfiConverterOptionUInt64.write(value.latestWalletSyncTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestOnchainWalletSyncTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestFeeRateCacheUpdateTimestamp, into: &buf) + 
FfiConverterOptionUInt64.write(value.latestRgsSnapshotTimestamp, into: &buf) + FfiConverterOptionUInt64.write(value.latestNodeAnnouncementBroadcastTimestamp, into: &buf) + } +} + + +public func FfiConverterTypeNodeStatus_lift(_ buf: RustBuffer) throws -> NodeStatus { + return try FfiConverterTypeNodeStatus.lift(buf) +} + +public func FfiConverterTypeNodeStatus_lower(_ value: NodeStatus) -> RustBuffer { + return FfiConverterTypeNodeStatus.lower(value) +} + + public struct OutPoint { public var txid: Txid public var vout: UInt32 // Default memberwise initializers are never public by default, so we // declare one manually. - public init(txid: Txid, vout: UInt32) { + public init( + txid: Txid, + vout: UInt32) { self.txid = txid self.vout = vout } @@ -1570,9 +3018,10 @@ extension OutPoint: Equatable, Hashable { public struct FfiConverterTypeOutPoint: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OutPoint { - return try OutPoint( - txid: FfiConverterTypeTxid.read(from: &buf), - vout: FfiConverterUInt32.read(from: &buf) + return + try OutPoint( + txid: FfiConverterTypeTxid.read(from: &buf), + vout: FfiConverterUInt32.read(from: &buf) ) } @@ -1593,35 +3042,38 @@ public func FfiConverterTypeOutPoint_lower(_ value: OutPoint) -> RustBuffer { public struct PaymentDetails { - public var hash: PaymentHash - public var preimage: PaymentPreimage? - public var secret: PaymentSecret? + public var id: PaymentId + public var kind: PaymentKind public var amountMsat: UInt64? public var direction: PaymentDirection public var status: PaymentStatus + public var latestUpdateTimestamp: UInt64 // Default memberwise initializers are never public by default, so we // declare one manually. - public init(hash: PaymentHash, preimage: PaymentPreimage?, secret: PaymentSecret?, amountMsat: UInt64?, direction: PaymentDirection, status: PaymentStatus) { - self.hash = hash - self.preimage = preimage - self.secret = secret + public init( + id: PaymentId, + kind: PaymentKind, + amountMsat: UInt64?, + direction: PaymentDirection, + status: PaymentStatus, + latestUpdateTimestamp: UInt64) { + self.id = id + self.kind = kind self.amountMsat = amountMsat self.direction = direction self.status = status + self.latestUpdateTimestamp = latestUpdateTimestamp } } extension PaymentDetails: Equatable, Hashable { public static func ==(lhs: PaymentDetails, rhs: PaymentDetails) -> Bool { - if lhs.hash != rhs.hash { - return false - } - if lhs.preimage != rhs.preimage { + if lhs.id != rhs.id { return false } - if lhs.secret != rhs.secret { + if lhs.kind != rhs.kind { return false } if lhs.amountMsat != rhs.amountMsat { @@ -1633,39 +3085,43 @@ extension PaymentDetails: Equatable, Hashable { if lhs.status != rhs.status { return false } + if lhs.latestUpdateTimestamp != rhs.latestUpdateTimestamp { + return false + } return true } public func hash(into hasher: inout Hasher) { - hasher.combine(hash) - hasher.combine(preimage) - hasher.combine(secret) + hasher.combine(id) + hasher.combine(kind) hasher.combine(amountMsat) hasher.combine(direction) hasher.combine(status) + hasher.combine(latestUpdateTimestamp) } } public struct FfiConverterTypePaymentDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentDetails { - return try PaymentDetails( - hash: FfiConverterTypePaymentHash.read(from: &buf), - preimage: FfiConverterOptionTypePaymentPreimage.read(from: &buf), - secret: FfiConverterOptionTypePaymentSecret.read(from: 
&buf), - amountMsat: FfiConverterOptionUInt64.read(from: &buf), - direction: FfiConverterTypePaymentDirection.read(from: &buf), - status: FfiConverterTypePaymentStatus.read(from: &buf) + return + try PaymentDetails( + id: FfiConverterTypePaymentId.read(from: &buf), + kind: FfiConverterTypePaymentKind.read(from: &buf), + amountMsat: FfiConverterOptionUInt64.read(from: &buf), + direction: FfiConverterTypePaymentDirection.read(from: &buf), + status: FfiConverterTypePaymentStatus.read(from: &buf), + latestUpdateTimestamp: FfiConverterUInt64.read(from: &buf) ) } public static func write(_ value: PaymentDetails, into buf: inout [UInt8]) { - FfiConverterTypePaymentHash.write(value.hash, into: &buf) - FfiConverterOptionTypePaymentPreimage.write(value.preimage, into: &buf) - FfiConverterOptionTypePaymentSecret.write(value.secret, into: &buf) + FfiConverterTypePaymentId.write(value.id, into: &buf) + FfiConverterTypePaymentKind.write(value.kind, into: &buf) FfiConverterOptionUInt64.write(value.amountMsat, into: &buf) FfiConverterTypePaymentDirection.write(value.direction, into: &buf) FfiConverterTypePaymentStatus.write(value.status, into: &buf) + FfiConverterUInt64.write(value.latestUpdateTimestamp, into: &buf) } } @@ -1687,7 +3143,11 @@ public struct PeerDetails { // Default memberwise initializers are never public by default, so we // declare one manually. - public init(nodeId: PublicKey, address: SocketAddress, isPersisted: Bool, isConnected: Bool) { + public init( + nodeId: PublicKey, + address: SocketAddress, + isPersisted: Bool, + isConnected: Bool) { self.nodeId = nodeId self.address = address self.isPersisted = isPersisted @@ -1724,11 +3184,12 @@ extension PeerDetails: Equatable, Hashable { public struct FfiConverterTypePeerDetails: FfiConverterRustBuffer { public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PeerDetails { - return try PeerDetails( - nodeId: FfiConverterTypePublicKey.read(from: &buf), - address: FfiConverterTypeSocketAddress.read(from: &buf), - isPersisted: FfiConverterBool.read(from: &buf), - isConnected: FfiConverterBool.read(from: &buf) + return + try PeerDetails( + nodeId: FfiConverterTypePublicKey.read(from: &buf), + address: FfiConverterTypeSocketAddress.read(from: &buf), + isPersisted: FfiConverterBool.read(from: &buf), + isConnected: FfiConverterBool.read(from: &buf) ) } @@ -1749,41 +3210,89 @@ public func FfiConverterTypePeerDetails_lower(_ value: PeerDetails) -> RustBuffe return FfiConverterTypePeerDetails.lower(value) } + +public struct RoutingFees { + public var baseMsat: UInt32 + public var proportionalMillionths: UInt32 + + // Default memberwise initializers are never public by default, so we + // declare one manually. 
+ public init( + baseMsat: UInt32, + proportionalMillionths: UInt32) { + self.baseMsat = baseMsat + self.proportionalMillionths = proportionalMillionths + } +} + + +extension RoutingFees: Equatable, Hashable { + public static func ==(lhs: RoutingFees, rhs: RoutingFees) -> Bool { + if lhs.baseMsat != rhs.baseMsat { + return false + } + if lhs.proportionalMillionths != rhs.proportionalMillionths { + return false + } + return true + } + + public func hash(into hasher: inout Hasher) { + hasher.combine(baseMsat) + hasher.combine(proportionalMillionths) + } +} + + +public struct FfiConverterTypeRoutingFees: FfiConverterRustBuffer { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> RoutingFees { + return + try RoutingFees( + baseMsat: FfiConverterUInt32.read(from: &buf), + proportionalMillionths: FfiConverterUInt32.read(from: &buf) + ) + } + + public static func write(_ value: RoutingFees, into buf: inout [UInt8]) { + FfiConverterUInt32.write(value.baseMsat, into: &buf) + FfiConverterUInt32.write(value.proportionalMillionths, into: &buf) + } +} + + +public func FfiConverterTypeRoutingFees_lift(_ buf: RustBuffer) throws -> RoutingFees { + return try FfiConverterTypeRoutingFees.lift(buf) +} + +public func FfiConverterTypeRoutingFees_lower(_ value: RoutingFees) -> RustBuffer { + return FfiConverterTypeRoutingFees.lower(value) +} + + public enum BuildError { - // Simple error enums only carry a message case InvalidSeedBytes(message: String) - // Simple error enums only carry a message case InvalidSeedFile(message: String) - // Simple error enums only carry a message case InvalidSystemTime(message: String) - // Simple error enums only carry a message case InvalidChannelMonitor(message: String) - // Simple error enums only carry a message case InvalidListeningAddresses(message: String) - // Simple error enums only carry a message case ReadFailed(message: String) - // Simple error enums only carry a message case WriteFailed(message: String) - // Simple error enums only carry a message case StoragePathAccessFailed(message: String) - // Simple error enums only carry a message case KvStoreSetupFailed(message: String) - // Simple error enums only carry a message case WalletSetupFailed(message: String) - // Simple error enums only carry a message case LoggerSetupFailed(message: String) @@ -1835,72 +3344,244 @@ public struct FfiConverterTypeBuildError: FfiConverterRustBuffer { message: try FfiConverterString.read(from: &buf) ) - case 9: return .KvStoreSetupFailed( - message: try FfiConverterString.read(from: &buf) - ) + case 9: return .KvStoreSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 10: return .WalletSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 11: return .LoggerSetupFailed( + message: try FfiConverterString.read(from: &buf) + ) + + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: BuildError, into buf: inout [UInt8]) { + switch value { + + + + + case .InvalidSeedBytes(_ /* message is ignored*/): + writeInt(&buf, Int32(1)) + case .InvalidSeedFile(_ /* message is ignored*/): + writeInt(&buf, Int32(2)) + case .InvalidSystemTime(_ /* message is ignored*/): + writeInt(&buf, Int32(3)) + case .InvalidChannelMonitor(_ /* message is ignored*/): + writeInt(&buf, Int32(4)) + case .InvalidListeningAddresses(_ /* message is ignored*/): + writeInt(&buf, Int32(5)) + case .ReadFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(6)) + case .WriteFailed(_ /* message 
is ignored*/): + writeInt(&buf, Int32(7)) + case .StoragePathAccessFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(8)) + case .KvStoreSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(9)) + case .WalletSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(10)) + case .LoggerSetupFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(11)) + + + } + } +} + + +extension BuildError: Equatable, Hashable {} + +extension BuildError: Error { } + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum ClosureReason { + + case counterpartyForceClosed( + peerMsg: UntrustedString + ) + case holderForceClosed + case legacyCooperativeClosure + case counterpartyInitiatedCooperativeClosure + case locallyInitiatedCooperativeClosure + case commitmentTxConfirmed + case fundingTimedOut + case processingError( + err: String + ) + case disconnectedPeer + case outdatedChannelManager + case counterpartyCoopClosedUnfundedChannel + case fundingBatchClosure + case htlCsTimedOut +} + +public struct FfiConverterTypeClosureReason: FfiConverterRustBuffer { + typealias SwiftType = ClosureReason + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> ClosureReason { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .counterpartyForceClosed( + peerMsg: try FfiConverterTypeUntrustedString.read(from: &buf) + ) + + case 2: return .holderForceClosed + + case 3: return .legacyCooperativeClosure + + case 4: return .counterpartyInitiatedCooperativeClosure + + case 5: return .locallyInitiatedCooperativeClosure + + case 6: return .commitmentTxConfirmed + + case 7: return .fundingTimedOut - case 10: return .WalletSetupFailed( - message: try FfiConverterString.read(from: &buf) + case 8: return .processingError( + err: try FfiConverterString.read(from: &buf) ) - case 11: return .LoggerSetupFailed( - message: try FfiConverterString.read(from: &buf) - ) + case 9: return .disconnectedPeer + + case 10: return .outdatedChannelManager + + case 11: return .counterpartyCoopClosedUnfundedChannel + + case 12: return .fundingBatchClosure + + case 13: return .htlCsTimedOut - default: throw UniffiInternalError.unexpectedEnumCase } } - public static func write(_ value: BuildError, into buf: inout [UInt8]) { + public static func write(_ value: ClosureReason, into buf: inout [UInt8]) { switch value { - - - case .InvalidSeedBytes(_ /* message is ignored*/): + case let .counterpartyForceClosed(peerMsg): writeInt(&buf, Int32(1)) - case .InvalidSeedFile(_ /* message is ignored*/): + FfiConverterTypeUntrustedString.write(peerMsg, into: &buf) + + + case .holderForceClosed: writeInt(&buf, Int32(2)) - case .InvalidSystemTime(_ /* message is ignored*/): + + + case .legacyCooperativeClosure: writeInt(&buf, Int32(3)) - case .InvalidChannelMonitor(_ /* message is ignored*/): + + + case .counterpartyInitiatedCooperativeClosure: writeInt(&buf, Int32(4)) - case .InvalidListeningAddresses(_ /* message is ignored*/): + + + case .locallyInitiatedCooperativeClosure: writeInt(&buf, Int32(5)) - case .ReadFailed(_ /* message is ignored*/): + + + case .commitmentTxConfirmed: writeInt(&buf, Int32(6)) - case .WriteFailed(_ /* message is ignored*/): + + + case .fundingTimedOut: writeInt(&buf, Int32(7)) - case .StoragePathAccessFailed(_ /* message is ignored*/): + + + case let .processingError(err): writeInt(&buf, Int32(8)) - case .KvStoreSetupFailed(_ /* message is ignored*/): + 
FfiConverterString.write(err, into: &buf) + + + case .disconnectedPeer: writeInt(&buf, Int32(9)) - case .WalletSetupFailed(_ /* message is ignored*/): + + + case .outdatedChannelManager: writeInt(&buf, Int32(10)) - case .LoggerSetupFailed(_ /* message is ignored*/): + + + case .counterpartyCoopClosedUnfundedChannel: writeInt(&buf, Int32(11)) - + + + case .fundingBatchClosure: + writeInt(&buf, Int32(12)) + + + case .htlCsTimedOut: + writeInt(&buf, Int32(13)) } } } -extension BuildError: Equatable, Hashable {} +public func FfiConverterTypeClosureReason_lift(_ buf: RustBuffer) throws -> ClosureReason { + return try FfiConverterTypeClosureReason.lift(buf) +} + +public func FfiConverterTypeClosureReason_lower(_ value: ClosureReason) -> RustBuffer { + return FfiConverterTypeClosureReason.lower(value) +} + + +extension ClosureReason: Equatable, Hashable {} + -extension BuildError: Error { } // Note that we don't yet support `indirect` for enums. // See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. public enum Event { - case paymentSuccessful(paymentHash: PaymentHash) - case paymentFailed(paymentHash: PaymentHash) - case paymentReceived(paymentHash: PaymentHash, amountMsat: UInt64) - case channelPending(channelId: ChannelId, userChannelId: UserChannelId, formerTemporaryChannelId: ChannelId, counterpartyNodeId: PublicKey, fundingTxo: OutPoint) - case channelReady(channelId: ChannelId, userChannelId: UserChannelId, counterpartyNodeId: PublicKey?) - case channelClosed(channelId: ChannelId, userChannelId: UserChannelId, counterpartyNodeId: PublicKey?) + case paymentSuccessful( + paymentId: PaymentId?, + paymentHash: PaymentHash, + feePaidMsat: UInt64? + ) + case paymentFailed( + paymentId: PaymentId?, + paymentHash: PaymentHash, + reason: PaymentFailureReason? + ) + case paymentReceived( + paymentId: PaymentId?, + paymentHash: PaymentHash, + amountMsat: UInt64 + ) + case paymentClaimable( + paymentId: PaymentId, + paymentHash: PaymentHash, + claimableAmountMsat: UInt64, + claimDeadline: UInt32? + ) + case channelPending( + channelId: ChannelId, + userChannelId: UserChannelId, + formerTemporaryChannelId: ChannelId, + counterpartyNodeId: PublicKey, + fundingTxo: OutPoint + ) + case channelReady( + channelId: ChannelId, + userChannelId: UserChannelId, + counterpartyNodeId: PublicKey? + ) + case channelClosed( + channelId: ChannelId, + userChannelId: UserChannelId, + counterpartyNodeId: PublicKey?, + reason: ClosureReason? 
+ ) } public struct FfiConverterTypeEvent: FfiConverterRustBuffer { @@ -1911,19 +3592,31 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { switch variant { case 1: return .paymentSuccessful( - paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + feePaidMsat: try FfiConverterOptionUInt64.read(from: &buf) ) case 2: return .paymentFailed( - paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + reason: try FfiConverterOptionTypePaymentFailureReason.read(from: &buf) ) case 3: return .paymentReceived( + paymentId: try FfiConverterOptionTypePaymentId.read(from: &buf), paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), amountMsat: try FfiConverterUInt64.read(from: &buf) ) - case 4: return .channelPending( + case 4: return .paymentClaimable( + paymentId: try FfiConverterTypePaymentId.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + claimableAmountMsat: try FfiConverterUInt64.read(from: &buf), + claimDeadline: try FfiConverterOptionUInt32.read(from: &buf) + ) + + case 5: return .channelPending( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), formerTemporaryChannelId: try FfiConverterTypeChannelId.read(from: &buf), @@ -1931,16 +3624,17 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { fundingTxo: try FfiConverterTypeOutPoint.read(from: &buf) ) - case 5: return .channelReady( + case 6: return .channelReady( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf) ) - case 6: return .channelClosed( + case 7: return .channelClosed( channelId: try FfiConverterTypeChannelId.read(from: &buf), userChannelId: try FfiConverterTypeUserChannelId.read(from: &buf), - counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf) + counterpartyNodeId: try FfiConverterOptionTypePublicKey.read(from: &buf), + reason: try FfiConverterOptionTypeClosureReason.read(from: &buf) ) default: throw UniffiInternalError.unexpectedEnumCase @@ -1951,24 +3645,37 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { switch value { - case let .paymentSuccessful(paymentHash): + case let .paymentSuccessful(paymentId,paymentHash,feePaidMsat): writeInt(&buf, Int32(1)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterOptionUInt64.write(feePaidMsat, into: &buf) - case let .paymentFailed(paymentHash): + case let .paymentFailed(paymentId,paymentHash,reason): writeInt(&buf, Int32(2)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterOptionTypePaymentFailureReason.write(reason, into: &buf) - case let .paymentReceived(paymentHash,amountMsat): + case let .paymentReceived(paymentId,paymentHash,amountMsat): writeInt(&buf, Int32(3)) + FfiConverterOptionTypePaymentId.write(paymentId, into: &buf) FfiConverterTypePaymentHash.write(paymentHash, into: &buf) FfiConverterUInt64.write(amountMsat, into: &buf) - case let 
.channelPending(channelId,userChannelId,formerTemporaryChannelId,counterpartyNodeId,fundingTxo): + case let .paymentClaimable(paymentId,paymentHash,claimableAmountMsat,claimDeadline): writeInt(&buf, Int32(4)) + FfiConverterTypePaymentId.write(paymentId, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterUInt64.write(claimableAmountMsat, into: &buf) + FfiConverterOptionUInt32.write(claimDeadline, into: &buf) + + + case let .channelPending(channelId,userChannelId,formerTemporaryChannelId,counterpartyNodeId,fundingTxo): + writeInt(&buf, Int32(5)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterTypeChannelId.write(formerTemporaryChannelId, into: &buf) @@ -1977,17 +3684,18 @@ public struct FfiConverterTypeEvent: FfiConverterRustBuffer { case let .channelReady(channelId,userChannelId,counterpartyNodeId): - writeInt(&buf, Int32(5)) + writeInt(&buf, Int32(6)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterOptionTypePublicKey.write(counterpartyNodeId, into: &buf) - case let .channelClosed(channelId,userChannelId,counterpartyNodeId): - writeInt(&buf, Int32(6)) + case let .channelClosed(channelId,userChannelId,counterpartyNodeId,reason): + writeInt(&buf, Int32(7)) FfiConverterTypeChannelId.write(channelId, into: &buf) FfiConverterTypeUserChannelId.write(userChannelId, into: &buf) FfiConverterOptionTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterOptionTypeClosureReason.write(reason, into: &buf) } } @@ -2007,6 +3715,176 @@ extension Event: Equatable, Hashable {} +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
+public enum LightningBalance { + + case claimableOnChannelClose( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64 + ) + case claimableAwaitingConfirmations( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + confirmationHeight: UInt32 + ) + case contentiousClaimable( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + timeoutHeight: UInt32, + paymentHash: PaymentHash, + paymentPreimage: PaymentPreimage + ) + case maybeTimeoutClaimableHtlc( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + claimableHeight: UInt32, + paymentHash: PaymentHash + ) + case maybePreimageClaimableHtlc( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64, + expiryHeight: UInt32, + paymentHash: PaymentHash + ) + case counterpartyRevokedOutputClaimable( + channelId: ChannelId, + counterpartyNodeId: PublicKey, + amountSatoshis: UInt64 + ) +} + +public struct FfiConverterTypeLightningBalance: FfiConverterRustBuffer { + typealias SwiftType = LightningBalance + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> LightningBalance { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .claimableOnChannelClose( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 2: return .claimableAwaitingConfirmations( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + confirmationHeight: try FfiConverterUInt32.read(from: &buf) + ) + + case 3: return .contentiousClaimable( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + timeoutHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf), + paymentPreimage: try FfiConverterTypePaymentPreimage.read(from: &buf) + ) + + case 4: return .maybeTimeoutClaimableHtlc( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + claimableHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + ) + + case 5: return .maybePreimageClaimableHtlc( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf), + expiryHeight: try FfiConverterUInt32.read(from: &buf), + paymentHash: try FfiConverterTypePaymentHash.read(from: &buf) + ) + + case 6: return .counterpartyRevokedOutputClaimable( + channelId: try FfiConverterTypeChannelId.read(from: &buf), + counterpartyNodeId: try FfiConverterTypePublicKey.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: LightningBalance, into buf: inout [UInt8]) { + switch value { + + + case let .claimableOnChannelClose(channelId,counterpartyNodeId,amountSatoshis): + writeInt(&buf, Int32(1)) + FfiConverterTypeChannelId.write(channelId, 
into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .claimableAwaitingConfirmations(channelId,counterpartyNodeId,amountSatoshis,confirmationHeight): + writeInt(&buf, Int32(2)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(confirmationHeight, into: &buf) + + + case let .contentiousClaimable(channelId,counterpartyNodeId,amountSatoshis,timeoutHeight,paymentHash,paymentPreimage): + writeInt(&buf, Int32(3)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(timeoutHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + FfiConverterTypePaymentPreimage.write(paymentPreimage, into: &buf) + + + case let .maybeTimeoutClaimableHtlc(channelId,counterpartyNodeId,amountSatoshis,claimableHeight,paymentHash): + writeInt(&buf, Int32(4)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(claimableHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + + + case let .maybePreimageClaimableHtlc(channelId,counterpartyNodeId,amountSatoshis,expiryHeight,paymentHash): + writeInt(&buf, Int32(5)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + FfiConverterUInt32.write(expiryHeight, into: &buf) + FfiConverterTypePaymentHash.write(paymentHash, into: &buf) + + + case let .counterpartyRevokedOutputClaimable(channelId,counterpartyNodeId,amountSatoshis): + writeInt(&buf, Int32(6)) + FfiConverterTypeChannelId.write(channelId, into: &buf) + FfiConverterTypePublicKey.write(counterpartyNodeId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + } + } +} + + +public func FfiConverterTypeLightningBalance_lift(_ buf: RustBuffer) throws -> LightningBalance { + return try FfiConverterTypeLightningBalance.lift(buf) +} + +public func FfiConverterTypeLightningBalance_lower(_ value: LightningBalance) -> RustBuffer { + return FfiConverterTypeLightningBalance.lower(value) +} + + +extension LightningBalance: Equatable, Hashable {} + + + // Note that we don't yet support `indirect` for enums. // See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
public enum LogLevel { @@ -2153,100 +4031,103 @@ extension Network: Equatable, Hashable {} + public enum NodeError { - // Simple error enums only carry a message case AlreadyRunning(message: String) - // Simple error enums only carry a message case NotRunning(message: String) - // Simple error enums only carry a message case OnchainTxCreationFailed(message: String) - // Simple error enums only carry a message case ConnectionFailed(message: String) - // Simple error enums only carry a message case InvoiceCreationFailed(message: String) - // Simple error enums only carry a message + case InvoiceRequestCreationFailed(message: String) + + case OfferCreationFailed(message: String) + + case RefundCreationFailed(message: String) + case PaymentSendingFailed(message: String) - // Simple error enums only carry a message case ProbeSendingFailed(message: String) - // Simple error enums only carry a message case ChannelCreationFailed(message: String) - // Simple error enums only carry a message case ChannelClosingFailed(message: String) - // Simple error enums only carry a message case ChannelConfigUpdateFailed(message: String) - // Simple error enums only carry a message case PersistenceFailed(message: String) - // Simple error enums only carry a message case FeerateEstimationUpdateFailed(message: String) - // Simple error enums only carry a message + case FeerateEstimationUpdateTimeout(message: String) + case WalletOperationFailed(message: String) - // Simple error enums only carry a message + case WalletOperationTimeout(message: String) + case OnchainTxSigningFailed(message: String) - // Simple error enums only carry a message case MessageSigningFailed(message: String) - // Simple error enums only carry a message case TxSyncFailed(message: String) - // Simple error enums only carry a message + case TxSyncTimeout(message: String) + case GossipUpdateFailed(message: String) - // Simple error enums only carry a message + case GossipUpdateTimeout(message: String) + + case LiquidityRequestFailed(message: String) + case InvalidAddress(message: String) - // Simple error enums only carry a message case InvalidSocketAddress(message: String) - // Simple error enums only carry a message case InvalidPublicKey(message: String) - // Simple error enums only carry a message case InvalidSecretKey(message: String) - // Simple error enums only carry a message + case InvalidOfferId(message: String) + + case InvalidNodeId(message: String) + + case InvalidPaymentId(message: String) + case InvalidPaymentHash(message: String) - // Simple error enums only carry a message case InvalidPaymentPreimage(message: String) - // Simple error enums only carry a message case InvalidPaymentSecret(message: String) - // Simple error enums only carry a message case InvalidAmount(message: String) - // Simple error enums only carry a message case InvalidInvoice(message: String) - // Simple error enums only carry a message + case InvalidOffer(message: String) + + case InvalidRefund(message: String) + case InvalidChannelId(message: String) - // Simple error enums only carry a message case InvalidNetwork(message: String) - // Simple error enums only carry a message case DuplicatePayment(message: String) - // Simple error enums only carry a message + case UnsupportedCurrency(message: String) + case InsufficientFunds(message: String) + case LiquiditySourceUnavailable(message: String) + + case LiquidityFeeTooHigh(message: String) + fileprivate static func uniffiErrorHandler(_ error: RustBuffer) throws -> Error { return try 
FfiConverterTypeNodeError.lift(error) @@ -2284,103 +4165,167 @@ public struct FfiConverterTypeNodeError: FfiConverterRustBuffer { message: try FfiConverterString.read(from: &buf) ) - case 6: return .PaymentSendingFailed( + case 6: return .InvoiceRequestCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 7: return .OfferCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 8: return .RefundCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 9: return .PaymentSendingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 10: return .ProbeSendingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 11: return .ChannelCreationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 12: return .ChannelClosingFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 13: return .ChannelConfigUpdateFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 14: return .PersistenceFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 15: return .FeerateEstimationUpdateFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 16: return .FeerateEstimationUpdateTimeout( + message: try FfiConverterString.read(from: &buf) + ) + + case 17: return .WalletOperationFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 18: return .WalletOperationTimeout( + message: try FfiConverterString.read(from: &buf) + ) + + case 19: return .OnchainTxSigningFailed( + message: try FfiConverterString.read(from: &buf) + ) + + case 20: return .MessageSigningFailed( message: try FfiConverterString.read(from: &buf) ) - case 7: return .ProbeSendingFailed( + case 21: return .TxSyncFailed( message: try FfiConverterString.read(from: &buf) ) - case 8: return .ChannelCreationFailed( + case 22: return .TxSyncTimeout( message: try FfiConverterString.read(from: &buf) ) - case 9: return .ChannelClosingFailed( + case 23: return .GossipUpdateFailed( message: try FfiConverterString.read(from: &buf) ) - case 10: return .ChannelConfigUpdateFailed( + case 24: return .GossipUpdateTimeout( message: try FfiConverterString.read(from: &buf) ) - case 11: return .PersistenceFailed( + case 25: return .LiquidityRequestFailed( message: try FfiConverterString.read(from: &buf) ) - case 12: return .FeerateEstimationUpdateFailed( + case 26: return .InvalidAddress( message: try FfiConverterString.read(from: &buf) ) - case 13: return .WalletOperationFailed( + case 27: return .InvalidSocketAddress( message: try FfiConverterString.read(from: &buf) ) - case 14: return .OnchainTxSigningFailed( + case 28: return .InvalidPublicKey( message: try FfiConverterString.read(from: &buf) ) - case 15: return .MessageSigningFailed( + case 29: return .InvalidSecretKey( message: try FfiConverterString.read(from: &buf) ) - case 16: return .TxSyncFailed( + case 30: return .InvalidOfferId( message: try FfiConverterString.read(from: &buf) ) - case 17: return .GossipUpdateFailed( + case 31: return .InvalidNodeId( message: try FfiConverterString.read(from: &buf) ) - case 18: return .InvalidAddress( + case 32: return .InvalidPaymentId( message: try FfiConverterString.read(from: &buf) ) - case 19: return .InvalidSocketAddress( + case 33: return .InvalidPaymentHash( message: try FfiConverterString.read(from: &buf) ) - case 20: return .InvalidPublicKey( + case 34: return .InvalidPaymentPreimage( message: try FfiConverterString.read(from: &buf) ) - case 21: return .InvalidSecretKey( 
+ case 35: return .InvalidPaymentSecret( message: try FfiConverterString.read(from: &buf) ) - case 22: return .InvalidPaymentHash( + case 36: return .InvalidAmount( message: try FfiConverterString.read(from: &buf) ) - case 23: return .InvalidPaymentPreimage( + case 37: return .InvalidInvoice( message: try FfiConverterString.read(from: &buf) ) - case 24: return .InvalidPaymentSecret( + case 38: return .InvalidOffer( message: try FfiConverterString.read(from: &buf) ) - case 25: return .InvalidAmount( + case 39: return .InvalidRefund( message: try FfiConverterString.read(from: &buf) ) - case 26: return .InvalidInvoice( + case 40: return .InvalidChannelId( message: try FfiConverterString.read(from: &buf) ) - case 27: return .InvalidChannelId( + case 41: return .InvalidNetwork( message: try FfiConverterString.read(from: &buf) ) - case 28: return .InvalidNetwork( + case 42: return .DuplicatePayment( message: try FfiConverterString.read(from: &buf) ) - case 29: return .DuplicatePayment( + case 43: return .UnsupportedCurrency( message: try FfiConverterString.read(from: &buf) ) - case 30: return .InsufficientFunds( + case 44: return .InsufficientFunds( + message: try FfiConverterString.read(from: &buf) + ) + + case 45: return .LiquiditySourceUnavailable( + message: try FfiConverterString.read(from: &buf) + ) + + case 46: return .LiquidityFeeTooHigh( message: try FfiConverterString.read(from: &buf) ) @@ -2405,56 +4350,88 @@ public struct FfiConverterTypeNodeError: FfiConverterRustBuffer { writeInt(&buf, Int32(4)) case .InvoiceCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(5)) - case .PaymentSendingFailed(_ /* message is ignored*/): + case .InvoiceRequestCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(6)) - case .ProbeSendingFailed(_ /* message is ignored*/): + case .OfferCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(7)) - case .ChannelCreationFailed(_ /* message is ignored*/): + case .RefundCreationFailed(_ /* message is ignored*/): writeInt(&buf, Int32(8)) - case .ChannelClosingFailed(_ /* message is ignored*/): + case .PaymentSendingFailed(_ /* message is ignored*/): writeInt(&buf, Int32(9)) + case .ProbeSendingFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(10)) + case .ChannelCreationFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(11)) + case .ChannelClosingFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(12)) case .ChannelConfigUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(10)) + writeInt(&buf, Int32(13)) case .PersistenceFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(11)) + writeInt(&buf, Int32(14)) case .FeerateEstimationUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(12)) + writeInt(&buf, Int32(15)) + case .FeerateEstimationUpdateTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(16)) case .WalletOperationFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(13)) + writeInt(&buf, Int32(17)) + case .WalletOperationTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(18)) case .OnchainTxSigningFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(14)) + writeInt(&buf, Int32(19)) case .MessageSigningFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(15)) + writeInt(&buf, Int32(20)) case .TxSyncFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(16)) + writeInt(&buf, Int32(21)) + case .TxSyncTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(22)) case .GossipUpdateFailed(_ /* message is ignored*/): - writeInt(&buf, Int32(17)) + 
writeInt(&buf, Int32(23)) + case .GossipUpdateTimeout(_ /* message is ignored*/): + writeInt(&buf, Int32(24)) + case .LiquidityRequestFailed(_ /* message is ignored*/): + writeInt(&buf, Int32(25)) case .InvalidAddress(_ /* message is ignored*/): - writeInt(&buf, Int32(18)) + writeInt(&buf, Int32(26)) case .InvalidSocketAddress(_ /* message is ignored*/): - writeInt(&buf, Int32(19)) + writeInt(&buf, Int32(27)) case .InvalidPublicKey(_ /* message is ignored*/): - writeInt(&buf, Int32(20)) + writeInt(&buf, Int32(28)) case .InvalidSecretKey(_ /* message is ignored*/): - writeInt(&buf, Int32(21)) + writeInt(&buf, Int32(29)) + case .InvalidOfferId(_ /* message is ignored*/): + writeInt(&buf, Int32(30)) + case .InvalidNodeId(_ /* message is ignored*/): + writeInt(&buf, Int32(31)) + case .InvalidPaymentId(_ /* message is ignored*/): + writeInt(&buf, Int32(32)) case .InvalidPaymentHash(_ /* message is ignored*/): - writeInt(&buf, Int32(22)) + writeInt(&buf, Int32(33)) case .InvalidPaymentPreimage(_ /* message is ignored*/): - writeInt(&buf, Int32(23)) + writeInt(&buf, Int32(34)) case .InvalidPaymentSecret(_ /* message is ignored*/): - writeInt(&buf, Int32(24)) + writeInt(&buf, Int32(35)) case .InvalidAmount(_ /* message is ignored*/): - writeInt(&buf, Int32(25)) + writeInt(&buf, Int32(36)) case .InvalidInvoice(_ /* message is ignored*/): - writeInt(&buf, Int32(26)) + writeInt(&buf, Int32(37)) + case .InvalidOffer(_ /* message is ignored*/): + writeInt(&buf, Int32(38)) + case .InvalidRefund(_ /* message is ignored*/): + writeInt(&buf, Int32(39)) case .InvalidChannelId(_ /* message is ignored*/): - writeInt(&buf, Int32(27)) + writeInt(&buf, Int32(40)) case .InvalidNetwork(_ /* message is ignored*/): - writeInt(&buf, Int32(28)) + writeInt(&buf, Int32(41)) case .DuplicatePayment(_ /* message is ignored*/): - writeInt(&buf, Int32(29)) + writeInt(&buf, Int32(42)) + case .UnsupportedCurrency(_ /* message is ignored*/): + writeInt(&buf, Int32(43)) case .InsufficientFunds(_ /* message is ignored*/): - writeInt(&buf, Int32(30)) + writeInt(&buf, Int32(44)) + case .LiquiditySourceUnavailable(_ /* message is ignored*/): + writeInt(&buf, Int32(45)) + case .LiquidityFeeTooHigh(_ /* message is ignored*/): + writeInt(&buf, Int32(46)) } @@ -2518,6 +4495,224 @@ extension PaymentDirection: Equatable, Hashable {} +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. 
+public enum PaymentFailureReason { + + case recipientRejected + case userAbandoned + case retriesExhausted + case paymentExpired + case routeNotFound + case unexpectedError +} + +public struct FfiConverterTypePaymentFailureReason: FfiConverterRustBuffer { + typealias SwiftType = PaymentFailureReason + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentFailureReason { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .recipientRejected + + case 2: return .userAbandoned + + case 3: return .retriesExhausted + + case 4: return .paymentExpired + + case 5: return .routeNotFound + + case 6: return .unexpectedError + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PaymentFailureReason, into buf: inout [UInt8]) { + switch value { + + + case .recipientRejected: + writeInt(&buf, Int32(1)) + + + case .userAbandoned: + writeInt(&buf, Int32(2)) + + + case .retriesExhausted: + writeInt(&buf, Int32(3)) + + + case .paymentExpired: + writeInt(&buf, Int32(4)) + + + case .routeNotFound: + writeInt(&buf, Int32(5)) + + + case .unexpectedError: + writeInt(&buf, Int32(6)) + + } + } +} + + +public func FfiConverterTypePaymentFailureReason_lift(_ buf: RustBuffer) throws -> PaymentFailureReason { + return try FfiConverterTypePaymentFailureReason.lift(buf) +} + +public func FfiConverterTypePaymentFailureReason_lower(_ value: PaymentFailureReason) -> RustBuffer { + return FfiConverterTypePaymentFailureReason.lower(value) +} + + +extension PaymentFailureReason: Equatable, Hashable {} + + + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum PaymentKind { + + case onchain + case bolt11( + hash: PaymentHash, + preimage: PaymentPreimage?, + secret: PaymentSecret? + ) + case bolt11Jit( + hash: PaymentHash, + preimage: PaymentPreimage?, + secret: PaymentSecret?, + lspFeeLimits: LspFeeLimits + ) + case bolt12Offer( + hash: PaymentHash?, + preimage: PaymentPreimage?, + secret: PaymentSecret?, + offerId: OfferId + ) + case bolt12Refund( + hash: PaymentHash?, + preimage: PaymentPreimage?, + secret: PaymentSecret? + ) + case spontaneous( + hash: PaymentHash, + preimage: PaymentPreimage? 
+ ) +} + +public struct FfiConverterTypePaymentKind: FfiConverterRustBuffer { + typealias SwiftType = PaymentKind + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentKind { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .onchain + + case 2: return .bolt11( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf) + ) + + case 3: return .bolt11Jit( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf), + lspFeeLimits: try FfiConverterTypeLSPFeeLimits.read(from: &buf) + ) + + case 4: return .bolt12Offer( + hash: try FfiConverterOptionTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf), + offerId: try FfiConverterTypeOfferId.read(from: &buf) + ) + + case 5: return .bolt12Refund( + hash: try FfiConverterOptionTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf), + secret: try FfiConverterOptionTypePaymentSecret.read(from: &buf) + ) + + case 6: return .spontaneous( + hash: try FfiConverterTypePaymentHash.read(from: &buf), + preimage: try FfiConverterOptionTypePaymentPreimage.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PaymentKind, into buf: inout [UInt8]) { + switch value { + + + case .onchain: + writeInt(&buf, Int32(1)) + + + case let .bolt11(hash,preimage,secret): + writeInt(&buf, Int32(2)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + + + case let .bolt11Jit(hash,preimage,secret,lspFeeLimits): + writeInt(&buf, Int32(3)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + FfiConverterTypeLSPFeeLimits.write(lspFeeLimits, into: &buf) + + + case let .bolt12Offer(hash,preimage,secret,offerId): + writeInt(&buf, Int32(4)) + FfiConverterOptionTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + FfiConverterTypeOfferId.write(offerId, into: &buf) + + + case let .bolt12Refund(hash,preimage,secret): + writeInt(&buf, Int32(5)) + FfiConverterOptionTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + FfiConverterOptionTypePaymentSecret.write(secret, into: &buf) + + + case let .spontaneous(hash,preimage): + writeInt(&buf, Int32(6)) + FfiConverterTypePaymentHash.write(hash, into: &buf) + FfiConverterOptionTypePaymentPreimage.write(preimage, into: &buf) + + } + } +} + + +public func FfiConverterTypePaymentKind_lift(_ buf: RustBuffer) throws -> PaymentKind { + return try FfiConverterTypePaymentKind.lift(buf) +} + +public func FfiConverterTypePaymentKind_lower(_ value: PaymentKind) -> RustBuffer { + return FfiConverterTypePaymentKind.lower(value) +} + + +extension PaymentKind: Equatable, Hashable {} + + + // Note that we don't yet support `indirect` for enums. 
// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. public enum PaymentStatus { @@ -2564,21 +4759,224 @@ public struct FfiConverterTypePaymentStatus: FfiConverterRustBuffer { } -public func FfiConverterTypePaymentStatus_lift(_ buf: RustBuffer) throws -> PaymentStatus { - return try FfiConverterTypePaymentStatus.lift(buf) -} +public func FfiConverterTypePaymentStatus_lift(_ buf: RustBuffer) throws -> PaymentStatus { + return try FfiConverterTypePaymentStatus.lift(buf) +} + +public func FfiConverterTypePaymentStatus_lower(_ value: PaymentStatus) -> RustBuffer { + return FfiConverterTypePaymentStatus.lower(value) +} + + +extension PaymentStatus: Equatable, Hashable {} + + + +// Note that we don't yet support `indirect` for enums. +// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion. +public enum PendingSweepBalance { + + case pendingBroadcast( + channelId: ChannelId?, + amountSatoshis: UInt64 + ) + case broadcastAwaitingConfirmation( + channelId: ChannelId?, + latestBroadcastHeight: UInt32, + latestSpendingTxid: Txid, + amountSatoshis: UInt64 + ) + case awaitingThresholdConfirmations( + channelId: ChannelId?, + latestSpendingTxid: Txid, + confirmationHash: BlockHash, + confirmationHeight: UInt32, + amountSatoshis: UInt64 + ) +} + +public struct FfiConverterTypePendingSweepBalance: FfiConverterRustBuffer { + typealias SwiftType = PendingSweepBalance + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PendingSweepBalance { + let variant: Int32 = try readInt(&buf) + switch variant { + + case 1: return .pendingBroadcast( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 2: return .broadcastAwaitingConfirmation( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + latestBroadcastHeight: try FfiConverterUInt32.read(from: &buf), + latestSpendingTxid: try FfiConverterTypeTxid.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + case 3: return .awaitingThresholdConfirmations( + channelId: try FfiConverterOptionTypeChannelId.read(from: &buf), + latestSpendingTxid: try FfiConverterTypeTxid.read(from: &buf), + confirmationHash: try FfiConverterTypeBlockHash.read(from: &buf), + confirmationHeight: try FfiConverterUInt32.read(from: &buf), + amountSatoshis: try FfiConverterUInt64.read(from: &buf) + ) + + default: throw UniffiInternalError.unexpectedEnumCase + } + } + + public static func write(_ value: PendingSweepBalance, into buf: inout [UInt8]) { + switch value { + + + case let .pendingBroadcast(channelId,amountSatoshis): + writeInt(&buf, Int32(1)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .broadcastAwaitingConfirmation(channelId,latestBroadcastHeight,latestSpendingTxid,amountSatoshis): + writeInt(&buf, Int32(2)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterUInt32.write(latestBroadcastHeight, into: &buf) + FfiConverterTypeTxid.write(latestSpendingTxid, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + + case let .awaitingThresholdConfirmations(channelId,latestSpendingTxid,confirmationHash,confirmationHeight,amountSatoshis): + writeInt(&buf, Int32(3)) + FfiConverterOptionTypeChannelId.write(channelId, into: &buf) + FfiConverterTypeTxid.write(latestSpendingTxid, into: &buf) + FfiConverterTypeBlockHash.write(confirmationHash, into: &buf) + 
FfiConverterUInt32.write(confirmationHeight, into: &buf) + FfiConverterUInt64.write(amountSatoshis, into: &buf) + + } + } +} + + +public func FfiConverterTypePendingSweepBalance_lift(_ buf: RustBuffer) throws -> PendingSweepBalance { + return try FfiConverterTypePendingSweepBalance.lift(buf) +} + +public func FfiConverterTypePendingSweepBalance_lower(_ value: PendingSweepBalance) -> RustBuffer { + return FfiConverterTypePendingSweepBalance.lower(value) +} + + +extension PendingSweepBalance: Equatable, Hashable {} + + + +fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { + typealias SwiftType = UInt16? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt16.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt16.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { + typealias SwiftType = UInt32? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt32.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt32.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { + typealias SwiftType = UInt64? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterUInt64.write(value, into: &buf) + } -public func FfiConverterTypePaymentStatus_lower(_ value: PaymentStatus) -> RustBuffer { - return FfiConverterTypePaymentStatus.lower(value) + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterUInt64.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } } +fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { + typealias SwiftType = String? -extension PaymentStatus: Equatable, Hashable {} + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterString.write(value, into: &buf) + } + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterString.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} +fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { + typealias SwiftType = ChannelConfig? -fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { - typealias SwiftType = UInt16? 
+ public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeChannelConfig.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeChannelConfig.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypeAnchorChannelsConfig: FfiConverterRustBuffer { + typealias SwiftType = AnchorChannelsConfig? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2586,20 +4984,20 @@ fileprivate struct FfiConverterOptionUInt16: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt16.write(value, into: &buf) + FfiConverterTypeAnchorChannelsConfig.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt16.read(from: &buf) + case 1: return try FfiConverterTypeAnchorChannelsConfig.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { - typealias SwiftType = UInt32? +fileprivate struct FfiConverterOptionTypeChannelInfo: FfiConverterRustBuffer { + typealias SwiftType = ChannelInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2607,20 +5005,20 @@ fileprivate struct FfiConverterOptionUInt32: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt32.write(value, into: &buf) + FfiConverterTypeChannelInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt32.read(from: &buf) + case 1: return try FfiConverterTypeChannelInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { - typealias SwiftType = UInt64? +fileprivate struct FfiConverterOptionTypeChannelUpdateInfo: FfiConverterRustBuffer { + typealias SwiftType = ChannelUpdateInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2628,20 +5026,20 @@ fileprivate struct FfiConverterOptionUInt64: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterUInt64.write(value, into: &buf) + FfiConverterTypeChannelUpdateInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterUInt64.read(from: &buf) + case 1: return try FfiConverterTypeChannelUpdateInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { - typealias SwiftType = String? +fileprivate struct FfiConverterOptionTypeNodeAnnouncementInfo: FfiConverterRustBuffer { + typealias SwiftType = NodeAnnouncementInfo? 
public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2649,20 +5047,20 @@ fileprivate struct FfiConverterOptionString: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterString.write(value, into: &buf) + FfiConverterTypeNodeAnnouncementInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterString.read(from: &buf) + case 1: return try FfiConverterTypeNodeAnnouncementInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } } -fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { - typealias SwiftType = ChannelConfig? +fileprivate struct FfiConverterOptionTypeNodeInfo: FfiConverterRustBuffer { + typealias SwiftType = NodeInfo? public static func write(_ value: SwiftType, into buf: inout [UInt8]) { guard let value = value else { @@ -2670,13 +5068,13 @@ fileprivate struct FfiConverterOptionTypeChannelConfig: FfiConverterRustBuffer { return } writeInt(&buf, Int8(1)) - FfiConverterTypeChannelConfig.write(value, into: &buf) + FfiConverterTypeNodeInfo.write(value, into: &buf) } public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { switch try readInt(&buf) as Int8 { case 0: return nil - case 1: return try FfiConverterTypeChannelConfig.read(from: &buf) + case 1: return try FfiConverterTypeNodeInfo.read(from: &buf) default: throw UniffiInternalError.unexpectedOptionalTag } } @@ -2724,6 +5122,27 @@ fileprivate struct FfiConverterOptionTypePaymentDetails: FfiConverterRustBuffer } } +fileprivate struct FfiConverterOptionTypeClosureReason: FfiConverterRustBuffer { + typealias SwiftType = ClosureReason? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeClosureReason.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeClosureReason.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionTypeEvent: FfiConverterRustBuffer { typealias SwiftType = Event? @@ -2745,6 +5164,27 @@ fileprivate struct FfiConverterOptionTypeEvent: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterOptionTypePaymentFailureReason: FfiConverterRustBuffer { + typealias SwiftType = PaymentFailureReason? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentFailureReason.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentFailureReason.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionSequenceTypeSocketAddress: FfiConverterRustBuffer { typealias SwiftType = [SocketAddress]? 
@@ -2766,6 +5206,69 @@ fileprivate struct FfiConverterOptionSequenceTypeSocketAddress: FfiConverterRust } } +fileprivate struct FfiConverterOptionTypeChannelId: FfiConverterRustBuffer { + typealias SwiftType = ChannelId? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypeChannelId.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypeChannelId.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypePaymentHash: FfiConverterRustBuffer { + typealias SwiftType = PaymentHash? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentHash.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentHash.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + +fileprivate struct FfiConverterOptionTypePaymentId: FfiConverterRustBuffer { + typealias SwiftType = PaymentId? + + public static func write(_ value: SwiftType, into buf: inout [UInt8]) { + guard let value = value else { + writeInt(&buf, Int8(0)) + return + } + writeInt(&buf, Int8(1)) + FfiConverterTypePaymentId.write(value, into: &buf) + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> SwiftType { + switch try readInt(&buf) as Int8 { + case 0: return nil + case 1: return try FfiConverterTypePaymentId.read(from: &buf) + default: throw UniffiInternalError.unexpectedOptionalTag + } + } +} + fileprivate struct FfiConverterOptionTypePaymentPreimage: FfiConverterRustBuffer { typealias SwiftType = PaymentPreimage? 
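(Editor's note, not part of the diff: the generated `FfiConverterOption*` helpers in the hunks above, and the `FfiConverterSequence*` helpers in the hunk that follows, all use the same RustBuffer wire conventions — an optional is a one-byte tag, 0 for nil or 1 followed by the payload, and a sequence is a 32-bit element count followed by the elements, with integers written big-endian. A rough, self-contained Swift sketch of those two conventions is below; the helper names are illustrative only and the real code goes through the shared `writeInt`/`readInt` and `FfiConverterRustBuffer` machinery defined earlier in this file.)

// Illustrative sketch of the optional encoding: tag byte, then the value.
func writeOptionalUInt32(_ value: UInt32?, into buf: inout [UInt8]) {
    guard let value = value else {
        buf.append(0)                 // tag 0: nil
        return
    }
    buf.append(1)                     // tag 1: a value follows
    withUnsafeBytes(of: value.bigEndian) { buf.append(contentsOf: $0) }
}

// Illustrative sketch of the sequence encoding: Int32 count, then each element.
func writeUInt64Sequence(_ values: [UInt64], into buf: inout [UInt8]) {
    withUnsafeBytes(of: Int32(values.count).bigEndian) { buf.append(contentsOf: $0) }
    for v in values {
        withUnsafeBytes(of: v.bigEndian) { buf.append(contentsOf: $0) }
    }
}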
@@ -2851,6 +5354,28 @@ fileprivate struct FfiConverterSequenceUInt8: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterSequenceUInt64: FfiConverterRustBuffer { + typealias SwiftType = [UInt64] + + public static func write(_ value: [UInt64], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterUInt64.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [UInt64] { + let len: Int32 = try readInt(&buf) + var seq = [UInt64]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterUInt64.read(from: &buf)) + } + return seq + } +} + fileprivate struct FfiConverterSequenceTypeChannelDetails: FfiConverterRustBuffer { typealias SwiftType = [ChannelDetails] @@ -2917,6 +5442,72 @@ fileprivate struct FfiConverterSequenceTypePeerDetails: FfiConverterRustBuffer { } } +fileprivate struct FfiConverterSequenceTypeLightningBalance: FfiConverterRustBuffer { + typealias SwiftType = [LightningBalance] + + public static func write(_ value: [LightningBalance], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypeLightningBalance.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [LightningBalance] { + let len: Int32 = try readInt(&buf) + var seq = [LightningBalance]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypeLightningBalance.read(from: &buf)) + } + return seq + } +} + +fileprivate struct FfiConverterSequenceTypePendingSweepBalance: FfiConverterRustBuffer { + typealias SwiftType = [PendingSweepBalance] + + public static func write(_ value: [PendingSweepBalance], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypePendingSweepBalance.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [PendingSweepBalance] { + let len: Int32 = try readInt(&buf) + var seq = [PendingSweepBalance]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypePendingSweepBalance.read(from: &buf)) + } + return seq + } +} + +fileprivate struct FfiConverterSequenceTypeNodeId: FfiConverterRustBuffer { + typealias SwiftType = [NodeId] + + public static func write(_ value: [NodeId], into buf: inout [UInt8]) { + let len = Int32(value.count) + writeInt(&buf, len) + for item in value { + FfiConverterTypeNodeId.write(item, into: &buf) + } + } + + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [NodeId] { + let len: Int32 = try readInt(&buf) + var seq = [NodeId]() + seq.reserveCapacity(Int(len)) + for _ in 0 ..< len { + seq.append(try FfiConverterTypeNodeId.read(from: &buf)) + } + return seq + } +} + fileprivate struct FfiConverterSequenceTypePublicKey: FfiConverterRustBuffer { typealias SwiftType = [PublicKey] @@ -2966,32 +5557,66 @@ fileprivate struct FfiConverterSequenceTypeSocketAddress: FfiConverterRustBuffer * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. 
*/ -public typealias Address = String -public struct FfiConverterTypeAddress: FfiConverter { - public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Address { +public typealias Address = String +public struct FfiConverterTypeAddress: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Address { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Address, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Address { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Address) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeAddress_lift(_ value: RustBuffer) throws -> Address { + return try FfiConverterTypeAddress.lift(value) +} + +public func FfiConverterTypeAddress_lower(_ value: Address) -> RustBuffer { + return FfiConverterTypeAddress.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias BlockHash = String +public struct FfiConverterTypeBlockHash: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> BlockHash { return try FfiConverterString.read(from: &buf) } - public static func write(_ value: Address, into buf: inout [UInt8]) { + public static func write(_ value: BlockHash, into buf: inout [UInt8]) { return FfiConverterString.write(value, into: &buf) } - public static func lift(_ value: RustBuffer) throws -> Address { + public static func lift(_ value: RustBuffer) throws -> BlockHash { return try FfiConverterString.lift(value) } - public static func lower(_ value: Address) -> RustBuffer { + public static func lower(_ value: BlockHash) -> RustBuffer { return FfiConverterString.lower(value) } } -public func FfiConverterTypeAddress_lift(_ value: RustBuffer) throws -> Address { - return try FfiConverterTypeAddress.lift(value) +public func FfiConverterTypeBlockHash_lift(_ value: RustBuffer) throws -> BlockHash { + return try FfiConverterTypeBlockHash.lift(value) } -public func FfiConverterTypeAddress_lower(_ value: Address) -> RustBuffer { - return FfiConverterTypeAddress.lower(value) +public func FfiConverterTypeBlockHash_lower(_ value: BlockHash) -> RustBuffer { + return FfiConverterTypeBlockHash.lower(value) } @@ -3030,6 +5655,40 @@ public func FfiConverterTypeBolt11Invoice_lower(_ value: Bolt11Invoice) -> RustB +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias Bolt12Invoice = String +public struct FfiConverterTypeBolt12Invoice: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Bolt12Invoice { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Bolt12Invoice, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Bolt12Invoice { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Bolt12Invoice) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeBolt12Invoice_lift(_ value: RustBuffer) throws -> Bolt12Invoice { + return try FfiConverterTypeBolt12Invoice.lift(value) +} + +public func FfiConverterTypeBolt12Invoice_lower(_ value: Bolt12Invoice) -> RustBuffer { + return FfiConverterTypeBolt12Invoice.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3098,6 +5757,108 @@ public func FfiConverterTypeMnemonic_lower(_ value: Mnemonic) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias NodeId = String +public struct FfiConverterTypeNodeId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> NodeId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: NodeId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> NodeId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: NodeId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeNodeId_lift(_ value: RustBuffer) throws -> NodeId { + return try FfiConverterTypeNodeId.lift(value) +} + +public func FfiConverterTypeNodeId_lower(_ value: NodeId) -> RustBuffer { + return FfiConverterTypeNodeId.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias Offer = String +public struct FfiConverterTypeOffer: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Offer { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Offer, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Offer { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Offer) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeOffer_lift(_ value: RustBuffer) throws -> Offer { + return try FfiConverterTypeOffer.lift(value) +} + +public func FfiConverterTypeOffer_lower(_ value: Offer) -> RustBuffer { + return FfiConverterTypeOffer.lower(value) +} + + + +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias OfferId = String +public struct FfiConverterTypeOfferId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> OfferId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: OfferId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> OfferId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: OfferId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeOfferId_lift(_ value: RustBuffer) throws -> OfferId { + return try FfiConverterTypeOfferId.lift(value) +} + +public func FfiConverterTypeOfferId_lower(_ value: OfferId) -> RustBuffer { + return FfiConverterTypeOfferId.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3132,6 +5893,40 @@ public func FfiConverterTypePaymentHash_lower(_ value: PaymentHash) -> RustBuffe +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias PaymentId = String +public struct FfiConverterTypePaymentId: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> PaymentId { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: PaymentId, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> PaymentId { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: PaymentId) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypePaymentId_lift(_ value: RustBuffer) throws -> PaymentId { + return try FfiConverterTypePaymentId.lift(value) +} + +public func FfiConverterTypePaymentId_lower(_ value: PaymentId) -> RustBuffer { + return FfiConverterTypePaymentId.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3234,6 +6029,40 @@ public func FfiConverterTypePublicKey_lower(_ value: PublicKey) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. 
+ */ +public typealias Refund = String +public struct FfiConverterTypeRefund: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> Refund { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: Refund, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> Refund { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: Refund) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeRefund_lift(_ value: RustBuffer) throws -> Refund { + return try FfiConverterTypeRefund.lift(value) +} + +public func FfiConverterTypeRefund_lower(_ value: Refund) -> RustBuffer { + return FfiConverterTypeRefund.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3302,6 +6131,40 @@ public func FfiConverterTypeTxid_lower(_ value: Txid) -> RustBuffer { +/** + * Typealias from the type name used in the UDL file to the builtin type. This + * is needed because the UDL type name is used in function/method signatures. + */ +public typealias UntrustedString = String +public struct FfiConverterTypeUntrustedString: FfiConverter { + public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> UntrustedString { + return try FfiConverterString.read(from: &buf) + } + + public static func write(_ value: UntrustedString, into buf: inout [UInt8]) { + return FfiConverterString.write(value, into: &buf) + } + + public static func lift(_ value: RustBuffer) throws -> UntrustedString { + return try FfiConverterString.lift(value) + } + + public static func lower(_ value: UntrustedString) -> RustBuffer { + return FfiConverterString.lower(value) + } +} + + +public func FfiConverterTypeUntrustedString_lift(_ value: RustBuffer) throws -> UntrustedString { + return try FfiConverterTypeUntrustedString.lift(value) +} + +public func FfiConverterTypeUntrustedString_lower(_ value: UntrustedString) -> RustBuffer { + return FfiConverterTypeUntrustedString.lower(value) +} + + + /** * Typealias from the type name used in the UDL file to the builtin type. This * is needed because the UDL type name is used in function/method signatures. @@ -3334,7 +6197,71 @@ public func FfiConverterTypeUserChannelId_lower(_ value: UserChannelId) -> RustB return FfiConverterTypeUserChannelId.lower(value) } +private let UNIFFI_RUST_FUTURE_POLL_READY: Int8 = 0 +private let UNIFFI_RUST_FUTURE_POLL_MAYBE_READY: Int8 = 1 + +fileprivate func uniffiRustCallAsync( + rustFutureFunc: () -> UnsafeMutableRawPointer, + pollFunc: (UnsafeMutableRawPointer, @escaping UniFfiRustFutureContinuation, UnsafeMutableRawPointer) -> (), + completeFunc: (UnsafeMutableRawPointer, UnsafeMutablePointer) -> F, + freeFunc: (UnsafeMutableRawPointer) -> (), + liftFunc: (F) throws -> T, + errorHandler: ((RustBuffer) throws -> Error)? 
+) async throws -> T { + // Make sure to call uniffiEnsureInitialized() since future creation doesn't have a + // RustCallStatus param, so doesn't use makeRustCall() + uniffiEnsureInitialized() + let rustFuture = rustFutureFunc() + defer { + freeFunc(rustFuture) + } + var pollResult: Int8; + repeat { + pollResult = await withUnsafeContinuation { + pollFunc(rustFuture, uniffiFutureContinuationCallback, ContinuationHolder($0).toOpaque()) + } + } while pollResult != UNIFFI_RUST_FUTURE_POLL_READY + + return try liftFunc(makeRustCall( + { completeFunc(rustFuture, $0) }, + errorHandler: errorHandler + )) +} + +// Callback handlers for an async calls. These are invoked by Rust when the future is ready. They +// lift the return value or error and resume the suspended function. +fileprivate func uniffiFutureContinuationCallback(ptr: UnsafeMutableRawPointer, pollResult: Int8) { + ContinuationHolder.fromOpaque(ptr).resume(pollResult) +} + +// Wraps UnsafeContinuation in a class so that we can use reference counting when passing it across +// the FFI +fileprivate class ContinuationHolder { + let continuation: UnsafeContinuation + + init(_ continuation: UnsafeContinuation) { + self.continuation = continuation + } + + func resume(_ pollResult: Int8) { + self.continuation.resume(returning: pollResult) + } + + func toOpaque() -> UnsafeMutableRawPointer { + return Unmanaged.passRetained(self).toOpaque() + } + static func fromOpaque(_ ptr: UnsafeRawPointer) -> ContinuationHolder { + return Unmanaged.fromOpaque(ptr).takeRetainedValue() + } +} +public func defaultConfig() -> Config { + return try! FfiConverterTypeConfig.lift( + try! rustCall() { + uniffi_ldk_node_fn_func_default_config($0) +} + ) +} public func generateEntropyMnemonic() -> Mnemonic { return try! FfiConverterTypeMnemonic.lift( try! rustCall() { @@ -3352,22 +6279,82 @@ private enum InitializationResult { // the code inside is only computed once. 
private var initializationResult: InitializationResult { // Get the bindings contract version from our ComponentInterface - let bindings_contract_version = 24 + let bindings_contract_version = 25 // Get the scaffolding contract version by calling the into the dylib let scaffolding_contract_version = ffi_ldk_node_uniffi_contract_version() if bindings_contract_version != scaffolding_contract_version { return InitializationResult.contractVersionMismatch } - if (uniffi_ldk_node_checksum_func_generate_entropy_mnemonic() != 7251) { + if (uniffi_ldk_node_checksum_func_default_config() != 55381) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_func_generate_entropy_mnemonic() != 59926) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_claim_for_hash() != 52848) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_fail_for_hash() != 24516) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive() != 28084) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_for_hash() != 3869) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount() != 51453) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount_for_hash() != 21975) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_variable_amount_via_jit_channel() != 58617) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_receive_via_jit_channel() != 50555) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send() != 35346) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_probes() != 39625) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_probes_using_amount() != 25010) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt11payment_send_using_amount() != 15471) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_initiate_refund() != 15379) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_receive() != 20864) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_receive_variable_amount() != 10863) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_request_refund_payment() != 61945) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_send() != 15282) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_bolt12payment_send_using_amount() != 21384) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_builder_build() != 785) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_build() != 48294) { + if (uniffi_ldk_node_checksum_method_builder_build_with_fs_store() != 61304) { return 
InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_entropy_bip39_mnemonic() != 35659) { + if (uniffi_ldk_node_checksum_method_builder_set_entropy_bip39_mnemonic() != 827) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_bytes() != 26795) { + if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_bytes() != 44799) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_method_builder_set_entropy_seed_path() != 64056) { @@ -3382,10 +6369,13 @@ private var initializationResult: InitializationResult { if (uniffi_ldk_node_checksum_method_builder_set_gossip_source_rgs() != 64312) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_listening_addresses() != 18689) { + if (uniffi_ldk_node_checksum_method_builder_set_liquidity_source_lsps2() != 2667) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_builder_set_listening_addresses() != 14051) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_builder_set_network() != 23321) { + if (uniffi_ldk_node_checksum_method_builder_set_network() != 27539) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_method_builder_set_storage_dir_path() != 59019) { @@ -3427,109 +6417,124 @@ private var initializationResult: InitializationResult { if (uniffi_ldk_node_checksum_method_channelconfig_set_max_dust_htlc_exposure_from_fixed_limit() != 16864) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_close_channel() != 7103) { + if (uniffi_ldk_node_checksum_method_networkgraph_channel() != 38070) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_list_channels() != 4693) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_list_nodes() != 36715) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_networkgraph_node() != 48925) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_node_bolt11_payment() != 41402) { + return InitializationResult.apiChecksumMismatch + } + if (uniffi_ldk_node_checksum_method_node_bolt12_payment() != 49254) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_connect() != 5558) { + if (uniffi_ldk_node_checksum_method_node_close_channel() != 62479) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_connect_open_channel() != 59688) { + if (uniffi_ldk_node_checksum_method_node_config() != 7511) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_disconnect() != 43777) { + if (uniffi_ldk_node_checksum_method_node_connect() != 34120) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_event_handled() != 28838) { + if (uniffi_ldk_node_checksum_method_node_connect_open_channel() != 64763) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_is_running() != 18666) { + if (uniffi_ldk_node_checksum_method_node_disconnect() != 43538) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_channels() != 43935) { + if 
(uniffi_ldk_node_checksum_method_node_event_handled() != 47939) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_payments() != 58666) { + if (uniffi_ldk_node_checksum_method_node_force_close_channel() != 44813) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_list_peers() != 22735) { + if (uniffi_ldk_node_checksum_method_node_list_balances() != 57528) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_listening_addresses() != 49178) { + if (uniffi_ldk_node_checksum_method_node_list_channels() != 7954) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_new_onchain_address() != 34077) { + if (uniffi_ldk_node_checksum_method_node_list_payments() != 35002) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_next_event() != 41150) { + if (uniffi_ldk_node_checksum_method_node_list_peers() != 14889) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_node_id() != 39688) { + if (uniffi_ldk_node_checksum_method_node_listening_addresses() != 2665) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_payment() != 35034) { + if (uniffi_ldk_node_checksum_method_node_network_graph() != 2695) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_receive_payment() != 4148) { + if (uniffi_ldk_node_checksum_method_node_next_event() != 7682) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_receive_variable_amount_payment() != 25209) { + if (uniffi_ldk_node_checksum_method_node_next_event_async() != 25426) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_remove_payment() != 12673) { + if (uniffi_ldk_node_checksum_method_node_node_id() != 51489) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_all_to_onchain_address() != 24019) { + if (uniffi_ldk_node_checksum_method_node_onchain_payment() != 6092) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment() != 56244) { + if (uniffi_ldk_node_checksum_method_node_payment() != 60296) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_probes() != 38405) { + if (uniffi_ldk_node_checksum_method_node_remove_payment() != 47952) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_probes_using_amount() != 340) { + if (uniffi_ldk_node_checksum_method_node_sign_message() != 51392) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_payment_using_amount() != 42148) { + if (uniffi_ldk_node_checksum_method_node_spontaneous_payment() != 37403) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_spontaneous_payment() != 39235) { + if (uniffi_ldk_node_checksum_method_node_start() != 58480) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_send_spontaneous_payment_probes() != 52786) { + if (uniffi_ldk_node_checksum_method_node_status() != 55952) { return InitializationResult.apiChecksumMismatch } - if 
(uniffi_ldk_node_checksum_method_ldknode_send_to_onchain_address() != 43948) { + if (uniffi_ldk_node_checksum_method_node_stop() != 42188) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_sign_message() != 40383) { + if (uniffi_ldk_node_checksum_method_node_sync_wallets() != 32474) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_spendable_onchain_balance_sats() != 1454) { + if (uniffi_ldk_node_checksum_method_node_update_channel_config() != 38109) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_start() != 44334) { + if (uniffi_ldk_node_checksum_method_node_verify_signature() != 20486) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_stop() != 16494) { + if (uniffi_ldk_node_checksum_method_node_wait_next_event() != 55101) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_sync_wallets() != 2447) { + if (uniffi_ldk_node_checksum_method_onchainpayment_new_address() != 37251) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_total_onchain_balance_sats() != 44607) { + if (uniffi_ldk_node_checksum_method_onchainpayment_send_all_to_address() != 20046) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_update_channel_config() != 13742) { + if (uniffi_ldk_node_checksum_method_onchainpayment_send_to_address() != 34782) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_verify_signature() != 35778) { + if (uniffi_ldk_node_checksum_method_spontaneouspayment_send() != 16613) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_method_ldknode_wait_next_event() != 34319) { + if (uniffi_ldk_node_checksum_method_spontaneouspayment_send_probes() != 25937) { return InitializationResult.apiChecksumMismatch } - if (uniffi_ldk_node_checksum_constructor_builder_from_config() != 56443) { + if (uniffi_ldk_node_checksum_constructor_builder_from_config() != 64393) { return InitializationResult.apiChecksumMismatch } if (uniffi_ldk_node_checksum_constructor_builder_new() != 48442) { @@ -3551,4 +6556,4 @@ private func uniffiEnsureInitialized() { case .apiChecksumMismatch: fatalError("UniFFI API checksum mismatch: try cleaning and rebuilding your project") } -} +} \ No newline at end of file diff --git a/docker-compose-cln.yml b/docker-compose-cln.yml index 6628636b9..5fb1f2dcd 100644 --- a/docker-compose-cln.yml +++ b/docker-compose-cln.yml @@ -63,6 +63,7 @@ services: "--bitcoin-rpcuser=user", "--bitcoin-rpcpassword=pass", "--regtest", + "--experimental-anchors", ] ports: - "19846:19846" diff --git a/scripts/uniffi_bindgen_generate_swift.sh b/scripts/uniffi_bindgen_generate_swift.sh index 277d23209..ba2d84d2a 100755 --- a/scripts/uniffi_bindgen_generate_swift.sh +++ b/scripts/uniffi_bindgen_generate_swift.sh @@ -34,6 +34,10 @@ swiftc -module-name LDKNode -emit-library -o "$BINDINGS_DIR"/libldk_node.dylib - # Create xcframework from bindings Swift file and libs mkdir -p "$BINDINGS_DIR"/Sources/LDKNode || exit 1 + +# Patch LDKNode.swift with `SystemConfiguration` import. 
+sed -i '' '4s/^/import SystemConfiguration\n/' "$BINDINGS_DIR"/LDKNode.swift + mv "$BINDINGS_DIR"/LDKNode.swift "$BINDINGS_DIR"/Sources/LDKNode/LDKNode.swift || exit 1 cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/Headers || exit 1 cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/Headers || exit 1 @@ -41,6 +45,6 @@ cp "$BINDINGS_DIR"/LDKNodeFFI.h "$BINDINGS_DIR"/LDKNodeFFI.xcframework/macos-arm cp target/aarch64-apple-ios/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64/LDKNodeFFI.framework/LDKNodeFFI || exit 1 cp target/lipo-ios-sim/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/ios-arm64_x86_64-simulator/LDKNodeFFI.framework/LDKNodeFFI || exit 1 cp target/lipo-macos/release-smaller/libldk_node.a "$BINDINGS_DIR"/LDKNodeFFI.xcframework/macos-arm64_x86_64/LDKNodeFFI.framework/LDKNodeFFI || exit 1 -# rm "$BINDINGS_DIR"/LDKNodeFFI.h || exit 1 -# rm "$BINDINGS_DIR"/LDKNodeFFI.modulemap || exit 1 +rm "$BINDINGS_DIR"/LDKNodeFFI.h || exit 1 +rm "$BINDINGS_DIR"/LDKNodeFFI.modulemap || exit 1 echo finished successfully! diff --git a/src/balance.rs b/src/balance.rs index f5a52073d..f1c95dcbe 100644 --- a/src/balance.rs +++ b/src/balance.rs @@ -1,11 +1,12 @@ +use crate::sweep::value_satoshis_from_descriptor; + use lightning::chain::channelmonitor::Balance as LdkBalance; use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage}; +use lightning::util::sweep::{OutputSpendStatus, TrackedSpendableOutput}; use bitcoin::secp256k1::PublicKey; use bitcoin::{BlockHash, Txid}; -use crate::sweep::SpendableOutputInfo; - /// Details of the known available balances returned by [`Node::list_balances`]. /// /// [`Node::list_balances`]: crate::Node::list_balances @@ -14,7 +15,15 @@ pub struct BalanceDetails { /// The total balance of our on-chain wallet. pub total_onchain_balance_sats: u64, /// The currently spendable balance of our on-chain wallet. + /// + /// This includes any sufficiently confirmed funds, minus + /// [`total_anchor_channels_reserve_sats`]. + /// + /// [`total_anchor_channels_reserve_sats`]: Self::total_anchor_channels_reserve_sats pub spendable_onchain_balance_sats: u64, + /// The share of our total balance that we retain as an emergency reserve to (hopefully) be + /// able to spend the Anchor outputs when one of our channels is closed. + pub total_anchor_channels_reserve_sats: u64, /// The total balance that we would be able to claim across all our Lightning channels. 
/// /// Note this excludes balances that we are unsure if we are able to claim (e.g., as we are @@ -258,46 +267,45 @@ pub enum PendingSweepBalance { } impl PendingSweepBalance { - pub(crate) fn from_tracked_spendable_output(output_info: SpendableOutputInfo) -> Self { - if let Some(confirmation_hash) = output_info.confirmation_hash { - debug_assert!(output_info.confirmation_height.is_some()); - debug_assert!(output_info.latest_spending_tx.is_some()); - let channel_id = output_info.channel_id; - let confirmation_height = output_info - .confirmation_height - .expect("Height must be set if the output is confirmed"); - let latest_spending_txid = output_info - .latest_spending_tx - .as_ref() - .expect("Spending tx must be set if the output is confirmed") - .txid(); - let amount_satoshis = output_info.value_satoshis(); - Self::AwaitingThresholdConfirmations { - channel_id, - latest_spending_txid, - confirmation_hash, - confirmation_height, - amount_satoshis, - } - } else if let Some(latest_broadcast_height) = output_info.latest_broadcast_height { - debug_assert!(output_info.latest_spending_tx.is_some()); - let channel_id = output_info.channel_id; - let latest_spending_txid = output_info - .latest_spending_tx - .as_ref() - .expect("Spending tx must be set if the spend was broadcast") - .txid(); - let amount_satoshis = output_info.value_satoshis(); - Self::BroadcastAwaitingConfirmation { - channel_id, + pub(crate) fn from_tracked_spendable_output(output_info: TrackedSpendableOutput) -> Self { + match output_info.status { + OutputSpendStatus::PendingInitialBroadcast { .. } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + Self::PendingBroadcast { channel_id, amount_satoshis } + }, + OutputSpendStatus::PendingFirstConfirmation { latest_broadcast_height, - latest_spending_txid, - amount_satoshis, - } - } else { - let channel_id = output_info.channel_id; - let amount_satoshis = output_info.value_satoshis(); - Self::PendingBroadcast { channel_id, amount_satoshis } + latest_spending_tx, + .. + } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + let latest_spending_txid = latest_spending_tx.txid(); + Self::BroadcastAwaitingConfirmation { + channel_id, + latest_broadcast_height, + latest_spending_txid, + amount_satoshis, + } + }, + OutputSpendStatus::PendingThresholdConfirmations { + latest_spending_tx, + confirmation_height, + confirmation_hash, + .. 
+ } => { + let channel_id = output_info.channel_id; + let amount_satoshis = value_satoshis_from_descriptor(&output_info.descriptor); + let latest_spending_txid = latest_spending_tx.txid(); + Self::AwaitingThresholdConfirmations { + channel_id, + latest_spending_txid, + confirmation_hash, + confirmation_height, + amount_satoshis, + } + }, } } } diff --git a/src/builder.rs b/src/builder.rs index a09b2563f..a2a93aa79 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1,21 +1,21 @@ use crate::config::{ - Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, DEFAULT_ESPLORA_SERVER_URL, - WALLET_KEYS_SEED_LEN, + default_user_config, Config, BDK_CLIENT_CONCURRENCY, BDK_CLIENT_STOP_GAP, + DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS, DEFAULT_ESPLORA_SERVER_URL, WALLET_KEYS_SEED_LEN, }; +use crate::connection::ConnectionManager; use crate::event::EventQueue; use crate::fee_estimator::OnchainFeeEstimator; use crate::gossip::GossipSource; use crate::io; use crate::io::sqlite_store::SqliteStore; use crate::liquidity::LiquiditySource; -use crate::logger::{log_error, FilesystemLogger, Logger}; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; use crate::message_handler::NodeCustomMessageHandler; -use crate::payment_store::PaymentStore; +use crate::payment::store::PaymentStore; use crate::peer_store::PeerStore; -use crate::sweep::OutputSweeper; use crate::tx_broadcaster::TransactionBroadcaster; use crate::types::{ - ChainMonitor, ChannelManager, FakeMessageRouter, GossipSync, KeysManager, NetworkGraph, + ChainMonitor, ChannelManager, DynStore, GossipSync, Graph, KeysManager, MessageRouter, OnionMessenger, PeerManager, }; use crate::wallet::Wallet; @@ -31,12 +31,12 @@ use lightning::routing::scoring::{ }; use lightning::sign::EntropySource; -use lightning::util::config::UserConfig; use lightning::util::persist::{ - read_channel_monitors, KVStore, CHANNEL_MANAGER_PERSISTENCE_KEY, + read_channel_monitors, CHANNEL_MANAGER_PERSISTENCE_KEY, CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::ReadableArgs; +use lightning::util::sweep::OutputSweeper; use lightning_persister::fs_store::FilesystemStore; @@ -65,6 +65,7 @@ use std::fmt; use std::fs; use std::io::Cursor; use std::path::PathBuf; +use std::sync::atomic::AtomicBool; use std::sync::{Arc, Mutex, RwLock}; use std::time::SystemTime; @@ -114,12 +115,18 @@ pub enum BuildError { /// The given listening addresses are invalid, e.g. too many were passed. InvalidListeningAddresses, /// We failed to read data from the [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore ReadFailed, /// We failed to write data to the [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore WriteFailed, /// We failed to access the given `storage_dir_path`. StoragePathAccessFailed, /// We failed to setup our [`KVStore`]. + /// + /// [`KVStore`]: lightning::util::persist::KVStore KVStoreSetupFailed, /// We failed to setup the onchain wallet. WalletSetupFailed, @@ -176,7 +183,6 @@ impl NodeBuilder { /// Creates a new builder instance from an [`Config`]. pub fn from_config(config: Config) -> Self { - let config = config; let entropy_source_config = None; let chain_data_source_config = None; let gossip_source_config = None; @@ -298,7 +304,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. 
- pub fn build(&self) -> Result, BuildError> { + pub fn build(&self) -> Result { let storage_dir_path = self.config.storage_dir_path.clone(); fs::create_dir_all(storage_dir_path.clone()) .map_err(|_| BuildError::StoragePathAccessFailed)?; @@ -315,7 +321,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Result, BuildError> { + pub fn build_with_fs_store(&self) -> Result { let mut storage_dir_path: PathBuf = self.config.storage_dir_path.clone().into(); storage_dir_path.push("fs_store"); @@ -328,9 +334,7 @@ impl NodeBuilder { /// Builds a [`Node`] instance with a [`VssStore`] backend and according to the options /// previously configured. #[cfg(any(vss, vss_test))] - pub fn build_with_vss_store( - &self, url: String, store_id: String, - ) -> Result, BuildError> { + pub fn build_with_vss_store(&self, url: String, store_id: String) -> Result { let logger = setup_logger(&self.config)?; let seed_bytes = seed_bytes_from_config( @@ -368,9 +372,7 @@ impl NodeBuilder { } /// Builds a [`Node`] instance according to the options previously configured. - pub fn build_with_store( - &self, kv_store: Arc, - ) -> Result, BuildError> { + pub fn build_with_store(&self, kv_store: Arc) -> Result { let logger = setup_logger(&self.config)?; let seed_bytes = seed_bytes_from_config( &self.config, @@ -499,31 +501,29 @@ impl ArcedNodeBuilder { /// Builds a [`Node`] instance with a [`SqliteStore`] backend and according to the options /// previously configured. - pub fn build(&self) -> Result>, BuildError> { + pub fn build(&self) -> Result, BuildError> { self.inner.read().unwrap().build().map(Arc::new) } /// Builds a [`Node`] instance with a [`FilesystemStore`] backend and according to the options /// previously configured. - pub fn build_with_fs_store(&self) -> Result>, BuildError> { + pub fn build_with_fs_store(&self) -> Result, BuildError> { self.inner.read().unwrap().build_with_fs_store().map(Arc::new) } /// Builds a [`Node`] instance according to the options previously configured. - pub fn build_with_store( - &self, kv_store: Arc, - ) -> Result>, BuildError> { + pub fn build_with_store(&self, kv_store: Arc) -> Result, BuildError> { self.inner.read().unwrap().build_with_store(kv_store).map(Arc::new) } } /// Builds a [`Node`] instance according to the options previously configured. 
-fn build_with_store_internal( +fn build_with_store_internal( config: Arc, chain_data_source_config: Option<&ChainDataSourceConfig>, gossip_source_config: Option<&GossipSourceConfig>, liquidity_source_config: Option<&LiquiditySourceConfig>, seed_bytes: [u8; 64], - logger: Arc, kv_store: Arc, -) -> Result, BuildError> { + logger: Arc, kv_store: Arc, +) -> Result { // Initialize the on-chain wallet and chain access let xprv = bitcoin::bip32::ExtendedPrivKey::new_master(config.network.into(), &seed_bytes) .map_err(|e| { @@ -558,10 +558,15 @@ fn build_with_store_internal( let (blockchain, tx_sync, tx_broadcaster, fee_estimator) = match chain_data_source_config { Some(ChainDataSourceConfig::Esplora(server_url)) => { - let tx_sync = Arc::new(EsploraSyncClient::new(server_url.clone(), Arc::clone(&logger))); - let blockchain = - EsploraBlockchain::from_client(tx_sync.client().clone(), BDK_CLIENT_STOP_GAP) - .with_concurrency(BDK_CLIENT_CONCURRENCY); + let mut client_builder = esplora_client::Builder::new(&server_url.clone()); + client_builder = client_builder.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); + let esplora_client = client_builder.build_async().unwrap(); + let tx_sync = Arc::new(EsploraSyncClient::from_client( + esplora_client.clone(), + Arc::clone(&logger), + )); + let blockchain = EsploraBlockchain::from_client(esplora_client, BDK_CLIENT_STOP_GAP) + .with_concurrency(BDK_CLIENT_CONCURRENCY); let tx_broadcaster = Arc::new(TransactionBroadcaster::new( tx_sync.client().clone(), Arc::clone(&logger), @@ -603,7 +608,7 @@ fn build_with_store_internal( )); // Initialize the ChainMonitor - let chain_monitor: Arc> = Arc::new(chainmonitor::ChainMonitor::new( + let chain_monitor: Arc = Arc::new(chainmonitor::ChainMonitor::new( Some(Arc::clone(&tx_sync)), Arc::clone(&tx_broadcaster), Arc::clone(&logger), @@ -632,7 +637,7 @@ fn build_with_store_internal( Ok(graph) => Arc::new(graph), Err(e) => { if e.kind() == std::io::ErrorKind::NotFound { - Arc::new(NetworkGraph::new(config.network.into(), Arc::clone(&logger))) + Arc::new(Graph::new(config.network.into(), Arc::clone(&logger))) } else { return Err(BuildError::ReadFailed); } @@ -663,7 +668,7 @@ fn build_with_store_internal( let router = Arc::new(DefaultRouter::new( Arc::clone(&network_graph), Arc::clone(&logger), - keys_manager.get_secure_random_bytes(), + Arc::clone(&keys_manager), Arc::clone(&scorer), scoring_fee_params, )); @@ -685,20 +690,7 @@ fn build_with_store_internal( }, }; - // Initialize the default config values. - // - // Note that methods such as Node::connect_open_channel might override some of the values set - // here, e.g. the ChannelHandshakeConfig, meaning these default values will mostly be relevant - // for inbound channels. - let mut user_config = UserConfig::default(); - user_config.channel_handshake_limits.force_announced_channel_preference = false; - - if !config.trusted_peers_0conf.is_empty() { - // Manually accept inbound channels if we expect 0conf channel requests, avoid - // generating the events otherwise. - user_config.manually_accept_inbound_channels = true; - } - + let mut user_config = default_user_config(&config); if liquidity_source_config.and_then(|lsc| lsc.lsps2_service.as_ref()).is_some() { // Generally allow claiming underpaying HTLCs as the LSP will skim off some fee. We'll // check that they don't take too much before claiming. 
@@ -734,7 +726,7 @@ fn build_with_store_internal( channel_monitor_references, ); let (_hash, channel_manager) = - <(BlockHash, ChannelManager)>::read(&mut reader, read_args).map_err(|e| { + <(BlockHash, ChannelManager)>::read(&mut reader, read_args).map_err(|e| { log_error!(logger, "Failed to read channel manager from KVStore: {}", e); BuildError::ReadFailed })?; @@ -775,13 +767,16 @@ fn build_with_store_internal( })?; } + let message_router = MessageRouter::new(Arc::clone(&network_graph), Arc::clone(&keys_manager)); + // Initialize the PeerManager let onion_messenger: Arc = Arc::new(OnionMessenger::new( Arc::clone(&keys_manager), Arc::clone(&keys_manager), Arc::clone(&logger), - Arc::new(FakeMessageRouter {}), - IgnoringMessageHandler {}, + Arc::clone(&channel_manager), + Arc::new(message_router), + Arc::clone(&channel_manager), IgnoringMessageHandler {}, )); let ephemeral_bytes: [u8; 32] = keys_manager.get_secure_random_bytes(); @@ -891,6 +886,50 @@ fn build_with_store_internal( liquidity_source.as_ref().map(|l| l.set_peer_manager(Arc::clone(&peer_manager))); + let connection_manager = + Arc::new(ConnectionManager::new(Arc::clone(&peer_manager), Arc::clone(&logger))); + + let output_sweeper = match io::utils::read_output_sweeper( + Arc::clone(&tx_broadcaster), + Arc::clone(&fee_estimator), + Arc::clone(&tx_sync), + Arc::clone(&keys_manager), + Arc::clone(&kv_store), + Arc::clone(&logger), + ) { + Ok(output_sweeper) => Arc::new(output_sweeper), + Err(e) => { + if e.kind() == std::io::ErrorKind::NotFound { + Arc::new(OutputSweeper::new( + channel_manager.current_best_block(), + Arc::clone(&tx_broadcaster), + Arc::clone(&fee_estimator), + Some(Arc::clone(&tx_sync)), + Arc::clone(&keys_manager), + Arc::clone(&keys_manager), + Arc::clone(&kv_store), + Arc::clone(&logger), + )) + } else { + return Err(BuildError::ReadFailed); + } + }, + }; + + match io::utils::migrate_deprecated_spendable_outputs( + Arc::clone(&output_sweeper), + Arc::clone(&kv_store), + Arc::clone(&logger), + ) { + Ok(()) => { + log_info!(logger, "Successfully migrated OutputSweeper data."); + }, + Err(e) => { + log_error!(logger, "Failed to migrate OutputSweeper data: {}", e); + return Err(BuildError::ReadFailed); + }, + } + // Init payment info storage let payment_store = match io::utils::read_payments(Arc::clone(&kv_store), Arc::clone(&logger)) { Ok(payments) => { @@ -924,30 +963,21 @@ fn build_with_store_internal( }, }; - let best_block = channel_manager.current_best_block(); - let output_sweeper = - match io::utils::read_spendable_outputs(Arc::clone(&kv_store), Arc::clone(&logger)) { - Ok(outputs) => Arc::new(OutputSweeper::new( - outputs, - Arc::clone(&wallet), - Arc::clone(&tx_broadcaster), - Arc::clone(&fee_estimator), - Arc::clone(&keys_manager), - Arc::clone(&kv_store), - best_block, - Some(Arc::clone(&tx_sync)), - Arc::clone(&logger), - )), - Err(_) => { - return Err(BuildError::ReadFailed); - }, - }; - let (stop_sender, _) = tokio::sync::watch::channel(()); + let (event_handling_stopped_sender, _) = tokio::sync::watch::channel(()); + + let is_listening = Arc::new(AtomicBool::new(false)); + let latest_wallet_sync_timestamp = Arc::new(RwLock::new(None)); + let latest_onchain_wallet_sync_timestamp = Arc::new(RwLock::new(None)); + let latest_fee_rate_cache_update_timestamp = Arc::new(RwLock::new(None)); + let latest_rgs_snapshot_timestamp = Arc::new(RwLock::new(None)); + let latest_node_announcement_broadcast_timestamp = Arc::new(RwLock::new(None)); + let latest_channel_monitor_archival_height = 
Arc::new(RwLock::new(None)); Ok(Node { runtime, stop_sender, + event_handling_stopped_sender, config, wallet, tx_sync, @@ -958,6 +988,7 @@ fn build_with_store_internal( chain_monitor, output_sweeper, peer_manager, + connection_manager, keys_manager, network_graph, gossip_source, @@ -968,6 +999,13 @@ fn build_with_store_internal( scorer, peer_store, payment_store, + is_listening, + latest_wallet_sync_timestamp, + latest_onchain_wallet_sync_timestamp, + latest_fee_rate_cache_update_timestamp, + latest_rgs_snapshot_timestamp, + latest_node_announcement_broadcast_timestamp, + latest_channel_monitor_archival_height, }) } @@ -990,7 +1028,7 @@ fn seed_bytes_from_config( match entropy_source_config { Some(EntropySourceConfig::SeedBytes(bytes)) => Ok(bytes.clone()), Some(EntropySourceConfig::SeedFile(seed_path)) => { - Ok(io::utils::read_or_generate_seed_file(&seed_path, Arc::clone(&logger)) + Ok(io::utils::read_or_generate_seed_file(seed_path, Arc::clone(&logger)) .map_err(|_| BuildError::InvalidSeedFile)?) }, Some(EntropySourceConfig::Bip39Mnemonic { mnemonic, passphrase }) => match passphrase { diff --git a/src/config.rs b/src/config.rs index 945d712c9..d0e72080f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,6 +1,7 @@ use std::time::Duration; use lightning::ln::msgs::SocketAddress; +use lightning::util::config::UserConfig; use lightning::util::logger::Level as LogLevel; use bitcoin::secp256k1::PublicKey; @@ -15,6 +16,7 @@ const DEFAULT_LDK_WALLET_SYNC_INTERVAL_SECS: u64 = 30; const DEFAULT_FEE_RATE_CACHE_UPDATE_INTERVAL_SECS: u64 = 60 * 10; const DEFAULT_PROBING_LIQUIDITY_LIMIT_MULTIPLIER: u64 = 3; const DEFAULT_LOG_LEVEL: LogLevel = LogLevel::Debug; +const DEFAULT_ANCHOR_PER_CHANNEL_RESERVE_SATS: u64 = 25_000; // The 'stop gap' parameter used by BDK's wallet sync. This seems to configure the threshold // number of derivation indexes after which BDK stops looking for new scripts belonging to the wallet. @@ -26,9 +28,15 @@ pub(crate) const BDK_CLIENT_CONCURRENCY: u8 = 4; // The default Esplora server we're using. pub(crate) const DEFAULT_ESPLORA_SERVER_URL: &str = "https://blockstream.info/api"; +// The default Esplora client timeout we're using. +pub(crate) const DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS: u64 = 10; + // The timeout after which we abandon retrying failed payments. pub(crate) const LDK_PAYMENT_RETRY_TIMEOUT: Duration = Duration::from_secs(10); +// The interval (in block height) after which we retry archiving fully resolved channel monitors. +pub(crate) const RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL: u32 = 6; + // The time in-between peer reconnection attempts. pub(crate) const PEER_RECONNECTION_INTERVAL: Duration = Duration::from_secs(10); @@ -41,6 +49,21 @@ pub(crate) const NODE_ANN_BCAST_INTERVAL: Duration = Duration::from_secs(60 * 60 // The lower limit which we apply to any configured wallet sync intervals. pub(crate) const WALLET_SYNC_INTERVAL_MINIMUM_SECS: u64 = 10; +// The timeout after which we abort a wallet syncing operation. +pub(crate) const BDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 90; + +// The timeout after which we abort a wallet syncing operation. +pub(crate) const LDK_WALLET_SYNC_TIMEOUT_SECS: u64 = 30; + +// The timeout after which we abort a fee rate cache update operation. +pub(crate) const FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS: u64 = 5; + +// The timeout after which we abort a transaction broadcast operation. +pub(crate) const TX_BROADCAST_TIMEOUT_SECS: u64 = 5; + +// The timeout after which we abort a RGS sync operation. 
+pub(crate) const RGS_SYNC_TIMEOUT_SECS: u64 = 5; + // The length in bytes of our wallets' keys seed. pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; @@ -62,6 +85,9 @@ pub(crate) const WALLET_KEYS_SEED_LEN: usize = 64; /// | `trusted_peers_0conf` | [] | /// | `probing_liquidity_limit_multiplier` | 3 | /// | `log_level` | Debug | +/// | `anchor_channels_config` | Some(..) | +/// +/// See [`AnchorChannelsConfig`] for more information on its respective default values. /// /// [`Node`]: crate::Node pub struct Config { @@ -104,6 +130,23 @@ pub struct Config { /// /// Any messages below this level will be excluded from the logs. pub log_level: LogLevel, + /// Configuration options pertaining to Anchor channels, i.e., channels for which the + /// `option_anchors_zero_fee_htlc_tx` channel type is negotiated. + /// + /// Please refer to [`AnchorChannelsConfig`] for further information on Anchor channels. + /// + /// If set to `Some`, we'll try to open new channels with Anchors enabled, i.e., new channels + /// will be negotiated with the `option_anchors_zero_fee_htlc_tx` channel type if supported by + /// the counterparty. Note that this won't prevent us from opening non-Anchor channels if the + /// counterparty doesn't support `option_anchors_zero_fee_htlc_tx`. If set to `None`, new + /// channels will be negotiated with the legacy `option_static_remotekey` channel type only. + /// + /// **Note:** If set to `None` *after* some Anchor channels have already been + /// opened, no dedicated emergency on-chain reserve will be maintained for these channels, + /// which can be dangerous if only insufficient funds are available at the time of channel + /// closure. We *will* however still try to get the Anchor spending transactions confirmed + /// on-chain with the funds available. + pub anchor_channels_config: Option, } impl Default for Config { @@ -120,6 +163,78 @@ impl Default for Config { trusted_peers_0conf: Vec::new(), probing_liquidity_limit_multiplier: DEFAULT_PROBING_LIQUIDITY_LIMIT_MULTIPLIER, log_level: DEFAULT_LOG_LEVEL, + anchor_channels_config: Some(AnchorChannelsConfig::default()), + } + } +} + +/// Configuration options pertaining to 'Anchor' channels, i.e., channels for which the +/// `option_anchors_zero_fee_htlc_tx` channel type is negotiated. +/// +/// Prior to the introduction of Anchor channels, the on-chain fees paying for the transactions +/// issued on channel closure were pre-determined and locked-in at the time of the channel +/// opening. This required to estimate what fee rate would be sufficient to still have the +/// closing transactions be spendable on-chain (i.e., not be considered dust). This legacy +/// design of pre-anchor channels proved inadequate in the unpredictable, often turbulent, fee +/// markets we experience today. +/// +/// In contrast, Anchor channels allow to determine an adequate fee rate *at the time of channel +/// closure*, making them much more robust in the face of fee spikes. In turn, they require to +/// maintain a reserve of on-chain funds to have the channel closure transactions confirmed +/// on-chain, at least if the channel counterparty can't be trusted to do this for us. +/// +/// See [BOLT 3] for more technical details on Anchor channels. 
+/// +/// +/// ### Defaults +/// +/// | Parameter | Value | +/// |----------------------------|--------| +/// | `trusted_peers_no_reserve` | [] | +/// | `per_channel_reserve_sats` | 25000 | +/// +/// +/// [BOLT 3]: https://github.com/lightning/bolts/blob/master/03-transactions.md#htlc-timeout-and-htlc-success-transactions +#[derive(Debug, Clone)] +pub struct AnchorChannelsConfig { + /// A list of peers that we trust to get the required channel closing transactions confirmed + /// on-chain. + /// + /// Channels with these peers won't count towards the retained on-chain reserve and we won't + /// take any action to get the required transactions confirmed ourselves. + /// + /// **Note:** Trusting the channel counterparty to take the necessary actions to get the + /// required Anchor spending and HTLC transactions confirmed on-chain is potentially insecure + /// as the channel may not be closed if they refuse to do so, potentially leaving the user + /// funds stuck *or* even allowing the counterparty to steal any in-flight funds after the + /// corresponding HTLCs time out. + pub trusted_peers_no_reserve: Vec<PublicKey>, + /// The amount of satoshis per anchors-negotiated channel with an untrusted peer that we keep + /// as an emergency reserve in our on-chain wallet. + /// + /// This allows for having the required Anchor output spending and HTLC transactions confirmed + /// when the channel is closed. + /// + /// If the channel peer is not marked as trusted via + /// [`AnchorChannelsConfig::trusted_peers_no_reserve`], we will always try to spend the Anchor + /// outputs with *any* on-chain funds available, i.e., the total reserve value as well as any + /// spendable funds available in the on-chain wallet. Therefore, this per-channel reserve is + /// really an emergency reserve that we maintain at all times to reduce the risk of + /// insufficient funds at the time of a channel closure. To this end, we will refuse to open + /// outbound or accept inbound channels if we don't have sufficient on-chain funds available to + /// cover the additional reserve requirement. + /// + /// **Note:** Depending on the fee market at the time of closure, this reserve amount might or + /// might not suffice to successfully spend the Anchor output and have the HTLC transactions + /// confirmed on-chain, i.e., you may want to adjust this value accordingly. + pub per_channel_reserve_sats: u64, +} + +impl Default for AnchorChannelsConfig { + fn default() -> Self { + Self { + trusted_peers_no_reserve: Vec::new(), + per_channel_reserve_sats: DEFAULT_ANCHOR_PER_CHANNEL_RESERVE_SATS, } } } @@ -133,3 +248,18 @@ impl Default for Config { pub fn default_config() -> Config { Config::default() } + +pub(crate) fn default_user_config(config: &Config) -> UserConfig { + // Initialize the default config values. + // + // Note that methods such as Node::connect_open_channel might override some of the values set + // here, e.g. the ChannelHandshakeConfig, meaning these default values will mostly be relevant + // for inbound channels.
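As a usage note for the new `anchor_channels_config` field documented above, a short editorial sketch (not part of the diff): the function name is hypothetical, `lsp_node_id` is a placeholder, and the crate-root re-export of `Config` is assumed.

```rust
use bitcoin::secp256k1::PublicKey;
use ldk_node::Config;

fn anchor_config_for_lsp(lsp_node_id: PublicKey) -> Config {
    let mut config = Config::default();
    if let Some(anchor_cfg) = config.anchor_channels_config.as_mut() {
        // Skip the emergency reserve for a trusted LSP we expect to bump the
        // Anchor transactions for us...
        anchor_cfg.trusted_peers_no_reserve.push(lsp_node_id);
        // ...and raise the default 25_000 sat per-channel reserve for all other peers.
        anchor_cfg.per_channel_reserve_sats = 50_000;
    }
    // Setting `config.anchor_channels_config = None` instead disables Anchor
    // negotiation entirely; new channels fall back to the legacy channel type.
    config
}
```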
+ let mut user_config = UserConfig::default(); + user_config.channel_handshake_limits.force_announced_channel_preference = false; + user_config.manually_accept_inbound_channels = true; + user_config.channel_handshake_config.negotiate_anchors_zero_fee_htlc_tx = + config.anchor_channels_config.is_some(); + + user_config +} diff --git a/src/connection.rs b/src/connection.rs new file mode 100644 index 000000000..9d956d6be --- /dev/null +++ b/src/connection.rs @@ -0,0 +1,147 @@ +use crate::logger::{log_error, log_info, Logger}; +use crate::types::PeerManager; +use crate::Error; + +use lightning::ln::msgs::SocketAddress; + +use bitcoin::secp256k1::PublicKey; + +use std::collections::hash_map::{self, HashMap}; +use std::net::ToSocketAddrs; +use std::ops::Deref; +use std::sync::{Arc, Mutex}; +use std::time::Duration; + +pub(crate) struct ConnectionManager +where + L::Target: Logger, +{ + pending_connections: + Mutex>>>>, + peer_manager: Arc, + logger: L, +} + +impl ConnectionManager +where + L::Target: Logger, +{ + pub(crate) fn new(peer_manager: Arc, logger: L) -> Self { + let pending_connections = Mutex::new(HashMap::new()); + Self { pending_connections, peer_manager, logger } + } + + pub(crate) async fn connect_peer_if_necessary( + &self, node_id: PublicKey, addr: SocketAddress, + ) -> Result<(), Error> { + if self.peer_manager.peer_by_node_id(&node_id).is_some() { + return Ok(()); + } + + self.do_connect_peer(node_id, addr).await + } + + pub(crate) async fn do_connect_peer( + &self, node_id: PublicKey, addr: SocketAddress, + ) -> Result<(), Error> { + // First, we check if there is already an outbound connection in flight, if so, we just + // await on the corresponding watch channel. The task driving the connection future will + // send us the result.. + let pending_ready_receiver_opt = self.register_or_subscribe_pending_connection(&node_id); + if let Some(pending_connection_ready_receiver) = pending_ready_receiver_opt { + return pending_connection_ready_receiver.await.map_err(|e| { + debug_assert!(false, "Failed to receive connection result: {:?}", e); + log_error!(self.logger, "Failed to receive connection result: {:?}", e); + Error::ConnectionFailed + })?; + } + + log_info!(self.logger, "Connecting to peer: {}@{}", node_id, addr); + + let socket_addr = addr + .to_socket_addrs() + .map_err(|e| { + log_error!(self.logger, "Failed to resolve network address {}: {}", addr, e); + self.propagate_result_to_subscribers(&node_id, Err(Error::InvalidSocketAddress)); + Error::InvalidSocketAddress + })? + .next() + .ok_or_else(|| { + log_error!(self.logger, "Failed to resolve network address {}", addr); + self.propagate_result_to_subscribers(&node_id, Err(Error::InvalidSocketAddress)); + Error::InvalidSocketAddress + })?; + + let connection_future = lightning_net_tokio::connect_outbound( + Arc::clone(&self.peer_manager), + node_id, + socket_addr, + ); + + let res = match connection_future.await { + Some(connection_closed_future) => { + let mut connection_closed_future = Box::pin(connection_closed_future); + loop { + tokio::select! 
{ + _ = &mut connection_closed_future => { + log_info!(self.logger, "Peer connection closed: {}@{}", node_id, addr); + break Err(Error::ConnectionFailed); + }, + _ = tokio::time::sleep(Duration::from_millis(10)) => {}, + }; + + match self.peer_manager.peer_by_node_id(&node_id) { + Some(_) => break Ok(()), + None => continue, + } + } + }, + None => { + log_error!(self.logger, "Failed to connect to peer: {}@{}", node_id, addr); + Err(Error::ConnectionFailed) + }, + }; + + self.propagate_result_to_subscribers(&node_id, res); + + res + } + + fn register_or_subscribe_pending_connection( + &self, node_id: &PublicKey, + ) -> Option>> { + let mut pending_connections_lock = self.pending_connections.lock().unwrap(); + match pending_connections_lock.entry(*node_id) { + hash_map::Entry::Occupied(mut entry) => { + let (tx, rx) = tokio::sync::oneshot::channel(); + entry.get_mut().push(tx); + Some(rx) + }, + hash_map::Entry::Vacant(entry) => { + entry.insert(Vec::new()); + None + }, + } + } + + fn propagate_result_to_subscribers(&self, node_id: &PublicKey, res: Result<(), Error>) { + // Send the result to any other tasks that might be waiting on it by now. + let mut pending_connections_lock = self.pending_connections.lock().unwrap(); + if let Some(connection_ready_senders) = pending_connections_lock.remove(node_id) { + for sender in connection_ready_senders { + let _ = sender.send(res).map_err(|e| { + debug_assert!( + false, + "Failed to send connection result to subscribers: {:?}", + e + ); + log_error!( + self.logger, + "Failed to send connection result to subscribers: {:?}", + e + ); + }); + } + } + } +} diff --git a/src/error.rs b/src/error.rs index 0182b3092..15aa5a960 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,6 +1,6 @@ use std::fmt; -#[derive(Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] /// An error that possibly needs to be handled by the user. pub enum Error { /// Returned when trying to start [`crate::Node`] while it is already running. @@ -13,6 +13,12 @@ pub enum Error { ConnectionFailed, /// Invoice creation failed. InvoiceCreationFailed, + /// Invoice request creation failed. + InvoiceRequestCreationFailed, + /// Offer creation failed. + OfferCreationFailed, + /// Refund creation failed. + RefundCreationFailed, /// Sending a payment has failed. PaymentSendingFailed, /// Sending a payment probe has failed. @@ -27,16 +33,24 @@ pub enum Error { PersistenceFailed, /// A fee rate estimation update failed. FeerateEstimationUpdateFailed, + /// A fee rate estimation update timed out. + FeerateEstimationUpdateTimeout, /// A wallet operation failed. WalletOperationFailed, + /// A wallet operation timed out. + WalletOperationTimeout, /// A signing operation for transaction failed. OnchainTxSigningFailed, /// A signing operation for message failed. MessageSigningFailed, /// A transaction sync operation failed. TxSyncFailed, + /// A transaction sync operation timed out. + TxSyncTimeout, /// A gossip updating operation failed. GossipUpdateFailed, + /// A gossip updating operation timed out. + GossipUpdateTimeout, /// A liquidity request operation failed. LiquidityRequestFailed, /// The given address is invalid. @@ -47,6 +61,12 @@ pub enum Error { InvalidPublicKey, /// The given secret key is invalid. InvalidSecretKey, + /// The given offer id is invalid. + InvalidOfferId, + /// The given node id is invalid. + InvalidNodeId, + /// The given payment id is invalid. + InvalidPaymentId, /// The given payment hash is invalid. 
InvalidPaymentHash, /// The given payment pre-image is invalid. @@ -57,12 +77,20 @@ pub enum Error { InvalidAmount, /// The given invoice is invalid. InvalidInvoice, + /// The given offer is invalid. + InvalidOffer, + /// The given refund is invalid. + InvalidRefund, /// The given channel ID is invalid. InvalidChannelId, /// The given network is invalid. InvalidNetwork, + /// The custom TLVs are invalid. + InvalidCustomTlv, /// A payment with the given hash has already been initiated. DuplicatePayment, + /// The provided offer was denonminated in an unsupported currency. + UnsupportedCurrency, /// The available funds are insufficient to complete the given operation. InsufficientFunds, /// The given operation failed due to the required liquidity source being unavailable. @@ -81,6 +109,9 @@ impl fmt::Display for Error { }, Self::ConnectionFailed => write!(f, "Network connection closed."), Self::InvoiceCreationFailed => write!(f, "Failed to create invoice."), + Self::InvoiceRequestCreationFailed => write!(f, "Failed to create invoice request."), + Self::OfferCreationFailed => write!(f, "Failed to create offer."), + Self::RefundCreationFailed => write!(f, "Failed to create refund."), Self::PaymentSendingFailed => write!(f, "Failed to send the given payment."), Self::ProbeSendingFailed => write!(f, "Failed to send the given payment probe."), Self::ChannelCreationFailed => write!(f, "Failed to create channel."), @@ -90,29 +121,44 @@ impl fmt::Display for Error { Self::FeerateEstimationUpdateFailed => { write!(f, "Failed to update fee rate estimates.") }, + Self::FeerateEstimationUpdateTimeout => { + write!(f, "Updating fee rate estimates timed out.") + }, Self::WalletOperationFailed => write!(f, "Failed to conduct wallet operation."), + Self::WalletOperationTimeout => write!(f, "A wallet operation timed out."), Self::OnchainTxSigningFailed => write!(f, "Failed to sign given transaction."), Self::MessageSigningFailed => write!(f, "Failed to sign given message."), Self::TxSyncFailed => write!(f, "Failed to sync transactions."), + Self::TxSyncTimeout => write!(f, "Syncing transactions timed out."), Self::GossipUpdateFailed => write!(f, "Failed to update gossip data."), + Self::GossipUpdateTimeout => write!(f, "Updating gossip data timed out."), Self::LiquidityRequestFailed => write!(f, "Failed to request inbound liquidity."), Self::InvalidAddress => write!(f, "The given address is invalid."), Self::InvalidSocketAddress => write!(f, "The given network address is invalid."), Self::InvalidPublicKey => write!(f, "The given public key is invalid."), Self::InvalidSecretKey => write!(f, "The given secret key is invalid."), + Self::InvalidOfferId => write!(f, "The given offer id is invalid."), + Self::InvalidNodeId => write!(f, "The given node id is invalid."), + Self::InvalidPaymentId => write!(f, "The given payment id is invalid."), Self::InvalidPaymentHash => write!(f, "The given payment hash is invalid."), Self::InvalidPaymentPreimage => write!(f, "The given payment preimage is invalid."), Self::InvalidPaymentSecret => write!(f, "The given payment secret is invalid."), Self::InvalidAmount => write!(f, "The given amount is invalid."), Self::InvalidInvoice => write!(f, "The given invoice is invalid."), + Self::InvalidOffer => write!(f, "The given offer is invalid."), + Self::InvalidRefund => write!(f, "The given refund is invalid."), Self::InvalidChannelId => write!(f, "The given channel ID is invalid."), Self::InvalidNetwork => write!(f, "The given network is invalid."), + Self::InvalidCustomTlv => 
write!(f, "The given custom TLVs are invalid."), Self::DuplicatePayment => { write!(f, "A payment with the given hash has already been initiated.") }, Self::InsufficientFunds => { write!(f, "The available funds are insufficient to complete the given operation.") }, + Self::UnsupportedCurrency => { + write!(f, "The provided offer was denonminated in an unsupported currency.") + }, Self::LiquiditySourceUnavailable => { write!(f, "The given operation failed due to the required liquidity source being unavailable.") }, diff --git a/src/event.rs b/src/event.rs index bf683838a..1c192547e 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1,26 +1,32 @@ -use crate::types::{Sweeper, Wallet}; +use crate::types::{DynStore, Sweeper, Wallet}; + use crate::{ - hex_utils, ChannelManager, Config, Error, NetworkGraph, PeerInfo, PeerStore, UserChannelId, + hex_utils, BumpTransactionEventHandler, ChannelManager, Config, Error, Graph, PeerInfo, + PeerStore, TlvEntry, UserChannelId, }; -use crate::payment_store::{ - PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentStatus, PaymentStore, +use crate::connection::ConnectionManager; + +use crate::payment::store::{ + PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, PaymentStatus, + PaymentStore, }; use crate::io::{ EVENT_QUEUE_PERSISTENCE_KEY, EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, }; -use crate::logger::{log_error, log_info, Logger}; +use crate::logger::{log_debug, log_error, log_info, Logger}; use lightning::chain::chaininterface::ConfirmationTarget; +use lightning::events::bump_transaction::BumpTransactionEvent; use lightning::events::{ClosureReason, PaymentPurpose}; use lightning::events::{Event as LdkEvent, PaymentFailureReason}; use lightning::impl_writeable_tlv_based_enum; +use lightning::ln::channelmanager::PaymentId; use lightning::ln::{ChannelId, PaymentHash}; use lightning::routing::gossip::NodeId; use lightning::util::errors::APIError; -use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use lightning_liquidity::lsps2::utils::compute_opening_fee; @@ -45,11 +51,21 @@ use std::time::Duration; pub enum Event { /// A sent payment was successful. PaymentSuccessful { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, + /// The total fee which was spent at intermediate hops in this payment. + fee_paid_msat: Option, }, /// A sent payment has failed. PaymentFailed { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, /// The reason why the payment failed. @@ -59,11 +75,37 @@ pub enum Event { }, /// A payment has been received. PaymentReceived { + /// A local identifier used to track the payment. + /// + /// Will only be `None` for events serialized with LDK Node v0.2.1 or prior. + payment_id: Option, /// The hash of the payment. payment_hash: PaymentHash, /// The value, in thousandths of a satoshi, that has been received. amount_msat: u64, }, + /// A payment for a previously-registered payment hash has been received. + /// + /// This needs to be manually claimed by supplying the correct preimage to [`claim_for_hash`]. 
+ /// + /// If the the provided parameters don't match the expectations or the preimage can't be + /// retrieved in time, should be failed-back via [`fail_for_hash`]. + /// + /// Note claiming will necessarily fail after the `claim_deadline` has been reached. + /// + /// [`claim_for_hash`]: crate::payment::Bolt11Payment::claim_for_hash + /// [`fail_for_hash`]: crate::payment::Bolt11Payment::fail_for_hash + PaymentClaimable { + /// A local identifier used to track the payment. + payment_id: PaymentId, + /// The hash of the payment. + payment_hash: PaymentHash, + /// The value, in thousandths of a satoshi, that is claimable. + claimable_amount_msat: u64, + /// The block height at which this payment will be failed back and will no longer be + /// eligible for claiming. + claim_deadline: Option, + }, /// A channel has been created and is pending confirmation on-chain. ChannelPending { /// The `channel_id` of the channel. @@ -106,13 +148,17 @@ pub enum Event { impl_writeable_tlv_based_enum!(Event, (0, PaymentSuccessful) => { (0, payment_hash, required), + (1, fee_paid_msat, option), + (3, payment_id, option), }, (1, PaymentFailed) => { (0, payment_hash, required), (1, reason, option), + (3, payment_id, option), }, (2, PaymentReceived) => { (0, payment_hash, required), + (1, payment_id, option), (2, amount_msat, required), }, (3, ChannelReady) => { @@ -132,25 +178,31 @@ impl_writeable_tlv_based_enum!(Event, (1, counterparty_node_id, option), (2, user_channel_id, required), (3, reason, upgradable_option), + }, + (6, PaymentClaimable) => { + (0, payment_hash, required), + (2, payment_id, required), + (4, claimable_amount_msat, required), + (6, claim_deadline, option), }; ); -pub struct EventQueue +pub struct EventQueue where L::Target: Logger, { queue: Arc>>, waker: Arc>>, notifier: Condvar, - kv_store: Arc, + kv_store: Arc, logger: L, } -impl EventQueue +impl EventQueue where L::Target: Logger, { - pub(crate) fn new(kv_store: Arc, logger: L) -> Self { + pub(crate) fn new(kv_store: Arc, logger: L) -> Self { let queue = Arc::new(Mutex::new(VecDeque::new())); let waker = Arc::new(Mutex::new(None)); let notifier = Condvar::new(); @@ -174,7 +226,7 @@ where pub(crate) fn next_event(&self) -> Option { let locked_queue = self.queue.lock().unwrap(); - locked_queue.front().map(|e| e.clone()) + locked_queue.front().cloned() } pub(crate) async fn next_event_async(&self) -> Event { @@ -225,13 +277,13 @@ where } } -impl ReadableArgs<(Arc, L)> for EventQueue +impl ReadableArgs<(Arc, L)> for EventQueue where L::Target: Logger, { #[inline] fn read( - reader: &mut R, args: (Arc, L), + reader: &mut R, args: (Arc, L), ) -> Result { let (kv_store, logger) = args; let read_queue: EventQueueDeserWrapper = Readable::read(reader)?; @@ -289,37 +341,42 @@ impl Future for EventFuture { } } -pub(crate) struct EventHandler +pub(crate) struct EventHandler where L::Target: Logger, { - event_queue: Arc>, + event_queue: Arc>, wallet: Arc, - channel_manager: Arc>, - output_sweeper: Arc>, - network_graph: Arc, - payment_store: Arc>, - peer_store: Arc>, + bump_tx_event_handler: Arc, + channel_manager: Arc, + connection_manager: Arc>, + output_sweeper: Arc, + network_graph: Arc, + payment_store: Arc>, + peer_store: Arc>, runtime: Arc>>, logger: L, config: Arc, } -impl EventHandler +impl EventHandler where L::Target: Logger, { pub fn new( - event_queue: Arc>, wallet: Arc, - channel_manager: Arc>, output_sweeper: Arc>, - network_graph: Arc, payment_store: Arc>, - peer_store: Arc>, runtime: Arc>>, - logger: L, config: Arc, + 
event_queue: Arc>, wallet: Arc, + bump_tx_event_handler: Arc, + channel_manager: Arc, connection_manager: Arc>, + output_sweeper: Arc, network_graph: Arc, + payment_store: Arc>, peer_store: Arc>, + runtime: Arc>>, logger: L, config: Arc, ) -> Self { Self { event_queue, wallet, + bump_tx_event_handler, channel_manager, + connection_manager, output_sweeper, network_graph, payment_store, @@ -344,7 +401,7 @@ where let confirmation_target = ConfirmationTarget::NonAnchorChannelFee; // We set nLockTime to the current height to discourage fee sniping. - let cur_height = self.channel_manager.current_best_block().height(); + let cur_height = self.channel_manager.current_best_block().height; let locktime = LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO); // Sign the final funding transaction and broadcast it. @@ -405,12 +462,34 @@ where receiver_node_id: _, via_channel_id: _, via_user_channel_id: _, - claim_deadline: _, - onion_fields: _, + claim_deadline, + onion_fields, counterparty_skimmed_fee_msat, } => { - if let Some(info) = self.payment_store.get(&payment_hash) { - if info.status == PaymentStatus::Succeeded { + let payment_id = PaymentId(payment_hash.0); + if let Some(info) = self.payment_store.get(&payment_id) { + if info.direction == PaymentDirection::Outbound { + log_info!( + self.logger, + "Refused inbound payment with ID {}: circular payments are unsupported.", + payment_id + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + return; + } + + if info.status == PaymentStatus::Succeeded + || matches!(info.kind, PaymentKind::Spontaneous { .. }) + { log_info!( self.logger, "Refused duplicate inbound payment from payment hash {} of {}msat", @@ -421,7 +500,7 @@ where let update = PaymentDetailsUpdate { status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); @@ -430,17 +509,22 @@ where return; } - let max_total_opening_fee_msat = info - .lsp_fee_limits - .and_then(|l| { - l.max_total_opening_fee_msat.or_else(|| { - l.max_proportional_opening_fee_ppm_msat.and_then(|max_prop_fee| { - // If it's a variable amount payment, compute the actual fee. - compute_opening_fee(amount_msat, 0, max_prop_fee) + let max_total_opening_fee_msat = match info.kind { + PaymentKind::Bolt11Jit { lsp_fee_limits, .. } => { + lsp_fee_limits + .max_total_opening_fee_msat + .or_else(|| { + lsp_fee_limits.max_proportional_opening_fee_ppm_msat.and_then( + |max_prop_fee| { + // If it's a variable amount payment, compute the actual fee. 
+ compute_opening_fee(amount_msat, 0, max_prop_fee) + }, + ) }) - }) - }) - .unwrap_or(0); + .unwrap_or(0) + }, + _ => 0, + }; if counterparty_skimmed_fee_msat > max_total_opening_fee_msat { log_info!( @@ -453,8 +537,9 @@ where self.channel_manager.fail_htlc_backwards(&payment_hash); let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); @@ -462,6 +547,38 @@ where }); return; } + + // If this is known by the store but ChannelManager doesn't know the preimage, + // the payment has been registered via `_for_hash` variants and needs to be manually claimed via + // user interaction. + match info.kind { + PaymentKind::Bolt11 { preimage, .. } => { + if purpose.preimage().is_none() { + debug_assert!( + preimage.is_none(), + "We would have registered the preimage if we knew" + ); + + self.event_queue + .add_event(Event::PaymentClaimable { + payment_id, + payment_hash, + claimable_amount_msat: amount_msat, + claim_deadline, + }) + .unwrap_or_else(|e| { + log_error!( + self.logger, + "Failed to push to event queue: {}", + e + ); + panic!("Failed to push to event queue"); + }); + return; + } + }, + _ => {}, + } } log_info!( @@ -471,131 +588,243 @@ where amount_msat, ); let payment_preimage = match purpose { - PaymentPurpose::InvoicePayment { payment_preimage, payment_secret } => { - if payment_preimage.is_some() { - payment_preimage - } else { - self.channel_manager - .get_payment_preimage(payment_hash, payment_secret) - .ok() - } + PaymentPurpose::Bolt11InvoicePayment { payment_preimage, .. } => { + payment_preimage }, - PaymentPurpose::SpontaneousPayment(preimage) => Some(preimage), - }; + PaymentPurpose::Bolt12OfferPayment { + payment_preimage, + payment_secret, + payment_context, + .. + } => { + let offer_id = payment_context.offer_id; + let kind = PaymentKind::Bolt12Offer { + hash: Some(payment_hash), + preimage: payment_preimage, + secret: Some(payment_secret), + offer_id, + }; - if let Some(preimage) = payment_preimage { - self.channel_manager.claim_funds(preimage); - } else { - log_error!( - self.logger, - "Failed to claim payment with hash {}: preimage unknown.", - hex_utils::to_string(&payment_hash.0), - ); - self.channel_manager.fail_htlc_backwards(&payment_hash); + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); - let update = PaymentDetailsUpdate { - status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) - }; - self.payment_store.update(&update).unwrap_or_else(|e| { - log_error!(self.logger, "Failed to access payment store: {}", e); - panic!("Failed to access payment store"); - }); - } - }, - LdkEvent::PaymentClaimed { - payment_hash, - purpose, - amount_msat, - receiver_node_id: _, - htlcs: _, - sender_intended_total_msat: _, - } => { - log_info!( - self.logger, - "Claimed payment from payment hash {} of {}msat.", - hex_utils::to_string(&payment_hash.0), - amount_msat, - ); - match purpose { - PaymentPurpose::InvoicePayment { payment_preimage, payment_secret, .. 
} => { - let update = PaymentDetailsUpdate { - preimage: Some(payment_preimage), - secret: Some(Some(payment_secret)), - amount_msat: Some(Some(amount_msat)), - status: Some(PaymentStatus::Succeeded), - ..PaymentDetailsUpdate::new(payment_hash) - }; - match self.payment_store.update(&update) { - Ok(true) => (), - Ok(false) => { + match self.payment_store.insert(payment) { + Ok(false) => (), + Ok(true) => { log_error!( self.logger, - "Payment with hash {} couldn't be found in store", - hex_utils::to_string(&payment_hash.0) + "Bolt12OfferPayment with ID {} was previously known", + payment_id, ); debug_assert!(false); }, Err(e) => { log_error!( self.logger, - "Failed to update payment with hash {}: {}", - hex_utils::to_string(&payment_hash.0), + "Failed to insert payment with ID {}: {}", + payment_id, e ); debug_assert!(false); }, } + payment_preimage + }, + PaymentPurpose::Bolt12RefundPayment { payment_preimage, .. } => { + payment_preimage }, PaymentPurpose::SpontaneousPayment(preimage) => { - let payment = PaymentDetails { - preimage: Some(preimage), + let custom_tlvs = onion_fields + .map(|of| { + of.custom_tlvs() + .iter() + .map(|(t, v)| TlvEntry { r#type: *t, value: v.clone() }) + .collect() + }) + .unwrap_or_default(); + + // Since it's spontaneous, we insert it now into our store. + let kind = PaymentKind::Spontaneous { hash: payment_hash, - secret: None, - amount_msat: Some(amount_msat), - direction: PaymentDirection::Inbound, - status: PaymentStatus::Succeeded, - lsp_fee_limits: None, + preimage: Some(preimage), + custom_tlvs, }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + match self.payment_store.insert(payment) { Ok(false) => (), Ok(true) => { log_error!( self.logger, - "Spontaneous payment with hash {} was previously known", - hex_utils::to_string(&payment_hash.0) + "Spontaneous payment with ID {} was previously known", + payment_id, ); debug_assert!(false); }, Err(e) => { log_error!( self.logger, - "Failed to insert payment with hash {}: {}", - hex_utils::to_string(&payment_hash.0), + "Failed to insert payment with ID {}: {}", + payment_id, e ); debug_assert!(false); }, } + + Some(preimage) }, }; + if let Some(preimage) = payment_preimage { + self.channel_manager.claim_funds(preimage); + } else { + log_error!( + self.logger, + "Failed to claim payment with ID {}: preimage unknown.", + payment_id, + ); + self.channel_manager.fail_htlc_backwards(&payment_hash); + + let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + } + }, + LdkEvent::PaymentClaimed { + payment_hash, + purpose, + amount_msat, + receiver_node_id: _, + htlcs: _, + sender_intended_total_msat: _, + } => { + let payment_id = PaymentId(payment_hash.0); + log_info!( + self.logger, + "Claimed payment with ID {} from payment hash {} of {}msat.", + payment_id, + hex_utils::to_string(&payment_hash.0), + amount_msat, + ); + + let update = match purpose { + PaymentPurpose::Bolt11InvoicePayment { + payment_preimage, + payment_secret, + .. 
+ } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::Bolt12OfferPayment { + payment_preimage, payment_secret, .. + } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::Bolt12RefundPayment { + payment_preimage, + payment_secret, + .. + } => PaymentDetailsUpdate { + preimage: Some(payment_preimage), + secret: Some(Some(payment_secret)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + PaymentPurpose::SpontaneousPayment(preimage) => PaymentDetailsUpdate { + preimage: Some(Some(preimage)), + amount_msat: Some(Some(amount_msat)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }, + }; + + match self.payment_store.update(&update) { + Ok(true) => (), + Ok(false) => { + log_error!( + self.logger, + "Payment with ID {} couldn't be found in store", + payment_id, + ); + debug_assert!(false); + }, + Err(e) => { + log_error!( + self.logger, + "Failed to update payment with ID {}: {}", + payment_id, + e + ); + panic!("Failed to access payment store"); + }, + } + self.event_queue - .add_event(Event::PaymentReceived { payment_hash, amount_msat }) + .add_event(Event::PaymentReceived { + payment_id: Some(payment_id), + payment_hash, + amount_msat, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); }); }, - LdkEvent::PaymentSent { payment_preimage, payment_hash, fee_paid_msat, .. } => { - if let Some(mut payment) = self.payment_store.get(&payment_hash) { - payment.preimage = Some(payment_preimage); - payment.status = PaymentStatus::Succeeded; - self.payment_store.insert(payment.clone()).unwrap_or_else(|e| { - log_error!(self.logger, "Failed to access payment store: {}", e); - panic!("Failed to access payment store"); - }); + LdkEvent::PaymentSent { + payment_id, + payment_preimage, + payment_hash, + fee_paid_msat, + .. + } => { + let payment_id = if let Some(id) = payment_id { + id + } else { + debug_assert!(false, "payment_id should always be set."); + return; + }; + + let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), + preimage: Some(Some(payment_preimage)), + status: Some(PaymentStatus::Succeeded), + ..PaymentDetailsUpdate::new(payment_id) + }; + + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + + self.payment_store.get(&payment_id).map(|payment| { log_info!( self.logger, "Successfully sent payment of {}msat{} from \ @@ -609,15 +838,20 @@ where hex_utils::to_string(&payment_hash.0), hex_utils::to_string(&payment_preimage.0) ); - } + }); + self.event_queue - .add_event(Event::PaymentSuccessful { payment_hash }) + .add_event(Event::PaymentSuccessful { + payment_id: Some(payment_id), + payment_hash, + fee_paid_msat, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); }); }, - LdkEvent::PaymentFailed { payment_hash, reason, .. } => { + LdkEvent::PaymentFailed { payment_id, payment_hash, reason, .. 
} => { log_info!( self.logger, "Failed to send payment to payment hash {:?} due to {:?}.", @@ -626,15 +860,20 @@ where ); let update = PaymentDetailsUpdate { + hash: Some(Some(payment_hash)), status: Some(PaymentStatus::Failed), - ..PaymentDetailsUpdate::new(payment_hash) + ..PaymentDetailsUpdate::new(payment_id) }; self.payment_store.update(&update).unwrap_or_else(|e| { log_error!(self.logger, "Failed to access payment store: {}", e); panic!("Failed to access payment store"); }); self.event_queue - .add_event(Event::PaymentFailed { payment_hash, reason }) + .add_event(Event::PaymentFailed { + payment_id: Some(payment_id), + payment_hash, + reason, + }) .unwrap_or_else(|e| { log_error!(self.logger, "Failed to push to event queue: {}", e); panic!("Failed to push to event queue"); @@ -663,15 +902,78 @@ where } }, LdkEvent::SpendableOutputs { outputs, channel_id } => { - self.output_sweeper.add_outputs(outputs, channel_id) + self.output_sweeper + .track_spendable_outputs(outputs, channel_id, true, None) + .unwrap_or_else(|_| { + log_error!(self.logger, "Failed to track spendable outputs"); + panic!("Failed to track spendable outputs"); + }); }, LdkEvent::OpenChannelRequest { temporary_channel_id, counterparty_node_id, funding_satoshis, - channel_type: _, + channel_type, push_msat: _, } => { + let anchor_channel = channel_type.requires_anchors_zero_fee_htlc_tx(); + + if anchor_channel { + if let Some(anchor_channels_config) = + self.config.anchor_channels_config.as_ref() + { + let cur_anchor_reserve_sats = crate::total_anchor_channels_reserve_sats( + &self.channel_manager, + &self.config, + ); + let spendable_amount_sats = self + .wallet + .get_spendable_amount_sats(cur_anchor_reserve_sats) + .unwrap_or(0); + + let required_amount_sats = if anchor_channels_config + .trusted_peers_no_reserve + .contains(&counterparty_node_id) + { + 0 + } else { + anchor_channels_config.per_channel_reserve_sats + }; + + if spendable_amount_sats < required_amount_sats { + log_error!( + self.logger, + "Rejecting inbound Anchor channel from peer {} due to insufficient available on-chain reserves.", + counterparty_node_id, + ); + self.channel_manager + .force_close_without_broadcasting_txn( + &temporary_channel_id, + &counterparty_node_id, + ) + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to reject channel: {:?}", e) + }); + return; + } + } else { + log_error!( + self.logger, + "Rejecting inbound channel from peer {} due to Anchor channels being disabled.", + counterparty_node_id, + ); + self.channel_manager + .force_close_without_broadcasting_txn( + &temporary_channel_id, + &counterparty_node_id, + ) + .unwrap_or_else(|e| { + log_error!(self.logger, "Failed to reject channel: {:?}", e) + }); + return; + } + } + let user_channel_id: u128 = rand::thread_rng().gen::(); let allow_0conf = self.config.trusted_peers_0conf.contains(&counterparty_node_id); let res = if allow_0conf { @@ -692,8 +994,9 @@ where Ok(()) => { log_info!( self.logger, - "Accepting inbound{} channel of {}sats from{} peer {}", + "Accepting inbound{}{} channel of {}sats from{} peer {}", if allow_0conf { " 0conf" } else { "" }, + if anchor_channel { " Anchor" } else { "" }, funding_satoshis, if allow_0conf { " trusted" } else { "" }, counterparty_node_id, @@ -702,8 +1005,9 @@ where Err(e) => { log_error!( self.logger, - "Error while accepting inbound{} channel from{} peer {}: {:?}", + "Error while accepting inbound{}{} channel from{} peer {}: {:?}", if allow_0conf { " 0conf" } else { "" }, + if anchor_channel { " Anchor" } else { "" }, 
counterparty_node_id, if allow_0conf { " trusted" } else { "" }, e, @@ -714,9 +1018,10 @@ where LdkEvent::PaymentForwarded { prev_channel_id, next_channel_id, - fee_earned_msat, + total_fee_earned_msat, claim_from_onchain_tx, outbound_amount_forwarded_msat, + .. } => { let read_only_network_graph = self.network_graph.read_only(); let nodes = read_only_network_graph.nodes(); @@ -749,7 +1054,7 @@ where let to_next_str = format!(" to {}{}", node_str(&next_channel_id), channel_str(&next_channel_id)); - let fee_earned = fee_earned_msat.unwrap_or(0); + let fee_earned = total_fee_earned_msat.unwrap_or(0); let outbound_amount_forwarded_msat = outbound_amount_forwarded_msat.unwrap_or(0); if claim_from_onchain_tx { log_info!( @@ -777,6 +1082,7 @@ where former_temporary_channel_id, counterparty_node_id, funding_txo, + .. } => { log_info!( self.logger, @@ -870,9 +1176,78 @@ where }, LdkEvent::DiscardFunding { .. } => {}, LdkEvent::HTLCIntercepted { .. } => {}, - LdkEvent::BumpTransaction(_) => {}, - LdkEvent::InvoiceRequestFailed { .. } => {}, - LdkEvent::ConnectionNeeded { .. } => {}, + LdkEvent::InvoiceRequestFailed { payment_id } => { + log_error!( + self.logger, + "Failed to request invoice for outbound BOLT12 payment {}", + payment_id + ); + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + self.payment_store.update(&update).unwrap_or_else(|e| { + log_error!(self.logger, "Failed to access payment store: {}", e); + panic!("Failed to access payment store"); + }); + return; + }, + LdkEvent::ConnectionNeeded { node_id, addresses } => { + let runtime_lock = self.runtime.read().unwrap(); + debug_assert!(runtime_lock.is_some()); + + if let Some(runtime) = runtime_lock.as_ref() { + let spawn_logger = self.logger.clone(); + let spawn_cm = Arc::clone(&self.connection_manager); + runtime.spawn(async move { + for addr in &addresses { + match spawn_cm.connect_peer_if_necessary(node_id, addr.clone()).await { + Ok(()) => { + return; + }, + Err(e) => { + log_error!( + spawn_logger, + "Failed to establish connection to peer {}@{}: {}", + node_id, + addr, + e + ); + }, + } + } + }); + } + }, + LdkEvent::BumpTransaction(bte) => { + let (channel_id, counterparty_node_id) = match bte { + BumpTransactionEvent::ChannelClose { + ref channel_id, + ref counterparty_node_id, + .. + } => (channel_id, counterparty_node_id), + BumpTransactionEvent::HTLCResolution { + ref channel_id, + ref counterparty_node_id, + .. 
+ } => (channel_id, counterparty_node_id), + }; + + if let Some(anchor_channels_config) = self.config.anchor_channels_config.as_ref() { + if anchor_channels_config + .trusted_peers_no_reserve + .contains(counterparty_node_id) + { + log_debug!(self.logger, + "Ignoring BumpTransactionEvent for channel {} due to trusted counterparty {}", + channel_id, counterparty_node_id + ); + return; + } + } + + self.bump_tx_event_handler.handle_event(&bte); + }, } } } @@ -886,7 +1261,7 @@ mod tests { #[tokio::test] async fn event_queue_persistence() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let event_queue = Arc::new(EventQueue::new(Arc::clone(&store), Arc::clone(&logger))); assert_eq!(event_queue.next_event(), None); @@ -923,7 +1298,7 @@ mod tests { #[tokio::test] async fn event_queue_concurrency() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let event_queue = Arc::new(EventQueue::new(Arc::clone(&store), Arc::clone(&logger))); assert_eq!(event_queue.next_event(), None); diff --git a/src/fee_estimator.rs b/src/fee_estimator.rs index f79cfcb34..f1fa7e43b 100644 --- a/src/fee_estimator.rs +++ b/src/fee_estimator.rs @@ -1,3 +1,4 @@ +use crate::config::FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS; use crate::logger::{log_error, log_trace, Logger}; use crate::{Config, Error}; @@ -14,6 +15,7 @@ use bitcoin::Network; use std::collections::HashMap; use std::ops::Deref; use std::sync::{Arc, RwLock}; +use std::time::Duration; pub(crate) struct OnchainFeeEstimator where @@ -42,6 +44,7 @@ where ConfirmationTarget::AnchorChannelFee, ConfirmationTarget::NonAnchorChannelFee, ConfirmationTarget::ChannelCloseMinimum, + ConfirmationTarget::OutputSpendingFee, ]; for target in confirmation_targets { let num_blocks = match target { @@ -51,9 +54,24 @@ where ConfirmationTarget::AnchorChannelFee => 1008, ConfirmationTarget::NonAnchorChannelFee => 12, ConfirmationTarget::ChannelCloseMinimum => 144, + ConfirmationTarget::OutputSpendingFee => 12, }; - let estimates = self.esplora_client.get_fee_estimates().await.map_err(|e| { + let estimates = tokio::time::timeout( + Duration::from_secs(FEE_RATE_CACHE_UPDATE_TIMEOUT_SECS), + self.esplora_client.get_fee_estimates(), + ) + .await + .map_err(|e| { + log_error!( + self.logger, + "Updating fee rate estimates for {:?} timed out: {}", + target, + e + ); + Error::FeerateEstimationUpdateTimeout + })? + .map_err(|e| { log_error!( self.logger, "Failed to retrieve fee rate estimates for {:?}: {}", @@ -119,6 +137,7 @@ where ConfirmationTarget::AnchorChannelFee => 500, ConfirmationTarget::NonAnchorChannelFee => 1000, ConfirmationTarget::ChannelCloseMinimum => 500, + ConfirmationTarget::OutputSpendingFee => 1000, }; // We'll fall back on this, if we really don't have any other information. 
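Both this fee-estimator change and the gossip.rs change just below apply the same pattern: the network call is wrapped in `tokio::time::timeout`, and a lapsed timer is mapped to a dedicated `*Timeout` error variant, separate from the error for the request itself failing. A condensed, generic sketch of that pattern (the helper name and string errors are placeholders standing in for the crate's `Error` variants):

```rust
use std::future::Future;
use std::time::Duration;

async fn with_timeout<F, T, E>(fut: F, secs: u64) -> Result<T, &'static str>
where
    F: Future<Output = Result<T, E>>,
{
    tokio::time::timeout(Duration::from_secs(secs), fut)
        .await
        .map_err(|_elapsed| "operation timed out")? // outer error: the timer lapsed
        .map_err(|_inner| "operation failed")       // inner error: the request itself failed
}

// e.g., roughly: let estimates = with_timeout(esplora_client.get_fee_estimates(), 5).await?;
```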
diff --git a/src/gossip.rs b/src/gossip.rs index 9fb5e6a84..1241b0cdc 100644 --- a/src/gossip.rs +++ b/src/gossip.rs @@ -1,11 +1,13 @@ +use crate::config::RGS_SYNC_TIMEOUT_SECS; use crate::logger::{log_trace, FilesystemLogger, Logger}; -use crate::types::{GossipSync, NetworkGraph, P2PGossipSync, RapidGossipSync}; +use crate::types::{GossipSync, Graph, P2PGossipSync, RapidGossipSync}; use crate::Error; use lightning::routing::utxo::UtxoLookup; use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::Arc; +use std::time::Duration; pub(crate) enum GossipSource { P2PNetwork { @@ -20,7 +22,7 @@ pub(crate) enum GossipSource { } impl GossipSource { - pub fn new_p2p(network_graph: Arc, logger: Arc) -> Self { + pub fn new_p2p(network_graph: Arc, logger: Arc) -> Self { let gossip_sync = Arc::new(P2PGossipSync::new( network_graph, None::>, @@ -30,7 +32,7 @@ impl GossipSource { } pub fn new_rgs( - server_url: String, latest_sync_timestamp: u32, network_graph: Arc, + server_url: String, latest_sync_timestamp: u32, network_graph: Arc, logger: Arc, ) -> Self { let gossip_sync = Arc::new(RapidGossipSync::new(network_graph, Arc::clone(&logger))); @@ -39,19 +41,13 @@ impl GossipSource { } pub fn is_rgs(&self) -> bool { - if let Self::RapidGossipSync { .. } = self { - true - } else { - false - } + matches!(self, Self::RapidGossipSync { .. }) } pub fn as_gossip_sync(&self) -> GossipSync { match self { - Self::RapidGossipSync { gossip_sync, .. } => { - GossipSync::Rapid(Arc::clone(&gossip_sync)) - }, - Self::P2PNetwork { gossip_sync, .. } => GossipSync::P2P(Arc::clone(&gossip_sync)), + Self::RapidGossipSync { gossip_sync, .. } => GossipSync::Rapid(Arc::clone(gossip_sync)), + Self::P2PNetwork { gossip_sync, .. } => GossipSync::P2P(Arc::clone(gossip_sync)), } } @@ -61,7 +57,17 @@ impl GossipSource { Self::RapidGossipSync { gossip_sync, server_url, latest_sync_timestamp, logger } => { let query_timestamp = latest_sync_timestamp.load(Ordering::Acquire); let query_url = format!("{}/{}", server_url, query_timestamp); - let response = reqwest::get(query_url).await.map_err(|e| { + + let response = tokio::time::timeout( + Duration::from_secs(RGS_SYNC_TIMEOUT_SECS), + reqwest::get(query_url), + ) + .await + .map_err(|e| { + log_trace!(logger, "Retrieving RGS gossip update timed out: {}", e); + Error::GossipUpdateTimeout + })? + .map_err(|e| { log_trace!(logger, "Failed to retrieve RGS gossip update: {}", e); Error::GossipUpdateFailed })?; diff --git a/src/graph.rs b/src/graph.rs new file mode 100644 index 000000000..79a21853d --- /dev/null +++ b/src/graph.rs @@ -0,0 +1,166 @@ +//! Objects for querying the network graph. + +use crate::types::Graph; + +use lightning::routing::gossip::NodeId; + +#[cfg(feature = "uniffi")] +use lightning::ln::msgs::SocketAddress; +#[cfg(feature = "uniffi")] +use lightning::routing::gossip::RoutingFees; + +#[cfg(not(feature = "uniffi"))] +use lightning::routing::gossip::{ChannelInfo, NodeInfo}; + +use std::sync::Arc; + +/// Represents the network as nodes and channels between them. +pub struct NetworkGraph { + inner: Arc, +} + +impl NetworkGraph { + pub(crate) fn new(inner: Arc) -> Self { + Self { inner } + } + + /// Returns the list of channels in the graph + pub fn list_channels(&self) -> Vec { + self.inner.read_only().channels().unordered_keys().map(|c| *c).collect() + } + + /// Returns information on a channel with the given id. 
+ pub fn channel(&self, short_channel_id: u64) -> Option { + self.inner.read_only().channels().get(&short_channel_id).cloned().map(|c| c.into()) + } + + /// Returns the list of nodes in the graph + pub fn list_nodes(&self) -> Vec { + self.inner.read_only().nodes().unordered_keys().map(|n| *n).collect() + } + + /// Returns information on a node with the given id. + pub fn node(&self, node_id: &NodeId) -> Option { + self.inner.read_only().nodes().get(node_id).cloned().map(|n| n.into()) + } +} + +/// Details about a channel (both directions). +/// +/// Received within a channel announcement. +/// +/// This is a simplified version of LDK's `ChannelInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ChannelInfo { + /// Source node of the first direction of a channel + pub node_one: NodeId, + /// Details about the first direction of a channel + pub one_to_two: Option, + /// Source node of the second direction of a channel + pub node_two: NodeId, + /// Details about the second direction of a channel + pub two_to_one: Option, + /// The channel capacity as seen on-chain, if chain lookup is available. + pub capacity_sats: Option, +} + +#[cfg(feature = "uniffi")] +impl From for ChannelInfo { + fn from(value: lightning::routing::gossip::ChannelInfo) -> Self { + Self { + node_one: value.node_one, + one_to_two: value.one_to_two.map(|u| u.into()), + node_two: value.node_two, + two_to_one: value.two_to_one.map(|u| u.into()), + capacity_sats: value.capacity_sats, + } + } +} + +/// Details about one direction of a channel as received within a `ChannelUpdate`. +/// +/// This is a simplified version of LDK's `ChannelUpdateInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ChannelUpdateInfo { + /// When the last update to the channel direction was issued. + /// Value is opaque, as set in the announcement. + pub last_update: u32, + /// Whether the channel can be currently used for payments (in this one direction). + pub enabled: bool, + /// The difference in CLTV values that you must have when routing through this channel. + pub cltv_expiry_delta: u16, + /// The minimum value, which must be relayed to the next hop via the channel + pub htlc_minimum_msat: u64, + /// The maximum value which may be relayed to the next hop via the channel. + pub htlc_maximum_msat: u64, + /// Fees charged when the channel is used for routing + pub fees: RoutingFees, +} + +#[cfg(feature = "uniffi")] +impl From for ChannelUpdateInfo { + fn from(value: lightning::routing::gossip::ChannelUpdateInfo) -> Self { + Self { + last_update: value.last_update, + enabled: value.enabled, + cltv_expiry_delta: value.cltv_expiry_delta, + htlc_minimum_msat: value.htlc_minimum_msat, + htlc_maximum_msat: value.htlc_maximum_msat, + fees: value.fees, + } + } +} + +/// Details about a node in the network, known from the network announcement. +/// +/// This is a simplified version of LDK's `NodeInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeInfo { + /// All valid channels a node has announced + pub channels: Vec, + /// More information about a node from node_announcement. + /// Optional because we store a Node entry after learning about it from + /// a channel announcement, but before receiving a node announcement. 
+ pub announcement_info: Option, +} + +#[cfg(feature = "uniffi")] +impl From for NodeInfo { + fn from(value: lightning::routing::gossip::NodeInfo) -> Self { + Self { + channels: value.channels, + announcement_info: value.announcement_info.map(|a| a.into()), + } + } +} + +/// Information received in the latest node_announcement from this node. +/// +/// This is a simplified version of LDK's `NodeAnnouncementInfo` for bindings. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeAnnouncementInfo { + /// When the last known update to the node state was issued. + /// Value is opaque, as set in the announcement. + pub last_update: u32, + /// Moniker assigned to the node. + /// May be invalid or malicious (eg control chars), + /// should not be exposed to the user. + pub alias: String, + /// List of addresses on which this node is reachable + pub addresses: Vec, +} + +#[cfg(feature = "uniffi")] +impl From for NodeAnnouncementInfo { + fn from(value: lightning::routing::gossip::NodeAnnouncementInfo) -> Self { + Self { + last_update: value.last_update, + alias: value.alias.to_string(), + addresses: value.addresses().iter().cloned().collect(), + } + } +} diff --git a/src/io/mod.rs b/src/io/mod.rs index d9dab440c..d545f6b93 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -21,9 +21,10 @@ pub(crate) const PEER_INFO_PERSISTENCE_KEY: &str = "peers"; pub(crate) const PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = "payments"; pub(crate) const PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; -/// The spendable output information will be persisted under this prefix. -pub(crate) const SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = "spendable_outputs"; -pub(crate) const SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; +/// The spendable output information used to persisted under this prefix until LDK Node v0.3.0. +pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = + "spendable_outputs"; +pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; /// RapidGossipSync's `latest_sync_timestamp` will be persisted under this key. 
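The `NetworkGraph` wrapper added in `src/graph.rs` above gives callers a read-only view of the node's local graph. A usage sketch; the `network_graph()` accessor on `Node` is assumed here (per the changelog entry on exposing the graph via interface methods) and is not part of this hunk:

```rust
use ldk_node::Node;

fn inspect_graph(node: &Node) {
    // Assumed accessor returning the `graph::NetworkGraph` wrapper shown above.
    let graph = node.network_graph();
    println!("{} nodes, {} channels known", graph.list_nodes().len(), graph.list_channels().len());

    if let Some(scid) = graph.list_channels().first().copied() {
        if let Some(info) = graph.channel(scid) {
            // In non-bindings builds this is LDK's `ChannelInfo`, re-exported as-is.
            println!("channel {}: capacity {:?} sats", scid, info.capacity_sats);
        }
    }
}
```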
pub(crate) const LATEST_RGS_SYNC_TIMESTAMP_PRIMARY_NAMESPACE: &str = ""; diff --git a/src/io/utils.rs b/src/io/utils.rs index f486dda8b..77cc56f55 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -1,24 +1,27 @@ use super::*; use crate::config::WALLET_KEYS_SEED_LEN; -use crate::logger::log_error; +use crate::logger::{log_error, FilesystemLogger}; use crate::peer_store::PeerStore; -use crate::sweep::SpendableOutputInfo; +use crate::sweep::DeprecatedSpendableOutputInfo; +use crate::types::{Broadcaster, ChainSource, DynStore, FeeEstimator, KeysManager, Sweeper}; use crate::{Error, EventQueue, PaymentDetails}; use lightning::routing::gossip::NetworkGraph; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringDecayParameters}; use lightning::util::logger::Logger; use lightning::util::persist::{ - KVStore, KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, - NETWORK_GRAPH_PERSISTENCE_KEY, NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, - NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, + KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, NETWORK_GRAPH_PERSISTENCE_KEY, + NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_KEY, OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::string::PrintableString; use bip39::Mnemonic; +use lightning::util::sweep::{OutputSpendStatus, OutputSweeper}; use rand::{thread_rng, RngCore}; use std::fs; @@ -93,8 +96,8 @@ where } /// Read a previously persisted [`NetworkGraph`] from the store. -pub(crate) fn read_network_graph( - kv_store: Arc, logger: L, +pub(crate) fn read_network_graph( + kv_store: Arc, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -111,12 +114,8 @@ where } /// Read a previously persisted [`ProbabilisticScorer`] from the store. -pub(crate) fn read_scorer< - K: KVStore + Send + Sync, - G: Deref>, - L: Deref + Clone, ->( - kv_store: Arc, network_graph: G, logger: L, +pub(crate) fn read_scorer>, L: Deref + Clone>( + kv_store: Arc, network_graph: G, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -135,9 +134,9 @@ where } /// Read previously persisted events from the store. -pub(crate) fn read_event_queue( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +pub(crate) fn read_event_queue( + kv_store: Arc, logger: L, +) -> Result, std::io::Error> where L::Target: Logger, { @@ -153,9 +152,9 @@ where } /// Read previously persisted peer info from the store. -pub(crate) fn read_peer_info( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +pub(crate) fn read_peer_info( + kv_store: Arc, logger: L, +) -> Result, std::io::Error> where L::Target: Logger, { @@ -171,8 +170,8 @@ where } /// Read previously persisted payments information from the store. -pub(crate) fn read_payments( - kv_store: Arc, logger: L, +pub(crate) fn read_payments( + kv_store: Arc, logger: L, ) -> Result, std::io::Error> where L::Target: Logger, @@ -200,38 +199,131 @@ where Ok(res) } -/// Read previously persisted spendable output information from the store. -pub(crate) fn read_spendable_outputs( - kv_store: Arc, logger: L, -) -> Result, std::io::Error> +/// Read `OutputSweeper` state from the store. 
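The `io/utils.rs` readers above now take `Arc<DynStore>` (a `dyn KVStore` trait object) instead of a generic `K: KVStore` parameter, and `migrate_deprecated_spendable_outputs` below drains the deprecated namespace entry by entry. A generic sketch of that list/read/hand-over/remove loop, using only the real `KVStore` methods; the helper name and callback are illustrative:

```rust
use std::io::Cursor;

use lightning::util::persist::KVStore;
use lightning::util::ser::Readable;

// Enumerate all keys in a deprecated namespace, deserialize each entry, hand it
// to the new owner of that state, and only then delete the old record.
fn drain_deprecated_namespace<T: Readable>(
    kv_store: &(dyn KVStore + Send + Sync), primary: &str, secondary: &str,
    mut migrate_entry: impl FnMut(T) -> Result<(), std::io::Error>,
) -> Result<(), std::io::Error> {
    for key in kv_store.list(primary, secondary)? {
        let mut reader = Cursor::new(kv_store.read(primary, secondary, &key)?);
        let entry = T::read(&mut reader).map_err(|_| {
            std::io::Error::new(std::io::ErrorKind::InvalidData, "Failed to deserialize entry")
        })?;
        migrate_entry(entry)?;
        // Remove only once the new component has durably taken over the entry.
        kv_store.remove(primary, secondary, &key, false)?;
    }
    Ok(())
}
```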
+pub(crate) fn read_output_sweeper( + broadcaster: Arc, fee_estimator: Arc, + chain_data_source: Arc, keys_manager: Arc, kv_store: Arc, + logger: Arc, +) -> Result { + let mut reader = Cursor::new(kv_store.read( + OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, + OUTPUT_SWEEPER_PERSISTENCE_KEY, + )?); + let args = ( + broadcaster, + fee_estimator, + Some(chain_data_source), + Arc::clone(&keys_manager), + keys_manager, + kv_store, + logger.clone(), + ); + OutputSweeper::read(&mut reader, args).map_err(|e| { + log_error!(logger, "Failed to deserialize OutputSweeper: {}", e); + std::io::Error::new(std::io::ErrorKind::InvalidData, "Failed to deserialize OutputSweeper") + }) +} + +/// Read previously persisted spendable output information from the store and migrate to the +/// upstreamed `OutputSweeper`. +/// +/// We first iterate all `DeprecatedSpendableOutputInfo`s and have them tracked by the new +/// `OutputSweeper`. In order to be certain the initial output spends will happen in a single +/// transaction (and safe on-chain fees), we batch them to happen at current height plus two +/// blocks. Lastly, we remove the previously persisted data once we checked they are tracked and +/// awaiting their initial spend at the correct height. +/// +/// Note that this migration will be run in the `Builder`, i.e., at the time when the migration is +/// happening no background sync is ongoing, so we shouldn't have a risk of interleaving block +/// connections during the migration. +pub(crate) fn migrate_deprecated_spendable_outputs( + sweeper: Arc, kv_store: Arc, logger: L, +) -> Result<(), std::io::Error> where L::Target: Logger, { - let mut res = Vec::new(); + let best_block = sweeper.current_best_block(); for stored_key in kv_store.list( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, )? { let mut reader = Cursor::new(kv_store.read( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, &stored_key, )?); - let output = SpendableOutputInfo::read(&mut reader).map_err(|e| { + let output = DeprecatedSpendableOutputInfo::read(&mut reader).map_err(|e| { log_error!(logger, "Failed to deserialize SpendableOutputInfo: {}", e); std::io::Error::new( std::io::ErrorKind::InvalidData, "Failed to deserialize SpendableOutputInfo", ) })?; - res.push(output); + let descriptors = vec![output.descriptor.clone()]; + let spend_delay = Some(best_block.height + 2); + sweeper + .track_spendable_outputs(descriptors, output.channel_id, true, spend_delay) + .map_err(|_| { + log_error!(logger, "Failed to track spendable outputs. Aborting migration, will retry in the future."); + std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Failed to track spendable outputs. 
Aborting migration, will retry in the future.", + ) + })?; + + if let Some(tracked_spendable_output) = + sweeper.tracked_spendable_outputs().iter().find(|o| o.descriptor == output.descriptor) + { + match tracked_spendable_output.status { + OutputSpendStatus::PendingInitialBroadcast { delayed_until_height } => { + if delayed_until_height == spend_delay { + kv_store.remove( + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &stored_key, + false, + )?; + } else { + debug_assert!(false, "Unexpected status in OutputSweeper migration."); + log_error!(logger, "Unexpected status in OutputSweeper migration."); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + } + }, + _ => { + debug_assert!(false, "Unexpected status in OutputSweeper migration."); + log_error!(logger, "Unexpected status in OutputSweeper migration."); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + }, + } + } else { + debug_assert!( + false, + "OutputSweeper failed to track and persist outputs during migration." + ); + log_error!( + logger, + "OutputSweeper failed to track and persist outputs during migration." + ); + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Failed to migrate OutputSweeper state.", + )); + } } - Ok(res) + + Ok(()) } -pub(crate) fn read_latest_rgs_sync_timestamp( - kv_store: Arc, logger: L, +pub(crate) fn read_latest_rgs_sync_timestamp( + kv_store: Arc, logger: L, ) -> Result where L::Target: Logger, @@ -250,8 +342,8 @@ where }) } -pub(crate) fn write_latest_rgs_sync_timestamp( - updated_timestamp: u32, kv_store: Arc, logger: L, +pub(crate) fn write_latest_rgs_sync_timestamp( + updated_timestamp: u32, kv_store: Arc, logger: L, ) -> Result<(), Error> where L::Target: Logger, @@ -277,8 +369,8 @@ where }) } -pub(crate) fn read_latest_node_ann_bcast_timestamp( - kv_store: Arc, logger: L, +pub(crate) fn read_latest_node_ann_bcast_timestamp( + kv_store: Arc, logger: L, ) -> Result where L::Target: Logger, @@ -301,8 +393,8 @@ where }) } -pub(crate) fn write_latest_node_ann_bcast_timestamp( - updated_timestamp: u64, kv_store: Arc, logger: L, +pub(crate) fn write_latest_node_ann_bcast_timestamp( + updated_timestamp: u64, kv_store: Arc, logger: L, ) -> Result<(), Error> where L::Target: Logger, diff --git a/src/lib.rs b/src/lib.rs index 24b2123f5..e594a3a80 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,7 +23,7 @@ //! The primary abstraction of the library is the [`Node`], which can be retrieved by setting up //! and configuring a [`Builder`] to your liking and calling [`build`]. `Node` can then be //! controlled via commands such as [`start`], [`stop`], [`connect_open_channel`], -//! [`send_payment`], etc.: +//! [`send`], etc.: //! //! ```no_run //! use ldk_node::Builder; @@ -43,7 +43,7 @@ //! //! node.start().unwrap(); //! -//! let funding_address = node.new_onchain_address(); +//! let funding_address = node.onchain_payment().new_address(); //! //! // .. fund address .. //! @@ -56,7 +56,7 @@ //! node.event_handled(); //! //! let invoice = Bolt11Invoice::from_str("INVOICE_STR").unwrap(); -//! node.send_payment(&invoice).unwrap(); +//! node.bolt11_payment().send(&invoice).unwrap(); //! //! node.stop().unwrap(); //! } @@ -66,7 +66,7 @@ //! [`start`]: Node::start //! [`stop`]: Node::stop //! [`connect_open_channel`]: Node::connect_open_channel -//! [`send_payment`]: Node::send_payment +//! 
[`send`]: Bolt11Payment::send //! #![cfg_attr(not(feature = "uniffi"), deny(missing_docs))] #![deny(rustdoc::broken_intra_doc_links)] @@ -78,16 +78,18 @@ mod balance; mod builder; mod config; +mod connection; mod error; mod event; mod fee_estimator; mod gossip; +pub mod graph; mod hex_utils; pub mod io; mod liquidity; mod logger; mod message_handler; -mod payment_store; +pub mod payment; mod peer_store; mod sweep; mod tx_broadcaster; @@ -102,7 +104,7 @@ pub use lightning; pub use lightning_invoice; pub use balance::{BalanceDetails, LightningBalance, PendingSweepBalance}; -pub use config::{default_config, Config}; +pub use config::{default_config, AnchorChannelsConfig, Config}; pub use error::Error as NodeError; use error::Error; @@ -121,54 +123,46 @@ pub use builder::BuildError; pub use builder::NodeBuilder as Builder; use config::{ - LDK_PAYMENT_RETRY_TIMEOUT, NODE_ANN_BCAST_INTERVAL, PEER_RECONNECTION_INTERVAL, - RGS_SYNC_INTERVAL, WALLET_SYNC_INTERVAL_MINIMUM_SECS, + default_user_config, LDK_WALLET_SYNC_TIMEOUT_SECS, NODE_ANN_BCAST_INTERVAL, + PEER_RECONNECTION_INTERVAL, RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL, RGS_SYNC_INTERVAL, + WALLET_SYNC_INTERVAL_MINIMUM_SECS, }; +use connection::ConnectionManager; use event::{EventHandler, EventQueue}; use gossip::GossipSource; +use graph::NetworkGraph; use liquidity::LiquiditySource; -use payment_store::PaymentStore; -pub use payment_store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentStatus}; +use payment::store::PaymentStore; +use payment::{Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, SpontaneousPayment}; use peer_store::{PeerInfo, PeerStore}; use types::{ - Broadcaster, ChainMonitor, ChannelManager, FeeEstimator, KeysManager, NetworkGraph, - PeerManager, Router, Scorer, Sweeper, Wallet, + Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, FeeEstimator, + Graph, KeysManager, PeerManager, Router, Scorer, Sweeper, Wallet, }; -pub use types::{ChannelDetails, PeerDetails, UserChannelId}; +pub use types::{ChannelDetails, PeerDetails, TlvEntry, UserChannelId}; use logger::{log_error, log_info, log_trace, FilesystemLogger, Logger}; -use lightning::chain::Confirm; -use lightning::ln::channelmanager::{self, PaymentId, RecipientOnionFields, Retry}; +use lightning::chain::{BestBlock, Confirm}; +use lightning::events::bump_transaction::Wallet as LdkWallet; +use lightning::ln::channelmanager::{ChannelShutdownState, PaymentId}; use lightning::ln::msgs::SocketAddress; -use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning::sign::EntropySource; - -use lightning::util::persist::KVStore; - -use lightning::util::config::{ChannelHandshakeConfig, UserConfig}; pub use lightning::util::logger::Level as LogLevel; use lightning_background_processor::process_events_async; use lightning_transaction_sync::EsploraSyncClient; -use lightning::routing::router::{PaymentParameters, RouteParameters}; -use lightning_invoice::{payment, Bolt11Invoice, Currency}; - -use bitcoin::hashes::sha256::Hash as Sha256; -use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; -use bitcoin::{Address, Txid}; - use rand::Rng; use std::default::Default; use std::net::ToSocketAddrs; +use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex, RwLock}; -use std::time::{Duration, Instant, SystemTime}; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; #[cfg(feature = "uniffi")] uniffi::include_scaffolding!("ldk_node"); @@ -176,32 +170,41 @@ uniffi::include_scaffolding!("ldk_node"); /// The main 
interface object of LDK Node, wrapping the necessary LDK and BDK functionalities. /// /// Needs to be initialized and instantiated through [`Builder::build`]. -pub struct Node { +pub struct Node { runtime: Arc>>, stop_sender: tokio::sync::watch::Sender<()>, + event_handling_stopped_sender: tokio::sync::watch::Sender<()>, config: Arc, wallet: Arc, tx_sync: Arc>>, tx_broadcaster: Arc, fee_estimator: Arc, - event_queue: Arc>>, - channel_manager: Arc>, - chain_monitor: Arc>, - output_sweeper: Arc>, - peer_manager: Arc>, + event_queue: Arc>>, + channel_manager: Arc, + chain_monitor: Arc, + output_sweeper: Arc, + peer_manager: Arc, + connection_manager: Arc>>, keys_manager: Arc, - network_graph: Arc, + network_graph: Arc, gossip_source: Arc, - liquidity_source: Option>>>, - kv_store: Arc, + liquidity_source: Option>>>, + kv_store: Arc, logger: Arc, _router: Arc, scorer: Arc>, - peer_store: Arc>>, - payment_store: Arc>>, + peer_store: Arc>>, + payment_store: Arc>>, + is_listening: Arc, + latest_wallet_sync_timestamp: Arc>>, + latest_onchain_wallet_sync_timestamp: Arc>>, + latest_fee_rate_cache_update_timestamp: Arc>>, + latest_rgs_snapshot_timestamp: Arc>>, + latest_node_announcement_broadcast_timestamp: Arc>>, + latest_channel_monitor_archival_height: Arc>>, } -impl Node { +impl Node { /// Starts the necessary background tasks, such as handling events coming from user input, /// LDK/BDK, and the peer-to-peer network. /// @@ -215,13 +218,20 @@ impl Node { return Err(Error::AlreadyRunning); } - log_info!(self.logger, "Starting up LDK Node on network: {}", self.config.network); + log_info!( + self.logger, + "Starting up LDK Node with node ID {} on network: {}", + self.node_id(), + self.config.network + ); let runtime = tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap(); // Block to ensure we update our fee rate cache once on startup let fee_estimator = Arc::clone(&self.fee_estimator); let sync_logger = Arc::clone(&self.logger); + let sync_fee_rate_update_timestamp = + Arc::clone(&self.latest_fee_rate_cache_update_timestamp); let runtime_ref = &runtime; tokio::task::block_in_place(move || { runtime_ref.block_on(async move { @@ -233,6 +243,9 @@ impl Node { "Initial fee rate cache update finished in {}ms.", now.elapsed().as_millis() ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_fee_rate_update_timestamp.write().unwrap() = unix_time_secs_opt; Ok(()) }, Err(e) => { @@ -246,6 +259,7 @@ impl Node { // Setup wallet sync let wallet = Arc::clone(&self.wallet); let sync_logger = Arc::clone(&self.logger); + let sync_onchain_wallet_timestamp = Arc::clone(&self.latest_onchain_wallet_sync_timestamp); let mut stop_sync = self.stop_sender.subscribe(); let onchain_wallet_sync_interval_secs = self .config @@ -262,16 +276,25 @@ impl Node { loop { tokio::select! 
{ _ = stop_sync.changed() => { + log_trace!( + sync_logger, + "Stopping background syncing on-chain wallet.", + ); return; } _ = onchain_wallet_sync_interval.tick() => { let now = Instant::now(); match wallet.sync().await { - Ok(()) => log_trace!( + Ok(()) => { + log_trace!( sync_logger, "Background sync of on-chain wallet finished in {}ms.", now.elapsed().as_millis() - ), + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + } Err(err) => { log_error!( sync_logger, @@ -289,6 +312,7 @@ impl Node { let mut stop_fee_updates = self.stop_sender.subscribe(); let fee_update_logger = Arc::clone(&self.logger); + let fee_update_timestamp = Arc::clone(&self.latest_fee_rate_cache_update_timestamp); let fee_estimator = Arc::clone(&self.fee_estimator); let fee_rate_cache_update_interval_secs = self.config.fee_rate_cache_update_interval_secs.max(WALLET_SYNC_INTERVAL_MINIMUM_SECS); @@ -302,16 +326,25 @@ impl Node { loop { tokio::select! { _ = stop_fee_updates.changed() => { + log_trace!( + fee_update_logger, + "Stopping background updates of fee rate cache.", + ); return; } _ = fee_rate_update_interval.tick() => { let now = Instant::now(); match fee_estimator.update_fee_estimates().await { - Ok(()) => log_trace!( + Ok(()) => { + log_trace!( fee_update_logger, "Background update of fee rate cache finished in {}ms.", now.elapsed().as_millis() - ), + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *fee_update_timestamp.write().unwrap() = unix_time_secs_opt; + } Err(err) => { log_error!( fee_update_logger, @@ -327,9 +360,13 @@ impl Node { let tx_sync = Arc::clone(&self.tx_sync); let sync_cman = Arc::clone(&self.channel_manager); + let archive_cman = Arc::clone(&self.channel_manager); let sync_cmon = Arc::clone(&self.chain_monitor); + let archive_cmon = Arc::clone(&self.chain_monitor); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); + let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); + let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); let mut stop_sync = self.stop_sender.subscribe(); let wallet_sync_interval_secs = self.config.wallet_sync_interval_secs.max(WALLET_SYNC_INTERVAL_MINIMUM_SECS); @@ -340,6 +377,10 @@ impl Node { loop { tokio::select! 
{ _ = stop_sync.changed() => { + log_trace!( + sync_logger, + "Stopping background syncing Lightning wallet.", + ); return; } _ = wallet_sync_interval.tick() => { @@ -349,14 +390,31 @@ impl Node { &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; let now = Instant::now(); - match tx_sync.sync(confirmables).await { - Ok(()) => log_trace!( - sync_logger, - "Background sync of Lightning wallet finished in {}ms.", - now.elapsed().as_millis() - ), + let timeout_fut = tokio::time::timeout(Duration::from_secs(LDK_WALLET_SYNC_TIMEOUT_SECS), tx_sync.sync(confirmables)); + match timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + log_trace!( + sync_logger, + "Background sync of Lightning wallet finished in {}ms.", + now.elapsed().as_millis() + ); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + + periodically_archive_fully_resolved_monitors( + Arc::clone(&archive_cman), + Arc::clone(&archive_cmon), + Arc::clone(&sync_monitor_archival_height) + ); + } + Err(e) => { + log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) + } + } Err(e) => { - log_error!(sync_logger, "Background sync of Lightning wallet failed: {}", e) + log_error!(sync_logger, "Background sync of Lightning wallet timed out: {}", e) } } } @@ -368,12 +426,17 @@ impl Node { let gossip_source = Arc::clone(&self.gossip_source); let gossip_sync_store = Arc::clone(&self.kv_store); let gossip_sync_logger = Arc::clone(&self.logger); + let gossip_rgs_sync_timestamp = Arc::clone(&self.latest_rgs_snapshot_timestamp); let mut stop_gossip_sync = self.stop_sender.subscribe(); runtime.spawn(async move { let mut interval = tokio::time::interval(RGS_SYNC_INTERVAL); loop { tokio::select! { _ = stop_gossip_sync.changed() => { + log_trace!( + gossip_sync_logger, + "Stopping background syncing RGS gossip data.", + ); return; } _ = interval.tick() => { @@ -395,6 +458,7 @@ impl Node { log_error!(gossip_sync_logger, "Persistence failed: {}", e); panic!("Persistence failed"); }); + *gossip_rgs_sync_timestamp.write().unwrap() = Some(updated_timestamp as u64); } Err(e) => log_error!( gossip_sync_logger, @@ -413,6 +477,7 @@ impl Node { let peer_manager_connection_handler = Arc::clone(&self.peer_manager); let mut stop_listen = self.stop_sender.subscribe(); let listening_logger = Arc::clone(&self.logger); + let listening_indicator = Arc::clone(&self.is_listening); let mut bind_addrs = Vec::with_capacity(listening_addresses.len()); @@ -431,6 +496,7 @@ impl Node { } runtime.spawn(async move { + { let listener = tokio::net::TcpListener::bind(&*bind_addrs).await .unwrap_or_else(|e| { @@ -440,11 +506,17 @@ impl Node { ); }); + listening_indicator.store(true, Ordering::Release); + loop { let peer_mgr = Arc::clone(&peer_manager_connection_handler); tokio::select! { _ = stop_listen.changed() => { - return; + log_trace!( + listening_logger, + "Stopping listening to inbound connections.", + ); + break; } res = listener.accept() => { let tcp_stream = res.unwrap().0; @@ -458,10 +530,14 @@ impl Node { } } } + } + + listening_indicator.store(false, Ordering::Release); }); } // Regularly reconnect to persisted peers. + let connect_cm = Arc::clone(&self.connection_manager); let connect_pm = Arc::clone(&self.peer_manager); let connect_logger = Arc::clone(&self.logger); let connect_peer_store = Arc::clone(&self.peer_store); @@ -472,21 +548,23 @@ impl Node { loop { tokio::select! 
{ _ = stop_connect.changed() => { + log_trace!( + connect_logger, + "Stopping reconnecting known peers.", + ); return; } _ = interval.tick() => { let pm_peers = connect_pm - .get_peer_node_ids() + .list_peers() .iter() - .map(|(peer, _addr)| *peer) + .map(|peer| peer.counterparty_node_id) .collect::>(); for peer_info in connect_peer_store.list_peers().iter().filter(|info| !pm_peers.contains(&info.node_id)) { - let res = do_connect_peer( + let res = connect_cm.do_connect_peer( peer_info.node_id, peer_info.address.clone(), - Arc::clone(&connect_pm), - Arc::clone(&connect_logger), ).await; match res { Ok(_) => { @@ -508,13 +586,21 @@ impl Node { let bcast_config = Arc::clone(&self.config); let bcast_store = Arc::clone(&self.kv_store); let bcast_logger = Arc::clone(&self.logger); + let bcast_ann_timestamp = Arc::clone(&self.latest_node_announcement_broadcast_timestamp); let mut stop_bcast = self.stop_sender.subscribe(); runtime.spawn(async move { // We check every 30 secs whether our last broadcast is NODE_ANN_BCAST_INTERVAL away. + #[cfg(not(test))] let mut interval = tokio::time::interval(Duration::from_secs(30)); + #[cfg(test)] + let mut interval = tokio::time::interval(Duration::from_secs(5)); loop { tokio::select! { _ = stop_bcast.changed() => { + log_trace!( + bcast_logger, + "Stopping broadcasting node announcements.", + ); return; } _ = interval.tick() => { @@ -534,12 +620,12 @@ impl Node { continue; } - if !bcast_cm.list_channels().iter().any(|chan| chan.is_public) { - // Skip if we don't have any public channels. + if !bcast_cm.list_channels().iter().any(|chan| chan.is_public && chan.is_channel_ready) { + // Skip if we don't have any public channels that are ready. continue; } - if bcast_pm.get_peer_node_ids().is_empty() { + if bcast_pm.list_peers().is_empty() { // Skip if we don't have any connected peers to gossip to. continue; } @@ -553,12 +639,17 @@ impl Node { bcast_pm.broadcast_node_announcement([0; 3], [0; 32], addresses); - let unix_time_secs = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap().as_secs(); - io::utils::write_latest_node_ann_bcast_timestamp(unix_time_secs, Arc::clone(&bcast_store), Arc::clone(&bcast_logger)) - .unwrap_or_else(|e| { - log_error!(bcast_logger, "Persistence failed: {}", e); - panic!("Persistence failed"); - }); + let unix_time_secs_opt = + SystemTime::now().duration_since(UNIX_EPOCH).ok().map(|d| d.as_secs()); + *bcast_ann_timestamp.write().unwrap() = unix_time_secs_opt; + + if let Some(unix_time_secs) = unix_time_secs_opt { + io::utils::write_latest_node_ann_bcast_timestamp(unix_time_secs, Arc::clone(&bcast_store), Arc::clone(&bcast_logger)) + .unwrap_or_else(|e| { + log_error!(bcast_logger, "Persistence failed: {}", e); + panic!("Persistence failed"); + }); + } } } } @@ -566,6 +657,7 @@ impl Node { let mut stop_tx_bcast = self.stop_sender.subscribe(); let tx_bcaster = Arc::clone(&self.tx_broadcaster); + let tx_bcast_logger = Arc::clone(&self.logger); runtime.spawn(async move { // Every second we try to clear our broadcasting queue. let mut interval = tokio::time::interval(Duration::from_secs(1)); @@ -573,6 +665,10 @@ impl Node { loop { tokio::select! 
{ _ = stop_tx_bcast.changed() => { + log_trace!( + tx_bcast_logger, + "Stopping broadcasting transactions.", + ); return; } _ = interval.tick() => { @@ -582,10 +678,19 @@ impl Node { } }); + let bump_tx_event_handler = Arc::new(BumpTransactionEventHandler::new( + Arc::clone(&self.tx_broadcaster), + Arc::new(LdkWallet::new(Arc::clone(&self.wallet), Arc::clone(&self.logger))), + Arc::clone(&self.keys_manager), + Arc::clone(&self.logger), + )); + let event_handler = Arc::new(EventHandler::new( Arc::clone(&self.event_queue), Arc::clone(&self.wallet), + bump_tx_event_handler, Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), Arc::clone(&self.output_sweeper), Arc::clone(&self.network_graph), Arc::clone(&self.payment_store), @@ -606,11 +711,17 @@ impl Node { let background_error_logger = Arc::clone(&self.logger); let background_scorer = Arc::clone(&self.scorer); let stop_bp = self.stop_sender.subscribe(); + let sleeper_logger = Arc::clone(&self.logger); let sleeper = move |d| { let mut stop = stop_bp.clone(); + let sleeper_logger = Arc::clone(&sleeper_logger); Box::pin(async move { tokio::select! { _ = stop.changed() => { + log_trace!( + sleeper_logger, + "Stopping processing events.", + ); true } _ = tokio::time::sleep(d) => { @@ -620,6 +731,8 @@ impl Node { }) }; + let background_stop_logger = Arc::clone(&self.logger); + let event_handling_stopped_sender = self.event_handling_stopped_sender.clone(); runtime.spawn(async move { process_events_async( background_persister, @@ -639,15 +752,33 @@ impl Node { log_error!(background_error_logger, "Failed to process events: {}", e); panic!("Failed to process events"); }); + log_trace!(background_stop_logger, "Events processing stopped.",); + + match event_handling_stopped_sender.send(()) { + Ok(_) => (), + Err(e) => { + log_error!( + background_stop_logger, + "Failed to send 'events handling stopped' signal. This should never happen: {}", + e + ); + debug_assert!(false); + }, + } }); if let Some(liquidity_source) = self.liquidity_source.as_ref() { let mut stop_liquidity_handler = self.stop_sender.subscribe(); let liquidity_handler = Arc::clone(&liquidity_source); + let liquidity_logger = Arc::clone(&self.logger); runtime.spawn(async move { loop { tokio::select! { _ = stop_liquidity_handler.changed() => { + log_trace!( + liquidity_logger, + "Stopping processing liquidity events.", + ); return; } _ = liquidity_handler.handle_next_event() => {} @@ -662,18 +793,13 @@ impl Node { Ok(()) } - /// Returns whether the [`Node`] is running. - pub fn is_running(&self) -> bool { - self.runtime.read().unwrap().is_some() - } - /// Disconnects all peers, stops all running background tasks, and shuts down [`Node`]. /// /// After this returns most API methods will return [`Error::NotRunning`]. pub fn stop(&self) -> Result<(), Error> { let runtime = self.runtime.write().unwrap().take().ok_or(Error::NotRunning)?; - log_info!(self.logger, "Shutting down LDK Node..."); + log_info!(self.logger, "Shutting down LDK Node with node ID {}...", self.node_id()); // Stop the runtime. match self.stop_sender.send(()) { @@ -688,15 +814,92 @@ impl Node { }, } - // Stop disconnect peers. + // Disconnect all peers. self.peer_manager.disconnect_all_peers(); + // Wait until event handling stopped, at least until a timeout is reached. 
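A condensed sketch of the shutdown handshake implemented in the following hunk: the background processor signals completion over a `tokio::sync::watch` channel and `stop()` waits for that signal with an upper bound, so shutdown cannot hang on a stuck wallet sync. Names are illustrative:

```rust
use std::time::Duration;

async fn wait_for_event_handling_stopped(
    mut receiver: tokio::sync::watch::Receiver<()>, timeout_secs: u64,
) -> bool {
    match tokio::time::timeout(Duration::from_secs(timeout_secs), receiver.changed()).await {
        Ok(Ok(())) => true,     // the sender signalled that event handling stopped
        Ok(Err(_)) => false,    // the sender was dropped without ever signalling
        Err(_elapsed) => false, // timed out waiting; proceed with shutdown anyway
    }
}
```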
+ let event_handling_stopped_logger = Arc::clone(&self.logger); + let mut event_handling_stopped_receiver = self.event_handling_stopped_sender.subscribe(); + + // FIXME: For now, we wait up to 100 secs (BDK_WALLET_SYNC_TIMEOUT_SECS + 10) to allow + // event handling to exit gracefully even if it was blocked on the BDK wallet syncing. We + // should drop this considerably post upgrading to BDK 1.0. + let timeout_res = runtime.block_on(async { + tokio::time::timeout( + Duration::from_secs(100), + event_handling_stopped_receiver.changed(), + ) + .await + }); + + match timeout_res { + Ok(stop_res) => match stop_res { + Ok(()) => {}, + Err(e) => { + log_error!( + event_handling_stopped_logger, + "Stopping event handling failed. This should never happen: {}", + e + ); + panic!("Stopping event handling failed. This should never happen."); + }, + }, + Err(e) => { + log_error!( + event_handling_stopped_logger, + "Stopping event handling timed out: {}", + e + ); + }, + } + + #[cfg(tokio_unstable)] + { + log_trace!( + self.logger, + "Active runtime tasks left prior to shutdown: {}", + runtime.metrics().active_tasks_count() + ); + } + + // Shutdown our runtime. By now ~no or only very few tasks should be left. runtime.shutdown_timeout(Duration::from_secs(10)); log_info!(self.logger, "Shutdown complete."); Ok(()) } + /// Returns the status of the [`Node`]. + pub fn status(&self) -> NodeStatus { + let is_running = self.runtime.read().unwrap().is_some(); + let is_listening = self.is_listening.load(Ordering::Acquire); + let current_best_block = self.channel_manager.current_best_block().into(); + let latest_wallet_sync_timestamp = *self.latest_wallet_sync_timestamp.read().unwrap(); + let latest_onchain_wallet_sync_timestamp = + *self.latest_onchain_wallet_sync_timestamp.read().unwrap(); + let latest_fee_rate_cache_update_timestamp = + *self.latest_fee_rate_cache_update_timestamp.read().unwrap(); + let latest_rgs_snapshot_timestamp = *self.latest_rgs_snapshot_timestamp.read().unwrap(); + let latest_node_announcement_broadcast_timestamp = + *self.latest_node_announcement_broadcast_timestamp.read().unwrap(); + + NodeStatus { + is_running, + is_listening, + current_best_block, + latest_wallet_sync_timestamp, + latest_onchain_wallet_sync_timestamp, + latest_fee_rate_cache_update_timestamp, + latest_rgs_snapshot_timestamp, + latest_node_announcement_broadcast_timestamp, + } + } + + /// Returns the config with which the [`Node`] was initialized. + pub fn config(&self) -> Config { + self.config.as_ref().clone() + } + /// Returns the next event in the event queue, if currently available. /// /// Will return `Some(..)` if an event is available and `None` otherwise. @@ -748,38 +951,116 @@ impl Node { self.config.listening_addresses.clone() } - /// Retrieve a new on-chain/funding address. - pub fn new_onchain_address(&self) -> Result { - let funding_address = self.wallet.get_new_address()?; - log_info!(self.logger, "Generated new funding address: {}", funding_address); - Ok(funding_address) + /// Returns a payment handler allowing to create and pay [BOLT 11] invoices. 
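A usage sketch for the per-type handler API introduced by these accessors. Only `bolt11_payment().send(..)` appears in this diff (in the updated crate docs); the `receive` call and its exact signature are assumed for illustration:

```rust
use std::str::FromStr;

use ldk_node::lightning_invoice::Bolt11Invoice;
use ldk_node::Node;

fn pay_and_receive(node: &Node) -> Result<(), ldk_node::NodeError> {
    // Pay an invoice obtained out of band.
    let invoice = Bolt11Invoice::from_str("INVOICE_STR").expect("valid invoice");
    node.bolt11_payment().send(&invoice)?;

    // Create an invoice for 10_000 msat expiring in one hour (assumed signature).
    let _our_invoice = node.bolt11_payment().receive(10_000, "coffee", 3_600)?;
    Ok(())
}
```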
+ /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + #[cfg(not(feature = "uniffi"))] + pub fn bolt11_payment(&self) -> Bolt11Payment { + Bolt11Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), + Arc::clone(&self.keys_manager), + self.liquidity_source.clone(), + Arc::clone(&self.payment_store), + Arc::clone(&self.peer_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + ) } - /// Send an on-chain payment to the given address. - pub fn send_to_onchain_address( - &self, address: &bitcoin::Address, amount_sats: u64, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } + /// Returns a payment handler allowing to create and pay [BOLT 11] invoices. + /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + #[cfg(feature = "uniffi")] + pub fn bolt11_payment(&self) -> Arc { + Arc::new(Bolt11Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.connection_manager), + Arc::clone(&self.keys_manager), + self.liquidity_source.clone(), + Arc::clone(&self.payment_store), + Arc::clone(&self.peer_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + )) + } - let cur_balance = self.wallet.get_balance()?; - if cur_balance.get_spendable() < amount_sats { - log_error!(self.logger, "Unable to send payment due to insufficient funds."); - return Err(Error::InsufficientFunds); - } - self.wallet.send_to_address(address, Some(amount_sats)) + /// Returns a payment handler allowing to create and pay [BOLT 12] offers and refunds. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + #[cfg(not(feature = "uniffi"))] + pub fn bolt12_payment(&self) -> Arc { + Arc::new(Bolt12Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.logger), + )) } - /// Send an on-chain payment to the given address, draining all the available funds. - pub fn send_all_to_onchain_address(&self, address: &bitcoin::Address) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } + /// Returns a payment handler allowing to create and pay [BOLT 12] offers and refunds. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + #[cfg(feature = "uniffi")] + pub fn bolt12_payment(&self) -> Arc { + Arc::new(Bolt12Payment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.logger), + )) + } + + /// Returns a payment handler allowing to send spontaneous ("keysend") payments. + #[cfg(not(feature = "uniffi"))] + pub fn spontaneous_payment(&self) -> SpontaneousPayment { + SpontaneousPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.keys_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + ) + } + + /// Returns a payment handler allowing to send spontaneous ("keysend") payments. 
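A usage sketch for the spontaneous ("keysend") handler; the `send` method name and signature are assumed from the payment handler restructuring and do not appear in this hunk:

```rust
use bitcoin::secp256k1::PublicKey;

use ldk_node::Node;

fn tip_node(node: &Node, counterparty: PublicKey) -> Result<(), ldk_node::NodeError> {
    // Send 5_000 msat without an invoice; the preimage is generated locally.
    node.spontaneous_payment().send(5_000, counterparty)?;
    Ok(())
}
```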
+ #[cfg(feature = "uniffi")] + pub fn spontaneous_payment(&self) -> Arc { + Arc::new(SpontaneousPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.channel_manager), + Arc::clone(&self.keys_manager), + Arc::clone(&self.payment_store), + Arc::clone(&self.config), + Arc::clone(&self.logger), + )) + } + + /// Returns a payment handler allowing to send and receive on-chain payments. + #[cfg(not(feature = "uniffi"))] + pub fn onchain_payment(&self) -> OnchainPayment { + OnchainPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.wallet), + Arc::clone(&self.channel_manager), + Arc::clone(&self.config), + Arc::clone(&self.logger), + ) + } - self.wallet.send_to_address(address, None) + /// Returns a payment handler allowing to send and receive on-chain payments. + #[cfg(feature = "uniffi")] + pub fn onchain_payment(&self) -> Arc { + Arc::new(OnchainPayment::new( + Arc::clone(&self.runtime), + Arc::clone(&self.wallet), + Arc::clone(&self.channel_manager), + Arc::clone(&self.config), + Arc::clone(&self.logger), + )) } /// Retrieve a list of known channels. @@ -803,14 +1084,13 @@ impl Node { let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); + let con_cm = Arc::clone(&self.connection_manager); // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. tokio::task::block_in_place(move || { runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await }) })?; @@ -854,6 +1134,10 @@ impl Node { /// channel counterparty on channel open. This can be useful to start out with the balance not /// entirely shifted to one side, therefore allowing to receive payments from the getgo. /// + /// If Anchor channels are enabled, this will ensure the configured + /// [`AnchorChannelsConfig::per_channel_reserve_sats`] is available and will be retained before + /// opening the channel. + /// /// Returns a [`UserChannelId`] allowing to locally keep track of the channel. pub fn connect_open_channel( &self, node_id: PublicKey, address: SocketAddress, channel_amount_sats: u64, @@ -866,37 +1150,70 @@ impl Node { } let runtime = rt_lock.as_ref().unwrap(); - let cur_balance = self.wallet.get_balance()?; - if cur_balance.get_spendable() < channel_amount_sats { - log_error!(self.logger, "Unable to create channel due to insufficient funds."); - return Err(Error::InsufficientFunds); - } - let peer_info = PeerInfo { node_id, address }; let con_node_id = peer_info.node_id; let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); + let con_cm = Arc::clone(&self.connection_manager); + + let cur_anchor_reserve_sats = + total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let spendable_amount_sats = + self.wallet.get_spendable_amount_sats(cur_anchor_reserve_sats).unwrap_or(0); + + // Fail early if we have less than the channel value available. + if spendable_amount_sats < channel_amount_sats { + log_error!(self.logger, + "Unable to create channel due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, channel_amount_sats + ); + return Err(Error::InsufficientFunds); + } // We need to use our main runtime here as a local runtime might not be around to poll // connection futures going forward. 
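The on-chain handler returned above replaces the removed `new_onchain_address` and `send_to_onchain_address` methods. `new_address` is shown in the updated crate docs; `send_to_address` and its signature are assumed here:

```rust
use ldk_node::Node;

fn fund_and_pay_out(node: &Node, payout_addr: &bitcoin::Address) -> Result<(), ldk_node::NodeError> {
    // Retrieve a fresh funding address from the BDK-backed on-chain wallet.
    let funding_address = node.onchain_payment().new_address()?;
    println!("Send funds to {}", funding_address);

    // Later, pay out 25_000 sats on-chain (assumed signature).
    node.onchain_payment().send_to_address(payout_addr, 25_000)?;
    Ok(())
}
```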
tokio::task::block_in_place(move || { runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await }) })?; - let channel_config = (*(channel_config.unwrap_or_default())).clone().into(); - let user_config = UserConfig { - channel_handshake_limits: Default::default(), - channel_handshake_config: ChannelHandshakeConfig { - announced_channel: announce_channel, - ..Default::default() - }, - channel_config, - ..Default::default() - }; + // Fail if we have less than the channel value + anchor reserve available (if applicable). + let init_features = self + .peer_manager + .peer_by_node_id(&node_id) + .ok_or(Error::ConnectionFailed)? + .init_features; + let required_funds_sats = channel_amount_sats + + self.config.anchor_channels_config.as_ref().map_or(0, |c| { + if init_features.requires_anchors_zero_fee_htlc_tx() + && !c.trusted_peers_no_reserve.contains(&node_id) + { + c.per_channel_reserve_sats + } else { + 0 + } + }); + + if spendable_amount_sats < required_funds_sats { + log_error!(self.logger, + "Unable to create channel due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, required_funds_sats + ); + return Err(Error::InsufficientFunds); + } + + let mut user_config = default_user_config(&self.config); + user_config.channel_handshake_config.announced_channel = announce_channel; + user_config.channel_config = (*(channel_config.unwrap_or_default())).clone().into(); + // We set the max inflight to 100% for private channels. + // FIXME: LDK will default to this behavior soon, too, at which point we should drop this + // manual override. + if !announce_channel { + user_config + .channel_handshake_config + .max_inbound_htlc_value_in_flight_percent_of_channel = 100; + } let push_msat = push_to_counterparty_msat.unwrap_or(0); let user_channel_id: u128 = rand::thread_rng().gen::(); @@ -925,7 +1242,8 @@ impl Node { } } - /// Manually sync the LDK and BDK wallets with the current chain state. + /// Manually sync the LDK and BDK wallets with the current chain state and update the fee rate + /// cache. /// /// **Note:** The wallets are regularly synced in the background, which is configurable via /// [`Config::onchain_wallet_sync_interval_secs`] and [`Config::wallet_sync_interval_secs`]. 
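The reserve check added to `connect_open_channel` above boils down to simple arithmetic: with Anchor channels enabled, the configured per-channel reserve must be spendable on top of the channel amount unless the counterparty is explicitly trusted to broadcast for us. A free-standing sketch (the helper itself is illustrative; field names mirror `AnchorChannelsConfig`):

```rust
use bitcoin::secp256k1::PublicKey;

fn required_funds_sats(
    channel_amount_sats: u64, per_channel_reserve_sats: u64,
    trusted_peers_no_reserve: &[PublicKey], counterparty: &PublicKey,
    counterparty_requires_anchors: bool,
) -> u64 {
    let reserve_sats = if counterparty_requires_anchors
        && !trusted_peers_no_reserve.contains(counterparty)
    {
        per_channel_reserve_sats
    } else {
        0
    };
    channel_amount_sats + reserve_sats
}

// The open only proceeds if `spendable_amount_sats >= required_funds_sats(..)`.
```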
@@ -940,7 +1258,10 @@ impl Node { let wallet = Arc::clone(&self.wallet); let tx_sync = Arc::clone(&self.tx_sync); let sync_cman = Arc::clone(&self.channel_manager); + let archive_cman = Arc::clone(&self.channel_manager); let sync_cmon = Arc::clone(&self.chain_monitor); + let archive_cmon = Arc::clone(&self.chain_monitor); + let fee_estimator = Arc::clone(&self.fee_estimator); let sync_sweeper = Arc::clone(&self.output_sweeper); let sync_logger = Arc::clone(&self.logger); let confirmables = vec![ @@ -948,11 +1269,18 @@ impl Node { &*sync_cmon as &(dyn Confirm + Sync + Send), &*sync_sweeper as &(dyn Confirm + Sync + Send), ]; + let sync_wallet_timestamp = Arc::clone(&self.latest_wallet_sync_timestamp); + let sync_fee_rate_update_timestamp = + Arc::clone(&self.latest_fee_rate_cache_update_timestamp); + let sync_onchain_wallet_timestamp = Arc::clone(&self.latest_onchain_wallet_sync_timestamp); + let sync_monitor_archival_height = Arc::clone(&self.latest_channel_monitor_archival_height); tokio::task::block_in_place(move || { tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap().block_on( async move { let now = Instant::now(); + // We don't add an additional timeout here, as `Wallet::sync` already returns + // after a timeout. match wallet.sync().await { Ok(()) => { log_info!( @@ -960,6 +1288,11 @@ impl Node { "Sync of on-chain wallet finished in {}ms.", now.elapsed().as_millis() ); + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_onchain_wallet_timestamp.write().unwrap() = unix_time_secs_opt; }, Err(e) => { log_error!(sync_logger, "Sync of on-chain wallet failed: {}", e); @@ -968,18 +1301,62 @@ impl Node { }; let now = Instant::now(); - match tx_sync.sync(confirmables).await { + // We don't add an additional timeout here, as + // `FeeEstimator::update_fee_estimates` already returns after a timeout. + match fee_estimator.update_fee_estimates().await { Ok(()) => { log_info!( sync_logger, - "Sync of Lightning wallet finished in {}ms.", + "Fee rate cache update finished in {}ms.", now.elapsed().as_millis() ); - Ok(()) + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_fee_rate_update_timestamp.write().unwrap() = unix_time_secs_opt; + }, + Err(e) => { + log_error!(sync_logger, "Fee rate cache update failed: {}", e,); + return Err(e); + }, + } + + let now = Instant::now(); + let tx_sync_timeout_fut = tokio::time::timeout( + Duration::from_secs(LDK_WALLET_SYNC_TIMEOUT_SECS), + tx_sync.sync(confirmables), + ); + match tx_sync_timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + log_info!( + sync_logger, + "Sync of Lightning wallet finished in {}ms.", + now.elapsed().as_millis() + ); + + let unix_time_secs_opt = SystemTime::now() + .duration_since(UNIX_EPOCH) + .ok() + .map(|d| d.as_secs()); + *sync_wallet_timestamp.write().unwrap() = unix_time_secs_opt; + + periodically_archive_fully_resolved_monitors( + archive_cman, + archive_cmon, + sync_monitor_archival_height, + ); + Ok(()) + }, + Err(e) => { + log_error!(sync_logger, "Sync of Lightning wallet failed: {}", e); + Err(e.into()) + }, }, Err(e) => { - log_error!(sync_logger, "Sync of Lightning wallet failed: {}", e); - Err(e.into()) + log_error!(sync_logger, "Sync of Lightning wallet timed out: {}", e); + Err(Error::TxSyncTimeout) }, } }, @@ -988,30 +1365,84 @@ impl Node { } /// Close a previously opened channel. + /// + /// Will attempt to close a channel coopertively. 
If this fails, users might need to resort to + /// [`Node::force_close_channel`]. pub fn close_channel( &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, + ) -> Result<(), Error> { + self.close_channel_internal(user_channel_id, counterparty_node_id, false) + } + + /// Force-close a previously opened channel. + /// + /// Will force-close the channel, potentially broadcasting our latest state. Note that in + /// contrast to cooperative closure, force-closing will have the channel funds time-locked, + /// i.e., they will only be available after the counterparty had time to contest our claim. + /// Force-closing channels also more costly in terms of on-chain fees. So cooperative closure + /// should always be preferred (and tried first). + /// + /// Broadcasting the closing transactions will be omitted for Anchor channels if we trust the + /// counterparty to broadcast for us (see [`AnchorChannelsConfig::trusted_peers_no_reserve`] + /// for more information). + pub fn force_close_channel( + &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, + ) -> Result<(), Error> { + self.close_channel_internal(user_channel_id, counterparty_node_id, true) + } + + fn close_channel_internal( + &self, user_channel_id: &UserChannelId, counterparty_node_id: PublicKey, force: bool, ) -> Result<(), Error> { let open_channels = self.channel_manager.list_channels_with_counterparty(&counterparty_node_id); if let Some(channel_details) = open_channels.iter().find(|c| c.user_channel_id == user_channel_id.0) { - match self - .channel_manager - .close_channel(&channel_details.channel_id, &counterparty_node_id) - { - Ok(_) => { - // Check if this was the last open channel, if so, forget the peer. - if open_channels.len() == 1 { - self.peer_store.remove_peer(&counterparty_node_id)?; - } - Ok(()) - }, - Err(_) => Err(Error::ChannelClosingFailed), + if force { + if self.config.anchor_channels_config.as_ref().map_or(false, |acc| { + acc.trusted_peers_no_reserve.contains(&counterparty_node_id) + }) { + self.channel_manager + .force_close_without_broadcasting_txn( + &channel_details.channel_id, + &counterparty_node_id, + ) + .map_err(|e| { + log_error!( + self.logger, + "Failed to force-close channel to trusted peer: {:?}", + e + ); + Error::ChannelClosingFailed + })?; + } else { + self.channel_manager + .force_close_broadcasting_latest_txn( + &channel_details.channel_id, + &counterparty_node_id, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to force-close channel: {:?}", e); + Error::ChannelClosingFailed + })?; + } + } else { + self.channel_manager + .close_channel(&channel_details.channel_id, &counterparty_node_id) + .map_err(|e| { + log_error!(self.logger, "Failed to close channel: {:?}", e); + Error::ChannelClosingFailed + })?; + } + + // Check if this was the last open channel, if so, forget the peer. + if open_channels.len() == 1 { + self.peer_store.remove_peer(&counterparty_node_id)?; } - } else { - Ok(()) } + + Ok(()) } /// Update the config for a previously opened channel. @@ -1036,594 +1467,33 @@ impl Node { } } - /// Send a payment given an invoice. - pub fn send_payment(&self, invoice: &Bolt11Invoice) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (payment_hash, recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". 
Please use send_payment_using_amount instead."); - Error::InvalidInvoice - })?; - - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: an invoice must not be paid twice."); - return Err(Error::DuplicatePayment); - } - } - - let payment_secret = Some(*invoice.payment_secret()); - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); - let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - - match self.channel_manager.send_payment( - payment_hash, - recipient_onion, - payment_id, - route_params, - retry_strategy, - ) { - Ok(()) => { - let payee_pubkey = invoice.recover_payee_pub_key(); - let amt_msat = invoice.amount_milli_satoshis().unwrap(); - log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); - - let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - preimage: None, - hash: payment_hash, - secret: payment_secret, - amount_msat: invoice.amount_milli_satoshis(), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - - /// Send a payment given an invoice and an amount in millisatoshi. - /// - /// This will fail if the amount given is less than the value required by the given invoice. - /// - /// This can be used to pay a so-called "zero-amount" invoice, i.e., an invoice that leaves the - /// amount paid to be determined by the user. 
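The legacy method removed here (its body follows) enforced one invariant worth keeping in mind when paying zero-amount invoices with the new per-type handlers: an explicitly supplied amount may top up, but never undercut, the amount encoded in the invoice. A sketch of that check:

```rust
fn validate_amount_override(
    invoice_amount_msat: Option<u64>, amount_msat: u64,
) -> Result<(), String> {
    if let Some(required_msat) = invoice_amount_msat {
        if amount_msat < required_msat {
            return Err(format!(
                "amount must be at least the invoice amount: required {}msat, gave {}msat",
                required_msat, amount_msat
            ));
        }
    }
    Ok(())
}
```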
- pub fn send_payment_using_amount( - &self, invoice: &Bolt11Invoice, amount_msat: u64, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { - if amount_msat < invoice_amount_msat { - log_error!( - self.logger, - "Failed to pay as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); - return Err(Error::InvalidAmount); - } - } - - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: an invoice must not be paid twice."); - return Err(Error::DuplicatePayment); - } - } - - let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); - let payment_secret = invoice.payment_secret(); - let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); - let mut payment_params = PaymentParameters::from_node_id( - invoice.recover_payee_pub_key(), - invoice.min_final_cltv_expiry_delta() as u32, - ) - .with_expiry_time(expiry_time.as_secs()) - .with_route_hints(invoice.route_hints()) - .map_err(|_| Error::InvalidInvoice)?; - if let Some(features) = invoice.features() { - payment_params = payment_params - .with_bolt11_features(features.clone()) - .map_err(|_| Error::InvalidInvoice)?; - } - let route_params = - RouteParameters::from_payment_params_and_value(payment_params, amount_msat); - - let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let recipient_fields = RecipientOnionFields::secret_only(*payment_secret); - - match self.channel_manager.send_payment( - payment_hash, - recipient_fields, - payment_id, - route_params, - retry_strategy, - ) { - Ok(_payment_id) => { - let payee_pubkey = invoice.recover_payee_pub_key(); - log_info!( - self.logger, - "Initiated sending {} msat to {}", - amount_msat, - payee_pubkey - ); - - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(*payment_secret), - amount_msat: Some(amount_msat), - direction: PaymentDirection::Outbound, - status: PaymentStatus::Failed, - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - - /// Send a spontaneous, aka. 
"keysend", payment - pub fn send_spontaneous_payment( - &self, amount_msat: u64, node_id: PublicKey, - ) -> Result { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); - let payment_hash = PaymentHash(Sha256::hash(&payment_preimage.0).to_byte_array()); - - if let Some(payment) = self.payment_store.get(&payment_hash) { - if payment.status == PaymentStatus::Pending - || payment.status == PaymentStatus::Succeeded - { - log_error!(self.logger, "Payment error: must not send duplicate payments."); - return Err(Error::DuplicatePayment); - } - } - - let route_params = RouteParameters::from_payment_params_and_value( - PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta), - amount_msat, - ); - let recipient_fields = RecipientOnionFields::spontaneous_empty(); - - match self.channel_manager.send_spontaneous_payment_with_retry( - Some(payment_preimage), - recipient_fields, - PaymentId(payment_hash.0), - route_params, - Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT), - ) { - Ok(_payment_id) => { - log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); - - let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, - status: PaymentStatus::Pending, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), - lsp_fee_limits: None, - }; - self.payment_store.insert(payment)?; - - Ok(payment_hash) - }, - Err(e) => { - log_error!(self.logger, "Failed to send payment: {:?}", e); - - match e { - channelmanager::RetryableSendFailure::DuplicatePayment => { - Err(Error::DuplicatePayment) - }, - _ => { - let payment = PaymentDetails { - hash: payment_hash, - preimage: Some(payment_preimage), - secret: None, - status: PaymentStatus::Failed, - direction: PaymentDirection::Outbound, - amount_msat: Some(amount_msat), - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - Err(Error::PaymentSendingFailed) - }, - } - }, - } - } - - /// Sends payment probes over all paths of a route that would be used to pay the given invoice. - /// - /// This may be used to send "pre-flight" probes, i.e., to train our scorer before conducting - /// the actual payment. Note this is only useful if there likely is sufficient time for the - /// probe to settle before sending out the actual payment, e.g., when waiting for user - /// confirmation in a wallet UI. - /// - /// Otherwise, there is a chance the probe could take up some liquidity needed to complete the - /// actual payment. Users should therefore be cautious and might avoid sending probes if - /// liquidity is scarce and/or they don't expect the probe to return before they send the - /// payment. To mitigate this issue, channels with available liquidity less than the required - /// amount times [`Config::probing_liquidity_limit_multiplier`] won't be used to send - /// pre-flight probes. - pub fn send_payment_probes(&self, invoice: &Bolt11Invoice) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (_payment_hash, _recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". 
Please use send_payment_probes_using_amount instead."); - Error::InvalidInvoice - })?; - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - - self.channel_manager - .send_preflight_probes(route_params, liquidity_limit_multiplier) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - - /// Sends payment probes over all paths of a route that would be used to pay the given - /// amount to the given `node_id`. - /// - /// See [`Self::send_payment_probes`] for more information. - pub fn send_spontaneous_payment_probes( - &self, amount_msat: u64, node_id: PublicKey, - ) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - let cltv_expiry_delta = self.config.default_cltv_expiry_delta; - - self.channel_manager - .send_spontaneous_preflight_probes( - node_id, - amount_msat, - cltv_expiry_delta, - liquidity_limit_multiplier, - ) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - - /// Sends payment probes over all paths of a route that would be used to pay the given - /// zero-value invoice using the given amount. - /// - /// This can be used to send pre-flight probes for a so-called "zero-amount" invoice, i.e., an - /// invoice that leaves the amount paid to be determined by the user. - /// - /// See [`Self::send_payment_probes`] for more information. - pub fn send_payment_probes_using_amount( - &self, invoice: &Bolt11Invoice, amount_msat: u64, - ) -> Result<(), Error> { - let rt_lock = self.runtime.read().unwrap(); - if rt_lock.is_none() { - return Err(Error::NotRunning); - } - - let (_payment_hash, _recipient_onion, route_params) = if let Some(invoice_amount_msat) = - invoice.amount_milli_satoshis() - { - if amount_msat < invoice_amount_msat { - log_error!( - self.logger, - "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); - return Err(Error::InvalidAmount); - } - - payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); - Error::InvalidInvoice - })? - } else { - payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); - Error::InvalidInvoice - })? - }; - - let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); - - self.channel_manager - .send_preflight_probes(route_params, liquidity_limit_multiplier) - .map_err(|e| { - log_error!(self.logger, "Failed to send payment probes: {:?}", e); - Error::ProbeSendingFailed - })?; - - Ok(()) - } - - /// Returns a payable invoice that can be used to request and receive a payment of the amount - /// given. - pub fn receive_payment( - &self, amount_msat: u64, description: &str, expiry_secs: u32, - ) -> Result { - self.receive_payment_inner(Some(amount_msat), description, expiry_secs) - } - - /// Returns a payable invoice that can be used to request and receive a payment for which the - /// amount is to be determined by the user, also known as a "zero-amount" invoice. 
- pub fn receive_variable_amount_payment( - &self, description: &str, expiry_secs: u32, - ) -> Result { - self.receive_payment_inner(None, description, expiry_secs) - } - - fn receive_payment_inner( - &self, amount_msat: Option, description: &str, expiry_secs: u32, - ) -> Result { - let currency = Currency::from(self.config.network); - let keys_manager = Arc::clone(&self.keys_manager); - let invoice = match lightning_invoice::utils::create_invoice_from_channelmanager( - &self.channel_manager, - keys_manager, - Arc::clone(&self.logger), - currency, - amount_msat, - description.to_string(), - expiry_secs, - None, - ) { - Ok(inv) => { - log_info!(self.logger, "Invoice created: {}", inv); - inv - }, - Err(e) => { - log_error!(self.logger, "Failed to create invoice: {}", e); - return Err(Error::InvoiceCreationFailed); - }, - }; - - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), - amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - - self.payment_store.insert(payment)?; - - Ok(invoice) - } - - /// Returns a payable invoice that can be used to request a payment of the amount given and - /// receive it via a newly created just-in-time (JIT) channel. - /// - /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel - /// to us, supplying just-in-time inbound liquidity. - /// - /// If set, `max_total_lsp_fee_limit_msat` will limit how much fee we allow the LSP to take for opening the - /// channel to us. We'll use its cheapest offer otherwise. - /// - /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md - pub fn receive_payment_via_jit_channel( - &self, amount_msat: u64, description: &str, expiry_secs: u32, - max_total_lsp_fee_limit_msat: Option, - ) -> Result { - self.receive_payment_via_jit_channel_inner( - Some(amount_msat), - description, - expiry_secs, - max_total_lsp_fee_limit_msat, - None, - ) - } - - /// Returns a payable invoice that can be used to request a variable amount payment (also known - /// as "zero-amount" invoice) and receive it via a newly created just-in-time (JIT) channel. - /// - /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel - /// to us, supplying just-in-time inbound liquidity. - /// - /// If set, `max_proportional_lsp_fee_limit_ppm_msat` will limit how much proportional fee, in - /// parts-per-million millisatoshis, we allow the LSP to take for opening the channel to us. - /// We'll use its cheapest offer otherwise. 
- /// - /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md - pub fn receive_variable_amount_payment_via_jit_channel( - &self, description: &str, expiry_secs: u32, - max_proportional_lsp_fee_limit_ppm_msat: Option, - ) -> Result { - self.receive_payment_via_jit_channel_inner( - None, - description, - expiry_secs, - None, - max_proportional_lsp_fee_limit_ppm_msat, - ) - } - - fn receive_payment_via_jit_channel_inner( - &self, amount_msat: Option, description: &str, expiry_secs: u32, - max_total_lsp_fee_limit_msat: Option, - max_proportional_lsp_fee_limit_ppm_msat: Option, - ) -> Result { - let liquidity_source = - self.liquidity_source.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; - - let (node_id, address) = liquidity_source - .get_liquidity_source_details() - .ok_or(Error::LiquiditySourceUnavailable)?; - - let rt_lock = self.runtime.read().unwrap(); - let runtime = rt_lock.as_ref().unwrap(); - - let peer_info = PeerInfo { node_id, address }; - - let con_node_id = peer_info.node_id; - let con_addr = peer_info.address.clone(); - let con_logger = Arc::clone(&self.logger); - let con_pm = Arc::clone(&self.peer_manager); - - // We need to use our main runtime here as a local runtime might not be around to poll - // connection futures going forward. - tokio::task::block_in_place(move || { - runtime.block_on(async move { - connect_peer_if_necessary(con_node_id, con_addr, con_pm, con_logger).await - }) - })?; - - log_info!(self.logger, "Connected to LSP {}@{}. ", peer_info.node_id, peer_info.address); - - let liquidity_source = Arc::clone(&liquidity_source); - let (invoice, lsp_total_opening_fee, lsp_prop_opening_fee) = - tokio::task::block_in_place(move || { - runtime.block_on(async move { - if let Some(amount_msat) = amount_msat { - liquidity_source - .lsps2_receive_to_jit_channel( - amount_msat, - description, - expiry_secs, - max_total_lsp_fee_limit_msat, - ) - .await - .map(|(invoice, total_fee)| (invoice, Some(total_fee), None)) - } else { - liquidity_source - .lsps2_receive_variable_amount_to_jit_channel( - description, - expiry_secs, - max_proportional_lsp_fee_limit_ppm_msat, - ) - .await - .map(|(invoice, prop_fee)| (invoice, None, Some(prop_fee))) - } - }) - })?; - - // Register payment in payment store. - let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); - let lsp_fee_limits = Some(LSPFeeLimits { - max_total_opening_fee_msat: lsp_total_opening_fee, - max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, - }); - let payment = PaymentDetails { - hash: payment_hash, - preimage: None, - secret: Some(invoice.payment_secret().clone()), - amount_msat, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - lsp_fee_limits, - }; - - self.payment_store.insert(payment)?; - - // Persist LSP peer to make sure we reconnect on restart. - self.peer_store.add_peer(peer_info)?; - - Ok(invoice) - } - - /// Retrieve the details of a specific payment with the given hash. + /// Retrieve the details of a specific payment with the given id. /// /// Returns `Some` if the payment was known and `None` otherwise. - pub fn payment(&self, payment_hash: &PaymentHash) -> Option { - self.payment_store.get(payment_hash) + pub fn payment(&self, payment_id: &PaymentId) -> Option { + self.payment_store.get(payment_id) } - /// Remove the payment with the given hash from the store. 
- pub fn remove_payment(&self, payment_hash: &PaymentHash) -> Result<(), Error> { - self.payment_store.remove(&payment_hash) + /// Remove the payment with the given id from the store. + pub fn remove_payment(&self, payment_id: &PaymentId) -> Result<(), Error> { + self.payment_store.remove(&payment_id) } /// Retrieves an overview of all known balances. pub fn list_balances(&self) -> BalanceDetails { - let (total_onchain_balance_sats, spendable_onchain_balance_sats) = self - .wallet - .get_balance() - .map(|bal| (bal.get_total(), bal.get_spendable())) - .unwrap_or((0, 0)); + let cur_anchor_reserve_sats = + total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let (total_onchain_balance_sats, spendable_onchain_balance_sats) = + self.wallet.get_balances(cur_anchor_reserve_sats).unwrap_or((0, 0)); + + let total_anchor_channels_reserve_sats = + std::cmp::min(cur_anchor_reserve_sats, total_onchain_balance_sats); let mut total_lightning_balance_sats = 0; let mut lightning_balances = Vec::new(); - for funding_txo in self.chain_monitor.list_monitors() { + for (funding_txo, channel_id) in self.chain_monitor.list_monitors() { match self.chain_monitor.get_monitor(funding_txo) { Ok(monitor) => { - // TODO: Switch to `channel_id` with LDK 0.0.122: let channel_id = monitor.channel_id(); - let channel_id = funding_txo.to_channel_id(); // unwrap safety: `get_counterparty_node_id` will always be `Some` after 0.0.110 and // LDK Node 0.1 depended on 0.0.115 already. let counterparty_node_id = monitor.get_counterparty_node_id().unwrap(); @@ -1646,12 +1516,13 @@ impl Node { .output_sweeper .tracked_spendable_outputs() .into_iter() - .map(|o| PendingSweepBalance::from_tracked_spendable_output(o)) + .map(PendingSweepBalance::from_tracked_spendable_output) .collect(); BalanceDetails { total_onchain_balance_sats, spendable_onchain_balance_sats, + total_anchor_channels_reserve_sats, total_lightning_balance_sats, lightning_balances, pending_balances_from_channel_closures, @@ -1662,7 +1533,8 @@ impl Node { /// /// For example, you could retrieve all stored outbound payments as follows: /// ``` - /// # use ldk_node::{Builder, Config, PaymentDirection}; + /// # use ldk_node::{Builder, Config}; + /// # use ldk_node::payment::PaymentDirection; /// # use ldk_node::bitcoin::Network; /// # let mut config = Config::default(); /// # config.network = Network::Regtest; @@ -1687,12 +1559,13 @@ impl Node { let mut peers = Vec::new(); // First add all connected peers, preferring to list the connected address if available. - let connected_peers = self.peer_manager.get_peer_node_ids(); + let connected_peers = self.peer_manager.list_peers(); let connected_peers_len = connected_peers.len(); - for (node_id, con_addr_opt) in connected_peers { + for connected_peer in connected_peers { + let node_id = connected_peer.counterparty_node_id; let stored_peer = self.peer_store.get_peer(&node_id); let stored_addr_opt = stored_peer.as_ref().map(|p| p.address.clone()); - let address = match (con_addr_opt, stored_addr_opt) { + let address = match (connected_peer.socket_address, stored_addr_opt) { (Some(con_addr), _) => con_addr, (None, Some(stored_addr)) => stored_addr, (None, None) => continue, @@ -1706,7 +1579,7 @@ impl Node { // Now add all known-but-offline peers, too. 
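To illustrate the `PaymentId`-keyed lookup/removal API and the new `total_anchor_channels_reserve_sats` field introduced above, here is a minimal, purely illustrative sketch. It assumes a running `ldk_node::Node` and a previously stored `PaymentId`; the helper name `inspect_payment_and_balances` is hypothetical.

```rust
use ldk_node::lightning::ln::channelmanager::PaymentId;
use ldk_node::Node;

// Illustrative sketch: inspect a payment by id and print the new anchor reserve figure.
fn inspect_payment_and_balances(node: &Node, payment_id: &PaymentId) {
    // Payments are now keyed by `PaymentId` rather than `PaymentHash`.
    match node.payment(payment_id) {
        Some(details) => println!("Payment {:?}: {:?}", details.id, details.status),
        None => println!("Payment unknown"),
    }

    let balances = node.list_balances();
    println!(
        "spendable: {} sats (of {} total, {} reserved for Anchor channels)",
        balances.spendable_onchain_balance_sats,
        balances.total_onchain_balance_sats,
        balances.total_anchor_channels_reserve_sats,
    );

    // Payments that are no longer of interest can be dropped from the store.
    if let Err(e) = node.remove_payment(payment_id) {
        eprintln!("Failed to remove payment: {}", e);
    }
}
```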
for p in self.peer_store.list_peers() { - if peers.iter().take(connected_peers_len).find(|d| d.node_id == p.node_id).is_some() { + if peers.iter().take(connected_peers_len).any(|d| d.node_id == p.node_id) { continue; } @@ -1723,6 +1596,18 @@ impl Node { peers } + /// Returns a handler allowing to query the network graph. + #[cfg(not(feature = "uniffi"))] + pub fn network_graph(&self) -> NetworkGraph { + NetworkGraph::new(Arc::clone(&self.network_graph)) + } + + /// Returns a handler allowing to query the network graph. + #[cfg(feature = "uniffi")] + pub fn network_graph(&self) -> Arc { + Arc::new(NetworkGraph::new(Arc::clone(&self.network_graph))) + } + /// Creates a digital ECDSA signature of a message with the node's secret key. /// /// A receiver knowing the corresponding `PublicKey` (e.g. the node’s id) and the message @@ -1740,63 +1625,81 @@ impl Node { } } -impl Drop for Node { +impl Drop for Node { fn drop(&mut self) { let _ = self.stop(); } } -async fn connect_peer_if_necessary( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc>, - logger: Arc, -) -> Result<(), Error> { - for (pman_node_id, _pman_addr) in peer_manager.get_peer_node_ids() { - if node_id == pman_node_id { - return Ok(()); - } - } +/// Represents the status of the [`Node`]. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct NodeStatus { + /// Indicates whether the [`Node`] is running. + pub is_running: bool, + /// Indicates whether the [`Node`] is listening for incoming connections on the addresses + /// configured via [`Config::listening_addresses`]. + pub is_listening: bool, + /// The best block to which our Lightning wallet is currently synced. + pub current_best_block: BestBlock, + /// The timestamp, in seconds since start of the UNIX epoch, when we last successfully synced + /// our Lightning wallet to the chain tip. + /// + /// Will be `None` if the wallet hasn't been synced since the [`Node`] was initialized. + pub latest_wallet_sync_timestamp: Option, + /// The timestamp, in seconds since start of the UNIX epoch, when we last successfully synced + /// our on-chain wallet to the chain tip. + /// + /// Will be `None` if the wallet hasn't been synced since the [`Node`] was initialized. + pub latest_onchain_wallet_sync_timestamp: Option, + /// The timestamp, in seconds since start of the UNIX epoch, when we last successfully update + /// our fee rate cache. + /// + /// Will be `None` if the cache hasn't been updated since the [`Node`] was initialized. + pub latest_fee_rate_cache_update_timestamp: Option, + /// The timestamp, in seconds since start of the UNIX epoch, when the last rapid gossip sync + /// (RGS) snapshot we successfully applied was generated. + /// + /// Will be `None` if RGS isn't configured or the snapshot hasn't been updated since the [`Node`] was initialized. + pub latest_rgs_snapshot_timestamp: Option, + /// The timestamp, in seconds since start of the UNIX epoch, when we last broadcasted a node + /// announcement. + /// + /// Will be `None` if we have no public channels or we haven't broadcasted since the [`Node`] was initialized. 
+ pub latest_node_announcement_broadcast_timestamp: Option, +} - do_connect_peer(node_id, addr, peer_manager, logger).await +pub(crate) fn total_anchor_channels_reserve_sats( + channel_manager: &ChannelManager, config: &Config, +) -> u64 { + config.anchor_channels_config.as_ref().map_or(0, |anchor_channels_config| { + channel_manager + .list_channels() + .into_iter() + .filter(|c| { + !anchor_channels_config.trusted_peers_no_reserve.contains(&c.counterparty.node_id) + && c.channel_shutdown_state + .map_or(true, |s| s != ChannelShutdownState::ShutdownComplete) + && c.channel_type + .as_ref() + .map_or(false, |t| t.requires_anchors_zero_fee_htlc_tx()) + }) + .count() as u64 + * anchor_channels_config.per_channel_reserve_sats + }) } -async fn do_connect_peer( - node_id: PublicKey, addr: SocketAddress, peer_manager: Arc>, - logger: Arc, -) -> Result<(), Error> { - log_info!(logger, "Connecting to peer: {}@{}", node_id, addr); - - let socket_addr = addr - .to_socket_addrs() - .map_err(|e| { - log_error!(logger, "Failed to resolve network address: {}", e); - Error::InvalidSocketAddress - })? - .next() - .ok_or(Error::ConnectionFailed)?; - - match lightning_net_tokio::connect_outbound(Arc::clone(&peer_manager), node_id, socket_addr) - .await - { - Some(connection_closed_future) => { - let mut connection_closed_future = Box::pin(connection_closed_future); - loop { - match futures::poll!(&mut connection_closed_future) { - std::task::Poll::Ready(_) => { - log_info!(logger, "Peer connection closed: {}@{}", node_id, addr); - return Err(Error::ConnectionFailed); - }, - std::task::Poll::Pending => {}, - } - // Avoid blocking the tokio context by sleeping a bit - match peer_manager.get_peer_node_ids().iter().find(|(id, _addr)| *id == node_id) { - Some(_) => return Ok(()), - None => tokio::time::sleep(Duration::from_millis(10)).await, - } - } - }, - None => { - log_error!(logger, "Failed to connect to peer: {}@{}", node_id, addr); - Err(Error::ConnectionFailed) - }, +fn periodically_archive_fully_resolved_monitors( + channel_manager: Arc, chain_monitor: Arc, + latest_channel_monitor_archival_height: Arc>>, +) { + let mut latest_archival_height_lock = latest_channel_monitor_archival_height.write().unwrap(); + let cur_height = channel_manager.current_best_block().height; + let should_archive = latest_archival_height_lock + .as_ref() + .map_or(true, |h| cur_height >= h + RESOLVED_CHANNEL_MONITOR_ARCHIVAL_INTERVAL); + + if should_archive { + chain_monitor.archive_fully_resolved_channel_monitors(); + *latest_archival_height_lock = Some(cur_height); } } diff --git a/src/liquidity.rs b/src/liquidity.rs index 0404fe64e..00e9f5717 100644 --- a/src/liquidity.rs +++ b/src/liquidity.rs @@ -5,7 +5,6 @@ use crate::{Config, Error}; use lightning::ln::channelmanager::MIN_FINAL_CLTV_EXPIRY_DELTA; use lightning::ln::msgs::SocketAddress; use lightning::routing::router::{RouteHint, RouteHintHop}; -use lightning::util::persist::KVStore; use lightning_invoice::{Bolt11Invoice, InvoiceBuilder, RoutingFees}; use lightning_liquidity::events::Event; use lightning_liquidity::lsps0::ser::RequestId; @@ -33,26 +32,26 @@ struct LSPS2Service { pending_buy_requests: Mutex>>, } -pub(crate) struct LiquiditySource +pub(crate) struct LiquiditySource where L::Target: Logger, { lsps2_service: Option, - channel_manager: Arc>, + channel_manager: Arc, keys_manager: Arc, - liquidity_manager: Arc>, + liquidity_manager: Arc, config: Arc, logger: L, } -impl LiquiditySource +impl LiquiditySource where L::Target: Logger, { pub(crate) fn new_lsps2( 
address: SocketAddress, node_id: PublicKey, token: Option, - channel_manager: Arc>, keys_manager: Arc, - liquidity_manager: Arc>, config: Arc, logger: L, + channel_manager: Arc, keys_manager: Arc, + liquidity_manager: Arc, config: Arc, logger: L, ) -> Self { let pending_fee_requests = Mutex::new(HashMap::new()); let pending_buy_requests = Mutex::new(HashMap::new()); @@ -66,12 +65,12 @@ where Self { lsps2_service, channel_manager, keys_manager, liquidity_manager, config, logger } } - pub(crate) fn set_peer_manager(&self, peer_manager: Arc>) { + pub(crate) fn set_peer_manager(&self, peer_manager: Arc) { let process_msgs_callback = move || peer_manager.process_events(); self.liquidity_manager.set_process_msgs_callback(process_msgs_callback); } - pub(crate) fn liquidity_manager(&self) -> &LiquidityManager { + pub(crate) fn liquidity_manager(&self) -> &LiquidityManager { self.liquidity_manager.as_ref() } diff --git a/src/message_handler.rs b/src/message_handler.rs index 852f63cec..89d67d846 100644 --- a/src/message_handler.rs +++ b/src/message_handler.rs @@ -4,7 +4,6 @@ use lightning::ln::features::{InitFeatures, NodeFeatures}; use lightning::ln::peer_handler::CustomMessageHandler; use lightning::ln::wire::CustomMessageReader; use lightning::util::logger::Logger; -use lightning::util::persist::KVStore; use lightning_liquidity::lsps0::ser::RawLSPSMessage; @@ -13,19 +12,19 @@ use bitcoin::secp256k1::PublicKey; use std::ops::Deref; use std::sync::Arc; -pub(crate) enum NodeCustomMessageHandler +pub(crate) enum NodeCustomMessageHandler where L::Target: Logger, { Ignoring, - Liquidity { liquidity_source: Arc> }, + Liquidity { liquidity_source: Arc> }, } -impl NodeCustomMessageHandler +impl NodeCustomMessageHandler where L::Target: Logger, { - pub(crate) fn new_liquidity(liquidity_source: Arc>) -> Self { + pub(crate) fn new_liquidity(liquidity_source: Arc>) -> Self { Self::Liquidity { liquidity_source } } @@ -34,8 +33,7 @@ where } } -impl CustomMessageReader - for NodeCustomMessageHandler +impl CustomMessageReader for NodeCustomMessageHandler where L::Target: Logger, { @@ -53,8 +51,7 @@ where } } -impl CustomMessageHandler - for NodeCustomMessageHandler +impl CustomMessageHandler for NodeCustomMessageHandler where L::Target: Logger, { diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs new file mode 100644 index 000000000..e8d030bc0 --- /dev/null +++ b/src/payment/bolt11.rs @@ -0,0 +1,716 @@ +//! Holds a payment handler allowing to create and pay [BOLT 11] invoices. +//! +//! 
[BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + +use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; +use crate::connection::ConnectionManager; +use crate::error::Error; +use crate::liquidity::LiquiditySource; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment::store::{ + LSPFeeLimits, PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, + PaymentStatus, PaymentStore, +}; +use crate::peer_store::{PeerInfo, PeerStore}; +use crate::types::{ChannelManager, KeysManager}; + +use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; +use lightning::ln::{PaymentHash, PaymentPreimage}; +use lightning::routing::router::{PaymentParameters, RouteParameters}; + +use lightning_invoice::{payment, Bolt11Invoice, Currency}; + +use bitcoin::hashes::sha256::Hash as Sha256; +use bitcoin::hashes::Hash; + +use std::sync::{Arc, RwLock}; +use std::time::SystemTime; + +/// A payment handler allowing to create and pay [BOLT 11] invoices. +/// +/// Should be retrieved by calling [`Node::bolt11_payment`]. +/// +/// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md +/// [`Node::bolt11_payment`]: crate::Node::bolt11_payment +pub struct Bolt11Payment { + runtime: Arc>>, + channel_manager: Arc, + connection_manager: Arc>>, + keys_manager: Arc, + liquidity_source: Option>>>, + payment_store: Arc>>, + peer_store: Arc>>, + config: Arc, + logger: Arc, +} + +impl Bolt11Payment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, + connection_manager: Arc>>, + keys_manager: Arc, + liquidity_source: Option>>>, + payment_store: Arc>>, + peer_store: Arc>>, config: Arc, + logger: Arc, + ) -> Self { + Self { + runtime, + channel_manager, + connection_manager, + keys_manager, + liquidity_source, + payment_store, + peer_store, + config, + logger, + } + } + + /// Send a payment given an invoice. + pub fn send(&self, invoice: &Bolt11Invoice) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (payment_hash, recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". 
Please use send_using_amount instead."); + Error::InvalidInvoice + })?; + + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + if let Some(payment) = self.payment_store.get(&payment_id) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: an invoice must not be paid twice."); + return Err(Error::DuplicatePayment); + } + } + + let payment_secret = Some(*invoice.payment_secret()); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + + match self.channel_manager.send_payment( + payment_hash, + recipient_onion, + payment_id, + route_params, + retry_strategy, + ) { + Ok(()) => { + let payee_pubkey = invoice.recover_payee_pub_key(); + let amt_msat = invoice.amount_milli_satoshis().unwrap(); + log_info!(self.logger, "Initiated sending {}msat to {}", amt_msat, payee_pubkey); + + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, + }; + let payment = PaymentDetails::new( + payment_id, + kind, + invoice.amount_milli_satoshis(), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: payment_secret, + }; + let payment = PaymentDetails::new( + payment_id, + kind, + invoice.amount_milli_satoshis(), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); + + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Send a payment given an invoice and an amount in millisatoshi. + /// + /// This will fail if the amount given is less than the value required by the given invoice. + /// + /// This can be used to pay a so-called "zero-amount" invoice, i.e., an invoice that leaves the + /// amount paid to be determined by the user. 
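A hedged sketch of how `send` and `send_using_amount` fit together, assuming the crate's `lightning_invoice` re-export and a running `Node`; `pay_invoice` and `amount_msat_if_unset` are hypothetical names, not project code.

```rust
use std::str::FromStr;

use ldk_node::lightning_invoice::Bolt11Invoice;
use ldk_node::Node;

// Illustrative sketch: pay an invoice, falling back to an explicit amount for
// "zero-amount" invoices.
fn pay_invoice(node: &Node, invoice_str: &str, amount_msat_if_unset: u64) {
    let invoice = match Bolt11Invoice::from_str(invoice_str) {
        Ok(inv) => inv,
        Err(e) => {
            eprintln!("Failed to parse invoice: {}", e);
            return;
        },
    };

    let handler = node.bolt11_payment();
    let res = if invoice.amount_milli_satoshis().is_some() {
        handler.send(&invoice)
    } else {
        // Zero-amount invoice: the payer supplies the amount.
        handler.send_using_amount(&invoice, amount_msat_if_unset)
    };

    match res {
        Ok(payment_id) => println!("Initiated payment {:?}", payment_id),
        Err(e) => eprintln!("Failed to send payment: {}", e),
    }
}
```

Checking `amount_milli_satoshis()` first mirrors the error the handler would otherwise return for zero-amount invoices.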
+ pub fn send_using_amount( + &self, invoice: &Bolt11Invoice, amount_msat: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { + if amount_msat < invoice_amount_msat { + log_error!( + self.logger, + "Failed to pay as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + } + + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); + if let Some(payment) = self.payment_store.get(&payment_id) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: an invoice must not be paid twice."); + return Err(Error::DuplicatePayment); + } + } + + let payment_secret = invoice.payment_secret(); + let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); + let mut payment_params = PaymentParameters::from_node_id( + invoice.recover_payee_pub_key(), + invoice.min_final_cltv_expiry_delta() as u32, + ) + .with_expiry_time(expiry_time.as_secs()) + .with_route_hints(invoice.route_hints()) + .map_err(|_| Error::InvalidInvoice)?; + if let Some(features) = invoice.features() { + payment_params = payment_params + .with_bolt11_features(features.clone()) + .map_err(|_| Error::InvalidInvoice)?; + } + let route_params = + RouteParameters::from_payment_params_and_value(payment_params, amount_msat); + + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let recipient_fields = RecipientOnionFields::secret_only(*payment_secret); + + match self.channel_manager.send_payment( + payment_hash, + recipient_fields, + payment_id, + route_params, + retry_strategy, + ) { + Ok(()) => { + let payee_pubkey = invoice.recover_payee_pub_key(); + log_info!( + self.logger, + "Initiated sending {} msat to {}", + amount_msat, + payee_pubkey + ); + + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + }; + + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage: None, + secret: Some(*payment_secret), + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); + self.payment_store.insert(payment)?; + + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Allows to attempt manually claiming payments with the given preimage that have previously + /// been registered via [`receive_for_hash`] or [`receive_variable_amount_for_hash`]. + /// + /// This should be called in reponse to a [`PaymentClaimable`] event as soon as the preimage is + /// available. + /// + /// Will check that the payment is known, and that the given preimage and claimable amount + /// match our expectations before attempting to claim the payment, and will return an error + /// otherwise. 
+ /// + /// When claiming the payment has succeeded, a [`PaymentReceived`] event will be emitted. + /// + /// [`receive_for_hash`]: Self::receive_for_hash + /// [`receive_variable_amount_for_hash`]: Self::receive_variable_amount_for_hash + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`PaymentReceived`]: crate::Event::PaymentReceived + pub fn claim_for_hash( + &self, payment_hash: PaymentHash, claimable_amount_msat: u64, preimage: PaymentPreimage, + ) -> Result<(), Error> { + let payment_id = PaymentId(payment_hash.0); + + let expected_payment_hash = PaymentHash(Sha256::hash(&preimage.0).to_byte_array()); + + if expected_payment_hash != payment_hash { + log_error!( + self.logger, + "Failed to manually claim payment as the given preimage doesn't match the hash {}", + payment_hash + ); + return Err(Error::InvalidPaymentPreimage); + } + + if let Some(details) = self.payment_store.get(&payment_id) { + if let Some(expected_amount_msat) = details.amount_msat { + if claimable_amount_msat < expected_amount_msat { + log_error!( + self.logger, + "Failed to manually claim payment {} as the claimable amount is less than expected", + payment_id + ); + return Err(Error::InvalidAmount); + } + } + } else { + log_error!( + self.logger, + "Failed to manually claim unknown payment with hash: {}", + payment_hash + ); + return Err(Error::InvalidPaymentHash); + } + + self.channel_manager.claim_funds(preimage); + Ok(()) + } + + /// Allows to manually fail payments with the given hash that have previously + /// been registered via [`receive_for_hash`] or [`receive_variable_amount_for_hash`]. + /// + /// This should be called in reponse to a [`PaymentClaimable`] event if the payment needs to be + /// failed back, e.g., if the correct preimage can't be retrieved in time before the claim + /// deadline has been reached. + /// + /// Will check that the payment is known before failing the payment, and will return an error + /// otherwise. + /// + /// [`receive_for_hash`]: Self::receive_for_hash + /// [`receive_variable_amount_for_hash`]: Self::receive_variable_amount_for_hash + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + pub fn fail_for_hash(&self, payment_hash: PaymentHash) -> Result<(), Error> { + let payment_id = PaymentId(payment_hash.0); + + let update = PaymentDetailsUpdate { + status: Some(PaymentStatus::Failed), + ..PaymentDetailsUpdate::new(payment_id) + }; + + if !self.payment_store.update(&update)? { + log_error!( + self.logger, + "Failed to manually fail unknown payment with hash: {}", + payment_hash + ); + return Err(Error::InvalidPaymentHash); + } + + self.channel_manager.fail_htlc_backwards(&payment_hash); + Ok(()) + } + + /// Returns a payable invoice that can be used to request and receive a payment of the amount + /// given. + /// + /// The inbound payment will be automatically claimed upon arrival. + pub fn receive( + &self, amount_msat: u64, description: &str, expiry_secs: u32, + ) -> Result { + self.receive_inner(Some(amount_msat), description, expiry_secs, None) + } + + /// Returns a payable invoice that can be used to request a payment of the amount + /// given for the given payment hash. + /// + /// We will register the given payment hash and emit a [`PaymentClaimable`] event once + /// the inbound payment arrives. + /// + /// **Note:** users *MUST* handle this event and claim the payment manually via + /// [`claim_for_hash`] as soon as they have obtained access to the preimage of the given + /// payment hash. 
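The manual-claim flow described in the surrounding comments may be clearer with a sketch. This is illustrative only: it assumes `Event::PaymentClaimable` exposes `payment_hash` and `claimable_amount_msat` fields, and `lookup_preimage` stands in for an application-provided lookup.

```rust
use ldk_node::lightning::ln::{PaymentHash, PaymentPreimage};
use ldk_node::{Event, Node};

// Hypothetical application hook resolving a preimage for a registered hash.
fn lookup_preimage(_hash: &PaymentHash) -> Option<PaymentPreimage> {
    None
}

// Illustrative event-loop fragment for payments registered via `receive_for_hash`.
fn handle_one_event(node: &Node) {
    match node.wait_next_event() {
        Event::PaymentClaimable { payment_hash, claimable_amount_msat, .. } => {
            let res = match lookup_preimage(&payment_hash) {
                Some(preimage) => node.bolt11_payment().claim_for_hash(
                    payment_hash,
                    claimable_amount_msat,
                    preimage,
                ),
                // Without the preimage, fail the payment back before the claim deadline.
                None => node.bolt11_payment().fail_for_hash(payment_hash),
            };
            if let Err(e) = res {
                eprintln!("Failed to resolve claimable payment: {}", e);
            }
        },
        _ => {},
    }
    node.event_handled();
}
```

Calling `event_handled` after every event keeps the queue draining even for variants this fragment ignores.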
If they're unable to obtain the preimage, they *MUST* immediately fail the payment via + /// [`fail_for_hash`]. + /// + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`claim_for_hash`]: Self::claim_for_hash + /// [`fail_for_hash`]: Self::fail_for_hash + pub fn receive_for_hash( + &self, amount_msat: u64, description: &str, expiry_secs: u32, payment_hash: PaymentHash, + ) -> Result { + self.receive_inner(Some(amount_msat), description, expiry_secs, Some(payment_hash)) + } + + /// Returns a payable invoice that can be used to request and receive a payment for which the + /// amount is to be determined by the user, also known as a "zero-amount" invoice. + /// + /// The inbound payment will be automatically claimed upon arrival. + pub fn receive_variable_amount( + &self, description: &str, expiry_secs: u32, + ) -> Result { + self.receive_inner(None, description, expiry_secs, None) + } + + /// Returns a payable invoice that can be used to request a payment for the given payment hash + /// and the amount to be determined by the user, also known as a "zero-amount" invoice. + /// + /// We will register the given payment hash and emit a [`PaymentClaimable`] event once + /// the inbound payment arrives. + /// + /// **Note:** users *MUST* handle this event and claim the payment manually via + /// [`claim_for_hash`] as soon as they have obtained access to the preimage of the given + /// payment hash. If they're unable to obtain the preimage, they *MUST* immediately fail the payment via + /// [`fail_for_hash`]. + /// + /// [`PaymentClaimable`]: crate::Event::PaymentClaimable + /// [`claim_for_hash`]: Self::claim_for_hash + /// [`fail_for_hash`]: Self::fail_for_hash + pub fn receive_variable_amount_for_hash( + &self, description: &str, expiry_secs: u32, payment_hash: PaymentHash, + ) -> Result { + self.receive_inner(None, description, expiry_secs, Some(payment_hash)) + } + + fn receive_inner( + &self, amount_msat: Option, description: &str, expiry_secs: u32, + manual_claim_payment_hash: Option, + ) -> Result { + let currency = Currency::from(self.config.network); + let keys_manager = Arc::clone(&self.keys_manager); + let duration = SystemTime::now() + .duration_since(SystemTime::UNIX_EPOCH) + .expect("for the foreseeable future this shouldn't happen"); + + let invoice = { + let invoice_res = if let Some(payment_hash) = manual_claim_payment_hash { + lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch_with_payment_hash( + &self.channel_manager, + keys_manager, + Arc::clone(&self.logger), + currency, + amount_msat, + description.to_string(), + duration, + expiry_secs, + payment_hash, + None, + ) + } else { + lightning_invoice::utils::create_invoice_from_channelmanager_and_duration_since_epoch( + &self.channel_manager, + keys_manager, + Arc::clone(&self.logger), + currency, + amount_msat, + description.to_string(), + duration, + expiry_secs, + None, + ) + }; + + match invoice_res { + Ok(inv) => { + log_info!(self.logger, "Invoice created: {}", inv); + inv + }, + Err(e) => { + log_error!(self.logger, "Failed to create invoice: {}", e); + return Err(Error::InvoiceCreationFailed); + }, + } + }; + + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment_secret = invoice.payment_secret(); + let id = PaymentId(payment_hash.0); + let preimage = if manual_claim_payment_hash.is_none() { + // If the user hasn't registered a custom payment hash, we're positive ChannelManager + // will know the preimage at this point. 
+ let res = self + .channel_manager + .get_payment_preimage(payment_hash, payment_secret.clone()) + .ok(); + debug_assert!(res.is_some(), "We just let ChannelManager create an inbound payment, it can't have forgotten the preimage by now."); + res + } else { + None + }; + let kind = PaymentKind::Bolt11 { + hash: payment_hash, + preimage, + secret: Some(payment_secret.clone()), + }; + let payment = PaymentDetails::new( + id, + kind, + amount_msat, + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + Ok(invoice) + } + + /// Returns a payable invoice that can be used to request a payment of the amount given and + /// receive it via a newly created just-in-time (JIT) channel. + /// + /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel + /// to us, supplying just-in-time inbound liquidity. + /// + /// If set, `max_total_lsp_fee_limit_msat` will limit how much fee we allow the LSP to take for opening the + /// channel to us. We'll use its cheapest offer otherwise. + /// + /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + pub fn receive_via_jit_channel( + &self, amount_msat: u64, description: &str, expiry_secs: u32, + max_total_lsp_fee_limit_msat: Option, + ) -> Result { + self.receive_via_jit_channel_inner( + Some(amount_msat), + description, + expiry_secs, + max_total_lsp_fee_limit_msat, + None, + ) + } + + /// Returns a payable invoice that can be used to request a variable amount payment (also known + /// as "zero-amount" invoice) and receive it via a newly created just-in-time (JIT) channel. + /// + /// When the returned invoice is paid, the configured [LSPS2]-compliant LSP will open a channel + /// to us, supplying just-in-time inbound liquidity. + /// + /// If set, `max_proportional_lsp_fee_limit_ppm_msat` will limit how much proportional fee, in + /// parts-per-million millisatoshis, we allow the LSP to take for opening the channel to us. + /// We'll use its cheapest offer otherwise. + /// + /// [LSPS2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + pub fn receive_variable_amount_via_jit_channel( + &self, description: &str, expiry_secs: u32, + max_proportional_lsp_fee_limit_ppm_msat: Option, + ) -> Result { + self.receive_via_jit_channel_inner( + None, + description, + expiry_secs, + None, + max_proportional_lsp_fee_limit_ppm_msat, + ) + } + + fn receive_via_jit_channel_inner( + &self, amount_msat: Option, description: &str, expiry_secs: u32, + max_total_lsp_fee_limit_msat: Option, + max_proportional_lsp_fee_limit_ppm_msat: Option, + ) -> Result { + let liquidity_source = + self.liquidity_source.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; + + let (node_id, address) = liquidity_source + .get_liquidity_source_details() + .ok_or(Error::LiquiditySourceUnavailable)?; + + let rt_lock = self.runtime.read().unwrap(); + let runtime = rt_lock.as_ref().unwrap(); + + let peer_info = PeerInfo { node_id, address }; + + let con_node_id = peer_info.node_id; + let con_addr = peer_info.address.clone(); + let con_cm = Arc::clone(&self.connection_manager); + + // We need to use our main runtime here as a local runtime might not be around to poll + // connection futures going forward. + tokio::task::block_in_place(move || { + runtime.block_on(async move { + con_cm.connect_peer_if_necessary(con_node_id, con_addr).await + }) + })?; + + log_info!(self.logger, "Connected to LSP {}@{}. 
", peer_info.node_id, peer_info.address); + + let liquidity_source = Arc::clone(&liquidity_source); + let (invoice, lsp_total_opening_fee, lsp_prop_opening_fee) = + tokio::task::block_in_place(move || { + runtime.block_on(async move { + if let Some(amount_msat) = amount_msat { + liquidity_source + .lsps2_receive_to_jit_channel( + amount_msat, + description, + expiry_secs, + max_total_lsp_fee_limit_msat, + ) + .await + .map(|(invoice, total_fee)| (invoice, Some(total_fee), None)) + } else { + liquidity_source + .lsps2_receive_variable_amount_to_jit_channel( + description, + expiry_secs, + max_proportional_lsp_fee_limit_ppm_msat, + ) + .await + .map(|(invoice, prop_fee)| (invoice, None, Some(prop_fee))) + } + }) + })?; + + // Register payment in payment store. + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); + let payment_secret = invoice.payment_secret(); + let lsp_fee_limits = LSPFeeLimits { + max_total_opening_fee_msat: lsp_total_opening_fee, + max_proportional_opening_fee_ppm_msat: lsp_prop_opening_fee, + }; + let id = PaymentId(payment_hash.0); + let preimage = + self.channel_manager.get_payment_preimage(payment_hash, payment_secret.clone()).ok(); + let kind = PaymentKind::Bolt11Jit { + hash: payment_hash, + preimage, + secret: Some(payment_secret.clone()), + lsp_fee_limits, + }; + let payment = PaymentDetails::new( + id, + kind, + amount_msat, + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + // Persist LSP peer to make sure we reconnect on restart. + self.peer_store.add_peer(peer_info)?; + + Ok(invoice) + } + + /// Sends payment probes over all paths of a route that would be used to pay the given invoice. + /// + /// This may be used to send "pre-flight" probes, i.e., to train our scorer before conducting + /// the actual payment. Note this is only useful if there likely is sufficient time for the + /// probe to settle before sending out the actual payment, e.g., when waiting for user + /// confirmation in a wallet UI. + /// + /// Otherwise, there is a chance the probe could take up some liquidity needed to complete the + /// actual payment. Users should therefore be cautious and might avoid sending probes if + /// liquidity is scarce and/or they don't expect the probe to return before they send the + /// payment. To mitigate this issue, channels with available liquidity less than the required + /// amount times [`Config::probing_liquidity_limit_multiplier`] won't be used to send + /// pre-flight probes. + pub fn send_probes(&self, invoice: &Bolt11Invoice) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (_payment_hash, _recipient_onion, route_params) = payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". Please use send_probes_using_amount instead."); + Error::InvalidInvoice + })?; + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + + self.channel_manager + .send_preflight_probes(route_params, liquidity_limit_multiplier) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } + + /// Sends payment probes over all paths of a route that would be used to pay the given + /// zero-value invoice using the given amount. 
+ /// + /// This can be used to send pre-flight probes for a so-called "zero-amount" invoice, i.e., an + /// invoice that leaves the amount paid to be determined by the user. + /// + /// See [`Self::send_probes`] for more information. + pub fn send_probes_using_amount( + &self, invoice: &Bolt11Invoice, amount_msat: u64, + ) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let (_payment_hash, _recipient_onion, route_params) = if let Some(invoice_amount_msat) = + invoice.amount_milli_satoshis() + { + if amount_msat < invoice_amount_msat { + log_error!( + self.logger, + "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + + payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); + Error::InvalidInvoice + })? + } else { + payment::payment_parameters_from_zero_amount_invoice(&invoice, amount_msat).map_err(|_| { + log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); + Error::InvalidInvoice + })? + }; + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + + self.channel_manager + .send_preflight_probes(route_params, liquidity_limit_multiplier) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } +} diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs new file mode 100644 index 000000000..5fd1208cc --- /dev/null +++ b/src/payment/bolt12.rs @@ -0,0 +1,347 @@ +//! Holds a payment handler allowing to create and pay [BOLT 12] offers and refunds. +//! +//! [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + +use crate::config::LDK_PAYMENT_RETRY_TIMEOUT; +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment::store::{ + PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, +}; +use crate::types::ChannelManager; + +use lightning::ln::channelmanager::{PaymentId, Retry}; +use lightning::offers::invoice::Bolt12Invoice; +use lightning::offers::offer::{Amount, Offer}; +use lightning::offers::parse::Bolt12SemanticError; +use lightning::offers::refund::Refund; + +use rand::RngCore; + +use std::sync::{Arc, RwLock}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +/// A payment handler allowing to create and pay [BOLT 12] offers and refunds. +/// +/// Should be retrieved by calling [`Node::bolt12_payment`]. +/// +/// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md +/// [`Node::bolt12_payment`]: crate::Node::bolt12_payment +pub struct Bolt12Payment { + runtime: Arc>>, + channel_manager: Arc, + payment_store: Arc>>, + logger: Arc, +} + +impl Bolt12Payment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, + payment_store: Arc>>, logger: Arc, + ) -> Self { + Self { runtime, channel_manager, payment_store, logger } + } + + /// Send a payment given an offer. + /// + /// If `payer_note` is `Some` it will be seen by the recipient and reflected back in the invoice + /// response. 
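A short, illustrative sketch of paying an offer with the handler above, assuming the `lightning` re-export for the `Offer` type and that `payer_note` is an `Option<String>`; `pay_offer` is a hypothetical helper.

```rust
use std::str::FromStr;

use ldk_node::lightning::offers::offer::Offer;
use ldk_node::Node;

// Illustrative sketch: parse an offer string and pay it with an optional payer note.
fn pay_offer(node: &Node, offer_str: &str) {
    let offer = match Offer::from_str(offer_str) {
        Ok(offer) => offer,
        Err(e) => {
            eprintln!("Failed to parse offer: {:?}", e);
            return;
        },
    };

    match node.bolt12_payment().send(&offer, Some("thanks!".to_string())) {
        Ok(payment_id) => println!("Initiated BOLT12 payment {:?}", payment_id),
        Err(e) => eprintln!("Failed to pay offer: {}", e),
    }
}
```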
+ pub fn send(&self, offer: &Offer, payer_note: Option) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let quantity = None; + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let offer_amount_msat = match offer.amount() { + Some(Amount::Bitcoin { amount_msats }) => amount_msats, + Some(_) => { + log_error!(self.logger, "Failed to send payment as the provided offer was denominated in an unsupported currency."); + return Err(Error::UnsupportedCurrency); + }, + None => { + log_error!(self.logger, "Failed to send payment due to the given offer being \"zero-amount\". Please use send_using_amount instead."); + return Err(Error::InvalidOffer); + }, + }; + + match self.channel_manager.pay_for_offer( + &offer, + quantity, + None, + payer_note, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) { + Ok(()) => { + let payee_pubkey = offer.signing_pubkey(); + log_info!( + self.logger, + "Initiated sending {}msat to {:?}", + offer_amount_msat, + payee_pubkey + ); + + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(*offer_amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send invoice request: {:?}", e); + match e { + Bolt12SemanticError::DuplicatePaymentId => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(*offer_amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); + self.payment_store.insert(payment)?; + Err(Error::InvoiceRequestCreationFailed) + }, + } + }, + } + } + + /// Send a payment given an offer and an amount in millisatoshi. + /// + /// This will fail if the amount given is less than the value required by the given offer. + /// + /// This can be used to pay a so-called "zero-amount" offers, i.e., an offer that leaves the + /// amount paid to be determined by the user. + /// + /// If `payer_note` is `Some` it will be seen by the recipient and reflected back in the invoice + /// response. 
+ pub fn send_using_amount( + &self, offer: &Offer, payer_note: Option, amount_msat: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let quantity = None; + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let offer_amount_msat = match offer.amount() { + Some(Amount::Bitcoin { amount_msats }) => *amount_msats, + Some(_) => { + log_error!(self.logger, "Failed to send payment as the provided offer was denominated in an unsupported currency."); + return Err(Error::UnsupportedCurrency); + }, + None => amount_msat, + }; + + if amount_msat < offer_amount_msat { + log_error!( + self.logger, + "Failed to pay as the given amount needs to be at least the offer amount: required {}msat, gave {}msat.", offer_amount_msat, amount_msat); + return Err(Error::InvalidAmount); + } + + match self.channel_manager.pay_for_offer( + &offer, + quantity, + Some(amount_msat), + payer_note, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) { + Ok(()) => { + let payee_pubkey = offer.signing_pubkey(); + log_info!( + self.logger, + "Initiated sending {}msat to {:?}", + amount_msat, + payee_pubkey + ); + + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + match e { + Bolt12SemanticError::DuplicatePaymentId => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Bolt12Offer { + hash: None, + preimage: None, + secret: None, + offer_id: offer.id(), + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Returns a payable offer that can be used to request and receive a payment of the amount + /// given. + pub fn receive(&self, amount_msat: u64, description: &str) -> Result { + let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; + let offer = offer_builder + .amount_msats(amount_msat) + .description(description.to_string()) + .build() + .map_err(|e| { + log_error!(self.logger, "Failed to create offer: {:?}", e); + Error::OfferCreationFailed + })?; + + Ok(offer) + } + + /// Returns a payable offer that can be used to request and receive a payment for which the + /// amount is to be determined by the user, also known as a "zero-amount" offer. + pub fn receive_variable_amount(&self, description: &str) -> Result { + let offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; + let offer = offer_builder.description(description.to_string()).build().map_err(|e| { + log_error!(self.logger, "Failed to create offer: {:?}", e); + Error::OfferCreationFailed + })?; + + Ok(offer) + } + + /// Requests a refund payment for the given [`Refund`]. 
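Before the refund API below, the offer-creation methods above can be sketched as follows; the amounts, descriptions, and helper name are arbitrary illustrations.

```rust
use ldk_node::Node;

// Illustrative sketch: create a fixed-amount offer and a "zero-amount" offer.
fn create_offers(node: &Node) {
    let handler = node.bolt12_payment();

    match handler.receive(10_000, "coffee") {
        Ok(offer) => println!("Fixed-amount offer: {}", offer),
        Err(e) => eprintln!("Failed to create offer: {}", e),
    }

    match handler.receive_variable_amount("tips welcome") {
        Ok(offer) => println!("Zero-amount offer: {}", offer),
        Err(e) => eprintln!("Failed to create offer: {}", e),
    }
}
```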
+ /// + /// The returned [`Bolt12Invoice`] is for informational purposes only (i.e., isn't needed to + /// retrieve the refund). + pub fn request_refund_payment(&self, refund: &Refund) -> Result { + let invoice = self.channel_manager.request_refund_payment(refund).map_err(|e| { + log_error!(self.logger, "Failed to request refund payment: {:?}", e); + Error::InvoiceRequestCreationFailed + })?; + + let payment_hash = invoice.payment_hash(); + let payment_id = PaymentId(payment_hash.0); + + let kind = + PaymentKind::Bolt12Refund { hash: Some(payment_hash), preimage: None, secret: None }; + + let payment = PaymentDetails::new( + payment_id, + kind, + Some(refund.amount_msats()), + PaymentDirection::Inbound, + PaymentStatus::Pending, + ); + + self.payment_store.insert(payment)?; + + Ok(invoice) + } + + /// Returns a [`Refund`] object that can be used to offer a refund payment of the amount given. + pub fn initiate_refund(&self, amount_msat: u64, expiry_secs: u32) -> Result { + let mut random_bytes = [0u8; 32]; + rand::thread_rng().fill_bytes(&mut random_bytes); + let payment_id = PaymentId(random_bytes); + + let expiration = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) + .duration_since(UNIX_EPOCH) + .unwrap(); + let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let max_total_routing_fee_msat = None; + + let refund = self + .channel_manager + .create_refund_builder( + amount_msat, + expiration, + payment_id, + retry_strategy, + max_total_routing_fee_msat, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to create refund builder: {:?}", e); + Error::RefundCreationFailed + })? + .build() + .map_err(|e| { + log_error!(self.logger, "Failed to create refund: {:?}", e); + Error::RefundCreationFailed + })?; + + log_info!(self.logger, "Offering refund of {}msat", amount_msat); + + let kind = PaymentKind::Bolt12Refund { hash: None, preimage: None, secret: None }; + + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + + self.payment_store.insert(payment)?; + + Ok(refund) + } +} diff --git a/src/payment/mod.rs b/src/payment/mod.rs new file mode 100644 index 000000000..1862bf2df --- /dev/null +++ b/src/payment/mod.rs @@ -0,0 +1,13 @@ +//! Objects for different types of payments. + +mod bolt11; +mod bolt12; +mod onchain; +mod spontaneous; +pub(crate) mod store; + +pub use bolt11::Bolt11Payment; +pub use bolt12::Bolt12Payment; +pub use onchain::OnchainPayment; +pub use spontaneous::SpontaneousPayment; +pub use store::{LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus}; diff --git a/src/payment/onchain.rs b/src/payment/onchain.rs new file mode 100644 index 000000000..8a879ae8c --- /dev/null +++ b/src/payment/onchain.rs @@ -0,0 +1,85 @@ +//! Holds a payment handler allowing to send and receive on-chain payments. + +use crate::config::Config; +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::types::{ChannelManager, Wallet}; + +use bitcoin::{Address, Txid}; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to send and receive on-chain payments. +/// +/// Should be retrieved by calling [`Node::onchain_payment`]. 
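Tying the refund APIs above together, a hedged sketch of both sides of the flow; the helper names and the 3600-second expiry are arbitrary choices, not project code.

```rust
use ldk_node::lightning::offers::refund::Refund;
use ldk_node::Node;

// Side offering the refund: build a `Refund` and hand it to the counterparty
// out of band; we will end up paying the invoice that comes back for it.
fn offer_refund(node: &Node, amount_msat: u64) -> Option<Refund> {
    match node.bolt12_payment().initiate_refund(amount_msat, 3600) {
        Ok(refund) => Some(refund),
        Err(e) => {
            eprintln!("Failed to initiate refund: {}", e);
            None
        },
    }
}

// Side receiving the refund: respond to the counterparty's `Refund` so the
// payment flows back to us; the returned invoice is informational only.
fn accept_refund(node: &Node, refund: &Refund) {
    match node.bolt12_payment().request_refund_payment(refund) {
        Ok(invoice) => println!("Expecting refund for hash {:?}", invoice.payment_hash()),
        Err(e) => eprintln!("Failed to request refund payment: {}", e),
    }
}
```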
+/// +/// [`Node::onchain_payment`]: crate::Node::onchain_payment +pub struct OnchainPayment { + runtime: Arc>>, + wallet: Arc, + channel_manager: Arc, + config: Arc, + logger: Arc, +} + +impl OnchainPayment { + pub(crate) fn new( + runtime: Arc>>, wallet: Arc, + channel_manager: Arc, config: Arc, logger: Arc, + ) -> Self { + Self { runtime, wallet, channel_manager, config, logger } + } + + /// Retrieve a new on-chain/funding address. + pub fn new_address(&self) -> Result { + let funding_address = self.wallet.get_new_address()?; + log_info!(self.logger, "Generated new funding address: {}", funding_address); + Ok(funding_address) + } + + /// Send an on-chain payment to the given address. + /// + /// This will respect any on-chain reserve we need to keep, i.e., won't allow to cut into + /// [`BalanceDetails::total_anchor_channels_reserve_sats`]. + /// + /// [`BalanceDetails::total_anchor_channels_reserve_sats`]: crate::BalanceDetails::total_anchor_channels_reserve_sats + pub fn send_to_address( + &self, address: &bitcoin::Address, amount_sats: u64, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let cur_anchor_reserve_sats = + crate::total_anchor_channels_reserve_sats(&self.channel_manager, &self.config); + let spendable_amount_sats = + self.wallet.get_spendable_amount_sats(cur_anchor_reserve_sats).unwrap_or(0); + + if spendable_amount_sats < amount_sats { + log_error!(self.logger, + "Unable to send payment due to insufficient funds. Available: {}sats, Required: {}sats", + spendable_amount_sats, amount_sats + ); + return Err(Error::InsufficientFunds); + } + self.wallet.send_to_address(address, Some(amount_sats)) + } + + /// Send an on-chain payment to the given address, draining all the available funds. + /// + /// This is useful if you have closed all channels and want to migrate funds to another + /// on-chain wallet. + /// + /// Please note that this will **not** retain any on-chain reserves, which might be potentially + /// dangerous if you have open Anchor channels for which you can't trust the counterparty to + /// spend the Anchor output after channel closure. + pub fn send_all_to_address(&self, address: &bitcoin::Address) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + self.wallet.send_to_address(address, None) + } +} diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs new file mode 100644 index 000000000..4c904c80d --- /dev/null +++ b/src/payment/spontaneous.rs @@ -0,0 +1,161 @@ +//! Holds a payment handler allowing to send spontaneous ("keysend") payments. + +use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; +use crate::error::Error; +use crate::logger::{log_error, log_info, FilesystemLogger, Logger}; +use crate::payment::store::{ + PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, PaymentStore, +}; +use crate::types::{ChannelManager, KeysManager, TlvEntry}; + +use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; +use lightning::ln::{PaymentHash, PaymentPreimage}; +use lightning::routing::router::{PaymentParameters, RouteParameters}; +use lightning::sign::EntropySource; + +use bitcoin::secp256k1::PublicKey; + +use std::sync::{Arc, RwLock}; + +/// A payment handler allowing to send spontaneous ("keysend") payments. +/// +/// Should be retrieved by calling [`Node::spontaneous_payment`]. 
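A brief illustrative sketch of the on-chain handler above, assuming the re-exported `bitcoin` crate's `Address::from_str` and `assume_checked`; network validation is simplified and the helper name is hypothetical.

```rust
use std::str::FromStr;

use ldk_node::bitcoin::Address;
use ldk_node::Node;

// Illustrative sketch: fund the wallet, then send while respecting the
// Anchor-channel reserve enforced by `send_to_address`.
fn onchain_roundtrip(node: &Node, destination: &str, amount_sats: u64) {
    let handler = node.onchain_payment();

    match handler.new_address() {
        Ok(addr) => println!("Send funds to: {}", addr),
        Err(e) => eprintln!("Failed to get address: {}", e),
    }

    // Address parsing should check the network; this is simplified here.
    let addr = match Address::from_str(destination) {
        Ok(a) => a.assume_checked(),
        Err(e) => {
            eprintln!("Invalid address: {}", e);
            return;
        },
    };

    match handler.send_to_address(&addr, amount_sats) {
        Ok(txid) => println!("Broadcast withdrawal: {}", txid),
        Err(e) => eprintln!("Failed to send on-chain payment: {}", e),
    }
}
```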
+/// +/// [`Node::spontaneous_payment`]: crate::Node::spontaneous_payment +pub struct SpontaneousPayment { + runtime: Arc>>, + channel_manager: Arc, + keys_manager: Arc, + payment_store: Arc>>, + config: Arc, + logger: Arc, +} + +impl SpontaneousPayment { + pub(crate) fn new( + runtime: Arc>>, + channel_manager: Arc, keys_manager: Arc, + payment_store: Arc>>, config: Arc, + logger: Arc, + ) -> Self { + Self { runtime, channel_manager, keys_manager, payment_store, config, logger } + } + + /// Send a spontaneous, aka. "keysend", payment + pub fn send( + &self, amount_msat: u64, node_id: PublicKey, custom_tlvs: Vec, + ) -> Result { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let payment_preimage = PaymentPreimage(self.keys_manager.get_secure_random_bytes()); + let payment_hash = PaymentHash::from(payment_preimage); + let payment_id = PaymentId(payment_hash.0); + + if let Some(payment) = self.payment_store.get(&payment_id) { + if payment.status == PaymentStatus::Pending + || payment.status == PaymentStatus::Succeeded + { + log_error!(self.logger, "Payment error: must not send duplicate payments."); + return Err(Error::DuplicatePayment); + } + } + + let route_params = RouteParameters::from_payment_params_and_value( + PaymentParameters::from_node_id(node_id, self.config.default_cltv_expiry_delta), + amount_msat, + ); + let recipient_fields = RecipientOnionFields::spontaneous_empty() + .with_custom_tlvs( + custom_tlvs.iter().map(|tlv| (tlv.r#type, tlv.value.clone())).collect(), + ) + .map_err(|_| { + log_error!(self.logger, "Payment error: invalid custom TLVs."); + Error::InvalidCustomTlv + })?; + + match self.channel_manager.send_spontaneous_payment_with_retry( + Some(payment_preimage), + recipient_fields, + PaymentId(payment_hash.0), + route_params, + Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT), + ) { + Ok(_hash) => { + log_info!(self.logger, "Initiated sending {}msat to {}.", amount_msat, node_id); + + let kind = PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), + custom_tlvs, + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Pending, + ); + self.payment_store.insert(payment)?; + + Ok(payment_id) + }, + Err(e) => { + log_error!(self.logger, "Failed to send payment: {:?}", e); + + match e { + RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), + _ => { + let kind = PaymentKind::Spontaneous { + hash: payment_hash, + preimage: Some(payment_preimage), + custom_tlvs, + }; + let payment = PaymentDetails::new( + payment_id, + kind, + Some(amount_msat), + PaymentDirection::Outbound, + PaymentStatus::Failed, + ); + + self.payment_store.insert(payment)?; + Err(Error::PaymentSendingFailed) + }, + } + }, + } + } + + /// Sends payment probes over all paths of a route that would be used to pay the given + /// amount to the given `node_id`. + /// + /// See [`Bolt11Payment::send_probes`] for more information. 
+ /// + /// [`Bolt11Payment::send_probes`]: crate::payment::Bolt11Payment + pub fn send_probes(&self, amount_msat: u64, node_id: PublicKey) -> Result<(), Error> { + let rt_lock = self.runtime.read().unwrap(); + if rt_lock.is_none() { + return Err(Error::NotRunning); + } + + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); + let cltv_expiry_delta = self.config.default_cltv_expiry_delta; + + self.channel_manager + .send_spontaneous_preflight_probes( + node_id, + amount_msat, + cltv_expiry_delta, + liquidity_limit_multiplier, + ) + .map_err(|e| { + log_error!(self.logger, "Failed to send payment probes: {:?}", e); + Error::ProbeSendingFailed + })?; + + Ok(()) + } +} diff --git a/src/payment/store.rs b/src/payment/store.rs new file mode 100644 index 000000000..983c90f20 --- /dev/null +++ b/src/payment/store.rs @@ -0,0 +1,675 @@ +use crate::hex_utils; +use crate::io::{ + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, +}; +use crate::logger::{log_error, Logger}; +use crate::types::{DynStore, TlvEntry}; +use crate::Error; + +use lightning::ln::channelmanager::PaymentId; +use lightning::ln::msgs::DecodeError; +use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret}; +use lightning::offers::offer::OfferId; +use lightning::util::ser::{Readable, Writeable}; +use lightning::{ + _init_and_read_len_prefixed_tlv_fields, impl_writeable_tlv_based, + impl_writeable_tlv_based_enum, write_tlv_fields, +}; + +use std::collections::HashMap; +use std::iter::FromIterator; +use std::ops::Deref; +use std::sync::{Arc, Mutex}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; + +/// Represents a payment. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct PaymentDetails { + /// The identifier of this payment. + pub id: PaymentId, + /// The kind of the payment. + pub kind: PaymentKind, + /// The amount transferred. + pub amount_msat: Option, + /// The direction of the payment. + pub direction: PaymentDirection, + /// The status of the payment. + pub status: PaymentStatus, + /// The timestamp, in seconds since start of the UNIX epoch, when this entry was last updated. + pub latest_update_timestamp: u64, +} + +impl PaymentDetails { + pub(crate) fn new( + id: PaymentId, kind: PaymentKind, amount_msat: Option, direction: PaymentDirection, + status: PaymentStatus, + ) -> Self { + let latest_update_timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); + Self { id, kind, amount_msat, direction, status, latest_update_timestamp } + } +} + +impl Writeable for PaymentDetails { + fn write( + &self, writer: &mut W, + ) -> Result<(), lightning::io::Error> { + write_tlv_fields!(writer, { + (0, self.id, required), // Used to be `hash` for v0.2.1 and prior + // 1 briefly used to be lsp_fee_limits, could probably be reused at some point in the future. 
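// Editorial note on the numbering scheme (interpretation, not author commentary): the legacy
// types 2 and 4 below are presumably still written, as `None`, so that records stay decodable
// by pre-0.3.0 readers, which treat those fields as required, while the new `kind` and
// `latest_update_timestamp` fields use the odd types 3 and 5, which readers unaware of them
// can skip.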
+ // 2 used to be `preimage` before it was moved to `kind` in v0.3.0 + (2, None::>, required), + (3, self.kind, required), + // 4 used to be `secret` before it was moved to `kind` in v0.3.0 + (4, None::>, required), + (5, self.latest_update_timestamp, required), + (6, self.amount_msat, required), + (8, self.direction, required), + (10, self.status, required) + }); + Ok(()) + } +} + +impl Readable for PaymentDetails { + fn read(reader: &mut R) -> Result { + let unix_time_secs = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); + _init_and_read_len_prefixed_tlv_fields!(reader, { + (0, id, required), // Used to be `hash` + (1, lsp_fee_limits, option), + (2, preimage, required), + (3, kind_opt, option), + (4, secret, required), + (5, latest_update_timestamp, (default_value, unix_time_secs)), + (6, amount_msat, required), + (8, direction, required), + (10, status, required) + }); + + let id: PaymentId = id.0.ok_or(DecodeError::InvalidValue)?; + let preimage: Option = preimage.0.ok_or(DecodeError::InvalidValue)?; + let secret: Option = secret.0.ok_or(DecodeError::InvalidValue)?; + let latest_update_timestamp: u64 = + latest_update_timestamp.0.ok_or(DecodeError::InvalidValue)?; + let amount_msat: Option = amount_msat.0.ok_or(DecodeError::InvalidValue)?; + let direction: PaymentDirection = direction.0.ok_or(DecodeError::InvalidValue)?; + let status: PaymentStatus = status.0.ok_or(DecodeError::InvalidValue)?; + + let kind = if let Some(kind) = kind_opt { + // If we serialized the payment kind, use it. + // This will always be the case for any version after v0.2.1. + kind + } else { + // Otherwise we persisted with v0.2.1 or before, and puzzle together the kind from the + // provided fields. + + // We used to track everything by hash, but switched to track everything by id + // post-v0.2.1. As both are serialized identically, we just switched the `0`-type field above + // from `PaymentHash` to `PaymentId` and serialize a separate `PaymentHash` in + // `PaymentKind` when needed. Here, for backwards compat, we can just re-create the + // `PaymentHash` from the id, as 'back then' `payment_hash == payment_id` was always + // true. + let hash = PaymentHash(id.0); + + if secret.is_some() { + if let Some(lsp_fee_limits) = lsp_fee_limits { + PaymentKind::Bolt11Jit { hash, preimage, secret, lsp_fee_limits } + } else { + PaymentKind::Bolt11 { hash, preimage, secret } + } + } else { + PaymentKind::Spontaneous { hash, preimage, custom_tlvs: Vec::new() } + } + }; + + Ok(PaymentDetails { id, kind, amount_msat, direction, status, latest_update_timestamp }) + } +} + +/// Represents the direction of a payment. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum PaymentDirection { + /// The payment is inbound. + Inbound, + /// The payment is outbound. + Outbound, +} + +impl_writeable_tlv_based_enum!(PaymentDirection, + (0, Inbound) => {}, + (1, Outbound) => {}; +); + +/// Represents the current status of a payment. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum PaymentStatus { + /// The payment is still pending. + Pending, + /// The payment succeeded. + Succeeded, + /// The payment failed. + Failed, +} + +impl_writeable_tlv_based_enum!(PaymentStatus, + (0, Pending) => {}, + (2, Succeeded) => {}, + (4, Failed) => {}; +); + +/// Represents the kind of a payment. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum PaymentKind { + /// An on-chain payment. + Onchain, + /// A [BOLT 11] payment. 
+ /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + Bolt11 { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + }, + /// A [BOLT 11] payment intended to open an [LSPS 2] just-in-time channel. + /// + /// [BOLT 11]: https://github.com/lightning/bolts/blob/master/11-payment-encoding.md + /// [LSPS 2]: https://github.com/BitcoinAndLightningLayerSpecs/lsp/blob/main/LSPS2/README.md + Bolt11Jit { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. + /// + /// Allowing them to deduct this fee from the first inbound payment will pay for the LSP's + /// channel opening fees. + /// + /// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. + /// + /// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs + lsp_fee_limits: LSPFeeLimits, + }, + /// A [BOLT 12] 'offer' payment, i.e., a payment for an [`Offer`]. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + /// [`Offer`]: crate::lightning::offers::offer::Offer + Bolt12Offer { + /// The payment hash, i.e., the hash of the `preimage`. + hash: Option, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + /// The ID of the offer this payment is for. + offer_id: OfferId, + }, + /// A [BOLT 12] 'refund' payment, i.e., a payment for a [`Refund`]. + /// + /// [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md + /// [`Refund`]: lightning::offers::refund::Refund + Bolt12Refund { + /// The payment hash, i.e., the hash of the `preimage`. + hash: Option, + /// The pre-image used by the payment. + preimage: Option, + /// The secret used by the payment. + secret: Option, + }, + /// A spontaneous ("keysend") payment. + Spontaneous { + /// The payment hash, i.e., the hash of the `preimage`. + hash: PaymentHash, + /// The pre-image used by the payment. + preimage: Option, + /// Custom TLVs. + custom_tlvs: Vec, + }, +} + +impl_writeable_tlv_based_enum!(PaymentKind, + (0, Onchain) => {}, + (2, Bolt11) => { + (0, hash, required), + (2, preimage, option), + (4, secret, option), + }, + (4, Bolt11Jit) => { + (0, hash, required), + (2, preimage, option), + (4, secret, option), + (6, lsp_fee_limits, required), + }, + (6, Bolt12Offer) => { + (0, hash, option), + (2, preimage, option), + (4, secret, option), + (6, offer_id, required), + }, + (8, Spontaneous) => { + (0, hash, required), + (2, preimage, option), + (131072, custom_tlvs, optional_vec), + }, + (10, Bolt12Refund) => { + (0, hash, option), + (2, preimage, option), + (4, secret, option), + }; +); + +/// Limits applying to how much fee we allow an LSP to deduct from the payment amount. +/// +/// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. +/// +/// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct LSPFeeLimits { + /// The maximal total amount we allow any configured LSP withhold from us when forwarding the + /// payment. 
+ pub max_total_opening_fee_msat: Option, + /// The maximal proportional fee, in parts-per-million millisatoshi, we allow any configured + /// LSP withhold from us when forwarding the payment. + pub max_proportional_opening_fee_ppm_msat: Option, +} + +impl_writeable_tlv_based!(LSPFeeLimits, { + (0, max_total_opening_fee_msat, option), + (2, max_proportional_opening_fee_ppm_msat, option), +}); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct PaymentDetailsUpdate { + pub id: PaymentId, + pub hash: Option>, + pub preimage: Option>, + pub secret: Option>, + pub amount_msat: Option>, + pub direction: Option, + pub status: Option, +} + +impl PaymentDetailsUpdate { + pub fn new(id: PaymentId) -> Self { + Self { + id, + hash: None, + preimage: None, + secret: None, + amount_msat: None, + direction: None, + status: None, + } + } +} + +pub(crate) struct PaymentStore +where + L::Target: Logger, +{ + payments: Mutex>, + kv_store: Arc, + logger: L, +} + +impl PaymentStore +where + L::Target: Logger, +{ + pub(crate) fn new(payments: Vec, kv_store: Arc, logger: L) -> Self { + let payments = Mutex::new(HashMap::from_iter( + payments.into_iter().map(|payment| (payment.id, payment)), + )); + Self { payments, kv_store, logger } + } + + pub(crate) fn insert(&self, payment: PaymentDetails) -> Result { + let mut locked_payments = self.payments.lock().unwrap(); + + let updated = locked_payments.insert(payment.id, payment.clone()).is_some(); + self.persist_info(&payment.id, &payment)?; + Ok(updated) + } + + pub(crate) fn remove(&self, id: &PaymentId) -> Result<(), Error> { + let store_key = hex_utils::to_string(&id.0); + self.kv_store + .remove( + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &store_key, + false, + ) + .map_err(|e| { + log_error!( + self.logger, + "Removing payment data for key {}/{}/{} failed due to: {}", + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + store_key, + e + ); + Error::PersistenceFailed + }) + } + + pub(crate) fn get(&self, id: &PaymentId) -> Option { + self.payments.lock().unwrap().get(id).cloned() + } + + pub(crate) fn update(&self, update: &PaymentDetailsUpdate) -> Result { + let mut updated = false; + let mut locked_payments = self.payments.lock().unwrap(); + + if let Some(payment) = locked_payments.get_mut(&update.id) { + if let Some(hash_opt) = update.hash { + match payment.kind { + PaymentKind::Bolt12Offer { ref mut hash, .. } => { + debug_assert_eq!(payment.direction, PaymentDirection::Outbound, + "We should only ever override payment hash for outbound BOLT 12 payments"); + *hash = hash_opt + }, + PaymentKind::Bolt12Refund { ref mut hash, .. } => { + debug_assert_eq!(payment.direction, PaymentDirection::Outbound, + "We should only ever override payment hash for outbound BOLT 12 payments"); + *hash = hash_opt + }, + _ => { + // We can omit updating the hash for BOLT11 payments as the payment hash + // will always be known from the beginning. + }, + } + } + if let Some(preimage_opt) = update.preimage { + match payment.kind { + PaymentKind::Bolt11 { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt11Jit { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt12Offer { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Bolt12Refund { ref mut preimage, .. } => *preimage = preimage_opt, + PaymentKind::Spontaneous { ref mut preimage, .. 
} => *preimage = preimage_opt, + _ => {}, + } + } + + if let Some(secret_opt) = update.secret { + match payment.kind { + PaymentKind::Bolt11 { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt11Jit { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt12Offer { ref mut secret, .. } => *secret = secret_opt, + PaymentKind::Bolt12Refund { ref mut secret, .. } => *secret = secret_opt, + _ => {}, + } + } + + if let Some(amount_opt) = update.amount_msat { + payment.amount_msat = amount_opt; + } + + if let Some(status) = update.status { + payment.status = status; + } + + payment.latest_update_timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::from_secs(0)) + .as_secs(); + + self.persist_info(&update.id, payment)?; + updated = true; + } + Ok(updated) + } + + pub(crate) fn list_filter bool>( + &self, f: F, + ) -> Vec { + self.payments + .lock() + .unwrap() + .iter() + .map(|(_, p)| p) + .filter(f) + .cloned() + .collect::>() + } + + fn persist_info(&self, id: &PaymentId, payment: &PaymentDetails) -> Result<(), Error> { + let store_key = hex_utils::to_string(&id.0); + let data = payment.encode(); + self.kv_store + .write( + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &store_key, + &data, + ) + .map_err(|e| { + log_error!( + self.logger, + "Write for key {}/{}/{} failed due to: {}", + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + store_key, + e + ); + Error::PersistenceFailed + })?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use lightning::util::{ + ser::Readable, + test_utils::{TestLogger, TestStore}, + }; + use std::io::Cursor; + use std::sync::Arc; + + /// We refactored `PaymentDetails` to hold a payment id and moved some required fields into + /// `PaymentKind`. Here, we keep the old layout available in order test de/ser compatibility. 
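// Usage sketch (illustrative, not part of the change set): matching on the `PaymentKind`
// introduced above to summarize a `PaymentDetails` entry, e.g. when rendering a payment
// list. The formatting is arbitrary.
fn describe_payment(payment: &PaymentDetails) -> String {
    let kind = match &payment.kind {
        PaymentKind::Onchain => "on-chain",
        PaymentKind::Bolt11 { .. } => "BOLT 11",
        PaymentKind::Bolt11Jit { .. } => "BOLT 11 (JIT channel)",
        PaymentKind::Bolt12Offer { .. } => "BOLT 12 offer",
        PaymentKind::Bolt12Refund { .. } => "BOLT 12 refund",
        PaymentKind::Spontaneous { .. } => "spontaneous",
    };
    format!(
        "{:?} {} payment of {:?} msat ({:?})",
        payment.direction, kind, payment.amount_msat, payment.status
    )
}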
+ #[derive(Clone, Debug, PartialEq, Eq)] + struct OldPaymentDetails { + pub hash: PaymentHash, + pub preimage: Option, + pub secret: Option, + pub amount_msat: Option, + pub direction: PaymentDirection, + pub status: PaymentStatus, + pub lsp_fee_limits: Option, + } + + impl_writeable_tlv_based!(OldPaymentDetails, { + (0, hash, required), + (1, lsp_fee_limits, option), + (2, preimage, required), + (4, secret, required), + (6, amount_msat, required), + (8, direction, required), + (10, status, required) + }); + + #[test] + fn payment_info_is_persisted() { + let store: Arc = Arc::new(TestStore::new(false)); + let logger = Arc::new(TestLogger::new()); + let payment_store = PaymentStore::new(Vec::new(), Arc::clone(&store), logger); + + let hash = PaymentHash([42u8; 32]); + let id = PaymentId([42u8; 32]); + assert!(payment_store.get(&id).is_none()); + + let store_key = hex_utils::to_string(&hash.0); + assert!(store + .read( + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &store_key + ) + .is_err()); + + let kind = PaymentKind::Bolt11 { hash, preimage: None, secret: None }; + let payment = + PaymentDetails::new(id, kind, None, PaymentDirection::Inbound, PaymentStatus::Pending); + + assert_eq!(Ok(false), payment_store.insert(payment.clone())); + assert!(payment_store.get(&id).is_some()); + assert!(store + .read( + PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + &store_key + ) + .is_ok()); + + assert_eq!(Ok(true), payment_store.insert(payment)); + assert!(payment_store.get(&id).is_some()); + + let mut update = PaymentDetailsUpdate::new(id); + update.status = Some(PaymentStatus::Succeeded); + assert_eq!(Ok(true), payment_store.update(&update)); + assert!(payment_store.get(&id).is_some()); + + assert_eq!(PaymentStatus::Succeeded, payment_store.get(&id).unwrap().status); + } + + #[test] + fn old_payment_details_deser_compat() { + // We refactored `PaymentDetails` to hold a payment id and moved some required fields into + // `PaymentKind`. Here, we test compatibility with the old layout. 
+ let hash = PaymentHash([42u8; 32]); + let preimage = Some(PaymentPreimage([43u8; 32])); + let secret = Some(PaymentSecret([44u8; 32])); + let amount_msat = Some(45_000_000); + + // Test `Bolt11` de/ser + { + let old_bolt11_payment = OldPaymentDetails { + hash, + preimage, + secret, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + + let old_bolt11_encoded = old_bolt11_payment.encode(); + assert_eq!( + old_bolt11_payment, + OldPaymentDetails::read(&mut Cursor::new(old_bolt11_encoded.clone())).unwrap() + ); + + let bolt11_decoded = + PaymentDetails::read(&mut Cursor::new(old_bolt11_encoded)).unwrap(); + let bolt11_reencoded = bolt11_decoded.encode(); + assert_eq!( + bolt11_decoded, + PaymentDetails::read(&mut Cursor::new(bolt11_reencoded)).unwrap() + ); + + match bolt11_decoded.kind { + PaymentKind::Bolt11 { hash: h, preimage: p, secret: s } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + assert_eq!(secret, s); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + + // Test `Bolt11Jit` de/ser + { + let lsp_fee_limits = Some(LSPFeeLimits { + max_total_opening_fee_msat: Some(46_000), + max_proportional_opening_fee_ppm_msat: Some(47_000), + }); + + let old_bolt11_jit_payment = OldPaymentDetails { + hash, + preimage, + secret, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits, + }; + + let old_bolt11_jit_encoded = old_bolt11_jit_payment.encode(); + assert_eq!( + old_bolt11_jit_payment, + OldPaymentDetails::read(&mut Cursor::new(old_bolt11_jit_encoded.clone())).unwrap() + ); + + let bolt11_jit_decoded = + PaymentDetails::read(&mut Cursor::new(old_bolt11_jit_encoded)).unwrap(); + let bolt11_jit_reencoded = bolt11_jit_decoded.encode(); + assert_eq!( + bolt11_jit_decoded, + PaymentDetails::read(&mut Cursor::new(bolt11_jit_reencoded)).unwrap() + ); + + match bolt11_jit_decoded.kind { + PaymentKind::Bolt11Jit { hash: h, preimage: p, secret: s, lsp_fee_limits: l } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + assert_eq!(secret, s); + assert_eq!(lsp_fee_limits, Some(l)); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + + // Test `Spontaneous` de/ser + { + let old_spontaneous_payment = OldPaymentDetails { + hash, + preimage, + secret: None, + amount_msat, + direction: PaymentDirection::Inbound, + status: PaymentStatus::Pending, + lsp_fee_limits: None, + }; + + let old_spontaneous_encoded = old_spontaneous_payment.encode(); + assert_eq!( + old_spontaneous_payment, + OldPaymentDetails::read(&mut Cursor::new(old_spontaneous_encoded.clone())).unwrap() + ); + + let spontaneous_decoded = + PaymentDetails::read(&mut Cursor::new(old_spontaneous_encoded)).unwrap(); + let spontaneous_reencoded = spontaneous_decoded.encode(); + assert_eq!( + spontaneous_decoded, + PaymentDetails::read(&mut Cursor::new(spontaneous_reencoded)).unwrap() + ); + + match spontaneous_decoded.kind { + PaymentKind::Spontaneous { hash: h, preimage: p, custom_tlvs: _ } => { + assert_eq!(hash, h); + assert_eq!(preimage, p); + }, + _ => { + panic!("Unexpected kind!"); + }, + } + } + } +} diff --git a/src/payment_store.rs b/src/payment_store.rs deleted file mode 100644 index 704966878..000000000 --- a/src/payment_store.rs +++ /dev/null @@ -1,309 +0,0 @@ -use crate::hex_utils; -use crate::io::{ - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, -}; -use crate::logger::{log_error, Logger}; -use crate::Error; - -use lightning::ln::{PaymentHash, 
PaymentPreimage, PaymentSecret}; -use lightning::util::persist::KVStore; -use lightning::util::ser::Writeable; -use lightning::{impl_writeable_tlv_based, impl_writeable_tlv_based_enum}; - -use std::collections::HashMap; -use std::iter::FromIterator; -use std::ops::Deref; -use std::sync::{Arc, Mutex}; - -/// Represents a payment. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct PaymentDetails { - /// The payment hash, i.e., the hash of the `preimage`. - pub hash: PaymentHash, - /// The pre-image used by the payment. - pub preimage: Option, - /// The secret used by the payment. - pub secret: Option, - /// The amount transferred. - pub amount_msat: Option, - /// The direction of the payment. - pub direction: PaymentDirection, - /// The status of the payment. - pub status: PaymentStatus, - /// Limits applying to how much fee we allow an LSP to deduct from the payment amount. - /// - /// This is only `Some` for payments received via a JIT-channel, in which case the first - /// inbound payment will pay for the LSP's channel opening fees. - /// - /// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. - /// - /// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs - pub lsp_fee_limits: Option, -} - -impl_writeable_tlv_based!(PaymentDetails, { - (0, hash, required), - (1, lsp_fee_limits, option), - (2, preimage, required), - (4, secret, required), - (6, amount_msat, required), - (8, direction, required), - (10, status, required) -}); - -/// Represents the direction of a payment. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum PaymentDirection { - /// The payment is inbound. - Inbound, - /// The payment is outbound. - Outbound, -} - -impl_writeable_tlv_based_enum!(PaymentDirection, - (0, Inbound) => {}, - (1, Outbound) => {}; -); - -/// Represents the current status of a payment. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum PaymentStatus { - /// The payment is still pending. - Pending, - /// The payment succeeded. - Succeeded, - /// The payment failed. - Failed, -} - -impl_writeable_tlv_based_enum!(PaymentStatus, - (0, Pending) => {}, - (2, Succeeded) => {}, - (4, Failed) => {}; -); - -/// Limits applying to how much fee we allow an LSP to deduct from the payment amount. -/// -/// See [`LdkChannelConfig::accept_underpaying_htlcs`] for more information. -/// -/// [`LdkChannelConfig::accept_underpaying_htlcs`]: lightning::util::config::ChannelConfig::accept_underpaying_htlcs -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct LSPFeeLimits { - /// The maximal total amount we allow any configured LSP withhold from us when forwarding the - /// payment. - pub max_total_opening_fee_msat: Option, - /// The maximal proportional fee, in parts-per-million millisatoshi, we allow any configured - /// LSP withhold from us when forwarding the payment. 
- pub max_proportional_opening_fee_ppm_msat: Option, -} - -impl_writeable_tlv_based!(LSPFeeLimits, { - (0, max_total_opening_fee_msat, option), - (2, max_proportional_opening_fee_ppm_msat, option), -}); - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct PaymentDetailsUpdate { - pub hash: PaymentHash, - pub preimage: Option>, - pub secret: Option>, - pub amount_msat: Option>, - pub direction: Option, - pub status: Option, - pub lsp_fee_limits: Option>, -} - -impl PaymentDetailsUpdate { - pub fn new(hash: PaymentHash) -> Self { - Self { - hash, - preimage: None, - secret: None, - amount_msat: None, - direction: None, - status: None, - lsp_fee_limits: None, - } - } -} - -pub(crate) struct PaymentStore -where - L::Target: Logger, -{ - payments: Mutex>, - kv_store: Arc, - logger: L, -} - -impl PaymentStore -where - L::Target: Logger, -{ - pub(crate) fn new(payments: Vec, kv_store: Arc, logger: L) -> Self { - let payments = Mutex::new(HashMap::from_iter( - payments.into_iter().map(|payment| (payment.hash, payment)), - )); - Self { payments, kv_store, logger } - } - - pub(crate) fn insert(&self, payment: PaymentDetails) -> Result { - let mut locked_payments = self.payments.lock().unwrap(); - - let hash = payment.hash.clone(); - let updated = locked_payments.insert(hash.clone(), payment.clone()).is_some(); - self.persist_info(&hash, &payment)?; - Ok(updated) - } - - pub(crate) fn remove(&self, hash: &PaymentHash) -> Result<(), Error> { - let store_key = hex_utils::to_string(&hash.0); - self.kv_store - .remove( - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &store_key, - false, - ) - .map_err(|e| { - log_error!( - self.logger, - "Removing payment data for key {}/{}/{} failed due to: {}", - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - store_key, - e - ); - Error::PersistenceFailed - }) - } - - pub(crate) fn get(&self, hash: &PaymentHash) -> Option { - self.payments.lock().unwrap().get(hash).cloned() - } - - pub(crate) fn update(&self, update: &PaymentDetailsUpdate) -> Result { - let mut updated = false; - let mut locked_payments = self.payments.lock().unwrap(); - - if let Some(payment) = locked_payments.get_mut(&update.hash) { - if let Some(preimage_opt) = update.preimage { - payment.preimage = preimage_opt; - } - - if let Some(secret_opt) = update.secret { - payment.secret = secret_opt; - } - - if let Some(amount_opt) = update.amount_msat { - payment.amount_msat = amount_opt; - } - - if let Some(status) = update.status { - payment.status = status; - } - - if let Some(lsp_fee_limits) = update.lsp_fee_limits { - payment.lsp_fee_limits = lsp_fee_limits - } - - self.persist_info(&update.hash, payment)?; - updated = true; - } - - Ok(updated) - } - - pub(crate) fn list_filter bool>( - &self, f: F, - ) -> Vec { - self.payments - .lock() - .unwrap() - .iter() - .map(|(_, p)| p) - .filter(f) - .cloned() - .collect::>() - } - - fn persist_info(&self, hash: &PaymentHash, payment: &PaymentDetails) -> Result<(), Error> { - let store_key = hex_utils::to_string(&hash.0); - let data = payment.encode(); - self.kv_store - .write( - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &store_key, - &data, - ) - .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - store_key, - e - ); - Error::PersistenceFailed - })?; - Ok(()) - } -} - 
-#[cfg(test)] -mod tests { - use super::*; - use lightning::util::test_utils::{TestLogger, TestStore}; - use std::sync::Arc; - - #[test] - fn payment_info_is_persisted() { - let store = Arc::new(TestStore::new(false)); - let logger = Arc::new(TestLogger::new()); - let payment_store = PaymentStore::new(Vec::new(), Arc::clone(&store), logger); - - let hash = PaymentHash([42u8; 32]); - assert!(!payment_store.get(&hash).is_some()); - - let store_key = hex_utils::to_string(&hash.0); - assert!(store - .read( - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &store_key - ) - .is_err()); - - let payment = PaymentDetails { - hash, - preimage: None, - secret: None, - amount_msat: None, - direction: PaymentDirection::Inbound, - status: PaymentStatus::Pending, - lsp_fee_limits: None, - }; - - assert_eq!(Ok(false), payment_store.insert(payment.clone())); - assert!(payment_store.get(&hash).is_some()); - assert!(store - .read( - PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &store_key - ) - .is_ok()); - - assert_eq!(Ok(true), payment_store.insert(payment)); - assert!(payment_store.get(&hash).is_some()); - - let mut update = PaymentDetailsUpdate::new(hash); - update.status = Some(PaymentStatus::Succeeded); - assert_eq!(Ok(true), payment_store.update(&update)); - assert!(payment_store.get(&hash).is_some()); - - assert_eq!(PaymentStatus::Succeeded, payment_store.get(&hash).unwrap().status); - } -} diff --git a/src/peer_store.rs b/src/peer_store.rs index 46ba1dbe2..21bd50872 100644 --- a/src/peer_store.rs +++ b/src/peer_store.rs @@ -3,10 +3,10 @@ use crate::io::{ PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, }; use crate::logger::{log_error, Logger}; +use crate::types::DynStore; use crate::{Error, SocketAddress}; use lightning::impl_writeable_tlv_based; -use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use bitcoin::secp256k1::PublicKey; @@ -15,20 +15,20 @@ use std::collections::HashMap; use std::ops::Deref; use std::sync::{Arc, RwLock}; -pub struct PeerStore +pub struct PeerStore where L::Target: Logger, { peers: RwLock>, - kv_store: Arc, + kv_store: Arc, logger: L, } -impl PeerStore +impl PeerStore where L::Target: Logger, { - pub(crate) fn new(kv_store: Arc, logger: L) -> Self { + pub(crate) fn new(kv_store: Arc, logger: L) -> Self { let peers = RwLock::new(HashMap::new()); Self { peers, kv_store, logger } } @@ -83,13 +83,13 @@ where } } -impl ReadableArgs<(Arc, L)> for PeerStore +impl ReadableArgs<(Arc, L)> for PeerStore where L::Target: Logger, { #[inline] fn read( - reader: &mut R, args: (Arc, L), + reader: &mut R, args: (Arc, L), ) -> Result { let (kv_store, logger) = args; let read_peers: PeerStoreDeserWrapper = Readable::read(reader)?; @@ -150,7 +150,7 @@ mod tests { #[test] fn peer_info_persistence() { - let store = Arc::new(TestStore::new(false)); + let store: Arc = Arc::new(TestStore::new(false)); let logger = Arc::new(TestLogger::new()); let peer_store = PeerStore::new(Arc::clone(&store), Arc::clone(&logger)); diff --git a/src/sweep.rs b/src/sweep.rs index 93dac19fa..1c772d4e9 100644 --- a/src/sweep.rs +++ b/src/sweep.rs @@ -1,34 +1,15 @@ -use crate::hex_utils; -use crate::io::{ - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, -}; -use crate::logger::{log_error, Logger}; -use crate::wallet::{Wallet, WalletKeysManager}; -use crate::Error; +//! 
The output sweeper used to live here before we upstreamed it to `rust-lightning` and migrated +//! to the upstreamed version with LDK Node v0.3.0 (May 2024). We should drop this module entirely +//! once sufficient time has passed for us to be confident any users completed the migration. -use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator}; -use lightning::chain::{self, BestBlock, Confirm, Filter, Listen, WatchedOutput}; use lightning::impl_writeable_tlv_based; use lightning::ln::ChannelId; -use lightning::sign::{EntropySource, SpendableOutputDescriptor}; -use lightning::util::persist::KVStore; -use lightning::util::ser::Writeable; +use lightning::sign::SpendableOutputDescriptor; -use bitcoin::blockdata::block::Header; -use bitcoin::blockdata::locktime::absolute::LockTime; -use bitcoin::secp256k1::Secp256k1; -use bitcoin::{BlockHash, Transaction, Txid}; - -use std::ops::Deref; -use std::sync::{Arc, Mutex}; - -const CONSIDERED_SPENT_THRESHOLD_CONF: u32 = 6; - -const REGENERATE_SPEND_THRESHOLD: u32 = 144; +use bitcoin::{BlockHash, Transaction}; #[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct SpendableOutputInfo { +pub(crate) struct DeprecatedSpendableOutputInfo { pub(crate) id: [u8; 32], pub(crate) descriptor: SpendableOutputDescriptor, pub(crate) channel_id: Option, @@ -39,55 +20,7 @@ pub(crate) struct SpendableOutputInfo { pub(crate) confirmation_hash: Option, } -impl SpendableOutputInfo { - fn to_watched_output(&self) -> WatchedOutput { - match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { outpoint, output, channel_keys_id: _ } => { - WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: *outpoint, - script_pubkey: output.script_pubkey.clone(), - } - }, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: output.outpoint, - script_pubkey: output.output.script_pubkey.clone(), - }, - SpendableOutputDescriptor::StaticPaymentOutput(output) => WatchedOutput { - block_hash: self.first_broadcast_hash, - outpoint: output.outpoint, - script_pubkey: output.output.script_pubkey.clone(), - }, - } - } - - fn is_spent_in(&self, tx: &Transaction) -> bool { - let prev_outpoint = match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { outpoint, .. } => *outpoint, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.outpoint, - SpendableOutputDescriptor::StaticPaymentOutput(output) => output.outpoint, - }; - - for input in &tx.input { - if input.previous_output == prev_outpoint.into_bitcoin_outpoint() { - return true; - } - } - - false - } - - pub(crate) fn value_satoshis(&self) -> u64 { - match &self.descriptor { - SpendableOutputDescriptor::StaticOutput { output, .. 
} => output.value, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, - SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, - } - } -} - -impl_writeable_tlv_based!(SpendableOutputInfo, { +impl_writeable_tlv_based!(DeprecatedSpendableOutputInfo, { (0, id, required), (2, descriptor, required), (4, channel_id, option), @@ -98,386 +31,10 @@ impl_writeable_tlv_based!(SpendableOutputInfo, { (14, confirmation_hash, option), }); -pub(crate) struct OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - outputs: Mutex>, - wallet: Arc>, - broadcaster: B, - fee_estimator: E, - keys_manager: Arc>, - kv_store: K, - best_block: Mutex, - chain_source: Option, - logger: L, -} - -impl OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - pub(crate) fn new( - outputs: Vec, - wallet: Arc>, broadcaster: B, - fee_estimator: E, - keys_manager: Arc>, kv_store: K, - best_block: BestBlock, chain_source: Option, logger: L, - ) -> Self { - if let Some(filter) = chain_source.as_ref() { - for output_info in &outputs { - let watched_output = output_info.to_watched_output(); - filter.register_output(watched_output); - } - } - - let outputs = Mutex::new(outputs); - let best_block = Mutex::new(best_block); - Self { - outputs, - wallet, - broadcaster, - fee_estimator, - keys_manager, - kv_store, - best_block, - chain_source, - logger, - } - } - - pub(crate) fn add_outputs( - &self, mut output_descriptors: Vec, - channel_id: Option, - ) { - let non_static_outputs = output_descriptors - .drain(..) - .filter(|desc| !matches!(desc, SpendableOutputDescriptor::StaticOutput { .. 
})) - .collect::>(); - - if non_static_outputs.is_empty() { - return; - } - - { - let mut locked_outputs = self.outputs.lock().unwrap(); - for descriptor in non_static_outputs { - let id = self.keys_manager.get_secure_random_bytes(); - let output_info = SpendableOutputInfo { - id, - descriptor, - channel_id, - first_broadcast_hash: None, - latest_broadcast_height: None, - latest_spending_tx: None, - confirmation_height: None, - confirmation_hash: None, - }; - - locked_outputs.push(output_info.clone()); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - - self.rebroadcast_if_necessary(); - } - - pub(crate) fn tracked_spendable_outputs(&self) -> Vec { - self.outputs.lock().unwrap().clone() - } - - fn rebroadcast_if_necessary(&self) { - let (cur_height, cur_hash) = { - let best_block = self.best_block.lock().unwrap(); - (best_block.height(), best_block.block_hash()) - }; - - let mut respend_descriptors = Vec::new(); - let mut respend_ids = Vec::new(); - - { - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if output_info.confirmation_height.is_some() { - // Don't rebroadcast confirmed txs - debug_assert!(output_info.confirmation_hash.is_some()); - continue; - } - - if let Some(latest_broadcast_height) = output_info.latest_broadcast_height { - // Re-generate spending tx after REGENERATE_SPEND_THRESHOLD, rebroadcast - // after every block - if latest_broadcast_height + REGENERATE_SPEND_THRESHOLD >= cur_height { - respend_descriptors.push(output_info.descriptor.clone()); - respend_ids.push(output_info.id); - } else if latest_broadcast_height < cur_height { - if let Some(latest_spending_tx) = output_info.latest_spending_tx.as_ref() { - self.broadcaster.broadcast_transactions(&[&latest_spending_tx]); - output_info.latest_broadcast_height = Some(cur_height); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!( - self.logger, - "Error persisting SpendableOutputInfo: {:?}", - e - ) - }); - } - } - } else { - // Our first broadcast. - respend_descriptors.push(output_info.descriptor.clone()); - respend_ids.push(output_info.id); - output_info.first_broadcast_hash = Some(cur_hash); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } - - if !respend_descriptors.is_empty() { - match self.get_spending_tx(&respend_descriptors, cur_height) { - Ok(spending_tx) => { - self.broadcaster.broadcast_transactions(&[&spending_tx]); - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if respend_ids.contains(&output_info.id) { - if let Some(filter) = self.chain_source.as_ref() { - let watched_output = output_info.to_watched_output(); - filter.register_output(watched_output); - } - - output_info.latest_spending_tx = Some(spending_tx.clone()); - output_info.latest_broadcast_height = Some(cur_height); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!( - self.logger, - "Error persisting SpendableOutputInfo: {:?}", - e - ) - }); - } - } - }, - Err(e) => { - log_error!(self.logger, "Error spending outputs: {:?}", e); - }, - }; - } - } - - fn prune_confirmed_outputs(&self) { - let cur_height = self.best_block.lock().unwrap().height(); - let mut locked_outputs = self.outputs.lock().unwrap(); - - // Prune all outputs that have sufficient depth by now. 
- locked_outputs.retain(|o| { - if let Some(confirmation_height) = o.confirmation_height { - if cur_height >= confirmation_height + CONSIDERED_SPENT_THRESHOLD_CONF - 1 { - let key = hex_utils::to_string(&o.id); - match self.kv_store.remove( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &key, - false, - ) { - Ok(_) => return false, - Err(e) => { - log_error!( - self.logger, - "Removal of key {}/{}/{} failed due to: {}", - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - key, - e - ); - return true; - }, - } - } - } - true - }); - } - - fn get_spending_tx( - &self, output_descriptors: &Vec, cur_height: u32, - ) -> Result { - let tx_feerate = - self.fee_estimator.get_est_sat_per_1000_weight(ConfirmationTarget::NonAnchorChannelFee); - - let destination_address = self.wallet.get_new_address().map_err(|e| { - log_error!(self.logger, "Failed to get destination address from wallet: {}", e); - })?; - - let locktime = LockTime::from_height(cur_height).unwrap_or(LockTime::ZERO); - - let output_descriptors = output_descriptors.iter().collect::>(); - self.keys_manager.spend_spendable_outputs( - &output_descriptors, - Vec::new(), - destination_address.script_pubkey(), - tx_feerate, - Some(locktime), - &Secp256k1::new(), - ) - } - - fn persist_info(&self, output: &SpendableOutputInfo) -> Result<(), Error> { - let key = hex_utils::to_string(&output.id); - let data = output.encode(); - self.kv_store - .write( - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &key, - &data, - ) - .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - key, - e - ); - Error::PersistenceFailed - }) - } -} - -impl Listen for OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - fn filtered_block_connected( - &self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32, - ) { - { - let best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash(), header.prev_blockhash, - "Blocks must be connected in chain-order - the connected header must build on the last connected header"); - assert_eq!(best_block.height(), height - 1, - "Blocks must be connected in chain-order - the connected block height must be one greater than the previous height"); - } - - self.transactions_confirmed(header, txdata, height); - self.best_block_updated(header, height); - } - - fn block_disconnected(&self, header: &Header, height: u32) { - let new_height = height - 1; - { - let mut best_block = self.best_block.lock().unwrap(); - assert_eq!(best_block.block_hash(), header.block_hash(), - "Blocks must be disconnected in chain-order - the disconnected header must be the last connected header"); - assert_eq!(best_block.height(), height, - "Blocks must be disconnected in chain-order - the disconnected block must have the correct height"); - *best_block = BestBlock::new(header.prev_blockhash, new_height) - } - - let mut locked_outputs = self.outputs.lock().unwrap(); - for output_info in locked_outputs.iter_mut() { - if output_info.confirmation_hash == Some(header.block_hash()) { - debug_assert_eq!(output_info.confirmation_height, Some(height)); - output_info.confirmation_hash = None; - 
output_info.confirmation_height = None; - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } -} - -impl Confirm for OutputSweeper -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - F::Target: Filter, - K::Target: KVStore, - L::Target: Logger, -{ - fn transactions_confirmed( - &self, header: &Header, txdata: &chain::transaction::TransactionData, height: u32, - ) { - let mut locked_outputs = self.outputs.lock().unwrap(); - for (_, tx) in txdata { - for output_info in locked_outputs.iter_mut() { - if output_info.is_spent_in(*tx) { - debug_assert!(Some(height) > output_info.latest_broadcast_height); - output_info.confirmation_hash = Some(header.block_hash()); - output_info.confirmation_height = Some(height); - output_info.latest_spending_tx = Some((*tx).clone()); - self.persist_info(&output_info).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - } - } - } - } - - fn transaction_unconfirmed(&self, txid: &Txid) { - let mut locked_outputs = self.outputs.lock().unwrap(); - - // Get what height was unconfirmed. - let unconf_height = locked_outputs - .iter() - .find(|o| o.latest_spending_tx.as_ref().map(|tx| tx.txid()) == Some(*txid)) - .and_then(|o| o.confirmation_height); - - // Unconfirm all >= this height. - locked_outputs.iter_mut().filter(|o| o.confirmation_height >= unconf_height).for_each( - |o| { - o.confirmation_hash = None; - o.confirmation_height = None; - self.persist_info(&o).unwrap_or_else(|e| { - log_error!(self.logger, "Error persisting SpendableOutputInfo: {:?}", e) - }); - }, - ); - } - - fn best_block_updated(&self, header: &Header, height: u32) { - *self.best_block.lock().unwrap() = BestBlock::new(header.block_hash(), height); - self.prune_confirmed_outputs(); - self.rebroadcast_if_necessary(); - } - - fn get_relevant_txids(&self) -> Vec<(Txid, u32, Option)> { - let locked_outputs = self.outputs.lock().unwrap(); - locked_outputs - .iter() - .filter_map(|o| { - if let Some(confirmation_hash) = o.confirmation_hash { - if let Some(confirmation_height) = o.confirmation_height { - if let Some(latest_spending_tx) = o.latest_spending_tx.as_ref() { - return Some(( - latest_spending_tx.txid(), - confirmation_height, - Some(confirmation_hash), - )); - } - } - } - - None - }) - .collect::>() +pub(crate) fn value_satoshis_from_descriptor(descriptor: &SpendableOutputDescriptor) -> u64 { + match &descriptor { + SpendableOutputDescriptor::StaticOutput { output, .. 
} => output.value, + SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, + SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, } } diff --git a/src/tx_broadcaster.rs b/src/tx_broadcaster.rs index 40483f578..4492bcfc6 100644 --- a/src/tx_broadcaster.rs +++ b/src/tx_broadcaster.rs @@ -1,4 +1,5 @@ -use crate::logger::{log_bytes, log_debug, log_error, log_trace, Logger}; +use crate::config::TX_BROADCAST_TIMEOUT_SECS; +use crate::logger::{log_bytes, log_error, log_trace, Logger}; use lightning::chain::chaininterface::BroadcasterInterface; use lightning::util::ser::Writeable; @@ -7,6 +8,7 @@ use esplora_client::AsyncClient as EsploraClient; use bitcoin::Transaction; +use reqwest::StatusCode; use tokio::sync::mpsc; use tokio::sync::Mutex; @@ -38,57 +40,68 @@ where let mut receiver = self.queue_receiver.lock().await; while let Some(next_package) = receiver.recv().await { for tx in &next_package { - match self.esplora_client.broadcast(tx).await { - Ok(()) => { - log_trace!(self.logger, "Successfully broadcast transaction {}", tx.txid()); - }, - Err(e) => match e { - esplora_client::Error::Reqwest(_) => { - // Wait 500 ms and retry in case we get a `Reqwest` error (typically - // 429) - tokio::time::sleep(Duration::from_millis(500)).await; - log_error!( + let timeout_fut = tokio::time::timeout( + Duration::from_secs(TX_BROADCAST_TIMEOUT_SECS), + self.esplora_client.broadcast(tx), + ); + match timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + log_trace!( self.logger, - "Sync failed due to HTTP connection error, retrying: {}", - e + "Successfully broadcast transaction {}", + tx.txid() ); - match self.esplora_client.broadcast(tx).await { - Ok(()) => { - log_debug!( - self.logger, - "Successfully broadcast transaction {}", - tx.txid() - ); - }, - Err(e) => { + }, + Err(e) => match e { + esplora_client::Error::Reqwest(err) => { + if err.status() == StatusCode::from_u16(400).ok() { + // Ignore 400, as this just means bitcoind already knows the + // transaction. + // FIXME: We can further differentiate here based on the error + // message which will be available with rust-esplora-client 0.7 and + // later. 
+ } else { log_error!( self.logger, - "Failed to broadcast transaction {}: {}", - tx.txid(), - e - ); - log_trace!( - self.logger, - "Failed broadcast transaction bytes: {}", - log_bytes!(tx.encode()) + "Failed to broadcast due to HTTP connection error: {}", + err ); - }, - } - }, - _ => { - log_error!( - self.logger, - "Failed to broadcast transaction {}: {}", - tx.txid(), - e - ); - log_trace!( - self.logger, - "Failed broadcast transaction bytes: {}", - log_bytes!(tx.encode()) - ); + } + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); + }, + _ => { + log_error!( + self.logger, + "Failed to broadcast transaction {}: {}", + tx.txid(), + e + ); + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); + }, }, }, + Err(e) => { + log_error!( + self.logger, + "Failed to broadcast transaction due to timeout {}: {}", + tx.txid(), + e + ); + log_trace!( + self.logger, + "Failed broadcast transaction bytes: {}", + log_bytes!(tx.encode()) + ); + }, } } } diff --git a/src/types.rs b/src/types.rs index 6269b3ddf..78cb00a82 100644 --- a/src/types.rs +++ b/src/types.rs @@ -1,9 +1,8 @@ use crate::logger::FilesystemLogger; use crate::message_handler::NodeCustomMessageHandler; -use crate::sweep::OutputSweeper; -use lightning::blinded_path::BlindedPath; use lightning::chain::chainmonitor; +use lightning::impl_writeable_tlv_based; use lightning::ln::channelmanager::ChannelDetails as LdkChannelDetails; use lightning::ln::msgs::RoutingMessageHandler; use lightning::ln::msgs::SocketAddress; @@ -12,47 +11,48 @@ use lightning::ln::ChannelId; use lightning::routing::gossip; use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters}; -use lightning::sign::{EntropySource, InMemorySigner}; +use lightning::sign::InMemorySigner; use lightning::util::config::ChannelConfig as LdkChannelConfig; use lightning::util::config::MaxDustHTLCExposure as LdkMaxDustHTLCExposure; +use lightning::util::persist::KVStore; use lightning::util::ser::{Readable, Writeable, Writer}; +use lightning::util::sweep::OutputSweeper; use lightning_net_tokio::SocketDescriptor; use lightning_transaction_sync::EsploraSyncClient; -use bitcoin::secp256k1::{self, PublicKey, Secp256k1}; +use bitcoin::secp256k1::PublicKey; use bitcoin::OutPoint; use std::sync::{Arc, Mutex, RwLock}; -pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< +pub(crate) type DynStore = dyn KVStore + Sync + Send; + +pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, Arc, Arc, Arc, Arc, - Arc, + Arc, >; -pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< +pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< SocketDescriptor, - Arc>, + Arc, Arc, Arc, Arc, - Arc>>, + Arc>>, Arc, >; pub(crate) type ChainSource = EsploraSyncClient>; -pub(crate) type LiquidityManager = lightning_liquidity::LiquidityManager< - Arc, - Arc>, - Arc, ->; +pub(crate) type LiquidityManager = + lightning_liquidity::LiquidityManager, Arc, Arc>; -pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< - Arc>, +pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< + Arc, Arc, Arc, Arc, @@ -81,30 +81,28 @@ pub(crate) type KeysManager = crate::wallet::WalletKeysManager< >; pub(crate) type Router = DefaultRouter< - Arc, + Arc, Arc, + Arc, Arc>, ProbabilisticScoringFeeParameters, Scorer, >; -pub(crate) type Scorer = 
ProbabilisticScorer, Arc>; +pub(crate) type Scorer = ProbabilisticScorer, Arc>; -pub(crate) type NetworkGraph = gossip::NetworkGraph>; +pub(crate) type Graph = gossip::NetworkGraph>; pub(crate) type UtxoLookup = dyn lightning::routing::utxo::UtxoLookup + Send + Sync; -pub(crate) type P2PGossipSync = lightning::routing::gossip::P2PGossipSync< - Arc, - Arc, - Arc, ->; +pub(crate) type P2PGossipSync = + lightning::routing::gossip::P2PGossipSync, Arc, Arc>; pub(crate) type RapidGossipSync = - lightning_rapid_gossip_sync::RapidGossipSync, Arc>; + lightning_rapid_gossip_sync::RapidGossipSync, Arc>; pub(crate) type GossipSync = lightning_background_processor::GossipSync< Arc, Arc, - Arc, + Arc, Arc, Arc, >; @@ -113,39 +111,36 @@ pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMesse Arc, Arc, Arc, - Arc, - IgnoringMessageHandler, + Arc, + Arc, + Arc, IgnoringMessageHandler, >; -pub(crate) struct FakeMessageRouter {} - -impl lightning::onion_message::messenger::MessageRouter for FakeMessageRouter { - fn find_path( - &self, _sender: PublicKey, _peers: Vec, - _destination: lightning::onion_message::messenger::Destination, - ) -> Result { - unimplemented!() - } - fn create_blinded_paths< - ES: EntropySource + ?Sized, - T: secp256k1::Signing + secp256k1::Verification, - >( - &self, _recipient: PublicKey, _peers: Vec, _entropy_source: &ES, - _secp_ctx: &Secp256k1, - ) -> Result, ()> { - unreachable!() - } -} +pub(crate) type MessageRouter = lightning::onion_message::messenger::DefaultMessageRouter< + Arc, + Arc, + Arc, +>; -pub(crate) type Sweeper = OutputSweeper< +pub(crate) type Sweeper = OutputSweeper< Arc, + Arc, Arc, Arc, - Arc, + Arc, Arc, + Arc, >; +pub(crate) type BumpTransactionEventHandler = + lightning::events::bump_transaction::BumpTransactionEventHandler< + Arc, + Arc, Arc>>, + Arc, + Arc, + >; + /// A local, potentially user-provided, identifier of a channel. /// /// By default, this will be randomly generated for the user to ensure local uniqueness. @@ -206,11 +201,11 @@ pub struct ChannelDetails { /// balance is not available for inclusion in new outbound HTLCs). This further does not include /// any pending outgoing HTLCs which are awaiting some other resolution to be sent. pub outbound_capacity_msat: u64, - /// The available outbound capacity for sending HTLCs to the remote peer. + /// The available inbound capacity for receiving HTLCs from the remote peer. /// /// The amount does not include any pending HTLCs which are not yet resolved /// (and, thus, whose balance is not available for inclusion in new inbound HTLCs). This further - /// does not include any pending outgoing HTLCs which are awaiting some other resolution to be + /// does not include any pending incoming HTLCs which are awaiting some other resolution to be /// sent. pub inbound_capacity_msat: u64, /// The number of required confirmations on the funding transactions before the funding is @@ -293,7 +288,7 @@ impl From for ChannelDetails { ChannelDetails { channel_id: value.channel_id, counterparty_node_id: value.counterparty.node_id, - funding_txo: value.funding_txo.and_then(|o| Some(o.into_bitcoin_outpoint())), + funding_txo: value.funding_txo.map(|o| o.into_bitcoin_outpoint()), channel_value_sats: value.channel_value_satoshis, unspendable_punishment_reserve: value.unspendable_punishment_reserve, user_channel_id: UserChannelId(value.user_channel_id), @@ -456,3 +451,18 @@ impl Default for ChannelConfig { LdkChannelConfig::default().into() } } + +/// Custom TLV entry. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct TlvEntry { + /// Type number. + pub r#type: u64, + + /// Serialized value. + pub value: Vec, +} + +impl_writeable_tlv_based!(TlvEntry, { + (0, r#type, required), + (1, value, required), +}); diff --git a/src/uniffi_types.rs b/src/uniffi_types.rs index 7c6343b84..9dd7e5699 100644 --- a/src/uniffi_types.rs +++ b/src/uniffi_types.rs @@ -1,9 +1,17 @@ +pub use crate::graph::{ChannelInfo, ChannelUpdateInfo, NodeAnnouncementInfo, NodeInfo}; +pub use crate::payment::store::{LSPFeeLimits, PaymentDirection, PaymentKind, PaymentStatus}; + pub use lightning::events::{ClosureReason, PaymentFailureReason}; -pub use lightning::ln::ChannelId; -pub use lightning::ln::PaymentSecret; +pub use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret}; +pub use lightning::offers::invoice::Bolt12Invoice; +pub use lightning::offers::offer::{Offer, OfferId}; +pub use lightning::offers::refund::Refund; +pub use lightning::routing::gossip::{NodeId, RoutingFees}; pub use lightning::util::string::UntrustedString; -pub use bitcoin::{BlockHash, Network, OutPoint}; +pub use lightning_invoice::Bolt11Invoice; + +pub use bitcoin::{Address, BlockHash, Network, OutPoint, Txid}; pub use bip39::Mnemonic; @@ -11,23 +19,18 @@ use crate::UniffiCustomTypeConverter; use crate::error::Error; use crate::hex_utils; -use crate::io::sqlite_store::SqliteStore; -use crate::{Node, SocketAddress, UserChannelId}; +use crate::{SocketAddress, UserChannelId}; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; use bitcoin::secp256k1::PublicKey; -use bitcoin::{Address, Txid}; -use lightning::ln::{PaymentHash, PaymentPreimage}; -use lightning_invoice::{Bolt11Invoice, SignedRawBolt11Invoice}; +use lightning::ln::channelmanager::PaymentId; +use lightning::util::ser::Writeable; +use lightning_invoice::SignedRawBolt11Invoice; use std::convert::TryInto; use std::str::FromStr; -/// This type alias is required as Uniffi doesn't support generics, i.e., we can only expose the -/// concretized types via this aliasing hack. 
-pub type LDKNode = Node; - impl UniffiCustomTypeConverter for PublicKey { type Builtin = String; @@ -44,6 +47,22 @@ impl UniffiCustomTypeConverter for PublicKey { } } +impl UniffiCustomTypeConverter for NodeId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Ok(key) = NodeId::from_str(&val) { + return Ok(key); + } + + Err(Error::InvalidNodeId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + impl UniffiCustomTypeConverter for Address { type Builtin = String; @@ -78,6 +97,83 @@ impl UniffiCustomTypeConverter for Bolt11Invoice { } } +impl UniffiCustomTypeConverter for Offer { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + Offer::from_str(&val).map_err(|_| Error::InvalidOffer.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + +impl UniffiCustomTypeConverter for Refund { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + Refund::from_str(&val).map_err(|_| Error::InvalidRefund.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + obj.to_string() + } +} + +impl UniffiCustomTypeConverter for Bolt12Invoice { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + if let Ok(invoice) = Bolt12Invoice::try_from(bytes_vec) { + return Ok(invoice); + } + } + Err(Error::InvalidInvoice.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.encode()) + } +} + +impl UniffiCustomTypeConverter for OfferId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + let bytes_res = bytes_vec.try_into(); + if let Ok(bytes) = bytes_res { + return Ok(OfferId(bytes)); + } + } + Err(Error::InvalidOfferId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.0) + } +} + +impl UniffiCustomTypeConverter for PaymentId { + type Builtin = String; + + fn into_custom(val: Self::Builtin) -> uniffi::Result { + if let Some(bytes_vec) = hex_utils::to_vec(&val) { + let bytes_res = bytes_vec.try_into(); + if let Ok(bytes) = bytes_res { + return Ok(PaymentId(bytes)); + } + } + Err(Error::InvalidPaymentId.into()) + } + + fn from_custom(obj: Self) -> Self::Builtin { + hex_utils::to_string(&obj.0) + } +} + impl UniffiCustomTypeConverter for PaymentHash { type Builtin = String; diff --git a/src/wallet.rs b/src/wallet.rs index aa38eb986..0da3f6db8 100644 --- a/src/wallet.rs +++ b/src/wallet.rs @@ -1,14 +1,16 @@ use crate::logger::{log_error, log_info, log_trace, Logger}; +use crate::config::BDK_WALLET_SYNC_TIMEOUT_SECS; use crate::Error; use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget, FeeEstimator}; +use lightning::events::bump_transaction::{Utxo, WalletSource}; use lightning::ln::msgs::{DecodeError, UnsignedGossipMessage}; use lightning::ln::script::ShutdownScript; use lightning::sign::{ - EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, Recipient, SignerProvider, - SpendableOutputDescriptor, + ChangeDestinationSource, EntropySource, InMemorySigner, KeyMaterial, KeysManager, NodeSigner, + OutputSpender, Recipient, SignerProvider, SpendableOutputDescriptor, }; use lightning::util::message_signing; @@ -16,20 +18,31 @@ use lightning::util::message_signing; use bdk::blockchain::EsploraBlockchain; use bdk::database::BatchDatabase; use 
bdk::wallet::AddressIndex; -use bdk::FeeRate; +use bdk::{Balance, FeeRate}; use bdk::{SignOptions, SyncOptions}; +use bitcoin::address::{Payload, WitnessVersion}; use bitcoin::bech32::u5; +use bitcoin::blockdata::constants::WITNESS_SCALE_FACTOR; use bitcoin::blockdata::locktime::absolute::LockTime; +use bitcoin::hash_types::WPubkeyHash; +use bitcoin::hashes::Hash; +use bitcoin::key::XOnlyPublicKey; +use bitcoin::psbt::PartiallySignedTransaction; use bitcoin::secp256k1::ecdh::SharedSecret; use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature}; use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; use bitcoin::{ScriptBuf, Transaction, TxOut, Txid}; -use std::ops::Deref; -use std::sync::{Arc, Condvar, Mutex}; +use std::ops::{Deref, DerefMut}; +use std::sync::{Arc, Mutex, RwLock}; use std::time::Duration; +enum WalletSyncStatus { + Completed, + InProgress { subscribers: tokio::sync::broadcast::Sender> }, +} + pub struct Wallet where D: BatchDatabase, @@ -44,7 +57,10 @@ where // A cache storing the most recently retrieved fee rate estimations. broadcaster: B, fee_estimator: E, - sync_lock: (Mutex<()>, Condvar), + // A Mutex holding the current sync status. + sync_status: Mutex, + // TODO: Drop this workaround after BDK 1.0 upgrade. + balance_cache: RwLock, logger: L, } @@ -59,58 +75,77 @@ where blockchain: EsploraBlockchain, wallet: bdk::Wallet, broadcaster: B, fee_estimator: E, logger: L, ) -> Self { + let start_balance = wallet.get_balance().unwrap_or(Balance { + immature: 0, + trusted_pending: 0, + untrusted_pending: 0, + confirmed: 0, + }); + let inner = Mutex::new(wallet); - let sync_lock = (Mutex::new(()), Condvar::new()); - Self { blockchain, inner, broadcaster, fee_estimator, sync_lock, logger } + let sync_status = Mutex::new(WalletSyncStatus::Completed); + let balance_cache = RwLock::new(start_balance); + Self { blockchain, inner, broadcaster, fee_estimator, sync_status, balance_cache, logger } } pub(crate) async fn sync(&self) -> Result<(), Error> { - let (lock, cvar) = &self.sync_lock; - - let guard = match lock.try_lock() { - Ok(guard) => guard, - Err(_) => { - log_info!(self.logger, "Sync in progress, skipping."); - let guard = cvar.wait(lock.lock().unwrap()); - drop(guard); - cvar.notify_all(); - return Ok(()); - }, - }; + if let Some(mut sync_receiver) = self.register_or_subscribe_pending_sync() { + log_info!(self.logger, "Sync in progress, skipping."); + return sync_receiver.recv().await.map_err(|e| { + debug_assert!(false, "Failed to receive wallet sync result: {:?}", e); + log_error!(self.logger, "Failed to receive wallet sync result: {:?}", e); + Error::WalletOperationFailed + })?; + } - let sync_options = SyncOptions { progress: None }; - let wallet_lock = self.inner.lock().unwrap(); - let res = match wallet_lock.sync(&self.blockchain, sync_options).await { - Ok(()) => Ok(()), - Err(e) => match e { - bdk::Error::Esplora(ref be) => match **be { - bdk::blockchain::esplora::EsploraError::Reqwest(_) => { - tokio::time::sleep(Duration::from_secs(1)).await; - log_error!( - self.logger, - "Sync failed due to HTTP connection error, retrying: {}", - e - ); - let sync_options = SyncOptions { progress: None }; - wallet_lock - .sync(&self.blockchain, sync_options) - .await - .map_err(|e| From::from(e)) + let res = { + let wallet_lock = self.inner.lock().unwrap(); + + let wallet_sync_timeout_fut = tokio::time::timeout( + Duration::from_secs(BDK_WALLET_SYNC_TIMEOUT_SECS), + wallet_lock.sync(&self.blockchain, SyncOptions { progress: None }), + ); + + match 
wallet_sync_timeout_fut.await { + Ok(res) => match res { + Ok(()) => { + // TODO: Drop this workaround after BDK 1.0 upgrade. + // Update balance cache after syncing. + if let Ok(balance) = wallet_lock.get_balance() { + *self.balance_cache.write().unwrap() = balance; + } + Ok(()) }, - _ => { - log_error!(self.logger, "Sync failed due to Esplora error: {}", e); - Err(From::from(e)) + Err(e) => match e { + bdk::Error::Esplora(ref be) => match **be { + bdk::blockchain::esplora::EsploraError::Reqwest(_) => { + log_error!( + self.logger, + "Sync failed due to HTTP connection error: {}", + e + ); + Err(From::from(e)) + }, + _ => { + log_error!(self.logger, "Sync failed due to Esplora error: {}", e); + Err(From::from(e)) + }, + }, + _ => { + log_error!(self.logger, "Wallet sync error: {}", e); + Err(From::from(e)) + }, }, }, - _ => { - log_error!(self.logger, "Wallet sync error: {}", e); - Err(From::from(e)) + Err(e) => { + log_error!(self.logger, "On-chain wallet sync timed out: {}", e); + Err(Error::WalletOperationTimeout) }, - }, + } }; - drop(guard); - cvar.notify_all(); + self.propagate_result_to_subscribers(res); + res } @@ -162,8 +197,40 @@ where Ok(address_info.address) } - pub(crate) fn get_balance(&self) -> Result { - Ok(self.inner.lock().unwrap().get_balance()?) + fn get_new_internal_address(&self) -> Result { + let address_info = + self.inner.lock().unwrap().get_internal_address(AddressIndex::LastUnused)?; + Ok(address_info.address) + } + + pub(crate) fn get_balances( + &self, total_anchor_channels_reserve_sats: u64, + ) -> Result<(u64, u64), Error> { + // TODO: Drop this workaround after BDK 1.0 upgrade. + // We get the balance and update our cache if we can do so without blocking on the wallet + // Mutex. Otherwise, we return a cached value. + let balance = match self.inner.try_lock() { + Ok(wallet_lock) => { + // Update balance cache if we can. + let balance = wallet_lock.get_balance()?; + *self.balance_cache.write().unwrap() = balance.clone(); + balance + }, + Err(_) => self.balance_cache.read().unwrap().clone(), + }; + + let (total, spendable) = ( + balance.get_total(), + balance.get_spendable().saturating_sub(total_anchor_channels_reserve_sats), + ); + + Ok((total, spendable)) + } + + pub(crate) fn get_spendable_amount_sats( + &self, total_anchor_channels_reserve_sats: u64, + ) -> Result { + self.get_balances(total_anchor_channels_reserve_sats).map(|(_, s)| s) } /// Send funds to the given address. @@ -173,7 +240,7 @@ where pub(crate) fn send_to_address( &self, address: &bitcoin::Address, amount_msat_or_drain: Option, ) -> Result { - let confirmation_target = ConfirmationTarget::NonAnchorChannelFee; + let confirmation_target = ConfirmationTarget::OutputSpendingFee; let fee_rate = FeeRate::from_sat_per_kwu( self.fee_estimator.get_est_sat_per_1000_weight(confirmation_target) as f32, ); @@ -243,6 +310,177 @@ where Ok(txid) } + + fn register_or_subscribe_pending_sync( + &self, + ) -> Option>> { + let mut sync_status_lock = self.sync_status.lock().unwrap(); + match sync_status_lock.deref_mut() { + WalletSyncStatus::Completed => { + // We're first to register for a sync. + let (tx, _) = tokio::sync::broadcast::channel(1); + *sync_status_lock = WalletSyncStatus::InProgress { subscribers: tx }; + None + }, + WalletSyncStatus::InProgress { subscribers } => { + // A sync is in-progress, we subscribe. 
+ let rx = subscribers.subscribe(); + Some(rx) + }, + } + } + + fn propagate_result_to_subscribers(&self, res: Result<(), Error>) { + // Send the notification to any other tasks that might be waiting on it by now. + { + let mut sync_status_lock = self.sync_status.lock().unwrap(); + match sync_status_lock.deref_mut() { + WalletSyncStatus::Completed => { + // No sync in-progress, do nothing. + return; + }, + WalletSyncStatus::InProgress { subscribers } => { + // A sync is in-progress, we notify subscribers. + if subscribers.receiver_count() > 0 { + match subscribers.send(res) { + Ok(_) => (), + Err(e) => { + debug_assert!( + false, + "Failed to send wallet sync result to subscribers: {:?}", + e + ); + log_error!( + self.logger, + "Failed to send wallet sync result to subscribers: {:?}", + e + ); + }, + } + } + *sync_status_lock = WalletSyncStatus::Completed; + }, + } + } + } +} + +impl WalletSource for Wallet +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + fn list_confirmed_utxos(&self) -> Result, ()> { + let locked_wallet = self.inner.lock().unwrap(); + let mut utxos = Vec::new(); + let confirmed_txs: Vec = locked_wallet + .list_transactions(false) + .map_err(|e| { + log_error!(self.logger, "Failed to retrieve transactions from wallet: {}", e); + })? + .into_iter() + .filter(|t| t.confirmation_time.is_some()) + .collect(); + let unspent_confirmed_utxos = locked_wallet + .list_unspent() + .map_err(|e| { + log_error!( + self.logger, + "Failed to retrieve unspent transactions from wallet: {}", + e + ); + })? + .into_iter() + .filter(|u| confirmed_txs.iter().find(|t| t.txid == u.outpoint.txid).is_some()); + + for u in unspent_confirmed_utxos { + let payload = Payload::from_script(&u.txout.script_pubkey).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + + match payload { + Payload::WitnessProgram(program) => match program.version() { + WitnessVersion::V0 if program.program().len() == 20 => { + let wpkh = + WPubkeyHash::from_slice(program.program().as_bytes()).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + let utxo = Utxo::new_v0_p2wpkh(u.outpoint, u.txout.value, &wpkh); + utxos.push(utxo); + }, + WitnessVersion::V1 => { + XOnlyPublicKey::from_slice(program.program().as_bytes()).map_err(|e| { + log_error!(self.logger, "Failed to retrieve script payload: {}", e); + })?; + + let utxo = Utxo { + outpoint: u.outpoint, + output: TxOut { + value: u.txout.value, + script_pubkey: ScriptBuf::new_witness_program(&program), + }, + satisfaction_weight: 1 /* empty script_sig */ * WITNESS_SCALE_FACTOR as u64 + + 1 /* witness items */ + 1 /* schnorr sig len */ + 64, /* schnorr sig */ + }; + utxos.push(utxo); + }, + _ => { + log_error!( + self.logger, + "Unexpected witness version or length. Version: {}, Length: {}", + program.version(), + program.program().len() + ); + }, + }, + _ => { + log_error!( + self.logger, + "Tried to use a non-witness script. This must never happen." + ); + panic!("Tried to use a non-witness script. 
This must never happen."); + }, + } + } + + Ok(utxos) + } + + fn get_change_script(&self) -> Result { + let locked_wallet = self.inner.lock().unwrap(); + let address_info = + locked_wallet.get_internal_address(AddressIndex::LastUnused).map_err(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + })?; + + Ok(address_info.address.script_pubkey()) + } + + fn sign_psbt(&self, mut psbt: PartiallySignedTransaction) -> Result { + let locked_wallet = self.inner.lock().unwrap(); + + // While BDK populates both `witness_utxo` and `non_witness_utxo` fields, LDK does not. As + // BDK by default doesn't trust the witness UTXO to account for the Segwit bug, we must + // disable it here as otherwise we fail to sign. + let mut sign_options = SignOptions::default(); + sign_options.trust_witness_utxo = true; + + match locked_wallet.sign(&mut psbt, sign_options) { + Ok(_finalized) => { + // BDK will fail to finalize for all LDK-provided inputs of the PSBT. Unfortunately + // we can't check more fine grained if it succeeded for all the other inputs here, + // so we just ignore the returned `finalized` bool. + }, + Err(err) => { + log_error!(self.logger, "Failed to sign transaction: {}", err); + return Err(()); + }, + } + + Ok(psbt.extract_tx()) + } } /// Similar to [`KeysManager`], but overrides the destination and shutdown scripts so they are @@ -278,22 +516,6 @@ where Self { inner, wallet, logger } } - /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. - pub fn spend_spendable_outputs( - &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec, - change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32, - locktime: Option, secp_ctx: &Secp256k1, - ) -> Result { - self.inner.spend_spendable_outputs( - descriptors, - outputs, - change_destination_script, - feerate_sat_per_1000_weight, - locktime, - secp_ctx, - ) - } - pub fn sign_message(&self, msg: &[u8]) -> Result { message_signing::sign(msg, &self.inner.get_node_secret_key()) .or(Err(Error::MessageSigningFailed)) @@ -352,6 +574,30 @@ where } } +impl OutputSpender for WalletKeysManager +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. 
+ fn spend_spendable_outputs( + &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec, + change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32, + locktime: Option, secp_ctx: &Secp256k1, + ) -> Result { + self.inner.spend_spendable_outputs( + descriptors, + outputs, + change_destination_script, + feerate_sat_per_1000_weight, + locktime, + secp_ctx, + ) + } +} + impl EntropySource for WalletKeysManager where D: BatchDatabase, @@ -402,11 +648,10 @@ where })?; match address.payload { - bitcoin::address::Payload::WitnessProgram(program) => { - ShutdownScript::new_witness_program(&program).map_err(|e| { + Payload::WitnessProgram(program) => ShutdownScript::new_witness_program(&program) + .map_err(|e| { log_error!(self.logger, "Invalid shutdown script: {:?}", e); - }) - }, + }), _ => { log_error!( self.logger, @@ -417,3 +662,18 @@ where } } } + +impl ChangeDestinationSource for WalletKeysManager +where + D: BatchDatabase, + B::Target: BroadcasterInterface, + E::Target: FeeEstimator, + L::Target: Logger, +{ + fn get_change_destination_script(&self) -> Result { + let address = self.wallet.get_new_internal_address().map_err(|e| { + log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); + })?; + Ok(address.script_pubkey()) + } +} diff --git a/tests/common.rs b/tests/common/mod.rs similarity index 52% rename from tests/common.rs rename to tests/common/mod.rs index 3696f9b71..a85aa3032 100644 --- a/tests/common.rs +++ b/tests/common/mod.rs @@ -2,15 +2,20 @@ #![allow(dead_code)] use ldk_node::io::sqlite_store::SqliteStore; +use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus}; use ldk_node::{ - Builder, Config, Event, LogLevel, Node, NodeError, PaymentDirection, PaymentStatus, + Builder, Config, Event, LightningBalance, LogLevel, Node, NodeError, PendingSweepBalance, + TlvEntry, }; use lightning::ln::msgs::SocketAddress; +use lightning::ln::{PaymentHash, PaymentPreimage}; use lightning::util::persist::KVStore; use lightning::util::test_utils::TestStore; use lightning_persister::fs_store::FilesystemStore; +use bitcoin::hashes::sha256::Hash as Sha256; +use bitcoin::hashes::Hash; use bitcoin::{Address, Amount, Network, OutPoint, Txid}; use bitcoincore_rpc::bitcoincore_rpc_json::AddressType; @@ -80,6 +85,69 @@ macro_rules! expect_channel_ready_event { pub(crate) use expect_channel_ready_event; +macro_rules! expect_payment_received_event { + ($node: expr, $amount_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentReceived { payment_id, amount_msat, .. } => { + println!("{} got event {:?}", $node.node_id(), e); + assert_eq!(amount_msat, $amount_msat); + $node.event_handled(); + payment_id + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); + }, + } + }}; +} + +pub(crate) use expect_payment_received_event; + +macro_rules! expect_payment_claimable_event { + ($node: expr, $payment_id: expr, $payment_hash: expr, $claimable_amount_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentClaimable { + payment_id, + payment_hash, + claimable_amount_msat, + .. 
+ } => { + println!("{} got event {:?}", std::stringify!($node), e); + assert_eq!(payment_hash, $payment_hash); + assert_eq!(payment_id, $payment_id); + assert_eq!(claimable_amount_msat, $claimable_amount_msat); + $node.event_handled(); + claimable_amount_msat + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!($node), e); + }, + } + }}; +} + +pub(crate) use expect_payment_claimable_event; + +macro_rules! expect_payment_successful_event { + ($node: expr, $payment_id: expr, $fee_paid_msat: expr) => {{ + match $node.wait_next_event() { + ref e @ Event::PaymentSuccessful { payment_id, fee_paid_msat, .. } => { + println!("{} got event {:?}", $node.node_id(), e); + if let Some(fee_msat) = $fee_paid_msat { + assert_eq!(fee_paid_msat, fee_msat); + } + assert_eq!(payment_id, $payment_id); + $node.event_handled(); + }, + ref e => { + panic!("{} got unexpected event!: {:?}", std::stringify!(node_b), e); + }, + } + }}; +} + +pub(crate) use expect_payment_successful_event; + pub(crate) fn setup_bitcoind_and_electrsd() -> (BitcoinD, ElectrsD) { let bitcoind_exe = env::var("BITCOIND_EXE").ok().or_else(|| bitcoind::downloaded_exe_path().ok()).expect( @@ -126,10 +194,16 @@ pub(crate) fn random_listening_addresses() -> Vec { listening_addresses } -pub(crate) fn random_config() -> Config { +pub(crate) fn random_config(anchor_channels: bool) -> Config { let mut config = Config::default(); + if !anchor_channels { + config.anchor_channels_config = None; + } + config.network = Network::Regtest; + config.onchain_wallet_sync_interval_secs = 100000; + config.wallet_sync_interval_secs = 100000; println!("Setting network: {}", config.network); let rand_dir = random_storage_path(); @@ -146,9 +220,9 @@ pub(crate) fn random_config() -> Config { } #[cfg(feature = "uniffi")] -type TestNode = Arc>; +type TestNode = Arc; #[cfg(not(feature = "uniffi"))] -type TestNode = Node; +type TestNode = Node; macro_rules! setup_builder { ($builder: ident, $config: expr) => { @@ -162,28 +236,38 @@ macro_rules! 
setup_builder { pub(crate) use setup_builder; pub(crate) fn setup_two_nodes( - electrsd: &ElectrsD, allow_0conf: bool, -) -> (TestNode, TestNode) { + electrsd: &ElectrsD, allow_0conf: bool, anchor_channels: bool, anchors_trusted_no_reserve: bool, +) -> (TestNode, TestNode) { println!("== Node A =="); - let config_a = random_config(); + let config_a = random_config(anchor_channels); let node_a = setup_node(electrsd, config_a); println!("\n== Node B =="); - let mut config_b = random_config(); + let mut config_b = random_config(anchor_channels); if allow_0conf { config_b.trusted_peers_0conf.push(node_a.node_id()); } + if anchor_channels && anchors_trusted_no_reserve { + config_b + .anchor_channels_config + .as_mut() + .unwrap() + .trusted_peers_no_reserve + .push(node_a.node_id()); + } let node_b = setup_node(electrsd, config_b); (node_a, node_b) } -pub(crate) fn setup_node(electrsd: &ElectrsD, config: Config) -> TestNode { +pub(crate) fn setup_node(electrsd: &ElectrsD, config: Config) -> TestNode { let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); let test_sync_store = Arc::new(TestSyncStore::new(config.storage_dir_path.into())); let node = builder.build_with_store(test_sync_store).unwrap(); node.start().unwrap(); + assert!(node.status().is_running); + assert!(node.status().latest_fee_rate_cache_update_timestamp.is_some()); node } @@ -294,8 +378,8 @@ pub(crate) fn premine_and_distribute_funds( generate_blocks_and_wait(bitcoind, electrs, 1); } -pub fn open_channel( - node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, announce: bool, +pub fn open_channel( + node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, announce: bool, electrsd: &ElectrsD, ) { node_a @@ -316,14 +400,14 @@ pub fn open_channel( wait_for_tx(&electrsd.client, funding_txo_a.txid); } -pub(crate) fn do_channel_full_cycle( - node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, - allow_0conf: bool, +pub(crate) fn do_channel_full_cycle( + node_a: TestNode, node_b: TestNode, bitcoind: &BitcoindClient, electrsd: &E, allow_0conf: bool, + expect_anchor_channel: bool, force_close: bool, ) { - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); - let premine_amount_sat = 100_000; + let premine_amount_sat = if expect_anchor_channel { 2_125_000 } else { 2_100_000 }; premine_and_distribute_funds( &bitcoind, @@ -341,7 +425,7 @@ pub(crate) fn do_channel_full_cycle( assert_eq!(node_b.next_event(), None); println!("\nA -- connect_open_channel -> B"); - let funding_amount_sat = 80_000; + let funding_amount_sat = 2_080_000; let push_msat = (funding_amount_sat / 2) * 1000; // balance the channel node_a .connect_open_channel( @@ -370,25 +454,50 @@ pub(crate) fn do_channel_full_cycle( node_b.sync_wallets().unwrap(); let onchain_fee_buffer_sat = 1500; - let node_a_upper_bound_sat = premine_amount_sat - funding_amount_sat; - let node_a_lower_bound_sat = premine_amount_sat - funding_amount_sat - onchain_fee_buffer_sat; + let node_a_anchor_reserve_sat = if expect_anchor_channel { 25_000 } else { 0 }; + let node_a_upper_bound_sat = + premine_amount_sat - node_a_anchor_reserve_sat - funding_amount_sat; + let node_a_lower_bound_sat = premine_amount_sat + - node_a_anchor_reserve_sat + - funding_amount_sat + - 
onchain_fee_buffer_sat; assert!(node_a.list_balances().spendable_onchain_balance_sats < node_a_upper_bound_sat); assert!(node_a.list_balances().spendable_onchain_balance_sats > node_a_lower_bound_sat); - assert_eq!(node_b.list_balances().spendable_onchain_balance_sats, premine_amount_sat); + assert_eq!( + node_a.list_balances().total_anchor_channels_reserve_sats, + node_a_anchor_reserve_sat + ); - expect_channel_ready_event!(node_a, node_b.node_id()); + let node_b_anchor_reserve_sat = if node_b + .config() + .anchor_channels_config + .map_or(true, |acc| acc.trusted_peers_no_reserve.contains(&node_a.node_id())) + { + 0 + } else { + 25_000 + }; + assert_eq!( + node_b.list_balances().spendable_onchain_balance_sats, + premine_amount_sat - node_b_anchor_reserve_sat + ); + assert_eq!( + node_b.list_balances().total_anchor_channels_reserve_sats, + node_b_anchor_reserve_sat + ); - let user_channel_id = expect_channel_ready_event!(node_b, node_a.node_id()); + let user_channel_id = expect_channel_ready_event!(node_a, node_b.node_id()); + expect_channel_ready_event!(node_b, node_a.node_id()); - println!("\nB receive_payment"); + println!("\nB receive"); let invoice_amount_1_msat = 2500_000; - let invoice = node_b.receive_payment(invoice_amount_1_msat, &"asdf", 9217).unwrap(); + let invoice = node_b.bolt11_payment().receive(invoice_amount_1_msat, &"asdf", 9217).unwrap(); - println!("\nA send_payment"); - let payment_hash = node_a.send_payment(&invoice).unwrap(); - assert_eq!(node_a.send_payment(&invoice), Err(NodeError::DuplicatePayment)); + println!("\nA send"); + let payment_id = node_a.bolt11_payment().send(&invoice).unwrap(); + assert_eq!(node_a.bolt11_payment().send(&invoice), Err(NodeError::DuplicatePayment)); - assert_eq!(node_a.list_payments().first().unwrap().hash, payment_hash); + assert_eq!(node_a.list_payments().first().unwrap().id, payment_id); let outbound_payments_a = node_a.list_payments_with_filter(|p| p.direction == PaymentDirection::Outbound); @@ -408,38 +517,41 @@ pub(crate) fn do_channel_full_cycle( expect_event!(node_a, PaymentSuccessful); expect_event!(node_b, PaymentReceived); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); // Assert we fail duplicate outbound payments and check the status hasn't changed. 
- assert_eq!(Err(NodeError::DuplicatePayment), node_a.send_payment(&invoice)); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(Err(NodeError::DuplicatePayment), node_a.bolt11_payment().send(&invoice)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(invoice_amount_1_msat)); // Test under-/overpayment let invoice_amount_2_msat = 2500_000; - let invoice = node_b.receive_payment(invoice_amount_2_msat, &"asdf", 9217).unwrap(); + let invoice = node_b.bolt11_payment().receive(invoice_amount_2_msat, &"asdf", 9217).unwrap(); let underpaid_amount = invoice_amount_2_msat - 1; assert_eq!( Err(NodeError::InvalidAmount), - node_a.send_payment_using_amount(&invoice, underpaid_amount) + node_a.bolt11_payment().send_using_amount(&invoice, underpaid_amount) ); - println!("\nB overpaid receive_payment"); - let invoice = node_b.receive_payment(invoice_amount_2_msat, &"asdf", 9217).unwrap(); + println!("\nB overpaid receive"); + let invoice = node_b.bolt11_payment().receive(invoice_amount_2_msat, &"asdf", 9217).unwrap(); let overpaid_amount_msat = invoice_amount_2_msat + 100; - println!("\nA overpaid send_payment"); - let payment_hash = node_a.send_payment_using_amount(&invoice, overpaid_amount_msat).unwrap(); + println!("\nA overpaid send"); + let payment_id = + node_a.bolt11_payment().send_using_amount(&invoice, overpaid_amount_msat).unwrap(); expect_event!(node_a, PaymentSuccessful); let received_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => { @@ -452,21 +564,29 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_amount, overpaid_amount_msat); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(overpaid_amount_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. 
})); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(overpaid_amount_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); // Test "zero-amount" invoice payment println!("\nB receive_variable_amount_payment"); - let variable_amount_invoice = node_b.receive_variable_amount_payment(&"asdf", 9217).unwrap(); + let variable_amount_invoice = + node_b.bolt11_payment().receive_variable_amount(&"asdf", 9217).unwrap(); let determined_amount_msat = 2345_678; - assert_eq!(Err(NodeError::InvalidInvoice), node_a.send_payment(&variable_amount_invoice)); - println!("\nA send_payment_using_amount"); - let payment_hash = - node_a.send_payment_using_amount(&variable_amount_invoice, determined_amount_msat).unwrap(); + assert_eq!( + Err(NodeError::InvalidInvoice), + node_a.bolt11_payment().send(&variable_amount_invoice) + ); + println!("\nA send_using_amount"); + let payment_id = node_a + .bolt11_payment() + .send_using_amount(&variable_amount_invoice, determined_amount_msat) + .unwrap(); expect_event!(node_a, PaymentSuccessful); let received_amount = match node_b.wait_next_event() { @@ -480,18 +600,107 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_amount, determined_amount_msat); - assert_eq!(node_a.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_a.payment(&payment_hash).unwrap().direction, PaymentDirection::Outbound); - assert_eq!(node_a.payment(&payment_hash).unwrap().amount_msat, Some(determined_amount_msat)); - assert_eq!(node_b.payment(&payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!(node_b.payment(&payment_hash).unwrap().amount_msat, Some(determined_amount_msat)); + assert_eq!(node_a.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); + assert!(matches!(node_a.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + assert_eq!(node_b.payment(&payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&payment_id).unwrap().amount_msat, Some(determined_amount_msat)); + assert!(matches!(node_b.payment(&payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + + // Test claiming manually registered payments. 
+ let invoice_amount_3_msat = 5_532_000; + let manual_preimage = PaymentPreimage([42u8; 32]); + let manual_payment_hash = PaymentHash(Sha256::hash(&manual_preimage.0).to_byte_array()); + let manual_invoice = node_b + .bolt11_payment() + .receive_for_hash(invoice_amount_3_msat, &"asdf", 9217, manual_payment_hash) + .unwrap(); + let manual_payment_id = node_a.bolt11_payment().send(&manual_invoice).unwrap(); + + let claimable_amount_msat = expect_payment_claimable_event!( + node_b, + manual_payment_id, + manual_payment_hash, + invoice_amount_3_msat + ); + node_b + .bolt11_payment() + .claim_for_hash(manual_payment_hash, claimable_amount_msat, manual_preimage) + .unwrap(); + expect_payment_received_event!(node_b, claimable_amount_msat); + expect_payment_successful_event!(node_a, Some(manual_payment_id), None); + assert_eq!(node_a.payment(&manual_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&manual_payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!( + node_a.payment(&manual_payment_id).unwrap().amount_msat, + Some(invoice_amount_3_msat) + ); + assert!(matches!(node_a.payment(&manual_payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + assert_eq!(node_b.payment(&manual_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&manual_payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!( + node_b.payment(&manual_payment_id).unwrap().amount_msat, + Some(invoice_amount_3_msat) + ); + assert!(matches!(node_b.payment(&manual_payment_id).unwrap().kind, PaymentKind::Bolt11 { .. })); + + // Test failing manually registered payments. + let invoice_amount_4_msat = 5_532_000; + let manual_fail_preimage = PaymentPreimage([43u8; 32]); + let manual_fail_payment_hash = + PaymentHash(Sha256::hash(&manual_fail_preimage.0).to_byte_array()); + let manual_fail_invoice = node_b + .bolt11_payment() + .receive_for_hash(invoice_amount_3_msat, &"asdf", 9217, manual_fail_payment_hash) + .unwrap(); + let manual_fail_payment_id = node_a.bolt11_payment().send(&manual_fail_invoice).unwrap(); + + expect_payment_claimable_event!( + node_b, + manual_fail_payment_id, + manual_fail_payment_hash, + invoice_amount_4_msat + ); + node_b.bolt11_payment().fail_for_hash(manual_fail_payment_hash).unwrap(); + expect_event!(node_a, PaymentFailed); + assert_eq!(node_a.payment(&manual_fail_payment_id).unwrap().status, PaymentStatus::Failed); + assert_eq!( + node_a.payment(&manual_fail_payment_id).unwrap().direction, + PaymentDirection::Outbound + ); + assert_eq!( + node_a.payment(&manual_fail_payment_id).unwrap().amount_msat, + Some(invoice_amount_4_msat) + ); + assert!(matches!( + node_a.payment(&manual_fail_payment_id).unwrap().kind, + PaymentKind::Bolt11 { .. } + )); + assert_eq!(node_b.payment(&manual_fail_payment_id).unwrap().status, PaymentStatus::Failed); + assert_eq!( + node_b.payment(&manual_fail_payment_id).unwrap().direction, + PaymentDirection::Inbound + ); + assert_eq!( + node_b.payment(&manual_fail_payment_id).unwrap().amount_msat, + Some(invoice_amount_4_msat) + ); + assert!(matches!( + node_b.payment(&manual_fail_payment_id).unwrap().kind, + PaymentKind::Bolt11 { .. 
} + )); // Test spontaneous/keysend payments println!("\nA send_spontaneous_payment"); let keysend_amount_msat = 2500_000; - let keysend_payment_hash = - node_a.send_spontaneous_payment(keysend_amount_msat, node_b.node_id()).unwrap(); + let tlv1 = TlvEntry { r#type: 131073, value: vec![0x00, 0x11, 0x22, 0x33] }; + let tlv2 = TlvEntry { r#type: 131075, value: vec![0xaa, 0xbb] }; + let keysend_payment_id = node_a + .spontaneous_payment() + .send(keysend_amount_msat, node_b.node_id(), vec![tlv1, tlv2]) + .unwrap(); expect_event!(node_a, PaymentSuccessful); let received_keysend_amount = match node_b.wait_next_event() { ref e @ Event::PaymentReceived { amount_msat, .. } => { @@ -504,24 +713,31 @@ pub(crate) fn do_channel_full_cycle( }, }; assert_eq!(received_keysend_amount, keysend_amount_msat); - assert_eq!(node_a.payment(&keysend_payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!( - node_a.payment(&keysend_payment_hash).unwrap().direction, - PaymentDirection::Outbound - ); - assert_eq!( - node_a.payment(&keysend_payment_hash).unwrap().amount_msat, - Some(keysend_amount_msat) - ); - assert_eq!(node_b.payment(&keysend_payment_hash).unwrap().status, PaymentStatus::Succeeded); - assert_eq!(node_b.payment(&keysend_payment_hash).unwrap().direction, PaymentDirection::Inbound); - assert_eq!( - node_b.payment(&keysend_payment_hash).unwrap().amount_msat, - Some(keysend_amount_msat) - ); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Outbound); + assert_eq!(node_a.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); + assert!(matches!( + node_a.payment(&keysend_payment_id).unwrap().kind, + PaymentKind::Spontaneous { .. } + )); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().status, PaymentStatus::Succeeded); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().direction, PaymentDirection::Inbound); + assert_eq!(node_b.payment(&keysend_payment_id).unwrap().amount_msat, Some(keysend_amount_msat)); + assert!(matches!( + node_b.payment(&keysend_payment_id).unwrap().kind, + PaymentKind::Spontaneous { .. } + )); + assert_eq!(node_a.list_payments().len(), 6); + assert_eq!(node_b.list_payments().len(), 7); + + println!("\nB close_channel (force: {})", force_close); + if force_close { + std::thread::sleep(Duration::from_secs(1)); + node_a.force_close_channel(&user_channel_id, node_b.node_id()).unwrap(); + } else { + node_a.close_channel(&user_channel_id, node_b.node_id()).unwrap(); + } - println!("\nB close_channel"); - node_b.close_channel(&user_channel_id, node_a.node_id()).unwrap(); expect_event!(node_a, ChannelClosed); expect_event!(node_b, ChannelClosed); @@ -531,9 +747,91 @@ pub(crate) fn do_channel_full_cycle( node_a.sync_wallets().unwrap(); node_b.sync_wallets().unwrap(); + if force_close { + // Check node_b properly sees all balances and sweeps them. + assert_eq!(node_b.list_balances().lightning_balances.len(), 1); + match node_b.list_balances().lightning_balances[0] { + LightningBalance::ClaimableAwaitingConfirmations { + counterparty_node_id, + confirmation_height, + .. 
+ } => { + assert_eq!(counterparty_node_id, node_a.node_id()); + let cur_height = node_b.status().current_best_block.height; + let blocks_to_go = confirmation_height - cur_height; + generate_blocks_and_wait(&bitcoind, electrsd, blocks_to_go as usize); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + }, + _ => panic!("Unexpected balance state!"), + } + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert_eq!(node_b.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_b.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::BroadcastAwaitingConfirmation { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 1); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert_eq!(node_b.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_b.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::AwaitingThresholdConfirmations { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 5); + node_b.sync_wallets().unwrap(); + node_a.sync_wallets().unwrap(); + + assert!(node_b.list_balances().lightning_balances.is_empty()); + assert!(node_b.list_balances().pending_balances_from_channel_closures.is_empty()); + + // Check node_a properly sees all balances and sweeps them. + assert_eq!(node_a.list_balances().lightning_balances.len(), 1); + match node_a.list_balances().lightning_balances[0] { + LightningBalance::ClaimableAwaitingConfirmations { + counterparty_node_id, + confirmation_height, + .. + } => { + assert_eq!(counterparty_node_id, node_b.node_id()); + let cur_height = node_a.status().current_best_block.height; + let blocks_to_go = confirmation_height - cur_height; + generate_blocks_and_wait(&bitcoind, electrsd, blocks_to_go as usize); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + }, + _ => panic!("Unexpected balance state!"), + } + + assert!(node_a.list_balances().lightning_balances.is_empty()); + assert_eq!(node_a.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_a.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::BroadcastAwaitingConfirmation { .. } => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 1); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + + assert!(node_a.list_balances().lightning_balances.is_empty()); + assert_eq!(node_a.list_balances().pending_balances_from_channel_closures.len(), 1); + match node_a.list_balances().pending_balances_from_channel_closures[0] { + PendingSweepBalance::AwaitingThresholdConfirmations { .. 
} => {}, + _ => panic!("Unexpected balance state!"), + } + generate_blocks_and_wait(&bitcoind, electrsd, 5); + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + } + let sum_of_all_payments_sat = (push_msat + invoice_amount_1_msat + overpaid_amount_msat + + invoice_amount_3_msat + determined_amount_msat + keysend_amount_msat) / 1000; @@ -542,11 +840,14 @@ pub(crate) fn do_channel_full_cycle( let node_a_lower_bound_sat = node_a_upper_bound_sat - onchain_fee_buffer_sat; assert!(node_a.list_balances().spendable_onchain_balance_sats > node_a_lower_bound_sat); assert!(node_a.list_balances().spendable_onchain_balance_sats < node_a_upper_bound_sat); - let expected_final_amount_node_b_sat = premine_amount_sat + sum_of_all_payments_sat; - assert_eq!( - node_b.list_balances().spendable_onchain_balance_sats, - expected_final_amount_node_b_sat - ); + + let node_b_upper_bound_sat = premine_amount_sat + sum_of_all_payments_sat; + let node_b_lower_bound_sat = node_b_upper_bound_sat - onchain_fee_buffer_sat; + assert!(node_b.list_balances().spendable_onchain_balance_sats > node_b_lower_bound_sat); + assert!(node_b.list_balances().spendable_onchain_balance_sats <= node_b_upper_bound_sat); + + assert_eq!(node_a.list_balances().total_anchor_channels_reserve_sats, 0); + assert_eq!(node_b.list_balances().total_anchor_channels_reserve_sats, 0); // Check we handled all events assert_eq!(node_a.next_event(), None); diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs index a5c11ad1b..7aea13620 100644 --- a/tests/integration_tests_cln.rs +++ b/tests/integration_tests_cln.rs @@ -36,7 +36,7 @@ fn test_cln() { common::generate_blocks_and_wait(&bitcoind_client, &electrs_client, 1); // Setup LDK Node - let config = common::random_config(); + let config = common::random_config(true); let mut builder = Builder::from_config(config); builder.set_esplora_server("http://127.0.0.1:3002".to_string()); @@ -44,7 +44,7 @@ fn test_cln() { node.start().unwrap(); // Premine some funds and distribute - let address = node.new_onchain_address().unwrap(); + let address = node.onchain_payment().new_address().unwrap(); let premine_amount = Amount::from_sat(5_000_000); common::premine_and_distribute_funds( &bitcoind_client, @@ -88,16 +88,17 @@ fn test_cln() { let funding_txo = common::expect_channel_pending_event!(node, cln_node_id); common::wait_for_tx(&electrs_client, funding_txo.txid); common::generate_blocks_and_wait(&bitcoind_client, &electrs_client, 6); + node.sync_wallets().unwrap(); let user_channel_id = common::expect_channel_ready_event!(node, cln_node_id); // Send a payment to CLN let mut rng = thread_rng(); let rand_label: String = (0..7).map(|_| rng.sample(Alphanumeric) as char).collect(); let cln_invoice = - cln_client.invoice(Some(2_500_000), &rand_label, &rand_label, None, None, None).unwrap(); + cln_client.invoice(Some(10_000_000), &rand_label, &rand_label, None, None, None).unwrap(); let parsed_invoice = Bolt11Invoice::from_str(&cln_invoice.bolt11).unwrap(); - node.send_payment(&parsed_invoice).unwrap(); + node.bolt11_payment().send(&parsed_invoice).unwrap(); common::expect_event!(node, PaymentSuccessful); let cln_listed_invoices = cln_client.listinvoices(Some(&rand_label), None, None, None).unwrap().invoices; @@ -106,7 +107,7 @@ fn test_cln() { // Send a payment to LDK let rand_label: String = (0..7).map(|_| rng.sample(Alphanumeric) as char).collect(); - let ldk_invoice = node.receive_payment(2_500_000, &rand_label, 3600).unwrap(); + let ldk_invoice = 
node.bolt11_payment().receive(10_000_000, &rand_label, 3600).unwrap(); cln_client.pay(&ldk_invoice.to_string(), Default::default()).unwrap(); common::expect_event!(node, PaymentReceived); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 1c5a67521..37ddeb9a7 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -1,38 +1,66 @@ mod common; use common::{ - do_channel_full_cycle, expect_event, generate_blocks_and_wait, open_channel, + do_channel_full_cycle, expect_event, expect_payment_received_event, + expect_payment_successful_event, generate_blocks_and_wait, open_channel, premine_and_distribute_funds, random_config, setup_bitcoind_and_electrsd, setup_builder, setup_node, setup_two_nodes, wait_for_tx, TestSyncStore, }; +use ldk_node::payment::PaymentKind; use ldk_node::{Builder, Event, NodeError}; +use lightning::ln::channelmanager::PaymentId; +use lightning::util::persist::KVStore; + use bitcoin::{Amount, Network}; use std::sync::Arc; +use crate::common::expect_channel_ready_event; + #[test] fn channel_full_cycle() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, false); +} + +#[test] +fn channel_full_cycle_force_close() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, true); +} + +#[test] +fn channel_full_cycle_force_close_trusted_no_reserve() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, true); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, true, true); } #[test] fn channel_full_cycle_0conf() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, true); - do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true) + let (node_a, node_b) = setup_two_nodes(&electrsd, true, true, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, true, true, false) +} + +#[test] +fn channel_full_cycle_legacy_staticremotekey() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false, false); + do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false, false, false); } #[test] fn channel_open_fails_when_funds_insufficient() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); let premine_amount_sat = 100_000; @@ -69,7 +97,7 @@ fn multi_hop_sending() { // Setup and fund 5 nodes let mut nodes = Vec::new(); for _ in 0..5 { - let config = random_config(); + let config = random_config(true); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); let node = 
builder.build().unwrap(); @@ -77,7 +105,7 @@ fn multi_hop_sending() { nodes.push(node); } - let addresses = nodes.iter().map(|n| n.new_onchain_address().unwrap()).collect(); + let addresses = nodes.iter().map(|n| n.onchain_payment().new_address().unwrap()).collect(); let premine_amount_sat = 5_000_000; premine_and_distribute_funds( &bitcoind.client, @@ -130,16 +158,17 @@ fn multi_hop_sending() { // Sleep a bit for gossip to propagate. std::thread::sleep(std::time::Duration::from_secs(1)); - let invoice = nodes[4].receive_payment(2_500_000, &"asdf", 9217).unwrap(); - nodes[0].send_payment(&invoice).unwrap(); + let invoice = nodes[4].bolt11_payment().receive(2_500_000, &"asdf", 9217).unwrap(); + nodes[0].bolt11_payment().send(&invoice).unwrap(); - expect_event!(nodes[4], PaymentReceived); - expect_event!(nodes[0], PaymentSuccessful); + let payment_id = expect_payment_received_event!(&nodes[4], 2_500_000); + let fee_paid_msat = Some(2000); + expect_payment_successful_event!(nodes[0], payment_id, Some(fee_paid_msat)); } #[test] fn connect_to_public_testnet_esplora() { - let mut config = random_config(); + let mut config = random_config(true); config.network = Network::Testnet; setup_builder!(builder, config); builder.set_esplora_server("https://blockstream.info/testnet/api".to_string()); @@ -151,11 +180,12 @@ fn connect_to_public_testnet_esplora() { #[test] fn start_stop_reinit() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let config = random_config(); + let config = random_config(true); let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); - let test_sync_store = Arc::new(TestSyncStore::new(config.storage_dir_path.clone().into())); + let test_sync_store: Arc = + Arc::new(TestSyncStore::new(config.storage_dir_path.clone().into())); setup_builder!(builder, config); builder.set_esplora_server(esplora_url.clone()); @@ -166,7 +196,7 @@ fn start_stop_reinit() { let expected_node_id = node.node_id(); assert_eq!(node.start(), Err(NodeError::AlreadyRunning)); - let funding_address = node.new_onchain_address().unwrap(); + let funding_address = node.onchain_payment().new_address().unwrap(); assert_eq!(node.list_balances().total_onchain_balance_sats, 0); @@ -218,10 +248,10 @@ fn start_stop_reinit() { #[test] fn onchain_spend_receive() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); - let addr_a = node_a.new_onchain_address().unwrap(); - let addr_b = node_b.new_onchain_address().unwrap(); + let addr_a = node_a.onchain_payment().new_address().unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); premine_and_distribute_funds( &bitcoind.client, @@ -234,9 +264,12 @@ fn onchain_spend_receive() { node_b.sync_wallets().unwrap(); assert_eq!(node_b.list_balances().spendable_onchain_balance_sats, 100000); - assert_eq!(Err(NodeError::InsufficientFunds), node_a.send_to_onchain_address(&addr_b, 1000)); + assert_eq!( + Err(NodeError::InsufficientFunds), + node_a.onchain_payment().send_to_address(&addr_b, 1000) + ); - let txid = node_b.send_to_onchain_address(&addr_a, 1000).unwrap(); + let txid = node_b.onchain_payment().send_to_address(&addr_a, 1000).unwrap(); generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); wait_for_tx(&electrsd.client, txid); @@ -247,8 +280,8 @@ fn onchain_spend_receive() { assert!(node_b.list_balances().spendable_onchain_balance_sats > 98000); 
assert!(node_b.list_balances().spendable_onchain_balance_sats < 100000); - let addr_b = node_b.new_onchain_address().unwrap(); - let txid = node_a.send_all_to_onchain_address(&addr_b).unwrap(); + let addr_b = node_b.onchain_payment().new_address().unwrap(); + let txid = node_a.onchain_payment().send_all_to_address(&addr_b).unwrap(); generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); wait_for_tx(&electrsd.client, txid); @@ -263,7 +296,7 @@ fn onchain_spend_receive() { #[test] fn sign_verify_msg() { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let config = random_config(); + let config = random_config(true); let node = setup_node(&electrsd, config); // Tests arbitrary message signing and later verification @@ -281,13 +314,17 @@ fn connection_restart_behavior() { fn do_connection_restart_behavior(persist: bool) { let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); - let (node_a, node_b) = setup_two_nodes(&electrsd, false); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, false, false); let node_id_a = node_a.node_id(); let node_id_b = node_b.node_id(); let node_addr_b = node_b.listening_addresses().unwrap().first().unwrap().clone(); - std::thread::sleep(std::time::Duration::from_secs(1)); + + while !node_b.status().is_listening { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + node_a.connect(node_id_b, node_addr_b, persist).unwrap(); let peer_details_a = node_a.list_peers().first().unwrap().clone(); @@ -324,3 +361,194 @@ fn do_connection_restart_behavior(persist: bool) { assert!(node_b.list_peers().is_empty()); } } + +#[test] +fn concurrent_connections_succeed() { + let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + + let node_a = Arc::new(node_a); + let node_b = Arc::new(node_b); + + let node_id_b = node_b.node_id(); + let node_addr_b = node_b.listening_addresses().unwrap().first().unwrap().clone(); + + while !node_b.status().is_listening { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + + let mut handles = Vec::new(); + for _ in 0..10 { + let thread_node = Arc::clone(&node_a); + let thread_addr = node_addr_b.clone(); + let handle = std::thread::spawn(move || { + thread_node.connect(node_id_b, thread_addr, false).unwrap(); + }); + handles.push(handle); + } + + for h in handles { + h.join().unwrap(); + } +} + +#[test] +fn simple_bolt12_send_receive() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let (node_a, node_b) = setup_two_nodes(&electrsd, false, true, false); + + let address_a = node_a.onchain_payment().new_address().unwrap(); + let premine_amount_sat = 5_000_000; + premine_and_distribute_funds( + &bitcoind.client, + &electrsd.client, + vec![address_a], + Amount::from_sat(premine_amount_sat), + ); + + node_a.sync_wallets().unwrap(); + open_channel(&node_a, &node_b, 4_000_000, true, &electrsd); + + generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); + + node_a.sync_wallets().unwrap(); + node_b.sync_wallets().unwrap(); + + expect_channel_ready_event!(node_a, node_b.node_id()); + expect_channel_ready_event!(node_b, node_a.node_id()); + + // Sleep until we broadcasted a node announcement. + while node_b.status().latest_node_announcement_broadcast_timestamp.is_none() { + std::thread::sleep(std::time::Duration::from_millis(10)); + } + + // Sleep one more sec to make sure the node announcement propagates. 
+	std::thread::sleep(std::time::Duration::from_secs(1));
+
+	let expected_amount_msat = 100_000_000;
+	let offer = node_b.bolt12_payment().receive(expected_amount_msat, "asdf").unwrap();
+	let payment_id = node_a.bolt12_payment().send(&offer, None).unwrap();
+
+	expect_payment_successful_event!(node_a, Some(payment_id), None);
+	let node_a_payments = node_a.list_payments();
+	assert_eq!(node_a_payments.len(), 1);
+	match node_a_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Offer { hash, preimage, secret: _, offer_id } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			assert_eq!(offer_id, offer.id());
+			//TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12
+			//API currently doesn't allow to do that.
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	}
+	assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(expected_amount_msat));
+
+	expect_payment_received_event!(node_b, expected_amount_msat);
+	let node_b_payments = node_b.list_payments();
+	assert_eq!(node_b_payments.len(), 1);
+	match node_b_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Offer { hash, preimage, secret, offer_id } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			assert!(secret.is_some());
+			assert_eq!(offer_id, offer.id());
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	}
+	assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(expected_amount_msat));
+
+	// Test send_using_amount
+	let offer_amount_msat = 100_000_000;
+	let less_than_offer_amount = offer_amount_msat - 10_000;
+	let expected_amount_msat = offer_amount_msat + 10_000;
+	let offer = node_b.bolt12_payment().receive(offer_amount_msat, "asdf").unwrap();
+	assert!(node_a
+		.bolt12_payment()
+		.send_using_amount(&offer, None, less_than_offer_amount)
+		.is_err());
+	let payment_id =
+		node_a.bolt12_payment().send_using_amount(&offer, None, expected_amount_msat).unwrap();
+
+	expect_payment_successful_event!(node_a, Some(payment_id), None);
+	let node_a_payments = node_a.list_payments_with_filter(|p| p.id == payment_id);
+	assert_eq!(node_a_payments.len(), 1);
+	let payment_hash = match node_a_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Offer { hash, preimage, secret: _, offer_id } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			assert_eq!(offer_id, offer.id());
+			//TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12
+			//API currently doesn't allow to do that.
+			hash.unwrap()
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	};
+	assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(expected_amount_msat));
+
+	expect_payment_received_event!(node_b, expected_amount_msat);
+	let node_b_payment_id = PaymentId(payment_hash.0);
+	let node_b_payments = node_b.list_payments_with_filter(|p| p.id == node_b_payment_id);
+	assert_eq!(node_b_payments.len(), 1);
+	match node_b_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Offer { hash, preimage, secret, offer_id } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			assert!(secret.is_some());
+			assert_eq!(offer_id, offer.id());
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	}
+	assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(expected_amount_msat));
+
+	// Now node_b refunds the amount node_a just overpaid.
+	let overpaid_amount = expected_amount_msat - offer_amount_msat;
+	let refund = node_b.bolt12_payment().initiate_refund(overpaid_amount, 3600).unwrap();
+	let invoice = node_a.bolt12_payment().request_refund_payment(&refund).unwrap();
+	expect_payment_received_event!(node_a, overpaid_amount);
+
+	let node_b_payment_id = node_b
+		.list_payments_with_filter(|p| p.amount_msat == Some(overpaid_amount))
+		.first()
+		.unwrap()
+		.id;
+	expect_payment_successful_event!(node_b, Some(node_b_payment_id), None);
+
+	let node_b_payments = node_b.list_payments_with_filter(|p| p.id == node_b_payment_id);
+	assert_eq!(node_b_payments.len(), 1);
+	match node_b_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Refund { hash, preimage, secret: _ } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			//TODO: We should eventually set and assert the secret sender-side, too, but the BOLT12
+			//API currently doesn't allow to do that.
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	}
+	assert_eq!(node_b_payments.first().unwrap().amount_msat, Some(overpaid_amount));
+
+	let node_a_payment_id = PaymentId(invoice.payment_hash().0);
+	let node_a_payments = node_a.list_payments_with_filter(|p| p.id == node_a_payment_id);
+	assert_eq!(node_a_payments.len(), 1);
+	match node_a_payments.first().unwrap().kind {
+		PaymentKind::Bolt12Refund { hash, preimage, secret } => {
+			assert!(hash.is_some());
+			assert!(preimage.is_some());
+			assert!(secret.is_some());
+		},
+		_ => {
+			panic!("Unexpected payment kind");
+		},
+	}
+	assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(overpaid_amount));
+}
diff --git a/tests/integration_tests_vss.rs b/tests/integration_tests_vss.rs
index 26d0456d4..2a57ccffc 100644
--- a/tests/integration_tests_vss.rs
+++ b/tests/integration_tests_vss.rs
@@ -9,7 +9,7 @@ fn channel_full_cycle_with_vss_store() {
 	let (bitcoind, electrsd) = common::setup_bitcoind_and_electrsd();
 	println!("== Node A ==");
 	let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap());
-	let config_a = common::random_config();
+	let config_a = common::random_config(true);
 	let mut builder_a = Builder::from_config(config_a);
 	builder_a.set_esplora_server(esplora_url.clone());
 	let vss_base_url = std::env::var("TEST_VSS_BASE_URL").unwrap();
@@ -18,11 +18,19 @@ fn channel_full_cycle_with_vss_store() {
 	node_a.start().unwrap();

 	println!("\n== Node B ==");
-	let config_b = common::random_config();
+	let config_b = common::random_config(true);
 	let mut builder_b = Builder::from_config(config_b);
 	builder_b.set_esplora_server(esplora_url);
 	let node_b = builder_b.build_with_vss_store(vss_base_url, "node_2_store".to_string()).unwrap();
 	node_b.start().unwrap();

-	common::do_channel_full_cycle(node_a, node_b, &bitcoind.client, &electrsd.client, false);
+	common::do_channel_full_cycle(
+		node_a,
+		node_b,
+		&bitcoind.client,
+		&electrsd.client,
+		false,
+		true,
+		false,
+	);
 }
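For reference: the test changes above all follow from the restructured payment API, in which per-type handlers are accessed via `Node::onchain_payment()`, `Node::bolt11_payment()`, and `Node::bolt12_payment()`. The sketch below is illustrative only and not part of the diff; it mirrors calls that appear in the tests and assumes `node` and `peer` are already started `Node` instances with a usable channel between them (setup, event handling, and error handling are omitted).

use ldk_node::Node;

// Minimal sketch of the per-type payment handlers, assuming `node` and `peer`
// are started nodes sharing a channel. Amounts and descriptions mirror the
// values used by the tests above.
fn payment_api_sketch(node: &Node, peer: &Node) {
	// On-chain handler: derive a fresh receive address and inspect balances.
	let address = node.onchain_payment().new_address().unwrap();
	let spendable = node.list_balances().spendable_onchain_balance_sats;
	println!("Fund {} (currently spendable: {} sats)", address, spendable);

	// BOLT11 handler: create an invoice (amount in msat, description, expiry
	// in seconds) and have the peer pay it, as in `multi_hop_sending`.
	let invoice = node.bolt11_payment().receive(2_500_000, &"asdf", 9217).unwrap();
	peer.bolt11_payment().send(&invoice).unwrap();

	// BOLT12 handler: create an offer and have the peer pay it, as in
	// `simple_bolt12_send_receive`.
	let offer = node.bolt12_payment().receive(100_000_000, "asdf").unwrap();
	let _payment_id = peer.bolt12_payment().send(&offer, None).unwrap();
}

Payment outcomes then surface through the event queue (e.g. `PaymentSuccessful` / `PaymentReceived`), which the tests assert via the `expect_payment_successful_event!` and `expect_payment_received_event!` macros.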