@@ -3519,22 +3519,27 @@ macro_rules! emit_initial_channel_ready_event {
 /// Requires that `$chan.blocked_monitor_updates_pending() == 0` and the in-flight monitor update
 /// set for this channel is empty!
 macro_rules! handle_monitor_update_completion {
-	($self: ident, $peer_state_lock: expr, $peer_state: expr, $per_peer_state_lock: expr, $chan: expr) => { {
-		let channel_id = $chan.context.channel_id();
-		let outbound_scid_alias = $chan.context().outbound_scid_alias();
-		let counterparty_node_id = $chan.context.get_counterparty_node_id();
+	($self: ident, $peer_state_lock: expr, $peer_state: expr, $per_peer_state_lock: expr, $chan: expr) => {{
+		let chan_id = $chan.context.channel_id();
+		let outbound_alias = $chan.context().outbound_scid_alias();
+		let cp_node_id = $chan.context.get_counterparty_node_id();
 		#[cfg(debug_assertions)]
 		{
-			let in_flight_updates =
-				$peer_state.in_flight_monitor_updates.get(&channel_id);
+			let in_flight_updates = $peer_state.in_flight_monitor_updates.get(&chan_id);
 			assert!(in_flight_updates.map(|(_, updates)| updates.is_empty()).unwrap_or(true));
 			assert_eq!($chan.blocked_monitor_updates_pending(), 0);
 		}
 		let logger = WithChannelContext::from(&$self.logger, &$chan.context, None);
-		let updates = $chan.monitor_updating_restored(&&logger,
-			&$self.node_signer, $self.chain_hash, &*$self.config.read().unwrap(),
+		let updates = $chan.monitor_updating_restored(
+			&&logger,
+			&$self.node_signer,
+			$self.chain_hash,
+			&*$self.config.read().unwrap(),
 			$self.best_block.read().unwrap().height,
-			|htlc_id| $self.path_for_release_held_htlc(htlc_id, outbound_scid_alias, &channel_id, &counterparty_node_id));
+			|htlc_id| {
+				$self.path_for_release_held_htlc(htlc_id, outbound_alias, &chan_id, &cp_node_id)
+			},
+		);
 		let channel_update = if updates.channel_ready.is_some()
 			&& $chan.context.is_usable()
 			&& $peer_state.is_connected
@@ -3545,36 +3550,52 @@ macro_rules! handle_monitor_update_completion {
 			// channels, but there's no reason not to just inform our counterparty of our fees
 			// now.
 			if let Ok((msg, _, _)) = $self.get_channel_update_for_unicast($chan) {
-				Some(MessageSendEvent::SendChannelUpdate {
-					node_id: counterparty_node_id,
-					msg,
-				})
-			} else { None }
-		} else { None };
+				Some(MessageSendEvent::SendChannelUpdate { node_id: cp_node_id, msg })
+			} else {
+				None
+			}
+		} else {
+			None
+		};
 
-		let update_actions = $peer_state.monitor_update_blocked_actions
-			.remove(&channel_id).unwrap_or(Vec::new());
+		let update_actions =
+			$peer_state.monitor_update_blocked_actions.remove(&chan_id).unwrap_or(Vec::new());
 
 		let (htlc_forwards, decode_update_add_htlcs) = $self.handle_channel_resumption(
-			&mut $peer_state.pending_msg_events, $chan, updates.raa,
-			updates.commitment_update, updates.commitment_order, updates.accepted_htlcs,
-			updates.pending_update_adds, updates.funding_broadcastable, updates.channel_ready,
-			updates.announcement_sigs, updates.tx_signatures, None, updates.channel_ready_order,
+			&mut $peer_state.pending_msg_events,
+			$chan,
+			updates.raa,
+			updates.commitment_update,
+			updates.commitment_order,
+			updates.accepted_htlcs,
+			updates.pending_update_adds,
+			updates.funding_broadcastable,
+			updates.channel_ready,
+			updates.announcement_sigs,
+			updates.tx_signatures,
+			None,
+			updates.channel_ready_order,
 		);
 		if let Some(upd) = channel_update {
 			$peer_state.pending_msg_events.push(upd);
 		}
 
-		let unbroadcasted_batch_funding_txid = $chan.context.unbroadcasted_batch_funding_txid(&$chan.funding);
+		let unbroadcasted_batch_funding_txid =
+			$chan.context.unbroadcasted_batch_funding_txid(&$chan.funding);
 		core::mem::drop($peer_state_lock);
 		core::mem::drop($per_peer_state_lock);
 
 		$self.post_monitor_update_unlock(
-			channel_id, counterparty_node_id, unbroadcasted_batch_funding_txid, update_actions,
-			htlc_forwards, decode_update_add_htlcs, updates.finalized_claimed_htlcs,
+			chan_id,
+			cp_node_id,
+			unbroadcasted_batch_funding_txid,
+			update_actions,
+			htlc_forwards,
+			decode_update_add_htlcs,
+			updates.finalized_claimed_htlcs,
 			updates.failed_htlcs,
 		);
-	} }
+	}};
 }
 
 /// Returns whether the monitor update is completed, `false` if the update is in-progress.
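
The doc comment above pins down the macro's contract: completion handling must only run once no monitor updates for the channel are blocked or in flight, which the `#[cfg(debug_assertions)]` block re-checks at runtime. A minimal standalone sketch of that guard, with hypothetical types and field names rather than LDK's actual API:

```rust
use std::collections::HashMap;

// Hypothetical simplified types; LDK's real `PeerState` and channel types differ.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct ChannelId([u8; 32]);

struct PeerState {
    // Per-channel list of monitor updates still awaiting completion.
    in_flight_monitor_updates: HashMap<ChannelId, Vec<u64>>,
}

struct Chan {
    blocked_monitor_updates: Vec<u64>,
}

impl Chan {
    fn blocked_monitor_updates_pending(&self) -> usize {
        self.blocked_monitor_updates.len()
    }
}

/// True only when it is safe to run monitor-update-completion handling for
/// `chan_id`, mirroring the macro's documented requirement.
fn can_complete(peer_state: &PeerState, chan: &Chan, chan_id: &ChannelId) -> bool {
    let in_flight_empty = peer_state
        .in_flight_monitor_updates
        .get(chan_id)
        .map(|updates| updates.is_empty())
        .unwrap_or(true);
    in_flight_empty && chan.blocked_monitor_updates_pending() == 0
}
```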
@@ -9330,18 +9351,17 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		let mut funding_batch_states = self.funding_batch_states.lock().unwrap();
 		let mut batch_completed = false;
 		if let Some(batch_state) = funding_batch_states.get_mut(&txid) {
-			let channel_state = batch_state.iter_mut().find(|(chan_id, pubkey, _)| (
-				*chan_id == channel_id &&
-				*pubkey == counterparty_node_id
-			));
+			let channel_state = batch_state.iter_mut().find(|(chan_id, pubkey, _)| {
+				*chan_id == channel_id && *pubkey == counterparty_node_id
+			});
 			if let Some(channel_state) = channel_state {
 				channel_state.2 = true;
 			} else {
-				debug_assert!(false, "Missing channel batch state for channel which completed initial monitor update");
+				debug_assert!(false, "Missing batch state after initial monitor update");
 			}
 			batch_completed = batch_state.iter().all(|(_, _, completed)| *completed);
 		} else {
-			debug_assert!(false, "Missing batch state for channel which completed initial monitor update");
+			debug_assert!(false, "Missing batch state after initial monitor update");
 		}
 
 		// When all channels in a batched funding transaction have become ready, it is not necessary
@@ -9353,19 +9373,21 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		for (channel_id, counterparty_node_id, _) in removed_batch_state {
 			if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
 				let mut peer_state = peer_state_mutex.lock().unwrap();
-				if let Some(funded_chan) = peer_state.channel_by_id
-					.get_mut(&channel_id)
-					.and_then(Channel::as_funded_mut)
-				{
-					batch_funding_tx = batch_funding_tx.or_else(|| funded_chan.context.unbroadcasted_funding(&funded_chan.funding));
+
+				let chan = peer_state.channel_by_id.get_mut(&channel_id);
+				if let Some(funded_chan) = chan.and_then(Channel::as_funded_mut) {
+					batch_funding_tx = batch_funding_tx.or_else(|| {
+						funded_chan.context.unbroadcasted_funding(&funded_chan.funding)
+					});
 					funded_chan.set_batch_ready();
+
 					let mut pending_events = self.pending_events.lock().unwrap();
 					emit_channel_pending_event!(pending_events, funded_chan);
 				}
 			}
 		}
 		if let Some(tx) = batch_funding_tx {
-			log_info!(self.logger, "Broadcasting batch funding transaction with txid {}", tx.compute_txid());
+			log_info!(self.logger, "Broadcasting batch funding tx {}", tx.compute_txid());
 			self.tx_broadcaster.broadcast_transactions(&[&tx]);
 		}
 	}
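
The truncated comment at the end of the previous hunk states the invariant these two hunks enforce: a batched funding transaction is broadcast only once every channel sharing it has completed its initial monitor update. A simplified sketch of that all-or-nothing bookkeeping, using illustrative types and a two-field tuple in place of LDK's `(channel_id, pubkey, completed)` triple:

```rust
use std::collections::HashMap;

// Illustrative aliases; LDK uses real `Txid` and `ChannelId` types.
type Txid = [u8; 32];
type ChannelId = [u8; 32];

struct BatchStates {
    // txid -> (channel_id, completed) for every channel sharing the funding tx.
    states: HashMap<Txid, Vec<(ChannelId, bool)>>,
}

impl BatchStates {
    /// Marks one channel's initial monitor update complete and returns true
    /// once the whole batch is ready to broadcast.
    fn mark_completed(&mut self, txid: &Txid, channel_id: &ChannelId) -> bool {
        let Some(batch) = self.states.get_mut(txid) else {
            debug_assert!(false, "Missing batch state after initial monitor update");
            return false;
        };
        if let Some(entry) = batch.iter_mut().find(|(id, _)| id == channel_id) {
            entry.1 = true;
        } else {
            debug_assert!(false, "Missing batch state after initial monitor update");
        }
        // Broadcast only when every channel in the batch has completed.
        batch.iter().all(|(_, completed)| *completed)
    }
}
```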
@@ -9381,7 +9403,10 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		}
 		self.finalize_claims(finalized_claimed_htlcs);
 		for failure in failed_htlcs {
-			let receiver = HTLCHandlingFailureType::Forward { node_id: Some(counterparty_node_id), channel_id };
+			let receiver = HTLCHandlingFailureType::Forward {
+				node_id: Some(counterparty_node_id),
+				channel_id,
+			};
 			self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver, None);
 		}
 	}
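
Throughout these hunks, note the ordering discipline: `handle_monitor_update_completion!` explicitly drops `$peer_state_lock` and `$per_peer_state_lock` before `post_monitor_update_unlock` fails HTLCs backwards, finalizes claims, and emits events. A standalone sketch of that unlock-before-acting pattern, with illustrative types rather than LDK's:

```rust
use std::sync::Mutex;

// Hypothetical stand-in for per-peer state guarded by a mutex.
struct PeerState {
    pending_actions: Vec<String>,
}

fn complete_update(peer_state_mutex: &Mutex<PeerState>) {
    // Collect the follow-up work while the lock is held...
    let actions = {
        let mut peer_state = peer_state_mutex.lock().unwrap();
        core::mem::take(&mut peer_state.pending_actions)
    }; // ...and release the lock here, before any of that work runs.

    // Act without holding the lock: follow-up steps that re-take peer or
    // per-peer locks cannot deadlock against this thread.
    for action in actions {
        println!("handling {action}");
    }
}
```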