Skip to content

Commit 8ac5e51

Browse files
committed
Add PaymentHash to Record
Add the `payment_hash` field to `Record` so that it can be tracked in logs.
1 parent 650caa0 commit 8ac5e51

File tree

7 files changed

+104
-86
lines changed

7 files changed

+104
-86
lines changed

lightning/src/chain/chainmonitor.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -790,7 +790,7 @@ where C::Target: chain::Filter,
790790
let monitors = self.monitors.read().unwrap();
791791
match monitors.get(&funding_txo) {
792792
None => {
793-
let logger = WithContext::from(&self.logger, update.counterparty_node_id, Some(channel_id));
793+
let logger = WithContext::from(&self.logger, update.counterparty_node_id, Some(channel_id), None);
794794
log_error!(logger, "Failed to update channel monitor: no such monitor registered");
795795

796796
// We should never ever trigger this from within ChannelManager. Technically a

lightning/src/ln/channel.rs

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -896,12 +896,14 @@ pub(super) struct WithChannelContext<'a, L: Deref> where L::Target: Logger {
896896
pub logger: &'a L,
897897
pub peer_id: Option<PublicKey>,
898898
pub channel_id: Option<ChannelId>,
899+
pub payment_hash: Option<PaymentHash>,
899900
}
900901

901902
impl<'a, L: Deref> Logger for WithChannelContext<'a, L> where L::Target: Logger {
902903
fn log(&self, mut record: Record) {
903904
record.peer_id = self.peer_id;
904905
record.channel_id = self.channel_id;
906+
record.payment_hash = self.payment_hash;
905907
self.logger.log(record)
906908
}
907909
}
@@ -915,6 +917,7 @@ where L::Target: Logger {
915917
logger,
916918
peer_id: Some(context.counterparty_node_id),
917919
channel_id: Some(context.channel_id),
920+
payment_hash: None
918921
}
919922
}
920923
}
@@ -1540,7 +1543,7 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
15401543
L::Target: Logger,
15411544
SP::Target: SignerProvider,
15421545
{
1543-
let logger = WithContext::from(logger, Some(counterparty_node_id), Some(open_channel_fields.temporary_channel_id));
1546+
let logger = WithContext::from(logger, Some(counterparty_node_id), Some(open_channel_fields.temporary_channel_id), None);
15441547
let announced_channel = if (open_channel_fields.channel_flags & 1) == 1 { true } else { false };
15451548

15461549
let channel_value_satoshis = our_funding_satoshis.saturating_add(open_channel_fields.funding_satoshis);
@@ -7703,7 +7706,7 @@ impl<SP: Deref> InboundV1Channel<SP> where SP::Target: SignerProvider {
77037706
F::Target: FeeEstimator,
77047707
L::Target: Logger,
77057708
{
7706-
let logger = WithContext::from(logger, Some(counterparty_node_id), Some(msg.common_fields.temporary_channel_id));
7709+
let logger = WithContext::from(logger, Some(counterparty_node_id), Some(msg.common_fields.temporary_channel_id), None);
77077710

77087711
// First check the channel type is known, failing before we do anything else if we don't
77097712
// support this channel type.

lightning/src/ln/channelmanager.rs

Lines changed: 28 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -2011,7 +2011,7 @@ macro_rules! handle_error {
20112011
let counterparty_node_id = shutdown_res.counterparty_node_id;
20122012
let channel_id = shutdown_res.channel_id;
20132013
let logger = WithContext::from(
2014-
&$self.logger, Some(counterparty_node_id), Some(channel_id),
2014+
&$self.logger, Some(counterparty_node_id), Some(channel_id), None
20152015
);
20162016
log_error!(logger, "Force-closing channel: {}", err.err);
20172017

@@ -2891,7 +2891,7 @@ where
28912891
}
28922892

28932893
let logger = WithContext::from(
2894-
&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id),
2894+
&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id), None
28952895
);
28962896

28972897
log_debug!(logger, "Finishing closure of channel due to {} with {} HTLCs to fail",
@@ -2967,7 +2967,7 @@ where
29672967
} else {
29682968
ClosureReason::HolderForceClosed
29692969
};
2970-
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id));
2970+
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
29712971
if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
29722972
log_error!(logger, "Force-closing channel {}", channel_id);
29732973
let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
@@ -3098,7 +3098,7 @@ where
30983098
($msg: expr, $err_code: expr, $data: expr) => {
30993099
{
31003100
log_info!(
3101-
WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id)),
3101+
WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), Some(msg.payment_hash)),
31023102
"Failed to accept/forward incoming HTLC: {}", $msg
31033103
);
31043104
// If `msg.blinding_point` is set, we must always fail with malformed.
@@ -3262,7 +3262,7 @@ where
32623262
macro_rules! return_err {
32633263
($msg: expr, $err_code: expr, $data: expr) => {
32643264
{
3265-
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id));
3265+
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), Some(msg.payment_hash));
32663266
log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);
32673267
if msg.blinding_point.is_some() {
32683268
return PendingHTLCStatus::Fail(HTLCFailureMsg::Malformed(
@@ -3417,22 +3417,22 @@ where
34173417
&self.secp_ctx, &path, &session_priv, total_value, recipient_onion, cur_height,
34183418
payment_hash, keysend_preimage, prng_seed
34193419
).map_err(|e| {
3420-
let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
3420+
let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None, Some(*payment_hash));
34213421
log_error!(logger, "Failed to build an onion for path for payment hash {}", payment_hash);
34223422
e
34233423
})?;
34243424

34253425
let err: Result<(), _> = loop {
34263426
let (counterparty_node_id, id) = match self.short_to_chan_info.read().unwrap().get(&path.hops.first().unwrap().short_channel_id) {
34273427
None => {
3428-
let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
3428+
let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None, Some(*payment_hash));
34293429
log_error!(logger, "Failed to find first-hop for payment hash {}", payment_hash);
34303430
return Err(APIError::ChannelUnavailable{err: "No channel available with first hop!".to_owned()})
34313431
},
34323432
Some((cp_id, chan_id)) => (cp_id.clone(), chan_id.clone()),
34333433
};
34343434

3435-
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(id));
3435+
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(id), Some(*payment_hash));
34363436
log_trace!(logger,
34373437
"Attempting to send payment with payment hash {} along path with next hop {}",
34383438
payment_hash, path.hops.first().unwrap().short_channel_id);
@@ -4204,7 +4204,7 @@ where
42044204
None => {
42054205
let error = format!("Channel with id {} not found for the passed counterparty node_id {}",
42064206
next_hop_channel_id, next_node_id);
4207-
let logger = WithContext::from(&self.logger, Some(next_node_id), Some(*next_hop_channel_id));
4207+
let logger = WithContext::from(&self.logger, Some(next_node_id), Some(*next_hop_channel_id), None);
42084208
log_error!(logger, "{} when attempting to forward intercepted HTLC", error);
42094209
return Err(APIError::ChannelUnavailable {
42104210
err: error
@@ -4309,7 +4309,7 @@ where
43094309
}) => {
43104310
macro_rules! failure_handler {
43114311
($msg: expr, $err_code: expr, $err_data: expr, $phantom_ss: expr, $next_hop_unknown: expr) => {
4312-
let logger = WithContext::from(&self.logger, forwarding_counterparty, Some(prev_channel_id));
4312+
let logger = WithContext::from(&self.logger, forwarding_counterparty, Some(prev_channel_id), Some(payment_hash));
43134313
log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);
43144314

43154315
let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
@@ -5081,7 +5081,7 @@ where
50815081

50825082
for (chan_id, req) in peer_state.inbound_channel_request_by_id.iter_mut() {
50835083
if { req.ticks_remaining -= 1 ; req.ticks_remaining } <= 0 {
5084-
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(*chan_id));
5084+
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(*chan_id), None);
50855085
log_error!(logger, "Force-closing unaccepted inbound channel {} for not accepting in a timely manner", &chan_id);
50865086
peer_state.pending_msg_events.push(
50875087
events::MessageSendEvent::HandleError {
@@ -5362,7 +5362,7 @@ where
53625362
ref phantom_shared_secret, outpoint: _, ref blinded_failure, ref channel_id, ..
53635363
}) => {
53645364
log_trace!(
5365-
WithContext::from(&self.logger, None, Some(*channel_id)),
5365+
WithContext::from(&self.logger, None, Some(*channel_id), Some(*payment_hash)),
53665366
"Failing {}HTLC with payment_hash {} backwards from us: {:?}",
53675367
if blinded_failure.is_some() { "blinded " } else { "" }, &payment_hash, onion_error
53685368
);
@@ -5558,7 +5558,7 @@ where
55585558
if let msgs::ErrorAction::IgnoreError = err.err.action {
55595559
// We got a temporary failure updating monitor, but will claim the
55605560
// HTLC when the monitor updating is restored (or on chain).
5561-
let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id));
5561+
let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id), Some(payment_hash));
55625562
log_error!(logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
55635563
} else { errs.push((pk, err)); }
55645564
}
@@ -5710,7 +5710,7 @@ where
57105710
// with a preimage we *must* somehow manage to propagate it to the upstream
57115711
// channel, or we must have an ability to receive the same event and try
57125712
// again on restart.
5713-
log_error!(WithContext::from(&self.logger, None, Some(prev_hop.channel_id)),
5713+
log_error!(WithContext::from(&self.logger, None, Some(prev_hop.channel_id), None),
57145714
"Critical error: failed to update channel monitor with preimage {:?}: {:?}",
57155715
payment_preimage, update_res);
57165716
}
@@ -6091,7 +6091,7 @@ where
60916091

60926092
fn do_accept_inbound_channel(&self, temporary_channel_id: &ChannelId, counterparty_node_id: &PublicKey, accept_0conf: bool, user_channel_id: u128) -> Result<(), APIError> {
60936093

6094-
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(*temporary_channel_id));
6094+
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(*temporary_channel_id), None);
60956095
let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
60966096

60976097
let peers_without_funded_channels =
@@ -6527,7 +6527,8 @@ where
65276527
let logger = WithContext::from(
65286528
&self.logger,
65296529
Some(chan.context.get_counterparty_node_id()),
6530-
Some(chan.context.channel_id())
6530+
Some(chan.context.channel_id()),
6531+
None
65316532
);
65326533
let res =
65336534
chan.funding_signed(&msg, best_block, &self.signer_provider, &&logger);
@@ -6738,7 +6739,7 @@ where
67386739
};
67396740
if let Some(broadcast_tx) = tx {
67406741
let channel_id = chan_option.as_ref().map(|channel| channel.context().channel_id());
6741-
log_info!(WithContext::from(&self.logger, Some(*counterparty_node_id), channel_id), "Broadcasting {}", log_tx!(broadcast_tx));
6742+
log_info!(WithContext::from(&self.logger, Some(*counterparty_node_id), channel_id, None), "Broadcasting {}", log_tx!(broadcast_tx));
67426743
self.tx_broadcaster.broadcast_transactions(&[&broadcast_tx]);
67436744
}
67446745
if let Some(ChannelPhase::Funded(chan)) = chan_option {
@@ -7013,7 +7014,7 @@ where
70137014
prev_short_channel_id, prev_funding_outpoint, prev_channel_id, prev_htlc_id, prev_user_channel_id, forward_info });
70147015
},
70157016
hash_map::Entry::Occupied(_) => {
7016-
let logger = WithContext::from(&self.logger, None, Some(prev_channel_id));
7017+
let logger = WithContext::from(&self.logger, None, Some(prev_channel_id), Some(forward_info.payment_hash));
70177018
log_info!(logger, "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
70187019
let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
70197020
short_channel_id: prev_short_channel_id,
@@ -7277,7 +7278,7 @@ where
72777278
msg.channel_id
72787279
)
72797280
})?;
7280-
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id));
7281+
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), None);
72817282
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
72827283
let peer_state = &mut *peer_state_lock;
72837284
match peer_state.channel_by_id.entry(msg.channel_id) {
@@ -7379,7 +7380,7 @@ where
73797380
for monitor_event in monitor_events.drain(..) {
73807381
match monitor_event {
73817382
MonitorEvent::HTLCEvent(htlc_update) => {
7382-
let logger = WithContext::from(&self.logger, counterparty_node_id, Some(channel_id));
7383+
let logger = WithContext::from(&self.logger, counterparty_node_id, Some(channel_id), Some(htlc_update.payment_hash));
73837384
if let Some(preimage) = htlc_update.payment_preimage {
73847385
log_trace!(logger, "Claiming HTLC with preimage {} from our monitor", preimage);
73857386
self.claim_funds_internal(htlc_update.source, preimage,
@@ -8261,7 +8262,7 @@ where
82618262
mut completed_blocker: Option<RAAMonitorUpdateBlockingAction>) {
82628263

82638264
let logger = WithContext::from(
8264-
&self.logger, Some(counterparty_node_id), Some(channel_id),
8265+
&self.logger, Some(counterparty_node_id), Some(channel_id), None
82658266
);
82668267
loop {
82678268
let per_peer_state = self.per_peer_state.read().unwrap();
@@ -8733,7 +8734,7 @@ where
87338734
HTLCFailReason::from_failure_code(0x2000 | 2),
87348735
HTLCDestination::InvalidForward { requested_forward_scid }));
87358736
let logger = WithContext::from(
8736-
&self.logger, None, Some(htlc.prev_channel_id)
8737+
&self.logger, None, Some(htlc.prev_channel_id), Some(htlc.forward_info.payment_hash)
87378738
);
87388739
log_trace!(logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
87398740
false
@@ -9043,7 +9044,7 @@ where
90439044
let mut per_peer_state = self.per_peer_state.write().unwrap();
90449045
let remove_peer = {
90459046
log_debug!(
9046-
WithContext::from(&self.logger, Some(*counterparty_node_id), None),
9047+
WithContext::from(&self.logger, Some(*counterparty_node_id), None, None),
90479048
"Marking channels with {} disconnected and generating channel_updates.",
90489049
log_pubkey!(counterparty_node_id)
90499050
);
@@ -9151,7 +9152,7 @@ where
91519152
}
91529153

91539154
fn peer_connected(&self, counterparty_node_id: &PublicKey, init_msg: &msgs::Init, inbound: bool) -> Result<(), ()> {
9154-
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), None);
9155+
let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), None, None);
91559156
if !init_msg.features.supports_static_remote_key() {
91569157
log_debug!(logger, "Peer {} does not support static remote key, disconnecting", log_pubkey!(counterparty_node_id));
91579158
return Err(());
@@ -10950,7 +10951,7 @@ where
1095010951
if let Some(in_flight_upds) = in_flight_monitor_updates {
1095110952
for ((counterparty_id, funding_txo), mut chan_in_flight_updates) in in_flight_upds {
1095210953
let channel_id = funding_txo_to_channel_id.get(&funding_txo).copied();
10953-
let logger = WithContext::from(&args.logger, Some(counterparty_id), channel_id);
10954+
let logger = WithContext::from(&args.logger, Some(counterparty_id), channel_id, None);
1095410955
if let Some(monitor) = args.channel_monitors.get(&funding_txo) {
1095510956
// Now that we've removed all the in-flight monitor updates for channels that are
1095610957
// still open, we need to replay any monitor updates that are for closed channels,
@@ -11314,7 +11315,7 @@ where
1131411315
for (node_id, monitor_update_blocked_actions) in monitor_update_blocked_actions_per_peer.unwrap() {
1131511316
if let Some(peer_state) = per_peer_state.get(&node_id) {
1131611317
for (channel_id, actions) in monitor_update_blocked_actions.iter() {
11317-
let logger = WithContext::from(&args.logger, Some(node_id), Some(*channel_id));
11318+
let logger = WithContext::from(&args.logger, Some(node_id), Some(*channel_id), None);
1131811319
for action in actions.iter() {
1131911320
if let MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
1132011321
downstream_counterparty_and_funding_outpoint:
@@ -11342,7 +11343,7 @@ where
1134211343
}
1134311344
peer_state.lock().unwrap().monitor_update_blocked_actions = monitor_update_blocked_actions;
1134411345
} else {
11345-
log_error!(WithContext::from(&args.logger, Some(node_id), None), "Got blocked actions without a per-peer-state for {}", node_id);
11346+
log_error!(WithContext::from(&args.logger, Some(node_id), None, None), "Got blocked actions without a per-peer-state for {}", node_id);
1134611347
return Err(DecodeError::InvalidValue);
1134711348
}
1134811349
}

0 commit comments

Comments (0)