From 4be5d64009a43906690a0b91ce26701f001086fd Mon Sep 17 00:00:00 2001
From: TrezorHannes
Date: Fri, 16 Jan 2026 18:52:58 +0100
Subject: [PATCH 1/2] perf: cache LNDg channel data globally to reduce API calls

- Add fetch_all_channels() that fetches all channels once and caches by pubkey
- Refactor get_channels_to_modify() to use cached data instead of per-peer API calls
- Reduces API calls from N (one per peer) to 1 (one per script run)

Tests passing: 4/4
---
 Other/fee_adjuster.py | 115 ++++++++++++++++++++++++++++--------------
 1 file changed, 76 insertions(+), 39 deletions(-)

diff --git a/Other/fee_adjuster.py b/Other/fee_adjuster.py
index 7b95d7a..3763339 100644
--- a/Other/fee_adjuster.py
+++ b/Other/fee_adjuster.py
@@ -484,62 +484,99 @@ def calculate_new_fee_rate(
     return round(new_fee_rate)
 
 
-# Need to fetch from LNDg since lncli listchannels doesn't provide local_fee
-# and want to avoid two lncli subprocesses per pubkey
-def get_channels_to_modify(pubkey, config):
+# Global channel cache - populated once per script run to avoid redundant API calls
+_CHANNEL_CACHE = None
+
+
+def fetch_all_channels(config):
+    """
+    Fetch all channels from LNDg once and cache them by remote_pubkey.
+    Returns a dict: {pubkey: {chan_id: channel_data, ...}, ...}
+    """
+    global _CHANNEL_CACHE
+    if _CHANNEL_CACHE is not None:
+        logging.debug("Using cached channel data")
+        return _CHANNEL_CACHE
+
     lndg_api_url = config["lndg"]["lndg_api_url"]
     api_url = f"{lndg_api_url}/api/channels?limit=1500"
     username = config["credentials"]["lndg_username"]
     password = config["credentials"]["lndg_password"]
-    channels_to_modify = {}
+
+    _CHANNEL_CACHE = {}
+
     try:
         response = requests.get(api_url, auth=(username, password))
         response.raise_for_status()
         data = response.json()
+
         if "results" in data:
             results = data["results"]
             for result in results:
                 remote_pubkey = result.get("remote_pubkey", "")
-                if remote_pubkey == pubkey:
-                    chan_id = result.get("chan_id", "")
-                    local_fee_rate = result.get("local_fee_rate", 0)
-                    is_open = result.get("is_open", False)
-                    alias = result.get("alias", "")
-                    capacity = result.get("capacity", 0)
-                    local_balance = result.get("local_balance", 0)
-                    fees_updated = result.get("fees_updated", "")
-                    auto_fees = result.get("auto_fees", False)
-                    ar_max_cost = result.get("ar_max_cost")
-                    local_inbound_fee_rate = result.get("local_inbound_fee_rate")
-                    num_updates = result.get("num_updates", 0)
-
-                    if is_open:
-                        local_balance_ratio = (
-                            (local_balance / capacity) * 100 if capacity else 0
-                        )
-                        fees_updated_datetime = (
-                            datetime.strptime(fees_updated, "%Y-%m-%dT%H:%M:%S.%f")
-                            if fees_updated
-                            else None
-                        )
-                        channels_to_modify[chan_id] = {
-                            "alias": alias,
-                            "capacity": capacity,
-                            "local_balance": local_balance,
-                            "local_balance_ratio": local_balance_ratio,
-                            "fees_updated_datetime": fees_updated_datetime,
-                            "local_fee_rate": local_fee_rate,
-                            "auto_fees": auto_fees,
-                            "ar_max_cost": ar_max_cost,
-                            "local_inbound_fee_rate": local_inbound_fee_rate,
-                            "num_updates": num_updates,
-                        }
-        return channels_to_modify
+                is_open = result.get("is_open", False)
+
+                if not is_open:
+                    continue
+
+                chan_id = result.get("chan_id", "")
+                local_fee_rate = result.get("local_fee_rate", 0)
+                alias = result.get("alias", "")
+                capacity = result.get("capacity", 0)
+                local_balance = result.get("local_balance", 0)
+                fees_updated = result.get("fees_updated", "")
+                auto_fees = result.get("auto_fees", False)
+                ar_max_cost = result.get("ar_max_cost")
+                local_inbound_fee_rate = result.get("local_inbound_fee_rate")
+                num_updates = result.get("num_updates", 0)
+
+                local_balance_ratio = (
+                    (local_balance / capacity) * 100 if capacity else 0
+                )
+                fees_updated_datetime = (
+                    datetime.strptime(fees_updated, "%Y-%m-%dT%H:%M:%S.%f")
+                    if fees_updated
+                    else None
+                )
+
+                channel_data = {
+                    "alias": alias,
+                    "capacity": capacity,
+                    "local_balance": local_balance,
+                    "local_balance_ratio": local_balance_ratio,
+                    "fees_updated_datetime": fees_updated_datetime,
+                    "local_fee_rate": local_fee_rate,
+                    "auto_fees": auto_fees,
+                    "ar_max_cost": ar_max_cost,
+                    "local_inbound_fee_rate": local_inbound_fee_rate,
+                    "num_updates": num_updates,
+                }
+
+                if remote_pubkey not in _CHANNEL_CACHE:
+                    _CHANNEL_CACHE[remote_pubkey] = {}
+                _CHANNEL_CACHE[remote_pubkey][chan_id] = channel_data
+
+        logging.info(
+            f"Fetched and cached {sum(len(v) for v in _CHANNEL_CACHE.values())} channels for {len(_CHANNEL_CACHE)} peers"
+        )
+        return _CHANNEL_CACHE
+
     except requests.exceptions.RequestException as e:
         logging.error(f"Error fetching LNDg channels: {e}")
         raise LNDGAPIError(f"Error fetching LNDg channels: {e}")
 
 
+# Need to fetch from LNDg since lncli listchannels doesn't provide local_fee
+# and want to avoid two lncli subprocesses per pubkey
+def get_channels_to_modify(pubkey, config):
+    """
+    Get channels for a specific pubkey from the cached channel data.
+    Uses global cache to avoid redundant API calls when processing multiple peers.
+    """
+    all_channels = fetch_all_channels(config)
+    return all_channels.get(pubkey, {})
+
+
 def calculate_inbound_fee_discount_ppm(
     calculated_final_outgoing_fee_ppm, initial_raw_band, ar_max_cost_percent
 ):

From a2cb4e487998639dfef36d9751c59bdd4c8ad197 Mon Sep 17 00:00:00 2001
From: TrezorHannes
Date: Fri, 16 Jan 2026 19:05:51 +0100
Subject: [PATCH 2/2] fix: address PR review feedback on cache handling and date parsing

- Move cache initialization after API success to prevent empty cache on failure
- Reset cache to None on API failure to allow retry on next call
- Add try-except fallback for date parsing without fractional seconds

Tests passing: 4/4
---
 Other/fee_adjuster.py | 22 +++++++++++++++-------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/Other/fee_adjuster.py b/Other/fee_adjuster.py
index 3763339..990d857 100644
--- a/Other/fee_adjuster.py
+++ b/Other/fee_adjuster.py
@@ -503,13 +503,13 @@ def fetch_all_channels(config):
     username = config["credentials"]["lndg_username"]
     password = config["credentials"]["lndg_password"]
 
-    _CHANNEL_CACHE = {}
-
     try:
         response = requests.get(api_url, auth=(username, password))
         response.raise_for_status()
         data = response.json()
 
+        _CHANNEL_CACHE = {}
+
         if "results" in data:
             results = data["results"]
             for result in results:
@@ -533,11 +533,18 @@ def fetch_all_channels(config):
                 local_balance_ratio = (
                     (local_balance / capacity) * 100 if capacity else 0
                 )
-                fees_updated_datetime = (
-                    datetime.strptime(fees_updated, "%Y-%m-%dT%H:%M:%S.%f")
-                    if fees_updated
-                    else None
-                )
+
+                # Parse fees_updated with fallback for missing fractional seconds
+                fees_updated_datetime = None
+                if fees_updated:
+                    try:
+                        fees_updated_datetime = datetime.strptime(
+                            fees_updated, "%Y-%m-%dT%H:%M:%S.%f"
+                        )
+                    except ValueError:
+                        fees_updated_datetime = datetime.strptime(
+                            fees_updated, "%Y-%m-%dT%H:%M:%S"
+                        )
 
                 channel_data = {
                     "alias": alias,
                     "capacity": capacity,
@@ -563,6 +570,7 @@
 
     except requests.exceptions.RequestException as e:
         logging.error(f"Error fetching LNDg channels: {e}")
+        _CHANNEL_CACHE = None  # Reset cache to allow retry on next call
         raise LNDGAPIError(f"Error fetching LNDg channels: {e}")
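
Reviewer note, not part of either commit: below is a minimal usage sketch of the caching path these two patches introduce. It assumes only the config layout that fetch_all_channels() actually reads (the "lndg" and "credentials" keys); the import path, LNDg URL, credentials, and pubkeys are illustrative placeholders, not values taken from the patches.

# Usage sketch only -- run alongside fee_adjuster.py; URL, credentials and
# pubkeys below are placeholders for illustration.
import logging

from fee_adjuster import fetch_all_channels, get_channels_to_modify

logging.basicConfig(level=logging.DEBUG)

config = {
    "lndg": {"lndg_api_url": "http://127.0.0.1:8889"},
    "credentials": {"lndg_username": "lndg-admin", "lndg_password": "changeme"},
}

peer_pubkeys = [
    "02aaaa...",  # placeholder pubkeys, truncated
    "03bbbb...",
]

# Optional: warm the cache explicitly; the first get_channels_to_modify()
# call would trigger the same single API request lazily.
fetch_all_channels(config)

# Every lookup after the first is served from the module-level cache,
# so N peers still cost exactly one HTTP request per script run.
for pubkey in peer_pubkeys:
    channels = get_channels_to_modify(pubkey, config)
    for chan_id, data in channels.items():
        print(pubkey, chan_id, data["alias"], data["local_fee_rate"])

One design consequence worth noting: _CHANNEL_CACHE is module-global and is only reset to None when the LNDg request fails, so a long-lived caller that wants fresh data between iterations has to clear it itself; for the "once per script run" model described in the commit message, a single fetch per run is the intended behavior.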