From 0bc4187f4943b91f27a290218e92a9677afb1044 Mon Sep 17 00:00:00 2001
From: benthecarman
Date: Tue, 16 Dec 2025 18:37:21 -0600
Subject: [PATCH] Fix clippy issues, enforce in CI

We weren't checking for any clippy issues. This fixes most of them and adds
a few exceptions for lints that aren't worth fixing or that will be addressed
by future changes.
---
 .github/workflows/build.yml                   |   4 +
 ldk-server-protos/Cargo.toml                  |   7 ++
 ldk-server/src/api/bolt11_send.rs             |   2 +-
 ldk-server/src/api/bolt12_send.rs             |   2 +-
 ldk-server/src/api/error.rs                   |   5 +-
 ldk-server/src/api/get_payment_details.rs     |   4 +-
 ldk-server/src/api/list_channels.rs           |   2 +-
 ldk-server/src/api/onchain_send.rs            |   2 +-
 ldk-server/src/io/events/event_publisher.rs   |   3 +
 .../src/io/persist/paginated_kv_store.rs      |  19 ----
 ldk-server/src/io/persist/sqlite_store/mod.rs | 103 +++--------------
 ldk-server/src/io/utils.rs                    |  10 +-
 ldk-server/src/main.rs                        |  10 +-
 ldk-server/src/util/config.rs                 |  60 +++++-----
 ldk-server/src/util/logger.rs                 |  18 ++-
 ldk-server/src/util/proto_adapter.rs          |  59 +++++-----
 16 files changed, 113 insertions(+), 197 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index d406e0c..b6f8a9f 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -28,6 +28,7 @@ jobs:
         run: |
           curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --profile=minimal --default-toolchain ${{ matrix.toolchain }}
           rustup override set ${{ matrix.toolchain }}
+          if [ "${{ matrix.msrv }}" = "true" ]; then rustup component add clippy; fi
       - name: Check formatting
         if: matrix.check-fmt
         run: rustup component add rustfmt && cargo fmt --all -- --check
@@ -37,6 +38,9 @@
          echo "No packages need pinning for MSRV ${{ matrix.toolchain }}"
       - name: Build on Rust ${{ matrix.toolchain }}
         run: cargo build --verbose --color always
+      - name: Check clippy if on msrv
+        if: matrix.msrv
+        run: cargo clippy --all-features -- -D warnings
       - name: Test on Rust ${{ matrix.toolchain }}
         run: cargo test
       - name: Cargo check release on Rust ${{ matrix.toolchain }}
diff --git a/ldk-server-protos/Cargo.toml b/ldk-server-protos/Cargo.toml
index 894abe9..c971d6f 100644
--- a/ldk-server-protos/Cargo.toml
+++ b/ldk-server-protos/Cargo.toml
@@ -5,6 +5,13 @@ edition = "2021"

 build = "build.rs"

+# We use a cfg instead of a feature for genproto to prevent it from being
+# enabled with --all-features. Proto generation is a developer-only tool that
+# requires external dependencies (protoc) and shouldn't be triggered accidentally.
+# This lint configuration tells Cargo that genproto is an expected custom cfg.
+[lints.rust]
+unexpected_cfgs = { level = "warn", check-cfg = ['cfg(genproto)'] }
+
 [features]
 default = []
 serde = ["dep:serde", "dep:bytes"]
diff --git a/ldk-server/src/api/bolt11_send.rs b/ldk-server/src/api/bolt11_send.rs
index f5204c8..ced3e2e 100644
--- a/ldk-server/src/api/bolt11_send.rs
+++ b/ldk-server/src/api/bolt11_send.rs
@@ -9,7 +9,7 @@ use std::str::FromStr;
 pub(crate) fn handle_bolt11_send_request(
     context: Context, request: Bolt11SendRequest,
 ) -> Result<Bolt11SendResponse, LdkServerError> {
-    let invoice = Bolt11Invoice::from_str(&request.invoice.as_str())
+    let invoice = Bolt11Invoice::from_str(request.invoice.as_str())
         .map_err(|_| ldk_node::NodeError::InvalidInvoice)?;

     let route_parameters = match request.route_parameters {
diff --git a/ldk-server/src/api/bolt12_send.rs b/ldk-server/src/api/bolt12_send.rs
index 908107a..6ca4c36 100644
--- a/ldk-server/src/api/bolt12_send.rs
+++ b/ldk-server/src/api/bolt12_send.rs
@@ -10,7 +10,7 @@ pub(crate) fn handle_bolt12_send_request(
     context: Context, request: Bolt12SendRequest,
 ) -> Result<Bolt12SendResponse, LdkServerError> {
     let offer =
-        Offer::from_str(&request.offer.as_str()).map_err(|_| ldk_node::NodeError::InvalidOffer)?;
+        Offer::from_str(request.offer.as_str()).map_err(|_| ldk_node::NodeError::InvalidOffer)?;

     let route_parameters = match request.route_parameters {
         Some(params) => {
diff --git a/ldk-server/src/api/error.rs b/ldk-server/src/api/error.rs
index c9103da..137eed4 100644
--- a/ldk-server/src/api/error.rs
+++ b/ldk-server/src/api/error.rs
@@ -29,13 +29,11 @@ impl fmt::Display for LdkServerError {
 }

 #[derive(Clone, Debug, PartialEq, Eq)]
+#[allow(clippy::enum_variant_names)]
 pub(crate) enum LdkServerErrorCode {
     /// Please refer to [`protos::error::ErrorCode::InvalidRequestError`].
     InvalidRequestError,

-    /// Please refer to [`protos::error::ErrorCode::AuthError`].
-    AuthError,
-
     /// Please refer to [`protos::error::ErrorCode::LightningError`].
     LightningError,

@@ -47,7 +45,6 @@ impl fmt::Display for LdkServerErrorCode {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             LdkServerErrorCode::InvalidRequestError => write!(f, "InvalidRequestError"),
-            LdkServerErrorCode::AuthError => write!(f, "AuthError"),
             LdkServerErrorCode::LightningError => write!(f, "LightningError"),
             LdkServerErrorCode::InternalServerError => write!(f, "InternalServerError"),
         }
diff --git a/ldk-server/src/api/get_payment_details.rs b/ldk-server/src/api/get_payment_details.rs
index d0b6fc2..48bfe3d 100644
--- a/ldk-server/src/api/get_payment_details.rs
+++ b/ldk-server/src/api/get_payment_details.rs
@@ -19,9 +19,7 @@ pub(crate) fn handle_get_payment_details_request(

     let payment_details = context.node.payment(&PaymentId(payment_id_bytes));

-    let response = GetPaymentDetailsResponse {
-        payment: payment_details.map(|payment| payment_to_proto(payment)),
-    };
+    let response = GetPaymentDetailsResponse { payment: payment_details.map(payment_to_proto) };

     Ok(response)
 }
diff --git a/ldk-server/src/api/list_channels.rs b/ldk-server/src/api/list_channels.rs
index 046fad6..0f9d1a2 100644
--- a/ldk-server/src/api/list_channels.rs
+++ b/ldk-server/src/api/list_channels.rs
@@ -6,7 +6,7 @@ use ldk_server_protos::api::{ListChannelsRequest, ListChannelsResponse};
 pub(crate) fn handle_list_channels_request(
     context: Context, _request: ListChannelsRequest,
 ) -> Result<ListChannelsResponse, LdkServerError> {
-    let channels = context.node.list_channels().into_iter().map(|c| channel_to_proto(c)).collect();
+    let channels = context.node.list_channels().into_iter().map(channel_to_proto).collect();
     let response = ListChannelsResponse { channels };

     Ok(response)
diff --git a/ldk-server/src/api/onchain_send.rs b/ldk-server/src/api/onchain_send.rs
index b84c671..c3652e7 100644
--- a/ldk-server/src/api/onchain_send.rs
+++ b/ldk-server/src/api/onchain_send.rs
@@ -18,7 +18,7 @@ pub(crate) fn handle_onchain_send_request(
         )
     })?;

-    let fee_rate = request.fee_rate_sat_per_vb.map(FeeRate::from_sat_per_vb).flatten();
+    let fee_rate = request.fee_rate_sat_per_vb.and_then(FeeRate::from_sat_per_vb);
     let txid = match (request.amount_sats, request.send_all) {
         (Some(amount_sats), None) => {
             context.node.onchain_payment().send_to_address(&address, amount_sats, fee_rate)?
diff --git a/ldk-server/src/io/events/event_publisher.rs b/ldk-server/src/io/events/event_publisher.rs
index f333bb3..f0ffac9 100644
--- a/ldk-server/src/io/events/event_publisher.rs
+++ b/ldk-server/src/io/events/event_publisher.rs
@@ -41,9 +41,12 @@ pub trait EventPublisher: Send + Sync {
     async fn publish(&self, event: EventEnvelope) -> Result<(), LdkServerError>;
 }

+/// A no-op implementation of the [`EventPublisher`] trait.
+#[cfg(not(feature = "events-rabbitmq"))]
 pub(crate) struct NoopEventPublisher;

 #[async_trait]
+#[cfg(not(feature = "events-rabbitmq"))]
 impl EventPublisher for NoopEventPublisher {
     /// Publishes an event to a no-op sink, effectively discarding it.
     ///
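A quick note on the API-handler hunks above: they are mechanical fixes for clippy's `needless_borrow`, `redundant_closure`, and `map_flatten` lints, i.e. dropping the `&` in front of `.as_str()`, passing the function itself to `map`, and collapsing `.map(f).flatten()` on an `Option` into `.and_then(f)`. The standalone sketch below (hypothetical helper names, not from this codebase) illustrates why the rewritten forms behave identically:

// Illustrative only: `to_proto` and `parse_fee_rate` are hypothetical stand-ins.
fn to_proto(v: u32) -> String {
    format!("proto({v})")
}

fn parse_fee_rate(sat_per_vb: u64) -> Option<u64> {
    if sat_per_vb > 0 {
        Some(sat_per_vb * 250)
    } else {
        None
    }
}

fn main() {
    let values = [1u32, 2, 3];

    // clippy::redundant_closure: `|v| to_proto(v)` is just `to_proto`.
    let with_closure: Vec<String> = values.iter().copied().map(|v| to_proto(v)).collect();
    let without_closure: Vec<String> = values.iter().copied().map(to_proto).collect();
    assert_eq!(with_closure, without_closure);

    // clippy::map_flatten: `.map(f).flatten()` on an `Option` is `.and_then(f)`.
    let requested: Option<u64> = Some(4);
    assert_eq!(requested.map(parse_fee_rate).flatten(), requested.and_then(parse_fee_rate));
}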
diff --git a/ldk-server/src/io/persist/paginated_kv_store.rs b/ldk-server/src/io/persist/paginated_kv_store.rs
index d036c2e..e450b86 100644
--- a/ldk-server/src/io/persist/paginated_kv_store.rs
+++ b/ldk-server/src/io/persist/paginated_kv_store.rs
@@ -49,25 +49,6 @@ pub trait PaginatedKVStore: Send + Sync {
         &self, primary_namespace: &str, secondary_namespace: &str, key: &str, time: i64, buf: &[u8],
     ) -> Result<(), io::Error>;

-    /// Removes any data that had previously been persisted under the given `key`.
-    ///
-    /// If the `lazy` flag is set to `true`, the backend implementation might choose to lazily
-    /// remove the given `key` at some point in time after the method returns, e.g., as part of an
-    /// eventual batch deletion of multiple keys. As a consequence, subsequent calls to
-    /// [`PaginatedKVStore::list`] might include the removed key until the changes are actually persisted.
-    ///
-    /// Note that while setting the `lazy` flag reduces the I/O burden of multiple subsequent
-    /// `remove` calls, it also influences the atomicity guarantees as lazy `remove`s could
-    /// potentially get lost on crash after the method returns. Therefore, this flag should only be
-    /// set for `remove` operations that can be safely replayed at a later time.
-    ///
-    /// Returns successfully if no data will be stored for the given `primary_namespace`,
-    /// `secondary_namespace`, and `key`, independently of whether it was present before its
-    /// invocation or not.
-    fn remove(
-        &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool,
-    ) -> Result<(), io::Error>;
-
     /// Returns a paginated list of keys that are stored under the given `secondary_namespace` in
     /// `primary_namespace`, ordered in descending order of `time`.
     ///
diff --git a/ldk-server/src/io/persist/sqlite_store/mod.rs b/ldk-server/src/io/persist/sqlite_store/mod.rs
index 9e53db3..0c92b8f 100644
--- a/ldk-server/src/io/persist/sqlite_store/mod.rs
+++ b/ldk-server/src/io/persist/sqlite_store/mod.rs
@@ -20,7 +20,6 @@ const LIST_KEYS_MAX_PAGE_SIZE: i32 = 100;

 pub struct SqliteStore {
     connection: Arc<Mutex<Connection>>,
-    data_dir: PathBuf,
     paginated_kv_table_name: String,
 }

@@ -44,18 +43,18 @@ impl SqliteStore {
                 data_dir.display(),
                 e
             );
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

-        let mut db_file_path = data_dir.clone();
+        let mut db_file_path = data_dir;
         db_file_path.push(db_file_name);

         let connection = Connection::open(db_file_path.clone()).map_err(|e| {
             let msg =
                 format!("Failed to open/create database file {}: {}", db_file_path.display(), e);
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

-        let sql = format!("SELECT user_version FROM pragma_user_version");
+        let sql = "SELECT user_version FROM pragma_user_version".to_string();
         let version_res: u16 = connection.query_row(&sql, [], |row| row.get(0)).unwrap();

         if version_res == 0 {
@@ -69,14 +68,14 @@ impl SqliteStore {
             )
             .map_err(|e| {
                 let msg = format!("Failed to set PRAGMA user_version: {}", e);
-                io::Error::new(io::ErrorKind::Other, msg)
+                io::Error::other(msg)
             })?;
         } else if version_res > SCHEMA_USER_VERSION {
             let msg = format!(
                 "Failed to open database: incompatible schema version {}. Expected: {}",
                 version_res, SCHEMA_USER_VERSION
             );
-            return Err(io::Error::new(io::ErrorKind::Other, msg));
+            return Err(io::Error::other(msg));
         }

         let create_paginated_kv_table_sql = format!(
@@ -92,7 +91,7 @@ impl SqliteStore {

         connection.execute(&create_paginated_kv_table_sql, []).map_err(|e| {
             let msg = format!("Failed to create table {}: {}", paginated_kv_table_name, e);
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

         let index_creation_time_sql = format!(
@@ -105,16 +104,11 @@ impl SqliteStore {
                 "Failed to create index on creation_time, table {}: {}",
                 paginated_kv_table_name, e
             );
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

         let connection = Arc::new(Mutex::new(connection));
-        Ok(Self { connection, data_dir, paginated_kv_table_name })
-    }
-
-    /// Returns the data directory.
-    pub fn get_data_dir(&self) -> PathBuf {
-        self.data_dir.clone()
+        Ok(Self { connection, paginated_kv_table_name })
     }

     fn read_internal(
@@ -129,7 +123,7 @@ impl SqliteStore {

         let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| {
             let msg = format!("Failed to prepare statement: {}", e);
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

         let res = stmt
@@ -159,43 +153,11 @@ impl SqliteStore {
                     PrintableString(key),
                     e
                 );
-                io::Error::new(io::ErrorKind::Other, msg)
+                io::Error::other(msg)
             },
         })?;
         Ok(res)
     }
-
-    fn remove_internal(
-        &self, kv_table_name: &str, primary_namespace: &str, secondary_namespace: &str, key: &str,
-    ) -> io::Result<()> {
-        check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?;
-
-        let locked_conn = self.connection.lock().unwrap();
-
-        let sql = format!("DELETE FROM {} WHERE primary_namespace=:primary_namespace AND secondary_namespace=:secondary_namespace AND key=:key;", kv_table_name);
-
-        let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| {
-            let msg = format!("Failed to prepare statement: {}", e);
-            io::Error::new(io::ErrorKind::Other, msg)
-        })?;
-
-        stmt.execute(named_params! {
-            ":primary_namespace": primary_namespace,
-            ":secondary_namespace": secondary_namespace,
-            ":key": key,
-        })
-        .map_err(|e| {
-            let msg = format!(
-                "Failed to delete key {}/{}/{}: {}",
-                PrintableString(primary_namespace),
-                PrintableString(secondary_namespace),
-                PrintableString(key),
-                e
-            );
-            io::Error::new(io::ErrorKind::Other, msg)
-        })?;
-        Ok(())
-    }
 }

 impl PaginatedKVStore for SqliteStore {
@@ -227,7 +189,7 @@ impl PaginatedKVStore for SqliteStore {

         let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| {
             let msg = format!("Failed to prepare statement: {}", e);
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;
         stmt.execute(named_params! {
             ":primary_namespace": primary_namespace,
@@ -246,21 +208,10 @@ impl PaginatedKVStore for SqliteStore {
                 PrintableString(key),
                 e
             );
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })
     }

-    fn remove(
-        &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool,
-    ) -> io::Result<()> {
-        self.remove_internal(
-            &self.paginated_kv_table_name,
-            primary_namespace,
-            secondary_namespace,
-            key,
-        )
-    }
-
     fn list(
         &self, primary_namespace: &str, secondary_namespace: &str,
         page_token: Option<(String, i64)>,
@@ -278,7 +229,7 @@ impl PaginatedKVStore for SqliteStore {

         let mut stmt = locked_conn.prepare_cached(&sql).map_err(|e| {
             let msg = format!("Failed to prepare statement: {}", e);
-            io::Error::new(io::ErrorKind::Other, msg)
+            io::Error::other(msg)
         })?;

         let mut keys: Vec<String> = Vec::new();
@@ -301,14 +252,14 @@ impl PaginatedKVStore for SqliteStore {
             )
             .map_err(|e| {
                 let msg = format!("Failed to retrieve queried rows: {}", e);
-                io::Error::new(io::ErrorKind::Other, msg)
+                io::Error::other(msg)
             })?;

         let mut last_creation_time: Option<i64> = None;
         for r in rows_iter {
             let (k, ct) = r.map_err(|e| {
                 let msg = format!("Failed to retrieve queried rows: {}", e);
-                io::Error::new(io::ErrorKind::Other, msg)
+                io::Error::other(msg)
             })?;
             keys.push(k);
             last_creation_time = Some(ct);
@@ -333,15 +284,6 @@ mod tests {
     use rand::{thread_rng, Rng};
     use std::panic::RefUnwindSafe;

-    impl Drop for SqliteStore {
-        fn drop(&mut self) {
-            match fs::remove_dir_all(&self.data_dir) {
-                Err(e) => println!("Failed to remove test store directory: {}", e),
-                _ => {},
-            }
-        }
-    }
-
     #[test]
     fn read_write_remove_list_persist() {
         let mut temp_path = random_storage_path();
@@ -413,14 +355,8 @@ mod tests {
         let read_data = kv_store.read(primary_namespace, secondary_namespace, testkey).unwrap();
         assert_eq!(data, &*read_data);

-        kv_store.remove(primary_namespace, secondary_namespace, testkey, false).unwrap();
-
-        let listed_keys = list_all_keys(primary_namespace, secondary_namespace);
-        assert_eq!(listed_keys.len(), 109);
-
         // Ensure we have no issue operating with primary_namespace/secondary_namespace/key being KVSTORE_NAMESPACE_KEY_MAX_LEN
-        let max_chars: String =
-            std::iter::repeat('A').take(KVSTORE_NAMESPACE_KEY_MAX_LEN).collect();
+        let max_chars: String = "A".repeat(KVSTORE_NAMESPACE_KEY_MAX_LEN);
         kv_store.write(&max_chars, &max_chars, &max_chars, 0, &data).unwrap();

         println!("{:?}", listed_keys);
@@ -431,10 +367,5 @@ mod tests {

         let read_data = kv_store.read(&max_chars, &max_chars, &max_chars).unwrap();
         assert_eq!(data, &*read_data);
-
-        kv_store.remove(&max_chars, &max_chars, &max_chars, false).unwrap();
-
-        let listed_keys = list_all_keys(&max_chars, &max_chars);
-        assert_eq!(listed_keys.len(), 0);
     }
 }
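Most of the churn in the storage code above is a single mechanical substitution: `io::Error::new(io::ErrorKind::Other, msg)` becomes the shorter `io::Error::other(msg)`, which has been available in std since Rust 1.74 and yields an error of the same kind. A minimal standalone sketch of the equivalence (the helper name is hypothetical):

use std::io;

// Hypothetical helper mirroring the pattern used throughout the store code.
fn storage_error(msg: String) -> io::Error {
    io::Error::other(msg)
}

fn main() {
    let old_style = io::Error::new(io::ErrorKind::Other, "failed to prepare statement".to_string());
    let new_style = storage_error("failed to prepare statement".to_string());

    // Same ErrorKind, same displayed message.
    assert_eq!(old_style.kind(), new_style.kind());
    assert_eq!(old_style.to_string(), new_style.to_string());
}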
diff --git a/ldk-server/src/io/utils.rs b/ldk-server/src/io/utils.rs
index b9bfe59..73b5de1 100644
--- a/ldk-server/src/io/utils.rs
+++ b/ldk-server/src/io/utils.rs
@@ -23,7 +23,7 @@ pub(crate) fn check_namespace_key_validity(
                 PrintableString(secondary_namespace),
                 PrintableString(key)
             );
-            return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
+            return Err(std::io::Error::other(msg));
         }

         if primary_namespace.is_empty() && !secondary_namespace.is_empty() {
@@ -36,7 +36,7 @@ pub(crate) fn check_namespace_key_validity(
                 "Failed to {} {}/{}/{}: primary namespace may not be empty if a non-empty secondary namespace is given.",
                 operation, PrintableString(primary_namespace), PrintableString(secondary_namespace), PrintableString(key)
             );
-            return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
+            return Err(std::io::Error::other(msg));
         }

         if !is_valid_kvstore_str(primary_namespace)
@@ -58,7 +58,7 @@ pub(crate) fn check_namespace_key_validity(
                 PrintableString(secondary_namespace),
                 PrintableString(key)
             );
-            return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
+            return Err(std::io::Error::other(msg));
         }
     } else {
         if primary_namespace.is_empty() && !secondary_namespace.is_empty() {
@@ -70,7 +70,7 @@ pub(crate) fn check_namespace_key_validity(
                 "Failed to {} {}/{}: primary namespace may not be empty if a non-empty secondary namespace is given.",
                 operation, PrintableString(primary_namespace), PrintableString(secondary_namespace)
             );
-            return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
+            return Err(std::io::Error::other(msg));
         }
         if !is_valid_kvstore_str(primary_namespace) || !is_valid_kvstore_str(secondary_namespace) {
             debug_assert!(
@@ -86,7 +86,7 @@ pub(crate) fn check_namespace_key_validity(
                 PrintableString(primary_namespace),
                 PrintableString(secondary_namespace)
             );
-            return Err(std::io::Error::new(std::io::ErrorKind::Other, msg));
+            return Err(std::io::Error::other(msg));
         }
     }

diff --git a/ldk-server/src/main.rs b/ldk-server/src/main.rs
index 72f5fb1..0123655 100644
--- a/ldk-server/src/main.rs
+++ b/ldk-server/src/main.rs
@@ -13,7 +13,7 @@ use tokio::signal::unix::SignalKind;
 use hyper::server::conn::http1;
 use hyper_util::rt::TokioIo;

-use crate::io::events::event_publisher::{EventPublisher, NoopEventPublisher};
+use crate::io::events::event_publisher::EventPublisher;
 use crate::io::events::get_event_name;
 #[cfg(feature = "events-rabbitmq")]
 use crate::io::events::rabbitmq::{RabbitMqConfig, RabbitMqEventPublisher};
@@ -72,7 +72,7 @@ fn main() {
         },
     };

-    let log_file_path = config_file.log_file_path.map(|p| PathBuf::from(p)).unwrap_or_else(|| {
+    let log_file_path = config_file.log_file_path.map(PathBuf::from).unwrap_or_else(|| {
         let mut default_log_path = PathBuf::from(&config_file.storage_dir_path);
         default_log_path.push("ldk-server.log");
         default_log_path
@@ -152,7 +152,9 @@ fn main() {
         },
     });

-    let event_publisher: Arc<dyn EventPublisher> = Arc::new(NoopEventPublisher);
+    #[cfg(not(feature = "events-rabbitmq"))]
+    let event_publisher: Arc<dyn EventPublisher> =
+        Arc::new(crate::io::events::event_publisher::NoopEventPublisher);

     #[cfg(feature = "events-rabbitmq")]
     let event_publisher: Arc<dyn EventPublisher> = {
@@ -263,7 +265,7 @@ fn main() {
                     let payment = payment_to_proto(payment_details);
                     upsert_payment_details(&event_node, Arc::clone(&paginated_store), &payment);
                 } else {
-                    error!("Unable to find payment with paymentId: {}", payment_id.to_string());
+                    error!("Unable to find payment with paymentId: {payment_id}");
                 }
             },
             Event::PaymentForwarded {
diff --git a/ldk-server/src/util/config.rs b/ldk-server/src/util/config.rs
index b2d896c..4044311 100644
--- a/ldk-server/src/util/config.rs
+++ b/ldk-server/src/util/config.rs
@@ -63,13 +63,13 @@ impl TryFrom<TomlConfig> for Config {
         (Some(_), Some(_)) => {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
-                format!("Must set a single chain source, multiple were configured"),
+                "Must set a single chain source, multiple were configured".to_string(),
             ))
         },
         (None, None) => {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
-                format!("At least one chain source must be set, either bitcoind or esplora"),
+                "At least one chain source must be set, either bitcoind or esplora".to_string(),
             ))
         },
     };
@@ -219,32 +219,32 @@ struct LSPS2ServiceTomlConfig {
     require_token: Option<String>,
 }

-impl Into<LSPS2ServiceConfig> for LSPS2ServiceTomlConfig {
-    fn into(self) -> LSPS2ServiceConfig {
-        match self {
-            LSPS2ServiceTomlConfig {
-                advertise_service,
-                channel_opening_fee_ppm,
-                channel_over_provisioning_ppm,
-                min_channel_opening_fee_msat,
-                min_channel_lifetime,
-                max_client_to_self_delay,
-                min_payment_size_msat,
-                max_payment_size_msat,
-                client_trusts_lsp,
-                require_token,
-            } => LSPS2ServiceConfig {
-                advertise_service,
-                channel_opening_fee_ppm,
-                channel_over_provisioning_ppm,
-                min_channel_opening_fee_msat,
-                min_channel_lifetime,
-                min_payment_size_msat,
-                max_client_to_self_delay,
-                max_payment_size_msat,
-                client_trusts_lsp,
-                require_token,
-            },
+impl From<LSPS2ServiceTomlConfig> for LSPS2ServiceConfig {
+    fn from(val: LSPS2ServiceTomlConfig) -> Self {
+        let LSPS2ServiceTomlConfig {
+            advertise_service,
+            channel_opening_fee_ppm,
+            channel_over_provisioning_ppm,
+            min_channel_opening_fee_msat,
+            min_channel_lifetime,
+            max_client_to_self_delay,
+            min_payment_size_msat,
+            max_payment_size_msat,
+            client_trusts_lsp,
+            require_token,
+        } = val;
+
+        Self {
+            advertise_service,
+            channel_opening_fee_ppm,
+            channel_over_provisioning_ppm,
+            min_channel_opening_fee_msat,
+            min_channel_lifetime,
+            min_payment_size_msat,
+            max_client_to_self_delay,
+            max_payment_size_msat,
+            client_trusts_lsp,
+            require_token,
         }
     }
 }
@@ -264,7 +264,7 @@ pub fn load_config<P: AsRef<Path>>(config_path: P) -> io::Result<Config> {
             format!("Config file contains invalid TOML format: {}", e),
         )
     })?;
-    Ok(Config::try_from(toml_config)?)
+    Config::try_from(toml_config)
 }

 #[cfg(test)]
@@ -315,7 +315,7 @@ mod tests {

         let mut bytes = [0u8; 32];
         let alias = "LDK Server";
-        bytes[..alias.as_bytes().len()].copy_from_slice(alias.as_bytes());
+        bytes[..alias.len()].copy_from_slice(alias.as_bytes());

         let config = load_config(storage_path.join(config_file_name)).unwrap();
         let expected = Config {
diff --git a/ldk-server/src/util/logger.rs b/ldk-server/src/util/logger.rs
index 66a236c..b584812 100644
--- a/ldk-server/src/util/logger.rs
+++ b/ldk-server/src/util/logger.rs
@@ -49,7 +49,7 @@ impl ServerLogger {
         });

         log::set_boxed_logger(Box::new(LoggerWrapper(Arc::clone(&logger))))
-            .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
+            .map_err(io::Error::other)?;
         log::set_max_level(level);
         Ok(logger)
     }
@@ -64,9 +64,7 @@ impl ServerLogger {
                 *file = new_file;
                 Ok(())
             },
-            Err(e) => {
-                Err(io::Error::new(io::ErrorKind::Other, format!("Failed to acquire lock: {}", e)))
-            },
+            Err(e) => Err(io::Error::other(format!("Failed to acquire lock: {e}"))),
         }
     }
 }
@@ -84,9 +82,9 @@ impl Log for ServerLogger {
         // Log to console
         let _ = match record.level() {
             Level::Error => {
-                write!(
+                writeln!(
                     io::stderr(),
-                    "[{} {} {}:{}] {}\n",
+                    "[{} {} {}:{}] {}",
                     format_timestamp(),
                     level_str,
                     record.target(),
@@ -95,9 +93,9 @@ impl Log for ServerLogger {
                 )
             },
             _ => {
-                write!(
+                writeln!(
                     io::stdout(),
-                    "[{} {} {}:{}] {}\n",
+                    "[{} {} {}:{}] {}",
                     format_timestamp(),
                     level_str,
                     record.target(),
@@ -109,9 +107,9 @@ impl Log for ServerLogger {

         // Log to file
         if let Ok(mut file) = self.file.lock() {
-            let _ = write!(
+            let _ = writeln!(
                 file,
-                "[{} {} {}:{}] {}\n",
+                "[{} {} {}:{}] {}",
                 format_timestamp(),
                 level_str,
                 record.target(),
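The config change above swaps a hand-written `Into` impl for a `From` impl, as suggested by clippy's `from_over_into` lint: implementing `From<A> for B` also provides `Into<B> for A` through the standard library's blanket impl, whereas implementing only `Into` gives you nothing in the other direction. A standalone sketch with hypothetical types:

// Hypothetical stand-ins for the TOML-facing and runtime config types.
struct TomlSettings {
    port: u16,
}

struct Settings {
    port: u16,
}

// Implementing `From` is enough: `Into<Settings> for TomlSettings` comes for
// free via std's blanket `impl<T, U: From<T>> Into<U> for T`.
impl From<TomlSettings> for Settings {
    fn from(val: TomlSettings) -> Self {
        let TomlSettings { port } = val;
        Self { port }
    }
}

fn main() {
    let from_style: Settings = Settings::from(TomlSettings { port: 3000 });
    let into_style: Settings = TomlSettings { port: 3000 }.into();
    assert_eq!(from_style.port, into_style.port);
}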
diff --git a/ldk-server/src/util/proto_adapter.rs b/ldk-server/src/util/proto_adapter.rs
index c645242..20a325b 100644
--- a/ldk-server/src/util/proto_adapter.rs
+++ b/ldk-server/src/util/proto_adapter.rs
@@ -1,6 +1,6 @@
 use crate::api::error::LdkServerError;
 use crate::api::error::LdkServerErrorCode::{
-    AuthError, InternalServerError, InvalidRequestError, LightningError,
+    InternalServerError, InvalidRequestError, LightningError,
 };
 use bytes::Bytes;
 use hex::prelude::*;
@@ -96,37 +96,33 @@ pub(crate) fn channel_config_to_proto(
 }

 pub(crate) fn payment_to_proto(payment: PaymentDetails) -> Payment {
-    match payment {
-        PaymentDetails {
-            id,
-            kind,
-            amount_msat,
-            fee_paid_msat,
-            direction,
-            status,
-            latest_update_timestamp,
-        } => Payment {
-            id: id.to_string(),
-            kind: Some(payment_kind_to_proto(kind)),
-            amount_msat,
-            fee_paid_msat,
-            direction: match direction {
-                PaymentDirection::Inbound => {
-                    ldk_server_protos::types::PaymentDirection::Inbound.into()
-                },
-                PaymentDirection::Outbound => {
-                    ldk_server_protos::types::PaymentDirection::Outbound.into()
-                },
-            },
-            status: match status {
-                PaymentStatus::Pending => ldk_server_protos::types::PaymentStatus::Pending.into(),
-                PaymentStatus::Succeeded => {
-                    ldk_server_protos::types::PaymentStatus::Succeeded.into()
-                },
-                PaymentStatus::Failed => ldk_server_protos::types::PaymentStatus::Failed.into(),
+    let PaymentDetails {
+        id,
+        kind,
+        amount_msat,
+        fee_paid_msat,
+        direction,
+        status,
+        latest_update_timestamp,
+    } = payment;
+
+    Payment {
+        id: id.to_string(),
+        kind: Some(payment_kind_to_proto(kind)),
+        amount_msat,
+        fee_paid_msat,
+        direction: match direction {
+            PaymentDirection::Inbound => ldk_server_protos::types::PaymentDirection::Inbound.into(),
+            PaymentDirection::Outbound => {
+                ldk_server_protos::types::PaymentDirection::Outbound.into()
             },
-            latest_update_timestamp,
         },
+        status: match status {
+            PaymentStatus::Pending => ldk_server_protos::types::PaymentStatus::Pending.into(),
+            PaymentStatus::Succeeded => ldk_server_protos::types::PaymentStatus::Succeeded.into(),
+            PaymentStatus::Failed => ldk_server_protos::types::PaymentStatus::Failed.into(),
+        },
+        latest_update_timestamp,
     }
 }

@@ -378,6 +374,7 @@ pub(crate) fn pending_sweep_balance_to_proto(
     }
 }

+#[allow(clippy::too_many_arguments)]
 pub(crate) fn forwarded_payment_to_proto(
     prev_channel_id: ChannelId, next_channel_id: ChannelId,
     prev_user_channel_id: Option<UserChannelId>, next_user_channel_id: Option<UserChannelId>,
@@ -437,14 +434,12 @@ pub(crate) fn proto_to_bolt11_description(
 pub(crate) fn to_error_response(ldk_error: LdkServerError) -> (ErrorResponse, StatusCode) {
     let error_code = match ldk_error.error_code {
         InvalidRequestError => ErrorCode::InvalidRequestError,
-        AuthError => ErrorCode::AuthError,
         LightningError => ErrorCode::LightningError,
         InternalServerError => ErrorCode::InternalServerError,
     } as i32;

     let status = match ldk_error.error_code {
         InvalidRequestError => StatusCode::BAD_REQUEST,
-        AuthError => StatusCode::UNAUTHORIZED,
         LightningError => StatusCode::INTERNAL_SERVER_ERROR,
         InternalServerError => StatusCode::INTERNAL_SERVER_ERROR,
     };
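The `payment_to_proto` rewrite above (and the similar one in `config.rs`) replaces a single-arm `match` on an irrefutable struct pattern with a plain `let` destructuring, which clippy flags as unnecessary; the two forms are equivalent, the latter just drops a level of nesting. A standalone sketch with a hypothetical struct:

// Hypothetical record type standing in for a payment-details struct.
struct Record {
    id: u32,
    amount_msat: Option<u64>,
}

fn describe_with_match(r: Record) -> String {
    // Single-arm match on an irrefutable pattern: works, but adds nesting.
    match r {
        Record { id, amount_msat } => format!("{id}: {amount_msat:?}"),
    }
}

fn describe_with_let(r: Record) -> String {
    // Equivalent: destructure with `let` and build the result directly.
    let Record { id, amount_msat } = r;
    format!("{id}: {amount_msat:?}")
}

fn main() {
    let a = describe_with_match(Record { id: 7, amount_msat: Some(1_000) });
    let b = describe_with_let(Record { id: 7, amount_msat: Some(1_000) });
    assert_eq!(a, b);
}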