diff --git a/mediarepo-daemon/mediarepo-core/src/fs/thumbnail_store.rs b/mediarepo-daemon/mediarepo-core/src/fs/thumbnail_store.rs
index 0ac2a70..c1299f4 100644
--- a/mediarepo-daemon/mediarepo-core/src/fs/thumbnail_store.rs
+++ b/mediarepo-daemon/mediarepo-core/src/fs/thumbnail_store.rs
@@ -71,7 +71,7 @@ impl ThumbnailStore {
         let name = file_name.to_string_lossy();
         let (height, width) = name
-            .split_once("-")
+            .split_once('-')
             .and_then(|(height, width)| {
                 Some((height.parse::<u32>().ok()?, width.parse::<u32>().ok()?))
             })
diff --git a/mediarepo-daemon/mediarepo-core/src/settings/logging.rs b/mediarepo-daemon/mediarepo-core/src/settings/logging.rs
index 513b7a2..1ab3682 100644
--- a/mediarepo-daemon/mediarepo-core/src/settings/logging.rs
+++ b/mediarepo-daemon/mediarepo-core/src/settings/logging.rs
@@ -34,6 +34,7 @@ pub enum LogLevel {
     Trace,
 }
 
+#[allow(clippy::from_over_into)]
 impl Into<Option<Level>> for LogLevel {
     fn into(self) -> Option<Level> {
         match self {
diff --git a/mediarepo-daemon/mediarepo-core/src/settings/mod.rs b/mediarepo-daemon/mediarepo-core/src/settings/mod.rs
index 3ce242b..26362d5 100644
--- a/mediarepo-daemon/mediarepo-core/src/settings/mod.rs
+++ b/mediarepo-daemon/mediarepo-core/src/settings/mod.rs
@@ -1,5 +1,5 @@
 use std::fs;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use config::{Config, FileFormat};
 use serde::{Deserialize, Serialize};
@@ -24,7 +24,7 @@ pub struct Settings {
 }
 
 impl Settings {
-    pub fn read(root: &PathBuf) -> RepoResult<Self> {
+    pub fn read(root: &Path) -> RepoResult<Self> {
         let settings = Config::builder()
             .add_source(config::File::from_str(
                 &*Settings::default().to_toml_string()?,
@@ -44,7 +44,7 @@
         settings_main.server.tcp.enabled = true;
         settings_main.server.tcp.port = PortSetting::Range(settings_v1.port_range);
         settings_main.server.tcp.listen_address = settings_v1.listen_address;
-        settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store.into();
+        settings_main.paths.thumbnail_directory = settings_v1.thumbnail_store;
         settings_main.paths.database_directory = PathBuf::from(settings_v1.database_path)
             .parent()
             .map(|p| p.to_string_lossy().to_string())
@@ -69,7 +69,7 @@
         Ok(string)
     }
 
-    pub fn save(&self, root: &PathBuf) -> RepoResult<()> {
+    pub fn save(&self, root: &Path) -> RepoResult<()> {
         let string = toml::to_string_pretty(&self)?;
 
         fs::write(root.join("repo.toml"), string.into_bytes())?;
diff --git a/mediarepo-daemon/mediarepo-core/src/settings/paths.rs b/mediarepo-daemon/mediarepo-core/src/settings/paths.rs
index 162e096..830ecfc 100644
--- a/mediarepo-daemon/mediarepo-core/src/settings/paths.rs
+++ b/mediarepo-daemon/mediarepo-core/src/settings/paths.rs
@@ -1,4 +1,4 @@
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use serde::{Deserialize, Serialize};
 
@@ -21,27 +21,27 @@ impl Default for PathSettings {
 
 impl PathSettings {
     #[inline]
-    pub fn database_dir(&self, root: &PathBuf) -> PathBuf {
+    pub fn database_dir(&self, root: &Path) -> PathBuf {
         root.join(&self.database_directory)
     }
 
     #[inline]
-    pub fn files_dir(&self, root: &PathBuf) -> PathBuf {
+    pub fn files_dir(&self, root: &Path) -> PathBuf {
         root.join(&self.files_directory)
     }
 
     #[inline]
-    pub fn thumbs_dir(&self, root: &PathBuf) -> PathBuf {
+    pub fn thumbs_dir(&self, root: &Path) -> PathBuf {
         root.join(&self.thumbnail_directory)
     }
 
     #[inline]
-    pub fn db_file_path(&self, root: &PathBuf) -> PathBuf {
+    pub fn db_file_path(&self, root: &Path) -> PathBuf {
         self.database_dir(root).join("repo.db")
     }
 
     #[inline]
-    pub fn frontend_state_file_path(&self, root: &PathBuf) -> PathBuf {
+    pub fn frontend_state_file_path(&self, root: &Path) -> PathBuf {
         self.database_dir(root).join("frontend-state.json")
     }
 }
diff --git a/mediarepo-daemon/mediarepo-core/src/tracing_layer_list.rs b/mediarepo-daemon/mediarepo-core/src/tracing_layer_list.rs
index bdf13f8..53c1f00 100644
--- a/mediarepo-daemon/mediarepo-core/src/tracing_layer_list.rs
+++ b/mediarepo-daemon/mediarepo-core/src/tracing_layer_list.rs
@@ -7,9 +7,15 @@ use tracing_subscriber::Layer;
 
 pub struct DynLayerList<S>(Vec<Box<dyn Layer<S> + Send + Sync + 'static>>);
 
+impl<S> Default for DynLayerList<S> {
+    fn default() -> Self {
+        Self(Vec::new())
+    }
+}
+
 impl<S> DynLayerList<S> {
     pub fn new() -> Self {
-        Self(Vec::new())
+        Self::default()
     }
 
     pub fn iter(&self) -> Iter<'_, Box<dyn Layer<S> + Send + Sync>> {
diff --git a/mediarepo-daemon/mediarepo-core/src/traits.rs b/mediarepo-daemon/mediarepo-core/src/traits.rs
deleted file mode 100644
index eab11ee..0000000
--- a/mediarepo-daemon/mediarepo-core/src/traits.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-use async_trait::async_trait;
-
-#[async_trait]
-pub trait AsyncTryFrom<T> {
-    type Error;
-    fn async_try_from(other: T) -> Result<Self, Self::Error>;
-}
diff --git a/mediarepo-daemon/mediarepo-core/src/utils.rs b/mediarepo-daemon/mediarepo-core/src/utils.rs
index c65a638..04d6b89 100644
--- a/mediarepo-daemon/mediarepo-core/src/utils.rs
+++ b/mediarepo-daemon/mediarepo-core/src/utils.rs
@@ -47,7 +47,7 @@ pub async fn get_folder_size(path: PathBuf) -> RepoResult<u64> {
             }
         }
     }
-    let futures = all_files.into_iter().map(|f| read_file_size(f));
+    let futures = all_files.into_iter().map(read_file_size);
     let results = future::join_all(futures).await;
     let size = results.into_iter().filter_map(|r| r.ok()).sum();
 
diff --git a/mediarepo-daemon/mediarepo-database/src/queries/analysis.rs b/mediarepo-daemon/mediarepo-database/src/queries/analysis.rs
index 9c169b6..897ea0c 100644
--- a/mediarepo-daemon/mediarepo-database/src/queries/analysis.rs
+++ b/mediarepo-daemon/mediarepo-database/src/queries/analysis.rs
@@ -30,7 +30,7 @@ pub async fn get_all_counts(db: &DatabaseConnection) -> RepoResult<Counts> {
     ))
     .one(db)
     .await?
-    .ok_or(RepoError::from("could not retrieve metadata from database"))?;
+    .ok_or_else(|| RepoError::from("could not retrieve metadata from database"))?;
 
     Ok(counts)
 }
diff --git a/mediarepo-daemon/mediarepo-database/src/queries/tags.rs b/mediarepo-daemon/mediarepo-database/src/queries/tags.rs
index af136fa..5683254 100644
--- a/mediarepo-daemon/mediarepo-database/src/queries/tags.rs
+++ b/mediarepo-daemon/mediarepo-database/src/queries/tags.rs
@@ -55,7 +55,7 @@ fn vec_to_query_list(input: Vec<i64>) -> String {
     let mut entries = input
         .into_iter()
         .fold(String::new(), |acc, val| format!("{}{},", acc, val));
-    if entries.len() > 0 {
+    if !entries.is_empty() {
         entries.remove(entries.len() - 1);
     }
 
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/file/mod.rs b/mediarepo-daemon/mediarepo-logic/src/dao/file/mod.rs
index 93f012c..5e388df 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/file/mod.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/file/mod.rs
@@ -93,7 +93,7 @@ impl FileDao {
             .all(&self.ctx.db)
             .await?
             .into_iter()
-            .map(|m| FileMetadataDto::new(m))
+            .map(FileMetadataDto::new)
             .collect();
 
         Ok(metadata)
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/file/update.rs b/mediarepo-daemon/mediarepo-logic/src/dao/file/update.rs
index 184bf1b..801efdc 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/file/update.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/file/update.rs
@@ -22,8 +22,8 @@ impl FileDao {
         let trx = self.ctx.db.begin().await?;
         let model = file::ActiveModel {
             id: Set(update_dto.id),
-            cd_id: update_dto.cd_id.map(|v| Set(v)).unwrap_or(NotSet),
-            mime_type: update_dto.mime_type.map(|v| Set(v)).unwrap_or(NotSet),
+            cd_id: update_dto.cd_id.map(Set).unwrap_or(NotSet),
+            mime_type: update_dto.mime_type.map(Set).unwrap_or(NotSet),
             status: update_dto.status.map(|v| Set(v as i32)).unwrap_or(NotSet),
         };
         let file_model = model.update(&trx).await?;
@@ -62,8 +62,8 @@ impl FileDao {
         sizes: I,
     ) -> RepoResult<Vec<ThumbnailDto>> {
         let bytes = self.get_bytes(file.cd()).await?;
-        let mime_type = mime::Mime::from_str(file.mime_type())
-            .unwrap_or_else(|_| mime::APPLICATION_OCTET_STREAM);
+        let mime_type =
+            mime::Mime::from_str(file.mime_type()).unwrap_or(mime::APPLICATION_OCTET_STREAM);
         let thumbnails =
             thumbnailer::create_thumbnails(Cursor::new(bytes), mime_type.clone(), sizes)?;
         let mut dtos = Vec::new();
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/job/state.rs b/mediarepo-daemon/mediarepo-logic/src/dao/job/state.rs
index a8aab72..444dda3 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/job/state.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/job/state.rs
@@ -40,7 +40,7 @@ impl JobDao {
     }
 }
 
-fn build_state_filters(states: &Vec<UpsertJobStateDto>) -> Condition {
+fn build_state_filters(states: &[UpsertJobStateDto]) -> Condition {
     states
         .iter()
         .map(|s| Condition::all().add(job_state::Column::JobType.eq(s.job_type)))
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/sorting_preset/add.rs b/mediarepo-daemon/mediarepo-logic/src/dao/sorting_preset/add.rs
index 39a4b96..184031a 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/sorting_preset/add.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/sorting_preset/add.rs
@@ -122,7 +122,7 @@ async fn add_keys(
 
 async fn find_sort_keys(
     trx: &DatabaseTransaction,
-    keys: &Vec<AddSortKeyDto>,
+    keys: &[AddSortKeyDto],
 ) -> RepoResult> {
     if keys.is_empty() {
         return Ok(vec![]);
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/tag/all_for_cds_map.rs b/mediarepo-daemon/mediarepo-logic/src/dao/tag/all_for_cds_map.rs
index 1c00084..d6d7761 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/tag/all_for_cds_map.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/tag/all_for_cds_map.rs
@@ -77,14 +77,12 @@ fn create_cd_tag_map(
     )>,
     tag_id_map: HashMap<i64, TagDto>,
 ) -> HashMap<Vec<u8>, Vec<TagDto>> {
-    let cd_tag_map = tag_cd_entries
+    tag_cd_entries
         .into_iter()
         .filter_map(|(t, cd)| Some((cd?, tag_id_map.get(&t.tag_id)?.clone())))
         .sorted_by_key(|(cd, _)| cd.id)
         .group_by(|(cd, _)| cd.descriptor.to_owned())
         .into_iter()
         .map(|(key, group)| (key, group.map(|(_, t)| t).collect::<Vec<TagDto>>()))
-        .collect();
-
-    cd_tag_map
+        .collect::<HashMap<Vec<u8>, Vec<TagDto>>>()
 }
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/tag/by_name.rs b/mediarepo-daemon/mediarepo-logic/src/dao/tag/by_name.rs
index 1ae99bc..fd40693 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/tag/by_name.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/tag/by_name.rs
@@ -45,11 +45,12 @@ fn name_query_to_condition(query: TagByNameQuery) -> Option<Condition> {
     let TagByNameQuery { namespace, name } = query;
     let mut condition = Condition::all();
 
+    #[allow(clippy::question_mark)]
     if !name.ends_with('*') {
         condition = condition.add(tag::Column::Name.eq(name))
     } else if name.len() > 1 {
         condition =
-            condition.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches("*"))))
+            condition.add(tag::Column::Name.like(&*format!("{}%", name.trim_end_matches('*'))))
     } else if namespace.is_none() {
         return None;
     }
diff --git a/mediarepo-daemon/mediarepo-logic/src/dao/tag/mappings.rs b/mediarepo-daemon/mediarepo-logic/src/dao/tag/mappings.rs
index 2289c1f..6ddea3f 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dao/tag/mappings.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dao/tag/mappings.rs
@@ -58,12 +58,12 @@ impl TagDao {
 
 async fn get_existing_mappings(
     trx: &DatabaseTransaction,
-    cd_ids: &Vec<i64>,
-    tag_ids: &Vec<i64>,
+    cd_ids: &[i64],
+    tag_ids: &[i64],
 ) -> RepoResult<Vec<(i64, i64)>> {
     let existing_mappings: Vec<(i64, i64)> = content_descriptor_tag::Entity::find()
-        .filter(content_descriptor_tag::Column::CdId.is_in(cd_ids.clone()))
-        .filter(content_descriptor_tag::Column::TagId.is_in(tag_ids.clone()))
+        .filter(content_descriptor_tag::Column::CdId.is_in(cd_ids.to_vec()))
+        .filter(content_descriptor_tag::Column::TagId.is_in(tag_ids.to_vec()))
         .all(trx)
         .await?
         .into_iter()
diff --git a/mediarepo-daemon/mediarepo-logic/src/dto/file.rs b/mediarepo-daemon/mediarepo-logic/src/dto/file.rs
index ced7cca..bcbb702 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dto/file.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dto/file.rs
@@ -75,7 +75,7 @@ pub struct AddFileDto {
     pub name: Option<String>,
 }
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct UpdateFileDto {
     pub id: i64,
     pub cd_id: Option<i64>,
@@ -83,17 +83,6 @@ pub struct UpdateFileDto {
     pub status: Option<FileStatus>,
 }
 
-impl Default for UpdateFileDto {
-    fn default() -> Self {
-        Self {
-            id: 0,
-            cd_id: None,
-            mime_type: None,
-            status: None,
-        }
-    }
-}
-
 #[derive(Copy, Clone, Debug)]
 pub enum FileStatus {
     Imported = 10,
diff --git a/mediarepo-daemon/mediarepo-logic/src/dto/sorting_preset.rs b/mediarepo-daemon/mediarepo-logic/src/dto/sorting_preset.rs
index f83eef4..f2aeb2b 100644
--- a/mediarepo-daemon/mediarepo-logic/src/dto/sorting_preset.rs
+++ b/mediarepo-daemon/mediarepo-logic/src/dto/sorting_preset.rs
@@ -84,7 +84,7 @@ impl KeyType {
     }
 
     pub fn to_number(&self) -> i32 {
-        self.clone() as i32
+        *self as i32
     }
 }
 
diff --git a/mediarepo-daemon/mediarepo-socket/src/lib.rs b/mediarepo-daemon/mediarepo-socket/src/lib.rs
index efd3863..49427f6 100644
--- a/mediarepo-daemon/mediarepo-socket/src/lib.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/lib.rs
@@ -30,8 +30,9 @@ pub fn start_tcp_server(
                 return Err(RepoError::PortUnavailable);
             }
         }
-        PortSetting::Range((l, r)) => port_check::free_local_port_in_range(*l, *r)
-            .ok_or_else(|| RepoError::PortUnavailable)?,
+        PortSetting::Range((l, r)) => {
+            port_check::free_local_port_in_range(*l, *r).ok_or(RepoError::PortUnavailable)?
+        }
     };
     let ip = settings.server.tcp.listen_address.to_owned();
     let address = SocketAddr::new(ip, port);
diff --git a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/mod.rs b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/mod.rs
index 2c02703..257a422 100644
--- a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/mod.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/mod.rs
@@ -151,7 +151,7 @@ impl FilesNamespace {
             content: bytes,
             mime_type: metadata
                 .mime_type
-                .unwrap_or(String::from("application/octet-stream")),
+                .unwrap_or_else(|| String::from("application/octet-stream")),
             creation_time: metadata.creation_time,
             change_time: metadata.change_time,
             name: Some(metadata.name),
diff --git a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/searching.rs b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/searching.rs
index fb78e4d..a3aab2a 100644
--- a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/searching.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/searching.rs
@@ -69,10 +69,10 @@ fn build_filters_from_expressions(
                     }
                 }
             };
-            if filters.len() > 0 {
-                Some(filters)
-            } else {
+            if filters.is_empty() {
                 None
+            } else {
+                Some(filters)
             }
         })
         .collect()
@@ -92,7 +92,7 @@ fn map_tag_query_to_filter(
     query: TagQuery,
     tag_id_map: &HashMap<String, i64>,
 ) -> Option<FilterProperty> {
-    if query.tag.ends_with("*") {
+    if query.tag.ends_with('*') {
         map_wildcard_tag_to_filter(query, tag_id_map)
     } else {
         map_tag_to_filter(query, tag_id_map)
@@ -103,7 +103,7 @@ fn map_wildcard_tag_to_filter(
     query: TagQuery,
     tag_id_map: &HashMap<String, i64>,
 ) -> Option<FilterProperty> {
-    let filter_tag = query.tag.trim_end_matches("*");
+    let filter_tag = query.tag.trim_end_matches('*');
     let relevant_ids = tag_id_map
         .iter()
         .filter_map(|(name, id)| {
@@ -115,15 +115,15 @@ fn map_wildcard_tag_to_filter(
         })
         .collect::<Vec<i64>>();
 
-    if relevant_ids.len() > 0 {
+    if relevant_ids.is_empty() {
+        None
+    } else {
         let comparator = if query.negate {
             IsNot(relevant_ids)
         } else {
             Is(relevant_ids)
         };
 
         Some(FilterProperty::TagWildcardIds(comparator))
-    } else {
-        None
     }
 }
diff --git a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/sorting.rs b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/sorting.rs
index f0526c7..80c87bf 100644
--- a/mediarepo-daemon/mediarepo-socket/src/namespaces/files/sorting.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/namespaces/files/sorting.rs
@@ -71,7 +71,7 @@ async fn build_sort_context(
             mime_type: file.mime_type().to_owned(),
             namespaces: cid_nsp
                 .remove(&file.cd_id())
-                .unwrap_or(HashMap::with_capacity(0)),
+                .unwrap_or_else(|| HashMap::with_capacity(0)),
             tag_count: cid_tag_counts.remove(&file.cd_id()).unwrap_or(0),
             import_time: metadata.import_time().to_owned(),
             create_time: metadata.import_time().to_owned(),
@@ -177,10 +177,7 @@ fn adjust_for_dir(ordering: Ordering, direction: &SortDirection) -> Ordering {
 }
 
 fn compare_tag_lists(list_a: &Vec<String>, list_b: &Vec<String>) -> Ordering {
-    let first_diff = list_a
-        .into_iter()
-        .zip(list_b.into_iter())
-        .find(|(a, b)| *a != *b);
+    let first_diff = list_a.iter().zip(list_b.iter()).find(|(a, b)| *a != *b);
 
     if let Some(diff) = first_diff {
         if let (Some(num_a), Some(num_b)) = (diff.0.parse::().ok(), diff.1.parse::().ok())
         {
diff --git a/mediarepo-daemon/mediarepo-socket/src/namespaces/repo.rs b/mediarepo-daemon/mediarepo-socket/src/namespaces/repo.rs
index 05d605f..8dc6fee 100644
--- a/mediarepo-daemon/mediarepo-socket/src/namespaces/repo.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/namespaces/repo.rs
@@ -95,7 +95,7 @@ async fn get_frontend_state_path(ctx: &Context) -> IPCResult<PathBuf> {
     let data = ctx.data.read().await;
     let settings = data.get::().unwrap();
     let repo_path = data.get::().unwrap();
-    let state_path = settings.paths.frontend_state_file_path(&repo_path);
+    let state_path = settings.paths.frontend_state_file_path(repo_path);
 
     Ok(state_path)
 }
diff --git a/mediarepo-daemon/mediarepo-socket/src/utils.rs b/mediarepo-daemon/mediarepo-socket/src/utils.rs
index 2c3fc57..1427384 100644
--- a/mediarepo-daemon/mediarepo-socket/src/utils.rs
+++ b/mediarepo-daemon/mediarepo-socket/src/utils.rs
@@ -33,11 +33,7 @@ pub async fn file_by_identifier(identifier: FileIdentifier, repo: &Repo) -> Repo
 pub async fn cd_by_identifier(identifier: FileIdentifier, repo: &Repo) -> RepoResult<Vec<u8>> {
     match identifier {
         FileIdentifier::ID(id) => {
-            let file = repo
-                .file()
-                .by_id(id)
-                .await?
-                .ok_or_else(|| "Thumbnail not found")?;
+            let file = repo.file().by_id(id).await?.ok_or("Thumbnail not found")?;
             Ok(file.cd().to_owned())
         }
         FileIdentifier::CD(cd) => decode_content_descriptor(cd),
diff --git a/mediarepo-daemon/src/logging.rs b/mediarepo-daemon/src/logging.rs
index 74e97f5..45e0eca 100644
--- a/mediarepo-daemon/src/logging.rs
+++ b/mediarepo-daemon/src/logging.rs
@@ -1,5 +1,5 @@
 use std::fs;
-use std::path::PathBuf;
+use std::path::Path;
 
 use console_subscriber::ConsoleLayer;
 use opentelemetry::sdk::Resource;
@@ -24,7 +24,7 @@ use mediarepo_core::tracing_layer_list::DynLayerList;
 #[allow(dyn_drop)]
 pub type DropGuard = Box<dyn Drop + Send + Sync>;
 
-pub fn init_tracing(repo_path: &PathBuf, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
+pub fn init_tracing(repo_path: &Path, log_cfg: &LoggingSettings) -> Vec<DropGuard> {
     LogTracer::init().expect("failed to subscribe to log entries");
     let log_path = repo_path.join("logs");
     let mut guards = Vec::new();
@@ -97,11 +97,11 @@ fn add_telemetry_layer(log_cfg: &LoggingSettings, layer_list: &mut DynLayerList<
 
 fn add_app_log_layer(
     log_cfg: &LoggingSettings,
-    log_path: &PathBuf,
+    log_path: &Path,
     guards: &mut Vec<DropGuard>,
     layer_list: &mut DynLayerList<Registry>,
 ) {
-    let (app_log_writer, guard) = get_application_log_writer(&log_path);
+    let (app_log_writer, guard) = get_application_log_writer(log_path);
     guards.push(Box::new(guard) as DropGuard);
 
     let app_log_layer = fmt::layer()
@@ -115,11 +115,11 @@
 
 fn add_bromine_layer(
     log_cfg: &LoggingSettings,
-    log_path: &PathBuf,
+    log_path: &Path,
     guards: &mut Vec<DropGuard>,
     layer_list: &mut DynLayerList<Registry>,
 ) {
-    let (bromine_writer, guard) = get_bromine_log_writer(&log_path);
+    let (bromine_writer, guard) = get_bromine_log_writer(log_path);
     guards.push(Box::new(guard) as DropGuard);
 
     let bromine_layer = fmt::layer()
@@ -133,11 +133,11 @@
 
 fn add_sql_layer(
     log_cfg: &LoggingSettings,
-    log_path: &PathBuf,
+    log_path: &Path,
     guards: &mut Vec<DropGuard>,
     layer_list: &mut DynLayerList<Registry>,
 ) {
-    let (sql_writer, guard) = get_sql_log_writer(&log_path);
+    let (sql_writer, guard) = get_sql_log_writer(log_path);
     guards.push(Box::new(guard) as DropGuard);
 
     let sql_layer = fmt::layer()
@@ -161,18 +161,18 @@ fn add_stdout_layer(guards: &mut Vec<DropGuard>, layer_list: &mut DynLayerList<
                 .parse::<filter::Targets>()
-                .unwrap_or(
+                .unwrap_or_else(|_| {
                     filter::Targets::new()
                         .with_default(Level::INFO)
-                        .with_target("sqlx", Level::WARN),
-                ),
+                        .with_target("sqlx", Level::WARN)
+                }),
         );
     layer_list.add(stdout_layer);
 }
 
-fn get_sql_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_sql_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
     tracing_appender::non_blocking(
         rolling_file::BasicRollingFileAppender::new(
             log_path.join("sql.log"),
@@ -183,7 +183,7 @@ fn get_sql_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
     )
 }
 
-fn get_bromine_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_bromine_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
     tracing_appender::non_blocking(
         rolling_file::BasicRollingFileAppender::new(
             log_path.join("bromine.log"),
@@ -194,7 +194,7 @@ fn get_bromine_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
     )
 }
 
-fn get_application_log_writer(log_path: &PathBuf) -> (NonBlocking, WorkerGuard) {
+fn get_application_log_writer(log_path: &Path) -> (NonBlocking, WorkerGuard) {
     tracing_appender::non_blocking(
         rolling_file::BasicRollingFileAppender::new(
             log_path.join("repo.log"),
diff --git a/mediarepo-daemon/src/main.rs b/mediarepo-daemon/src/main.rs
index e628c99..b4e085a 100644
--- a/mediarepo-daemon/src/main.rs
+++ b/mediarepo-daemon/src/main.rs
@@ -1,6 +1,6 @@
 use std::env;
 use std::iter::FromIterator;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::Duration;
 
@@ -242,7 +242,7 @@ async fn init(opt: Opt, force: bool) -> RepoResult<()> {
     Ok(())
 }
 
-async fn clean_old_connection_files(root: &PathBuf) -> RepoResult<()> {
+async fn clean_old_connection_files(root: &Path) -> RepoResult<()> {
    let paths = ["repo.tcp", "repo.sock"];

    for path in paths {
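
Note (not part of the patch): the change this diff repeats most often is loosening `&PathBuf` parameters to `&Path`, which is what clippy's `ptr_arg` lint suggests. Below is a minimal, self-contained sketch of why the borrowed form is the more flexible signature; the `repo_root` and `db_file_path` names are illustrative only and do not come from the repository. The same reasoning applies to the `&Vec<T>` -> `&[T]` parameter changes elsewhere in the diff.

use std::path::{Path, PathBuf};

// Flexible: mirrors the `root: &Path` signatures introduced by the patch.
// Any path-like borrow works via deref coercion.
fn db_file_path(repo_root: &Path) -> PathBuf {
    repo_root.join("db").join("repo.db")
}

// Restrictive: callers must hold an owned PathBuf; this is the shape
// clippy::ptr_arg warns about.
fn db_file_path_strict(repo_root: &PathBuf) -> PathBuf {
    repo_root.join("db").join("repo.db")
}

fn main() {
    let owned: PathBuf = PathBuf::from("/tmp/repo");

    // &PathBuf coerces to &Path, and a plain &Path works too.
    println!("{}", db_file_path(&owned).display());
    println!("{}", db_file_path(Path::new("/tmp/other")).display());

    // Only the owned value satisfies the &PathBuf signature.
    println!("{}", db_file_path_strict(&owned).display());
    // db_file_path_strict(Path::new("/tmp/other")); // would not compile
}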