Code cleanup (#148)

Fixed almost all clippy warnings.
Test: cargo test && cargo clippy
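
Almost the entire diff applies a few recurring clippy fixes rather than behavior changes. As a rough illustration (a stand-alone sketch with made-up values, not code taken from this repository), the three patterns that appear most often below are lazily built fallbacks, is_empty() instead of length comparisons, and while let instead of loop/match on a channel receiver:

    use std::sync::mpsc;

    fn main() {
        // Lazily evaluated fallbacks: ok_or_else / unwrap_or_else instead of
        // constructing the fallback value eagerly.
        let maybe_name: Option<&str> = None;
        let _name = maybe_name.ok_or_else(|| String::from("missing name"));

        // clippy::len_zero: prefer is_empty() over comparing len() against zero.
        let items = vec![1u32, 2, 3];
        if !items.is_empty() {
            println!("{} items", items.len());
        }

        // clippy::while_let_loop: replace
        // `loop { match rx.recv() { Ok(v) => ..., Err(_) => break } }`
        // with a while-let over the channel receiver.
        let (tx, rx) = mpsc::channel();
        for i in &items {
            tx.send(*i).unwrap();
        }
        drop(tx); // close the channel so the loop below terminates
        while let Ok(value) = rx.recv() {
            println!("received {}", value);
        }
    }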
pmphfm 2021-10-19 19:31:17 -07:00 committed by GitHub
parent d01583b406
commit e2bf97db99
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
23 changed files with 68 additions and 94 deletions

View file

@@ -3,7 +3,6 @@ use diesel::prelude::*;
 use log::{error, info};
 use std::thread;
 use std::time;
-use ureq;

 use super::*;
 use crate::db::DB;

View file

@@ -1,12 +1,6 @@
 use anyhow::*;
-use ape;
-use id3;
 use lewton::inside_ogg::OggStreamReader;
 use log::error;
-use metaflac;
-use mp3_duration;
-use mp4ameta;
-use opus_headers;
 use regex::Regex;
 use std::fs;
 use std::path::Path;
@@ -52,13 +46,13 @@ impl From<id3::Tag> for SongTags {
         let label = tag.get_text("TPUB");

         SongTags {
+            disc_number,
+            track_number,
+            title,
+            duration,
             artist,
             album_artist,
             album,
-            title,
-            duration,
-            disc_number,
-            track_number,
             year,
             has_artwork,
             lyricist,
@@ -296,7 +290,7 @@ fn read_flac(path: &Path) -> Result<SongTags> {
     let tag = metaflac::Tag::read_from_path(path)?;
     let vorbis = tag
         .vorbis_comments()
-        .ok_or(anyhow!("Missing Vorbis comments"))?;
+        .ok_or_else(|| anyhow!("Missing Vorbis comments"))?;
     let disc_number = vorbis
         .get("DISCNUMBER")
         .and_then(|d| d[0].parse::<u32>().ok());

View file

@@ -1,4 +1,3 @@
-use diesel;
 use log::error;
 use std::sync::{Arc, Condvar, Mutex};
 use std::time::Duration;
@@ -31,7 +30,12 @@ impl Index {
             db,
             vfs_manager,
             settings_manager,
-            pending_reindex: Arc::new((Mutex::new(false), Condvar::new())),
+            pending_reindex: Arc::new((
+                #[allow(clippy::clippy::mutex_atomic)]
+                Mutex::new(false),
+                Condvar::new(),
+            )),
         };

         let commands_index = index.clone();

View file

@@ -1,5 +1,4 @@
 use anyhow::*;
-use diesel;
 use diesel::dsl::sql;
 use diesel::prelude::*;
 use diesel::sql_types;
@@ -89,7 +88,7 @@ impl Index {
             .virtual_to_real(virtual_path)
             .map_err(|_| QueryError::VFSPathNotFound)?;
         let song_path_filter = {
-            let mut path_buf = real_path.clone();
+            let mut path_buf = real_path;
             path_buf.push("%");
             path_buf.as_path().to_string_lossy().into_owned()
         };

View file

@@ -1,5 +1,4 @@
 use anyhow::*;
-use diesel;
 use diesel::prelude::*;
 use rayon::prelude::*;
 use std::path::Path;

View file

@@ -24,11 +24,8 @@ impl Collector {
     }

     pub fn collect(&self) {
-        loop {
-            match self.receiver.recv() {
-                Ok(directory) => self.collect_directory(directory),
-                Err(_) => break,
-            }
+        while let Ok(directory) = self.receiver.recv() {
+            self.collect_directory(directory);
         }
     }

View file

@@ -1,6 +1,5 @@
 use anyhow::*;
 use crossbeam_channel::Receiver;
-use diesel;
 use diesel::prelude::*;
 use log::error;
@@ -57,19 +56,16 @@ impl Inserter {
         let new_directories = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
         let new_songs = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
         Self {
+            db,
             receiver,
             new_directories,
             new_songs,
-            db,
         }
     }

     pub fn insert(&mut self) {
-        loop {
-            match self.receiver.recv() {
-                Ok(item) => self.insert_item(item),
-                Err(_) => break,
-            }
+        while let Ok(item) = self.receiver.recv() {
+            self.insert_item(item);
         }
     }
@@ -91,34 +87,26 @@ impl Inserter {
     }

     fn flush_directories(&mut self) {
-        if self
-            .db
-            .connect()
-            .and_then(|connection| {
-                diesel::insert_into(directories::table)
-                    .values(&self.new_directories)
-                    .execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
-                    .map_err(Error::new)
-            })
-            .is_err()
-        {
+        let res = self.db.connect().and_then(|connection| {
+            diesel::insert_into(directories::table)
+                .values(&self.new_directories)
+                .execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
+                .map_err(Error::new)
+        });
+        if res.is_err() {
             error!("Could not insert new directories in database");
         }
         self.new_directories.clear();
     }

     fn flush_songs(&mut self) {
-        if self
-            .db
-            .connect()
-            .and_then(|connection| {
-                diesel::insert_into(songs::table)
-                    .values(&self.new_songs)
-                    .execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
-                    .map_err(Error::new)
-            })
-            .is_err()
-        {
+        let res = self.db.connect().and_then(|connection| {
+            diesel::insert_into(songs::table)
+                .values(&self.new_songs)
+                .execute(&*connection) // TODO https://github.com/diesel-rs/diesel/issues/1822
+                .map_err(Error::new)
+        });
+        if res.is_err() {
             error!("Could not insert new songs in database");
         }
         self.new_songs.clear();
@@ -127,10 +115,10 @@ impl Inserter {
 impl Drop for Inserter {
     fn drop(&mut self) {
-        if self.new_directories.len() > 0 {
+        if !self.new_directories.is_empty() {
             self.flush_directories();
         }
-        if self.new_songs.len() > 0 {
+        if !self.new_songs.is_empty() {
             self.flush_songs();
         }
     }

View file

@@ -49,7 +49,7 @@ impl Traverser {
         let num_threads = std::env::var_os(key)
             .map(|v| v.to_string_lossy().to_string())
             .and_then(|v| usize::from_str(&v).ok())
-            .unwrap_or(min(num_cpus::get(), 4));
+            .unwrap_or_else(|| min(num_cpus::get(), 4));
         info!("Browsing collection using {} threads", num_threads);

         let mut threads = Vec::new();
@@ -107,14 +107,12 @@ impl Worker {
             if self.is_all_work_done() {
                 return None;
             }
-            {
-                if let Ok(w) = self
-                    .work_item_receiver
-                    .recv_timeout(Duration::from_millis(100))
-                {
-                    return Some(w);
-                }
-            };
+            if let Ok(w) = self
+                .work_item_receiver
+                .recv_timeout(Duration::from_millis(100))
+            {
+                return Some(w);
+            }
         }
     }
@@ -167,12 +165,10 @@ impl Worker {
             if path.is_dir() {
                 sub_directories.push(path);
+            } else if let Some(metadata) = metadata::read(&path) {
+                songs.push(Song { path, metadata });
             } else {
-                if let Some(metadata) = metadata::read(&path) {
-                    songs.push(Song { path, metadata });
-                } else {
-                    other_files.push(path);
-                }
+                other_files.push(path);
             }
         }
@@ -180,7 +176,7 @@ impl Worker {
         self.emit_directory(Directory {
             path: work_item.path.to_owned(),
-            parent: work_item.parent.map(|p| p.to_owned()),
+            parent: work_item.parent,
             songs,
             other_files,
             created,
@@ -195,7 +191,7 @@ impl Worker {
 }

 fn get_date_created(path: &Path) -> Option<i32> {
-    if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or(m.modified())) {
+    if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) {
         t.duration_since(std::time::UNIX_EPOCH)
             .map(|d| d.as_secs() as i32)
             .ok()

View file

@@ -61,7 +61,7 @@ impl Manager {
     }

     pub fn link(&self, username: &str, lastfm_token: &str) -> Result<()> {
-        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
+        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
         let auth_response = scrobbler.authenticate_with_token(lastfm_token)?;

         self.user_manager
@@ -74,7 +74,7 @@ impl Manager {
     }

     pub fn scrobble(&self, username: &str, track: &Path) -> Result<()> {
-        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
+        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
         let scrobble = self.scrobble_from_path(track)?;
         let auth_token = self.user_manager.get_lastfm_session_key(username)?;
         scrobbler.authenticate_with_session_key(&auth_token);
@@ -83,7 +83,7 @@ impl Manager {
     }

     pub fn now_playing(&self, username: &str, track: &Path) -> Result<()> {
-        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY.into(), LASTFM_API_SECRET.into());
+        let mut scrobbler = Scrobbler::new(LASTFM_API_KEY, LASTFM_API_SECRET);
         let scrobble = self.scrobble_from_path(track)?;
         let auth_token = self.user_manager.get_lastfm_session_key(username)?;
         scrobbler.authenticate_with_session_key(&auth_token);

View file

@@ -1,6 +1,5 @@
 use anyhow::Result;
 use core::clone::Clone;
-use diesel;
 use diesel::prelude::*;
 use diesel::sql_types;
 use diesel::BelongingToDsl;
@@ -171,7 +170,7 @@ impl Manager {
                 ORDER BY ps.ordering
             "#,
             );
-            let query = query.clone().bind::<sql_types::Integer, _>(playlist.id);
+            let query = query.bind::<sql_types::Integer, _>(playlist.id);
             songs = query.get_results(&connection).map_err(anyhow::Error::new)?;
         }

View file

@@ -1,4 +1,3 @@
-use diesel;
 use diesel::prelude::*;
 use regex::Regex;
 use std::convert::TryInto;

View file

@@ -23,7 +23,7 @@ pub fn generate_thumbnail(image_path: &Path, options: &Options) -> Result<Dynami
     } else if options.pad_to_square {
         let scaled_image = source_image.thumbnail(out_dimension, out_dimension);
         let (scaled_width, scaled_height) = scaled_image.dimensions();
-        let background = image::Rgb([255, 255 as u8, 255 as u8]);
+        let background = image::Rgb([255, 255_u8, 255_u8]);
         final_image = DynamicImage::ImageRgb8(ImageBuffer::from_pixel(
             out_dimension,
             out_dimension,

View file

@@ -1,5 +1,4 @@
 use anyhow::anyhow;
-use diesel;
 use diesel::prelude::*;
 use std::time::{SystemTime, UNIX_EPOCH};
@@ -168,7 +167,7 @@ impl Manager {
             .filter(name.eq(username))
             .get_results(&connection)
             .map_err(|_| Error::Unspecified)?;
-        Ok(results.len() > 0)
+        Ok(!results.is_empty())
     }

     pub fn is_admin(&self, username: &str) -> Result<bool, Error> {
pub fn is_admin(&self, username: &str) -> Result<bool, Error> { pub fn is_admin(&self, username: &str) -> Result<bool, Error> {

View file

@@ -1,5 +1,4 @@
 use anyhow::Result;
-use diesel;
 use diesel::prelude::*;
 use serde::{Deserialize, Serialize};

View file

@@ -30,7 +30,7 @@ impl Manager {
         Ok(mount_dirs)
     }

-    pub fn set_mount_dirs(&self, mount_dirs: &Vec<MountDir>) -> Result<()> {
+    pub fn set_mount_dirs(&self, mount_dirs: &[MountDir]) -> Result<()> {
         use self::mount_points::dsl::*;
         let connection = self.db.connect()?;
         diesel::delete(mount_points).execute(&connection)?;

View file

@@ -34,11 +34,12 @@ impl From<MountDir> for Mount {
         let source = PathBuf::from(path_string.deref());
         Self {
             name: m.name,
-            source: source,
+            source,
         }
     }
 }

+#[allow(clippy::upper_case_acronyms)]
 pub struct VFS {
     mounts: Vec<Mount>,
 }

View file

@@ -2,7 +2,6 @@ use anyhow::*;
 use diesel::r2d2::{self, ConnectionManager, PooledConnection};
 use diesel::sqlite::SqliteConnection;
 use diesel::RunQueryDsl;
-use diesel_migrations;
 use std::path::Path;

 mod schema;
@@ -34,7 +33,7 @@ impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
         );
         query
             .execute(connection)
-            .map_err(|e| diesel::r2d2::Error::QueryError(e))?;
+            .map_err(diesel::r2d2::Error::QueryError)?;
         Ok(())
     }
 }
@@ -46,7 +45,7 @@ impl DB {
         let pool = diesel::r2d2::Pool::builder()
             .connection_customizer(Box::new(ConnectionCustomizer {}))
             .build(manager)?;
-        let db = DB { pool: pool };
+        let db = DB { pool };
         db.migrate_up()?;
         Ok(db)
     }

View file

@@ -62,7 +62,7 @@ fn init_logging<T: AsRef<Path>>(log_level: LevelFilter, log_file_path: &Option<T
         }
         loggers.push(WriteLogger::new(
             log_level,
-            log_config.clone(),
+            log_config,
             fs::File::create(path)?,
         ));
     }

View file

@@ -106,6 +106,6 @@ impl Paths {
             paths.log_file_path = None;
         };

-        return paths;
+        paths
     }
 }

View file

@@ -23,7 +23,8 @@ use std::str;

 use crate::app::{
     config, ddns,
     index::{self, Index},
-    lastfm, playlist, settings, thumbnail, user, vfs,
+    lastfm, playlist, settings, thumbnail, user,
+    vfs::{self, MountDir},
 };
 use crate::service::{dto, error::*};
@@ -206,7 +207,7 @@ impl FromRequest for Auth {
             })
             .await?;
             return Ok(Auth {
-                username: authorization.username.to_owned(),
+                username: authorization.username,
                 source: AuthSource::QueryParameter,
             });
         }
@@ -219,7 +220,7 @@ impl FromRequest for Auth {
             })
             .await?;
             return Ok(Auth {
-                username: authorization.username.to_owned(),
+                username: authorization.username,
                 source: AuthSource::AuthorizationBearer,
             });
         }
@@ -458,7 +459,7 @@ async fn put_mount_dirs(
     vfs_manager: Data<vfs::Manager>,
     new_mount_dirs: Json<Vec<dto::MountDir>>,
 ) -> Result<HttpResponse, APIError> {
-    let new_mount_dirs = new_mount_dirs
+    let new_mount_dirs: Vec<MountDir> = new_mount_dirs
         .to_owned()
         .into_iter()
         .map(|m| m.into())
@@ -515,10 +516,8 @@ async fn update_user(
     user_update: Json<dto::UserUpdate>,
 ) -> Result<HttpResponse, APIError> {
     if let Some(auth) = &admin_rights.auth {
-        if auth.username == name.as_str() {
-            if user_update.new_is_admin == Some(false) {
-                return Err(APIError::OwnAdminPrivilegeRemoval);
-            }
+        if auth.username == name.as_str() && user_update.new_is_admin == Some(false) {
+            return Err(APIError::OwnAdminPrivilegeRemoval);
         }
     }

View file

@@ -1,6 +1,7 @@
 use serde::{Deserialize, Serialize};

 use crate::app::{config, ddns, settings, thumbnail, user, vfs};
+use std::convert::From;

 pub const API_MAJOR_VERSION: i32 = 6;
 pub const API_MINOR_VERSION: i32 = 0;
@@ -60,6 +61,7 @@ pub enum ThumbnailSize {
     Native,
 }

+#[allow(clippy::clippy::clippy::from_over_into)]
 impl Into<Option<u32>> for ThumbnailSize {
     fn into(self) -> Option<u32> {
         match self {

View file

@@ -7,7 +7,7 @@ macro_rules! test_name {
         let file_name = file_name.replace("/", "-");
         let file_name = file_name.replace("\\", "-");
         format!("{}-line-{}", file_name, line!())
     }};
 }
pub fn prepare_test_directory<T: AsRef<str>>(test_name: T) -> PathBuf { pub fn prepare_test_directory<T: AsRef<str>>(test_name: T) -> PathBuf {

View file

@@ -12,6 +12,7 @@ macro_rules! match_ignore_case {
 }
 pub use crate::match_ignore_case;

+#[allow(clippy::upper_case_acronyms)]
 #[derive(Debug, PartialEq)]
 pub enum AudioFormat {
     AIFF,