diff --git a/src/app.rs b/src/app.rs
index bde215f..cd76400 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -4,12 +4,12 @@
 use std::path::PathBuf;
 use crate::db::{self, DB};
 use crate::paths::Paths;
+pub mod collection;
 pub mod config;
 pub mod ddns;
-pub mod index;
+pub mod formats;
 pub mod lastfm;
 pub mod playlist;
-pub mod scanner;
 pub mod settings;
 pub mod thumbnail;
 pub mod user;
@@ -35,8 +35,9 @@ pub struct App {
 	pub port: u16,
 	pub web_dir_path: PathBuf,
 	pub swagger_dir_path: PathBuf,
-	pub scanner: scanner::Scanner,
-	pub index: index::Index,
+	pub updater: collection::Updater,
+	pub browser: collection::Browser,
+	pub index: collection::Index,
 	pub config_manager: config::Manager,
 	pub ddns_manager: ddns::Manager,
 	pub lastfm_manager: lastfm::Manager,
@@ -64,9 +65,14 @@ impl App {
 		let auth_secret = settings_manager.get_auth_secret().await?;
 		let ddns_manager = ddns::Manager::new(db.clone());
 		let user_manager = user::Manager::new(db.clone(), auth_secret);
-		let scanner =
-			scanner::Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
-		let index = index::Index::new(db.clone(), vfs_manager.clone());
+		let index = collection::Index::new();
+		let browser = collection::Browser::new(db.clone(), vfs_manager.clone());
+		let updater = collection::Updater::new(
+			db.clone(),
+			index.clone(),
+			settings_manager.clone(),
+			vfs_manager.clone(),
+		);
 		let config_manager = config::Manager::new(
 			settings_manager.clone(),
 			user_manager.clone(),
 		);
@@ -75,7 +81,7 @@
 		let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone());
 		let thumbnail_manager = thumbnail::Manager::new(thumbnails_dir_path);
-		let lastfm_manager = lastfm::Manager::new(index.clone(), user_manager.clone());
+		let lastfm_manager = lastfm::Manager::new(browser.clone(), user_manager.clone());
 
 		if let Some(config_path) = paths.config_file_path {
 			let config = config::Config::from_path(&config_path)?;
@@ -86,7 +92,8 @@
 			port,
 			web_dir_path: paths.web_dir_path,
 			swagger_dir_path: paths.swagger_dir_path,
-			scanner,
+			updater,
+			browser,
 			index,
 			config_manager,
 			ddns_manager,
diff --git a/src/app/collection.rs b/src/app/collection.rs
new file mode 100644
index 0000000..09ed312
--- /dev/null
+++ b/src/app/collection.rs
@@ -0,0 +1,15 @@
+mod browser;
+mod cleaner;
+mod index;
+mod inserter;
+mod scanner;
+mod types;
+mod updater;
+
+pub use browser::*;
+pub use cleaner::*;
+pub use index::*;
+pub use inserter::*;
+pub use scanner::*;
+pub use types::*;
+pub use updater::*;
diff --git a/src/app/collection/browser.rs b/src/app/collection/browser.rs
new file mode 100644
index 0000000..51184a9
--- /dev/null
+++ b/src/app/collection/browser.rs
@@ -0,0 +1,343 @@
+use std::path::Path;
+
+use crate::app::{collection, vfs};
+use crate::db::DB;
+
+#[derive(Clone)]
+pub struct Browser {
+	db: DB,
+	vfs_manager: vfs::Manager,
+}
+
+impl Browser {
+	pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
+		Self { db, vfs_manager }
+	}
+
+	pub async fn browse<P>(&self, path: P) -> Result<Vec<collection::File>, collection::Error>
+	where
+		P: AsRef<Path>,
+	{
+		let mut output = Vec::new();
+		let mut connection = self.db.connect().await?;
+
+		if path.as_ref().components().count() == 0 {
+			// Browse top-level
+			let directories = sqlx::query_as!(
+				collection::Directory,
+				"SELECT * FROM directories WHERE virtual_parent IS NULL"
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
+			output.extend(directories.into_iter().map(collection::File::Directory));
+		} else {
+			let vfs = self.vfs_manager.get_vfs().await?;
+			match vfs.virtual_to_real(&path) {
+				Ok(p) if p.exists() => {}
+				_ => {
+					return Err(collection::Error::DirectoryNotFound(
+						path.as_ref().to_owned(),
+					))
+				}
+			}
+
+			let path = path.as_ref().to_string_lossy();
+
+			// Browse sub-directory
+			let directories = sqlx::query_as!(
+				collection::Directory,
+				"SELECT * FROM directories WHERE virtual_parent = $1 ORDER BY virtual_path COLLATE NOCASE ASC",
+				path
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
+			output.extend(directories.into_iter().map(collection::File::Directory));
+
+			let songs = sqlx::query_as!(
+				collection::Song,
+				"SELECT * FROM songs WHERE virtual_parent = $1 ORDER BY virtual_path COLLATE NOCASE ASC",
+				path
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
+
+			output.extend(songs.into_iter().map(collection::File::Song));
+		}
+
+		Ok(output)
+	}
+
+	pub async fn flatten<P>(&self, path: P) -> Result<Vec<collection::Song>, collection::Error>
+	where
+		P: AsRef<Path>,
+	{
+		let mut connection = self.db.connect().await?;
+
+		let songs = if path.as_ref().parent().is_some() {
+			let vfs = self.vfs_manager.get_vfs().await?;
+			match vfs.virtual_to_real(&path) {
+				Ok(p) if p.exists() => {}
+				_ => {
+					return Err(collection::Error::DirectoryNotFound(
+						path.as_ref().to_owned(),
+					))
+				}
+			}
+
+			let song_path_filter = {
+				let mut path_buf = path.as_ref().to_owned();
+				path_buf.push("%");
+				path_buf.as_path().to_string_lossy().into_owned()
+			};
+			sqlx::query_as!(
+				collection::Song,
+				"SELECT * FROM songs WHERE virtual_path LIKE $1 ORDER BY virtual_path COLLATE NOCASE ASC",
+				song_path_filter
+			)
+			.fetch_all(connection.as_mut())
+			.await?
+		} else {
+			sqlx::query_as!(
+				collection::Song,
+				"SELECT * FROM songs ORDER BY virtual_path COLLATE NOCASE ASC"
+			)
+			.fetch_all(connection.as_mut())
+			.await?
+		};
+
+		Ok(songs)
+	}
+
+	pub async fn get_random_albums(
+		&self,
+		count: i64,
+	) -> Result<Vec<collection::Directory>, collection::Error> {
+		// TODO move to Index
+		Ok(vec![])
+	}
+
+	pub async fn get_recent_albums(
+		&self,
+		count: i64,
+	) -> Result<Vec<collection::Directory>, collection::Error> {
+		// TODO move to Index
+		Ok(vec![])
+	}
+
+	pub async fn search(&self, query: &str) -> Result<Vec<collection::File>, collection::Error> {
+		let mut connection = self.db.connect().await?;
+		let like_test = format!("%{}%", query);
+		let mut output = Vec::new();
+
+		// Find dirs with matching path and parent not matching
+		{
+			let directories = sqlx::query_as!(
+				collection::Directory,
+				"SELECT * FROM directories WHERE virtual_path LIKE $1 AND virtual_parent NOT LIKE $1",
+				like_test
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
+
+			output.extend(directories.into_iter().map(collection::File::Directory));
+		}
+
+		// Find songs with matching title/album/artist and non-matching parent
+		{
+			let songs = sqlx::query_as!(
+				collection::Song,
+				r#"
+				SELECT * FROM songs
+				WHERE ( virtual_path LIKE $1
+				OR title LIKE $1
+				OR album LIKE $1
+				OR artists LIKE $1
+				OR album_artists LIKE $1
+				)
+				AND virtual_parent NOT LIKE $1
+				"#,
+				like_test
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
+
+			output.extend(songs.into_iter().map(collection::File::Song));
+		}
+
+		Ok(output)
+	}
+
+	pub async fn get_song(&self, path: &Path) -> Result<collection::Song, collection::Error> {
+		let mut connection = self.db.connect().await?;
+
+		let path = path.to_string_lossy();
+		let song = sqlx::query_as!(
+			collection::Song,
+			"SELECT * FROM songs WHERE virtual_path = $1",
+			path
+		)
+		.fetch_one(connection.as_mut())
+		.await?;
+
+		Ok(song)
+	}
+}
+
+#[cfg(test)]
+mod test {
+	use std::path::{Path, PathBuf};
+
+	use super::*;
+	use crate::app::test;
+	use crate::test_name;
+
+	const TEST_MOUNT_NAME: &str = "root";
+
+	#[tokio::test]
+	async fn can_browse_top_level() {
+		let mut ctx = test::ContextBuilder::new(test_name!())
+			.mount(TEST_MOUNT_NAME, "test-data/small-collection")
+			.build()
+			.await;
+		ctx.updater.update().await.unwrap();
+
+		let root_path = Path::new(TEST_MOUNT_NAME);
+		let files = ctx.browser.browse(Path::new("")).await.unwrap();
+		assert_eq!(files.len(), 1);
+		match files[0] {
+			collection::File::Directory(ref d) => {
+				assert_eq!(d.virtual_path, root_path.to_str().unwrap())
+			}
+			_ => panic!("Expected directory"),
+		}
+	}
+
+	#[tokio::test]
+	async fn can_browse_directory() {
+		let khemmis_path: PathBuf = [TEST_MOUNT_NAME, "Khemmis"].iter().collect();
+		let tobokegao_path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
+
+		let mut ctx = test::ContextBuilder::new(test_name!())
+			.mount(TEST_MOUNT_NAME,
"test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + + let files = ctx + .browser + .browse(Path::new(TEST_MOUNT_NAME)) + .await + .unwrap(); + + assert_eq!(files.len(), 2); + match files[0] { + collection::File::Directory(ref d) => { + assert_eq!(d.virtual_path, khemmis_path.to_str().unwrap()) + } + _ => panic!("Expected directory"), + } + + match files[1] { + collection::File::Directory(ref d) => { + assert_eq!(d.virtual_path, tobokegao_path.to_str().unwrap()) + } + _ => panic!("Expected directory"), + } + } + + #[tokio::test] + async fn can_flatten_root() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + let songs = ctx + .browser + .flatten(Path::new(TEST_MOUNT_NAME)) + .await + .unwrap(); + assert_eq!(songs.len(), 13); + assert_eq!(songs[0].title, Some("Above The Water".to_owned())); + } + + #[tokio::test] + async fn can_flatten_directory() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect(); + let songs = ctx.browser.flatten(path).await.unwrap(); + assert_eq!(songs.len(), 8); + } + + #[tokio::test] + async fn can_flatten_directory_with_shared_prefix() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); // Prefix of '(Picnic Remixes)' + let songs = ctx.browser.flatten(path).await.unwrap(); + assert_eq!(songs.len(), 7); + } + + #[tokio::test] + async fn can_get_random_albums() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + let albums = ctx.browser.get_random_albums(1).await.unwrap(); + assert_eq!(albums.len(), 1); + } + + #[tokio::test] + async fn can_get_recent_albums() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + ctx.updater.update().await.unwrap(); + let albums = ctx.browser.get_recent_albums(2).await.unwrap(); + assert_eq!(albums.len(), 2); + } + + #[tokio::test] + async fn can_get_a_song() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + + ctx.updater.update().await.unwrap(); + + let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); + let song_virtual_path = picnic_virtual_dir.join("05 - シャーベット (Sherbet).mp3"); + let artwork_virtual_path = picnic_virtual_dir.join("Folder.png"); + + let song = ctx.browser.get_song(&song_virtual_path).await.unwrap(); + assert_eq!( + song.virtual_path, + song_virtual_path.to_string_lossy().as_ref() + ); + assert_eq!(song.track_number, Some(5)); + assert_eq!(song.disc_number, None); + assert_eq!(song.title, Some("シャーベット (Sherbet)".to_owned())); + assert_eq!( + song.artists, + collection::MultiString(vec!["Tobokegao".to_owned()]) + ); + assert_eq!(song.album_artists, collection::MultiString(vec![])); + assert_eq!(song.album, Some("Picnic".to_owned())); + assert_eq!(song.year, Some(2016)); + assert_eq!( + song.artwork, + 
Some(artwork_virtual_path.to_string_lossy().into_owned()) + ); + } +} diff --git a/src/app/collection/cleaner.rs b/src/app/collection/cleaner.rs new file mode 100644 index 0000000..c149737 --- /dev/null +++ b/src/app/collection/cleaner.rs @@ -0,0 +1,89 @@ +use rayon::prelude::*; +use sqlx::{QueryBuilder, Sqlite}; +use std::path::Path; + +use crate::app::{collection, vfs}; +use crate::db::DB; + +#[derive(Clone)] +pub struct Cleaner { + db: DB, + vfs_manager: vfs::Manager, +} + +impl Cleaner { + const BUFFER_SIZE: usize = 500; // Deletions in each transaction + + pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self { + Self { db, vfs_manager } + } + + pub async fn clean(&self) -> Result<(), collection::Error> { + tokio::try_join!(self.clean_songs(), self.clean_directories())?; + Ok(()) + } + + pub async fn clean_directories(&self) -> Result<(), collection::Error> { + let directories = { + let mut connection = self.db.connect().await?; + sqlx::query!("SELECT path, virtual_path FROM directories") + .fetch_all(connection.as_mut()) + .await? + }; + + let vfs = self.vfs_manager.get_vfs().await?; + let missing_directories = tokio::task::spawn_blocking(move || { + directories + .into_par_iter() + .filter(|d| !vfs.exists(&d.virtual_path) || !Path::new(&d.path).exists()) + .map(|d| d.virtual_path) + .collect::>() + }) + .await?; + + let mut connection = self.db.connect().await?; + for chunk in missing_directories[..].chunks(Self::BUFFER_SIZE) { + QueryBuilder::::new("DELETE FROM directories WHERE virtual_path IN ") + .push_tuples(chunk, |mut b, virtual_path| { + b.push_bind(virtual_path); + }) + .build() + .execute(connection.as_mut()) + .await?; + } + + Ok(()) + } + + pub async fn clean_songs(&self) -> Result<(), collection::Error> { + let songs = { + let mut connection = self.db.connect().await?; + sqlx::query!("SELECT path, virtual_path FROM songs") + .fetch_all(connection.as_mut()) + .await? 
+		};
+
+		let vfs = self.vfs_manager.get_vfs().await?;
+		let deleted_songs = tokio::task::spawn_blocking(move || {
+			songs
+				.into_par_iter()
+				.filter(|s| !vfs.exists(&s.virtual_path) || !Path::new(&s.path).exists())
+				.map(|s| s.virtual_path)
+				.collect::<Vec<_>>()
+		})
+		.await?;
+
+		for chunk in deleted_songs[..].chunks(Cleaner::BUFFER_SIZE) {
+			let mut connection = self.db.connect().await?;
+			QueryBuilder::<Sqlite>::new("DELETE FROM songs WHERE virtual_path IN ")
+				.push_tuples(chunk, |mut b, virtual_path| {
+					b.push_bind(virtual_path);
+				})
+				.build()
+				.execute(connection.as_mut())
+				.await?;
+		}
+
+		Ok(())
+	}
+}
diff --git a/src/app/collection/index.rs b/src/app/collection/index.rs
new file mode 100644
index 0000000..123ee44
--- /dev/null
+++ b/src/app/collection/index.rs
@@ -0,0 +1,32 @@
+use std::{collections::HashMap, sync::Arc};
+
+use tokio::sync::RwLock;
+
+use crate::app::collection;
+
+#[derive(Clone, Default)]
+pub struct Index {
+	lookups: Arc<RwLock<Lookups>>,
+}
+
+impl Index {
+	pub fn new() -> Self {
+		Self::default()
+	}
+
+	pub async fn replace_lookup_tables(&mut self, new_lookups: Lookups) {
+		let mut lock = self.lookups.write().await;
+		*lock = new_lookups;
+	}
+}
+
+#[derive(Default)]
+pub struct Lookups {
+	data: HashMap,
+}
+
+impl Lookups {
+	pub fn add_song(&mut self, _song: &collection::Song) {
+		// todo!()
+	}
+}
diff --git a/src/app/collection/inserter.rs b/src/app/collection/inserter.rs
new file mode 100644
index 0000000..5d86139
--- /dev/null
+++ b/src/app/collection/inserter.rs
@@ -0,0 +1,123 @@
+use std::borrow::Cow;
+
+use log::error;
+use sqlx::{
+	encode::IsNull,
+	pool::PoolConnection,
+	sqlite::{SqliteArgumentValue, SqliteTypeInfo},
+	QueryBuilder, Sqlite,
+};
+
+use crate::app::collection::{self, MultiString};
+use crate::db::DB;
+
+impl<'q> sqlx::Encode<'q, Sqlite> for MultiString {
+	fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'q>>) -> IsNull {
+		if self.0.is_empty() {
+			IsNull::Yes
+		} else {
+			let joined = self.0.join(MultiString::SEPARATOR);
+			args.push(SqliteArgumentValue::Text(Cow::Owned(joined)));
+			IsNull::No
+		}
+	}
+}
+
+impl sqlx::Type<Sqlite> for MultiString {
+	fn type_info() -> SqliteTypeInfo {
+		<&str as sqlx::Type<Sqlite>>::type_info()
+	}
+}
+
+pub struct Inserter<T> {
+	new_entries: Vec<T>,
+	db: DB,
+}
+
+impl<T> Inserter<T>
+where
+	T: Insertable,
+{
+	const BUFFER_SIZE: usize = 1000;
+
+	pub fn new(db: DB) -> Self {
+		let new_entries = Vec::with_capacity(Self::BUFFER_SIZE);
+		Self { new_entries, db }
+	}
+
+	pub async fn insert(&mut self, entry: T) {
+		self.new_entries.push(entry);
+		if self.new_entries.len() >= Self::BUFFER_SIZE {
+			self.flush().await;
+		}
+	}
+
+	pub async fn flush(&mut self) {
+		let Ok(connection) = self.db.connect().await else {
+			error!("Could not acquire connection to insert new entries in database");
+			return;
+		};
+		match Insertable::bulk_insert(&self.new_entries, connection).await {
+			Ok(_) => self.new_entries.clear(),
+			Err(e) => error!("Could not insert new entries in database: {}", e),
+		};
+	}
+}
+
+pub trait Insertable
+where
+	Self: Sized,
+{
+	async fn bulk_insert(
+		entries: &Vec<Self>,
+		connection: PoolConnection<Sqlite>,
+	) -> Result<(), sqlx::Error>;
+}
+
+impl Insertable for collection::Directory {
+	async fn bulk_insert(
+		entries: &Vec<Self>,
+		mut connection: PoolConnection<Sqlite>,
+	) -> Result<(), sqlx::Error> {
+		QueryBuilder::<Sqlite>::new("INSERT INTO directories(path, virtual_path, virtual_parent) ")
+			.push_values(entries.iter(), |mut b, directory| {
+				b.push_bind(&directory.path)
+					.push_bind(&directory.virtual_path)
+					.push_bind(&directory.virtual_parent);
+			})
+			.build()
+			.execute(connection.as_mut())
+			.await
+			.map(|_| ())
+	}
+}
+
+impl Insertable for collection::Song {
+	async fn bulk_insert(
+		entries: &Vec<Self>,
+		mut connection: PoolConnection<Sqlite>,
+	) -> Result<(), sqlx::Error> {
+		QueryBuilder::<Sqlite>::new("INSERT INTO songs(path, virtual_path, virtual_parent, track_number, disc_number, title, artists, album_artists, year, album, artwork, duration, lyricists, composers, genres, labels) ")
+			.push_values(entries.iter(), |mut b, song| {
+				b.push_bind(&song.path)
+					.push_bind(&song.virtual_path)
+					.push_bind(&song.virtual_parent)
+					.push_bind(song.track_number)
+					.push_bind(song.disc_number)
+					.push_bind(&song.title)
+					.push_bind(&song.artists)
+					.push_bind(&song.album_artists)
+					.push_bind(song.year)
+					.push_bind(&song.album)
+					.push_bind(&song.artwork)
+					.push_bind(song.duration)
+					.push_bind(&song.lyricists)
+					.push_bind(&song.composers)
+					.push_bind(&song.genres)
+					.push_bind(&song.labels);
+			})
+			.build()
+			.execute(connection.as_mut())
+			.await.map(|_| ())
+	}
+}
diff --git a/src/app/collection/scanner.rs b/src/app/collection/scanner.rs
new file mode 100644
index 0000000..cbde420
--- /dev/null
+++ b/src/app/collection/scanner.rs
@@ -0,0 +1,196 @@
+use log::{error, info};
+use rayon::{Scope, ThreadPoolBuilder};
+use regex::Regex;
+use std::cmp::min;
+use std::fs;
+use std::path::Path;
+use std::str::FromStr;
+use tokio::sync::mpsc::UnboundedSender;
+
+use crate::app::vfs;
+use crate::app::{
+	collection::{self, MultiString},
+	formats,
+};
+
+pub struct Scanner {
+	directories_output: UnboundedSender<collection::Directory>,
+	songs_output: UnboundedSender<collection::Song>,
+	vfs_manager: vfs::Manager,
+	artwork_regex: Option<Regex>,
+}
+
+impl Scanner {
+	pub fn new(
+		directories_output: UnboundedSender<collection::Directory>,
+		songs_output: UnboundedSender<collection::Song>,
+		vfs_manager: vfs::Manager,
+		artwork_regex: Option<Regex>,
+	) -> Self {
+		Self {
+			directories_output,
+			songs_output,
+			vfs_manager,
+			artwork_regex,
+		}
+	}
+
+	pub async fn scan(self) -> Result<(), collection::Error> {
+		let vfs = self.vfs_manager.get_vfs().await?;
+		let roots = vfs.mounts().clone();
+
+		let key = "POLARIS_NUM_TRAVERSER_THREADS";
+		let num_threads = std::env::var_os(key)
+			.map(|v| v.to_string_lossy().to_string())
+			.and_then(|v| usize::from_str(&v).ok())
+			.unwrap_or_else(|| min(num_cpus::get(), 4));
+		info!("Browsing collection using {} threads", num_threads);
+
+		let directories_output = self.directories_output.clone();
+		let songs_output = self.songs_output.clone();
+		let artwork_regex = self.artwork_regex.clone();
+
+		let thread_pool = ThreadPoolBuilder::new().num_threads(num_threads).build()?;
+		thread_pool.scope({
+			|scope| {
+				for root in roots {
+					scope.spawn(|scope| {
+						process_directory(
+							scope,
+							root.source,
+							root.name,
+							directories_output.clone(),
+							songs_output.clone(),
+							artwork_regex.clone(),
+						);
+					});
+				}
+			}
+		});
+
+		Ok(())
+	}
+}
+
+fn process_directory<P: AsRef<Path>, Q: AsRef<Path>>(
+	scope: &Scope,
+	real_path: P,
+	virtual_path: Q,
+	directories_output: UnboundedSender<collection::Directory>,
+	songs_output: UnboundedSender<collection::Song>,
+	artwork_regex: Option<Regex>,
+) {
+	let read_dir = match fs::read_dir(&real_path) {
+		Ok(read_dir) => read_dir,
+		Err(e) => {
+			error!(
+				"Directory read error for `{}`: {}",
+				real_path.as_ref().display(),
+				e
+			);
+			return;
+		}
+	};
+
+	let mut songs = vec![];
+	let mut artwork_file = None;
+
+	for entry in read_dir {
+		let name = match entry {
+			Ok(ref f) => f.file_name(),
+			Err(e) => {
+				error!(
+					"File read error within `{}`: {}",
+					real_path.as_ref().display(),
+					e
+				);
+				break;
+			}
+		};
+
+		let entry_real_path = real_path.as_ref().join(&name);
+		let entry_real_path_string = entry_real_path.to_string_lossy().to_string();
+
+		let entry_virtual_path = virtual_path.as_ref().join(&name);
+		let entry_virtual_path_string = entry_virtual_path.to_string_lossy().to_string();
+
+		if entry_real_path.is_dir() {
+			scope.spawn({
+				let directories_output = directories_output.clone();
+				let songs_output = songs_output.clone();
+				let artwork_regex = artwork_regex.clone();
+				|scope| {
+					process_directory(
+						scope,
+						entry_real_path,
+						entry_virtual_path,
+						directories_output,
+						songs_output,
+						artwork_regex,
+					);
+				}
+			});
+		} else if let Some(metadata) = formats::read_metadata(&entry_real_path) {
+			songs.push(collection::Song {
+				id: 0,
+				path: entry_real_path_string.clone(),
+				virtual_path: entry_virtual_path.to_string_lossy().to_string(),
+				virtual_parent: entry_virtual_path
+					.parent()
+					.unwrap()
+					.to_string_lossy()
+					.to_string(),
+				track_number: metadata.track_number.map(|n| n as i64),
+				disc_number: metadata.disc_number.map(|n| n as i64),
+				title: metadata.title,
+				artists: MultiString(metadata.artists),
+				album_artists: MultiString(metadata.album_artists),
+				year: metadata.year.map(|n| n as i64),
+				album: metadata.album,
+				artwork: metadata
+					.has_artwork
+					.then(|| entry_virtual_path_string.clone()),
+				duration: metadata.duration.map(|n| n as i64),
+				lyricists: MultiString(metadata.lyricists),
+				composers: MultiString(metadata.composers),
+				genres: MultiString(metadata.genres),
+				labels: MultiString(metadata.labels),
+				date_added: get_date_created(&entry_real_path).unwrap_or_default(),
+			});
+		} else if artwork_file.is_none()
+			&& artwork_regex
+				.as_ref()
+				.is_some_and(|r| r.is_match(name.to_str().unwrap_or_default()))
+		{
+			artwork_file = Some(entry_virtual_path_string);
+		}
+	}
+
+	for mut song in songs {
+		song.artwork = song.artwork.or_else(|| artwork_file.clone());
+		songs_output.send(song).ok();
+	}
+
+	directories_output
+		.send(collection::Directory {
+			id: 0,
+			path: real_path.as_ref().to_string_lossy().to_string(),
+			virtual_path: virtual_path.as_ref().to_string_lossy().to_string(),
+			virtual_parent: virtual_path
+				.as_ref()
+				.parent()
+				.map(|p| p.to_string_lossy().to_string())
+				.filter(|p| !p.is_empty()),
+		})
+		.ok();
+}
+
+fn get_date_created<P: AsRef<Path>>(path: P) -> Option<i64> {
+	if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) {
+		t.duration_since(std::time::UNIX_EPOCH)
+			.map(|d| d.as_secs() as i64)
+			.ok()
+	} else {
+		None
+	}
+}
diff --git a/src/app/collection/types.rs b/src/app/collection/types.rs
new file mode 100644
index 0000000..1286ae3
--- /dev/null
+++ b/src/app/collection/types.rs
@@ -0,0 +1,74 @@
+use std::path::PathBuf;
+
+use crate::{
+	app::vfs::{self},
+	db,
+};
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct MultiString(pub Vec<String>);
+
+impl MultiString {
+	pub const SEPARATOR: &'static str = "\u{000C}";
+}
+
+impl From<Option<String>> for MultiString {
+	fn from(value: Option<String>) -> Self {
+		match value {
+			None => Self(Vec::new()),
+			Some(s) => Self(s.split(Self::SEPARATOR).map(|s| s.to_string()).collect()),
+		}
+	}
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+	#[error("Directory not found: {0}")]
+	DirectoryNotFound(PathBuf),
+	#[error(transparent)]
+	Database(#[from] sqlx::Error),
+	#[error(transparent)]
+	DatabaseConnection(#[from] db::Error),
+	#[error(transparent)]
+	Vfs(#[from] vfs::Error),
+	#[error(transparent)]
+	ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
+	#[error(transparent)]
+	ThreadJoining(#[from] tokio::task::JoinError),
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub enum File {
+	Directory(Directory),
+	Song(Song),
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Song {
+	pub id: i64,
+	pub path: String,
+	pub virtual_path: String,
+	pub virtual_parent: String,
+	pub track_number: Option<i64>,
+	pub disc_number: Option<i64>,
+	pub title: Option<String>,
+	pub artists: MultiString,
+	pub album_artists: MultiString,
+	pub year: Option<i64>,
+	pub album: Option<String>,
+	pub artwork: Option<String>,
+	pub duration: Option<i64>,
+	pub lyricists: MultiString,
+	pub composers: MultiString,
+	pub genres: MultiString,
+	pub labels: MultiString,
+	pub date_added: i64,
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Directory {
+	pub id: i64,
+	pub path: String,
+	pub virtual_path: String,
+	pub virtual_parent: Option<String>,
+}
diff --git a/src/app/collection/updater.rs b/src/app/collection/updater.rs
new file mode 100644
index 0000000..ff72646
--- /dev/null
+++ b/src/app/collection/updater.rs
@@ -0,0 +1,293 @@
+use std::{sync::Arc, time::Duration};
+
+use log::{error, info};
+use tokio::{
+	sync::{mpsc::unbounded_channel, Notify},
+	time::Instant,
+};
+
+use crate::{
+	app::{collection::*, settings, vfs},
+	db::DB,
+};
+
+#[derive(Clone)]
+pub struct Updater {
+	db: DB,
+	index: Index,
+	settings_manager: settings::Manager,
+	vfs_manager: vfs::Manager,
+	pending_scan: Arc<Notify>,
+}
+
+impl Updater {
+	pub fn new(
+		db: DB,
+		index: Index,
+		settings_manager: settings::Manager,
+		vfs_manager: vfs::Manager,
+	) -> Self {
+		let updater = Self {
+			db,
+			index,
+			vfs_manager,
+			settings_manager,
+			pending_scan: Arc::new(Notify::new()),
+		};
+
+		tokio::spawn({
+			let mut updater = updater.clone();
+			async move {
+				loop {
+					updater.pending_scan.notified().await;
+					if let Err(e) = updater.update().await {
+						error!("Error while updating index: {}", e);
+					}
+				}
+			}
+		});
+
+		updater
+	}
+
+	pub fn trigger_scan(&self) {
+		self.pending_scan.notify_one();
+	}
+
+	pub fn begin_periodic_scans(&self) {
+		tokio::spawn({
+			let index = self.clone();
+			async move {
+				loop {
+					index.trigger_scan();
+					let sleep_duration = index
+						.settings_manager
+						.get_index_sleep_duration()
+						.await
+						.unwrap_or_else(|e| {
+							error!("Could not retrieve index sleep duration: {}", e);
+							Duration::from_secs(1800)
+						});
+					tokio::time::sleep(sleep_duration).await;
+				}
+			}
+		});
+	}
+
+	pub async fn update(&mut self) -> Result<(), Error> {
+		let start = Instant::now();
+		info!("Beginning library index update");
+
+		let cleaner = Cleaner::new(self.db.clone(), self.vfs_manager.clone());
+		cleaner.clean().await?;
+
+		let album_art_pattern = self
+			.settings_manager
+			.get_index_album_art_pattern()
+			.await
+			.ok();
+
+		let (scanner_directories_output, mut collection_directories_input) = unbounded_channel();
+		let (scanner_songs_output, mut collection_songs_input) = unbounded_channel();
+
+		let scanner = Scanner::new(
+			scanner_directories_output,
+			scanner_songs_output,
+			self.vfs_manager.clone(),
+			album_art_pattern,
+		);
+
+		let mut song_inserter = Inserter::<Song>::new(self.db.clone());
+		let mut directory_inserter = Inserter::<Directory>::new(self.db.clone());
+
+		let directory_task = tokio::spawn(async move {
+			let capacity = 500;
+			let mut buffer: Vec<Directory> = Vec::with_capacity(capacity);
+			loop {
+				match collection_directories_input
+					.recv_many(&mut buffer, capacity)
+					.await
+				{
+					0 => break,
+					_ => {
+						for directory in buffer.drain(0..)
{ + directory_inserter.insert(directory).await; + } + } + } + } + directory_inserter.flush().await; + }); + + let song_task = tokio::spawn(async move { + let capacity = 500; + let mut lookup_tables = Lookups::default(); + let mut buffer: Vec = Vec::with_capacity(capacity); + + loop { + match collection_songs_input + .recv_many(&mut buffer, capacity) + .await + { + 0 => break, + _ => { + for song in buffer.drain(0..) { + lookup_tables.add_song(&song); + song_inserter.insert(song).await; + } + } + } + } + song_inserter.flush().await; + lookup_tables + }); + + let lookup_tables = tokio::join!(scanner.scan(), directory_task, song_task).2?; + self.index.replace_lookup_tables(lookup_tables).await; + + info!( + "Library index update took {} seconds", + start.elapsed().as_millis() as f32 / 1000.0 + ); + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use std::path::PathBuf; + + use crate::{ + app::{collection::*, settings, test}, + test_name, + }; + + const TEST_MOUNT_NAME: &str = "root"; + + #[tokio::test] + async fn scan_adds_new_content() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + + ctx.updater.update().await.unwrap(); + ctx.updater.update().await.unwrap(); // Validates that subsequent updates don't run into conflicts + + let mut connection = ctx.db.connect().await.unwrap(); + let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + assert_eq!(all_directories.len(), 6); + assert_eq!(all_songs.len(), 13); + } + + #[tokio::test] + async fn scan_removes_missing_content() { + let builder = test::ContextBuilder::new(test_name!()); + + let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect(); + let test_collection_dir: PathBuf = builder.test_directory.join("small-collection"); + + let copy_options = fs_extra::dir::CopyOptions::new(); + fs_extra::dir::copy( + original_collection_dir, + &builder.test_directory, + ©_options, + ) + .unwrap(); + + let mut ctx = builder + .mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap()) + .build() + .await; + + ctx.updater.update().await.unwrap(); + + { + let mut connection = ctx.db.connect().await.unwrap(); + let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + assert_eq!(all_directories.len(), 6); + assert_eq!(all_songs.len(), 13); + } + + let khemmis_directory = test_collection_dir.join("Khemmis"); + std::fs::remove_dir_all(khemmis_directory).unwrap(); + ctx.updater.update().await.unwrap(); + { + let mut connection = ctx.db.connect().await.unwrap(); + let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs") + .fetch_all(connection.as_mut()) + .await + .unwrap(); + assert_eq!(all_directories.len(), 4); + assert_eq!(all_songs.len(), 8); + } + } + + #[tokio::test] + async fn finds_embedded_artwork() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + + ctx.updater.update().await.unwrap(); + + let picnic_virtual_dir: PathBuf 
= [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); + let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3"); + + let song = ctx.browser.get_song(&song_virtual_path).await.unwrap(); + assert_eq!( + song.artwork, + Some(song_virtual_path.to_string_lossy().into_owned()) + ); + } + + #[tokio::test] + async fn album_art_pattern_is_case_insensitive() { + let mut ctx = test::ContextBuilder::new(test_name!()) + .mount(TEST_MOUNT_NAME, "test-data/small-collection") + .build() + .await; + + let patterns = vec!["folder", "FOLDER"]; + + for pattern in patterns.into_iter() { + ctx.settings_manager + .amend(&settings::NewSettings { + album_art_pattern: Some(pattern.to_owned()), + ..Default::default() + }) + .await + .unwrap(); + ctx.updater.update().await.unwrap(); + + let hunted_virtual_dir: PathBuf = + [TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect(); + let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg"); + let song = &ctx.browser.flatten(&hunted_virtual_dir).await.unwrap()[0]; + assert_eq!( + song.artwork, + Some(artwork_virtual_path.to_string_lossy().into_owned()) + ); + } + } +} diff --git a/src/app/scanner/metadata.rs b/src/app/formats.rs similarity index 81% rename from src/app/scanner/metadata.rs rename to src/app/formats.rs index c8ef03c..f14506b 100644 --- a/src/app/scanner/metadata.rs +++ b/src/app/formats.rs @@ -44,22 +44,26 @@ pub struct SongMetadata { pub labels: Vec, } -pub fn read(path: &Path) -> Option { - let data = match utils::get_audio_format(path) { - Some(AudioFormat::AIFF) => read_id3(path), - Some(AudioFormat::FLAC) => read_flac(path), - Some(AudioFormat::MP3) => read_mp3(path), - Some(AudioFormat::OGG) => read_vorbis(path), - Some(AudioFormat::OPUS) => read_opus(path), - Some(AudioFormat::WAVE) => read_id3(path), - Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(path), - Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(path), +pub fn read_metadata>(path: P) -> Option { + let data = match utils::get_audio_format(&path) { + Some(AudioFormat::AIFF) => read_id3(&path), + Some(AudioFormat::FLAC) => read_flac(&path), + Some(AudioFormat::MP3) => read_mp3(&path), + Some(AudioFormat::OGG) => read_vorbis(&path), + Some(AudioFormat::OPUS) => read_opus(&path), + Some(AudioFormat::WAVE) => read_id3(&path), + Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(&path), + Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(&path), None => return None, }; match data { Ok(d) => Some(d), Err(e) => { - error!("Error while reading file metadata for '{:?}': {}", path, e); + error!( + "Error while reading file metadata for '{:?}': {}", + path.as_ref(), + e + ); None } } @@ -78,7 +82,7 @@ impl ID3Ext for id3::Tag { } } -fn read_id3(path: &Path) -> Result { +fn read_id3>(path: P) -> Result { let tag = id3::Tag::read_from_path(path).or_else(|error| { if let Some(tag) = error.partial_tag { Ok(tag) @@ -122,8 +126,8 @@ fn read_id3(path: &Path) -> Result { }) } -fn read_mp3(path: &Path) -> Result { - let mut metadata = read_id3(path)?; +fn read_mp3>(path: P) -> Result { + let mut metadata = read_id3(&path)?; let duration = { mp3_duration::from_path(path) .map(|d| d.as_secs() as u32) @@ -167,7 +171,7 @@ mod ape_ext { } } -fn read_ape(path: &Path) -> Result { +fn read_ape>(path: P) -> Result { let tag = ape::read_from_path(path)?; let artists = ape_ext::read_strings(tag.items("Artist")); let album = tag.item("Album").and_then(ape_ext::read_string); @@ -197,8 +201,8 @@ fn read_ape(path: &Path) -> Result { }) } -fn 
read_vorbis(path: &Path) -> Result { - let file = fs::File::open(path).map_err(|e| Error::Io(path.to_owned(), e))?; +fn read_vorbis>(path: P) -> Result { + let file = fs::File::open(&path).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?; let source = OggStreamReader::new(file)?; let mut metadata = SongMetadata::default(); @@ -224,7 +228,7 @@ fn read_vorbis(path: &Path) -> Result { Ok(metadata) } -fn read_opus(path: &Path) -> Result { +fn read_opus>(path: P) -> Result { let headers = opus_headers::parse_from_path(path)?; let mut metadata = SongMetadata::default(); @@ -250,7 +254,7 @@ fn read_opus(path: &Path) -> Result { Ok(metadata) } -fn read_flac(path: &Path) -> Result { +fn read_flac>(path: P) -> Result { let tag = metaflac::Tag::read_from_path(path)?; let vorbis = tag .vorbis_comments() @@ -285,7 +289,7 @@ fn read_flac(path: &Path) -> Result { }) } -fn read_mp4(path: &Path) -> Result { +fn read_mp4>(path: P) -> Result { let mut tag = mp4ameta::Tag::read_from_path(path)?; let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label"); @@ -336,35 +340,35 @@ fn reads_file_metadata() { ..sample_tags.clone() }; assert_eq!( - read(Path::new("test-data/formats/sample.aif")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.aif")).unwrap(), sample_tags ); assert_eq!( - read(Path::new("test-data/formats/sample.mp3")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.mp3")).unwrap(), mp3_sample_tag ); assert_eq!( - read(Path::new("test-data/formats/sample.ogg")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.ogg")).unwrap(), sample_tags ); assert_eq!( - read(Path::new("test-data/formats/sample.flac")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.flac")).unwrap(), flac_sample_tag ); assert_eq!( - read(Path::new("test-data/formats/sample.m4a")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.m4a")).unwrap(), m4a_sample_tag ); assert_eq!( - read(Path::new("test-data/formats/sample.opus")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.opus")).unwrap(), sample_tags ); assert_eq!( - read(Path::new("test-data/formats/sample.ape")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.ape")).unwrap(), sample_tags ); assert_eq!( - read(Path::new("test-data/formats/sample.wav")).unwrap(), + read_metadata(Path::new("test-data/formats/sample.wav")).unwrap(), sample_tags ); } @@ -372,27 +376,27 @@ fn reads_file_metadata() { #[test] fn reads_embedded_artwork() { assert!( - read(Path::new("test-data/artwork/sample.aif")) + read_metadata(Path::new("test-data/artwork/sample.aif")) .unwrap() .has_artwork ); assert!( - read(Path::new("test-data/artwork/sample.mp3")) + read_metadata(Path::new("test-data/artwork/sample.mp3")) .unwrap() .has_artwork ); assert!( - read(Path::new("test-data/artwork/sample.flac")) + read_metadata(Path::new("test-data/artwork/sample.flac")) .unwrap() .has_artwork ); assert!( - read(Path::new("test-data/artwork/sample.m4a")) + read_metadata(Path::new("test-data/artwork/sample.m4a")) .unwrap() .has_artwork ); assert!( - read(Path::new("test-data/artwork/sample.wav")) + read_metadata(Path::new("test-data/artwork/sample.wav")) .unwrap() .has_artwork ); diff --git a/src/app/index.rs b/src/app/index.rs deleted file mode 100644 index 028d30a..0000000 --- a/src/app/index.rs +++ /dev/null @@ -1,21 +0,0 @@ -use crate::app::vfs; -use crate::db::DB; - -mod query; -#[cfg(test)] -mod test; -mod types; - -pub use self::types::*; - -#[derive(Clone)] -pub struct Index { - db: DB, - vfs_manager: 
vfs::Manager, -} - -impl Index { - pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self { - Self { db, vfs_manager } - } -} diff --git a/src/app/index/query.rs b/src/app/index/query.rs deleted file mode 100644 index 900bca2..0000000 --- a/src/app/index/query.rs +++ /dev/null @@ -1,201 +0,0 @@ -use std::path::Path; - -use super::*; -use crate::app::scanner; - -impl Index { - pub async fn browse

(&self, virtual_path: P) -> Result, Error> - where - P: AsRef, - { - let mut output = Vec::new(); - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - - if virtual_path.as_ref().components().count() == 0 { - // Browse top-level - let real_directories = sqlx::query_as!( - scanner::Directory, - "SELECT * FROM directories WHERE parent IS NULL" - ) - .fetch_all(connection.as_mut()) - .await?; - let virtual_directories = real_directories - .into_iter() - .filter_map(|d| d.virtualize(&vfs)); - output.extend(virtual_directories.map(CollectionFile::Directory)); - } else { - // Browse sub-directory - let real_path = vfs.virtual_to_real(virtual_path)?; - let real_path_string = real_path.as_path().to_string_lossy().into_owned(); - - let real_directories = sqlx::query_as!( - scanner::Directory, - "SELECT * FROM directories WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC", - real_path_string - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_directories = real_directories - .into_iter() - .filter_map(|d| d.virtualize(&vfs)); - - output.extend(virtual_directories.map(CollectionFile::Directory)); - - let real_songs = sqlx::query_as!( - scanner::Song, - "SELECT * FROM songs WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC", - real_path_string - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs)); - output.extend(virtual_songs.map(CollectionFile::Song)); - } - - Ok(output) - } - - pub async fn flatten

(&self, virtual_path: P) -> Result, Error> - where - P: AsRef, - { - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - - let real_songs = if virtual_path.as_ref().parent().is_some() { - let real_path = vfs.virtual_to_real(virtual_path)?; - let song_path_filter = { - let mut path_buf = real_path; - path_buf.push("%"); - path_buf.as_path().to_string_lossy().into_owned() - }; - sqlx::query_as!( - scanner::Song, - "SELECT * FROM songs WHERE path LIKE $1 ORDER BY path COLLATE NOCASE ASC", - song_path_filter - ) - .fetch_all(connection.as_mut()) - .await? - } else { - sqlx::query_as!( - scanner::Song, - "SELECT * FROM songs ORDER BY path COLLATE NOCASE ASC" - ) - .fetch_all(connection.as_mut()) - .await? - }; - - let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs)); - Ok(virtual_songs.collect::>()) - } - - pub async fn get_random_albums(&self, count: i64) -> Result, Error> { - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - - let real_directories = sqlx::query_as!( - scanner::Directory, - "SELECT * FROM directories WHERE album IS NOT NULL ORDER BY RANDOM() DESC LIMIT $1", - count - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_directories = real_directories - .into_iter() - .filter_map(|d| d.virtualize(&vfs)); - Ok(virtual_directories.collect::>()) - } - - pub async fn get_recent_albums(&self, count: i64) -> Result, Error> { - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - - let real_directories = sqlx::query_as!( - scanner::Directory, - "SELECT * FROM directories WHERE album IS NOT NULL ORDER BY date_added DESC LIMIT $1", - count - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_directories = real_directories - .into_iter() - .filter_map(|d| d.virtualize(&vfs)); - Ok(virtual_directories.collect::>()) - } - - pub async fn search(&self, query: &str) -> Result, Error> { - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - let like_test = format!("%{}%", query); - let mut output = Vec::new(); - - // Find dirs with matching path and parent not matching - { - let real_directories = sqlx::query_as!( - scanner::Directory, - "SELECT * FROM directories WHERE path LIKE $1 AND parent NOT LIKE $1", - like_test - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_directories = real_directories - .into_iter() - .filter_map(|d| d.virtualize(&vfs)); - - output.extend(virtual_directories.map(CollectionFile::Directory)); - } - - // Find songs with matching title/album/artist and non-matching parent - { - let real_songs = sqlx::query_as!( - scanner::Song, - r#" - SELECT * FROM songs - WHERE ( path LIKE $1 - OR title LIKE $1 - OR album LIKE $1 - OR artists LIKE $1 - OR album_artists LIKE $1 - ) - AND parent NOT LIKE $1 - "#, - like_test - ) - .fetch_all(connection.as_mut()) - .await?; - - let virtual_songs = real_songs.into_iter().filter_map(|d| d.virtualize(&vfs)); - - output.extend(virtual_songs.map(CollectionFile::Song)); - } - - Ok(output) - } - - pub async fn get_song(&self, virtual_path: &Path) -> Result { - let vfs = self.vfs_manager.get_vfs().await?; - let mut connection = self.db.connect().await?; - - let real_path = vfs.virtual_to_real(virtual_path)?; - let real_path_string = real_path.as_path().to_string_lossy(); - - let real_song = sqlx::query_as!( - scanner::Song, - "SELECT * FROM songs WHERE path = $1", - real_path_string - ) - 
.fetch_one(connection.as_mut()) - .await?; - - match real_song.virtualize(&vfs) { - Some(s) => Ok(s), - None => Err(Error::SongNotFound(real_path)), - } - } -} diff --git a/src/app/index/test.rs b/src/app/index/test.rs deleted file mode 100644 index ad6850f..0000000 --- a/src/app/index/test.rs +++ /dev/null @@ -1,139 +0,0 @@ -use std::path::{Path, PathBuf}; - -use super::*; -use crate::app::{scanner, test}; -use crate::test_name; - -const TEST_MOUNT_NAME: &str = "root"; - -#[tokio::test] -async fn can_browse_top_level() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - - let root_path = Path::new(TEST_MOUNT_NAME); - let files = ctx.index.browse(Path::new("")).await.unwrap(); - assert_eq!(files.len(), 1); - match files[0] { - CollectionFile::Directory(ref d) => assert_eq!(d.path, root_path.to_str().unwrap()), - _ => panic!("Expected directory"), - } -} - -#[tokio::test] -async fn can_browse_directory() { - let khemmis_path: PathBuf = [TEST_MOUNT_NAME, "Khemmis"].iter().collect(); - let tobokegao_path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect(); - - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - - let files = ctx.index.browse(Path::new(TEST_MOUNT_NAME)).await.unwrap(); - - assert_eq!(files.len(), 2); - match files[0] { - CollectionFile::Directory(ref d) => assert_eq!(d.path, khemmis_path.to_str().unwrap()), - _ => panic!("Expected directory"), - } - - match files[1] { - CollectionFile::Directory(ref d) => assert_eq!(d.path, tobokegao_path.to_str().unwrap()), - _ => panic!("Expected directory"), - } -} - -#[tokio::test] -async fn can_flatten_root() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - let songs = ctx.index.flatten(Path::new(TEST_MOUNT_NAME)).await.unwrap(); - assert_eq!(songs.len(), 13); - assert_eq!(songs[0].title, Some("Above The Water".to_owned())); -} - -#[tokio::test] -async fn can_flatten_directory() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect(); - let songs = ctx.index.flatten(path).await.unwrap(); - assert_eq!(songs.len(), 8); -} - -#[tokio::test] -async fn can_flatten_directory_with_shared_prefix() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); // Prefix of '(Picnic Remixes)' - let songs = ctx.index.flatten(path).await.unwrap(); - assert_eq!(songs.len(), 7); -} - -#[tokio::test] -async fn can_get_random_albums() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - let albums = ctx.index.get_random_albums(1).await.unwrap(); - assert_eq!(albums.len(), 1); -} - -#[tokio::test] -async fn can_get_recent_albums() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - ctx.scanner.scan().await.unwrap(); - let albums = 
ctx.index.get_recent_albums(2).await.unwrap(); - assert_eq!(albums.len(), 2); - assert!(albums[0].date_added >= albums[1].date_added); -} - -#[tokio::test] -async fn can_get_a_song() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - - ctx.scanner.scan().await.unwrap(); - - let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); - let song_virtual_path = picnic_virtual_dir.join("05 - シャーベット (Sherbet).mp3"); - let artwork_virtual_path = picnic_virtual_dir.join("Folder.png"); - - let song = ctx.index.get_song(&song_virtual_path).await.unwrap(); - assert_eq!(song.path, song_virtual_path.to_string_lossy().as_ref()); - assert_eq!(song.track_number, Some(5)); - assert_eq!(song.disc_number, None); - assert_eq!(song.title, Some("シャーベット (Sherbet)".to_owned())); - assert_eq!( - song.artists, - scanner::MultiString(vec!["Tobokegao".to_owned()]) - ); - assert_eq!(song.album_artists, scanner::MultiString(vec![])); - assert_eq!(song.album, Some("Picnic".to_owned())); - assert_eq!(song.year, Some(2016)); - assert_eq!( - song.artwork, - Some(artwork_virtual_path.to_string_lossy().into_owned()) - ); -} diff --git a/src/app/index/types.rs b/src/app/index/types.rs deleted file mode 100644 index 136ef14..0000000 --- a/src/app/index/types.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::path::PathBuf; - -use crate::{ - app::{scanner, vfs}, - db, -}; - -#[derive(Debug, PartialEq, Eq)] -pub enum CollectionFile { - Directory(scanner::Directory), - Song(scanner::Song), -} - -#[derive(thiserror::Error, Debug)] -pub enum Error { - #[error(transparent)] - Database(#[from] sqlx::Error), - #[error(transparent)] - DatabaseConnection(#[from] db::Error), - #[error("Song was not found: `{0}`")] - SongNotFound(PathBuf), - #[error(transparent)] - Vfs(#[from] vfs::Error), -} diff --git a/src/app/lastfm.rs b/src/app/lastfm.rs index d0d47e2..451e710 100644 --- a/src/app/lastfm.rs +++ b/src/app/lastfm.rs @@ -2,10 +2,7 @@ use rustfm_scrobble::{Scrobble, Scrobbler}; use std::path::Path; use user::AuthToken; -use crate::app::{ - index::{self, Index}, - user, -}; +use crate::app::{collection, user}; const LASTFM_API_KEY: &str = "02b96c939a2b451c31dfd67add1f696e"; const LASTFM_API_SECRET: &str = "0f25a80ceef4b470b5cb97d99d4b3420"; @@ -19,21 +16,21 @@ pub enum Error { #[error("Failed to emit last.fm now playing update")] NowPlaying(rustfm_scrobble::ScrobblerError), #[error(transparent)] - Query(#[from] index::Error), + Query(#[from] collection::Error), #[error(transparent)] User(#[from] user::Error), } #[derive(Clone)] pub struct Manager { - index: Index, + browser: collection::Browser, user_manager: user::Manager, } impl Manager { - pub fn new(index: Index, user_manager: user::Manager) -> Self { + pub fn new(browser: collection::Browser, user_manager: user::Manager) -> Self { Self { - index, + browser, user_manager, } } @@ -84,7 +81,7 @@ impl Manager { } async fn scrobble_from_path(&self, track: &Path) -> Result { - let song = self.index.get_song(track).await?; + let song = self.browser.get_song(track).await?; Ok(Scrobble::new( song.artists.0.first().map(|s| s.as_str()).unwrap_or(""), song.title.as_deref().unwrap_or(""), diff --git a/src/app/playlist.rs b/src/app/playlist.rs index 0a33cbd..c440c8c 100644 --- a/src/app/playlist.rs +++ b/src/app/playlist.rs @@ -1,8 +1,7 @@ use core::clone::Clone; use sqlx::{Acquire, QueryBuilder, Sqlite}; -use crate::app::scanner::Song; -use crate::app::vfs; +use 
crate::app::{collection::Song, vfs}; use crate::db::{self, DB}; #[derive(thiserror::Error, Debug)] @@ -126,8 +125,6 @@ impl Manager { playlist_name: &str, owner: &str, ) -> Result, Error> { - let vfs = self.vfs_manager.get_vfs().await?; - let songs = { let mut connection = self.db.connect().await?; @@ -163,13 +160,7 @@ impl Manager { .await? }; - // Map real path to virtual paths - let virtual_songs = songs - .into_iter() - .filter_map(|s| s.virtualize(&vfs)) - .collect(); - - Ok(virtual_songs) + Ok(songs) } pub async fn delete_playlist(&self, playlist_name: &str, owner: &str) -> Result<(), Error> { @@ -231,21 +222,21 @@ mod test { #[tokio::test] async fn save_playlist_is_idempotent() { - let ctx = test::ContextBuilder::new(test_name!()) + let mut ctx = test::ContextBuilder::new(test_name!()) .user(TEST_USER, TEST_PASSWORD, false) .mount(TEST_MOUNT_NAME, "test-data/small-collection") .build() .await; - ctx.scanner.scan().await.unwrap(); + ctx.updater.update().await.unwrap(); let playlist_content: Vec = ctx - .index + .browser .flatten(Path::new(TEST_MOUNT_NAME)) .await .unwrap() .into_iter() - .map(|s| s.path) + .map(|s| s.virtual_path) .collect(); assert_eq!(playlist_content.len(), 13); @@ -296,21 +287,21 @@ mod test { #[tokio::test] async fn read_playlist_golden_path() { - let ctx = test::ContextBuilder::new(test_name!()) + let mut ctx = test::ContextBuilder::new(test_name!()) .user(TEST_USER, TEST_PASSWORD, false) .mount(TEST_MOUNT_NAME, "test-data/small-collection") .build() .await; - ctx.scanner.scan().await.unwrap(); + ctx.updater.update().await.unwrap(); let playlist_content: Vec = ctx - .index + .browser .flatten(Path::new(TEST_MOUNT_NAME)) .await .unwrap() .into_iter() - .map(|s| s.path) + .map(|s| s.virtual_path) .collect(); assert_eq!(playlist_content.len(), 13); @@ -336,6 +327,6 @@ mod test { ] .iter() .collect(); - assert_eq!(songs[0].path, first_song_path.to_str().unwrap()); + assert_eq!(songs[0].virtual_path, first_song_path.to_str().unwrap()); } } diff --git a/src/app/scanner.rs b/src/app/scanner.rs deleted file mode 100644 index 77ca3f6..0000000 --- a/src/app/scanner.rs +++ /dev/null @@ -1,123 +0,0 @@ -use log::{error, info}; -use std::sync::Arc; -use std::time::{Duration, Instant}; -use tokio::sync::Notify; - -use crate::app::{settings, vfs}; -use crate::db::DB; - -mod cleaner; -mod collector; -mod inserter; -mod metadata; -#[cfg(test)] -mod test; -mod traverser; -mod types; - -pub use self::types::*; - -#[derive(Clone)] -pub struct Scanner { - db: DB, - vfs_manager: vfs::Manager, - settings_manager: settings::Manager, - pending_scan: Arc, -} - -impl Scanner { - pub fn new(db: DB, vfs_manager: vfs::Manager, settings_manager: settings::Manager) -> Self { - let scanner = Self { - db, - vfs_manager, - settings_manager, - pending_scan: Arc::new(Notify::new()), - }; - - tokio::spawn({ - let scanner = scanner.clone(); - async move { - loop { - scanner.pending_scan.notified().await; - if let Err(e) = scanner.scan().await { - error!("Error while updating index: {}", e); - } - } - } - }); - - scanner - } - - pub fn trigger_scan(&self) { - self.pending_scan.notify_one(); - } - - pub fn begin_periodic_scans(&self) { - tokio::spawn({ - let index = self.clone(); - async move { - loop { - index.trigger_scan(); - let sleep_duration = index - .settings_manager - .get_index_sleep_duration() - .await - .unwrap_or_else(|e| { - error!("Could not retrieve index sleep duration: {}", e); - Duration::from_secs(1800) - }); - tokio::time::sleep(sleep_duration).await; - } - } - }); - } - - pub 
async fn scan(&self) -> Result<(), types::Error> { - let start = Instant::now(); - info!("Beginning library index update"); - - let album_art_pattern = self - .settings_manager - .get_index_album_art_pattern() - .await - .ok(); - - let cleaner = cleaner::Cleaner::new(self.db.clone(), self.vfs_manager.clone()); - cleaner.clean().await?; - - let (insert_sender, insert_receiver) = tokio::sync::mpsc::unbounded_channel(); - let insertion = tokio::spawn({ - let db = self.db.clone(); - async { - let mut inserter = inserter::Inserter::new(db, insert_receiver); - inserter.insert().await; - } - }); - - let (collect_sender, collect_receiver) = crossbeam_channel::unbounded(); - let collection = tokio::task::spawn_blocking(|| { - let collector = - collector::Collector::new(collect_receiver, insert_sender, album_art_pattern); - collector.collect(); - }); - - let vfs = self.vfs_manager.get_vfs().await?; - let traversal = tokio::task::spawn_blocking(move || { - let mounts = vfs.mounts(); - let traverser = traverser::Traverser::new(collect_sender); - traverser.traverse(mounts.iter().map(|p| p.source.clone()).collect()); - }); - - traversal.await.unwrap(); - collection.await.unwrap(); - insertion.await.unwrap(); - - info!( - "Library index update took {} seconds", - start.elapsed().as_millis() as f32 / 1000.0 - ); - - Ok(()) - } -} diff --git a/src/app/scanner/cleaner.rs b/src/app/scanner/cleaner.rs deleted file mode 100644 index 6ce94f8..0000000 --- a/src/app/scanner/cleaner.rs +++ /dev/null @@ -1,101 +0,0 @@ -use rayon::prelude::*; -use sqlx::{QueryBuilder, Sqlite}; -use std::path::Path; - -use crate::app::vfs; -use crate::db::{self, DB}; - -const INDEX_BUILDING_CLEAN_BUFFER_SIZE: usize = 500; // Deletions in each transaction - -#[derive(thiserror::Error, Debug)] -pub enum Error { - #[error(transparent)] - Database(#[from] sqlx::Error), - #[error(transparent)] - DatabaseConnection(#[from] db::Error), - #[error(transparent)] - ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError), - #[error(transparent)] - Vfs(#[from] vfs::Error), -} - -pub struct Cleaner { - db: DB, - vfs_manager: vfs::Manager, -} - -impl Cleaner { - pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self { - Self { db, vfs_manager } - } - - pub async fn clean(&self) -> Result<(), Error> { - let vfs = self.vfs_manager.get_vfs().await?; - - let (all_directories, all_songs) = { - let mut connection = self.db.connect().await?; - - let directories = sqlx::query_scalar!("SELECT path FROM directories") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - - let songs = sqlx::query_scalar!("SELECT path FROM songs") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - - (directories, songs) - }; - - let list_missing_directories = || { - all_directories - .par_iter() - .filter(|ref directory_path| { - let path = Path::new(&directory_path); - !path.exists() || vfs.real_to_virtual(path).is_err() - }) - .collect::>() - }; - - let list_missing_songs = || { - all_songs - .par_iter() - .filter(|ref song_path| { - let path = Path::new(&song_path); - !path.exists() || vfs.real_to_virtual(path).is_err() - }) - .collect::>() - }; - - let thread_pool = rayon::ThreadPoolBuilder::new().build()?; - let (missing_directories, missing_songs) = - thread_pool.join(list_missing_directories, list_missing_songs); - - { - let mut connection = self.db.connect().await?; - - for chunk in missing_directories[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) { - QueryBuilder::::new("DELETE FROM directories WHERE path IN ") - .push_tuples(chunk, |mut b, path| { - 
b.push_bind(path); - }) - .build() - .execute(connection.as_mut()) - .await?; - } - - for chunk in missing_songs[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) { - QueryBuilder::::new("DELETE FROM songs WHERE path IN ") - .push_tuples(chunk, |mut b, path| { - b.push_bind(path); - }) - .build() - .execute(connection.as_mut()) - .await?; - } - } - - Ok(()) - } -} diff --git a/src/app/scanner/collector.rs b/src/app/scanner/collector.rs deleted file mode 100644 index e58a842..0000000 --- a/src/app/scanner/collector.rs +++ /dev/null @@ -1,151 +0,0 @@ -use log::error; -use regex::Regex; - -use crate::app::scanner::MultiString; - -use super::inserter; -use super::traverser; - -pub struct Collector { - receiver: crossbeam_channel::Receiver, - sender: tokio::sync::mpsc::UnboundedSender, - album_art_pattern: Option, -} - -impl Collector { - pub fn new( - receiver: crossbeam_channel::Receiver, - sender: tokio::sync::mpsc::UnboundedSender, - album_art_pattern: Option, - ) -> Self { - Self { - receiver, - sender, - album_art_pattern, - } - } - - pub fn collect(&self) { - while let Ok(directory) = self.receiver.recv() { - self.collect_directory(directory); - } - } - - fn collect_directory(&self, directory: traverser::Directory) { - let mut directory_album = None; - let mut directory_year = None; - let mut directory_artists = None; - let mut inconsistent_directory_album = false; - let mut inconsistent_directory_year = false; - let mut inconsistent_directory_artist = false; - - let directory_artwork = self.get_artwork(&directory); - let directory_path_string = directory.path.to_string_lossy().to_string(); - let directory_parent_string = directory.parent.map(|p| p.to_string_lossy().to_string()); - - for song in directory.songs { - let tags = song.metadata; - let path_string = song.path.to_string_lossy().to_string(); - - if tags.year.is_some() { - inconsistent_directory_year |= - directory_year.is_some() && directory_year != tags.year; - directory_year = tags.year; - } - - if tags.album.is_some() { - inconsistent_directory_album |= - directory_album.is_some() && directory_album != tags.album; - directory_album = tags.album.as_ref().cloned(); - } - - if !tags.album_artists.is_empty() { - inconsistent_directory_artist |= directory_artists.is_some() - && directory_artists.as_ref() != Some(&tags.album_artists); - directory_artists = Some(tags.album_artists.clone()); - } else if !tags.artists.is_empty() { - inconsistent_directory_artist |= directory_artists.is_some() - && directory_artists.as_ref() != Some(&tags.artists); - directory_artists = Some(tags.artists.clone()); - } - - let artwork_path = if tags.has_artwork { - Some(path_string.clone()) - } else { - directory_artwork.as_ref().cloned() - }; - - if let Err(e) = self.sender.send(inserter::Item::Song(inserter::Song { - path: path_string, - parent: directory_path_string.clone(), - disc_number: tags.disc_number.map(|n| n as i32), - track_number: tags.track_number.map(|n| n as i32), - title: tags.title, - duration: tags.duration.map(|n| n as i32), - artists: MultiString(tags.artists), - album_artists: MultiString(tags.album_artists), - album: tags.album, - year: tags.year, - artwork: artwork_path, - lyricists: MultiString(tags.lyricists), - composers: MultiString(tags.composers), - genres: MultiString(tags.genres), - labels: MultiString(tags.labels), - })) { - error!("Error while sending song from collector: {}", e); - } - } - - if inconsistent_directory_year { - directory_year = None; - } - if inconsistent_directory_album { - directory_album = None; - } - if 
inconsistent_directory_artist { - directory_artists = None; - } - - if let Err(e) = self - .sender - .send(inserter::Item::Directory(inserter::Directory { - path: directory_path_string, - parent: directory_parent_string, - artwork: directory_artwork, - album: directory_album, - artists: MultiString(directory_artists.unwrap_or_default()), - year: directory_year, - date_added: directory.created, - })) { - error!("Error while sending directory from collector: {}", e); - } - } - - fn get_artwork(&self, directory: &traverser::Directory) -> Option { - let regex_artwork = directory.other_files.iter().find_map(|path| { - let matches = path - .file_name() - .and_then(|name| name.to_str()) - .map(|name| match &self.album_art_pattern { - Some(pattern) => pattern.is_match(name), - None => false, - }) - .unwrap_or(false); - if matches { - Some(path.to_string_lossy().to_string()) - } else { - None - } - }); - - let embedded_artwork = directory.songs.iter().find_map(|song| { - if song.metadata.has_artwork { - Some(song.path.to_string_lossy().to_string()) - } else { - None - } - }); - - regex_artwork.or(embedded_artwork) - } -} diff --git a/src/app/scanner/inserter.rs b/src/app/scanner/inserter.rs deleted file mode 100644 index 8d24c56..0000000 --- a/src/app/scanner/inserter.rs +++ /dev/null @@ -1,147 +0,0 @@ -use log::error; -use sqlx::{QueryBuilder, Sqlite}; -use tokio::sync::mpsc::UnboundedReceiver; - -use crate::{app::scanner::MultiString, db::DB}; - -const INDEX_BUILDING_INSERT_BUFFER_SIZE: usize = 1000; // Insertions in each transaction - -pub struct Song { - pub path: String, - pub parent: String, - pub track_number: Option, - pub disc_number: Option, - pub title: Option, - pub artists: MultiString, - pub album_artists: MultiString, - pub year: Option, - pub album: Option, - pub artwork: Option, - pub duration: Option, - pub lyricists: MultiString, - pub composers: MultiString, - pub genres: MultiString, - pub labels: MultiString, -} - -pub struct Directory { - pub path: String, - pub parent: Option, - pub artists: MultiString, - pub year: Option, - pub album: Option, - pub artwork: Option, - pub date_added: i32, -} - -pub enum Item { - Directory(Directory), - Song(Song), -} - -pub struct Inserter { - receiver: UnboundedReceiver, - new_directories: Vec, - new_songs: Vec, - db: DB, -} - -impl Inserter { - pub fn new(db: DB, receiver: UnboundedReceiver) -> Self { - let new_directories = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE); - let new_songs = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE); - Self { - receiver, - new_directories, - new_songs, - db, - } - } - - pub async fn insert(&mut self) { - while let Some(item) = self.receiver.recv().await { - self.insert_item(item).await; - } - self.flush_directories().await; - self.flush_songs().await; - } - - async fn insert_item(&mut self, insert: Item) { - match insert { - Item::Directory(d) => { - self.new_directories.push(d); - if self.new_directories.len() >= INDEX_BUILDING_INSERT_BUFFER_SIZE { - self.flush_directories().await; - } - } - Item::Song(s) => { - self.new_songs.push(s); - if self.new_songs.len() >= INDEX_BUILDING_INSERT_BUFFER_SIZE { - self.flush_songs().await; - } - } - }; - } - - async fn flush_directories(&mut self) { - let Ok(mut connection) = self.db.connect().await else { - error!("Could not acquire connection to insert new directories in database"); - return; - }; - - let result = QueryBuilder::::new( - "INSERT INTO directories(path, parent, artists, year, album, artwork, date_added) ", - ) - 
.push_values(&self.new_directories, |mut b, directory| { - b.push_bind(&directory.path) - .push_bind(&directory.parent) - .push_bind(&directory.artists) - .push_bind(directory.year) - .push_bind(&directory.album) - .push_bind(&directory.artwork) - .push_bind(directory.date_added); - }) - .build() - .execute(connection.as_mut()) - .await; - - match result { - Ok(_) => self.new_directories.clear(), - Err(_) => error!("Could not insert new directories in database"), - }; - } - - async fn flush_songs(&mut self) { - let Ok(mut connection) = self.db.connect().await else { - error!("Could not acquire connection to insert new songs in database"); - return; - }; - - let result = QueryBuilder::::new("INSERT INTO songs(path, parent, track_number, disc_number, title, artists, album_artists, year, album, artwork, duration, lyricists, composers, genres, labels) ") - .push_values(&self.new_songs, |mut b, song| { - b.push_bind(&song.path) - .push_bind(&song.parent) - .push_bind(song.track_number) - .push_bind(song.disc_number) - .push_bind(&song.title) - .push_bind(&song.artists) - .push_bind(&song.album_artists) - .push_bind(song.year) - .push_bind(&song.album) - .push_bind(&song.artwork) - .push_bind(song.duration) - .push_bind(&song.lyricists) - .push_bind(&song.composers) - .push_bind(&song.genres) - .push_bind(&song.labels); - }) - .build() - .execute(connection.as_mut()) - .await; - - match result { - Ok(_) => self.new_songs.clear(), - Err(_) => error!("Could not insert new songs in database"), - }; - } -} diff --git a/src/app/scanner/test.rs b/src/app/scanner/test.rs deleted file mode 100644 index 925d844..0000000 --- a/src/app/scanner/test.rs +++ /dev/null @@ -1,133 +0,0 @@ -use std::path::PathBuf; - -use crate::{ - app::{scanner, settings, test}, - test_name, -}; - -const TEST_MOUNT_NAME: &str = "root"; - -#[tokio::test] -async fn scan_adds_new_content() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - - ctx.scanner.scan().await.unwrap(); - ctx.scanner.scan().await.unwrap(); // Validates that subsequent updates don't run into conflicts - - let mut connection = ctx.db.connect().await.unwrap(); - let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - assert_eq!(all_directories.len(), 6); - assert_eq!(all_songs.len(), 13); -} - -#[tokio::test] -async fn scan_removes_missing_content() { - let builder = test::ContextBuilder::new(test_name!()); - - let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect(); - let test_collection_dir: PathBuf = builder.test_directory.join("small-collection"); - - let copy_options = fs_extra::dir::CopyOptions::new(); - fs_extra::dir::copy( - original_collection_dir, - &builder.test_directory, - ©_options, - ) - .unwrap(); - - let ctx = builder - .mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap()) - .build() - .await; - - ctx.scanner.scan().await.unwrap(); - - { - let mut connection = ctx.db.connect().await.unwrap(); - let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - assert_eq!(all_directories.len(), 6); - 
assert_eq!(all_songs.len(), 13); - } - - let khemmis_directory = test_collection_dir.join("Khemmis"); - std::fs::remove_dir_all(khemmis_directory).unwrap(); - ctx.scanner.scan().await.unwrap(); - { - let mut connection = ctx.db.connect().await.unwrap(); - let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs") - .fetch_all(connection.as_mut()) - .await - .unwrap(); - assert_eq!(all_directories.len(), 4); - assert_eq!(all_songs.len(), 8); - } -} - -#[tokio::test] -async fn finds_embedded_artwork() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - - ctx.scanner.scan().await.unwrap(); - - let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); - let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3"); - - let song = ctx.index.get_song(&song_virtual_path).await.unwrap(); - assert_eq!( - song.artwork, - Some(song_virtual_path.to_string_lossy().into_owned()) - ); -} - -#[tokio::test] -async fn album_art_pattern_is_case_insensitive() { - let ctx = test::ContextBuilder::new(test_name!()) - .mount(TEST_MOUNT_NAME, "test-data/small-collection") - .build() - .await; - - let patterns = vec!["folder", "FOLDER"]; - - for pattern in patterns.into_iter() { - ctx.settings_manager - .amend(&settings::NewSettings { - album_art_pattern: Some(pattern.to_owned()), - ..Default::default() - }) - .await - .unwrap(); - ctx.scanner.scan().await.unwrap(); - - let hunted_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect(); - let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg"); - let song = &ctx.index.flatten(&hunted_virtual_dir).await.unwrap()[0]; - assert_eq!( - song.artwork, - Some(artwork_virtual_path.to_string_lossy().into_owned()) - ); - } -} diff --git a/src/app/scanner/traverser.rs b/src/app/scanner/traverser.rs deleted file mode 100644 index e79650d..0000000 --- a/src/app/scanner/traverser.rs +++ /dev/null @@ -1,202 +0,0 @@ -use crossbeam_channel::{self, Receiver, Sender}; -use log::{error, info}; -use std::cmp::min; -use std::fs; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::sync::atomic::{AtomicUsize, Ordering}; -use std::sync::Arc; -use std::thread; -use std::time::Duration; - -use crate::app::scanner::metadata::{self, SongMetadata}; - -#[derive(Debug)] -pub struct Song { - pub path: PathBuf, - pub metadata: SongMetadata, -} - -#[derive(Debug)] -pub struct Directory { - pub parent: Option, - pub path: PathBuf, - pub songs: Vec, - pub other_files: Vec, - pub created: i32, -} - -pub struct Traverser { - directory_sender: Sender, -} - -#[derive(Debug)] -struct WorkItem { - parent: Option, - path: PathBuf, -} - -impl Traverser { - pub fn new(directory_sender: Sender) -> Self { - Self { directory_sender } - } - - pub fn traverse(&self, roots: Vec) { - let num_pending_work_items = Arc::new(AtomicUsize::new(roots.len())); - let (work_item_sender, work_item_receiver) = crossbeam_channel::unbounded(); - - let key = "POLARIS_NUM_TRAVERSER_THREADS"; - let num_threads = std::env::var_os(key) - .map(|v| v.to_string_lossy().to_string()) - .and_then(|v| usize::from_str(&v).ok()) - .unwrap_or_else(|| min(num_cpus::get(), 4)); - info!("Browsing collection using {} threads", num_threads); - - let mut threads = Vec::new(); - for _ in 0..num_threads { - let work_item_sender = 
work_item_sender.clone(); - let work_item_receiver = work_item_receiver.clone(); - let directory_sender = self.directory_sender.clone(); - let num_pending_work_items = num_pending_work_items.clone(); - threads.push(thread::spawn(move || { - let worker = Worker { - work_item_sender, - work_item_receiver, - directory_sender, - num_pending_work_items, - }; - worker.run(); - })); - } - - for root in roots { - let work_item = WorkItem { - parent: None, - path: root, - }; - if let Err(e) = work_item_sender.send(work_item) { - error!("Error initializing traverser: {:#?}", e); - } - } - - for thread in threads { - if let Err(e) = thread.join() { - error!("Error joining on traverser worker thread: {:#?}", e); - } - } - } -} - -struct Worker { - work_item_sender: Sender, - work_item_receiver: Receiver, - directory_sender: Sender, - num_pending_work_items: Arc, -} - -impl Worker { - fn run(&self) { - while let Some(work_item) = self.find_work_item() { - self.process_work_item(work_item); - self.on_item_processed(); - } - } - - fn find_work_item(&self) -> Option { - loop { - if self.is_all_work_done() { - return None; - } - if let Ok(w) = self - .work_item_receiver - .recv_timeout(Duration::from_millis(100)) - { - return Some(w); - } - } - } - - fn is_all_work_done(&self) -> bool { - self.num_pending_work_items.load(Ordering::SeqCst) == 0 - } - - fn queue_work(&self, work_item: WorkItem) { - self.num_pending_work_items.fetch_add(1, Ordering::SeqCst); - self.work_item_sender.send(work_item).unwrap(); - } - - fn on_item_processed(&self) { - self.num_pending_work_items.fetch_sub(1, Ordering::SeqCst); - } - - fn emit_directory(&self, directory: Directory) { - self.directory_sender.send(directory).unwrap(); - } - - pub fn process_work_item(&self, work_item: WorkItem) { - let read_dir = match fs::read_dir(&work_item.path) { - Ok(read_dir) => read_dir, - Err(e) => { - error!( - "Directory read error for `{}`: {}", - work_item.path.display(), - e - ); - return; - } - }; - - let mut sub_directories = Vec::new(); - let mut songs = Vec::new(); - let mut other_files = Vec::new(); - - for entry in read_dir { - let path = match entry { - Ok(ref f) => f.path(), - Err(e) => { - error!( - "File read error within `{}`: {}", - work_item.path.display(), - e - ); - break; - } - }; - - if path.is_dir() { - sub_directories.push(path); - } else if let Some(metadata) = metadata::read(&path) { - songs.push(Song { path, metadata }); - } else { - other_files.push(path); - } - } - - let created = Self::get_date_created(&work_item.path).unwrap_or_default(); - - self.emit_directory(Directory { - path: work_item.path.to_owned(), - parent: work_item.parent, - songs, - other_files, - created, - }); - - for sub_directory in sub_directories.into_iter() { - self.queue_work(WorkItem { - parent: Some(work_item.path.clone()), - path: sub_directory, - }); - } - } - - fn get_date_created(path: &Path) -> Option { - if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) { - t.duration_since(std::time::UNIX_EPOCH) - .map(|d| d.as_secs() as i32) - .ok() - } else { - None - } - } -} diff --git a/src/app/scanner/types.rs b/src/app/scanner/types.rs deleted file mode 100644 index 0a9ab32..0000000 --- a/src/app/scanner/types.rs +++ /dev/null @@ -1,125 +0,0 @@ -use std::{borrow::Cow, path::Path}; - -use sqlx::{ - encode::IsNull, - sqlite::{SqliteArgumentValue, SqliteTypeInfo}, - Sqlite, -}; - -use crate::{ - app::vfs::{self, VFS}, - db, -}; - -#[derive(thiserror::Error, Debug)] -pub enum Error { - #[error(transparent)] - 
IndexClean(#[from] super::cleaner::Error), - #[error(transparent)] - Database(#[from] sqlx::Error), - #[error(transparent)] - DatabaseConnection(#[from] db::Error), - #[error(transparent)] - Vfs(#[from] vfs::Error), -} - -#[derive(Debug, PartialEq, Eq)] -pub struct MultiString(pub Vec); - -static MULTI_STRING_SEPARATOR: &str = "\u{000C}"; - -impl<'q> sqlx::Encode<'q, Sqlite> for MultiString { - fn encode_by_ref(&self, args: &mut Vec>) -> IsNull { - if self.0.is_empty() { - IsNull::Yes - } else { - let joined = self.0.join(MULTI_STRING_SEPARATOR); - args.push(SqliteArgumentValue::Text(Cow::Owned(joined))); - IsNull::No - } - } -} - -impl From> for MultiString { - fn from(value: Option) -> Self { - match value { - None => MultiString(Vec::new()), - Some(s) => MultiString( - s.split(MULTI_STRING_SEPARATOR) - .map(|s| s.to_string()) - .collect(), - ), - } - } -} - -impl sqlx::Type for MultiString { - fn type_info() -> SqliteTypeInfo { - <&str as sqlx::Type>::type_info() - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct Song { - pub id: i64, - pub path: String, - pub parent: String, - pub track_number: Option, - pub disc_number: Option, - pub title: Option, - pub artists: MultiString, - pub album_artists: MultiString, - pub year: Option, - pub album: Option, - pub artwork: Option, - pub duration: Option, - pub lyricists: MultiString, - pub composers: MultiString, - pub genres: MultiString, - pub labels: MultiString, -} - -impl Song { - pub fn virtualize(mut self, vfs: &VFS) -> Option { - self.path = match vfs.real_to_virtual(Path::new(&self.path)) { - Ok(p) => p.to_string_lossy().into_owned(), - _ => return None, - }; - if let Some(artwork_path) = self.artwork { - self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) { - Ok(p) => Some(p.to_string_lossy().into_owned()), - _ => None, - }; - } - Some(self) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct Directory { - pub id: i64, - pub path: String, - pub parent: Option, - // TODO remove all below when explorer and metadata browsing are separate - pub artists: MultiString, - pub year: Option, - pub album: Option, - pub artwork: Option, - pub date_added: i64, -} - -impl Directory { - pub fn virtualize(mut self, vfs: &VFS) -> Option { - self.path = match vfs.real_to_virtual(Path::new(&self.path)) { - Ok(p) => p.to_string_lossy().into_owned(), - _ => return None, - }; - if let Some(artwork_path) = self.artwork { - self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) { - Ok(p) => Some(p.to_string_lossy().into_owned()), - _ => None, - }; - } - Some(self) - } -} diff --git a/src/app/test.rs b/src/app/test.rs index 1c25089..b90922e 100644 --- a/src/app/test.rs +++ b/src/app/test.rs @@ -1,13 +1,13 @@ use std::path::PathBuf; -use crate::app::{config, ddns, index::Index, playlist, scanner::Scanner, settings, user, vfs}; +use crate::app::{collection, config, ddns, playlist, settings, user, vfs}; use crate::db::DB; use crate::test::*; pub struct Context { pub db: DB, - pub scanner: Scanner, - pub index: Index, + pub browser: collection::Browser, + pub updater: collection::Updater, pub config_manager: config::Manager, pub ddns_manager: ddns::Manager, pub playlist_manager: playlist::Manager, @@ -66,16 +66,22 @@ impl ContextBuilder { vfs_manager.clone(), ddns_manager.clone(), ); - let scanner = Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone()); - let index = Index::new(db.clone(), vfs_manager.clone()); + let browser = collection::Browser::new(db.clone(), vfs_manager.clone()); + let index = 
collection::Index::new(); + let updater = collection::Updater::new( + db.clone(), + index.clone(), + settings_manager.clone(), + vfs_manager.clone(), + ); let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone()); config_manager.apply(&self.config).await.unwrap(); Context { db, - scanner, - index, + browser, + updater, config_manager, ddns_manager, playlist_manager, diff --git a/src/app/vfs.rs b/src/app/vfs.rs index c304515..5083de8 100644 --- a/src/app/vfs.rs +++ b/src/app/vfs.rs @@ -8,8 +8,6 @@ use crate::db::{self, DB}; #[derive(thiserror::Error, Debug)] pub enum Error { - #[error("The following real path could not be mapped to a virtual path: `{0}`")] - CouldNotMapToVirtualPath(PathBuf), #[error("The following virtual path could not be mapped to a real path: `{0}`")] CouldNotMapToRealPath(PathBuf), #[error(transparent)] @@ -54,18 +52,10 @@ impl VFS { VFS { mounts } } - pub fn real_to_virtual>(&self, real_path: P) -> Result { - for mount in &self.mounts { - if let Ok(p) = real_path.as_ref().strip_prefix(&mount.source) { - let mount_path = Path::new(&mount.name); - return if p.components().count() == 0 { - Ok(mount_path.to_path_buf()) - } else { - Ok(mount_path.join(p)) - }; - } - } - Err(Error::CouldNotMapToVirtualPath(real_path.as_ref().into())) + pub fn exists>(&self, virtual_path: P) -> bool { + self.mounts + .iter() + .any(|m| virtual_path.as_ref().starts_with(&m.name)) } pub fn virtual_to_real>(&self, virtual_path: P) -> Result { @@ -162,18 +152,6 @@ mod test { assert_eq!(converted_path, real_path); } - #[test] - fn converts_real_to_virtual() { - let vfs = VFS::new(vec![Mount { - name: "root".to_owned(), - source: Path::new("test_dir").to_owned(), - }]); - let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect(); - let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect(); - let converted_path = vfs.real_to_virtual(real_path.as_path()).unwrap(); - assert_eq!(converted_path, virtual_path); - } - #[test] fn cleans_path_string() { let mut correct_path = path::PathBuf::new(); diff --git a/src/db/20240711080449_init.sql b/src/db/20240711080449_init.sql index f47c71a..8511fa7 100644 --- a/src/db/20240711080449_init.sql +++ b/src/db/20240711080449_init.sql @@ -48,19 +48,16 @@ CREATE TABLE users ( CREATE TABLE directories ( id INTEGER PRIMARY KEY NOT NULL, path TEXT NOT NULL, - parent TEXT, - artists TEXT, - year INTEGER, - album TEXT, - artwork TEXT, - date_added INTEGER DEFAULT 0 NOT NULL, + virtual_path TEXT NOT NULL, + virtual_parent TEXT, UNIQUE(path) ON CONFLICT REPLACE ); CREATE TABLE songs ( id INTEGER PRIMARY KEY NOT NULL, path TEXT NOT NULL, - parent TEXT NOT NULL, + virtual_path TEXT NOT NULL, + virtual_parent TEXT NOT NULL, track_number INTEGER, disc_number INTEGER, title TEXT, @@ -74,6 +71,7 @@ CREATE TABLE songs ( composers TEXT, genres TEXT, labels TEXT, + date_added INTEGER DEFAULT 0 NOT NULL, UNIQUE(path) ON CONFLICT REPLACE ); diff --git a/src/main.rs b/src/main.rs index ab02616..ba90457 100644 --- a/src/main.rs +++ b/src/main.rs @@ -144,7 +144,7 @@ fn main() -> Result<(), Error> { async fn async_main(cli_options: CLIOptions, paths: paths::Paths) -> Result<(), Error> { // Create and run app let app = app::App::new(cli_options.port.unwrap_or(5050), paths).await?; - app.scanner.begin_periodic_scans(); + app.updater.begin_periodic_scans(); app.ddns_manager.begin_periodic_updates(); // Start server diff --git a/src/server/axum.rs b/src/server/axum.rs index 7b0e01b..580006d 100644 --- 
a/src/server/axum.rs +++ b/src/server/axum.rs @@ -27,6 +27,18 @@ pub async fn launch(app: App) -> Result<(), std::io::Error> { Ok(()) } +impl FromRef for app::collection::Browser { + fn from_ref(app: &App) -> Self { + app.browser.clone() + } +} + +impl FromRef for app::collection::Updater { + fn from_ref(app: &App) -> Self { + app.updater.clone() + } +} + impl FromRef for app::config::Manager { fn from_ref(app: &App) -> Self { app.config_manager.clone() @@ -39,12 +51,6 @@ impl FromRef for app::ddns::Manager { } } -impl FromRef for app::index::Index { - fn from_ref(app: &App) -> Self { - app.index.clone() - } -} - impl FromRef for app::lastfm::Manager { fn from_ref(app: &App) -> Self { app.lastfm_manager.clone() @@ -63,12 +69,6 @@ impl FromRef for app::user::Manager { } } -impl FromRef for app::scanner::Scanner { - fn from_ref(app: &App) -> Self { - app.scanner.clone() - } -} - impl FromRef for app::settings::Manager { fn from_ref(app: &App) -> Self { app.settings_manager.clone() diff --git a/src/server/axum/api.rs b/src/server/axum/api.rs index 343d04b..ae50dc8 100644 --- a/src/server/axum/api.rs +++ b/src/server/axum/api.rs @@ -11,7 +11,7 @@ use base64::{prelude::BASE64_STANDARD_NO_PAD, Engine}; use percent_encoding::percent_decode_str; use crate::{ - app::{config, ddns, index, lastfm, playlist, scanner, settings, thumbnail, user, vfs, App}, + app::{collection, config, ddns, lastfm, playlist, settings, thumbnail, user, vfs, App}, server::{dto, error::APIError, APIMajorVersion, API_MAJOR_VERSION, API_MINOR_VERSION}, }; @@ -247,14 +247,14 @@ async fn put_preferences( async fn post_trigger_index( _admin_rights: AdminRights, - State(scanner): State, + State(updater): State, ) -> Result<(), APIError> { - scanner.trigger_scan(); + updater.trigger_scan(); Ok(()) } fn collection_files_to_response( - files: Vec, + files: Vec, api_version: APIMajorVersion, ) -> Response { match api_version { @@ -275,7 +275,7 @@ fn collection_files_to_response( } } -fn songs_to_response(files: Vec, api_version: APIMajorVersion) -> Response { +fn songs_to_response(files: Vec, api_version: APIMajorVersion) -> Response { match api_version { APIMajorVersion::V7 => Json( files @@ -295,7 +295,7 @@ fn songs_to_response(files: Vec, api_version: APIMajorVersion) -> } fn directories_to_response( - files: Vec, + files: Vec, api_version: APIMajorVersion, ) -> Response { match api_version { @@ -319,9 +319,9 @@ fn directories_to_response( async fn get_browse_root( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, ) -> Response { - let result = match index.browse(std::path::Path::new("")).await { + let result = match browser.browse(std::path::Path::new("")).await { Ok(r) => r, Err(e) => return APIError::from(e).into_response(), }; @@ -331,11 +331,11 @@ async fn get_browse_root( async fn get_browse( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, Path(path): Path, ) -> Response { let path = percent_decode_str(&path).decode_utf8_lossy(); - let result = match index.browse(std::path::Path::new(path.as_ref())).await { + let result = match browser.browse(std::path::Path::new(path.as_ref())).await { Ok(r) => r, Err(e) => return APIError::from(e).into_response(), }; @@ -345,9 +345,9 @@ async fn get_browse( async fn get_flatten_root( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, ) -> Response { - let songs = match index.flatten(std::path::Path::new("")).await { + let songs = match 
browser.flatten(std::path::Path::new("")).await { Ok(s) => s, Err(e) => return APIError::from(e).into_response(), }; @@ -357,11 +357,11 @@ async fn get_flatten_root( async fn get_flatten( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, Path(path): Path, ) -> Response { let path = percent_decode_str(&path).decode_utf8_lossy(); - let songs = match index.flatten(std::path::Path::new(path.as_ref())).await { + let songs = match browser.flatten(std::path::Path::new(path.as_ref())).await { Ok(s) => s, Err(e) => return APIError::from(e).into_response(), }; @@ -371,9 +371,9 @@ async fn get_flatten( async fn get_random( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, ) -> Response { - let directories = match index.get_random_albums(20).await { + let directories = match browser.get_random_albums(20).await { Ok(d) => d, Err(e) => return APIError::from(e).into_response(), }; @@ -383,9 +383,9 @@ async fn get_random( async fn get_recent( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, ) -> Response { - let directories = match index.get_recent_albums(20).await { + let directories = match browser.get_recent_albums(20).await { Ok(d) => d, Err(e) => return APIError::from(e).into_response(), }; @@ -395,9 +395,9 @@ async fn get_recent( async fn get_search_root( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, ) -> Response { - let files = match index.search("").await { + let files = match browser.search("").await { Ok(f) => f, Err(e) => return APIError::from(e).into_response(), }; @@ -407,10 +407,10 @@ async fn get_search_root( async fn get_search( _auth: Auth, api_version: APIMajorVersion, - State(index): State, + State(browser): State, Path(query): Path, ) -> Response { - let files = match index.search(&query).await { + let files = match browser.search(&query).await { Ok(f) => f, Err(e) => return APIError::from(e).into_response(), }; diff --git a/src/server/axum/error.rs b/src/server/axum/error.rs index df07296..3623527 100644 --- a/src/server/axum/error.rs +++ b/src/server/axum/error.rs @@ -20,6 +20,7 @@ impl IntoResponse for APIError { } APIError::Database(_) => StatusCode::INTERNAL_SERVER_ERROR, APIError::DeletingOwnAccount => StatusCode::CONFLICT, + APIError::DirectoryNotFound(_) => StatusCode::NOT_FOUND, APIError::EmbeddedArtworkNotFound => StatusCode::NOT_FOUND, APIError::EmptyPassword => StatusCode::BAD_REQUEST, APIError::EmptyUsername => StatusCode::BAD_REQUEST, @@ -36,7 +37,6 @@ impl IntoResponse for APIError { APIError::PasswordHashing => StatusCode::INTERNAL_SERVER_ERROR, APIError::PlaylistNotFound => StatusCode::NOT_FOUND, APIError::Settings(_) => StatusCode::INTERNAL_SERVER_ERROR, - APIError::SongMetadataNotFound => StatusCode::NOT_FOUND, APIError::ThumbnailFlacDecoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR, APIError::ThumbnailFileIOError => StatusCode::NOT_FOUND, APIError::ThumbnailId3Decoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR, diff --git a/src/server/dto/v7.rs b/src/server/dto/v7.rs index 7f372c8..2f6290f 100644 --- a/src/server/dto/v7.rs +++ b/src/server/dto/v7.rs @@ -1,9 +1,8 @@ use serde::{Deserialize, Serialize}; use crate::app::{ - config, ddns, index, - scanner::{self, MultiString}, - settings, thumbnail, user, vfs, + collection::{self, MultiString}, + config, ddns, settings, thumbnail, user, vfs, }; use std::convert::From; @@ -238,11 +237,11 @@ pub enum CollectionFile { Song(Song), } -impl From for 
CollectionFile { - fn from(f: index::CollectionFile) -> Self { +impl From for CollectionFile { + fn from(f: collection::File) -> Self { match f { - index::CollectionFile::Directory(d) => Self::Directory(d.into()), - index::CollectionFile::Song(s) => Self::Song(s.into()), + collection::File::Directory(d) => Self::Directory(d.into()), + collection::File::Song(s) => Self::Song(s.into()), } } } @@ -275,10 +274,10 @@ pub struct Song { pub label: Option, } -impl From for Song { - fn from(s: scanner::Song) -> Self { +impl From for Song { + fn from(s: collection::Song) -> Self { Self { - path: s.path, + path: s.virtual_path, track_number: s.track_number, disc_number: s.disc_number, title: s.title, @@ -306,15 +305,15 @@ pub struct Directory { pub date_added: i64, } -impl From for Directory { - fn from(d: scanner::Directory) -> Self { +impl From for Directory { + fn from(d: collection::Directory) -> Self { Self { - path: d.path, - artist: d.artists.to_v7_string(), - year: d.year, - album: d.album, - artwork: d.artwork, - date_added: d.date_added, + path: d.virtual_path, + artist: None, + year: None, + album: None, + artwork: None, + date_added: 0, } } } diff --git a/src/server/dto/v8.rs b/src/server/dto/v8.rs index 8b9c870..1484b81 100644 --- a/src/server/dto/v8.rs +++ b/src/server/dto/v8.rs @@ -1,6 +1,6 @@ use serde::{Deserialize, Serialize}; -use crate::app::{config, ddns, index, scanner, settings, thumbnail, user, vfs}; +use crate::app::{collection, config, ddns, settings, thumbnail, user, vfs}; use std::convert::From; #[derive(PartialEq, Eq, Debug, Serialize, Deserialize)] @@ -234,11 +234,11 @@ pub enum CollectionFile { Song(Song), } -impl From for CollectionFile { - fn from(f: index::CollectionFile) -> Self { +impl From for CollectionFile { + fn from(f: collection::File) -> Self { match f { - index::CollectionFile::Directory(d) => Self::Directory(d.into()), - index::CollectionFile::Song(s) => Self::Song(s.into()), + collection::File::Directory(d) => Self::Directory(d.into()), + collection::File::Song(s) => Self::Song(s.into()), } } } @@ -274,10 +274,10 @@ pub struct Song { pub labels: Vec, } -impl From for Song { - fn from(s: scanner::Song) -> Self { +impl From for Song { + fn from(s: collection::Song) -> Self { Self { - path: s.path, + path: s.virtual_path, track_number: s.track_number, disc_number: s.disc_number, title: s.title, @@ -298,26 +298,12 @@ impl From for Song { #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct Directory { pub path: String, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub artists: Vec, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub year: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub album: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub artwork: Option, - pub date_added: i64, } -impl From for Directory { - fn from(d: scanner::Directory) -> Self { +impl From for Directory { + fn from(d: collection::Directory) -> Self { Self { - path: d.path, - artists: d.artists.0, - year: d.year, - album: d.album, - artwork: d.artwork, - date_added: d.date_added, + path: d.virtual_path, } } } diff --git a/src/server/error.rs b/src/server/error.rs index 05b93fa..e797751 100644 --- a/src/server/error.rs +++ b/src/server/error.rs @@ -1,7 +1,7 @@ use std::path::PathBuf; use thiserror::Error; -use crate::app::{config, ddns, index, lastfm, playlist, settings, thumbnail, user, vfs}; +use crate::app::{collection, config, ddns, lastfm, playlist, settings, thumbnail, user, vfs}; use 
crate::db;
 
 #[derive(Error, Debug)]
@@ -24,6 +24,8 @@ pub enum APIError {
 	BrancaTokenEncoding,
 	#[error("Database error:\n\n{0}")]
 	Database(sqlx::Error),
+	#[error("Directory not found: {0}")]
+	DirectoryNotFound(PathBuf),
 	#[error("DDNS update query failed with HTTP status {0}")]
 	DdnsUpdateQueryFailed(u16),
 	#[error("Cannot delete your own account")]
@@ -60,8 +62,6 @@ pub enum APIError {
 	PlaylistNotFound,
 	#[error("Settings error:\n\n{0}")]
 	Settings(settings::Error),
-	#[error("Song not found")]
-	SongMetadataNotFound,
 	#[error("Could not decode thumbnail from flac file `{0}`:\n\n{1}")]
 	ThumbnailFlacDecoding(PathBuf, metaflac::Error),
 	#[error("Thumbnail file could not be opened")]
@@ -82,6 +82,19 @@ pub enum APIError {
 	VFSPathNotFound,
 }
 
+impl From<collection::Error> for APIError {
+	fn from(error: collection::Error) -> APIError {
+		match error {
+			collection::Error::DirectoryNotFound(d) => APIError::DirectoryNotFound(d),
+			collection::Error::Database(e) => APIError::Database(e),
+			collection::Error::DatabaseConnection(e) => e.into(),
+			collection::Error::Vfs(e) => e.into(),
+			collection::Error::ThreadPoolBuilder(_) => APIError::Internal,
+			collection::Error::ThreadJoining(_) => APIError::Internal,
+		}
+	}
+}
+
 impl From<config::Error> for APIError {
 	fn from(error: config::Error) -> APIError {
 		match error {
@@ -107,17 +120,6 @@ impl From for APIError {
 	}
 }
 
-impl From<index::Error> for APIError {
-	fn from(error: index::Error) -> APIError {
-		match error {
-			index::Error::Database(e) => APIError::Database(e),
-			index::Error::DatabaseConnection(e) => e.into(),
-			index::Error::SongNotFound(_) => APIError::SongMetadataNotFound,
-			index::Error::Vfs(e) => e.into(),
-		}
-	}
-}
-
 impl From<settings::Error> for APIError {
 	fn from(error: settings::Error) -> APIError {
 		match error {
@@ -153,7 +155,6 @@ impl From for APIError {
 	fn from(error: vfs::Error) -> APIError {
 		match error {
-			vfs::Error::CouldNotMapToVirtualPath(_) => APIError::VFSPathNotFound,
 			vfs::Error::CouldNotMapToRealPath(_) => APIError::VFSPathNotFound,
 			vfs::Error::Database(e) => APIError::Database(e),
 			vfs::Error::DatabaseConnection(e) => e.into(),
diff --git a/src/utils.rs b/src/utils.rs
index 0a538d3..12650ef 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -27,8 +27,8 @@ pub enum AudioFormat {
 	M4B,
 }
 
-pub fn get_audio_format(path: &Path) -> Option<AudioFormat> {
-	let extension = match path.extension() {
+pub fn get_audio_format<P: AsRef<Path>>(path: P) -> Option<AudioFormat> {
+	let extension = match path.as_ref().extension() {
 		Some(e) => e,
 		_ => return None,
 	};
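
Note on the scan trigger wiring: the deleted `Scanner` parked a background task on a `tokio::sync::Notify` and woke it from `trigger_scan`, and `main.rs` plus `post_trigger_index` keep calling the same methods on the new `Updater`, so the pattern presumably carries over. Below is a condensed, self-contained sketch of that trigger loop with placeholder names and a print in place of the real update pass; it is illustrative only, not the crate's actual `Updater`.

```rust
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Notify;

#[derive(Clone)]
struct Updater {
    pending_scan: Arc<Notify>,
}

impl Updater {
    fn new() -> Self {
        let updater = Self {
            pending_scan: Arc::new(Notify::new()),
        };
        // Background task: wait for a wakeup, then run one update pass.
        tokio::spawn({
            let updater = updater.clone();
            async move {
                loop {
                    updater.pending_scan.notified().await;
                    println!("running collection update");
                }
            }
        });
        updater
    }

    // Cheap to call from request handlers; Notify stores at most one pending permit,
    // so repeated triggers coalesce into a single wakeup.
    fn trigger_scan(&self) {
        self.pending_scan.notify_one();
    }
}

#[tokio::main]
async fn main() {
    let updater = Updater::new();
    updater.trigger_scan();
    tokio::time::sleep(Duration::from_millis(50)).await;
}
```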
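The deleted `scanner::types` code above stores multi-valued tags (artists, genres, labels) as a single delimited TEXT column, joining values with a form-feed separator on write and splitting on read, with an empty list encoded as NULL. A standalone round-trip sketch of that convention follows, assuming the new collection types keep the same separator; the helper functions are illustrative, not the crate's sqlx `Encode`/`From` impls.

```rust
// Separator taken from the removed types.rs (MULTI_STRING_SEPARATOR).
const MULTI_STRING_SEPARATOR: &str = "\u{000C}";

#[derive(Debug, PartialEq, Eq)]
struct MultiString(Vec<String>);

// Mirrors the Encode impl: empty list becomes NULL, otherwise one joined string.
fn encode(values: &MultiString) -> Option<String> {
    if values.0.is_empty() {
        None
    } else {
        Some(values.0.join(MULTI_STRING_SEPARATOR))
    }
}

// Mirrors From<Option<String>>: NULL becomes an empty list, otherwise split on the separator.
fn decode(column: Option<String>) -> MultiString {
    match column {
        None => MultiString(Vec::new()),
        Some(s) => MultiString(
            s.split(MULTI_STRING_SEPARATOR)
                .map(str::to_string)
                .collect(),
        ),
    }
}

fn main() {
    let artists = MultiString(vec!["Khemmis".to_string(), "Tobokegao".to_string()]);
    let stored = encode(&artists);
    assert_eq!(decode(stored), artists);
    assert_eq!(encode(&MultiString(Vec::new())), None);
}
```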
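The new `FromRef` impls added in `server/axum.rs` are what allow handlers such as `get_browse` to pull a `State(browser)` extractor out of the shared `App` while the router is built with the whole application state. A minimal sketch of that axum sub-state pattern with stand-in types (not the real `App` or `Browser`):

```rust
use axum::extract::{FromRef, State};
use axum::{routing::get, Router};

#[derive(Clone)]
struct Browser; // stand-in for app::collection::Browser

#[derive(Clone)]
struct AppState {
    browser: Browser, // stand-in for the full App
}

// Lets `State<Browser>` be extracted from a router whose state is `AppState`.
impl FromRef<AppState> for Browser {
    fn from_ref(state: &AppState) -> Self {
        state.browser.clone()
    }
}

// Handler only sees the piece of state it needs.
async fn browse(State(_browser): State<Browser>) -> &'static str {
    "would list collection files here"
}

fn router(state: AppState) -> Router {
    Router::new().route("/browse", get(browse)).with_state(state)
}

#[tokio::main]
async fn main() {
    // A real server would go on to serve this router; building it is enough here.
    let _app = router(AppState { browser: Browser });
}
```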