Index/Browser split

Antoine Gersant 2024-07-28 23:15:26 -07:00
parent efc27757c7
commit 2965cbdf7e
34 changed files with 1339 additions and 1574 deletions


@@ -4,12 +4,12 @@ use std::path::PathBuf;
use crate::db::{self, DB};
use crate::paths::Paths;
+pub mod collection;
pub mod config;
pub mod ddns;
-pub mod index;
+pub mod formats;
pub mod lastfm;
pub mod playlist;
-pub mod scanner;
pub mod settings;
pub mod thumbnail;
pub mod user;
@@ -35,8 +35,9 @@ pub struct App {
pub port: u16,
pub web_dir_path: PathBuf,
pub swagger_dir_path: PathBuf,
-pub scanner: scanner::Scanner,
-pub index: index::Index,
+pub updater: collection::Updater,
+pub browser: collection::Browser,
+pub index: collection::Index,
pub config_manager: config::Manager,
pub ddns_manager: ddns::Manager,
pub lastfm_manager: lastfm::Manager,
@@ -64,9 +65,14 @@ impl App {
let auth_secret = settings_manager.get_auth_secret().await?;
let ddns_manager = ddns::Manager::new(db.clone());
let user_manager = user::Manager::new(db.clone(), auth_secret);
-let scanner =
-scanner::Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
-let index = index::Index::new(db.clone(), vfs_manager.clone());
+let index = collection::Index::new();
+let browser = collection::Browser::new(db.clone(), vfs_manager.clone());
+let updater = collection::Updater::new(
+db.clone(),
+index.clone(),
+settings_manager.clone(),
+vfs_manager.clone(),
+);
let config_manager = config::Manager::new(
settings_manager.clone(),
user_manager.clone(),
@@ -75,7 +81,7 @@ impl App {
);
let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone());
let thumbnail_manager = thumbnail::Manager::new(thumbnails_dir_path);
-let lastfm_manager = lastfm::Manager::new(index.clone(), user_manager.clone());
+let lastfm_manager = lastfm::Manager::new(browser.clone(), user_manager.clone());
if let Some(config_path) = paths.config_file_path {
let config = config::Config::from_path(&config_path)?;
@@ -86,7 +92,8 @@ impl App {
port,
web_dir_path: paths.web_dir_path,
swagger_dir_path: paths.swagger_dir_path,
-scanner,
+updater,
+browser,
index,
config_manager,
ddns_manager,

src/app/collection.rs (new file)

@@ -0,0 +1,15 @@
mod browser;
mod cleaner;
mod index;
mod inserter;
mod scanner;
mod types;
mod updater;
pub use browser::*;
pub use cleaner::*;
pub use index::*;
pub use inserter::*;
pub use scanner::*;
pub use types::*;
pub use updater::*;
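
The submodules above stay private and are re-exported wholesale, so the rest of the crate refers to collection::Browser, collection::Index and collection::Updater directly. As a rough sketch of how the pieces fit together, mirroring the App::new wiring shown above (assuming db, vfs_manager and settings_manager are already constructed as in App::new):

let index = collection::Index::new();
let browser = collection::Browser::new(db.clone(), vfs_manager.clone());
let updater = collection::Updater::new(db.clone(), index.clone(), settings_manager.clone(), vfs_manager.clone());
updater.begin_periodic_scans(); // keeps triggering background rescans; each scan repopulates the Index and the database tables that Browser queries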


@@ -0,0 +1,343 @@
use std::path::Path;
use crate::app::{collection, vfs};
use crate::db::DB;
#[derive(Clone)]
pub struct Browser {
db: DB,
vfs_manager: vfs::Manager,
}
impl Browser {
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
pub async fn browse<P>(&self, path: P) -> Result<Vec<collection::File>, collection::Error>
where
P: AsRef<Path>,
{
let mut output = Vec::new();
let mut connection = self.db.connect().await?;
if path.as_ref().components().count() == 0 {
// Browse top-level
let directories = sqlx::query_as!(
collection::Directory,
"SELECT * FROM directories WHERE virtual_parent IS NULL"
)
.fetch_all(connection.as_mut())
.await?;
output.extend(directories.into_iter().map(collection::File::Directory));
} else {
let vfs = self.vfs_manager.get_vfs().await?;
match vfs.virtual_to_real(&path) {
Ok(p) if p.exists() => {}
_ => {
return Err(collection::Error::DirectoryNotFound(
path.as_ref().to_owned(),
))
}
}
let path = path.as_ref().to_string_lossy();
// Browse sub-directory
let directories = sqlx::query_as!(
collection::Directory,
"SELECT * FROM directories WHERE virtual_parent = $1 ORDER BY virtual_path COLLATE NOCASE ASC",
path
)
.fetch_all(connection.as_mut())
.await?;
output.extend(directories.into_iter().map(collection::File::Directory));
let songs = sqlx::query_as!(
collection::Song,
"SELECT * FROM songs WHERE virtual_parent = $1 ORDER BY virtual_path COLLATE NOCASE ASC",
path
)
.fetch_all(connection.as_mut())
.await?;
output.extend(songs.into_iter().map(collection::File::Song));
}
Ok(output)
}
pub async fn flatten<P>(&self, path: P) -> Result<Vec<collection::Song>, collection::Error>
where
P: AsRef<Path>,
{
let mut connection = self.db.connect().await?;
let songs = if path.as_ref().parent().is_some() {
let vfs = self.vfs_manager.get_vfs().await?;
match vfs.virtual_to_real(&path) {
Ok(p) if p.exists() => {}
_ => {
return Err(collection::Error::DirectoryNotFound(
path.as_ref().to_owned(),
))
}
}
let song_path_filter = {
let mut path_buf = path.as_ref().to_owned();
path_buf.push("%");
path_buf.as_path().to_string_lossy().into_owned()
};
sqlx::query_as!(
collection::Song,
"SELECT * FROM songs WHERE virtual_path LIKE $1 ORDER BY virtual_path COLLATE NOCASE ASC",
song_path_filter
)
.fetch_all(connection.as_mut())
.await?
} else {
sqlx::query_as!(
collection::Song,
"SELECT * FROM songs ORDER BY virtual_path COLLATE NOCASE ASC"
)
.fetch_all(connection.as_mut())
.await?
};
Ok(songs)
}
pub async fn get_random_albums(
&self,
count: i64,
) -> Result<Vec<collection::Directory>, collection::Error> {
// TODO move to Index
Ok(vec![])
}
pub async fn get_recent_albums(
&self,
count: i64,
) -> Result<Vec<collection::Directory>, collection::Error> {
// TODO move to Index
Ok(vec![])
}
pub async fn search(&self, query: &str) -> Result<Vec<collection::File>, collection::Error> {
let mut connection = self.db.connect().await?;
let like_test = format!("%{}%", query);
let mut output = Vec::new();
// Find dirs with matching path and parent not matching
{
let directories = sqlx::query_as!(
collection::Directory,
"SELECT * FROM directories WHERE virtual_path LIKE $1 AND virtual_parent NOT LIKE $1",
like_test
)
.fetch_all(connection.as_mut())
.await?;
output.extend(directories.into_iter().map(collection::File::Directory));
}
// Find songs with matching title/album/artist and non-matching parent
{
let songs = sqlx::query_as!(
collection::Song,
r#"
SELECT * FROM songs
WHERE ( virtual_path LIKE $1
OR title LIKE $1
OR album LIKE $1
OR artists LIKE $1
OR album_artists LIKE $1
)
AND virtual_parent NOT LIKE $1
"#,
like_test
)
.fetch_all(connection.as_mut())
.await?;
output.extend(songs.into_iter().map(collection::File::Song));
}
Ok(output)
}
pub async fn get_song(&self, path: &Path) -> Result<collection::Song, collection::Error> {
let mut connection = self.db.connect().await?;
let path = path.to_string_lossy();
let song = sqlx::query_as!(
collection::Song,
"SELECT * FROM songs WHERE virtual_path = $1",
path
)
.fetch_one(connection.as_mut())
.await?;
Ok(song)
}
}
#[cfg(test)]
mod test {
use std::path::{Path, PathBuf};
use super::*;
use crate::app::test;
use crate::test_name;
const TEST_MOUNT_NAME: &str = "root";
#[tokio::test]
async fn can_browse_top_level() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let root_path = Path::new(TEST_MOUNT_NAME);
let files = ctx.browser.browse(Path::new("")).await.unwrap();
assert_eq!(files.len(), 1);
match files[0] {
collection::File::Directory(ref d) => {
assert_eq!(d.virtual_path, root_path.to_str().unwrap())
}
_ => panic!("Expected directory"),
}
}
#[tokio::test]
async fn can_browse_directory() {
let khemmis_path: PathBuf = [TEST_MOUNT_NAME, "Khemmis"].iter().collect();
let tobokegao_path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let files = ctx
.browser
.browse(Path::new(TEST_MOUNT_NAME))
.await
.unwrap();
assert_eq!(files.len(), 2);
match files[0] {
collection::File::Directory(ref d) => {
assert_eq!(d.virtual_path, khemmis_path.to_str().unwrap())
}
_ => panic!("Expected directory"),
}
match files[1] {
collection::File::Directory(ref d) => {
assert_eq!(d.virtual_path, tobokegao_path.to_str().unwrap())
}
_ => panic!("Expected directory"),
}
}
#[tokio::test]
async fn can_flatten_root() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let songs = ctx
.browser
.flatten(Path::new(TEST_MOUNT_NAME))
.await
.unwrap();
assert_eq!(songs.len(), 13);
assert_eq!(songs[0].title, Some("Above The Water".to_owned()));
}
#[tokio::test]
async fn can_flatten_directory() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
let songs = ctx.browser.flatten(path).await.unwrap();
assert_eq!(songs.len(), 8);
}
#[tokio::test]
async fn can_flatten_directory_with_shared_prefix() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); // Prefix of '(Picnic Remixes)'
let songs = ctx.browser.flatten(path).await.unwrap();
assert_eq!(songs.len(), 7);
}
#[tokio::test]
async fn can_get_random_albums() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let albums = ctx.browser.get_random_albums(1).await.unwrap();
assert_eq!(albums.len(), 1);
}
#[tokio::test]
async fn can_get_recent_albums() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let albums = ctx.browser.get_recent_albums(2).await.unwrap();
assert_eq!(albums.len(), 2);
}
#[tokio::test]
async fn can_get_a_song() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
let song_virtual_path = picnic_virtual_dir.join("05 - シャーベット (Sherbet).mp3");
let artwork_virtual_path = picnic_virtual_dir.join("Folder.png");
let song = ctx.browser.get_song(&song_virtual_path).await.unwrap();
assert_eq!(
song.virtual_path,
song_virtual_path.to_string_lossy().as_ref()
);
assert_eq!(song.track_number, Some(5));
assert_eq!(song.disc_number, None);
assert_eq!(song.title, Some("シャーベット (Sherbet)".to_owned()));
assert_eq!(
song.artists,
collection::MultiString(vec!["Tobokegao".to_owned()])
);
assert_eq!(song.album_artists, collection::MultiString(vec![]));
assert_eq!(song.album, Some("Picnic".to_owned()));
assert_eq!(song.year, Some(2016));
assert_eq!(
song.artwork,
Some(artwork_virtual_path.to_string_lossy().into_owned())
);
}
}


@@ -0,0 +1,89 @@
use rayon::prelude::*;
use sqlx::{QueryBuilder, Sqlite};
use std::path::Path;
use crate::app::{collection, vfs};
use crate::db::DB;
#[derive(Clone)]
pub struct Cleaner {
db: DB,
vfs_manager: vfs::Manager,
}
impl Cleaner {
const BUFFER_SIZE: usize = 500; // Deletions in each transaction
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
pub async fn clean(&self) -> Result<(), collection::Error> {
tokio::try_join!(self.clean_songs(), self.clean_directories())?;
Ok(())
}
pub async fn clean_directories(&self) -> Result<(), collection::Error> {
let directories = {
let mut connection = self.db.connect().await?;
sqlx::query!("SELECT path, virtual_path FROM directories")
.fetch_all(connection.as_mut())
.await?
};
let vfs = self.vfs_manager.get_vfs().await?;
let missing_directories = tokio::task::spawn_blocking(move || {
directories
.into_par_iter()
.filter(|d| !vfs.exists(&d.virtual_path) || !Path::new(&d.path).exists())
.map(|d| d.virtual_path)
.collect::<Vec<_>>()
})
.await?;
let mut connection = self.db.connect().await?;
for chunk in missing_directories[..].chunks(Self::BUFFER_SIZE) {
QueryBuilder::<Sqlite>::new("DELETE FROM directories WHERE virtual_path IN ")
.push_tuples(chunk, |mut b, virtual_path| {
b.push_bind(virtual_path);
})
.build()
.execute(connection.as_mut())
.await?;
}
Ok(())
}
pub async fn clean_songs(&self) -> Result<(), collection::Error> {
let songs = {
let mut connection = self.db.connect().await?;
sqlx::query!("SELECT path, virtual_path FROM songs")
.fetch_all(connection.as_mut())
.await?
};
let vfs = self.vfs_manager.get_vfs().await?;
let deleted_songs = tokio::task::spawn_blocking(move || {
songs
.into_par_iter()
.filter(|s| !vfs.exists(&s.virtual_path) || !Path::new(&s.path).exists())
.map(|s| s.virtual_path)
.collect::<Vec<_>>()
})
.await?;
for chunk in deleted_songs[..].chunks(Cleaner::BUFFER_SIZE) {
let mut connection = self.db.connect().await?;
QueryBuilder::<Sqlite>::new("DELETE FROM songs WHERE virtual_path IN ")
.push_tuples(chunk, |mut b, virtual_path| {
b.push_bind(virtual_path);
})
.build()
.execute(connection.as_mut())
.await?;
}
Ok(())
}
}


@@ -0,0 +1,32 @@
use std::{collections::HashMap, sync::Arc};
use tokio::sync::RwLock;
use crate::app::collection;
#[derive(Clone, Default)]
pub struct Index {
lookups: Arc<RwLock<Lookups>>,
}
impl Index {
pub fn new() -> Self {
Self::default()
}
pub async fn replace_lookup_tables(&mut self, new_lookups: Lookups) {
let mut lock = self.lookups.write().await;
*lock = new_lookups;
}
}
#[derive(Default)]
pub struct Lookups {
data: HashMap<String, String>,
}
impl Lookups {
pub fn add_song(&mut self, _song: &collection::Song) {
// todo!()
}
}


@@ -0,0 +1,123 @@
use std::borrow::Cow;
use log::error;
use sqlx::{
encode::IsNull,
pool::PoolConnection,
sqlite::{SqliteArgumentValue, SqliteTypeInfo},
QueryBuilder, Sqlite,
};
use crate::app::collection::{self, MultiString};
use crate::db::DB;
impl<'q> sqlx::Encode<'q, Sqlite> for MultiString {
fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'q>>) -> IsNull {
if self.0.is_empty() {
IsNull::Yes
} else {
let joined = self.0.join(MultiString::SEPARATOR);
args.push(SqliteArgumentValue::Text(Cow::Owned(joined)));
IsNull::No
}
}
}
impl sqlx::Type<Sqlite> for MultiString {
fn type_info() -> SqliteTypeInfo {
<&str as sqlx::Type<Sqlite>>::type_info()
}
}
pub struct Inserter<T> {
new_entries: Vec<T>,
db: DB,
}
impl<T> Inserter<T>
where
T: Insertable,
{
const BUFFER_SIZE: usize = 1000;
pub fn new(db: DB) -> Self {
let new_entries = Vec::with_capacity(Self::BUFFER_SIZE);
Self { new_entries, db }
}
pub async fn insert(&mut self, entry: T) {
self.new_entries.push(entry);
if self.new_entries.len() >= Self::BUFFER_SIZE {
self.flush().await;
}
}
pub async fn flush(&mut self) {
let Ok(connection) = self.db.connect().await else {
error!("Could not acquire connection to insert new entries in database");
return;
};
match Insertable::bulk_insert(&self.new_entries, connection).await {
Ok(_) => self.new_entries.clear(),
Err(e) => error!("Could not insert new entries in database: {}", e),
};
}
}
pub trait Insertable
where
Self: Sized,
{
async fn bulk_insert(
entries: &Vec<Self>,
connection: PoolConnection<Sqlite>,
) -> Result<(), sqlx::Error>;
}
impl Insertable for collection::Directory {
async fn bulk_insert(
entries: &Vec<Self>,
mut connection: PoolConnection<Sqlite>,
) -> Result<(), sqlx::Error> {
QueryBuilder::<Sqlite>::new("INSERT INTO directories(path, virtual_path, virtual_parent) ")
.push_values(entries.iter(), |mut b, directory| {
b.push_bind(&directory.path)
.push_bind(&directory.virtual_path)
.push_bind(&directory.virtual_parent);
})
.build()
.execute(connection.as_mut())
.await
.map(|_| ())
}
}
impl Insertable for collection::Song {
async fn bulk_insert(
entries: &Vec<Self>,
mut connection: PoolConnection<Sqlite>,
) -> Result<(), sqlx::Error> {
QueryBuilder::<Sqlite>::new("INSERT INTO songs(path, virtual_path, virtual_parent, track_number, disc_number, title, artists, album_artists, year, album, artwork, duration, lyricists, composers, genres, labels) ")
.push_values(entries.iter(), |mut b, song| {
b.push_bind(&song.path)
.push_bind(&song.virtual_path)
.push_bind(&song.virtual_parent)
.push_bind(song.track_number)
.push_bind(song.disc_number)
.push_bind(&song.title)
.push_bind(&song.artists)
.push_bind(&song.album_artists)
.push_bind(song.year)
.push_bind(&song.album)
.push_bind(&song.artwork)
.push_bind(song.duration)
.push_bind(&song.lyricists)
.push_bind(&song.composers)
.push_bind(&song.genres)
.push_bind(&song.labels);
})
.build()
.execute(connection.as_mut())
.await.map(|_| ())
}
}


@@ -0,0 +1,196 @@
use log::{error, info};
use rayon::{Scope, ThreadPoolBuilder};
use regex::Regex;
use std::cmp::min;
use std::fs;
use std::path::Path;
use std::str::FromStr;
use tokio::sync::mpsc::UnboundedSender;
use crate::app::vfs;
use crate::app::{
collection::{self, MultiString},
formats,
};
pub struct Scanner {
directories_output: UnboundedSender<collection::Directory>,
songs_output: UnboundedSender<collection::Song>,
vfs_manager: vfs::Manager,
artwork_regex: Option<Regex>,
}
impl Scanner {
pub fn new(
directories_output: UnboundedSender<collection::Directory>,
songs_output: UnboundedSender<collection::Song>,
vfs_manager: vfs::Manager,
artwork_regex: Option<Regex>,
) -> Self {
Self {
directories_output,
songs_output,
vfs_manager,
artwork_regex,
}
}
pub async fn scan(self) -> Result<(), collection::Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let roots = vfs.mounts().clone();
let key = "POLARIS_NUM_TRAVERSER_THREADS";
let num_threads = std::env::var_os(key)
.map(|v| v.to_string_lossy().to_string())
.and_then(|v| usize::from_str(&v).ok())
.unwrap_or_else(|| min(num_cpus::get(), 4));
info!("Browsing collection using {} threads", num_threads);
let directories_output = self.directories_output.clone();
let songs_output = self.songs_output.clone();
let artwork_regex = self.artwork_regex.clone();
let thread_pool = ThreadPoolBuilder::new().num_threads(num_threads).build()?;
thread_pool.scope({
|scope| {
for root in roots {
scope.spawn(|scope| {
process_directory(
scope,
root.source,
root.name,
directories_output.clone(),
songs_output.clone(),
artwork_regex.clone(),
);
});
}
}
});
Ok(())
}
}
fn process_directory<P: AsRef<Path>, Q: AsRef<Path>>(
scope: &Scope,
real_path: P,
virtual_path: Q,
directories_output: UnboundedSender<collection::Directory>,
songs_output: UnboundedSender<collection::Song>,
artwork_regex: Option<Regex>,
) {
let read_dir = match fs::read_dir(&real_path) {
Ok(read_dir) => read_dir,
Err(e) => {
error!(
"Directory read error for `{}`: {}",
real_path.as_ref().display(),
e
);
return;
}
};
let mut songs = vec![];
let mut artwork_file = None;
for entry in read_dir {
let name = match entry {
Ok(ref f) => f.file_name(),
Err(e) => {
error!(
"File read error within `{}`: {}",
real_path.as_ref().display(),
e
);
break;
}
};
let entry_real_path = real_path.as_ref().join(&name);
let entry_real_path_string = entry_real_path.to_string_lossy().to_string();
let entry_virtual_path = virtual_path.as_ref().join(&name);
let entry_virtual_path_string = entry_virtual_path.to_string_lossy().to_string();
if entry_real_path.is_dir() {
scope.spawn({
let directories_output = directories_output.clone();
let songs_output = songs_output.clone();
let artwork_regex = artwork_regex.clone();
|scope| {
process_directory(
scope,
entry_real_path,
entry_virtual_path,
directories_output,
songs_output,
artwork_regex,
);
}
});
} else if let Some(metadata) = formats::read_metadata(&entry_real_path) {
songs.push(collection::Song {
id: 0,
path: entry_real_path_string.clone(),
virtual_path: entry_virtual_path.to_string_lossy().to_string(),
virtual_parent: entry_virtual_path
.parent()
.unwrap()
.to_string_lossy()
.to_string(),
track_number: metadata.track_number.map(|n| n as i64),
disc_number: metadata.disc_number.map(|n| n as i64),
title: metadata.title,
artists: MultiString(metadata.artists),
album_artists: MultiString(metadata.album_artists),
year: metadata.year.map(|n| n as i64),
album: metadata.album,
artwork: metadata
.has_artwork
.then(|| entry_virtual_path_string.clone()),
duration: metadata.duration.map(|n| n as i64),
lyricists: MultiString(metadata.lyricists),
composers: MultiString(metadata.composers),
genres: MultiString(metadata.genres),
labels: MultiString(metadata.labels),
date_added: get_date_created(&entry_real_path).unwrap_or_default(),
});
} else if artwork_file.is_none()
&& artwork_regex
.as_ref()
.is_some_and(|r| r.is_match(name.to_str().unwrap_or_default()))
{
artwork_file = Some(entry_virtual_path_string);
}
}
for mut song in songs {
song.artwork = song.artwork.or_else(|| artwork_file.clone());
songs_output.send(song).ok();
}
directories_output
.send(collection::Directory {
id: 0,
path: real_path.as_ref().to_string_lossy().to_string(),
virtual_path: virtual_path.as_ref().to_string_lossy().to_string(),
virtual_parent: virtual_path
.as_ref()
.parent()
.map(|p| p.to_string_lossy().to_string())
.filter(|p| !p.is_empty()),
})
.ok();
}
fn get_date_created<P: AsRef<Path>>(path: P) -> Option<i64> {
if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) {
t.duration_since(std::time::UNIX_EPOCH)
.map(|d| d.as_secs() as i64)
.ok()
} else {
None
}
}


@@ -0,0 +1,74 @@
use std::path::PathBuf;
use crate::{
app::vfs::{self},
db,
};
#[derive(Debug, PartialEq, Eq)]
pub struct MultiString(pub Vec<String>);
impl MultiString {
pub const SEPARATOR: &'static str = "\u{000C}";
}
impl From<Option<String>> for MultiString {
fn from(value: Option<String>) -> Self {
match value {
None => Self(Vec::new()),
Some(s) => Self(s.split(Self::SEPARATOR).map(|s| s.to_string()).collect()),
}
}
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("Directory not found: {0}")]
DirectoryNotFound(PathBuf),
#[error(transparent)]
Database(#[from] sqlx::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
Vfs(#[from] vfs::Error),
#[error(transparent)]
ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
#[error(transparent)]
ThreadJoining(#[from] tokio::task::JoinError),
}
#[derive(Debug, PartialEq, Eq)]
pub enum File {
Directory(Directory),
Song(Song),
}
#[derive(Debug, PartialEq, Eq)]
pub struct Song {
pub id: i64,
pub path: String,
pub virtual_path: String,
pub virtual_parent: String,
pub track_number: Option<i64>,
pub disc_number: Option<i64>,
pub title: Option<String>,
pub artists: MultiString,
pub album_artists: MultiString,
pub year: Option<i64>,
pub album: Option<String>,
pub artwork: Option<String>,
pub duration: Option<i64>,
pub lyricists: MultiString,
pub composers: MultiString,
pub genres: MultiString,
pub labels: MultiString,
pub date_added: i64,
}
#[derive(Debug, PartialEq, Eq)]
pub struct Directory {
pub id: i64,
pub path: String,
pub virtual_path: String,
pub virtual_parent: Option<String>,
}
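
MultiString is stored as a single text column: the Inserter shown earlier joins the values with MultiString::SEPARATOR when writing, and the From<Option<String>> impl splits them back apart when reading. A small illustrative round-trip (not part of this commit):

let artists = MultiString(vec!["Tobokegao".to_owned(), "Khemmis".to_owned()]);
let encoded = Some(artists.0.join(MultiString::SEPARATOR));
assert_eq!(MultiString::from(encoded), artists);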


@@ -0,0 +1,293 @@
use std::{sync::Arc, time::Duration};
use log::{error, info};
use tokio::{
sync::{mpsc::unbounded_channel, Notify},
time::Instant,
};
use crate::{
app::{collection::*, settings, vfs},
db::DB,
};
#[derive(Clone)]
pub struct Updater {
db: DB,
index: Index,
settings_manager: settings::Manager,
vfs_manager: vfs::Manager,
pending_scan: Arc<Notify>,
}
impl Updater {
pub fn new(
db: DB,
index: Index,
settings_manager: settings::Manager,
vfs_manager: vfs::Manager,
) -> Self {
let updater = Self {
db,
index,
vfs_manager,
settings_manager,
pending_scan: Arc::new(Notify::new()),
};
tokio::spawn({
let mut updater = updater.clone();
async move {
loop {
updater.pending_scan.notified().await;
if let Err(e) = updater.update().await {
error!("Error while updating index: {}", e);
}
}
}
});
updater
}
pub fn trigger_scan(&self) {
self.pending_scan.notify_one();
}
pub fn begin_periodic_scans(&self) {
tokio::spawn({
let index = self.clone();
async move {
loop {
index.trigger_scan();
let sleep_duration = index
.settings_manager
.get_index_sleep_duration()
.await
.unwrap_or_else(|e| {
error!("Could not retrieve index sleep duration: {}", e);
Duration::from_secs(1800)
});
tokio::time::sleep(sleep_duration).await;
}
}
});
}
pub async fn update(&mut self) -> Result<(), Error> {
let start = Instant::now();
info!("Beginning library index update");
let cleaner = Cleaner::new(self.db.clone(), self.vfs_manager.clone());
cleaner.clean().await?;
let album_art_pattern = self
.settings_manager
.get_index_album_art_pattern()
.await
.ok();
let (scanner_directories_output, mut collection_directories_input) = unbounded_channel();
let (scanner_songs_output, mut collection_songs_input) = unbounded_channel();
let scanner = Scanner::new(
scanner_directories_output,
scanner_songs_output,
self.vfs_manager.clone(),
album_art_pattern,
);
let mut song_inserter = Inserter::<Song>::new(self.db.clone());
let mut directory_inserter = Inserter::<Directory>::new(self.db.clone());
let directory_task = tokio::spawn(async move {
let capacity = 500;
let mut buffer: Vec<Directory> = Vec::with_capacity(capacity);
loop {
match collection_directories_input
.recv_many(&mut buffer, capacity)
.await
{
0 => break,
_ => {
for directory in buffer.drain(0..) {
directory_inserter.insert(directory).await;
}
}
}
}
directory_inserter.flush().await;
});
let song_task = tokio::spawn(async move {
let capacity = 500;
let mut lookup_tables = Lookups::default();
let mut buffer: Vec<Song> = Vec::with_capacity(capacity);
loop {
match collection_songs_input
.recv_many(&mut buffer, capacity)
.await
{
0 => break,
_ => {
for song in buffer.drain(0..) {
lookup_tables.add_song(&song);
song_inserter.insert(song).await;
}
}
}
}
song_inserter.flush().await;
lookup_tables
});
let lookup_tables = tokio::join!(scanner.scan(), directory_task, song_task).2?;
self.index.replace_lookup_tables(lookup_tables).await;
info!(
"Library index update took {} seconds",
start.elapsed().as_millis() as f32 / 1000.0
);
Ok(())
}
}
#[cfg(test)]
mod test {
use std::path::PathBuf;
use crate::{
app::{collection::*, settings, test},
test_name,
};
const TEST_MOUNT_NAME: &str = "root";
#[tokio::test]
async fn scan_adds_new_content() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
ctx.updater.update().await.unwrap(); // Validates that subsequent updates don't run into conflicts
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
#[tokio::test]
async fn scan_removes_missing_content() {
let builder = test::ContextBuilder::new(test_name!());
let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect();
let test_collection_dir: PathBuf = builder.test_directory.join("small-collection");
let copy_options = fs_extra::dir::CopyOptions::new();
fs_extra::dir::copy(
original_collection_dir,
&builder.test_directory,
&copy_options,
)
.unwrap();
let mut ctx = builder
.mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap())
.build()
.await;
ctx.updater.update().await.unwrap();
{
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
let khemmis_directory = test_collection_dir.join("Khemmis");
std::fs::remove_dir_all(khemmis_directory).unwrap();
ctx.updater.update().await.unwrap();
{
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 4);
assert_eq!(all_songs.len(), 8);
}
}
#[tokio::test]
async fn finds_embedded_artwork() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.updater.update().await.unwrap();
let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3");
let song = ctx.browser.get_song(&song_virtual_path).await.unwrap();
assert_eq!(
song.artwork,
Some(song_virtual_path.to_string_lossy().into_owned())
);
}
#[tokio::test]
async fn album_art_pattern_is_case_insensitive() {
let mut ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
let patterns = vec!["folder", "FOLDER"];
for pattern in patterns.into_iter() {
ctx.settings_manager
.amend(&settings::NewSettings {
album_art_pattern: Some(pattern.to_owned()),
..Default::default()
})
.await
.unwrap();
ctx.updater.update().await.unwrap();
let hunted_virtual_dir: PathBuf =
[TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect();
let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg");
let song = &ctx.browser.flatten(&hunted_virtual_dir).await.unwrap()[0];
assert_eq!(
song.artwork,
Some(artwork_virtual_path.to_string_lossy().into_owned())
);
}
}
}


@@ -44,22 +44,26 @@ pub struct SongMetadata {
pub labels: Vec<String>,
}
-pub fn read(path: &Path) -> Option<SongMetadata> {
-let data = match utils::get_audio_format(path) {
-Some(AudioFormat::AIFF) => read_id3(path),
-Some(AudioFormat::FLAC) => read_flac(path),
-Some(AudioFormat::MP3) => read_mp3(path),
-Some(AudioFormat::OGG) => read_vorbis(path),
-Some(AudioFormat::OPUS) => read_opus(path),
-Some(AudioFormat::WAVE) => read_id3(path),
-Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(path),
-Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(path),
+pub fn read_metadata<P: AsRef<Path>>(path: P) -> Option<SongMetadata> {
+let data = match utils::get_audio_format(&path) {
+Some(AudioFormat::AIFF) => read_id3(&path),
+Some(AudioFormat::FLAC) => read_flac(&path),
+Some(AudioFormat::MP3) => read_mp3(&path),
+Some(AudioFormat::OGG) => read_vorbis(&path),
+Some(AudioFormat::OPUS) => read_opus(&path),
+Some(AudioFormat::WAVE) => read_id3(&path),
+Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(&path),
+Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(&path),
None => return None,
};
match data {
Ok(d) => Some(d),
Err(e) => {
-error!("Error while reading file metadata for '{:?}': {}", path, e);
+error!(
+"Error while reading file metadata for '{:?}': {}",
+path.as_ref(),
+e
+);
None
}
}
@@ -78,7 +82,7 @@ impl ID3Ext for id3::Tag {
}
}
-fn read_id3(path: &Path) -> Result<SongMetadata, Error> {
+fn read_id3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let tag = id3::Tag::read_from_path(path).or_else(|error| {
if let Some(tag) = error.partial_tag {
Ok(tag)
@@ -122,8 +126,8 @@ fn read_id3(path: &Path) -> Result<SongMetadata, Error> {
})
}
-fn read_mp3(path: &Path) -> Result<SongMetadata, Error> {
-let mut metadata = read_id3(path)?;
+fn read_mp3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
+let mut metadata = read_id3(&path)?;
let duration = {
mp3_duration::from_path(path)
.map(|d| d.as_secs() as u32)
@@ -167,7 +171,7 @@ mod ape_ext {
}
}
-fn read_ape(path: &Path) -> Result<SongMetadata, Error> {
+fn read_ape<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let tag = ape::read_from_path(path)?;
let artists = ape_ext::read_strings(tag.items("Artist"));
let album = tag.item("Album").and_then(ape_ext::read_string);
@@ -197,8 +201,8 @@ fn read_ape(path: &Path) -> Result<SongMetadata, Error> {
})
}
-fn read_vorbis(path: &Path) -> Result<SongMetadata, Error> {
-let file = fs::File::open(path).map_err(|e| Error::Io(path.to_owned(), e))?;
+fn read_vorbis<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
+let file = fs::File::open(&path).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
let source = OggStreamReader::new(file)?;
let mut metadata = SongMetadata::default();
@@ -224,7 +228,7 @@ fn read_vorbis(path: &Path) -> Result<SongMetadata, Error> {
Ok(metadata)
}
-fn read_opus(path: &Path) -> Result<SongMetadata, Error> {
+fn read_opus<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let headers = opus_headers::parse_from_path(path)?;
let mut metadata = SongMetadata::default();
@@ -250,7 +254,7 @@ fn read_opus(path: &Path) -> Result<SongMetadata, Error> {
Ok(metadata)
}
-fn read_flac(path: &Path) -> Result<SongMetadata, Error> {
+fn read_flac<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let tag = metaflac::Tag::read_from_path(path)?;
let vorbis = tag
.vorbis_comments()
@@ -285,7 +289,7 @@ fn read_flac(path: &Path) -> Result<SongMetadata, Error> {
})
}
-fn read_mp4(path: &Path) -> Result<SongMetadata, Error> {
+fn read_mp4<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let mut tag = mp4ameta::Tag::read_from_path(path)?;
let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label");
@@ -336,35 +340,35 @@ fn reads_file_metadata() {
..sample_tags.clone()
};
assert_eq!(
-read(Path::new("test-data/formats/sample.aif")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.aif")).unwrap(),
sample_tags
);
assert_eq!(
-read(Path::new("test-data/formats/sample.mp3")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.mp3")).unwrap(),
mp3_sample_tag
);
assert_eq!(
-read(Path::new("test-data/formats/sample.ogg")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.ogg")).unwrap(),
sample_tags
);
assert_eq!(
-read(Path::new("test-data/formats/sample.flac")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.flac")).unwrap(),
flac_sample_tag
);
assert_eq!(
-read(Path::new("test-data/formats/sample.m4a")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.m4a")).unwrap(),
m4a_sample_tag
);
assert_eq!(
-read(Path::new("test-data/formats/sample.opus")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.opus")).unwrap(),
sample_tags
);
assert_eq!(
-read(Path::new("test-data/formats/sample.ape")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.ape")).unwrap(),
sample_tags
);
assert_eq!(
-read(Path::new("test-data/formats/sample.wav")).unwrap(),
+read_metadata(Path::new("test-data/formats/sample.wav")).unwrap(),
sample_tags
);
}
@@ -372,27 +376,27 @@ fn reads_file_metadata() {
#[test]
fn reads_embedded_artwork() {
assert!(
-read(Path::new("test-data/artwork/sample.aif"))
+read_metadata(Path::new("test-data/artwork/sample.aif"))
.unwrap()
.has_artwork
);
assert!(
-read(Path::new("test-data/artwork/sample.mp3"))
+read_metadata(Path::new("test-data/artwork/sample.mp3"))
.unwrap()
.has_artwork
);
assert!(
-read(Path::new("test-data/artwork/sample.flac"))
+read_metadata(Path::new("test-data/artwork/sample.flac"))
.unwrap()
.has_artwork
);
assert!(
-read(Path::new("test-data/artwork/sample.m4a"))
+read_metadata(Path::new("test-data/artwork/sample.m4a"))
.unwrap()
.has_artwork
);
assert!(
-read(Path::new("test-data/artwork/sample.wav"))
+read_metadata(Path::new("test-data/artwork/sample.wav"))
.unwrap()
.has_artwork
);


@@ -1,21 +0,0 @@
use crate::app::vfs;
use crate::db::DB;
mod query;
#[cfg(test)]
mod test;
mod types;
pub use self::types::*;
#[derive(Clone)]
pub struct Index {
db: DB,
vfs_manager: vfs::Manager,
}
impl Index {
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
}


@@ -1,201 +0,0 @@
use std::path::Path;
use super::*;
use crate::app::scanner;
impl Index {
pub async fn browse<P>(&self, virtual_path: P) -> Result<Vec<CollectionFile>, Error>
where
P: AsRef<Path>,
{
let mut output = Vec::new();
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
if virtual_path.as_ref().components().count() == 0 {
// Browse top-level
let real_directories = sqlx::query_as!(
scanner::Directory,
"SELECT * FROM directories WHERE parent IS NULL"
)
.fetch_all(connection.as_mut())
.await?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
output.extend(virtual_directories.map(CollectionFile::Directory));
} else {
// Browse sub-directory
let real_path = vfs.virtual_to_real(virtual_path)?;
let real_path_string = real_path.as_path().to_string_lossy().into_owned();
let real_directories = sqlx::query_as!(
scanner::Directory,
"SELECT * FROM directories WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC",
real_path_string
)
.fetch_all(connection.as_mut())
.await?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
output.extend(virtual_directories.map(CollectionFile::Directory));
let real_songs = sqlx::query_as!(
scanner::Song,
"SELECT * FROM songs WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC",
real_path_string
)
.fetch_all(connection.as_mut())
.await?;
let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
output.extend(virtual_songs.map(CollectionFile::Song));
}
Ok(output)
}
pub async fn flatten<P>(&self, virtual_path: P) -> Result<Vec<scanner::Song>, Error>
where
P: AsRef<Path>,
{
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
let real_songs = if virtual_path.as_ref().parent().is_some() {
let real_path = vfs.virtual_to_real(virtual_path)?;
let song_path_filter = {
let mut path_buf = real_path;
path_buf.push("%");
path_buf.as_path().to_string_lossy().into_owned()
};
sqlx::query_as!(
scanner::Song,
"SELECT * FROM songs WHERE path LIKE $1 ORDER BY path COLLATE NOCASE ASC",
song_path_filter
)
.fetch_all(connection.as_mut())
.await?
} else {
sqlx::query_as!(
scanner::Song,
"SELECT * FROM songs ORDER BY path COLLATE NOCASE ASC"
)
.fetch_all(connection.as_mut())
.await?
};
let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
Ok(virtual_songs.collect::<Vec<_>>())
}
pub async fn get_random_albums(&self, count: i64) -> Result<Vec<scanner::Directory>, Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
let real_directories = sqlx::query_as!(
scanner::Directory,
"SELECT * FROM directories WHERE album IS NOT NULL ORDER BY RANDOM() DESC LIMIT $1",
count
)
.fetch_all(connection.as_mut())
.await?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
Ok(virtual_directories.collect::<Vec<_>>())
}
pub async fn get_recent_albums(&self, count: i64) -> Result<Vec<scanner::Directory>, Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
let real_directories = sqlx::query_as!(
scanner::Directory,
"SELECT * FROM directories WHERE album IS NOT NULL ORDER BY date_added DESC LIMIT $1",
count
)
.fetch_all(connection.as_mut())
.await?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
Ok(virtual_directories.collect::<Vec<_>>())
}
pub async fn search(&self, query: &str) -> Result<Vec<CollectionFile>, Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
let like_test = format!("%{}%", query);
let mut output = Vec::new();
// Find dirs with matching path and parent not matching
{
let real_directories = sqlx::query_as!(
scanner::Directory,
"SELECT * FROM directories WHERE path LIKE $1 AND parent NOT LIKE $1",
like_test
)
.fetch_all(connection.as_mut())
.await?;
let virtual_directories = real_directories
.into_iter()
.filter_map(|d| d.virtualize(&vfs));
output.extend(virtual_directories.map(CollectionFile::Directory));
}
// Find songs with matching title/album/artist and non-matching parent
{
let real_songs = sqlx::query_as!(
scanner::Song,
r#"
SELECT * FROM songs
WHERE ( path LIKE $1
OR title LIKE $1
OR album LIKE $1
OR artists LIKE $1
OR album_artists LIKE $1
)
AND parent NOT LIKE $1
"#,
like_test
)
.fetch_all(connection.as_mut())
.await?;
let virtual_songs = real_songs.into_iter().filter_map(|d| d.virtualize(&vfs));
output.extend(virtual_songs.map(CollectionFile::Song));
}
Ok(output)
}
pub async fn get_song(&self, virtual_path: &Path) -> Result<scanner::Song, Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let mut connection = self.db.connect().await?;
let real_path = vfs.virtual_to_real(virtual_path)?;
let real_path_string = real_path.as_path().to_string_lossy();
let real_song = sqlx::query_as!(
scanner::Song,
"SELECT * FROM songs WHERE path = $1",
real_path_string
)
.fetch_one(connection.as_mut())
.await?;
match real_song.virtualize(&vfs) {
Some(s) => Ok(s),
None => Err(Error::SongNotFound(real_path)),
}
}
}


@@ -1,139 +0,0 @@
use std::path::{Path, PathBuf};
use super::*;
use crate::app::{scanner, test};
use crate::test_name;
const TEST_MOUNT_NAME: &str = "root";
#[tokio::test]
async fn can_browse_top_level() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let root_path = Path::new(TEST_MOUNT_NAME);
let files = ctx.index.browse(Path::new("")).await.unwrap();
assert_eq!(files.len(), 1);
match files[0] {
CollectionFile::Directory(ref d) => assert_eq!(d.path, root_path.to_str().unwrap()),
_ => panic!("Expected directory"),
}
}
#[tokio::test]
async fn can_browse_directory() {
let khemmis_path: PathBuf = [TEST_MOUNT_NAME, "Khemmis"].iter().collect();
let tobokegao_path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let files = ctx.index.browse(Path::new(TEST_MOUNT_NAME)).await.unwrap();
assert_eq!(files.len(), 2);
match files[0] {
CollectionFile::Directory(ref d) => assert_eq!(d.path, khemmis_path.to_str().unwrap()),
_ => panic!("Expected directory"),
}
match files[1] {
CollectionFile::Directory(ref d) => assert_eq!(d.path, tobokegao_path.to_str().unwrap()),
_ => panic!("Expected directory"),
}
}
#[tokio::test]
async fn can_flatten_root() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let songs = ctx.index.flatten(Path::new(TEST_MOUNT_NAME)).await.unwrap();
assert_eq!(songs.len(), 13);
assert_eq!(songs[0].title, Some("Above The Water".to_owned()));
}
#[tokio::test]
async fn can_flatten_directory() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
let songs = ctx.index.flatten(path).await.unwrap();
assert_eq!(songs.len(), 8);
}
#[tokio::test]
async fn can_flatten_directory_with_shared_prefix() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); // Prefix of '(Picnic Remixes)'
let songs = ctx.index.flatten(path).await.unwrap();
assert_eq!(songs.len(), 7);
}
#[tokio::test]
async fn can_get_random_albums() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let albums = ctx.index.get_random_albums(1).await.unwrap();
assert_eq!(albums.len(), 1);
}
#[tokio::test]
async fn can_get_recent_albums() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let albums = ctx.index.get_recent_albums(2).await.unwrap();
assert_eq!(albums.len(), 2);
assert!(albums[0].date_added >= albums[1].date_added);
}
#[tokio::test]
async fn can_get_a_song() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
let song_virtual_path = picnic_virtual_dir.join("05 - シャーベット (Sherbet).mp3");
let artwork_virtual_path = picnic_virtual_dir.join("Folder.png");
let song = ctx.index.get_song(&song_virtual_path).await.unwrap();
assert_eq!(song.path, song_virtual_path.to_string_lossy().as_ref());
assert_eq!(song.track_number, Some(5));
assert_eq!(song.disc_number, None);
assert_eq!(song.title, Some("シャーベット (Sherbet)".to_owned()));
assert_eq!(
song.artists,
scanner::MultiString(vec!["Tobokegao".to_owned()])
);
assert_eq!(song.album_artists, scanner::MultiString(vec![]));
assert_eq!(song.album, Some("Picnic".to_owned()));
assert_eq!(song.year, Some(2016));
assert_eq!(
song.artwork,
Some(artwork_virtual_path.to_string_lossy().into_owned())
);
}


@@ -1,24 +0,0 @@
use std::path::PathBuf;
use crate::{
app::{scanner, vfs},
db,
};
#[derive(Debug, PartialEq, Eq)]
pub enum CollectionFile {
Directory(scanner::Directory),
Song(scanner::Song),
}
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Database(#[from] sqlx::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error("Song was not found: `{0}`")]
SongNotFound(PathBuf),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}


@@ -2,10 +2,7 @@ use rustfm_scrobble::{Scrobble, Scrobbler};
use std::path::Path;
use user::AuthToken;
-use crate::app::{
-index::{self, Index},
-user,
-};
+use crate::app::{collection, user};
const LASTFM_API_KEY: &str = "02b96c939a2b451c31dfd67add1f696e";
const LASTFM_API_SECRET: &str = "0f25a80ceef4b470b5cb97d99d4b3420";
@@ -19,21 +16,21 @@ pub enum Error {
#[error("Failed to emit last.fm now playing update")]
NowPlaying(rustfm_scrobble::ScrobblerError),
#[error(transparent)]
-Query(#[from] index::Error),
+Query(#[from] collection::Error),
#[error(transparent)]
User(#[from] user::Error),
}
#[derive(Clone)]
pub struct Manager {
-index: Index,
+browser: collection::Browser,
user_manager: user::Manager,
}
impl Manager {
-pub fn new(index: Index, user_manager: user::Manager) -> Self {
+pub fn new(browser: collection::Browser, user_manager: user::Manager) -> Self {
Self {
-index,
+browser,
user_manager,
}
}
@@ -84,7 +81,7 @@ impl Manager {
}
async fn scrobble_from_path(&self, track: &Path) -> Result<Scrobble, Error> {
-let song = self.index.get_song(track).await?;
+let song = self.browser.get_song(track).await?;
Ok(Scrobble::new(
song.artists.0.first().map(|s| s.as_str()).unwrap_or(""),
song.title.as_deref().unwrap_or(""),


@ -1,8 +1,7 @@
use core::clone::Clone; use core::clone::Clone;
use sqlx::{Acquire, QueryBuilder, Sqlite}; use sqlx::{Acquire, QueryBuilder, Sqlite};
use crate::app::scanner::Song; use crate::app::{collection::Song, vfs};
use crate::app::vfs;
use crate::db::{self, DB}; use crate::db::{self, DB};
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
@ -126,8 +125,6 @@ impl Manager {
playlist_name: &str, playlist_name: &str,
owner: &str, owner: &str,
) -> Result<Vec<Song>, Error> { ) -> Result<Vec<Song>, Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let songs = { let songs = {
let mut connection = self.db.connect().await?; let mut connection = self.db.connect().await?;
@ -163,13 +160,7 @@ impl Manager {
.await? .await?
}; };
// Map real path to virtual paths Ok(songs)
let virtual_songs = songs
.into_iter()
.filter_map(|s| s.virtualize(&vfs))
.collect();
Ok(virtual_songs)
} }
pub async fn delete_playlist(&self, playlist_name: &str, owner: &str) -> Result<(), Error> { pub async fn delete_playlist(&self, playlist_name: &str, owner: &str) -> Result<(), Error> {
@ -231,21 +222,21 @@ mod test {
#[tokio::test] #[tokio::test]
async fn save_playlist_is_idempotent() { async fn save_playlist_is_idempotent() {
let ctx = test::ContextBuilder::new(test_name!()) let mut ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false) .user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection") .mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build() .build()
.await; .await;
ctx.scanner.scan().await.unwrap(); ctx.updater.update().await.unwrap();
let playlist_content: Vec<String> = ctx let playlist_content: Vec<String> = ctx
.index .browser
.flatten(Path::new(TEST_MOUNT_NAME)) .flatten(Path::new(TEST_MOUNT_NAME))
.await .await
.unwrap() .unwrap()
.into_iter() .into_iter()
.map(|s| s.path) .map(|s| s.virtual_path)
.collect(); .collect();
assert_eq!(playlist_content.len(), 13); assert_eq!(playlist_content.len(), 13);
@ -296,21 +287,21 @@ mod test {
#[tokio::test] #[tokio::test]
async fn read_playlist_golden_path() { async fn read_playlist_golden_path() {
let ctx = test::ContextBuilder::new(test_name!()) let mut ctx = test::ContextBuilder::new(test_name!())
.user(TEST_USER, TEST_PASSWORD, false) .user(TEST_USER, TEST_PASSWORD, false)
.mount(TEST_MOUNT_NAME, "test-data/small-collection") .mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build() .build()
.await; .await;
ctx.scanner.scan().await.unwrap(); ctx.updater.update().await.unwrap();
let playlist_content: Vec<String> = ctx let playlist_content: Vec<String> = ctx
.index .browser
.flatten(Path::new(TEST_MOUNT_NAME)) .flatten(Path::new(TEST_MOUNT_NAME))
.await .await
.unwrap() .unwrap()
.into_iter() .into_iter()
.map(|s| s.path) .map(|s| s.virtual_path)
.collect(); .collect();
assert_eq!(playlist_content.len(), 13); assert_eq!(playlist_content.len(), 13);
@ -336,6 +327,6 @@ mod test {
] ]
.iter() .iter()
.collect(); .collect();
assert_eq!(songs[0].path, first_song_path.to_str().unwrap()); assert_eq!(songs[0].virtual_path, first_song_path.to_str().unwrap());
} }
} }


@@ -1,123 +0,0 @@
use log::{error, info};
use std::sync::Arc;
use std::time::{Duration, Instant};
use tokio::sync::Notify;
use crate::app::{settings, vfs};
use crate::db::DB;
mod cleaner;
mod collector;
mod inserter;
mod metadata;
#[cfg(test)]
mod test;
mod traverser;
mod types;
pub use self::types::*;
#[derive(Clone)]
pub struct Scanner {
db: DB,
vfs_manager: vfs::Manager,
settings_manager: settings::Manager,
pending_scan: Arc<Notify>,
}
impl Scanner {
pub fn new(db: DB, vfs_manager: vfs::Manager, settings_manager: settings::Manager) -> Self {
let scanner = Self {
db,
vfs_manager,
settings_manager,
pending_scan: Arc::new(Notify::new()),
};
tokio::spawn({
let scanner = scanner.clone();
async move {
loop {
scanner.pending_scan.notified().await;
if let Err(e) = scanner.scan().await {
error!("Error while updating index: {}", e);
}
}
}
});
scanner
}
pub fn trigger_scan(&self) {
self.pending_scan.notify_one();
}
pub fn begin_periodic_scans(&self) {
tokio::spawn({
let index = self.clone();
async move {
loop {
index.trigger_scan();
let sleep_duration = index
.settings_manager
.get_index_sleep_duration()
.await
.unwrap_or_else(|e| {
error!("Could not retrieve index sleep duration: {}", e);
Duration::from_secs(1800)
});
tokio::time::sleep(sleep_duration).await;
}
}
});
}
pub async fn scan(&self) -> Result<(), types::Error> {
let start = Instant::now();
info!("Beginning library index update");
let album_art_pattern = self
.settings_manager
.get_index_album_art_pattern()
.await
.ok();
let cleaner = cleaner::Cleaner::new(self.db.clone(), self.vfs_manager.clone());
cleaner.clean().await?;
let (insert_sender, insert_receiver) = tokio::sync::mpsc::unbounded_channel();
let insertion = tokio::spawn({
let db = self.db.clone();
async {
let mut inserter = inserter::Inserter::new(db, insert_receiver);
inserter.insert().await;
}
});
let (collect_sender, collect_receiver) = crossbeam_channel::unbounded();
let collection = tokio::task::spawn_blocking(|| {
let collector =
collector::Collector::new(collect_receiver, insert_sender, album_art_pattern);
collector.collect();
});
let vfs = self.vfs_manager.get_vfs().await?;
let traversal = tokio::task::spawn_blocking(move || {
let mounts = vfs.mounts();
let traverser = traverser::Traverser::new(collect_sender);
traverser.traverse(mounts.iter().map(|p| p.source.clone()).collect());
});
traversal.await.unwrap();
collection.await.unwrap();
insertion.await.unwrap();
info!(
"Library index update took {} seconds",
start.elapsed().as_millis() as f32 / 1000.0
);
Ok(())
}
}


@@ -1,101 +0,0 @@
use rayon::prelude::*;
use sqlx::{QueryBuilder, Sqlite};
use std::path::Path;
use crate::app::vfs;
use crate::db::{self, DB};
const INDEX_BUILDING_CLEAN_BUFFER_SIZE: usize = 500; // Deletions in each transaction
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
Database(#[from] sqlx::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
pub struct Cleaner {
db: DB,
vfs_manager: vfs::Manager,
}
impl Cleaner {
pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
Self { db, vfs_manager }
}
pub async fn clean(&self) -> Result<(), Error> {
let vfs = self.vfs_manager.get_vfs().await?;
let (all_directories, all_songs) = {
let mut connection = self.db.connect().await?;
let directories = sqlx::query_scalar!("SELECT path FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let songs = sqlx::query_scalar!("SELECT path FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
(directories, songs)
};
let list_missing_directories = || {
all_directories
.par_iter()
.filter(|ref directory_path| {
let path = Path::new(&directory_path);
!path.exists() || vfs.real_to_virtual(path).is_err()
})
.collect::<Vec<_>>()
};
let list_missing_songs = || {
all_songs
.par_iter()
.filter(|ref song_path| {
let path = Path::new(&song_path);
!path.exists() || vfs.real_to_virtual(path).is_err()
})
.collect::<Vec<_>>()
};
let thread_pool = rayon::ThreadPoolBuilder::new().build()?;
let (missing_directories, missing_songs) =
thread_pool.join(list_missing_directories, list_missing_songs);
{
let mut connection = self.db.connect().await?;
for chunk in missing_directories[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
QueryBuilder::<Sqlite>::new("DELETE FROM directories WHERE path IN ")
.push_tuples(chunk, |mut b, path| {
b.push_bind(path);
})
.build()
.execute(connection.as_mut())
.await?;
}
for chunk in missing_songs[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
QueryBuilder::<Sqlite>::new("DELETE FROM songs WHERE path IN ")
.push_tuples(chunk, |mut b, path| {
b.push_bind(path);
})
.build()
.execute(connection.as_mut())
.await?;
}
}
Ok(())
}
}
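In short, the clean pass loads every known path, detects missing ones in parallel, and deletes the misses in fixed-size chunks so no single statement grows unbounded. Below is a minimal sketch of that shape using only rayon and std; delete_chunk is a hypothetical stand-in for the QueryBuilder DELETE above.

use rayon::prelude::*;
use std::path::Path;

fn clean(all_paths: &[String], chunk_size: usize, delete_chunk: impl Fn(&[&String])) {
	// Detect missing paths in parallel, mirroring the par_iter filters above.
	let missing: Vec<&String> = all_paths
		.par_iter()
		.filter(|p| !Path::new(p.as_str()).exists())
		.collect();
	// Delete them in bounded batches, mirroring INDEX_BUILDING_CLEAN_BUFFER_SIZE.
	for chunk in missing.chunks(chunk_size) {
		delete_chunk(chunk);
	}
}

fn main() {
	let paths = vec!["does/not/exist.mp3".to_string()];
	clean(&paths, 500, |chunk| println!("would delete {} rows", chunk.len()));
}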


@ -1,151 +0,0 @@
use log::error;
use regex::Regex;
use crate::app::scanner::MultiString;
use super::inserter;
use super::traverser;
pub struct Collector {
receiver: crossbeam_channel::Receiver<traverser::Directory>,
sender: tokio::sync::mpsc::UnboundedSender<inserter::Item>,
album_art_pattern: Option<Regex>,
}
impl Collector {
pub fn new(
receiver: crossbeam_channel::Receiver<traverser::Directory>,
sender: tokio::sync::mpsc::UnboundedSender<inserter::Item>,
album_art_pattern: Option<Regex>,
) -> Self {
Self {
receiver,
sender,
album_art_pattern,
}
}
pub fn collect(&self) {
while let Ok(directory) = self.receiver.recv() {
self.collect_directory(directory);
}
}
fn collect_directory(&self, directory: traverser::Directory) {
let mut directory_album = None;
let mut directory_year = None;
let mut directory_artists = None;
let mut inconsistent_directory_album = false;
let mut inconsistent_directory_year = false;
let mut inconsistent_directory_artist = false;
let directory_artwork = self.get_artwork(&directory);
let directory_path_string = directory.path.to_string_lossy().to_string();
let directory_parent_string = directory.parent.map(|p| p.to_string_lossy().to_string());
for song in directory.songs {
let tags = song.metadata;
let path_string = song.path.to_string_lossy().to_string();
if tags.year.is_some() {
inconsistent_directory_year |=
directory_year.is_some() && directory_year != tags.year;
directory_year = tags.year;
}
if tags.album.is_some() {
inconsistent_directory_album |=
directory_album.is_some() && directory_album != tags.album;
directory_album = tags.album.as_ref().cloned();
}
if !tags.album_artists.is_empty() {
inconsistent_directory_artist |= directory_artists.is_some()
&& directory_artists.as_ref() != Some(&tags.album_artists);
directory_artists = Some(tags.album_artists.clone());
} else if !tags.artists.is_empty() {
inconsistent_directory_artist |= directory_artists.is_some()
&& directory_artists.as_ref() != Some(&tags.artists);
directory_artists = Some(tags.artists.clone());
}
let artwork_path = if tags.has_artwork {
Some(path_string.clone())
} else {
directory_artwork.as_ref().cloned()
};
if let Err(e) = self.sender.send(inserter::Item::Song(inserter::Song {
path: path_string,
parent: directory_path_string.clone(),
disc_number: tags.disc_number.map(|n| n as i32),
track_number: tags.track_number.map(|n| n as i32),
title: tags.title,
duration: tags.duration.map(|n| n as i32),
artists: MultiString(tags.artists),
album_artists: MultiString(tags.album_artists),
album: tags.album,
year: tags.year,
artwork: artwork_path,
lyricists: MultiString(tags.lyricists),
composers: MultiString(tags.composers),
genres: MultiString(tags.genres),
labels: MultiString(tags.labels),
})) {
error!("Error while sending song from collector: {}", e);
}
}
if inconsistent_directory_year {
directory_year = None;
}
if inconsistent_directory_album {
directory_album = None;
}
if inconsistent_directory_artist {
directory_artists = None;
}
if let Err(e) = self
.sender
.send(inserter::Item::Directory(inserter::Directory {
path: directory_path_string,
parent: directory_parent_string,
artwork: directory_artwork,
album: directory_album,
artists: MultiString(directory_artists.unwrap_or_default()),
year: directory_year,
date_added: directory.created,
})) {
error!("Error while sending directory from collector: {}", e);
}
}
fn get_artwork(&self, directory: &traverser::Directory) -> Option<String> {
let regex_artwork = directory.other_files.iter().find_map(|path| {
let matches = path
.file_name()
.and_then(|name| name.to_str())
.map(|name| match &self.album_art_pattern {
Some(pattern) => pattern.is_match(name),
None => false,
})
.unwrap_or(false);
if matches {
Some(path.to_string_lossy().to_string())
} else {
None
}
});
let embedded_artwork = directory.songs.iter().find_map(|song| {
if song.metadata.has_artwork {
Some(song.path.to_string_lossy().to_string())
} else {
None
}
});
regex_artwork.or(embedded_artwork)
}
}
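The artwork lookup above prefers a sibling file whose name matches the configured album-art pattern, and only then falls back to a song with embedded artwork. A small standalone sketch of that priority order follows; the inputs and the pick_artwork name are hypothetical.

use regex::Regex;

fn pick_artwork(
	other_files: &[&str],
	songs_with_embedded_art: &[&str],
	pattern: Option<&Regex>,
) -> Option<String> {
	// A file name matching the album-art pattern wins...
	let by_pattern = other_files
		.iter()
		.copied()
		.find(|name| pattern.map(|p| p.is_match(name)).unwrap_or(false));
	// ...otherwise fall back to the first song carrying embedded artwork.
	by_pattern
		.or_else(|| songs_with_embedded_art.first().copied())
		.map(|name| name.to_string())
}

fn main() {
	let pattern = Regex::new("(?i)^folder\\.(jpg|png)$").ok();
	let artwork = pick_artwork(&["Folder.jpg", "notes.txt"], &["01 - Intro.mp3"], pattern.as_ref());
	assert_eq!(artwork.as_deref(), Some("Folder.jpg"));
}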


@ -1,147 +0,0 @@
use log::error;
use sqlx::{QueryBuilder, Sqlite};
use tokio::sync::mpsc::UnboundedReceiver;
use crate::{app::scanner::MultiString, db::DB};
const INDEX_BUILDING_INSERT_BUFFER_SIZE: usize = 1000; // Insertions in each transaction
pub struct Song {
pub path: String,
pub parent: String,
pub track_number: Option<i32>,
pub disc_number: Option<i32>,
pub title: Option<String>,
pub artists: MultiString,
pub album_artists: MultiString,
pub year: Option<i32>,
pub album: Option<String>,
pub artwork: Option<String>,
pub duration: Option<i32>,
pub lyricists: MultiString,
pub composers: MultiString,
pub genres: MultiString,
pub labels: MultiString,
}
pub struct Directory {
pub path: String,
pub parent: Option<String>,
pub artists: MultiString,
pub year: Option<i32>,
pub album: Option<String>,
pub artwork: Option<String>,
pub date_added: i32,
}
pub enum Item {
Directory(Directory),
Song(Song),
}
pub struct Inserter {
receiver: UnboundedReceiver<Item>,
new_directories: Vec<Directory>,
new_songs: Vec<Song>,
db: DB,
}
impl Inserter {
pub fn new(db: DB, receiver: UnboundedReceiver<Item>) -> Self {
let new_directories = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
let new_songs = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);
Self {
receiver,
new_directories,
new_songs,
db,
}
}
pub async fn insert(&mut self) {
while let Some(item) = self.receiver.recv().await {
self.insert_item(item).await;
}
self.flush_directories().await;
self.flush_songs().await;
}
async fn insert_item(&mut self, insert: Item) {
match insert {
Item::Directory(d) => {
self.new_directories.push(d);
if self.new_directories.len() >= INDEX_BUILDING_INSERT_BUFFER_SIZE {
self.flush_directories().await;
}
}
Item::Song(s) => {
self.new_songs.push(s);
if self.new_songs.len() >= INDEX_BUILDING_INSERT_BUFFER_SIZE {
self.flush_songs().await;
}
}
};
}
async fn flush_directories(&mut self) {
let Ok(mut connection) = self.db.connect().await else {
error!("Could not acquire connection to insert new directories in database");
return;
};
let result = QueryBuilder::<Sqlite>::new(
"INSERT INTO directories(path, parent, artists, year, album, artwork, date_added) ",
)
.push_values(&self.new_directories, |mut b, directory| {
b.push_bind(&directory.path)
.push_bind(&directory.parent)
.push_bind(&directory.artists)
.push_bind(directory.year)
.push_bind(&directory.album)
.push_bind(&directory.artwork)
.push_bind(directory.date_added);
})
.build()
.execute(connection.as_mut())
.await;
match result {
Ok(_) => self.new_directories.clear(),
Err(_) => error!("Could not insert new directories in database"),
};
}
async fn flush_songs(&mut self) {
let Ok(mut connection) = self.db.connect().await else {
error!("Could not acquire connection to insert new songs in database");
return;
};
let result = QueryBuilder::<Sqlite>::new("INSERT INTO songs(path, parent, track_number, disc_number, title, artists, album_artists, year, album, artwork, duration, lyricists, composers, genres, labels) ")
.push_values(&self.new_songs, |mut b, song| {
b.push_bind(&song.path)
.push_bind(&song.parent)
.push_bind(song.track_number)
.push_bind(song.disc_number)
.push_bind(&song.title)
.push_bind(&song.artists)
.push_bind(&song.album_artists)
.push_bind(song.year)
.push_bind(&song.album)
.push_bind(&song.artwork)
.push_bind(song.duration)
.push_bind(&song.lyricists)
.push_bind(&song.composers)
.push_bind(&song.genres)
.push_bind(&song.labels);
})
.build()
.execute(connection.as_mut())
.await;
match result {
Ok(_) => self.new_songs.clear(),
Err(_) => error!("Could not insert new songs in database"),
};
}
}
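The insert path above is a buffer-then-batch pattern: rows accumulate until a buffer reaches INDEX_BUILDING_INSERT_BUFFER_SIZE, and whatever remains is flushed once the channel closes. Here is a generic sketch of the same idea, detached from sqlx; write_batch is a hypothetical stand-in for the QueryBuilder INSERT above.

// Minimal sketch of the buffered-flush pattern used by Inserter.
struct Buffered<T> {
	buffer: Vec<T>,
	capacity: usize,
}

impl<T> Buffered<T> {
	fn new(capacity: usize) -> Self {
		Self { buffer: Vec::with_capacity(capacity), capacity }
	}

	fn push(&mut self, item: T, write_batch: impl Fn(&[T])) {
		self.buffer.push(item);
		if self.buffer.len() >= self.capacity {
			write_batch(&self.buffer);
			self.buffer.clear();
		}
	}

	fn flush(&mut self, write_batch: impl Fn(&[T])) {
		if !self.buffer.is_empty() {
			write_batch(&self.buffer);
			self.buffer.clear();
		}
	}
}

fn main() {
	let mut songs = Buffered::new(3);
	let write_batch = |batch: &[&str]| println!("inserting {} rows", batch.len());
	for song in ["a.mp3", "b.mp3", "c.mp3", "d.mp3"] {
		songs.push(song, write_batch);
	}
	songs.flush(write_batch); // the trailing partial batch ("d.mp3")
}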


@ -1,133 +0,0 @@
use std::path::PathBuf;
use crate::{
app::{scanner, settings, test},
test_name,
};
const TEST_MOUNT_NAME: &str = "root";
#[tokio::test]
async fn scan_adds_new_content() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
ctx.scanner.scan().await.unwrap(); // Validates that subsequent updates don't run into conflicts
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
#[tokio::test]
async fn scan_removes_missing_content() {
let builder = test::ContextBuilder::new(test_name!());
let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect();
let test_collection_dir: PathBuf = builder.test_directory.join("small-collection");
let copy_options = fs_extra::dir::CopyOptions::new();
fs_extra::dir::copy(
original_collection_dir,
&builder.test_directory,
&copy_options,
)
.unwrap();
let ctx = builder
.mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap())
.build()
.await;
ctx.scanner.scan().await.unwrap();
{
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 6);
assert_eq!(all_songs.len(), 13);
}
let khemmis_directory = test_collection_dir.join("Khemmis");
std::fs::remove_dir_all(khemmis_directory).unwrap();
ctx.scanner.scan().await.unwrap();
{
let mut connection = ctx.db.connect().await.unwrap();
let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
.fetch_all(connection.as_mut())
.await
.unwrap();
let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
.fetch_all(connection.as_mut())
.await
.unwrap();
assert_eq!(all_directories.len(), 4);
assert_eq!(all_songs.len(), 8);
}
}
#[tokio::test]
async fn finds_embedded_artwork() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
ctx.scanner.scan().await.unwrap();
let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3");
let song = ctx.index.get_song(&song_virtual_path).await.unwrap();
assert_eq!(
song.artwork,
Some(song_virtual_path.to_string_lossy().into_owned())
);
}
#[tokio::test]
async fn album_art_pattern_is_case_insensitive() {
let ctx = test::ContextBuilder::new(test_name!())
.mount(TEST_MOUNT_NAME, "test-data/small-collection")
.build()
.await;
let patterns = vec!["folder", "FOLDER"];
for pattern in patterns.into_iter() {
ctx.settings_manager
.amend(&settings::NewSettings {
album_art_pattern: Some(pattern.to_owned()),
..Default::default()
})
.await
.unwrap();
ctx.scanner.scan().await.unwrap();
let hunted_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect();
let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg");
let song = &ctx.index.flatten(&hunted_virtual_dir).await.unwrap()[0];
assert_eq!(
song.artwork,
Some(artwork_virtual_path.to_string_lossy().into_owned())
);
}
}


@ -1,202 +0,0 @@
use crossbeam_channel::{self, Receiver, Sender};
use log::{error, info};
use std::cmp::min;
use std::fs;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::app::scanner::metadata::{self, SongMetadata};
#[derive(Debug)]
pub struct Song {
pub path: PathBuf,
pub metadata: SongMetadata,
}
#[derive(Debug)]
pub struct Directory {
pub parent: Option<PathBuf>,
pub path: PathBuf,
pub songs: Vec<Song>,
pub other_files: Vec<PathBuf>,
pub created: i32,
}
pub struct Traverser {
directory_sender: Sender<Directory>,
}
#[derive(Debug)]
struct WorkItem {
parent: Option<PathBuf>,
path: PathBuf,
}
impl Traverser {
pub fn new(directory_sender: Sender<Directory>) -> Self {
Self { directory_sender }
}
pub fn traverse(&self, roots: Vec<PathBuf>) {
let num_pending_work_items = Arc::new(AtomicUsize::new(roots.len()));
let (work_item_sender, work_item_receiver) = crossbeam_channel::unbounded();
let key = "POLARIS_NUM_TRAVERSER_THREADS";
let num_threads = std::env::var_os(key)
.map(|v| v.to_string_lossy().to_string())
.and_then(|v| usize::from_str(&v).ok())
.unwrap_or_else(|| min(num_cpus::get(), 4));
info!("Browsing collection using {} threads", num_threads);
let mut threads = Vec::new();
for _ in 0..num_threads {
let work_item_sender = work_item_sender.clone();
let work_item_receiver = work_item_receiver.clone();
let directory_sender = self.directory_sender.clone();
let num_pending_work_items = num_pending_work_items.clone();
threads.push(thread::spawn(move || {
let worker = Worker {
work_item_sender,
work_item_receiver,
directory_sender,
num_pending_work_items,
};
worker.run();
}));
}
for root in roots {
let work_item = WorkItem {
parent: None,
path: root,
};
if let Err(e) = work_item_sender.send(work_item) {
error!("Error initializing traverser: {:#?}", e);
}
}
for thread in threads {
if let Err(e) = thread.join() {
error!("Error joining on traverser worker thread: {:#?}", e);
}
}
}
}
struct Worker {
work_item_sender: Sender<WorkItem>,
work_item_receiver: Receiver<WorkItem>,
directory_sender: Sender<Directory>,
num_pending_work_items: Arc<AtomicUsize>,
}
impl Worker {
fn run(&self) {
while let Some(work_item) = self.find_work_item() {
self.process_work_item(work_item);
self.on_item_processed();
}
}
fn find_work_item(&self) -> Option<WorkItem> {
loop {
if self.is_all_work_done() {
return None;
}
if let Ok(w) = self
.work_item_receiver
.recv_timeout(Duration::from_millis(100))
{
return Some(w);
}
}
}
fn is_all_work_done(&self) -> bool {
self.num_pending_work_items.load(Ordering::SeqCst) == 0
}
fn queue_work(&self, work_item: WorkItem) {
self.num_pending_work_items.fetch_add(1, Ordering::SeqCst);
self.work_item_sender.send(work_item).unwrap();
}
fn on_item_processed(&self) {
self.num_pending_work_items.fetch_sub(1, Ordering::SeqCst);
}
fn emit_directory(&self, directory: Directory) {
self.directory_sender.send(directory).unwrap();
}
pub fn process_work_item(&self, work_item: WorkItem) {
let read_dir = match fs::read_dir(&work_item.path) {
Ok(read_dir) => read_dir,
Err(e) => {
error!(
"Directory read error for `{}`: {}",
work_item.path.display(),
e
);
return;
}
};
let mut sub_directories = Vec::new();
let mut songs = Vec::new();
let mut other_files = Vec::new();
for entry in read_dir {
let path = match entry {
Ok(ref f) => f.path(),
Err(e) => {
error!(
"File read error within `{}`: {}",
work_item.path.display(),
e
);
break;
}
};
if path.is_dir() {
sub_directories.push(path);
} else if let Some(metadata) = metadata::read(&path) {
songs.push(Song { path, metadata });
} else {
other_files.push(path);
}
}
let created = Self::get_date_created(&work_item.path).unwrap_or_default();
self.emit_directory(Directory {
path: work_item.path.to_owned(),
parent: work_item.parent,
songs,
other_files,
created,
});
for sub_directory in sub_directories.into_iter() {
self.queue_work(WorkItem {
parent: Some(work_item.path.clone()),
path: sub_directory,
});
}
}
fn get_date_created(path: &Path) -> Option<i32> {
if let Ok(t) = fs::metadata(path).and_then(|m| m.created().or_else(|_| m.modified())) {
t.duration_since(std::time::UNIX_EPOCH)
.map(|d| d.as_secs() as i32)
.ok()
} else {
None
}
}
}
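Worker shutdown above cannot rely on channel disconnection, since every worker keeps a sender clone to queue sub-directories; instead, a shared counter of pending work items reaches zero only once every queued directory has been fully processed. Below is a minimal sketch of that termination scheme; the expand function is a hypothetical stand-in for reading a directory.

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;

fn expand(item: u32) -> Vec<u32> {
	// Hypothetical stand-in for listing the sub-directories of a directory.
	if item < 10 { vec![item * 10 + 1, item * 10 + 2] } else { Vec::new() }
}

fn run_workers(roots: Vec<u32>, num_threads: usize) {
	let pending = Arc::new(AtomicUsize::new(roots.len()));
	let (tx, rx) = crossbeam_channel::unbounded();
	for root in roots {
		tx.send(root).unwrap();
	}

	let workers: Vec<_> = (0..num_threads)
		.map(|_| {
			let (tx, rx, pending) = (tx.clone(), rx.clone(), pending.clone());
			std::thread::spawn(move || loop {
				if pending.load(Ordering::SeqCst) == 0 {
					return; // every queued item has been fully processed
				}
				// The queue may be momentarily empty while other workers are
				// still producing, so poll with a timeout instead of blocking.
				let Ok(item) = rx.recv_timeout(Duration::from_millis(100)) else {
					continue;
				};
				for child in expand(item) {
					pending.fetch_add(1, Ordering::SeqCst); // account before handing off
					tx.send(child).unwrap();
				}
				pending.fetch_sub(1, Ordering::SeqCst); // this item is now done
			})
		})
		.collect();

	for worker in workers {
		worker.join().unwrap();
	}
}

fn main() {
	run_workers(vec![1, 2, 3], 4);
}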


@ -1,125 +0,0 @@
use std::{borrow::Cow, path::Path};
use sqlx::{
encode::IsNull,
sqlite::{SqliteArgumentValue, SqliteTypeInfo},
Sqlite,
};
use crate::{
app::vfs::{self, VFS},
db,
};
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error(transparent)]
IndexClean(#[from] super::cleaner::Error),
#[error(transparent)]
Database(#[from] sqlx::Error),
#[error(transparent)]
DatabaseConnection(#[from] db::Error),
#[error(transparent)]
Vfs(#[from] vfs::Error),
}
#[derive(Debug, PartialEq, Eq)]
pub struct MultiString(pub Vec<String>);
static MULTI_STRING_SEPARATOR: &str = "\u{000C}";
impl<'q> sqlx::Encode<'q, Sqlite> for MultiString {
fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'q>>) -> IsNull {
if self.0.is_empty() {
IsNull::Yes
} else {
let joined = self.0.join(MULTI_STRING_SEPARATOR);
args.push(SqliteArgumentValue::Text(Cow::Owned(joined)));
IsNull::No
}
}
}
impl From<Option<String>> for MultiString {
fn from(value: Option<String>) -> Self {
match value {
None => MultiString(Vec::new()),
Some(s) => MultiString(
s.split(MULTI_STRING_SEPARATOR)
.map(|s| s.to_string())
.collect(),
),
}
}
}
impl sqlx::Type<Sqlite> for MultiString {
fn type_info() -> SqliteTypeInfo {
<&str as sqlx::Type<Sqlite>>::type_info()
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Song {
pub id: i64,
pub path: String,
pub parent: String,
pub track_number: Option<i64>,
pub disc_number: Option<i64>,
pub title: Option<String>,
pub artists: MultiString,
pub album_artists: MultiString,
pub year: Option<i64>,
pub album: Option<String>,
pub artwork: Option<String>,
pub duration: Option<i64>,
pub lyricists: MultiString,
pub composers: MultiString,
pub genres: MultiString,
pub labels: MultiString,
}
impl Song {
pub fn virtualize(mut self, vfs: &VFS) -> Option<Song> {
self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
Ok(p) => p.to_string_lossy().into_owned(),
_ => return None,
};
if let Some(artwork_path) = self.artwork {
self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
Ok(p) => Some(p.to_string_lossy().into_owned()),
_ => None,
};
}
Some(self)
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Directory {
pub id: i64,
pub path: String,
pub parent: Option<String>,
// TODO remove all below when explorer and metadata browsing are separate
pub artists: MultiString,
pub year: Option<i64>,
pub album: Option<String>,
pub artwork: Option<String>,
pub date_added: i64,
}
impl Directory {
pub fn virtualize(mut self, vfs: &VFS) -> Option<Directory> {
self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
Ok(p) => p.to_string_lossy().into_owned(),
_ => return None,
};
if let Some(artwork_path) = self.artwork {
self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
Ok(p) => Some(p.to_string_lossy().into_owned()),
_ => None,
};
}
Some(self)
}
}
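One detail worth calling out in the file above: MultiString stores a list of values in a single TEXT column joined on the U+000C separator, and an empty list round-trips through SQL NULL. Below is a minimal round-trip sketch using only the separator shown above; encode and decode are hypothetical helpers, not part of the codebase.

// Minimal round-trip sketch for the MultiString column encoding.
const SEPARATOR: &str = "\u{000C}";

fn encode(values: &[String]) -> Option<String> {
	if values.is_empty() {
		None // persisted as SQL NULL, mirroring IsNull::Yes above
	} else {
		Some(values.join(SEPARATOR))
	}
}

fn decode(column: Option<String>) -> Vec<String> {
	match column {
		None => Vec::new(),
		Some(s) => s.split(SEPARATOR).map(|s| s.to_string()).collect(),
	}
}

fn main() {
	let artists = vec!["Stratovarius".to_string(), "Kamelot".to_string()];
	assert_eq!(decode(encode(&artists)), artists);
}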


@ -1,13 +1,13 @@
use std::path::PathBuf; use std::path::PathBuf;
use crate::app::{config, ddns, index::Index, playlist, scanner::Scanner, settings, user, vfs}; use crate::app::{collection, config, ddns, playlist, settings, user, vfs};
use crate::db::DB; use crate::db::DB;
use crate::test::*; use crate::test::*;
pub struct Context { pub struct Context {
pub db: DB, pub db: DB,
pub scanner: Scanner, pub browser: collection::Browser,
pub index: Index, pub updater: collection::Updater,
pub config_manager: config::Manager, pub config_manager: config::Manager,
pub ddns_manager: ddns::Manager, pub ddns_manager: ddns::Manager,
pub playlist_manager: playlist::Manager, pub playlist_manager: playlist::Manager,
@ -66,16 +66,22 @@ impl ContextBuilder {
vfs_manager.clone(), vfs_manager.clone(),
ddns_manager.clone(), ddns_manager.clone(),
); );
let scanner = Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone()); let browser = collection::Browser::new(db.clone(), vfs_manager.clone());
let index = Index::new(db.clone(), vfs_manager.clone()); let index = collection::Index::new();
let updater = collection::Updater::new(
db.clone(),
index.clone(),
settings_manager.clone(),
vfs_manager.clone(),
);
let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone()); let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone());
config_manager.apply(&self.config).await.unwrap(); config_manager.apply(&self.config).await.unwrap();
Context { Context {
db, db,
scanner, browser,
index, updater,
config_manager, config_manager,
ddns_manager, ddns_manager,
playlist_manager, playlist_manager,


@ -8,8 +8,6 @@ use crate::db::{self, DB};
#[derive(thiserror::Error, Debug)] #[derive(thiserror::Error, Debug)]
pub enum Error { pub enum Error {
#[error("The following real path could not be mapped to a virtual path: `{0}`")]
CouldNotMapToVirtualPath(PathBuf),
#[error("The following virtual path could not be mapped to a real path: `{0}`")] #[error("The following virtual path could not be mapped to a real path: `{0}`")]
CouldNotMapToRealPath(PathBuf), CouldNotMapToRealPath(PathBuf),
#[error(transparent)] #[error(transparent)]
@ -54,18 +52,10 @@ impl VFS {
VFS { mounts } VFS { mounts }
} }
pub fn real_to_virtual<P: AsRef<Path>>(&self, real_path: P) -> Result<PathBuf, Error> { pub fn exists<P: AsRef<Path>>(&self, virtual_path: P) -> bool {
for mount in &self.mounts { self.mounts
if let Ok(p) = real_path.as_ref().strip_prefix(&mount.source) { .iter()
let mount_path = Path::new(&mount.name); .any(|m| virtual_path.as_ref().starts_with(&m.name))
return if p.components().count() == 0 {
Ok(mount_path.to_path_buf())
} else {
Ok(mount_path.join(p))
};
}
}
Err(Error::CouldNotMapToVirtualPath(real_path.as_ref().into()))
} }
pub fn virtual_to_real<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> { pub fn virtual_to_real<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> {
@ -162,18 +152,6 @@ mod test {
assert_eq!(converted_path, real_path); assert_eq!(converted_path, real_path);
} }
#[test]
fn converts_real_to_virtual() {
let vfs = VFS::new(vec![Mount {
name: "root".to_owned(),
source: Path::new("test_dir").to_owned(),
}]);
let virtual_path: PathBuf = ["root", "somewhere", "something.png"].iter().collect();
let real_path: PathBuf = ["test_dir", "somewhere", "something.png"].iter().collect();
let converted_path = vfs.real_to_virtual(real_path.as_path()).unwrap();
assert_eq!(converted_path, virtual_path);
}
#[test] #[test]
fn cleans_path_string() { fn cleans_path_string() {
let mut correct_path = path::PathBuf::new(); let mut correct_path = path::PathBuf::new();


@ -48,19 +48,16 @@ CREATE TABLE users (
CREATE TABLE directories ( CREATE TABLE directories (
id INTEGER PRIMARY KEY NOT NULL, id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL, path TEXT NOT NULL,
parent TEXT, virtual_path TEXT NOT NULL,
artists TEXT, virtual_parent TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
date_added INTEGER DEFAULT 0 NOT NULL,
UNIQUE(path) ON CONFLICT REPLACE UNIQUE(path) ON CONFLICT REPLACE
); );
CREATE TABLE songs ( CREATE TABLE songs (
id INTEGER PRIMARY KEY NOT NULL, id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL, path TEXT NOT NULL,
parent TEXT NOT NULL, virtual_path TEXT NOT NULL,
virtual_parent TEXT NOT NULL,
track_number INTEGER, track_number INTEGER,
disc_number INTEGER, disc_number INTEGER,
title TEXT, title TEXT,
@ -74,6 +71,7 @@ CREATE TABLE songs (
composers TEXT, composers TEXT,
genres TEXT, genres TEXT,
labels TEXT, labels TEXT,
date_added INTEGER DEFAULT 0 NOT NULL,
UNIQUE(path) ON CONFLICT REPLACE UNIQUE(path) ON CONFLICT REPLACE
); );


@ -144,7 +144,7 @@ fn main() -> Result<(), Error> {
async fn async_main(cli_options: CLIOptions, paths: paths::Paths) -> Result<(), Error> { async fn async_main(cli_options: CLIOptions, paths: paths::Paths) -> Result<(), Error> {
// Create and run app // Create and run app
let app = app::App::new(cli_options.port.unwrap_or(5050), paths).await?; let app = app::App::new(cli_options.port.unwrap_or(5050), paths).await?;
app.scanner.begin_periodic_scans(); app.updater.begin_periodic_scans();
app.ddns_manager.begin_periodic_updates(); app.ddns_manager.begin_periodic_updates();
// Start server // Start server


@ -27,6 +27,18 @@ pub async fn launch(app: App) -> Result<(), std::io::Error> {
Ok(()) Ok(())
} }
impl FromRef<App> for app::collection::Browser {
fn from_ref(app: &App) -> Self {
app.browser.clone()
}
}
impl FromRef<App> for app::collection::Updater {
fn from_ref(app: &App) -> Self {
app.updater.clone()
}
}
impl FromRef<App> for app::config::Manager { impl FromRef<App> for app::config::Manager {
fn from_ref(app: &App) -> Self { fn from_ref(app: &App) -> Self {
app.config_manager.clone() app.config_manager.clone()
@ -39,12 +51,6 @@ impl FromRef<App> for app::ddns::Manager {
} }
} }
impl FromRef<App> for app::index::Index {
fn from_ref(app: &App) -> Self {
app.index.clone()
}
}
impl FromRef<App> for app::lastfm::Manager { impl FromRef<App> for app::lastfm::Manager {
fn from_ref(app: &App) -> Self { fn from_ref(app: &App) -> Self {
app.lastfm_manager.clone() app.lastfm_manager.clone()
@ -63,12 +69,6 @@ impl FromRef<App> for app::user::Manager {
} }
} }
impl FromRef<App> for app::scanner::Scanner {
fn from_ref(app: &App) -> Self {
app.scanner.clone()
}
}
impl FromRef<App> for app::settings::Manager { impl FromRef<App> for app::settings::Manager {
fn from_ref(app: &App) -> Self { fn from_ref(app: &App) -> Self {
app.settings_manager.clone() app.settings_manager.clone()


@ -11,7 +11,7 @@ use base64::{prelude::BASE64_STANDARD_NO_PAD, Engine};
use percent_encoding::percent_decode_str; use percent_encoding::percent_decode_str;
use crate::{ use crate::{
app::{config, ddns, index, lastfm, playlist, scanner, settings, thumbnail, user, vfs, App}, app::{collection, config, ddns, lastfm, playlist, settings, thumbnail, user, vfs, App},
server::{dto, error::APIError, APIMajorVersion, API_MAJOR_VERSION, API_MINOR_VERSION}, server::{dto, error::APIError, APIMajorVersion, API_MAJOR_VERSION, API_MINOR_VERSION},
}; };
@ -247,14 +247,14 @@ async fn put_preferences(
async fn post_trigger_index( async fn post_trigger_index(
_admin_rights: AdminRights, _admin_rights: AdminRights,
State(scanner): State<scanner::Scanner>, State(updater): State<collection::Updater>,
) -> Result<(), APIError> { ) -> Result<(), APIError> {
scanner.trigger_scan(); updater.trigger_scan();
Ok(()) Ok(())
} }
fn collection_files_to_response( fn collection_files_to_response(
files: Vec<index::CollectionFile>, files: Vec<collection::File>,
api_version: APIMajorVersion, api_version: APIMajorVersion,
) -> Response { ) -> Response {
match api_version { match api_version {
@ -275,7 +275,7 @@ fn collection_files_to_response(
} }
} }
fn songs_to_response(files: Vec<scanner::Song>, api_version: APIMajorVersion) -> Response { fn songs_to_response(files: Vec<collection::Song>, api_version: APIMajorVersion) -> Response {
match api_version { match api_version {
APIMajorVersion::V7 => Json( APIMajorVersion::V7 => Json(
files files
@ -295,7 +295,7 @@ fn songs_to_response(files: Vec<scanner::Song>, api_version: APIMajorVersion) ->
} }
fn directories_to_response( fn directories_to_response(
files: Vec<scanner::Directory>, files: Vec<collection::Directory>,
api_version: APIMajorVersion, api_version: APIMajorVersion,
) -> Response { ) -> Response {
match api_version { match api_version {
@ -319,9 +319,9 @@ fn directories_to_response(
async fn get_browse_root( async fn get_browse_root(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
) -> Response { ) -> Response {
let result = match index.browse(std::path::Path::new("")).await { let result = match browser.browse(std::path::Path::new("")).await {
Ok(r) => r, Ok(r) => r,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -331,11 +331,11 @@ async fn get_browse_root(
async fn get_browse( async fn get_browse(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
Path(path): Path<String>, Path(path): Path<String>,
) -> Response { ) -> Response {
let path = percent_decode_str(&path).decode_utf8_lossy(); let path = percent_decode_str(&path).decode_utf8_lossy();
let result = match index.browse(std::path::Path::new(path.as_ref())).await { let result = match browser.browse(std::path::Path::new(path.as_ref())).await {
Ok(r) => r, Ok(r) => r,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -345,9 +345,9 @@ async fn get_browse(
async fn get_flatten_root( async fn get_flatten_root(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
) -> Response { ) -> Response {
let songs = match index.flatten(std::path::Path::new("")).await { let songs = match browser.flatten(std::path::Path::new("")).await {
Ok(s) => s, Ok(s) => s,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -357,11 +357,11 @@ async fn get_flatten_root(
async fn get_flatten( async fn get_flatten(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
Path(path): Path<String>, Path(path): Path<String>,
) -> Response { ) -> Response {
let path = percent_decode_str(&path).decode_utf8_lossy(); let path = percent_decode_str(&path).decode_utf8_lossy();
let songs = match index.flatten(std::path::Path::new(path.as_ref())).await { let songs = match browser.flatten(std::path::Path::new(path.as_ref())).await {
Ok(s) => s, Ok(s) => s,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -371,9 +371,9 @@ async fn get_flatten(
async fn get_random( async fn get_random(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
) -> Response { ) -> Response {
let directories = match index.get_random_albums(20).await { let directories = match browser.get_random_albums(20).await {
Ok(d) => d, Ok(d) => d,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -383,9 +383,9 @@ async fn get_random(
async fn get_recent( async fn get_recent(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
) -> Response { ) -> Response {
let directories = match index.get_recent_albums(20).await { let directories = match browser.get_recent_albums(20).await {
Ok(d) => d, Ok(d) => d,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -395,9 +395,9 @@ async fn get_recent(
async fn get_search_root( async fn get_search_root(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
) -> Response { ) -> Response {
let files = match index.search("").await { let files = match browser.search("").await {
Ok(f) => f, Ok(f) => f,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };
@ -407,10 +407,10 @@ async fn get_search_root(
async fn get_search( async fn get_search(
_auth: Auth, _auth: Auth,
api_version: APIMajorVersion, api_version: APIMajorVersion,
State(index): State<index::Index>, State(browser): State<collection::Browser>,
Path(query): Path<String>, Path(query): Path<String>,
) -> Response { ) -> Response {
let files = match index.search(&query).await { let files = match browser.search(&query).await {
Ok(f) => f, Ok(f) => f,
Err(e) => return APIError::from(e).into_response(), Err(e) => return APIError::from(e).into_response(),
}; };


@ -20,6 +20,7 @@ impl IntoResponse for APIError {
} }
APIError::Database(_) => StatusCode::INTERNAL_SERVER_ERROR, APIError::Database(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::DeletingOwnAccount => StatusCode::CONFLICT, APIError::DeletingOwnAccount => StatusCode::CONFLICT,
APIError::DirectoryNotFound(_) => StatusCode::NOT_FOUND,
APIError::EmbeddedArtworkNotFound => StatusCode::NOT_FOUND, APIError::EmbeddedArtworkNotFound => StatusCode::NOT_FOUND,
APIError::EmptyPassword => StatusCode::BAD_REQUEST, APIError::EmptyPassword => StatusCode::BAD_REQUEST,
APIError::EmptyUsername => StatusCode::BAD_REQUEST, APIError::EmptyUsername => StatusCode::BAD_REQUEST,
@ -36,7 +37,6 @@ impl IntoResponse for APIError {
APIError::PasswordHashing => StatusCode::INTERNAL_SERVER_ERROR, APIError::PasswordHashing => StatusCode::INTERNAL_SERVER_ERROR,
APIError::PlaylistNotFound => StatusCode::NOT_FOUND, APIError::PlaylistNotFound => StatusCode::NOT_FOUND,
APIError::Settings(_) => StatusCode::INTERNAL_SERVER_ERROR, APIError::Settings(_) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::SongMetadataNotFound => StatusCode::NOT_FOUND,
APIError::ThumbnailFlacDecoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR, APIError::ThumbnailFlacDecoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,
APIError::ThumbnailFileIOError => StatusCode::NOT_FOUND, APIError::ThumbnailFileIOError => StatusCode::NOT_FOUND,
APIError::ThumbnailId3Decoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR, APIError::ThumbnailId3Decoding(_, _) => StatusCode::INTERNAL_SERVER_ERROR,


@ -1,9 +1,8 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::app::{ use crate::app::{
config, ddns, index, collection::{self, MultiString},
scanner::{self, MultiString}, config, ddns, settings, thumbnail, user, vfs,
settings, thumbnail, user, vfs,
}; };
use std::convert::From; use std::convert::From;
@ -238,11 +237,11 @@ pub enum CollectionFile {
Song(Song), Song(Song),
} }
impl From<index::CollectionFile> for CollectionFile { impl From<collection::File> for CollectionFile {
fn from(f: index::CollectionFile) -> Self { fn from(f: collection::File) -> Self {
match f { match f {
index::CollectionFile::Directory(d) => Self::Directory(d.into()), collection::File::Directory(d) => Self::Directory(d.into()),
index::CollectionFile::Song(s) => Self::Song(s.into()), collection::File::Song(s) => Self::Song(s.into()),
} }
} }
} }
@ -275,10 +274,10 @@ pub struct Song {
pub label: Option<String>, pub label: Option<String>,
} }
impl From<scanner::Song> for Song { impl From<collection::Song> for Song {
fn from(s: scanner::Song) -> Self { fn from(s: collection::Song) -> Self {
Self { Self {
path: s.path, path: s.virtual_path,
track_number: s.track_number, track_number: s.track_number,
disc_number: s.disc_number, disc_number: s.disc_number,
title: s.title, title: s.title,
@ -306,15 +305,15 @@ pub struct Directory {
pub date_added: i64, pub date_added: i64,
} }
impl From<scanner::Directory> for Directory { impl From<collection::Directory> for Directory {
fn from(d: scanner::Directory) -> Self { fn from(d: collection::Directory) -> Self {
Self { Self {
path: d.path, path: d.virtual_path,
artist: d.artists.to_v7_string(), artist: None,
year: d.year, year: None,
album: d.album, album: None,
artwork: d.artwork, artwork: None,
date_added: d.date_added, date_added: 0,
} }
} }
} }


@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::app::{config, ddns, index, scanner, settings, thumbnail, user, vfs}; use crate::app::{collection, config, ddns, settings, thumbnail, user, vfs};
use std::convert::From; use std::convert::From;
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)] #[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
@ -234,11 +234,11 @@ pub enum CollectionFile {
Song(Song), Song(Song),
} }
impl From<index::CollectionFile> for CollectionFile { impl From<collection::File> for CollectionFile {
fn from(f: index::CollectionFile) -> Self { fn from(f: collection::File) -> Self {
match f { match f {
index::CollectionFile::Directory(d) => Self::Directory(d.into()), collection::File::Directory(d) => Self::Directory(d.into()),
index::CollectionFile::Song(s) => Self::Song(s.into()), collection::File::Song(s) => Self::Song(s.into()),
} }
} }
} }
@ -274,10 +274,10 @@ pub struct Song {
pub labels: Vec<String>, pub labels: Vec<String>,
} }
impl From<scanner::Song> for Song { impl From<collection::Song> for Song {
fn from(s: scanner::Song) -> Self { fn from(s: collection::Song) -> Self {
Self { Self {
path: s.path, path: s.virtual_path,
track_number: s.track_number, track_number: s.track_number,
disc_number: s.disc_number, disc_number: s.disc_number,
title: s.title, title: s.title,
@ -298,26 +298,12 @@ impl From<scanner::Song> for Song {
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Directory { pub struct Directory {
pub path: String, pub path: String,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub artists: Vec<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub year: Option<i64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub album: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub artwork: Option<String>,
pub date_added: i64,
} }
impl From<scanner::Directory> for Directory { impl From<collection::Directory> for Directory {
fn from(d: scanner::Directory) -> Self { fn from(d: collection::Directory) -> Self {
Self { Self {
path: d.path, path: d.virtual_path,
artists: d.artists.0,
year: d.year,
album: d.album,
artwork: d.artwork,
date_added: d.date_added,
} }
} }
} }


@ -1,7 +1,7 @@
use std::path::PathBuf; use std::path::PathBuf;
use thiserror::Error; use thiserror::Error;
use crate::app::{config, ddns, index, lastfm, playlist, settings, thumbnail, user, vfs}; use crate::app::{collection, config, ddns, lastfm, playlist, settings, thumbnail, user, vfs};
use crate::db; use crate::db;
#[derive(Error, Debug)] #[derive(Error, Debug)]
@ -24,6 +24,8 @@ pub enum APIError {
BrancaTokenEncoding, BrancaTokenEncoding,
#[error("Database error:\n\n{0}")] #[error("Database error:\n\n{0}")]
Database(sqlx::Error), Database(sqlx::Error),
#[error("Directory not found: {0}")]
DirectoryNotFound(PathBuf),
#[error("DDNS update query failed with HTTP status {0}")] #[error("DDNS update query failed with HTTP status {0}")]
DdnsUpdateQueryFailed(u16), DdnsUpdateQueryFailed(u16),
#[error("Cannot delete your own account")] #[error("Cannot delete your own account")]
@ -60,8 +62,6 @@ pub enum APIError {
PlaylistNotFound, PlaylistNotFound,
#[error("Settings error:\n\n{0}")] #[error("Settings error:\n\n{0}")]
Settings(settings::Error), Settings(settings::Error),
#[error("Song not found")]
SongMetadataNotFound,
#[error("Could not decode thumbnail from flac file `{0}`:\n\n{1}")] #[error("Could not decode thumbnail from flac file `{0}`:\n\n{1}")]
ThumbnailFlacDecoding(PathBuf, metaflac::Error), ThumbnailFlacDecoding(PathBuf, metaflac::Error),
#[error("Thumbnail file could not be opened")] #[error("Thumbnail file could not be opened")]
@ -82,6 +82,19 @@ pub enum APIError {
VFSPathNotFound, VFSPathNotFound,
} }
impl From<collection::Error> for APIError {
fn from(error: collection::Error) -> APIError {
match error {
collection::Error::DirectoryNotFound(d) => APIError::DirectoryNotFound(d),
collection::Error::Database(e) => APIError::Database(e),
collection::Error::DatabaseConnection(e) => e.into(),
collection::Error::Vfs(e) => e.into(),
collection::Error::ThreadPoolBuilder(_) => APIError::Internal,
collection::Error::ThreadJoining(_) => APIError::Internal,
}
}
}
impl From<config::Error> for APIError { impl From<config::Error> for APIError {
fn from(error: config::Error) -> APIError { fn from(error: config::Error) -> APIError {
match error { match error {
@ -107,17 +120,6 @@ impl From<playlist::Error> for APIError {
} }
} }
impl From<index::Error> for APIError {
fn from(error: index::Error) -> APIError {
match error {
index::Error::Database(e) => APIError::Database(e),
index::Error::DatabaseConnection(e) => e.into(),
index::Error::SongNotFound(_) => APIError::SongMetadataNotFound,
index::Error::Vfs(e) => e.into(),
}
}
}
impl From<settings::Error> for APIError { impl From<settings::Error> for APIError {
fn from(error: settings::Error) -> APIError { fn from(error: settings::Error) -> APIError {
match error { match error {
@ -153,7 +155,6 @@ impl From<user::Error> for APIError {
impl From<vfs::Error> for APIError { impl From<vfs::Error> for APIError {
fn from(error: vfs::Error) -> APIError { fn from(error: vfs::Error) -> APIError {
match error { match error {
vfs::Error::CouldNotMapToVirtualPath(_) => APIError::VFSPathNotFound,
vfs::Error::CouldNotMapToRealPath(_) => APIError::VFSPathNotFound, vfs::Error::CouldNotMapToRealPath(_) => APIError::VFSPathNotFound,
vfs::Error::Database(e) => APIError::Database(e), vfs::Error::Database(e) => APIError::Database(e),
vfs::Error::DatabaseConnection(e) => e.into(), vfs::Error::DatabaseConnection(e) => e.into(),


@ -27,8 +27,8 @@ pub enum AudioFormat {
M4B, M4B,
} }
pub fn get_audio_format(path: &Path) -> Option<AudioFormat> { pub fn get_audio_format<P: AsRef<Path>>(path: P) -> Option<AudioFormat> {
let extension = match path.extension() { let extension = match path.as_ref().extension() {
Some(e) => e, Some(e) => e,
_ => return None, _ => return None,
}; };