Split index into scanner (populates DB) and index (reads from DB)

parent 9d8d543494
commit 3362a828cd

22 changed files with 489 additions and 374 deletions
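The intent of the split: the scanner owns the write path (walking the mounted directories and populating the SQLite tables), while the index owns the read path (browse, flatten, search, get_song). As a rough analogy only — this is not code from the commit, and a shared Vec stands in for the database — the division of labor looks like this:

    use std::sync::{Arc, Mutex};

    // Stand-in for the SQLite database that both halves share.
    type Db = Arc<Mutex<Vec<String>>>;

    // Write path: discovers content and inserts it.
    struct Scanner { db: Db }
    impl Scanner {
        fn scan(&self, found: &[&str]) {
            let mut rows = self.db.lock().unwrap();
            rows.extend(found.iter().map(|s| s.to_string()));
        }
    }

    // Read path: only queries what the scanner stored.
    struct Index { db: Db }
    impl Index {
        fn browse(&self) -> Vec<String> {
            self.db.lock().unwrap().clone()
        }
    }

    fn main() {
        let db: Db = Arc::new(Mutex::new(Vec::new()));
        let scanner = Scanner { db: db.clone() };
        let index = Index { db: db.clone() };
        scanner.scan(&["Khemmis/Hunted", "Tobokegao/Picnic"]);
        println!("{:?}", index.browse());
    }
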
@@ -9,6 +9,7 @@ pub mod ddns;
 pub mod index;
 pub mod lastfm;
 pub mod playlist;
+pub mod scanner;
 pub mod settings;
 pub mod thumbnail;
 pub mod user;
@@ -34,6 +35,7 @@ pub struct App {
	pub port: u16,
	pub web_dir_path: PathBuf,
	pub swagger_dir_path: PathBuf,
+	pub scanner: scanner::Scanner,
	pub index: index::Index,
	pub config_manager: config::Manager,
	pub ddns_manager: ddns::Manager,
@@ -62,7 +64,9 @@ impl App {
	let auth_secret = settings_manager.get_auth_secret().await?;
	let ddns_manager = ddns::Manager::new(db.clone());
	let user_manager = user::Manager::new(db.clone(), auth_secret);
-	let index = index::Index::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
+	let scanner =
+		scanner::Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
+	let index = index::Index::new(db.clone(), vfs_manager.clone());
	let config_manager = config::Manager::new(
		settings_manager.clone(),
		user_manager.clone(),
@@ -82,6 +86,7 @@ impl App {
	port,
	web_dir_path: paths.web_dir_path,
	swagger_dir_path: paths.swagger_dir_path,
+	scanner,
	index,
	config_manager,
	ddns_manager,

@@ -1,74 +1,21 @@
-use log::error;
-use std::sync::Arc;
-use std::time::Duration;
-use tokio::sync::Notify;
-
-use crate::app::{settings, vfs};
+use crate::app::vfs;
 use crate::db::DB;
 
-mod metadata;
 mod query;
 #[cfg(test)]
 mod test;
 mod types;
-mod update;
 
-pub use self::query::*;
 pub use self::types::*;
 
 #[derive(Clone)]
 pub struct Index {
 	db: DB,
 	vfs_manager: vfs::Manager,
-	settings_manager: settings::Manager,
-	pending_reindex: Arc<Notify>,
 }
 
 impl Index {
-	pub fn new(db: DB, vfs_manager: vfs::Manager, settings_manager: settings::Manager) -> Self {
-		let index = Self {
-			db,
-			vfs_manager,
-			settings_manager,
-			pending_reindex: Arc::new(Notify::new()),
-		};
-
-		tokio::spawn({
-			let index = index.clone();
-			async move {
-				loop {
-					index.pending_reindex.notified().await;
-					if let Err(e) = index.update().await {
-						error!("Error while updating index: {}", e);
-					}
-				}
-			}
-		});
-
-		index
-	}
-
-	pub fn trigger_reindex(&self) {
-		self.pending_reindex.notify_one();
-	}
-
-	pub fn begin_periodic_updates(&self) {
-		tokio::spawn({
-			let index = self.clone();
-			async move {
-				loop {
-					index.trigger_reindex();
-					let sleep_duration = index
-						.settings_manager
-						.get_index_sleep_duration()
-						.await
-						.unwrap_or_else(|e| {
-							error!("Could not retrieve index sleep duration: {}", e);
-							Duration::from_secs(1800)
-						});
-					tokio::time::sleep(sleep_duration).await;
-				}
-			}
-		});
-	}
+	pub fn new(db: DB, vfs_manager: vfs::Manager) -> Self {
+		Self { db, vfs_manager }
+	}
 }

@@ -1,22 +1,10 @@
-use std::path::{Path, PathBuf};
+use std::path::Path;
 
 use super::*;
-use crate::db;
-
-#[derive(thiserror::Error, Debug)]
-pub enum QueryError {
-	#[error(transparent)]
-	Database(#[from] sqlx::Error),
-	#[error(transparent)]
-	DatabaseConnection(#[from] db::Error),
-	#[error("Song was not found: `{0}`")]
-	SongNotFound(PathBuf),
-	#[error(transparent)]
-	Vfs(#[from] vfs::Error),
-}
+use crate::app::scanner;
 
 impl Index {
-	pub async fn browse<P>(&self, virtual_path: P) -> Result<Vec<CollectionFile>, QueryError>
+	pub async fn browse<P>(&self, virtual_path: P) -> Result<Vec<CollectionFile>, Error>
 	where
 		P: AsRef<Path>,
 	{
@@ -26,10 +14,12 @@ impl Index {
 
 		if virtual_path.as_ref().components().count() == 0 {
 			// Browse top-level
-			let real_directories =
-				sqlx::query_as!(Directory, "SELECT * FROM directories WHERE parent IS NULL")
-					.fetch_all(connection.as_mut())
-					.await?;
+			let real_directories = sqlx::query_as!(
+				scanner::Directory,
+				"SELECT * FROM directories WHERE parent IS NULL"
+			)
+			.fetch_all(connection.as_mut())
+			.await?;
 			let virtual_directories = real_directories
 				.into_iter()
 				.filter_map(|d| d.virtualize(&vfs));
@@ -40,7 +30,7 @@ impl Index {
 			let real_path_string = real_path.as_path().to_string_lossy().into_owned();
 
 			let real_directories = sqlx::query_as!(
-				Directory,
+				scanner::Directory,
 				"SELECT * FROM directories WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC",
 				real_path_string
 			)
@@ -54,7 +44,7 @@ impl Index {
 			output.extend(virtual_directories.map(CollectionFile::Directory));
 
 			let real_songs = sqlx::query_as!(
-				Song,
+				scanner::Song,
 				"SELECT * FROM songs WHERE parent = $1 ORDER BY path COLLATE NOCASE ASC",
 				real_path_string
 			)
@@ -68,7 +58,7 @@ impl Index {
 		Ok(output)
 	}
 
-	pub async fn flatten<P>(&self, virtual_path: P) -> Result<Vec<Song>, QueryError>
+	pub async fn flatten<P>(&self, virtual_path: P) -> Result<Vec<scanner::Song>, Error>
 	where
 		P: AsRef<Path>,
 	{
@@ -83,28 +73,31 @@ impl Index {
 				path_buf.as_path().to_string_lossy().into_owned()
 			};
 			sqlx::query_as!(
-				Song,
+				scanner::Song,
 				"SELECT * FROM songs WHERE path LIKE $1 ORDER BY path COLLATE NOCASE ASC",
 				song_path_filter
 			)
 			.fetch_all(connection.as_mut())
 			.await?
 		} else {
-			sqlx::query_as!(Song, "SELECT * FROM songs ORDER BY path COLLATE NOCASE ASC")
-				.fetch_all(connection.as_mut())
-				.await?
+			sqlx::query_as!(
+				scanner::Song,
+				"SELECT * FROM songs ORDER BY path COLLATE NOCASE ASC"
+			)
+			.fetch_all(connection.as_mut())
+			.await?
 		};
 
 		let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
 		Ok(virtual_songs.collect::<Vec<_>>())
 	}
 
-	pub async fn get_random_albums(&self, count: i64) -> Result<Vec<Directory>, QueryError> {
+	pub async fn get_random_albums(&self, count: i64) -> Result<Vec<scanner::Directory>, Error> {
 		let vfs = self.vfs_manager.get_vfs().await?;
 		let mut connection = self.db.connect().await?;
 
 		let real_directories = sqlx::query_as!(
-			Directory,
+			scanner::Directory,
 			"SELECT * FROM directories WHERE album IS NOT NULL ORDER BY RANDOM() DESC LIMIT $1",
 			count
 		)
@@ -117,12 +110,12 @@ impl Index {
 		Ok(virtual_directories.collect::<Vec<_>>())
 	}
 
-	pub async fn get_recent_albums(&self, count: i64) -> Result<Vec<Directory>, QueryError> {
+	pub async fn get_recent_albums(&self, count: i64) -> Result<Vec<scanner::Directory>, Error> {
 		let vfs = self.vfs_manager.get_vfs().await?;
 		let mut connection = self.db.connect().await?;
 
 		let real_directories = sqlx::query_as!(
-			Directory,
+			scanner::Directory,
 			"SELECT * FROM directories WHERE album IS NOT NULL ORDER BY date_added DESC LIMIT $1",
 			count
 		)
@@ -135,7 +128,7 @@ impl Index {
 		Ok(virtual_directories.collect::<Vec<_>>())
 	}
 
-	pub async fn search(&self, query: &str) -> Result<Vec<CollectionFile>, QueryError> {
+	pub async fn search(&self, query: &str) -> Result<Vec<CollectionFile>, Error> {
 		let vfs = self.vfs_manager.get_vfs().await?;
 		let mut connection = self.db.connect().await?;
 		let like_test = format!("%{}%", query);
@@ -144,7 +137,7 @@ impl Index {
 		// Find dirs with matching path and parent not matching
 		{
 			let real_directories = sqlx::query_as!(
-				Directory,
+				scanner::Directory,
 				"SELECT * FROM directories WHERE path LIKE $1 AND parent NOT LIKE $1",
 				like_test
 			)
@@ -161,7 +154,7 @@ impl Index {
 		// Find songs with matching title/album/artist and non-matching parent
 		{
 			let real_songs = sqlx::query_as!(
-				Song,
+				scanner::Song,
 				r#"
 				SELECT * FROM songs
 				WHERE ( path LIKE $1
@@ -185,7 +178,7 @@ impl Index {
 		Ok(output)
 	}
 
-	pub async fn get_song(&self, virtual_path: &Path) -> Result<Song, QueryError> {
+	pub async fn get_song(&self, virtual_path: &Path) -> Result<scanner::Song, Error> {
 		let vfs = self.vfs_manager.get_vfs().await?;
 		let mut connection = self.db.connect().await?;
 
@@ -193,7 +186,7 @@ impl Index {
 		let real_path_string = real_path.as_path().to_string_lossy();
 
 		let real_song = sqlx::query_as!(
-			Song,
+			scanner::Song,
 			"SELECT * FROM songs WHERE path = $1",
 			real_path_string
 		)
@@ -202,7 +195,7 @@ impl Index {
 
 		match real_song.virtualize(&vfs) {
 			Some(s) => Ok(s),
-			None => Err(QueryError::SongNotFound(real_path)),
+			None => Err(Error::SongNotFound(real_path)),
 		}
 	}
 }

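Every query above follows the same shape: fetch rows carrying their real on-disk paths, then map them into the virtual filesystem and silently drop anything that no longer resolves. A standalone sketch of that filter_map step — the VFS here is a hypothetical prefix map, not the Polaris vfs::VFS:

    use std::collections::HashMap;

    // Hypothetical stand-in for vfs::VFS: maps a real path prefix to a mount name.
    struct Vfs {
        mounts: HashMap<String, String>,
    }

    impl Vfs {
        fn real_to_virtual(&self, real: &str) -> Option<String> {
            self.mounts.iter().find_map(|(prefix, mount)| {
                real.strip_prefix(prefix).map(|rest| format!("{}{}", mount, rest))
            })
        }
    }

    fn main() {
        let vfs = Vfs {
            mounts: HashMap::from([("/music".to_string(), "root".to_string())]),
        };
        let real_songs = vec![
            "/music/Khemmis/Hunted/01.mp3".to_string(),
            "/elsewhere/x.mp3".to_string(),
        ];

        // Same pattern as `real_songs.into_iter().filter_map(|s| s.virtualize(&vfs))`:
        // rows outside any mount are dropped rather than reported as errors.
        let virtual_songs: Vec<String> = real_songs
            .into_iter()
            .filter_map(|s| vfs.real_to_virtual(&s))
            .collect();
        println!("{:?}", virtual_songs); // ["root/Khemmis/Hunted/01.mp3"]
    }
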
@@ -1,96 +1,18 @@
-use std::default::Default;
 use std::path::{Path, PathBuf};
 
 use super::*;
-use crate::app::test;
+use crate::app::{scanner, test};
 use crate::test_name;
 
 const TEST_MOUNT_NAME: &str = "root";
 
-#[tokio::test]
-async fn update_adds_new_content() {
-	let ctx = test::ContextBuilder::new(test_name!())
-		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
-		.build()
-		.await;
-
-	ctx.index.update().await.unwrap();
-	ctx.index.update().await.unwrap(); // Validates that subsequent updates don't run into conflicts
-
-	let mut connection = ctx.db.connect().await.unwrap();
-	let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
-		.fetch_all(connection.as_mut())
-		.await
-		.unwrap();
-	let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
-		.fetch_all(connection.as_mut())
-		.await
-		.unwrap();
-	assert_eq!(all_directories.len(), 6);
-	assert_eq!(all_songs.len(), 13);
-}
-
-#[tokio::test]
-async fn update_removes_missing_content() {
-	let builder = test::ContextBuilder::new(test_name!());
-
-	let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect();
-	let test_collection_dir: PathBuf = builder.test_directory.join("small-collection");
-
-	let copy_options = fs_extra::dir::CopyOptions::new();
-	fs_extra::dir::copy(
-		original_collection_dir,
-		&builder.test_directory,
-		&copy_options,
-	)
-	.unwrap();
-
-	let ctx = builder
-		.mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap())
-		.build()
-		.await;
-
-	ctx.index.update().await.unwrap();
-
-	{
-		let mut connection = ctx.db.connect().await.unwrap();
-		let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
-			.fetch_all(connection.as_mut())
-			.await
-			.unwrap();
-		let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
-			.fetch_all(connection.as_mut())
-			.await
-			.unwrap();
-		assert_eq!(all_directories.len(), 6);
-		assert_eq!(all_songs.len(), 13);
-	}
-
-	let khemmis_directory = test_collection_dir.join("Khemmis");
-	std::fs::remove_dir_all(khemmis_directory).unwrap();
-	ctx.index.update().await.unwrap();
-	{
-		let mut connection = ctx.db.connect().await.unwrap();
-		let all_directories = sqlx::query_as!(Directory, "SELECT * FROM directories")
-			.fetch_all(connection.as_mut())
-			.await
-			.unwrap();
-		let all_songs = sqlx::query_as!(Song, "SELECT * FROM songs")
-			.fetch_all(connection.as_mut())
-			.await
-			.unwrap();
-		assert_eq!(all_directories.len(), 4);
-		assert_eq!(all_songs.len(), 8);
-	}
-}
-
 #[tokio::test]
 async fn can_browse_top_level() {
 	let ctx = test::ContextBuilder::new(test_name!())
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 
 	let root_path = Path::new(TEST_MOUNT_NAME);
 	let files = ctx.index.browse(Path::new("")).await.unwrap();
@@ -110,7 +32,7 @@ async fn can_browse_directory() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 
 	let files = ctx.index.browse(Path::new(TEST_MOUNT_NAME)).await.unwrap();
 
@@ -132,7 +54,7 @@ async fn can_flatten_root() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 	let songs = ctx.index.flatten(Path::new(TEST_MOUNT_NAME)).await.unwrap();
 	assert_eq!(songs.len(), 13);
 	assert_eq!(songs[0].title, Some("Above The Water".to_owned()));
@@ -144,7 +66,7 @@ async fn can_flatten_directory() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 	let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao"].iter().collect();
 	let songs = ctx.index.flatten(path).await.unwrap();
 	assert_eq!(songs.len(), 8);
@@ -156,7 +78,7 @@ async fn can_flatten_directory_with_shared_prefix() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 	let path: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect(); // Prefix of '(Picnic Remixes)'
 	let songs = ctx.index.flatten(path).await.unwrap();
 	assert_eq!(songs.len(), 7);
@@ -168,7 +90,7 @@ async fn can_get_random_albums() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 	let albums = ctx.index.get_random_albums(1).await.unwrap();
 	assert_eq!(albums.len(), 1);
 }
@@ -179,7 +101,7 @@ async fn can_get_recent_albums() {
 		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
 		.build()
 		.await;
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 	let albums = ctx.index.get_recent_albums(2).await.unwrap();
 	assert_eq!(albums.len(), 2);
 	assert!(albums[0].date_added >= albums[1].date_added);
@@ -192,7 +114,7 @@ async fn can_get_a_song() {
 		.build()
 		.await;
 
-	ctx.index.update().await.unwrap();
+	ctx.scanner.scan().await.unwrap();
 
 	let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
 	let song_virtual_path = picnic_virtual_dir.join("05 - シャーベット (Sherbet).mp3");
@@ -203,8 +125,11 @@ async fn can_get_a_song() {
 	assert_eq!(song.track_number, Some(5));
 	assert_eq!(song.disc_number, None);
 	assert_eq!(song.title, Some("シャーベット (Sherbet)".to_owned()));
-	assert_eq!(song.artists, MultiString(vec!["Tobokegao".to_owned()]));
-	assert_eq!(song.album_artists, MultiString(vec![]));
+	assert_eq!(
+		song.artists,
+		scanner::MultiString(vec!["Tobokegao".to_owned()])
+	);
+	assert_eq!(song.album_artists, scanner::MultiString(vec![]));
 	assert_eq!(song.album, Some("Picnic".to_owned()));
 	assert_eq!(song.year, Some(2016));
 	assert_eq!(
@@ -212,51 +137,3 @@ async fn can_get_a_song() {
 		Some(artwork_virtual_path.to_string_lossy().into_owned())
 	);
 }
-
-#[tokio::test]
-async fn indexes_embedded_artwork() {
-	let ctx = test::ContextBuilder::new(test_name!())
-		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
-		.build()
-		.await;
-
-	ctx.index.update().await.unwrap();
-
-	let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
-	let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3");
-
-	let song = ctx.index.get_song(&song_virtual_path).await.unwrap();
-	assert_eq!(
-		song.artwork,
-		Some(song_virtual_path.to_string_lossy().into_owned())
-	);
-}
-
-#[tokio::test]
-async fn album_art_pattern_is_case_insensitive() {
-	let ctx = test::ContextBuilder::new(test_name!())
-		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
-		.build()
-		.await;
-
-	let patterns = vec!["folder", "FOLDER"];
-
-	for pattern in patterns.into_iter() {
-		ctx.settings_manager
-			.amend(&settings::NewSettings {
-				album_art_pattern: Some(pattern.to_owned()),
-				..Default::default()
-			})
-			.await
-			.unwrap();
-		ctx.index.update().await.unwrap();
-
-		let hunted_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect();
-		let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg");
-		let song = &ctx.index.flatten(&hunted_virtual_dir).await.unwrap()[0];
-		assert_eq!(
-			song.artwork,
-			Some(artwork_virtual_path.to_string_lossy().into_owned())
-		);
-	}
-}

@@ -1,76 +1,24 @@
-use std::path::Path;
+use std::path::PathBuf;
 
-use crate::app::vfs::VFS;
-#[derive(Debug, PartialEq, Eq)]
-pub struct MultiString(pub Vec<String>);
+use crate::{
+	app::{scanner, vfs},
+	db,
+};
 
 #[derive(Debug, PartialEq, Eq)]
 pub enum CollectionFile {
-	Directory(Directory),
-	Song(Song),
+	Directory(scanner::Directory),
+	Song(scanner::Song),
 }
 
-#[derive(Debug, PartialEq, Eq)]
-pub struct Song {
-	pub id: i64,
-	pub path: String,
-	pub parent: String,
-	pub track_number: Option<i64>,
-	pub disc_number: Option<i64>,
-	pub title: Option<String>,
-	pub artists: MultiString,
-	pub album_artists: MultiString,
-	pub year: Option<i64>,
-	pub album: Option<String>,
-	pub artwork: Option<String>,
-	pub duration: Option<i64>,
-	pub lyricists: MultiString,
-	pub composers: MultiString,
-	pub genres: MultiString,
-	pub labels: MultiString,
-}
-
-impl Song {
-	pub fn virtualize(mut self, vfs: &VFS) -> Option<Song> {
-		self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
-			Ok(p) => p.to_string_lossy().into_owned(),
-			_ => return None,
-		};
-		if let Some(artwork_path) = self.artwork {
-			self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
-				Ok(p) => Some(p.to_string_lossy().into_owned()),
-				_ => None,
-			};
-		}
-		Some(self)
-	}
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct Directory {
-	pub id: i64,
-	pub path: String,
-	pub parent: Option<String>,
-	pub artists: MultiString,
-	pub year: Option<i64>,
-	pub album: Option<String>,
-	pub artwork: Option<String>,
-	pub date_added: i64,
-}
-
-impl Directory {
-	pub fn virtualize(mut self, vfs: &VFS) -> Option<Directory> {
-		self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
-			Ok(p) => p.to_string_lossy().into_owned(),
-			_ => return None,
-		};
-		if let Some(artwork_path) = self.artwork {
-			self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
-				Ok(p) => Some(p.to_string_lossy().into_owned()),
-				_ => None,
-			};
-		}
-		Some(self)
-	}
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+	#[error(transparent)]
+	Database(#[from] sqlx::Error),
+	#[error(transparent)]
+	DatabaseConnection(#[from] db::Error),
+	#[error("Song was not found: `{0}`")]
+	SongNotFound(PathBuf),
+	#[error(transparent)]
+	Vfs(#[from] vfs::Error),
+}

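The former QueryError is now a plain index::Error built with thiserror, so callers keep using `?` and the `#[from]` conversions. A minimal, self-contained illustration of that pattern — the wrapped error types here are placeholders, not the Polaris ones:

    use std::path::PathBuf;
    use thiserror::Error;

    #[derive(Error, Debug)]
    enum Error {
        // `#[from]` generates From<std::io::Error>, so `?` converts automatically.
        #[error(transparent)]
        Io(#[from] std::io::Error),
        #[error("Song was not found: `{0}`")]
        SongNotFound(PathBuf),
    }

    fn read_song(path: &str) -> Result<Vec<u8>, Error> {
        if path.is_empty() {
            return Err(Error::SongNotFound(PathBuf::from(path)));
        }
        Ok(std::fs::read(path)?) // io::Error -> Error via #[from]
    }

    fn main() {
        match read_song("") {
            Ok(bytes) => println!("{} bytes", bytes.len()),
            Err(e) => println!("error: {}", e),
        }
    }
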
@@ -3,7 +3,7 @@ use std::path::Path;
 use user::AuthToken;
 
 use crate::app::{
-	index::{Index, QueryError},
+	index::{self, Index},
 	user,
 };
 
@@ -19,7 +19,7 @@ pub enum Error {
 	#[error("Failed to emit last.fm now playing update")]
 	NowPlaying(rustfm_scrobble::ScrobblerError),
 	#[error(transparent)]
-	Query(#[from] QueryError),
+	Query(#[from] index::Error),
 	#[error(transparent)]
 	User(#[from] user::Error),
 }

@@ -1,7 +1,7 @@
 use core::clone::Clone;
 use sqlx::{Acquire, QueryBuilder, Sqlite};
 
-use crate::app::index::Song;
+use crate::app::scanner::Song;
 use crate::app::vfs;
 use crate::db::{self, DB};
 
@@ -237,7 +237,7 @@ mod test {
			.build()
			.await;
 
-		ctx.index.update().await.unwrap();
+		ctx.scanner.scan().await.unwrap();
 
		let playlist_content: Vec<String> = ctx
			.index
@@ -302,7 +302,7 @@ mod test {
			.build()
			.await;
 
-		ctx.index.update().await.unwrap();
+		ctx.scanner.scan().await.unwrap();
 
		let playlist_content: Vec<String> = ctx
			.index

src/app/scanner.rs (new file, 123 lines)
@@ -0,0 +1,123 @@
+use log::{error, info};
+use std::sync::Arc;
+use std::time::{Duration, Instant};
+use tokio::sync::Notify;
+
+use crate::app::{settings, vfs};
+use crate::db::DB;
+
+mod cleaner;
+mod collector;
+mod inserter;
+mod metadata;
+#[cfg(test)]
+mod test;
+mod traverser;
+mod types;
+
+pub use self::types::*;
+
+#[derive(Clone)]
+pub struct Scanner {
+	db: DB,
+	vfs_manager: vfs::Manager,
+	settings_manager: settings::Manager,
+	pending_scan: Arc<Notify>,
+}
+
+impl Scanner {
+	pub fn new(db: DB, vfs_manager: vfs::Manager, settings_manager: settings::Manager) -> Self {
+		let scanner = Self {
+			db,
+			vfs_manager,
+			settings_manager,
+			pending_scan: Arc::new(Notify::new()),
+		};
+
+		tokio::spawn({
+			let scanner = scanner.clone();
+			async move {
+				loop {
+					scanner.pending_scan.notified().await;
+					if let Err(e) = scanner.scan().await {
+						error!("Error while updating index: {}", e);
+					}
+				}
+			}
+		});
+
+		scanner
+	}
+
+	pub fn trigger_scan(&self) {
+		self.pending_scan.notify_one();
+	}
+
+	pub fn begin_periodic_scans(&self) {
+		tokio::spawn({
+			let index = self.clone();
+			async move {
+				loop {
+					index.trigger_scan();
+					let sleep_duration = index
+						.settings_manager
+						.get_index_sleep_duration()
+						.await
+						.unwrap_or_else(|e| {
+							error!("Could not retrieve index sleep duration: {}", e);
+							Duration::from_secs(1800)
+						});
+					tokio::time::sleep(sleep_duration).await;
+				}
+			}
+		});
+	}
+
+	pub async fn scan(&self) -> Result<(), types::Error> {
+		let start = Instant::now();
+		info!("Beginning library index update");
+
+		let album_art_pattern = self
+			.settings_manager
+			.get_index_album_art_pattern()
+			.await
+			.ok();
+
+		let cleaner = cleaner::Cleaner::new(self.db.clone(), self.vfs_manager.clone());
+		cleaner.clean().await?;
+
+		let (insert_sender, insert_receiver) = tokio::sync::mpsc::unbounded_channel();
+		let insertion = tokio::spawn({
+			let db = self.db.clone();
+			async {
+				let mut inserter = inserter::Inserter::new(db, insert_receiver);
+				inserter.insert().await;
+			}
+		});
+
+		let (collect_sender, collect_receiver) = crossbeam_channel::unbounded();
+		let collection = tokio::task::spawn_blocking(|| {
+			let collector =
+				collector::Collector::new(collect_receiver, insert_sender, album_art_pattern);
+			collector.collect();
+		});
+
+		let vfs = self.vfs_manager.get_vfs().await?;
+		let traversal = tokio::task::spawn_blocking(move || {
+			let mounts = vfs.mounts();
+			let traverser = traverser::Traverser::new(collect_sender);
+			traverser.traverse(mounts.iter().map(|p| p.source.clone()).collect());
+		});
+
+		traversal.await.unwrap();
+		collection.await.unwrap();
+		insertion.await.unwrap();
+
+		info!(
+			"Library index update took {} seconds",
+			start.elapsed().as_millis() as f32 / 1000.0
+		);
+
+		Ok(())
+	}
+}

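Scanner keeps the trigger machinery the old Index had: a shared tokio Notify that trigger_scan pokes, a background task that waits on it, and a periodic loop that requests a scan and then sleeps. The pattern in isolation, reduced to a runnable sketch (the one-second sleep stands in for get_index_sleep_duration):

    use std::sync::Arc;
    use std::time::Duration;
    use tokio::sync::Notify;

    #[tokio::main]
    async fn main() {
        let pending_scan = Arc::new(Notify::new());

        // Worker: wait until someone asks for a scan, then run it.
        tokio::spawn({
            let pending_scan = pending_scan.clone();
            async move {
                loop {
                    pending_scan.notified().await;
                    println!("scanning…");
                }
            }
        });

        // Periodic trigger: request a scan, then sleep for the configured interval.
        tokio::spawn({
            let pending_scan = pending_scan.clone();
            async move {
                loop {
                    pending_scan.notify_one();
                    tokio::time::sleep(Duration::from_secs(1)).await;
                }
            }
        });

        // Let the demo run briefly instead of serving requests forever.
        tokio::time::sleep(Duration::from_secs(3)).await;
    }
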
@@ -1,9 +1,10 @@
 use log::error;
 use regex::Regex;
 
-use crate::app::index::MultiString;
+use crate::app::scanner::MultiString;
 
-use super::*;
+use super::inserter;
+use super::traverser;
 
 pub struct Collector {
	receiver: crossbeam_channel::Receiver<traverser::Directory>,

@@ -1,14 +1,8 @@
-use std::borrow::Cow;
-
 use log::error;
-use sqlx::{
-	encode::IsNull,
-	sqlite::{SqliteArgumentValue, SqliteTypeInfo},
-	QueryBuilder, Sqlite,
-};
+use sqlx::{QueryBuilder, Sqlite};
 use tokio::sync::mpsc::UnboundedReceiver;
 
-use crate::{app::index::MultiString, db::DB};
+use crate::{app::scanner::MultiString, db::DB};
 
 const INDEX_BUILDING_INSERT_BUFFER_SIZE: usize = 1000; // Insertions in each transaction
 
@@ -52,39 +46,6 @@ pub struct Inserter {
	db: DB,
 }
 
-static MULTI_STRING_SEPARATOR: &str = "\u{000C}";
-
-impl<'q> sqlx::Encode<'q, Sqlite> for MultiString {
-	fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'q>>) -> IsNull {
-		if self.0.is_empty() {
-			IsNull::Yes
-		} else {
-			let joined = self.0.join(MULTI_STRING_SEPARATOR);
-			args.push(SqliteArgumentValue::Text(Cow::Owned(joined)));
-			IsNull::No
-		}
-	}
-}
-
-impl From<Option<String>> for MultiString {
-	fn from(value: Option<String>) -> Self {
-		match value {
-			None => MultiString(Vec::new()),
-			Some(s) => MultiString(
-				s.split(MULTI_STRING_SEPARATOR)
-					.map(|s| s.to_string())
-					.collect(),
-			),
-		}
-	}
-}
-
-impl sqlx::Type<Sqlite> for MultiString {
-	fn type_info() -> SqliteTypeInfo {
-		<&str as sqlx::Type<Sqlite>>::type_info()
-	}
-}
-
 impl Inserter {
	pub fn new(db: DB, receiver: UnboundedReceiver<Item>) -> Self {
		let new_directories = Vec::with_capacity(INDEX_BUILDING_INSERT_BUFFER_SIZE);

@@ -1,11 +1,6 @@
 use log::{error, info};
 use std::time;
 
-mod cleaner;
-mod collector;
-mod inserter;
-mod traverser;
-
 use crate::app::index::Index;
 use crate::app::vfs;
 use crate::db;
@@ -27,8 +22,8 @@ pub enum Error {
	Vfs(#[from] vfs::Error),
 }
 
-impl Index {
-	pub async fn update(&self) -> Result<(), Error> {
+impl Scanner {
+	pub async fn scan(&self) -> Result<(), Error> {
		let start = time::Instant::now();
		info!("Beginning library index update");
 

src/app/scanner/test.rs (new file, 133 lines)
@@ -0,0 +1,133 @@
+use std::path::PathBuf;
+
+use crate::{
+	app::{scanner, settings, test},
+	test_name,
+};
+
+const TEST_MOUNT_NAME: &str = "root";
+
+#[tokio::test]
+async fn scan_adds_new_content() {
+	let ctx = test::ContextBuilder::new(test_name!())
+		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
+		.build()
+		.await;
+
+	ctx.scanner.scan().await.unwrap();
+	ctx.scanner.scan().await.unwrap(); // Validates that subsequent updates don't run into conflicts
+
+	let mut connection = ctx.db.connect().await.unwrap();
+	let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
+		.fetch_all(connection.as_mut())
+		.await
+		.unwrap();
+	let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
+		.fetch_all(connection.as_mut())
+		.await
+		.unwrap();
+	assert_eq!(all_directories.len(), 6);
+	assert_eq!(all_songs.len(), 13);
+}
+
+#[tokio::test]
+async fn scan_removes_missing_content() {
+	let builder = test::ContextBuilder::new(test_name!());
+
+	let original_collection_dir: PathBuf = ["test-data", "small-collection"].iter().collect();
+	let test_collection_dir: PathBuf = builder.test_directory.join("small-collection");
+
+	let copy_options = fs_extra::dir::CopyOptions::new();
+	fs_extra::dir::copy(
+		original_collection_dir,
+		&builder.test_directory,
+		&copy_options,
+	)
+	.unwrap();
+
+	let ctx = builder
+		.mount(TEST_MOUNT_NAME, test_collection_dir.to_str().unwrap())
+		.build()
+		.await;
+
+	ctx.scanner.scan().await.unwrap();
+
+	{
+		let mut connection = ctx.db.connect().await.unwrap();
+		let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
+			.fetch_all(connection.as_mut())
+			.await
+			.unwrap();
+		let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
+			.fetch_all(connection.as_mut())
+			.await
+			.unwrap();
+		assert_eq!(all_directories.len(), 6);
+		assert_eq!(all_songs.len(), 13);
+	}
+
+	let khemmis_directory = test_collection_dir.join("Khemmis");
+	std::fs::remove_dir_all(khemmis_directory).unwrap();
+	ctx.scanner.scan().await.unwrap();
+	{
+		let mut connection = ctx.db.connect().await.unwrap();
+		let all_directories = sqlx::query_as!(scanner::Directory, "SELECT * FROM directories")
+			.fetch_all(connection.as_mut())
+			.await
+			.unwrap();
+		let all_songs = sqlx::query_as!(scanner::Song, "SELECT * FROM songs")
+			.fetch_all(connection.as_mut())
+			.await
+			.unwrap();
+		assert_eq!(all_directories.len(), 4);
+		assert_eq!(all_songs.len(), 8);
+	}
+}
+
+#[tokio::test]
+async fn finds_embedded_artwork() {
+	let ctx = test::ContextBuilder::new(test_name!())
+		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
+		.build()
+		.await;
+
+	ctx.scanner.scan().await.unwrap();
+
+	let picnic_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Tobokegao", "Picnic"].iter().collect();
+	let song_virtual_path = picnic_virtual_dir.join("07 - なぜ (Why).mp3");
+
+	let song = ctx.index.get_song(&song_virtual_path).await.unwrap();
+	assert_eq!(
+		song.artwork,
+		Some(song_virtual_path.to_string_lossy().into_owned())
+	);
+}
+
+#[tokio::test]
+async fn album_art_pattern_is_case_insensitive() {
+	let ctx = test::ContextBuilder::new(test_name!())
+		.mount(TEST_MOUNT_NAME, "test-data/small-collection")
+		.build()
+		.await;
+
+	let patterns = vec!["folder", "FOLDER"];
+
+	for pattern in patterns.into_iter() {
+		ctx.settings_manager
+			.amend(&settings::NewSettings {
+				album_art_pattern: Some(pattern.to_owned()),
+				..Default::default()
+			})
+			.await
+			.unwrap();
+		ctx.scanner.scan().await.unwrap();
+
+		let hunted_virtual_dir: PathBuf = [TEST_MOUNT_NAME, "Khemmis", "Hunted"].iter().collect();
+		let artwork_virtual_path = hunted_virtual_dir.join("Folder.jpg");
+		let song = &ctx.index.flatten(&hunted_virtual_dir).await.unwrap()[0];
+		assert_eq!(
+			song.artwork,
+			Some(artwork_virtual_path.to_string_lossy().into_owned())
+		);
+	}
+}

@@ -9,7 +9,7 @@ use std::sync::Arc;
 use std::thread;
 use std::time::Duration;
 
-use crate::app::index::metadata::{self, SongMetadata};
+use crate::app::scanner::metadata::{self, SongMetadata};
 
 #[derive(Debug)]
 pub struct Song {

src/app/scanner/types.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
+use std::{borrow::Cow, path::Path};
+
+use sqlx::{
+	encode::IsNull,
+	sqlite::{SqliteArgumentValue, SqliteTypeInfo},
+	Sqlite,
+};
+
+use crate::{
+	app::vfs::{self, VFS},
+	db,
+};
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+	#[error(transparent)]
+	IndexClean(#[from] super::cleaner::Error),
+	#[error(transparent)]
+	Database(#[from] sqlx::Error),
+	#[error(transparent)]
+	DatabaseConnection(#[from] db::Error),
+	#[error(transparent)]
+	Vfs(#[from] vfs::Error),
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct MultiString(pub Vec<String>);
+
+static MULTI_STRING_SEPARATOR: &str = "\u{000C}";
+
+impl<'q> sqlx::Encode<'q, Sqlite> for MultiString {
+	fn encode_by_ref(&self, args: &mut Vec<SqliteArgumentValue<'q>>) -> IsNull {
+		if self.0.is_empty() {
+			IsNull::Yes
+		} else {
+			let joined = self.0.join(MULTI_STRING_SEPARATOR);
+			args.push(SqliteArgumentValue::Text(Cow::Owned(joined)));
+			IsNull::No
+		}
+	}
+}
+
+impl From<Option<String>> for MultiString {
+	fn from(value: Option<String>) -> Self {
+		match value {
+			None => MultiString(Vec::new()),
+			Some(s) => MultiString(
+				s.split(MULTI_STRING_SEPARATOR)
+					.map(|s| s.to_string())
+					.collect(),
+			),
+		}
+	}
+}
+
+impl sqlx::Type<Sqlite> for MultiString {
+	fn type_info() -> SqliteTypeInfo {
+		<&str as sqlx::Type<Sqlite>>::type_info()
+	}
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Song {
+	pub id: i64,
+	pub path: String,
+	pub parent: String,
+	pub track_number: Option<i64>,
+	pub disc_number: Option<i64>,
+	pub title: Option<String>,
+	pub artists: MultiString,
+	pub album_artists: MultiString,
+	pub year: Option<i64>,
+	pub album: Option<String>,
+	pub artwork: Option<String>,
+	pub duration: Option<i64>,
+	pub lyricists: MultiString,
+	pub composers: MultiString,
+	pub genres: MultiString,
+	pub labels: MultiString,
+}
+
+impl Song {
+	pub fn virtualize(mut self, vfs: &VFS) -> Option<Song> {
+		self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
+			Ok(p) => p.to_string_lossy().into_owned(),
+			_ => return None,
+		};
+		if let Some(artwork_path) = self.artwork {
+			self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
+				Ok(p) => Some(p.to_string_lossy().into_owned()),
+				_ => None,
+			};
+		}
+		Some(self)
+	}
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct Directory {
+	pub id: i64,
+	pub path: String,
+	pub parent: Option<String>,
+	pub artists: MultiString,
+	pub year: Option<i64>,
+	pub album: Option<String>,
+	pub artwork: Option<String>,
+	pub date_added: i64,
+}
+
+impl Directory {
+	pub fn virtualize(mut self, vfs: &VFS) -> Option<Directory> {
+		self.path = match vfs.real_to_virtual(Path::new(&self.path)) {
+			Ok(p) => p.to_string_lossy().into_owned(),
+			_ => return None,
+		};
+		if let Some(artwork_path) = self.artwork {
+			self.artwork = match vfs.real_to_virtual(Path::new(&artwork_path)) {
+				Ok(p) => Some(p.to_string_lossy().into_owned()),
+				_ => None,
+			};
+		}
+		Some(self)
+	}
+}

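MultiString is stored in a single TEXT column: the values are joined with the U+000C form-feed separator on encode and split again when read back, and an empty list encodes as NULL. The round-trip, stripped of the sqlx traits:

    const MULTI_STRING_SEPARATOR: &str = "\u{000C}";

    fn encode(values: &[String]) -> Option<String> {
        if values.is_empty() {
            None // stored as NULL, like IsNull::Yes in the Encode impl
        } else {
            Some(values.join(MULTI_STRING_SEPARATOR))
        }
    }

    fn decode(column: Option<String>) -> Vec<String> {
        match column {
            None => Vec::new(),
            Some(s) => s.split(MULTI_STRING_SEPARATOR).map(str::to_string).collect(),
        }
    }

    fn main() {
        let artists = vec!["Tobokegao".to_string(), "Khemmis".to_string()];
        let stored = encode(&artists);
        assert_eq!(decode(stored), artists);
        assert_eq!(decode(None), Vec::<String>::new());
        println!("round-trip ok");
    }
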
@@ -1,11 +1,12 @@
 use std::path::PathBuf;
 
-use crate::app::{config, ddns, index::Index, playlist, settings, user, vfs};
+use crate::app::{config, ddns, index::Index, playlist, scanner::Scanner, settings, user, vfs};
 use crate::db::DB;
 use crate::test::*;
 
 pub struct Context {
	pub db: DB,
+	pub scanner: Scanner,
	pub index: Index,
	pub config_manager: config::Manager,
	pub ddns_manager: ddns::Manager,
@@ -65,13 +66,15 @@ impl ContextBuilder {
		vfs_manager.clone(),
		ddns_manager.clone(),
	);
-	let index = Index::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
+	let scanner = Scanner::new(db.clone(), vfs_manager.clone(), settings_manager.clone());
+	let index = Index::new(db.clone(), vfs_manager.clone());
	let playlist_manager = playlist::Manager::new(db.clone(), vfs_manager.clone());
 
	config_manager.apply(&self.config).await.unwrap();
 
	Context {
		db,
+		scanner,
		index,
		config_manager,
		ddns_manager,

@@ -144,7 +144,7 @@ fn main() -> Result<(), Error> {
 async fn async_main(cli_options: CLIOptions, paths: paths::Paths) -> Result<(), Error> {
	// Create and run app
	let app = app::App::new(cli_options.port.unwrap_or(5050), paths).await?;
-	app.index.begin_periodic_updates();
+	app.scanner.begin_periodic_scans();
	app.ddns_manager.begin_periodic_updates();
 
	// Start server

@@ -62,6 +62,12 @@ impl FromRef<App> for app::user::Manager {
	}
 }
 
+impl FromRef<App> for app::scanner::Scanner {
+	fn from_ref(app: &App) -> Self {
+		app.scanner.clone()
+	}
+}
+
 impl FromRef<App> for app::settings::Manager {
	fn from_ref(app: &App) -> Self {
		app.settings_manager.clone()

@@ -11,7 +11,7 @@ use base64::{prelude::BASE64_STANDARD_NO_PAD, Engine};
 use percent_encoding::percent_decode_str;
 
 use crate::{
-	app::{config, ddns, index, lastfm, playlist, settings, thumbnail, user, vfs, App},
+	app::{config, ddns, index, lastfm, playlist, scanner, settings, thumbnail, user, vfs, App},
	server::{dto, error::APIError},
 };
 
@@ -246,9 +246,9 @@ async fn put_preferences(
 
 async fn post_trigger_index(
	_admin_rights: AdminRights,
-	State(index): State<index::Index>,
+	State(scanner): State<scanner::Scanner>,
 ) -> Result<(), APIError> {
-	index.trigger_reindex();
+	scanner.trigger_scan();
	Ok(())
 }
 

@@ -1,6 +1,6 @@
 use serde::{Deserialize, Serialize};
 
-use crate::app::{config, ddns, index, settings, thumbnail, user, vfs};
+use crate::app::{config, ddns, index, scanner, settings, thumbnail, user, vfs};
 use std::convert::From;
 
 pub const API_MAJOR_VERSION: i32 = 8;
@@ -277,8 +277,8 @@ pub struct Song {
	pub labels: Vec<String>,
 }
 
-impl From<index::Song> for Song {
-	fn from(s: index::Song) -> Self {
+impl From<scanner::Song> for Song {
+	fn from(s: scanner::Song) -> Self {
		Self {
			path: s.path,
			track_number: s.track_number,
@@ -312,8 +312,8 @@ pub struct Directory {
	pub date_added: i64,
 }
 
-impl From<index::Directory> for Directory {
-	fn from(d: index::Directory) -> Self {
+impl From<scanner::Directory> for Directory {
+	fn from(d: scanner::Directory) -> Self {
		Self {
			path: d.path,
			artists: d.artists.0,

@@ -1,8 +1,7 @@
 use std::path::PathBuf;
 use thiserror::Error;
 
-use crate::app::index::QueryError;
-use crate::app::{config, ddns, lastfm, playlist, settings, thumbnail, user, vfs};
+use crate::app::{config, ddns, index, lastfm, playlist, settings, thumbnail, user, vfs};
 use crate::db;
 
 #[derive(Error, Debug)]
@@ -102,13 +101,13 @@ impl From<playlist::Error> for APIError {
	}
 }
 
-impl From<QueryError> for APIError {
-	fn from(error: QueryError) -> APIError {
+impl From<index::Error> for APIError {
+	fn from(error: index::Error) -> APIError {
		match error {
-			QueryError::Database(e) => APIError::Database(e),
-			QueryError::DatabaseConnection(e) => e.into(),
-			QueryError::SongNotFound(_) => APIError::SongMetadataNotFound,
-			QueryError::Vfs(e) => e.into(),
+			index::Error::Database(e) => APIError::Database(e),
+			index::Error::DatabaseConnection(e) => e.into(),
+			index::Error::SongNotFound(_) => APIError::SongMetadataNotFound,
+			index::Error::Vfs(e) => e.into(),
		}
	}
 }