Removed unnecessary index object
parent ce90377b2d
commit 143e1f6761
3 changed files with 124 additions and 135 deletions
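
For orientation, a minimal sketch of how call sites change with this commit (all names are taken from the hunks below; error handling via unwrap mirrors the tests):

    // Before: an Index object stored on DB drove the update.
    db.get_index().update_index(&db).unwrap();

    // After: DB exposes thin wrappers over free functions in db::index.
    db.index_update().unwrap();   // one-off rebuild (clean + populate)
    db.index_update_loop();       // blocking, periodic rebuild loop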
src/db/index.rs (228 changed lines)
@@ -242,133 +242,125 @@ impl<'db> IndexBuilder<'db> {
     }
 }
 
-pub struct Index {}
-
-impl Index {
-    pub fn new() -> Index {
-        Index {}
-    }
-
-    pub fn update_index(&self, db: &DB) -> Result<()> {
-        let start = time::Instant::now();
-        println!("Beginning library index update");
-        self.clean(db)?;
-        self.populate(db)?;
-        println!("Library index update took {} seconds",
-                 start.elapsed().as_secs());
-        Ok(())
-    }
-
-    fn clean(&self, db: &DB) -> Result<()> {
-        let vfs = db.get_vfs()?;
-
-        {
-            let all_songs: Vec<String>;
-            {
-                let connection = db.get_connection();
-                let connection = connection.lock().unwrap();
-                let connection = connection.deref();
-                all_songs = songs::table.select(songs::path).load(connection)?;
-            }
-
-            let missing_songs = all_songs
-                .into_iter()
-                .filter(|ref song_path| {
-                    let path = Path::new(&song_path);
-                    !path.exists() || vfs.real_to_virtual(path).is_err()
-                })
-                .collect::<Vec<_>>();
-
-            {
-                let connection = db.get_connection();
-                let connection = connection.lock().unwrap();
-                let connection = connection.deref();
-                for chunk in missing_songs[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
-                    diesel::delete(songs::table.filter(songs::path.eq_any(chunk)))
-                        .execute(connection)?;
-                }
-            }
-        }
-
-        {
-            let all_directories: Vec<String>;
-            {
-                let connection = db.get_connection();
-                let connection = connection.lock().unwrap();
-                let connection = connection.deref();
-                all_directories = directories::table
-                    .select(directories::path)
-                    .load(connection)?;
-            }
-
-            let missing_directories = all_directories
-                .into_iter()
-                .filter(|ref directory_path| {
-                    let path = Path::new(&directory_path);
-                    !path.exists() || vfs.real_to_virtual(path).is_err()
-                })
-                .collect::<Vec<_>>();
-
-            {
-                let connection = db.get_connection();
-                let connection = connection.lock().unwrap();
-                let connection = connection.deref();
-                for chunk in missing_directories[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
-                    diesel::delete(directories::table.filter(directories::path.eq_any(chunk)))
-                        .execute(connection)?;
-                }
-            }
-        }
-
-        Ok(())
-    }
-
-    fn populate(&self, db: &DB) -> Result<()> {
-        let vfs = db.get_vfs()?;
-        let mount_points = vfs.get_mount_points();
-        let connection = db.get_connection();
-
-        let album_art_pattern;
-        {
-            let connection = connection.lock().unwrap();
-            let connection = connection.deref();
-            let settings: MiscSettings = misc_settings::table.get_result(connection)?;
-            album_art_pattern = Regex::new(&settings.index_album_art_pattern)?;
-        }
-
-        let mut builder = IndexBuilder::new(&connection, album_art_pattern)?;
-        for (_, target) in mount_points {
-            builder.populate_directory(None, target.as_path())?;
-        }
-        builder.flush_songs()?;
-        builder.flush_directories()?;
-        Ok(())
-    }
-
-    pub fn update_loop(&self, db: &DB) {
-        loop {
-            if let Err(e) = self.update_index(db) {
-                println!("Error while updating index: {}", e);
-            }
-            {
-                let sleep_duration;
-                {
-                    let connection = db.get_connection();
-                    let connection = connection.lock().unwrap();
-                    let connection = connection.deref();
-                    let settings: Result<MiscSettings> = misc_settings::table
-                        .get_result(connection)
-                        .map_err(|e| e.into());
-                    if let Err(ref e) = settings {
-                        println!("Could not retrieve index sleep duration: {}", e);
-                    }
-                    sleep_duration = settings
-                        .map(|s| s.index_sleep_duration_seconds)
-                        .unwrap_or(1800);
-                }
-                thread::sleep(time::Duration::from_secs(sleep_duration as u64));
-            }
-        }
-    }
-}
+fn clean(db: &DB) -> Result<()> {
+    let vfs = db.get_vfs()?;
+
+    {
+        let all_songs: Vec<String>;
+        {
+            let connection = db.get_connection();
+            let connection = connection.lock().unwrap();
+            let connection = connection.deref();
+            all_songs = songs::table.select(songs::path).load(connection)?;
+        }
+
+        let missing_songs = all_songs
+            .into_iter()
+            .filter(|ref song_path| {
+                let path = Path::new(&song_path);
+                !path.exists() || vfs.real_to_virtual(path).is_err()
+            })
+            .collect::<Vec<_>>();
+
+        {
+            let connection = db.get_connection();
+            let connection = connection.lock().unwrap();
+            let connection = connection.deref();
+            for chunk in missing_songs[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
+                diesel::delete(songs::table.filter(songs::path.eq_any(chunk)))
+                    .execute(connection)?;
+            }
+        }
+    }
+
+    {
+        let all_directories: Vec<String>;
+        {
+            let connection = db.get_connection();
+            let connection = connection.lock().unwrap();
+            let connection = connection.deref();
+            all_directories = directories::table
+                .select(directories::path)
+                .load(connection)?;
+        }
+
+        let missing_directories = all_directories
+            .into_iter()
+            .filter(|ref directory_path| {
+                let path = Path::new(&directory_path);
+                !path.exists() || vfs.real_to_virtual(path).is_err()
+            })
+            .collect::<Vec<_>>();
+
+        {
+            let connection = db.get_connection();
+            let connection = connection.lock().unwrap();
+            let connection = connection.deref();
+            for chunk in missing_directories[..].chunks(INDEX_BUILDING_CLEAN_BUFFER_SIZE) {
+                diesel::delete(directories::table.filter(directories::path.eq_any(chunk)))
+                    .execute(connection)?;
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn populate(db: &DB) -> Result<()> {
+    let vfs = db.get_vfs()?;
+    let mount_points = vfs.get_mount_points();
+    let connection = db.get_connection();
+
+    let album_art_pattern;
+    {
+        let connection = connection.lock().unwrap();
+        let connection = connection.deref();
+        let settings: MiscSettings = misc_settings::table.get_result(connection)?;
+        album_art_pattern = Regex::new(&settings.index_album_art_pattern)?;
+    }
+
+    let mut builder = IndexBuilder::new(&connection, album_art_pattern)?;
+    for (_, target) in mount_points {
+        builder.populate_directory(None, target.as_path())?;
+    }
+    builder.flush_songs()?;
+    builder.flush_directories()?;
+    Ok(())
+}
+
+pub fn update(db: &DB) -> Result<()> {
+    let start = time::Instant::now();
+    println!("Beginning library index update");
+    clean(db)?;
+    populate(db)?;
+    println!("Library index update took {} seconds",
+             start.elapsed().as_secs());
+    Ok(())
+}
+
+pub fn update_loop(db: &DB) {
+    loop {
+        if let Err(e) = update(db) {
+            println!("Error while updating index: {}", e);
+        }
+        {
+            let sleep_duration;
+            {
+                let connection = db.get_connection();
+                let connection = connection.lock().unwrap();
+                let connection = connection.deref();
+                let settings: Result<MiscSettings> = misc_settings::table
+                    .get_result(connection)
+                    .map_err(|e| e.into());
+                if let Err(ref e) = settings {
+                    println!("Could not retrieve index sleep duration: {}", e);
+                }
+                sleep_duration = settings
+                    .map(|s| s.index_sleep_duration_seconds)
+                    .unwrap_or(1800);
+            }
+            thread::sleep(time::Duration::from_secs(sleep_duration as u64));
+        }
+    }
+}
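
The repeated get_connection() / lock() / deref() sequence above is scoped access to the shared SQLite handle. A minimal sketch of the idiom, assuming the connection: Arc<Mutex<SqliteConnection>> field shown in src/db/mod.rs below; the helper itself is hypothetical and not part of this commit:

    use std::ops::Deref;

    fn with_connection<T>(db: &DB, f: impl FnOnce(&SqliteConnection) -> T) -> T {
        let connection = db.get_connection();        // clone of the Arc<Mutex<SqliteConnection>>
        let connection = connection.lock().unwrap(); // guard held only for this scope
        f(connection.deref())                        // diesel calls take &SqliteConnection
    }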
@@ -394,9 +386,8 @@ fn test_populate() {
     use db::models::*;
 
     let db = _get_test_db("populate.sqlite");
-    let index = db.get_index();
-    index.update_index(&db).unwrap();
-    index.update_index(&db).unwrap(); // Check that subsequent updates don't run into conflicts
+    update(&db).unwrap();
+    update(&db).unwrap(); // Check that subsequent updates don't run into conflicts
 
     let connection = db.get_connection();
     let connection = connection.lock().unwrap();
@@ -424,8 +415,7 @@ fn test_metadata() {
     artwork_path.push("Folder.png");
 
     let db = _get_test_db("metadata.sqlite");
-    let index = db.get_index();
-    index.update_index(&db).unwrap();
+    update(&db).unwrap();
 
     let connection = db.get_connection();
     let connection = connection.lock().unwrap();

src/db/mod.rs
@@ -18,7 +18,6 @@ mod index;
 mod models;
 mod schema;
 
-pub use self::index::Index;
 pub use self::models::*;
 
 #[allow(dead_code)]
@@ -27,7 +26,6 @@ embed_migrations!("src/db/migrations");
 
 pub struct DB {
     connection: Arc<Mutex<SqliteConnection>>,
-    index: Index,
 }
 
 impl DB {
@@ -35,10 +33,7 @@ impl DB {
         println!("Database file path: {}", path.to_string_lossy());
         let connection =
             Arc::new(Mutex::new(SqliteConnection::establish(&path.to_string_lossy())?));
-        let db = DB {
-            connection: connection.clone(),
-            index: Index::new(),
-        };
+        let db = DB { connection: connection.clone() };
         db.init()?;
         Ok(db)
     }
@@ -56,10 +51,6 @@ impl DB {
         self.connection.clone()
     }
 
-    pub fn get_index(&self) -> &Index {
-        &self.index
-    }
-
     #[allow(dead_code)]
     fn migrate_down(&self) -> Result<()> {
         let connection = self.connection.lock().unwrap();
@@ -128,6 +119,14 @@ impl DB {
         vfs.virtual_to_real(virtual_path)
     }
 
+    pub fn index_update(&self) -> Result<()> {
+        index::update(self)
+    }
+
+    pub fn index_update_loop(&self) {
+        index::update_loop(self);
+    }
+
     fn get_vfs(&self) -> Result<Vfs> {
         use self::mount_points::dsl::*;
         let mut vfs = Vfs::new();
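
A short usage sketch of the two new DB methods; _get_test_db is the test helper used in the hunks below, the database name is hypothetical, and everything else mirrors this diff:

    let db = _get_test_db("usage_example.sqlite"); // hypothetical file name
    db.index_update().unwrap();                    // one synchronous rebuild, as in the tests

    // src/main.rs (last hunk) runs the blocking variant on a dedicated thread instead:
    //     std::thread::spawn(move || { db.index_update_loop(); });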
@@ -315,7 +314,7 @@ fn test_browse_top_level() {
     root_path.push("root");
 
     let db = _get_test_db("browse_top_level.sqlite");
-    db.get_index().update_index(&db).unwrap();
+    db.index_update().unwrap();
     let results = db.browse(Path::new("")).unwrap();
 
     assert_eq!(results.len(), 1);
@@ -336,7 +335,7 @@ fn test_browse() {
     tobokegao_path.push("Tobokegao");
 
     let db = _get_test_db("browse.sqlite");
-    db.get_index().update_index(&db).unwrap();
+    db.index_update().unwrap();
     let results = db.browse(Path::new("root")).unwrap();
 
     assert_eq!(results.len(), 2);
@@ -353,7 +352,7 @@ fn test_flatten() {
 #[test]
 fn test_flatten() {
     let db = _get_test_db("flatten.sqlite");
-    db.get_index().update_index(&db).unwrap();
+    db.index_update().unwrap();
     let results = db.flatten(Path::new("root")).unwrap();
     assert_eq!(results.len(), 12);
 }
@@ -361,7 +360,7 @@ fn test_random() {
 #[test]
 fn test_random() {
     let db = _get_test_db("random.sqlite");
-    db.get_index().update_index(&db).unwrap();
+    db.index_update().unwrap();
     let results = db.get_random_albums(1).unwrap();
     assert_eq!(results.len(), 1);
 }
@@ -369,7 +368,7 @@ fn test_recent() {
 #[test]
 fn test_recent() {
     let db = _get_test_db("recent.sqlite");
-    db.get_index().update_index(&db).unwrap();
+    db.index_update().unwrap();
     let results = db.get_recent_albums(2).unwrap();
     assert_eq!(results.len(), 2);
     assert!(results[0].date_added >= results[1].date_added);

src/main.rs
@@ -124,7 +124,7 @@ fn run() -> Result<()> {
     let db_ref = db.clone();
     std::thread::spawn(move || {
         let db = db_ref.deref();
-        db.get_index().update_loop(db);
+        db.index_update_loop();
     });
 
     // Mount API