Fixed SQL error when reading large playlist

This commit is contained in:
Antoine Gersant 2017-07-10 22:33:00 -07:00
parent 0f0fce66ce
commit 9d0ecc5531

View file

@@ -13,6 +13,8 @@ use index::Song;
use vfs::VFSSource;
use errors::*;
const PLAYLIST_CHUNK: usize = 500;
#[derive(Insertable)]
#[table_name="playlists"]
struct NewPlaylist {
@@ -151,6 +153,7 @@ fn read_playlist<T>(playlist_name: &str, owner: &str, db: &T) -> Result<Vec<Song
	let song_paths: Vec<String>;
	let unique_songs: Vec<Song>;
	let vfs = db.get_vfs()?;
let mut songs_map: HashMap<String, Song> = HashMap::new();
	{
		let connection = db.get_connection();
@@ -184,15 +187,15 @@ fn read_playlist<T>(playlist_name: &str, owner: &str, db: &T) -> Result<Vec<Song
		// Find Song objects at the relevant paths
		{
			use self::songs::dsl::*;
			// Query in chunks of PLAYLIST_CHunk paths to avoid exceeding the
			// SQL backend's limit on the number of bound variables per statement.
			for chunk in song_paths[..].chunks(PLAYLIST_CHUNK) {
				let unique_songs: Vec<Song> = songs
					.filter(path.eq_any(chunk))
					.get_results(connection.deref())?;
				for playlist_song in &unique_songs {
					songs_map.insert(playlist_song.path.clone(), playlist_song.clone());
				}
			}
		}
	}
	// Build playlist