Compare commits

...

366 commits

Author SHA1 Message Date
Antoine Gersant
88792f0669 Adds typo fix to changelog 2025-02-05 18:02:14 -08:00
luzpaz
c2fb46f26d
Make Repology badge display 3 columns ()
More future-friendly format as the list expands
2025-02-05 17:59:07 -08:00
luzpaz
26ce7e2550
Fix various typos ()
Found via `codespell -q 3 -S "*.ai,*.rtf" -L ser,uptodate`
2025-02-05 17:58:04 -08:00
Antoine Gersant
46aed8096e Coverage setup fixes 2025-02-04 23:43:46 -08:00
Antoine Gersant
2aeed5f188 yaml fix 2025-02-04 23:39:04 -08:00
Antoine Gersant
41c4088477 Merge branch 'next' 2025-02-04 23:28:09 -08:00
Antoine Gersant
10946330a8 Adds support for multivalue w/ opus files 2025-02-04 22:19:05 -08:00
Antoine Gersant
2ce035f787 Adds more tests for multivalue field support 2025-02-04 21:24:47 -08:00
Antoine Gersant
956301bfdb Reuse file handle when calling mp3 duration 2025-02-04 01:07:27 -08:00
Antoine Gersant
f35c4efac3 Use id3 duration when available 2025-02-04 00:51:10 -08:00
Antoine Gersant
2a1c93c462 Skip superfluous syscall 2025-02-04 00:39:31 -08:00
Antoine Gersant
7625449434 Update ape dependency to new version 2025-02-03 20:58:28 -08:00
Antoine Gersant
1b0b5bd164 Fixed broken test 2025-02-03 18:10:43 -08:00
Antoine Gersant
809e3f878d Don't list genres with zero valid albums (ie. all songs in this genre have no artist / no album tag) 2025-02-03 01:50:06 -08:00
Antoine Gersant
6862cff185 Implement multi-value support for APE files 2025-02-03 01:09:10 -08:00
Antoine Gersant
36da2c3e70 Removed ydns from suggested ddns services 2025-02-02 19:23:47 -08:00
Antoine Gersant
9923b0f40c Cosmetic change 2025-02-02 14:56:18 -08:00
Antoine Gersant
1df3241ea2 Adds avenue to contribute 2025-02-02 13:54:44 -08:00
Antoine Gersant
77c313637f Adds contribution guidelines 2025-02-02 13:54:02 -08:00
Antoine Gersant
baa31f1056 Removed unused tasks file 2025-02-02 00:56:02 -08:00
Antoine Gersant
cf5d1b7931 - Adds project goals
- Updates feature list
- Readme polish
2025-02-02 00:49:59 -08:00
Antoine Gersant
87c4bdc247 Fixed a bug where tray icon didnt appear on Windows 2025-02-02 00:01:28 -08:00
Antoine Gersant
7c92e90d65 Adds example cover arts 2025-02-01 23:42:09 -08:00
Antoine Gersant
0eb1d7ee75 Updates readme screenshots 2025-02-01 23:41:55 -08:00
Antoine Gersant
dd53d8d284 Slightly more info on migration process 2025-01-31 19:25:12 -08:00
Antoine Gersant
98e10ad682 Fixed codecov badge link 2025-01-31 19:17:22 -08:00
Antoine Gersant
f8b30c4e3d Tentative fix for test coverage setup 2025-01-31 19:14:01 -08:00
Antoine Gersant
5a1753218c yaml fix 2025-01-31 19:02:21 -08:00
Antoine Gersant
646f8297d2 Tentative fix for test coverage setup 2025-01-31 19:01:26 -08:00
Antoine Gersant
d6416e0239 Fixed test failure on Windows 2025-01-31 18:02:13 -08:00
Antoine Gersant
6681322370 Fixed a bug where config file would fail to be created when parent directory does not exist 2025-01-31 17:38:42 -08:00
Antoine Gersant
f0cf3d2675 Use small-collection dataset for migration unit tests 2025-01-31 16:55:37 -08:00
Antoine Gersant
dac7145ce4 Removed swagger assets from makefile 2025-01-31 00:48:23 -08:00
Antoine Gersant
677413ef8c Allow file watch setups to fail 2025-01-31 00:45:03 -08:00
Antoine Gersant
fd3f877f93 Tarpaulin -> grcov 2025-01-31 00:33:39 -08:00
Antoine Gersant
95c7d1a620 Bumped old action 2025-01-31 00:28:01 -08:00
Antoine Gersant
ac88bc9af0 Update install check action 2025-01-31 00:22:00 -08:00
Antoine Gersant
7066e264cd Update toolchain setup action 2025-01-31 00:20:09 -08:00
Antoine Gersant
3cea551ce9 Update release script 2025-01-16 18:35:14 -08:00
Antoine Gersant
b1770fc17e Migrate to native_db 0.8.1 2025-01-16 00:14:53 -08:00
Antoine Gersant
f4009a7fa7 Log request methods 2025-01-16 00:09:29 -08:00
Antoine Gersant
d90b51f752 Fixed merge conflicts 2025-01-15 23:11:57 -08:00
Antoine Gersant
8ccc9cc2ee Utoipa polish 2025-01-15 23:07:48 -08:00
Antoine Gersant
4625bf221d Path param examples 2025-01-15 23:07:48 -08:00
Antoine Gersant
b940ca256b Adds endpoint descriptions 2025-01-15 23:07:48 -08:00
Antoine Gersant
24f27e4f08 Utoipa tag descriptions 2025-01-15 23:07:48 -08:00
Antoine Gersant
bd5aeaf591 API consistency improvements 2025-01-15 23:07:48 -08:00
Antoine Gersant
9707f4a96d Utoipa adds auth requirements 2025-01-15 23:07:48 -08:00
Antoine Gersant
2d92ac03ef Example values for DTO fields 2025-01-15 23:07:48 -08:00
Antoine Gersant
3f5e5eca69 Utoipa accept-version header 2025-01-15 23:07:48 -08:00
Antoine Gersant
dabb034964 Move docs endpoint to /api-docs 2025-01-15 23:07:48 -08:00
Antoine Gersant
cc2d2cedd8 Tag endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
bbd63e1b42 Utoipa params iter 2025-01-15 23:07:48 -08:00
Antoine Gersant
df402ed7b8 Utoipa media endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
b5a8aea1f8 Utoipa for search and playlist endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
350557785c Utoipa collection endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
07e8077a38 Utoipa for file browser endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
23facd96b9 utoipa user management endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
1c3ba3d709 utoipa more endpoints 2025-01-15 23:07:48 -08:00
Antoine Gersant
364710ef79 Utoipa auth endpoint 2025-01-15 23:07:48 -08:00
Antoine Gersant
2e2ddf017b Working utoipa setup 2025-01-15 23:07:47 -08:00
Antoine Gersant
1b142b1855 Utoipa hello world wip 2025-01-15 23:06:54 -08:00
Antoine Gersant
d47fffae4f Use upstream axum-range 2025-01-14 17:35:17 -08:00
Antoine Gersant
11c72240ed Dont log 3xx as error 2025-01-14 17:34:34 -08:00
Antoine Gersant
466bbf5cf3 Migrate to axum 0.8 2025-01-13 21:23:30 -08:00
Antoine Gersant
055a81e6f9 Lint 2025-01-13 21:16:23 -08:00
Antoine Gersant
81e0abc59f Flake update 2025-01-13 20:06:11 -08:00
Antoine Gersant
4826e6aa40 Dont log query parameters 2025-01-13 18:36:46 -08:00
Antoine Gersant
2521ff1ddf Log HTTP requests 2025-01-12 20:38:32 -08:00
Antoine Gersant
00236a99e3 Filter out symphonia log spam 2025-01-12 14:41:11 -08:00
Antoine Gersant
cfc848bf7c Mention migration process in changelog 2025-01-09 22:08:49 -08:00
Antoine Gersant
bf775ebc4c Playlist migration 2025-01-09 21:59:59 -08:00
Antoine Gersant
3ad5e97b75 Settings and auth secret migration 2025-01-07 21:51:43 -08:00
Antoine Gersant
73dc59f833 DB migration skeleton 2025-01-05 17:30:33 -08:00
Antoine Gersant
58d1af5edd Updated changelog 2025-01-02 14:35:17 -08:00
Antoine Gersant
ff7291a246 Adds test for multivalue fields 2025-01-02 12:47:00 -08:00
Antoine Gersant
cfa2cedbc1 Test accented character sorting in search results 2025-01-02 00:13:18 -08:00
Antoine Gersant
b9bcdd46b1 Faster song sorting using dictionary ordering 2025-01-01 23:45:52 -08:00
Antoine Gersant
f371d5e331 Support for accented characters when sorting songs 2025-01-01 23:25:13 -08:00
Antoine Gersant
41187199ba Moved song sorting to collection.rs 2025-01-01 16:46:14 -08:00
Antoine Gersant
de39b2f4a5 Search index build optimizations:
- For ascii bigrams, store song occurences in a vec instead of a hashmap to save on hashing costs (~10% faster). Use ascii values to build vec indices.
- For all bigrams, replace IntMaps with Vec. This allows the same song to be counted multiple times for one bigram, but saves a huge amount of hashing cost (~30%)
2025-01-01 15:57:33 -08:00
Antoine Gersant
cb241d21dd Handle accented characters when sorting more collection results 2025-01-01 13:54:02 -08:00
Antoine Gersant
21d7e3049e Use thin LTO 2025-01-01 13:33:47 -08:00
Antoine Gersant
68b8041f97 Sorting for accented characters 2025-01-01 13:31:11 -08:00
Antoine Gersant
e8845c7ef9 Changelog adjustments 2024-10-13 20:04:29 -07:00
Antoine Gersant
b7719edd8e Updated changelog 2024-10-13 20:02:49 -07:00
Antoine Gersant
ed546ed531 Setup guide tweaks 2024-10-13 00:24:10 -07:00
Antoine Gersant
c640086a3e Cleanup 2024-10-13 00:11:33 -07:00
Antoine Gersant
cf6a092ab7 Setup file watches while indexing is happening 2024-10-12 17:15:22 -07:00
Antoine Gersant
768ea095e1 Skip redundant watch setup 2024-10-12 16:56:20 -07:00
Antoine Gersant
5a5f696366 Reset song count when indexing starts 2024-10-12 16:38:53 -07:00
Antoine Gersant
d1d12aecc5 Adds scanner auto-trigger test 2024-10-12 16:23:31 -07:00
Antoine Gersant
ea75497bf1 Rescan collection when content changes 2024-10-12 16:16:01 -07:00
Antoine Gersant
8100dfceae Replaced channel with notifu 2024-10-12 16:08:27 -07:00
Antoine Gersant
f955eb75c5 Automatically reindex when relevant config changes are made 2024-10-12 14:53:13 -07:00
Antoine Gersant
0a7ae8ebad Adds index status endpoint 2024-10-12 00:19:14 -07:00
Antoine Gersant
090ca387ab Async cleanup 2024-10-11 21:34:36 -07:00
Antoine Gersant
d53681b6c0 Cleanup 2024-10-11 21:21:35 -07:00
Antoine Gersant
d555a2e5f0 Watch config file changes 2024-10-11 21:20:16 -07:00
Antoine Gersant
142d400b8b Create config file on startup 2024-10-11 20:09:39 -07:00
Antoine Gersant
5f585a61d8 Doc updates 2024-10-11 20:04:37 -07:00
Antoine Gersant
08052c25a3 Preserve order of mounts points and users 2024-10-09 23:48:22 -07:00
Antoine Gersant
497b3bb545 Allow clearing DDNS url 2024-10-09 17:29:45 -07:00
Antoine Gersant
32e67dc095 DDNS polish 2024-10-09 16:45:50 -07:00
Antoine Gersant
8b31698cf4 Enable fat LTO in release builds 2024-10-09 16:12:29 -07:00
Antoine Gersant
7a84cc0290 Fixed a bug where blank DDNS url turned into '/' 2024-10-09 14:53:42 -07:00
Antoine Gersant
524e072e9f Fixed typo 2024-10-09 11:12:44 -07:00
Antoine Gersant
5ec0b5f7a5 Write config changes to disk 2024-10-08 23:38:11 -07:00
Antoine Gersant
fb18cb3c4f Test config round trip 2024-10-08 23:03:15 -07:00
Antoine Gersant
0058221e88 Fixed duplicate user test triggering a different error 2024-10-08 22:30:44 -07:00
Antoine Gersant
51283d935f Cleanup 2024-10-08 22:29:09 -07:00
Antoine Gersant
a4e9aea1e4 Read config from disk 2024-10-08 22:28:10 -07:00
Antoine Gersant
7f39d8e8b7 Boilerplate 2024-10-08 22:19:57 -07:00
Antoine Gersant
316f5c0219 Service agnostic DDNS 2024-10-08 21:59:40 -07:00
Antoine Gersant
deeb3e8a05 Cleanup 2024-10-08 20:50:00 -07:00
Antoine Gersant
ae5da0f4f3 Config refactor continued 2024-10-08 20:48:26 -07:00
Antoine Gersant
c7a760e2c2 Cleanup 2024-10-07 23:09:10 -07:00
Antoine Gersant
471e39495c Cleanup 2024-10-07 23:08:49 -07:00
Antoine Gersant
67730f55fb Cleanup 2024-10-07 23:07:53 -07:00
Antoine Gersant
1555c784de Config users refactor 2024-10-07 23:05:35 -07:00
Antoine Gersant
c51ce59fba Removed preferences 2024-10-07 18:08:36 -07:00
Antoine Gersant
a89e3d5145 WIP 2024-10-06 23:12:57 -07:00
Antoine Gersant
658c23e70d Removed /config endpoint 2024-10-06 18:21:28 -07:00
Antoine Gersant
053b684f3a Promot partial collection index during initial scan 2024-10-06 14:27:38 -07:00
Antoine Gersant
1a8bf91628 Index disk serialization without DB 2024-10-06 12:50:39 -07:00
Antoine Gersant
a5061dfc92 Removed last.fm support 2024-10-06 00:29:23 -07:00
Antoine Gersant
2c2b12f536 Upsert playlists 2024-10-05 21:15:12 -07:00
Antoine Gersant
f0a2afe01d Sort playlists alphabetically 2024-10-05 21:13:59 -07:00
Antoine Gersant
9e18a221db Adds polaris.ndb to gitignore 2024-10-05 20:18:47 -07:00
Antoine Gersant
e42c3abfe1 Remove accidental artifact 2024-10-05 20:18:27 -07:00
Antoine Gersant
765de35f89 Playlist DTO 2024-10-05 20:17:20 -07:00
Antoine Gersant
98bcd41e43 Async playlist operations 2024-10-05 20:11:50 -07:00
Antoine Gersant
369bf3821b Introduces data_dir 2024-10-05 20:04:39 -07:00
Antoine Gersant
664ff721e2 ndb playlists first pass 2024-10-04 20:43:53 -07:00
Antoine Gersant
b175e319b7 Introduces playlist header 2024-10-04 18:02:32 -07:00
Antoine Gersant
76535b2f87 Adds tests for genre indexing 2024-10-01 21:20:54 -07:00
Antoine Gersant
d1a0b836cf Adds recently added and main artists to genre payload 2024-09-30 23:12:10 -07:00
Antoine Gersant
071aced10a Adds genre endpoint tests 2024-09-30 18:59:09 -07:00
Antoine Gersant
7f3e091e32 More granular test splitting 2024-09-30 18:51:22 -07:00
Antoine Gersant
232eb7ac12 Index related genres 2024-09-30 00:27:49 -07:00
Antoine Gersant
143da76673 Genre album/artists endpoints 2024-09-29 18:46:10 -07:00
Antoine Gersant
8d51344dc3 Index artists by genre 2024-09-29 16:28:53 -07:00
Antoine Gersant
bff82c3a7c Fixed artifact paths 2024-09-29 15:09:41 -07:00
Antoine Gersant
454b4c00fc Troublshooting 2024-09-29 15:09:41 -07:00
Antoine Gersant
2bbfa064d5 Fail when artifacts are missing 2024-09-29 15:09:41 -07:00
Antoine Gersant
f1e21a4f6e CI churn 2024-09-29 15:09:41 -07:00
Antoine Gersant
9e62dc108c Repair tagging 2024-09-29 15:09:41 -07:00
Antoine Gersant
2992ef89b8 Fixed job dependencies 2024-09-29 15:09:41 -07:00
Antoine Gersant
4548574298 Syntax fixes 2024-09-29 15:09:41 -07:00
Antoine Gersant
72f4604f7a More CI churn 2024-09-29 15:09:41 -07:00
Antoine Gersant
646a8fa587 CI churn 2024-09-29 15:09:41 -07:00
Antoine Gersant
647e1d5614 Changelog for release 0.14.3 2024-09-29 15:09:08 -07:00
Antoine Gersant
c1c0cedccc Release script version churn 2024-09-29 15:08:23 -07:00
Antoine Gersant
d06ad07f51 Fixed artifact paths 2024-09-29 14:25:34 -07:00
Antoine Gersant
f188b2943f Troublshooting 2024-09-29 13:43:24 -07:00
Antoine Gersant
e3041fca6f Fail when artifacts are missing 2024-09-29 13:39:27 -07:00
Antoine Gersant
444d261d0b CI churn 2024-09-29 13:19:37 -07:00
Antoine Gersant
47c73f6196 Repair tagging 2024-09-29 13:06:34 -07:00
Antoine Gersant
63b92718d5 Fixed job dependencies 2024-09-29 12:55:58 -07:00
Antoine Gersant
e11344d2b4 Syntax fixes 2024-09-29 12:54:50 -07:00
Antoine Gersant
0b50a10a36 More CI churn 2024-09-29 12:51:29 -07:00
Antoine Gersant
6a46aaeac6 CI churn 2024-09-29 12:46:17 -07:00
Antoine Gersant
7ae10c6f74 Changelog for release 0.14.3 2024-09-29 12:23:35 -07:00
Antoine Gersant
5d03b7919c Release script version churn 2024-09-29 12:21:08 -07:00
Antoine Gersant
6c2b192f8e Genre key cleanup 2024-09-29 12:15:32 -07:00
Antoine Gersant
ef6951faba Artist key cleanup 2024-09-29 12:14:36 -07:00
Antoine Gersant
e06f79c500 Genre endpoints WIP 2024-09-29 12:06:39 -07:00
Antoine Gersant
cb35ef0ebb Reserve ! character 2024-09-25 17:27:14 -07:00
Antoine Gersant
f21f906eaf Fixed a bug where search results were capped at 200 songs 2024-09-25 17:25:01 -07:00
Antoine Gersant
b943d9aa11 Adds NOT search operator 2024-09-25 17:22:40 -07:00
Antoine Gersant
f971b78856 Skip inoperable filters 2024-09-23 21:00:28 -07:00
Antoine Gersant
971b46be45 Store search index fields in array 2024-09-23 20:35:16 -07:00
Antoine Gersant
ee3f9fd5a0 Use disc number when sorting search results 2024-09-22 20:18:01 -07:00
Antoine Gersant
b5762bd7bf Sort search results 2024-09-22 20:05:20 -07:00
Antoine Gersant
99263ddeca Fixed tests for /search endpoint 2024-09-22 14:19:52 -07:00
Antoine Gersant
be97bccab1 Number fields search 2024-09-22 14:19:52 -07:00
Antoine Gersant
0fe3555560 Avoid false positives when all bigrams match 2024-09-22 14:19:52 -07:00
Antoine Gersant
bdc4f840a4 Case insensitive search 2024-09-22 14:19:52 -07:00
Antoine Gersant
409d79d8a2 Additional search tests 2024-09-22 14:19:52 -07:00
Antoine Gersant
390ee03020 Small perf improvement to search index building 2024-09-22 14:19:52 -07:00
Antoine Gersant
cb33c96548 Adds TODO 2024-09-22 14:19:52 -07:00
Antoine Gersant
5128796825 Fixed false positives in search results 2024-09-22 14:19:52 -07:00
Antoine Gersant
81403960b0 Skip allocations 2024-09-22 14:19:52 -07:00
Antoine Gersant
5e8587c39f Search indexing WIP 2024-09-22 14:19:52 -07:00
Antoine Gersant
e5339ab39a End unquoted literals on reserved symbols 2024-09-22 14:19:52 -07:00
Antoine Gersant
caf12f23b4 Avoid .or() for faster compile times 2024-09-22 14:19:52 -07:00
Antoine Gersant
9a14114e50 Parenthesis and implicit AND support 2024-09-22 14:19:52 -07:00
Antoine Gersant
83b5431994 Boolean operators parsing 2024-09-22 14:19:52 -07:00
Antoine Gersant
b96cd2d781 Search syntax first pass 2024-09-22 14:19:52 -07:00
Antoine Gersant
bc17954db9 Fixed borked tests 2024-09-20 23:56:35 -07:00
Antoine Gersant
e5a8c325a6 Updated changelog 2024-09-18 22:51:23 -07:00
Antoine Gersant
625f4bd006 Adds seed support to get_random_albums 2024-09-18 21:19:59 -07:00
Antoine Gersant
ae4200c6ce Adds support for offset and count parameters in get_recent_albums 2024-09-17 22:27:39 -07:00
Antoine Gersant
6bd0c25d7d Fixed a bug where artists with no album could sneak into collection 2024-09-17 22:27:05 -07:00
Antoine Gersant
2b81355f6d Adds get_albums endpoint 2024-09-16 23:25:41 -07:00
Antoine Gersant
e65cee366d Artist schema iteration 2024-09-08 13:46:47 -07:00
Antoine Gersant
ae876915b4 Adds TODO 2024-09-07 17:44:25 -07:00
Antoine Gersant
7be9f25cb3 Drop support for blank album names 2024-09-07 16:22:39 -07:00
Antoine Gersant
4072e3b07d Adds num_songs_by_genre to artist details 2024-09-07 15:37:17 -07:00
Antoine Gersant
54ce646931 Merge values that only differ by cosmetic characters 2024-09-07 13:25:16 -07:00
Antoine Gersant
e0bf259be3 Track num songs by artist 2024-09-06 19:20:21 -07:00
Antoine Gersant
07324ccca6 Track num_songs_by_genre 2024-09-06 01:34:41 -07:00
Antoine Gersant
c1f24ce96b Artist indexing test 2024-09-05 23:11:26 -07:00
Antoine Gersant
0c12729983 Artist indexing 2024-09-05 23:08:40 -07:00
Antoine Gersant
ad37a14cfa Artist list merges case divergences, excludes VA, reports more album info 2024-09-04 23:54:58 -07:00
Antoine Gersant
309620a088 Adds compact/large mode to changelog 2024-09-04 20:14:25 -07:00
Antoine Gersant
1e0a6062f9 Trailing slash normalization 2024-09-04 18:10:21 -07:00
Antoine Gersant
85cacd8bb7 Mentioned addition of new thumbnail size 2024-09-04 01:21:31 -07:00
Antoine Gersant
7c5ff2e895 Updated changelog 2024-09-03 01:18:22 -07:00
Antoine Gersant
afc5fcb4c2 Async support for thumbnails and peaks 2024-09-02 13:57:25 -07:00
Antoine Gersant
9a30065971 Adds new endpoint to generate audio waveforms 2024-09-02 13:27:46 -07:00
Antoine Gersant
f4b0cb9eb7 Fixed lint 2024-08-27 22:58:37 -07:00
Antoine Gersant
e703f69a48 Adds support for tiny thumbnails 2024-08-25 20:05:27 -07:00
Antoine Gersant
57a0163c04 Adds TODO 2024-08-25 15:57:33 -07:00
Antoine Gersant
5444285327 Adds endpoint to retrieve song metata in bulk 2024-08-25 15:28:16 -07:00
Antoine Gersant
6837994433 Return first 200 songs when returning a list of songs 2024-08-24 23:28:22 -07:00
Antoine Gersant
8141e565e0 Added TODO 2024-08-24 20:58:17 -07:00
Antoine Gersant
a3c2b3bc32 Compress static files 2024-08-16 19:16:28 -07:00
Antoine Gersant
570c2b3894 Browse now skips top-level when it only has one mount 2024-08-15 21:41:05 -07:00
Peder Bergebakken Sundt
f625c57d20 update crate time from 0.3.28 to 0.3.36
Fixes build with rust 1.80.0
2024-08-13 09:17:13 -07:00
Antoine Gersant
d492afc885 Flatten perf improvements: gzip response and parallelize sorting 2024-08-10 11:38:29 -07:00
Antoine Gersant
4112c7d79d Sorting improvements 2024-08-10 10:57:07 -07:00
Antoine Gersant
39407c6551 async cleanup 2024-08-10 10:31:53 -07:00
Antoine Gersant
0afab8d634 Implements artists/ endoint 2024-08-10 10:30:21 -07:00
Antoine Gersant
bc3ed59382 Adds collection tests 2024-08-09 23:04:55 -07:00
Antoine Gersant
636803c0df Sort albums by year first 2024-08-09 22:01:02 -07:00
Antoine Gersant
a7c4c90427 Cosmetic changes 2024-08-09 20:02:23 -07:00
Antoine Gersant
91152fdc08 Removed unused field 2024-08-09 20:00:50 -07:00
Antoine Gersant
1bbeee7f39 Fixed a bug where recent albums were not correctly sorted 2024-08-09 19:43:30 -07:00
Antoine Gersant
6564e7d078 Cleaned collection tests 2024-08-09 18:43:13 -07:00
Antoine Gersant
41c043f863 Cleaned scanner tests 2024-08-09 18:02:15 -07:00
Antoine Gersant
3f645d1011 Repair playlists 2024-08-09 17:40:59 -07:00
Antoine Gersant
6b5c291cb7 Clean up browser tests 2024-08-09 17:22:41 -07:00
Antoine Gersant
310e3b6c4d Fixed a bug where browser entries were not sorted 2024-08-09 16:28:30 -07:00
Antoine Gersant
a2232aa9f2 Semantic indexing for composer/lyricist 2024-08-09 13:02:49 -07:00
Antoine Gersant
0841c15f48 Avoid re-parsing regex 2024-08-09 12:26:40 -07:00
Antoine Gersant
763ba94e9b Single threaded rodeo 2024-08-09 12:11:25 -07:00
Antoine Gersant
a4baa2c792 Perf improvements 2024-08-09 11:24:53 -07:00
Antoine Gersant
e6483cf138 Organization 2024-08-09 10:59:59 -07:00
Antoine Gersant
b014c63af4 Fixed empty albums 2024-08-09 10:27:54 -07:00
Antoine Gersant
6821318a4d Intern strings in collection 2024-08-09 10:25:18 -07:00
Antoine Gersant
0a1f3fa78d Skip unecessary allocations 2024-08-09 08:40:44 -07:00
Antoine Gersant
169b2b5cb8 Keep directory entries sorted as we add them 2024-08-09 08:30:10 -07:00
Antoine Gersant
782da35a7b Skip allocations 2024-08-09 08:27:09 -07:00
Antoine Gersant
2cbb249c46 Less aggressive polling 2024-08-09 08:16:47 -07:00
Antoine Gersant
2f2fdf9056 No longer refcount rodeo to avoid redundant serialization 2024-08-09 08:00:24 -07:00
Antoine Gersant
f0fa985f8a Intern strings in flattened 2024-08-04 19:25:39 -07:00
Antoine Gersant
6b1133e27c Intern browser directories 2024-08-04 19:07:10 -07:00
Antoine Gersant
8f6e72fbd6 Removed tarpaulin noise 2024-08-04 19:00:01 -07:00
Antoine Gersant
2c7eb9f643 Removed unused dependencies 2024-08-03 15:05:44 -07:00
Antoine Gersant
7a17cdc195 Rely on Axum to do percent decoding 2024-08-03 15:01:42 -07:00
Antoine Gersant
16434e6c51 Disable default ureq features 2024-08-03 14:55:02 -07:00
Antoine Gersant
5a14830138 Bump depedencies 2024-08-03 13:57:03 -07:00
Antoine Gersant
845105cf38 Fixed integration tests 2024-08-01 02:08:35 -07:00
Antoine Gersant
cd45836924 Error types consolidation 2024-08-01 00:09:21 -07:00
Antoine Gersant
8f2566f574 Refactor index 2024-07-31 23:38:38 -07:00
Antoine Gersant
a0624f7968 Flatten via trie 2024-07-31 18:00:26 -07:00
Antoine Gersant
7a1d433c8a Return album appearances 2024-07-31 17:07:44 -07:00
Antoine Gersant
ae9f94ce4f Removes MultiString 2024-07-31 16:47:12 -07:00
Antoine Gersant
e8af339cde Browsing via index (WIP) 2024-07-31 03:41:32 -07:00
Antoine Gersant
b4b0e1181f Indexing perf work 2024-07-31 01:43:13 -07:00
Antoine Gersant
72ec7b260a Index artists 2024-07-31 00:11:33 -07:00
Antoine Gersant
35736ee1d5 v7 compat for random/recent endpoints 2024-07-30 23:17:41 -07:00
Antoine Gersant
332e39876e Implements get_album endpoint 2024-07-30 00:24:25 -07:00
Antoine Gersant
b42c6d39e8 Seralize index into DB 2024-07-29 22:56:03 -07:00
Antoine Gersant
1f3cc1ea26 Rebuild index on startup 2024-07-29 21:54:07 -07:00
Antoine Gersant
8db6a2352b Adds ID trait 2024-07-29 20:03:25 -07:00
Antoine Gersant
93e8d7d94b Implement recent albums endpoint 2024-07-29 20:00:53 -07:00
Antoine Gersant
64ef7cb21f Index -> IndexManager 2024-07-29 18:13:40 -07:00
Antoine Gersant
2012258a72 Indexing WIP 2024-07-29 02:07:28 -07:00
Antoine Gersant
2965cbdf7e Index/Browser split 2024-07-28 23:15:26 -07:00
Antoine Gersant
efc27757c7 Updated changelog 2024-07-28 12:59:31 -07:00
Antoine Gersant
91352fc13b Cleanup 2024-07-28 02:34:35 -07:00
Antoine Gersant
470fbc6d1c Fixed toolchain setup 2024-07-27 22:23:49 -07:00
Antoine Gersant
9e9d031f4e Rename toolchain file 2024-07-27 22:20:27 -07:00
Antoine Gersant
caf6feea7a API versioning tests 2024-07-27 18:47:32 -07:00
Antoine Gersant
caa8907297 API versioning 2024-07-27 18:06:19 -07:00
Antoine Gersant
6871f41a99 Dev environment setup 2024-07-27 15:20:23 -07:00
Antoine Gersant
00cc18c798 Dev environment setup 2024-07-27 13:30:42 -07:00
Antoine Gersant
3362a828cd Split index into scanner (populates DB) and index (reads from DB) 2024-07-15 02:11:18 -07:00
Antoine Gersant
9d8d543494 Adds multi-value fields (single row) 2024-07-15 01:29:09 -07:00
Antoine Gersant
5a785a2e16 Update build script to follow removal of crate feature 2024-07-13 19:06:19 -07:00
Antoine Gersant
0f25a12877 Dependency bumps 2024-07-13 19:01:06 -07:00
Antoine Gersant
1c4ef6c5ee Dependency bumps 2024-07-13 18:48:55 -07:00
Antoine Gersant
1020f27413 Better migration error message 2024-07-13 18:28:51 -07:00
Antoine Gersant
0e63f64513 Range requests 2024-07-13 18:25:33 -07:00
Antoine Gersant
153943a3ae Add thumbnails endpoint 2024-07-13 17:58:31 -07:00
Antoine Gersant
d82563efc0 Adds playlist endpoints 2024-07-13 17:44:40 -07:00
Antoine Gersant
274a1f2cf7 Adds lastfm endpoints 2024-07-13 17:28:48 -07:00
Antoine Gersant
18858d8d1a Collection endpoints 2024-07-13 17:17:01 -07:00
Antoine Gersant
03d5568765 Implements more endpoints 2024-07-13 15:48:08 -07:00
Antoine Gersant
5c4631c673 Adds settings endpoints 2024-07-13 14:12:54 -07:00
Antoine Gersant
84921f7db3 Static file serving 2024-07-13 12:40:47 -07:00
Antoine Gersant
08353a717f Axum initial setup 2024-07-13 12:30:02 -07:00
Antoine Gersant
138886e55c Cosmetic change 2024-07-13 11:09:20 -07:00
Antoine Gersant
6884548cd0 Trim dependency features 2024-07-13 01:38:55 -07:00
Antoine Gersant
12a9f2ec3c Diesel -> SQLx 2024-07-13 01:20:27 -07:00
Antoine Gersant
138eacc9fc Merge branch 'master' of https://github.com/agersant/polaris 2024-07-10 23:17:41 -07:00
Antoine Gersant
11775d961b Lints 2024-07-10 23:17:38 -07:00
duydl
77dc2eac23
Add support for m4b format ()
* Add support for m4b

* Formatting

* Formatting

---------

Co-authored-by: Antoine Gersant <antoine.gersant@lesforges.org>
2024-05-09 19:59:45 -07:00
Antoine Gersant
7279793d25 0.14.2 changelog 2024-03-13 19:03:43 -07:00
Antoine Gersant
d4a427648e Fixed startup error related to system tray integration 2024-03-13 18:46:35 -07:00
Antoine Gersant
123eee7d2d Tentative fix for Linux install CI 2024-02-02 20:30:04 -08:00
Antoine Gersant
fd6a13083d Autoformat 2024-02-02 20:29:51 -08:00
Antoine Gersant
5ca38939bd Changelog for 0.14.1 2024-02-02 20:26:56 -08:00
Antoine Gersant
c1abd8fe3b Fixed musl linking error 2024-02-02 20:17:36 -08:00
Antoine Gersant
fc0a4fd6eb Tentative fix for linux build 2023-09-12 20:08:12 -07:00
Antoine Gersant
6f24ff248f Depedency bumps 2023-09-08 19:34:39 -07:00
Antoine Gersant
4807b2d3b9 Apply lints 2023-09-08 18:23:34 -07:00
Antoine Gersant
608dabb789 Removed tokio dependency 2023-09-08 18:20:12 -07:00
Peder Bergebakken Sundt
8d38c5b664
id3: -> 1.4.0(git) -> 1.7.0 ()
This removes the git dep, which since rust 1.68.0 are not repoducible.
2023-06-17 18:12:58 -07:00
Etienne Dechamps
f6d45c8387
Demote DDNS disabled message to avoid log spam ()
Fixes 
2023-02-11 15:53:07 -08:00
Elise
930fd67ae3
Add a link to Polarios in the README () 2023-01-05 13:21:10 -08:00
Antoine Gersant
00b6444048 Adds social media preview image for Github links 2022-11-27 20:22:26 -08:00
Antoine Gersant
bd330ddd84 Make patch test coverage informational 2022-11-24 20:38:19 -08:00
Antoine Gersant
46a232219d Update changelog for release 0.14.0 2022-11-24 20:27:09 -08:00
Antoine Gersant
a8660793f8 Use TDOR frame for ID3v2 to populate year 2022-11-24 20:22:35 -08:00
Antoine Gersant
eaec68dff0 Specified API errors 2022-11-21 21:37:47 -08:00
Antoine Gersant
1484ecabe9 Log error details instead of sending them in HTTP responses 2022-11-21 18:37:55 -08:00
Antoine Gersant
1812bedfd2 Fixed a bug where systemd init error would not display 2022-11-21 17:31:12 -08:00
Antoine Gersant
c57583d1d4 Removed anyhow dependency 2022-11-21 17:23:14 -08:00
Antoine Gersant
98d00d261d Remove some usage of anyhow 2022-11-21 17:00:15 -08:00
Antoine Gersant
edc7170b89 Removed unused import 2022-11-21 16:53:32 -08:00
Antoine Gersant
e4959be2f4 metadata module error cleanup 2022-11-21 16:51:31 -08:00
Antoine Gersant
fee2f17fb1 Error cleanup 2022-11-21 16:45:18 -08:00
Antoine Gersant
4c5a6bc2d6 Error cleanup 2022-11-21 16:31:49 -08:00
Antoine Gersant
1e9d307a05 Error cleanup 2022-11-21 16:06:18 -08:00
Antoine Gersant
4ec8f2161b Error cleanup 2022-11-21 16:00:22 -08:00
Antoine Gersant
f609afc5ed Structured errors continued 2022-11-21 15:33:50 -08:00
Antoine Gersant
9f0bc06dac Bump API version 2022-11-17 22:31:19 -08:00
Antoine Gersant
d1cb328523 Migrated changelog to a plain text file 2022-11-14 20:57:23 -08:00
Antoine Gersant
33997fc8e1 Trigger demo deployment on release 2022-11-14 02:06:56 -08:00
Antoine Gersant
602c1c03b5 Added demo to readme 2022-11-14 00:46:14 -08:00
Antoine Gersant
f3abb816ff Fixed a bug where all music sources would be deleted when trying to add sources with duplicate names 2022-11-12 14:07:01 -08:00
Antoine Gersant
96d702b79e Improve build times 2022-11-10 01:56:46 -08:00
Antoine Gersant
223894c2b6 Merge branch 'master' of https://github.com/agersant/polaris 2022-11-09 01:23:32 -08:00
Antoine Gersant
bb8d1142d6 Defined a few unspecified errors 2022-11-09 01:23:23 -08:00
Antoine Gersant
822f3ed073 Merged trivial modules 2022-11-09 00:39:48 -08:00
Antoine Gersant
2873f38e04 Merged trivial modules 2022-11-09 00:33:57 -08:00
Antoine Gersant
388901cf65 Moved manager.rs file contents to parent modules 2022-11-09 00:14:52 -08:00
Antoine Gersant
df0de19567 Renamed mod.s rs files 2022-11-08 23:53:02 -08:00
Tobias Schmitz
29ae862aad
Update dependencies ()
* update dependencies

* Upgrade rust edition to 2021

* make actix-test a dev-dependency
2022-11-08 19:54:24 -08:00
Antoine Gersant
a5f5a77100 Linter suggestions 2022-11-08 02:04:54 -08:00
Antoine Gersant
63e971059a Removed deprecated authentication methods 2022-11-08 02:01:20 -08:00
Antoine Gersant
d41e837561 Linter suggestions 2022-11-08 01:21:26 -08:00
Tobias Schmitz
f5a2eed423
Migrate to diesel 2.0 () 2022-08-30 11:47:16 -07:00
Tobias Schmitz
41a4b21327
Fix clippy warnings () 2022-08-29 21:17:03 -07:00
Tobias Schmitz
374d0ca56f
Migrate to actix-web 4 ()
* Migrate to actix-web 4

* Change expected swagger test status code

* update tokio to 1.0

* fix clippy warnings
2022-04-24 13:55:38 -07:00
Tobias Schmitz
90fd6bbcc9
Update dependencies () 2022-03-20 19:50:14 -07:00
Antoine Gersant
39c8cf7595 Thumbnail and audio endpoints no longer encode payloads 2021-11-28 20:13:54 -08:00
Antoine Gersant
f27bc4ccfc Added TODO 2021-11-27 16:45:25 -08:00
Antoine Gersant
818dfe877c Update codecov github action 2021-11-14 14:17:36 -08:00
Antoine Gersant
b6e9940c76 Updated id3 dependency 2021-11-10 19:47:45 -08:00
pmphfm
e2bf97db99
Code cleanup ()
Fixed all most all clippy warnings.
Test: cargo test && cargo clippy
2021-10-19 19:31:17 -07:00
Tobias Schmitz
d01583b406
add api parameter for thumbnail size ()
* add api parameter for thumbnail size

* make max_dimension optinal in case of native resolution

* add tests for thumbnail size

* fix typo

* fix thumbnail size tests

* make unwrap more explicit

* remove print statement

* update workflows

* reduce thumbnail variations

* add removed token

* Update coverage.yml

* fix typo

* hopefully prevent coverage timeout

- split up thumnail tests
- reduce threadcount used for test execution

* get thread count using github actions specific step

* use fixed thread count of 4

* run coverage tests in release mode

* ignore large and native thumbnail_size tests in coverage
2021-06-05 02:24:25 -07:00
pmphfm
f104355076
Add few more fields to song information ()
* [meta] Add ignore paths to vscode settings

* [feature] Add few more fields to song information

Fields include lyricist, composer, genre, category
and label.
2021-05-20 22:08:43 -07:00
Antoine Gersant
4c25195deb
Updated list of supported formats 2021-04-27 21:43:28 -07:00
gahag
ed581c57cf
Add support for AIFF files ()
The new patch in rust-id3 fixes the AIFF API, which is now used to support AIFF files.
2021-04-24 22:05:52 -07:00
gahag
652772ba0e
Implement support for Wave files ()
* Implement support for Wave files

Metadata extraction for such format is supported by the latest version of rust-id3, which
has been updated in this commit. The code has been updated to handle such files and call
the new APIs.

* Code review
2021-04-19 21:49:23 -07:00
David Futcher
6c27409ef2
Bump rustfm-scrobble dependency to v1.1.1 () 2021-01-13 19:39:58 -08:00
Antoine Gersant
7a73ae7cc0 Don't emit log file when running in foreground (-f on Linux, polaris-cli.exe on Windows) and --log is not set 2021-01-02 16:03:51 -08:00
Antoine Gersant
2f71cf2db7 Checkout release branch when making a release 2020-12-30 22:56:26 -08:00
Antoine Gersant
4ad8d922f7
Platform-specific improvements ()
* Use native-windows-gui crate to manage tray icon
Adds log file support on Windows

* Log file location now works like other paths

* Removed context builder

* Context --> App

* Removed mount URLs from App

* Switch to a nicer crate for forking daemon

* Handle errors from notify_ready

* Add application icon to all Windows Polaris executables, not just those created by the release script

* Add build.rs to release tarball

* Create PID file parent directory if necessary
2020-12-30 21:41:57 -08:00
Antoine Gersant
7edcc38483
Test setup improvements ()
* More descriptive test names

* Test writing tools

* Migrate to new test tooling

* Adds test for collection cleaner
2020-12-29 20:05:04 -08:00
Antoine Gersant
7bc8e142c3 Fixed a bug where missing content was not removed from database 2020-12-29 16:14:02 -08:00
Antoine Gersant
487d261843 Removed unecessary dependency 2020-12-27 16:39:48 -08:00
218 changed files with 15007 additions and 11589 deletions
.codecov.yml.envrc
.github/workflows
.gitignore
.vscode
CHANGELOG.mdCargo.lockCargo.tomlREADME.mdbuild.rsdiesel.toml
docs
flake.lockflake.nix
migrations
201706250006_init
201706250228_directories_date_added
201706272129_users_table
201706272304_misc_settings_table
201706272313_ddns_config_table
201706272327_mount_points_table
201707091522_playlists_tables
20170929203228_add_prefix_url
20171015224223_add_song_duration
20180303211100_add_last_fm_credentials
2019-08-08-042731_blob_auth_secret
2019-09-28-231910_pbkdf2_simple
2020-01-08-231420_add_theme
2020-11-25-174000_remove_prefix_url
res
rust-toolchainrust-toolchain.toml
src

View file

@ -1,6 +1,9 @@
coverage:
range: "0...100"
status:
patch:
default:
informational: true
project:
default:
informational: true

1
.envrc Normal file
View file

@ -0,0 +1 @@
use flake

View file

@ -11,19 +11,14 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
features: [--no-default-features, --features bundle-sqlite, --features ui]
exclude:
- os: windows-latest
features: --no-default-features
features: ["", --features ui]
steps:
- name: Install libsqlite3-dev
if: contains(matrix.os, 'ubuntu') && !contains(matrix.features, 'bundle-sqlite')
run: sudo apt-get update && sudo apt-get install libsqlite3-dev
- uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- uses: actions/checkout@v4
- uses: actions-rust-lang/setup-rust-toolchain@v1
- uses: actions-rs/cargo@v1
with:
command: test

View file

@ -14,17 +14,35 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Install Tarpaulin
run: cargo install cargo-tarpaulin
- name: Run Tests
run: cargo tarpaulin --all-features --ignore-tests --out Xml
- name: Upload Results
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
fail_ci_if_error: true
- name: Checkout Polaris
uses: actions/checkout@v4
- uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: llvm-tools-preview
- name: Install grcov
run: cargo install grcov
- name: Run tests
run: cargo test --no-fail-fast
env:
RUSTFLAGS: "-Cinstrument-coverage"
- name: Gather coverage results
run: >
grcov
.
-t lcov
-o coverage.txt
--llvm
--branch
--ignore-not-existing
--binary-path ./target/debug/
--excl-line "#\[derive\("
--excl-br-line "#\[derive\("
--excl-start "mod tests \{"
--excl-br-start "mod tests \{"
- name: Upload Results
uses: codecov/codecov-action@v2
with:
fail_ci_if_error: true
verbose: true
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

19
.github/workflows/deploy-demo.yml vendored Normal file
View file

@ -0,0 +1,19 @@
name: Deploy Demo Server
on:
workflow_dispatch:
release:
types: [released]
jobs:
trigger:
name: Trigger Demo Build
runs-on: ubuntu-latest
steps:
- name: Repository Dispatch
uses: peter-evans/repository-dispatch@v2
with:
token: ${{ secrets.POLARIS_DEMO_ACCESS_TOKEN }}
repository: agersant/polaris-demo
event-type: polaris-release

View file

@ -2,127 +2,107 @@ on:
workflow_dispatch:
inputs:
versionNumber:
description: 'User-facing version number (eg: 0.13.0)'
description: "User-facing version number (eg: 0.13.0)"
required: true
name: Make Release
jobs:
branch_and_tag:
name: Update Release Branch
runs-on: ubuntu-latest
steps:
- name: Merge to Release Branch
uses: devmasx/merge-branch@v1.3.1
with:
type: now
target_branch: release
github_token: ${{ secrets.GITHUB_TOKEN }}
- name: Checkout Release Branch
uses: actions/checkout@master
with:
ref: release
- name: Update Polaris Version in Cargo.toml
run: gawk -i inplace '/^version/ { if (count == 0) { $3 = "\"${{ github.event.inputs.versionNumber }}\""; count++ } } 1' Cargo.toml
- name: Commit Cargo.toml Version Change
uses: EndBug/add-and-commit@v5
with:
branch: release
message: 'Updated version number'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Add <version number> Git Tag
run: |
git config --global user.name ${{ github.actor }}
git config --global user.email "<>"
git tag -f -a ${{ github.event.inputs.versionNumber }} -m "Version number"
git push -f --tags
create_release:
name: Create Github Release
runs-on: ubuntu-latest
needs: branch_and_tag
steps:
- name: Create Github Release
id: create_release
uses: actions/create-release@v1.0.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ github.event.inputs.versionNumber }}
release_name: Polaris ${{ github.event.inputs.versionNumber }}
draft: true
prerelease: false
- name: Write Upload URL To Disk
run: echo "${{ steps.create_release.outputs.upload_url }}" > upload-url
- name: Store Upload URL
uses: actions/upload-artifact@v1
with:
name: release
path: upload-url
- name: Merge to Release Branch
uses: devmasx/merge-branch@v1.3.1
with:
type: now
target_branch: release
github_token: ${{ secrets.GITHUB_TOKEN }}
- name: Checkout Release Branch
uses: actions/checkout@v4
with:
ref: release
- name: Update Polaris Version in Cargo.toml
run: gawk -i inplace '/^version/ { if (count == 0) { $3 = "\"${{ github.event.inputs.versionNumber }}\""; count++ } } 1' Cargo.toml
- name: Commit Cargo.toml Version Change
uses: EndBug/add-and-commit@v9
with:
message: "Updated version number"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Add <version number> Git Tag
run: |
git config --global user.name ${{ github.actor }}
git config --global user.email "<>"
git tag -f -a ${{ github.event.inputs.versionNumber }} -m "Version number"
git push -f --tags
windows:
name: Windows
runs-on: windows-latest
needs: create_release
needs: branch_and_tag
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Install Rust Toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Make release
uses: ./.github/actions/make-windows-release
with:
version-number: ${{ github.event.inputs.versionNumber }}
output-file: polaris.msi
- name: Retrieve Upload URL
uses: actions/download-artifact@v1
with:
name: release
- name: Read Upload URL
shell: bash
run: echo "UPLOAD_URL=$(cat release/upload-url)" >> $GITHUB_ENV
- name: Upload Installer To Github Release
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ env.UPLOAD_URL }}
asset_path: polaris.msi
asset_name: Polaris_${{ github.event.inputs.versionNumber }}.msi
asset_content_type: application/x-msi
- name: Checkout Polaris
uses: actions/checkout@v4
with:
ref: release
- name: Install Rust Toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Make release
uses: ./.github/actions/make-windows-release
with:
version-number: ${{ github.event.inputs.versionNumber }}
output-file: Polaris_${{ github.event.inputs.versionNumber }}.msi
- name: Upload installer
uses: actions/upload-artifact@v4
with:
if-no-files-found: error
name: windows-artifact
path: Polaris_${{ github.event.inputs.versionNumber }}.msi
linux:
name: Linux
runs-on: ubuntu-latest
needs: create_release
needs: branch_and_tag
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Make release
uses: ./.github/actions/make-linux-release
with:
version-number: ${{ github.event.inputs.versionNumber }}
output-file: polaris.tar.gz
- name: Retrieve Upload URL
uses: actions/download-artifact@v1
with:
name: release
- name: Read Upload URL
run: echo "UPLOAD_URL=$(cat release/upload-url)" >> $GITHUB_ENV
- name: Upload To Github Release
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ env.UPLOAD_URL }}
asset_path: polaris.tar.gz
asset_name: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
asset_content_type: application/gzip
- name: Checkout Polaris
uses: actions/checkout@v4
with:
ref: release
- name: Make release
uses: ./.github/actions/make-linux-release
with:
version-number: ${{ github.event.inputs.versionNumber }}
output-file: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
- name: Upload release
uses: actions/upload-artifact@v4
with:
if-no-files-found: error
name: linux-artifact
path: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
create_release:
name: Create Github Release
runs-on: ubuntu-latest
needs: [windows, linux]
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
merge-multiple: true
- name: Make Github release
uses: softprops/action-gh-release@v2
with:
body: 'Release notes are documented in [CHANGELOG.md](https://github.com/agersant/polaris/blob/master/CHANGELOG.md)'
draft: true
prerelease: false
name: Polaris ${{ github.event.inputs.versionNumber }}
tag_name: ${{ github.event.inputs.versionNumber }}
fail_on_unmatched_files: true
files: |
Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
Polaris_${{ github.event.inputs.versionNumber }}.msi

View file

@ -1,32 +1,28 @@
on:
pull_request:
branches:
- master
push:
branches:
- master
name: Validate Install
jobs:
package_linux_release:
name: Package Linux Release
runs-on: ubuntu-latest
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Make release
uses: ./.github/actions/make-linux-release
with:
version-number: '0.0.0'
output-file: polaris.tar.gz
- name: Upload packaged release
uses: actions/upload-artifact@v2
with:
name: linux-release
path: polaris.tar.gz
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Make release
uses: ./.github/actions/make-linux-release
with:
version-number: "0.0.0"
output-file: polaris.tar.gz
- name: Upload packaged release
uses: actions/upload-artifact@v4
with:
if-no-files-found: error
name: linux-release
path: polaris.tar.gz
validate_linux_system_install:
name: Linux System Install
@ -34,30 +30,28 @@ jobs:
needs: package_linux_release
steps:
- name: Download release
uses: actions/download-artifact@v2
with:
name: linux-release
path: .
- name: Extract release
run: tar -xzvf polaris.tar.gz --strip-components=1
- name: Preview Install
run: make preview
- name: Preview Install w/ Custom Prefix
run: make preview PREFIX=/some/random/prefix
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Install
run: sudo --preserve-env=PATH make install
- name: Run Polaris
run: sudo /usr/local/bin/polaris && sleep 5s
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: sudo kill -KILL $(cat /usr/local/var/run/polaris/polaris.pid)
- name: Uninstall
run: sudo make uninstall
- name: Download release
uses: actions/download-artifact@v4
with:
name: linux-release
path: .
- name: Extract release
run: tar -xzvf polaris.tar.gz --strip-components=1
- name: Preview Install
run: make preview
- name: Preview Install w/ Custom Prefix
run: make preview PREFIX=/some/random/prefix
- uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Install
run: sudo --preserve-env=PATH make install
- name: Run Polaris
run: sudo /usr/local/bin/polaris && sleep 5s
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: sudo kill -KILL $(sudo cat /usr/local/var/run/polaris/polaris.pid)
- name: Uninstall
run: sudo make uninstall
validate_linux_xdg_install:
name: Linux XDG Install
@ -65,52 +59,49 @@ jobs:
needs: package_linux_release
steps:
- name: Download release
uses: actions/download-artifact@v2
with:
name: linux-release
path: .
- name: Extract release
run: tar -xzvf polaris.tar.gz --strip-components=1
- name: Preview Install
run: make preview-xdg
- name: Preview Install w/ Custom XDG_DATA_HOME
run: make preview-xdg XDG_DATA_HOME=/my/own/xdg/home
- uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Install
run: make install-xdg
- name: Run Polaris
run: $HOME/.local/bin/polaris && sleep 5s
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: kill -KILL $(cat /tmp/polaris-1001/polaris.pid)
- name: Uninstall
run: make uninstall-xdg
- name: Download release
uses: actions/download-artifact@v4
with:
name: linux-release
path: .
- name: Extract release
run: tar -xzvf polaris.tar.gz --strip-components=1
- name: Preview Install
run: make preview-xdg
- name: Preview Install w/ Custom XDG_DATA_HOME
run: make preview-xdg XDG_DATA_HOME=/my/own/xdg/home
- uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Install
run: make install-xdg
- name: Run Polaris
run: $HOME/.local/bin/polaris && sleep 5s
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: kill -KILL $(cat /tmp/polaris-1001/polaris.pid)
- name: Uninstall
run: make uninstall-xdg
package_windows_release:
name: Package Windows Release
runs-on: windows-latest
steps:
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Install Rust Toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
- name: Make release
uses: ./.github/actions/make-windows-release
with:
version-number: '0.0.0'
output-file: polaris.msi
- name: Upload packaged release
uses: actions/upload-artifact@v2
with:
name: windows-release
path: polaris.msi
- name: Checkout Polaris
uses: actions/checkout@v1
- name: Install Rust Toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Make release
uses: ./.github/actions/make-windows-release
with:
version-number: "0.0.0"
output-file: polaris.msi
- name: Upload packaged release
uses: actions/upload-artifact@v4
with:
if-no-files-found: error
name: windows-release
path: polaris.msi
validate_windows_install:
name: Windows Install
@ -118,20 +109,20 @@ jobs:
needs: package_windows_release
steps:
- name: Download release
uses: actions/download-artifact@v2
with:
name: windows-release
path: .
- name: Install
run: msiexec /i polaris.msi /qn
- name: Run Polaris
run: |
start $env:LOCALAPPDATA/Permafrost/Polaris/polaris-cli.exe
sleep 5
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: taskkill /IM polaris-cli.exe
- name: Uninstall
run: msiexec /x polaris.msi /qn
- name: Download release
uses: actions/download-artifact@v4
with:
name: windows-release
path: .
- name: Install
run: msiexec /i polaris.msi /qn
- name: Run Polaris
run: |
start $env:LOCALAPPDATA/Permafrost/Polaris/polaris-cli.exe
sleep 5
- name: Make a request
run: curl -f http://localhost:5050
- name: Stop Polaris
run: taskkill /IM polaris-cli.exe
- name: Uninstall
run: msiexec /x polaris.msi /qn

11
.gitignore vendored
View file

@ -1,3 +1,6 @@
# Dev environment
.direnv
# Build output
target
@ -8,7 +11,13 @@ test-output
TestConfig.toml
# Runtime artifacts
*.sqlite
auth.secret
collection.index
polaris.log
polaris.ndb
polaris.pid
profile.json
/peaks
/thumbnails
# Release process artifacts (usually runs on CI)

10
.vscode/settings.json vendored Normal file
View file

@ -0,0 +1,10 @@
{
"files.watcherExclude": {
"**/target/**": true,
"**/test-output/**": true
},
"files.exclude": {
"**/target": true,
"**/test-output": true
}
}

47
.vscode/tasks.json vendored
View file

@ -1,47 +0,0 @@
{
"version": "2.0.0",
"presentation": {
"reveal": "always"
},
"tasks": [
{
"label": "Run",
"options": {
"cwd": "${workspaceRoot}"
},
"command": "cargo",
"args": [
"run",
"--",
"-c",
"./TestConfigWindows.toml",
"-d",
"test/db.sqlite",
"-w",
"../polaris-web"
],
"problemMatcher": []
},
{
"group": "test",
"label": "Test",
"options": {
"cwd": "${workspaceRoot}"
},
"command": "cargo",
"args": [
"test"
]
},
{
"label": "Compile",
"options": {
"cwd": "${workspaceRoot}"
},
"command": "cargo",
"args": [
"check"
]
}
]
}

427
CHANGELOG.md Normal file
View file

@ -0,0 +1,427 @@
# Changelog
## Unreleased Changes
- Fixed a typo in the log message that is written after applying configuration changes. (thanks @luzpaz)
## Polaris 0.15.0
### Server
- Added support for browsing the music collection by metadata (by artist, by genre, etc.).
- Added support for multi-value metadata for the following song fields: `artist`, `album artist`, `composer`, `genre`, `label` and `lyricist`.
- Added support for structured search query syntax.
- Added capability to extract audio waveform data.
- Configuration data (user credentials, music directories, etc.) is now stored in a plain-text file which Polaris can read and write to.
- ⚠️ The configuration format is now ([documented](docs/CONFIGURATION.md)) and slightly simpler than in previous versions.
- Persistent data, such as playlists, is now saved in a directory that may be configured with the `--data` CLI option or the `POLARIS_DATA_DIR` environment variable.
- ⚠️ Upon first launch, configuration data and playlists will be migrated from the Polaris 0.14.0 database into their new homes. After successful migration, the old database file will be deleted and the server will finally start. This migration functionality will be removed in future Polaris versions.
- Collection scans are now automatically triggered when configuration changes or files are added/removed.
- ⚠️ Dynamic DNS now works with any provider that supports updates over HTTP without header-based auth. This means YDNS is no longer an option, and you need to input a new URL for DDNS updates.
- ⚠️ Removed last.fm integration due to maintenance concerns (abandoned libraries, broken account linking) and mismatch with project goals.
- Removed periodic collection scans.
### Web client
- Every page has been updated to a new visual style.
- The file browser is now displayed as an interactive tree on a single page.
- The file browser now supports common navigation keyboard shortcuts.
- The file browser now supports jumping to a visible file or folder by typing the start of its name.
- The file browser now omits the top-level directory when only one music folder has been configured.
- The current playlist now has two display modes: compact or with album art.
- Songs in the current playlist can now be selected and re-ordered with the mouse.
- Added a button to display statistics about the current playlist.
- Added new pages to browse the music collection by genre.
- Added new pages to browse the music collection by artist.
- Added a new page to browse the music collection by album.
- The Recently Added Albums and Random Albums pages now distinguish albums by file metadata instead of file path.
- When navigating back to the Random Albums page, the shuffle ordering is now preserved.
- The current playlist now supports common navigation keyboard shortcuts.
- The seekbar for the current song being played has been replaced with a waveform visualization.
- The title of the current song in the player can be clicked to display its metadata
- Improved responsiveness when queuing large amounts of songs at once.
- The `Settings > Collection` page now shows the current status of collection scanning.
- Theme preferences have been reset and are now stored client-side.
- Accent color is now configured as a saturation multiplier and base hue, which are used to generate a full color ramp.
### API
- API version is now 8.0.
- Documentation is now served under `/api-docs` instead of `/swagger` (eg. `http://localhost:5050/api-docs`)
- Clients are now expected to send their preferred API major version in an `Accept-Version` header. Omitting this currently defaults to `7`, but will become an error in future Polaris releases. Support for API version 7 will be removed entirely in a future release.
- Most API responses now support gzip compression.
- The response format of the `/browse`, `/flatten`, `/get_playlist`, `/search/<query>` endpoints has been modified to accommodate large lists.
- Added new endpoints to query albums and artists.
- The `/random` and `/recent` albums are deprecated in favor of `/albums/random` and `/albums/recent`. These endpoints now have optional parameters for RNG seeding and pagination.
- The `/search/<query>` endpoint now requires a non-empty query (`/search/` now returns HTTP status code 404, regardless of API version).
- The `/search/<query>` endpoint now supports per-field queries and boolean combinators.
- The `/thumbnail` endpoint supports a new size labeled `tiny`, which returns 40x40px images.
- Added a new `/get_songs` endpoint which returns song metadata in bulk.
- Added a new `/peaks` endpoint which returns audio signal peaks that can be used to draw waveform visualizations.
- Added a new `/index_status` endpoint which returns the status of music collection scans.
- Removed the `/config` and `/preferences` API endpoints.
- Removed the `/ddns` API endpoints, merged into the existing `/settings` endpoints.
## Polaris 0.14.3
### Server
- Fixed a build error (https://github.com/rust-lang/rust/issues/127343) with recent versions of the Rust compiler (thanks @pbsds)
- Added support for m4b audio files (thanks @duydl)
## Polaris 0.14.2
### Server
- Fixed a startup error in Windows packaged builds
## Polaris 0.14.1
### Server
- Fixed compilation issue when using musl toolchains
- Log messages that DDNS is not setup have been downgraded to debug level
### Web client
- Fixed a bug where non-ASCII files or directories were not always alphabetically sorted (thanks @dechamps)
- Fixed a bug where after linking a last.fm account, clicking the account name would not link to the expected page
## Polaris 0.14.0
### General
- Changes are now documented in `CHANGELOG.md` instead of inside individual Github releases
### Server
- API version is now 7.0
- ⚠️ Removed support for authentication via cookies (deprecated in Polaris 0.13.0)
- ⚠️ Removed support for authentication via the `Basic` scheme when using the HTTP `Authorization` header (deprecated in Polaris 0.13.0)
- Fixed a bug where all music sources would be deleted when trying to add sources with duplicate names
- Additional metadata fields are now indexed: lyricist, composer, genre and label (thanks @pmphfm)
- Endpoints returning thumbnail images or audio files no longer use HTTP `content-encoding`
- When indexing files with ID3v2 tags, the "Original Date Released" frame can now be used to populate the year associated with a song
- The `/thumbnail` endpoint now supports an optional parameter for small/large/native image sizing. (thanks @Saecki)
- Log file now contain more details about the cause of failed HTTP requests (3xx, 4xx, 5xx)
- Startup failures now generate clearer error messages
### Web client
- Volume slider now applies non-linearly
- Artist names are now displayed in the Random Albums and Recent Albums pages
## Polaris 0.13.5
### Server
- Added support for AIFF and WAVE files (thanks @gahag)
### Web Client
- Improved performance when scrolling large playlists
- Fixed display and playback issues when a song was used multiple times in a playlist
- Playlist duration can now display number of days
- Fixed a bug where the playlist panel could have blank space in very tall browser windows
- Major dependencies updates
## Polaris 0.13.4
### Server
Adjustments to logging behavior.
On Linux:
- Running without `-f` emits a log file
- Running with `-f` and no `--log` option does not emit a log file
- Running with `-f` and `--log` option emits a log file
On Windows:
- Running with UI feature (`polaris.exe` in releases) emits a log file
- Running without UI feature (`polaris-cli.exe` in releases) and no --log option does not emit a log file
- Running without UI feature (`polaris-cli.exe` in releases) and --log option emits a log file
## Polaris 0.13.3
### Server
- Fixed a bug where music that is no longer on disk was still considered in the collection, even after re-indexing
- On Windows, Polaris now creates a log file
- On Linux, Polaris now creates a log file, even when running with the -f option
## Polaris 0.13.2
### Web client
- Fixed a bug where it was not possible to view or edit which users have administrator rights
- Fixed a bug where, in some cases, drag and dropping a specific disc from an album would not queue the entire disc
## Polaris 0.13.1
### Server
- Fixed a bug where the Windows installer would create unusable installations. #122
## Polaris 0.13.0
### API changes
- Bumped API version number to 6.0.
- Added new endpoints to manage users, mount points and settings more granularly.
- Added support for authenticating via bearer tokens generated by the /auth endpoint. These tokens can be submitted via Bearer HTTP Authorization headers, or as URL parameters (`?auth_token=…`).
- Authentication using cookies or Basic HTTP Authorization headers is deprecated and will be removed in a future revision.
- Authentication cookies no longer expire after 24 hours. The newly added bearer tokens also have no expiration date.
- Last.fm account linking now requires a short-lived auth token obtained from the newly added `lastfm/link_token` endpoint.
Server
- ⚠Breaking change⚠ If you use a config file, the `reindex_every_n_seconds` and `album_art_pattern` fields must now be in a [settings] section.
- ⚠Breaking change⚠ The installation process on Linux has changed a lot. See the README for updated installation instructions. A summary of the changes is available [here](https://github.com/ogarcia/docker-polaris/issues/2).
- Embedded album art is now supported for mp3, flac and m4a files (thanks @Saecki).
- OPUS files can now be indexed and streamed (thanks @zaethan).
- APE files can now be indexed and streamed.
- The collection indexer has been rewritten for better performance. This also fixed an issue where on some machines, the web client would be unusable while indexing (thanks @inicola for the code reviews).
- Thumbnail generation is now slightly faster, and works with more pixel formats (notably RGBA16).
- Polaris now uses actix-web instead of rocket. This change fixes numerous performance and stability issues.
- Sqlite is now bundled by default when building Polaris and was removed from the list of prerequisites. This can be controlled with the `bundle-sqlite` feature flag when compiling Polaris.
- The default album art pattern now includes the jpeg extension in addition to jpg.
- Album art patterns are now case insensitive.
Web client
- ⚠Breaking change⚠ Your current playlist will appear broken after this update. Please clear the current playlist using the trash can icon. Saved playlists are not affected.
- Added a logout button.
- Reworked interface for managing user accounts.
- Added a shuffle button to randomly re-order the content of the current playlist.
- The total duration of the current playlist is now displayed.
- Audio output can now be toggled on/off by clicking the volume icon.
- Individual discs from multi-disc albums can now be dragged into the playlist.
- When browsing to an album, songs are now displayed and queued in filepath order.
- Fixed a bug where albums could not be dragged from the random or recent views.
- Fixed a bug where directories with a # sign in their name could not be browsed to.
## Polaris 0.12.0
### Server
- Library indexing speed is now significantly faster
- When indexing files that have malformed ID3 tags, information preceding the error will no longer be discarded
- Deleted users can no longer make requests using an existing session
- When using a config file, existing users, mounts points and DDNS settings are no longer removed before applying the configuration
- When using a config file to create users, blank usernames are now ignored
- Improved architecture and added more unit tests
API Changes
- API version number bumped to 4.0
- The auth endpoint now returns HTTP cookies instead of a JSON response
- Client requests to update Last.fm status no longer return an error if no Last.fm account is associated with the user
- The thumbnail endpoint now supports an option to disable padding to a square image
Web client
- The web client now uses Vue instead of Riot as its UI framework
- Added support for theming
## Polaris 0.11.0
### Server
- Compatible with current versions of the Rust nightly compiler
- Fixed a rare crash when indexing corrupted mp3 files
- On Linux, Polaris now notifies systemd after starting up
- Release tarball for Linux version now includes a top-level directory
- User sessions no longer break across server restarts (more improvements still to do on this: #36)
- ⚠️ Breaking change: due to improvements in Polaris credentials management, you will have to re-create your users and playlists after upgrading to this version. If you want to preserve your playlists, you can use a program like DB Browser for SQLite to back up your playlists (from db.sqlite within your Polaris installation directory) and restore them after you re-create users with the same names.
### Web client
- Song durations are now listed when available
- Fixed a bug where clicking on breadcrumbs did not always work when the Polaris server is hosted on Windows
- Current track info now shows in browser tab title
- Fixed a semi-rare bug where indexing would not start during initial setup flow
- Improved handling of untagged songs
- Fixed a bug where playlist had padding in Chrome
- Fixed a bug where folder icons did not render on some systems
Thank you to @lnicola for working on most of the server changes!
## Polaris 0.10.0
### Server
- Polaris servers now ship with an interactive API documentation, available at http://localhost:5050/swagger
- When using a prefix URL in Polaris config files, a / will no longer be added automatically at the end of the prefix
### Web client
- Automatically bring up player panel when songs are queued
- Fixed a bug where songs were not always correctly sorted by track number in browser panel
- Fixed a bug where some button hitboxes didn't match their visuals
## Polaris 0.9.0
### Server
- Rewrote all endpoints and server setup using Rocket instead of Iron
- Fixed a bug where special characters in URL to collection folders were not handled correctly (bumped API version number)
- Server API is now unit tested
- Fixed a bug where lastFM integration endpoints did not work
- ⚠️ Compiling Polaris now requires the nightly version of the Rust compiler
### Web client
- Encode special characters in URL to collection folders
## Polaris 0.8.0
### Server
- Added new API endpoints for search
- Added new API endpoints for Last.fm integration
- Thumbnails are now stored as .jpg images instead of .png
- Duration of some audio files is now being indexed
- On Linux when running as a forking process, a .pid file will be written
- Fixed a bug where usernames were inserted in session even after failed authentication
### Web client
- Added search panel
- Added settings tab to link Last.fm account
## Polaris 0.7.1
### Server
- Added support for prefix_url option in configuration files
- Improved performance of thumbnail creation
## Polaris 0.7.0
### Server
- Added support for the Partial-Content HTTP header when serving music, this fixes several streaming/seeking issues when using the web client (especially in Chrome)
- New API endpoints for playlist management
- New command line argument (-p) to run on a custom port (contribution from @jxs)
- New command line argument (-f) to run in foreground on Linux (contribution from @jxs)
- Fixed a bug where tracks were queued out of order
- Updated program icon on Windows
Web client
- Added support for playlists
- Added a button to queue the current directory (thanks @jxs)
## Polaris 0.6.0
### Server
- Internal improvements to database management (now using Diesel)
- Configuration settings are now stored in the database, polaris.toml config files are no longer loaded by default
- Added API endpoints to read and write configuration
- User passwords are now encrypted in storage
- Fixed a bug where results of api/browse were not sorted correctly
Web client
- Settings can now be edited from the web UI
- Collection re-index can now be triggered from the web UI
- Added initial setup configuration flow to help set up first user and mount point
- Visual changes
## Polaris 0.5.1
This is a minor release, pushing quite a bit of internal cleanup in the wild.
Server
- Removed OpenSSL dependency on Windows
- No longer send a HTTP cookie after authentication
## Polaris 0.5.0
This release adds Linux support and a variety of improvements to the web client.
### Server
- Added Linux support
- Moved location of configuration file on Windows to `%appdata%\Permafrost\Polaris\polaris.toml`
### Web client
- Performance improvements from upgrading RiotJS to 3.4.4 (from 2.6.2)
- Added support for browsing random and recently added albums
- Minor visual changes (colors, whitespace, etc.)
- Updated favicon
- Fixed a bug where songs containing special characters in their title would not play
- Persist playlist and player state across sessions
## Polaris 0.4.0
This release adds new features supporting the development of polaris-android.
### Server
- Added API endpoint to pull recently added albums
- Added support for the Authorization HTTP header (in addition to the existing /auth API endpoint)
## Polaris 0.3.0
This release is an intermediate release addressing issues with the installation process and updating internals.
### General
- Fixed missing OpenSSL DLL in Windows installer (fixes Issue #3)
- Split every file into an individual installer component
### Server
- Added API endpoint to pull random albums
- Upgraded dependencies
- Added unit tests to indexing and metadata decoding
### Web client
- Web interface playlist now displays more tracks (enough to fill a 4k monitor at normal font size)
## Polaris 0.2.0
This release is focused on polish and performance, solidifying the basics that were put together in version 0.1.0. Here are the major changes:
### General
- Polaris now has a project logo
- Windows installer now supports upgrading an existing install (from 0.2.0 to higher versions)
- Added support for multi-disc albums
### Server
- Major performance improvements to /browse and /flatten API requests (up to 1000x faster for large requests)
- Added API endpoint for version number
- Album covers are now served as thumbnails rather than at source size
- Moved configuration file outside of /Program Files
- Added support for Ogg Vorbis, FLAC and APE metadata
- Fixed a bug where most albums didn't show an artist name
- Fixed a bug where uppercase extensions were not recognized
- Upgraded compiler to Rust 1.13
### Web client
- Complete visual overhaul of the Polaris web client
- Performance improvements for handling large playlist in Polaris web client
- Added error messages when playing songs in unsupported formats
## Polaris 0.1.0
This is the very first Polaris release, celebrating the minimum viable product!
Features in this release:
- Server application with Windows Installer
- Support for multiple users
- Support for serving custom music directories
- Support for custom album art pattern matching
- Support for broadcasting IP to YDNS
- Web UI to browse collection, manage playlist and listen to music

4191
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -2,70 +2,103 @@
name = "polaris"
version = "0.0.0"
authors = ["Antoine Gersant <antoine.gersant@lesforges.org>"]
edition = "2018"
edition = "2021"
build = "build.rs"
[features]
default = ["bundle-sqlite"]
bundle-sqlite = ["libsqlite3-sys"]
ui = ["uuid", "winapi"]
ui = ["native-windows-gui", "native-windows-derive"]
[profile.release]
lto = "thin"
[dependencies]
actix-files = { version = "0.4" }
actix-web = { version = "3" }
actix-web-httpauth = { version = "0.5.0" }
anyhow = "1.0.35"
ape = "0.3.0"
base64 = "0.13"
branca = "0.10.0"
cookie = { version = "0.14", features = ["signed", "key-expansion"] }
crossbeam-channel = "0.5"
diesel_migrations = { version = "1.4", features = ["sqlite"] }
futures-util = { version = "0.3" }
getopts = "0.2.15"
http = "0.2.2"
id3 = "0.5.1"
libsqlite3-sys = { version = "0.18", features = ["bundled", "bundled-windows"], optional = true }
lewton = "0.10.1"
log = "0.4.5"
metaflac = "0.2.3"
mp3-duration = "0.1.9"
mp4ameta = "0.7.1"
num_cpus = "1.13.0"
opus_headers = "0.1.2"
percent-encoding = "2.1"
pbkdf2 = "0.6"
rand = "0.7"
rayon = "1.3"
regex = "1.3.9"
rustfm-scrobble = "1.1"
serde = { version = "1.0.111", features = ["derive"] }
serde_derive = "1.0.111"
serde_json = "1.0.53"
simplelog = "0.8.0"
thiserror = "1.0.19"
time = "0.2"
toml = "0.5"
ureq = "1.5"
url = "2.1"
ape = "0.6"
axum-extra = { version = "0.10.0", features = ["typed-header"] }
axum-range = { version = "0.5.0" }
bitcode = { version = "0.6.3", features = ["serde"] }
branca = "0.10.1"
chumsky = "0.9.3"
enum-map = { version = "2.7.3", features = ["serde"] }
getopts = "0.2.21"
headers = "0.4"
http = "1.1.0"
icu_collator = "1.5.0"
id3 = "1.14.0"
lasso2 = { version = "0.8.2", features = ["serialize"] }
lewton = "0.10.2"
log = "0.4.22"
metaflac = "0.2.7"
mp3-duration = "0.1.10"
mp4ameta = "0.11.0"
native_db = "0.8.1"
native_model = "0.4.20"
nohash-hasher = "0.2.0"
notify = { version = "6.1.1", default-features = false }
notify-debouncer-full = { version = "0.3.1", default-features = false }
num_cpus = "1.14.0"
# TODO upstream PR: https://github.com/yboettcher/opus_headers/pull/7
opus_headers = { git = "https://github.com/agersant/opus_headers", branch = "multivalue" }
pbkdf2 = "0.11"
rand = "0.8"
rayon = "1.10.0"
regex = "1.10.5"
rusqlite = { version = "0.32.0", features = ["bundled"] }
serde = { version = "1.0.147", features = ["derive"] }
serde_derive = "1.0.147"
serde_json = "1.0.122"
simplelog = "0.12.2"
symphonia = { version = "0.5.4", features = [
"all-codecs",
"all-formats",
"opt-simd",
] }
tinyvec = { version = "1.8.0", features = ["serde"] }
thiserror = "1.0.62"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
tokio-util = { version = "0.7.11", features = ["io"] }
toml = "0.8.19"
tower = { version = "0.5.2" }
tower-http = { version = "0.6.2", features = [
"compression-gzip",
"fs",
"normalize-path",
] }
trie-rs = { version = "0.4.2", features = ["serde"] }
unicase = "2.7.0"
ureq = { version = "2.10.0", default-features = false, features = ["tls"] }
utoipa = { version = "5.3", features = ["axum_extras"] }
utoipa-axum = { version = "0.1" }
utoipa-scalar = { version = "0.2", features = ["axum"] }
[dependencies.diesel]
version = "1.4.5"
default_features = false
features = ["libsqlite3-sys", "r2d2", "sqlite"]
[dependencies.axum]
version = "0.8.1"
default-features = false
features = ["http1", "json", "tokio", "tower-log", "query"]
[dependencies.image]
version = "0.23.12"
default_features = false
version = "0.25.2"
default-features = false
features = ["bmp", "gif", "jpeg", "png"]
[target.'cfg(windows)'.dependencies]
uuid = { version="0.8", optional = true }
winapi = { version = "0.3.3", features = ["winuser", "libloaderapi", "shellapi", "errhandlingapi"], optional = true }
winfolder = { version = "0.1.1" }
native-windows-gui = { version = "1.0.13", default-features = false, features = [
"cursor",
"image-decoder",
"message-window",
"menu",
"tray-notification",
], optional = true }
native-windows-derive = { version = "1.0.5", optional = true }
[target.'cfg(unix)'.dependencies]
sd-notify = "0.1.0"
unix-daemonize = "0.1.2"
daemonize = "0.5"
sd-notify = "0.4.2"
[target.'cfg(windows)'.build-dependencies]
embed-resource = "2.4.2"
winres = "0.1"
[dev-dependencies]
headers = "0.3"
axum-test = "17.0"
bytes = "1.7.1"
percent-encoding = "2.2"

View file

@ -1,38 +1,71 @@
[![Actions Status](https://github.com/agersant/polaris/workflows/Build/badge.svg)](https://github.com/agersant/polaris/actions)
[![codecov.io](http://codecov.io/github/agersant/polaris/branch/master/graphs/badge.svg)](http://codecov.io/github/agersant/polaris)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE-MIT)
<div align="center">
<h1><img src="res/readme/logo.png?raw=true"/></h1>
<img src="res/readme/logo.png?raw=true"/>
Polaris is a music streaming application, designed to let you enjoy your music collection from any computer or mobile device. Polaris works by streaming your music directly from your own computer, without uploading it to a third-party. It is free and open-source software, without any kind of premium version. The only requirement is that your computer stays on while it streams music!
[![Actions Status](https://github.com/agersant/polaris/workflows/Build/badge.svg)](https://github.com/agersant/polaris/actions)
[![codecov](https://codecov.io/github/agersant/polaris/graph/badge.svg?token=EQqCmBEf2T)](https://codecov.io/github/agersant/polaris)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE-MIT)
## Features
- Optimized for large music collections
- Can run on Windows, Linux, BSD, or through Docker
- Listen to your music on the web or using the [Polaris Android](https://github.com/agersant/polaris-android) app
- Easy to setup and configure via the built-in web UI
- Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg` and `opus` files
- Support for album art images
- [Last.fm](https://www.last.fm) scrobbling
- Color themes
- Restrict access to your music collection with user accounts
![Polaris Web UI](res/readme/web_ui.png?raw=true "Polaris Web UI")
</div>
## Tutorials
# About
- [Getting Started](docs/SETUP.md)
- [Streaming From Remote Devices](docs/DDNS.md)
Polaris is a self-hosted music streaming server, to enjoy your music collection from any computer or mobile device. It is free and open-source software, without any kind of premium version.
## Screenshots
The goals of this project are:
- 🔥 Exceptional performance and responsiveness
- 📚️ First-class support for large music collections (100,000+ songs)
- 📦️ Ease of installation, deployment and maintenance
- ✨ Beautiful user interface
![Polaris Web UI](res/readme/web_ui.png?raw=true "Polaris Web UI")
![Polaris Web UI Dark Mode](res/readme/dark_mode.png?raw=true "Polaris Web UI")
# Try It Out!
## Documentation
Check out the demo over at https://demo.polaris.stream, featuring a selection of Creative Commons Music. The credentials to access this server are:
- [Contribute to Polaris](docs/CONTRIBUTING.md)
- [Maintenance Runbooks](docs/MAINTENANCE.md)
Username: `demo_user`
Password: `demo_password`
### API Documentation
The Polaris server API is documented via [Swagger](https://agersant.github.io/polaris/swagger). Please note that this Swagger page does not point to a live Polaris server so the `Try it out` buttons are not expected to work.
Every installation of Polaris also distributes this documentation, with the ability to use the `Try it out` buttons. To access it, simply open http://localhost:5050/swagger/ in your browser on the machine running Polaris.
# Features
Feel free to open Github issues or Pull Requests if clarifications are needed.
- 🖥️ Runs on Windows, Linux, BSD, or through Docker
- 🔊 Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg`, `opus`, `ape`, `wav` and `aiff` files
- 🌈 Dark mode variants and customizable color palette
- 💿️ Browse your music by album, artist or genre
- 📂 Browse your music as a file tree
- 🌊 Song audio-waveform visualization
- 🏷️ Support for multi-value fields in song metadata (eg. multiple artists per song)
- 🔍️ Powerful search functionality with per-field queries
- ⚙️ Plain-text configuration also editable with built-in UI
- 👥 Setup multiple users, each with their own playlists
- 📱 Listen to your music on the go:
- Polaris Android ([Google Play Store](https://play.google.com/store/apps/details?id=agersant.polaris) · [F-Droid](https://f-droid.org/packages/agersant.polaris/) · [Repository](https://github.com/agersant/polaris-android))
- Polarios ([App Store](https://apps.apple.com/app/polarios/id1662366309) · [Repository](https://gitlab.com/elise/Polarios)) [third-party]
# Installation
[Installation documentation](docs/SETUP.md)
[Streaming from remote devices](docs/DDNS.md)
[![Packaging status](https://repology.org/badge/vertical-allrepos/polaris-streaming.svg?columns=3)](https://repology.org/project/polaris-streaming/versions)
# Documentation
- 📒 [Changelog](CHANGELOG.md)
- 🔧 [Configuration](docs/CONFIGURATION.md)
- 👷 [Contribute to Polaris](docs/CONTRIBUTING.md)
- 🛟 [Maintenance Runbooks](docs/MAINTENANCE.md)
The Polaris server API is documented via [OpenAPI](https://demo.polaris.stream/api-docs/). Every installation of Polaris distributes this interactive documentation. To access it, open http://localhost:5050/api-docs/ in your browser on the machine running Polaris.
# Credits & License Information
Music featured in the demo installation:
- [Chris Zabriskie - Abandon Babylon](https://chriszabriskie.bandcamp.com/album/abandon-babylon) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [Chris Zabriskie - Angie's Sunday Service](https://chriszabriskie.bandcamp.com/album/angies-sunday-service) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [glaciære - pool water blue](https://steviasphere.bandcamp.com/album/pool-water-blue) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [glaciære - light ripples](https://steviasphere.bandcamp.com/album/light-ripples) [(License)](https://creativecommons.org/licenses/by/3.0/)
- [Koresma South](https://koresma.bandcamp.com/album/south) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
- [Pete Murphy - Essence EP](https://petemurphy.bandcamp.com/album/falling-down-the-fred-astaires-solo-jazz-piano) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
- [Rameses B - Essence EP](https://ramesesb.bandcamp.com/album/essence-ep) [(License)](https://creativecommons.org/licenses/by-nc-nd/3.0/)

13
build.rs Normal file
View file

@ -0,0 +1,13 @@
//! Build script. On Windows, embeds the application icon and the Windows
//! manifest into the produced executable; on Unix it is a no-op.

#[cfg(windows)]
fn main() {
    // Attach the application icon so it appears in Explorer and the taskbar.
    let mut res = winres::WindowsResource::new();
    res.set_icon("./res/windows/application/icon_polaris_512.ico");
    // A panic here surfaces in the cargo build log; give it a useful message
    // instead of a bare unwrap.
    res.compile()
        .expect("Failed to compile Windows icon resource");
    // Embed the application manifest resource script.
    embed_resource::compile(
        "res/windows/application/polaris-manifest.rc",
        embed_resource::NONE,
    );
}

#[cfg(unix)]
fn main() {}

View file

@ -1,2 +0,0 @@
[print_schema]
file = "src/db/schema.rs"

50
docs/CONFIGURATION.md Normal file
View file

@ -0,0 +1,50 @@
# Configuration
Polaris configuration resides in a single text file whose format is documented below. You can use the Polaris web UI to modify the configuration, or write to it in any text editor. You may edit the configuration file while Polaris is running.
## Location
The location of the configuration file is always logged during Polaris startup. It is determined as follows:
- From the `--config` (or `-c`) CLI option if present. This option must point to the `.toml` file.
- If the CLI option is not specified, Polaris will look for a `polaris.toml` file, inside the directory specified by the `POLARIS_CONFIG_DIR` environment variable _at compilation time_. When using the Windows installer, this will be `%LOCALAPPDATA%/Permafrost/Polaris/polaris.toml`. When using the supplied Makefile, the default is either `/usr/local/etc/polaris` (for a system-wide installation), or `~/.config/polaris` (for an XDG installation).
- If `POLARIS_CONFIG_DIR` was not set when Polaris was compiled, it will default to `.` on Linux, and the `LOCALAPPDATA` location mentioned above on Windows. This behavior on Windows may change in future releases.
## Format
The configuration file uses the [TOML](https://toml.io/) format. Everything in the configuration file is optional and may be omitted (unless mentioned otherwise).
```toml
# Regular expression used to identify album art in files adjacent to an audio file
album_art_pattern = "Folder.(jpeg|jpg|png)"
# A URL Polaris will regularly make requests to in order to update Dynamic DNS
ddns_url = "https://example.com?token=foobar"
# Array of locations Polaris should scan to find music files
[[mount_dirs]]
# Directory to scan
source = "/home/example/music"
# User-facing name for this directory (must be unique)
name = "My Music 🎧️"
[[mount_dirs]]
source = "/mnt/example/more_music"
name = "Extra Music 🎵"
# Array of user accounts who can connect to the Polaris server
[[users]]
# Username for login
name = "example-user"
# If true, user will have access to all settings in the web UI
admin = true
# Plain text password for this user. Will be ignored if hashed_password is set. Polaris will never write to this field. For each user, at least one of initial_password and hashed_password must be set.
initial_password = "top-secret-password"
# Hashed and salted password for the user. Polaris will create this field if unset.
hashed_password = "$pbkdf2-sha256$i=10000,l=32$SI8LjK1KtvcawhgmWGJgRA$t9btMwhUTQ8r3vqI1xhArn19J7Jezyoi461fFjhZXGU"
[[users]]
name = "other-user"
admin = true
initial_password = "amospheric-strawberry64"
```

View file

@ -1,25 +1,37 @@
# Contributing
## Compiling and Running Polaris
## Guidelines
Compiling and running Polaris is very easy as it only depends on the Rust toolchain.
While Polaris is free and open-source software, it is not very open to code contributions. The reasons behind this are:
- Polaris is a hobby project. I don't want it to feel like my day job, where I do a lot of code reviews, mentoring and tech leadership.
- I am committed to maintaining this software for a very long time. I would rather maintain code that I mostly wrote myself.
1. [Install Rust](https://www.rust-lang.org/en-US/install.html)
2. Clone the polaris depot with this command: `git clone --recursive https://github.com/agersant/polaris.git`
This still leaves room for a few avenues to contribute:
- Help answering questions in the issue tracker.
- Package Polaris for a Linux distribution
- Documentation improvements or writing user guides.
- Satellite projects (eg. [docker-polaris](https://github.com/ogarcia/docker-polaris), [polarios](https://gitlab.com/elise/Polarios))
- Bug fixes.
For non-trivial new features, you are welcome to maintain a fork. If you need help finding your way around the code, feel free to open a [discussion thread](https://github.com/agersant/polaris/discussions).
## Compiling and running Polaris
1. [Install Rust](https://www.rust-lang.org/en-US/install.html) (stable toolchain)
2. Clone the polaris depot with this command: `git clone https://github.com/agersant/polaris.git`
3. You can now compile and run Polaris from the newly created directory with the command: `cargo run`
Polaris supports a few command line arguments which are useful during development:
- `-c some/config.toml` sets the location of the [configuration](/docs/CONFIGURATION.md) file.
- `--data some/path` sets the folder Polaris will use to store runtime data such as playlists, collection index and auth secrets.
- `-w some/path/to/web/dir` lets you point to the directory to be served as the web interface. You can find a suitable directory in your Polaris install (under `/web`), or from the [latest polaris-web release](https://github.com/agersant/polaris-web/releases/latest/download/web.zip).
- `-s some/path/to/swagger/dir` lets you point to the directory to be served as the swagger API documentation. You'll probably want to point this to the `/docs/swagger` directory of the polaris repository.
- `-d some/path/to/a/file.db` lets you manually choose where Polaris stores its configuration and music index (you can reuse the same database across multiple runs).
- `-c some/config.toml` lets you use a configuration file to add content to the database. This can be useful if you frequently delete the database and would like to automate the first time flow. The configuration format is not documented but can be inferred by looking at the `Config` struct in `config.rs`.
- `-f` (on Linux) makes Polaris not fork into a separate process.
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -s docs/swagger -d test-output/my.db`
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -c test-config.toml`
While Polaris is running, access the web UI at [http://localhost:5050](http://localhost:5050).
## Running Unit Tests
## Running unit tests
That's the easy part, simply run `cargo test`!

View file

@ -1,4 +1,10 @@
# Streaming From Other Devices
# Streaming from other devices
These instructions apply to users running Polaris on a home network. When deploying to cloud services or VPS, configuration requirements will differ.
## Port forwarding
Configure port forwarding on your router to redirect port 80 traffic to port 5050 on the computer running Polaris. The exact way to do this depends on your router manufacturer and model.
## Dynamic DNS
@ -8,34 +14,8 @@ You can access your Polaris installation from anywhere via your computer's publi
A solution to these problems is to set up Dynamic DNS, so that your installation can always be reached at a fixed URL.
The steps below will walk you through setting up YDNS and Polaris to give your installation a fixed URL. If you have another solution in mind, or prefer using another Dynamic DNS service, skip to the next section.
1. Register for a free account on https://ydns.io
2. On the YDNS website, access the "My Hosts" page and press the + sign for "Add Host"
3. Fill the host form as described below:
- Domain: ydns.eu
- Name: This part is up to you, whatever you enter will be in the URL you use to access Polaris
- Content: Leave the default. Take note of whether the value looks like an IPv4 address (format: xxx.xxx.xxx.xxx) or an IPv6 address (format: xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx)
- Type: Dynamic IP
4. If the content field looked like an IPv4 address: skip to step #6
5. If the content field looked like an IPv6 address:
- Click on your host name (eg. yourdomain.ydns.eu)
- You should now see a page which looks like this:
![YDNS Records](res/ydns_records.png?raw=true "YDNS Records")
- Click on the green "+" icon on the right
- Fill out the new form as described:
- Make sure the `Type` field is set to `A`
- Set content to 0.0.0.0
- You should now be back on the "records" page which was pictured above
- Click on the ID number on the left for the row that has its `Type` listed as `AAAA` (#28717 in the picture above).
- Click on the red trash can icon in the corner to delete this record
- Done!
6. In the Polaris web interface, access the `Dynamic DNS` tab of the settings screen:
- Update the hostname field to match what you set in step 5. (eg. http://yourdomain.ydns.eu)
- Update the username field to the email address you use when creating your YDNS account
- Update the password field with your YDNS API password. You can find this password on https://ydns.io: click on the "User" icon in the top right and then `Preferences > API`.
## Port Forwarding
Configure port forwarding on your router to redirect port 80 towards port 5050 on the computer where you run Polaris. The exact way to do this depends on your router manufacturer and model.
Don't forget to restart Polaris to apply your configuration changes, and access your music from other computers at http://yourdomain.ydns.eu
1. Reserve a URL with a dynamic DNS provider such as https://www.duckdns.org/ or https://freemyip.com/.
2. The dynamic DNS provider gives you a unique Update URL that can be used to tell them where to send traffic. For example, `freemyip.com` gives you this URL immediately after claiming a subdomain. Other providers may show it in your profile page, etc.
3. Access your Polaris instance (http://localhost:5050 by default).
4. Go to the `Settings` page and into the `Dynamic DNS` section.
5. Set the Update URL to the one you obtained in step 2.

View file

@ -1,16 +1,10 @@
# Maintenance
## How to make a release
- Update CHANGELOG.md to reflect new release
- On Github, go to **Actions**, select the **Make Release** workflow and click **Run workflow**
- Select the branch to deploy (usually `master`)
- Input a user-facing version name (eg: **0.13.0**)
- Click the **Run workflow** button
- After CI completes, find the release on Github and write the changelog
- Move the release from Draft to Published
Note that the Github web UI will separate the release from the corresponding tag until published.
## How to change the database schema
- Add a new folder under `migrations` following the existing pattern
- Run `update_db_schema.bat`
- After CI completes, move the release from Draft to Published

View file

@ -1,42 +1,30 @@
# Getting Started
# Installation
## Requirements
## On Windows
One of the following:
- Windows 7 or newer
- Linux (any reasonably modern distribution should do)
### Windows
1. Download the [latest installer](https://github.com/agersant/polaris/releases/latest) (you want the .msi file)
2. Run the installer
3. That's it, you're done!
3. Launch Polaris from the start menu
4. In your web browser, access http://localhost:5050
You can now start Polaris from the start menu or from your desktop, Polaris will also start automatically next time you restart your computer. You can tell when Polaris is running by its icon in the notification area (near the clock and volume controls).
## In a docker container
### Linux
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
#### Dependencies
## From source on Linux
1. Install OpenSSL, SQLite and their headers, and some development tools. These are available from your distribution's package manager. For instance on Ubuntu, execute `sudo apt-get install binutils pkg-config libssl-dev`
### Dependencies
1. Install OpenSSL, SQLite and their respective headers (eg. `sudo apt-get install libsqlite3-dev libssl-dev`).
2. Install `binutils` and `pkg-config` (eg. `sudo apt-get install binutils pkg-config`).
2. Install the Rust compiler by executing `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` or using an [alternative method](https://www.rust-lang.org/en-US/install.html)
#### Polaris installation
### Polaris installation
1. Download the [latest release]((https://github.com/agersant/polaris/releases/latest)) of Polaris (you want the .tar.gz file)
2. Extract the Polaris archive in a directory and open a terminal in that directory
3. To install Polaris within your home directory, execute `make install-xdg`. This installation follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html). You can use `make preview-xdg` to see which directories the install process would use.
4. If you prefer a system-wide install, execute `make install` (without the `-xdg` suffix). If you use `sudo` to perform such a system install, you may need the `-E` option so that your sudo user finds the Rust binaries: `sudo -E make install`. This installation follows the [GNU Standard Installation Directories](https://www.gnu.org/prep/standards/html_node/Directory-Variables.html). You can use `make preview` to see which directories the install process would use.
From here, you might want to adjust your system to run Polaris on login using Systemd, Cron or whichever method your distribution endorses.
If you want to uninstall Polaris, execute `make uninstall-xdg` from the extracted archive's directory (or `make uninstall` if you made a system-wide install). This will delete all the files and directories listed above **including your Polaris database**. If you customized the install process by specifying environment variables like `PREFIX`, make sure they are set to the same values when running the uninstall command.
### In a docker container
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
## Test Run
- Start Polaris using the shortcut on your desktop (Windows) or by running the Polaris executable
- In your Web browser, access http://localhost:5050
- You will see a welcome page that will guide you through the Polaris configuration
If you want to uninstall Polaris, execute `make uninstall-xdg` from the extracted archive's directory (or `make uninstall` if you made a system-wide install). This will delete all the files and directories listed above (including your configuration, playlists, etc.). If you customized the install process by specifying environment variables like `PREFIX`, make sure they are set to the same values when running the uninstall command.

Binary file not shown.

Before

(image error) Size: 665 B

Binary file not shown.

Before

(image error) Size: 628 B

View file

@ -1,60 +0,0 @@
<!-- HTML for static distribution bundle build -->
<!-- Static Swagger UI page rendering the Polaris API description (polaris-api.json). -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Polaris Swagger UI</title>
<link rel="stylesheet" type="text/css" href="swagger-ui.css">
<link rel="icon" type="image/png" href="favicon-32x32.png" sizes="32x32" />
<link rel="icon" type="image/png" href="favicon-16x16.png" sizes="16x16" />
<style>
html {
box-sizing: border-box;
overflow: -moz-scrollbars-vertical;
overflow-y: scroll;
}
*,
*:before,
*:after {
box-sizing: inherit;
}
body {
margin: 0;
background: #fafafa;
}
</style>
</head>
<body>
<!-- Swagger UI renders itself into this container element. -->
<div id="swagger-ui"></div>
<script src="swagger-ui-bundle.js"> </script>
<script src="swagger-ui-standalone-preset.js"> </script>
<script>
window.onload = function() {
// Begin Swagger UI call region
// Instantiate Swagger UI against the bundled API description file.
const ui = SwaggerUIBundle({
url: "polaris-api.json",
dom_id: '#swagger-ui',
deepLinking: true,
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIStandalonePreset
],
plugins: [
SwaggerUIBundle.plugins.DownloadUrl
],
layout: "StandaloneLayout"
})
// End Swagger UI call region
// Expose the instance globally for debugging in the browser console.
window.ui = ui
}
</script>
</body>
</html>

View file

@ -1,67 +0,0 @@
<!doctype html>
<html lang="en-US">
<body onload="run()">
</body>
</html>
<script>
'use strict';
// OAuth2 redirect handler for Swagger UI: runs in the popup window opened
// during the auth flow. Parses the code/token/error parameters returned by
// the auth server, validates the anti-CSRF `state` value, and hands the
// result back to the Swagger UI instance in the opener window.
function run () {
var oauth2 = window.opener.swaggerUIRedirectOauth2;
var sentState = oauth2.state;
var redirectUrl = oauth2.redirectUrl;
var isValid, qp, arr;
// Implicit/token flows return parameters in the URL hash fragment;
// otherwise fall back to the query string.
if (/code|token|error/.test(window.location.hash)) {
qp = window.location.hash.substring(1);
} else {
qp = location.search.substring(1);
}
// Convert "a=1&b=2" into a JSON object, URL-decoding every value via the
// JSON.parse reviver.
arr = qp.split("&")
arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';})
qp = qp ? JSON.parse('{' + arr.join() + '}',
function (key, value) {
return key === "" ? value : decodeURIComponent(value)
}
) : {}
// CSRF check: the state echoed by the server must match the one we sent.
isValid = qp.state === sentState
if ((
oauth2.auth.schema.get("flow") === "accessCode"||
oauth2.auth.schema.get("flow") === "authorizationCode"
) && !oauth2.auth.code) {
// Authorization-code flow: expect a `code` parameter to exchange later.
if (!isValid) {
oauth2.errCb({
authId: oauth2.auth.name,
source: "auth",
level: "warning",
message: "Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"
});
}
if (qp.code) {
delete oauth2.state;
oauth2.auth.code = qp.code;
oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
} else {
// No code received: report the server-provided error if any.
let oauthErrorMsg
if (qp.error) {
oauthErrorMsg = "["+qp.error+"]: " +
(qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
(qp.error_uri ? "More info: "+qp.error_uri : "");
}
oauth2.errCb({
authId: oauth2.auth.name,
source: "auth",
level: "error",
message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server"
});
}
} else {
// Implicit flow (or code already present): pass the parsed params through.
oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
}
window.close();
}
</script>

File diff suppressed because it is too large Load diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -1 +0,0 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"swagger-ui.css","sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

46
flake.lock generated Normal file
View file

@ -0,0 +1,46 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1736701207,
"narHash": "sha256-jG/+MvjVY7SlTakzZ2fJ5dC3V1PrKKrUEOEE30jrOKA=",
"rev": "ed4a395ea001367c1f13d34b1e01aa10290f67d6",
"revCount": 737298,
"type": "tarball",
"url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.1.737298%2Brev-ed4a395ea001367c1f13d34b1e01aa10290f67d6/01945f5f-4175-7e72-8809-a1e482c4a443/source.tar.gz"
},
"original": {
"type": "tarball",
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.%2A.tar.gz"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1736735482,
"narHash": "sha256-QOA4jCDyyUM9Y2Vba+HSZ/5LdtCMGaTE/7NkkUzBr50=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "cf960a1938ee91200fe0d2f7b2582fde2429d562",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

58
flake.nix Normal file
View file

@ -0,0 +1,58 @@
{
description = "A Nix-flake-based Rust development environment";
inputs = {
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.*.tar.gz";
# Provides Rust toolchains as a nixpkgs overlay.
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
};
outputs = { self, nixpkgs, rust-overlay }:
let
supportedSystems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
# Helper: instantiate nixpkgs (with our overlays) for each supported system
# and apply `f` to it.
forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f {
pkgs = import nixpkgs {
inherit system;
overlays = [ rust-overlay.overlays.default self.overlays.default ];
};
});
in
{
overlays.default = final: prev: {
# Pick the Rust toolchain: honor a rust-toolchain(.toml) file at the repo
# root if present, otherwise fall back to the latest stable toolchain
# with rust-src and rustfmt.
rustToolchain =
let
rust = prev.rust-bin;
in
if builtins.pathExists ./rust-toolchain.toml then
rust.fromRustupToolchainFile ./rust-toolchain.toml
else if builtins.pathExists ./rust-toolchain then
rust.fromRustupToolchainFile ./rust-toolchain
else
rust.stable.latest.default.override {
extensions = [ "rust-src" "rustfmt" ];
};
};
# Development shell with the toolchain, build dependencies and common tools.
devShells = forEachSupportedSystem ({ pkgs }: {
default = pkgs.mkShell {
packages = with pkgs; [
rustToolchain
openssl
pkg-config
cargo-deny
cargo-edit
cargo-watch
rust-analyzer
samply
];
env = {
# Required by rust-analyzer
RUST_SRC_PATH = "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
};
};
});
};
}

View file

@ -1,2 +0,0 @@
-- Migration (down): remove the collection index tables.
DROP TABLE directories;
DROP TABLE songs;

View file

@ -1,25 +0,0 @@
-- Migration (up): initial collection index schema.

-- One row per scanned directory; the tag columns hold album-level metadata.
CREATE TABLE directories (
id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL,
parent TEXT,
artist TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
-- Re-indexing the same path replaces the existing row.
UNIQUE(path) ON CONFLICT REPLACE
);

-- One row per audio file, with its decoded tag metadata.
CREATE TABLE songs (
id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL,
parent TEXT NOT NULL,
track_number INTEGER,
disc_number INTEGER,
title TEXT,
artist TEXT,
album_artist TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
-- Re-indexing the same path replaces the existing row.
UNIQUE(path) ON CONFLICT REPLACE
);

View file

@ -1,15 +0,0 @@
-- Migration (down): rebuild `directories` with only the listed columns,
-- copying rows through a temporary table (the copy-and-swap pattern used
-- because SQLite historically lacked ALTER TABLE ... DROP COLUMN).
-- NOTE(review): presumably this reverts the `date_added` column added by the
-- matching up-migration — confirm against the migration folder naming.
CREATE TEMPORARY TABLE directories_backup(id, path, parent, artist, year, album, artwork);
INSERT INTO directories_backup SELECT id, path, parent, artist, year, album, artwork FROM directories;
DROP TABLE directories;
CREATE TABLE directories (
id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL,
parent TEXT,
artist TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
UNIQUE(path) ON CONFLICT REPLACE
);
INSERT INTO directories SELECT * FROM directories_backup;
DROP TABLE directories_backup;

View file

@ -1 +0,0 @@
ALTER TABLE directories ADD COLUMN date_added INTEGER DEFAULT 0 NOT NULL;

View file

@ -1 +0,0 @@
DROP TABLE users;

View file

@ -1,8 +0,0 @@
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
password_salt BLOB NOT NULL,
password_hash BLOB NOT NULL,
admin INTEGER NOT NULL,
UNIQUE(name)
);

View file

@ -1 +0,0 @@
DROP TABLE misc_settings;

View file

@ -1,7 +0,0 @@
CREATE TABLE misc_settings (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
auth_secret TEXT NOT NULL,
index_sleep_duration_seconds INTEGER NOT NULL,
index_album_art_pattern TEXT NOT NULL
);
INSERT INTO misc_settings (id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern) VALUES (0, hex(randomblob(64)), 1800, "Folder.(jpeg|jpg|png)");

View file

@ -1 +0,0 @@
DROP TABLE ddns_config;

View file

@ -1,8 +0,0 @@
CREATE TABLE ddns_config (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
host TEXT NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL
);
INSERT INTO ddns_config (id, host, username, password) VALUES (0, "", "", "");

View file

@ -1 +0,0 @@
DROP TABLE mount_points;

View file

@ -1,6 +0,0 @@
CREATE TABLE mount_points (
id INTEGER PRIMARY KEY NOT NULL,
source TEXT NOT NULL,
name TEXT NOT NULL,
UNIQUE(name)
);

View file

@ -1,2 +0,0 @@
DROP TABLE playlists;
DROP TABLE playlist_songs;

View file

@ -1,16 +0,0 @@
CREATE TABLE playlists (
id INTEGER PRIMARY KEY NOT NULL,
owner INTEGER NOT NULL,
name TEXT NOT NULL,
FOREIGN KEY(owner) REFERENCES users(id) ON DELETE CASCADE,
UNIQUE(owner, name) ON CONFLICT REPLACE
);
CREATE TABLE playlist_songs (
id INTEGER PRIMARY KEY NOT NULL,
playlist INTEGER NOT NULL,
path TEXT NOT NULL,
ordering INTEGER NOT NULL,
FOREIGN KEY(playlist) REFERENCES playlists(id) ON DELETE CASCADE ON UPDATE CASCADE,
UNIQUE(playlist, ordering) ON CONFLICT REPLACE
);

View file

@ -1,11 +0,0 @@
CREATE TEMPORARY TABLE misc_settings_backup(id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern);
INSERT INTO misc_settings_backup SELECT id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
auth_secret TEXT NOT NULL,
index_sleep_duration_seconds INTEGER NOT NULL,
index_album_art_pattern TEXT NOT NULL
);
INSERT INTO misc_settings SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;

View file

@ -1 +0,0 @@
ALTER TABLE misc_settings ADD COLUMN prefix_url TEXT NOT NULL DEFAULT "";

View file

@ -1,19 +0,0 @@
CREATE TEMPORARY TABLE songs_backup(id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork);
INSERT INTO songs_backup SELECT id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork FROM songs;
DROP TABLE songs;
CREATE TABLE songs (
id INTEGER PRIMARY KEY NOT NULL,
path TEXT NOT NULL,
parent TEXT NOT NULL,
track_number INTEGER,
disc_number INTEGER,
title TEXT,
artist TEXT,
album_artist TEXT,
year INTEGER,
album TEXT,
artwork TEXT,
UNIQUE(path) ON CONFLICT REPLACE
);
INSERT INTO songs SELECT * FROM songs_backup;
DROP TABLE songs_backup;

View file

@ -1 +0,0 @@
ALTER TABLE songs ADD COLUMN duration INTEGER;

View file

@ -1,13 +0,0 @@
CREATE TEMPORARY TABLE users_backup(id, name, password_salt, password_hash, admin);
INSERT INTO users_backup SELECT id, name, password_salt, password_hash, admin FROM users;
DROP TABLE users;
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
password_salt BLOB NOT NULL,
password_hash BLOB NOT NULL,
admin INTEGER NOT NULL,
UNIQUE(name)
);
INSERT INTO users SELECT * FROM users_backup;
DROP TABLE users_backup;

View file

@ -1,2 +0,0 @@
ALTER TABLE users ADD COLUMN lastfm_username TEXT;
ALTER TABLE users ADD COLUMN lastfm_session_key TEXT;

View file

@ -1,15 +0,0 @@
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
INSERT INTO misc_settings_backup
SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
auth_secret BLOB NOT NULL DEFAULT (hex(randomblob(32))),
index_sleep_duration_seconds INTEGER NOT NULL,
index_album_art_pattern TEXT NOT NULL,
prefix_url TEXT NOT NULL DEFAULT ""
);
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;

View file

@ -1,15 +0,0 @@
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
INSERT INTO misc_settings_backup
SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
auth_secret BLOB NOT NULL DEFAULT (randomblob(32)),
index_sleep_duration_seconds INTEGER NOT NULL,
index_album_art_pattern TEXT NOT NULL,
prefix_url TEXT NOT NULL DEFAULT ""
);
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;

View file

@ -1,11 +0,0 @@
DROP TABLE users;
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
password_salt BLOB NOT NULL,
password_hash BLOB NOT NULL,
admin INTEGER NOT NULL,
lastfm_username TEXT,
lastfm_session_key TEXT,
UNIQUE(name)
);

View file

@ -1,10 +0,0 @@
DROP TABLE users;
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
password_hash TEXT NOT NULL,
admin INTEGER NOT NULL,
lastfm_username TEXT,
lastfm_session_key TEXT,
UNIQUE(name)
);

View file

@ -1,14 +0,0 @@
CREATE TEMPORARY TABLE users_backup(id, name, password_hash, admin, lastfm_username, lastfm_session_key);
INSERT INTO users_backup SELECT id, name, password_hash, admin, lastfm_username, lastfm_session_key FROM users;
DROP TABLE users;
CREATE TABLE users (
id INTEGER PRIMARY KEY NOT NULL,
name TEXT NOT NULL,
password_hash TEXT NOT NULL,
admin INTEGER NOT NULL,
lastfm_username TEXT,
lastfm_session_key TEXT,
UNIQUE(name)
);
INSERT INTO users SELECT * FROM users_backup;
DROP TABLE users_backup;

View file

@ -1,2 +0,0 @@
ALTER TABLE users ADD COLUMN web_theme_base TEXT;
ALTER TABLE users ADD COLUMN web_theme_accent TEXT;

View file

@ -1 +0,0 @@
ALTER TABLE misc_settings ADD COLUMN prefix_url TEXT NOT NULL DEFAULT "";

View file

@ -1,11 +0,0 @@
CREATE TEMPORARY TABLE misc_settings_backup(id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern);
INSERT INTO misc_settings_backup SELECT id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
auth_secret BLOB NOT NULL DEFAULT (randomblob(32)),
index_sleep_duration_seconds INTEGER NOT NULL,
index_album_art_pattern TEXT NOT NULL
);
INSERT INTO misc_settings SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;

Binary file not shown.

After

(image error) Size: 1.2 MiB

Binary file not shown.

After

(image error) Size: 1.3 MiB

Binary file not shown.

Binary file not shown.

Before

(image error) Size: 107 KiB

After

(image error) Size: 723 KiB

Binary file not shown.

Before

(image error) Size: 256 KiB

After

(image error) Size: 722 KiB

View file

@ -7,21 +7,25 @@ EXEC_PREFIX ?= $(PREFIX)
BINDIR ?= $(EXEC_PREFIX)/bin
DATAROOTDIR ?= $(PREFIX)/share
DATADIR ?= $(DATAROOTDIR)
SYSCONFDIR ?= $(PREFIX)/etc
LOCALSTATEDIR ?= $(PREFIX)/var
RUNSTATEDIR ?= $(LOCALSTATEDIR)/run
%-system: POLARIS_BIN_PATH := $(BINDIR)/polaris
%-system: export POLARIS_WEB_DIR := $(DATADIR)/polaris/web
%-system: export POLARIS_SWAGGER_DIR := $(DATADIR)/polaris/swagger
%-system: export POLARIS_CONFIG_DIR := $(SYSCONFDIR)/polaris
%-system: export POLARIS_DATA_DIR := $(LOCALSTATEDIR)/lib/polaris
%-system: export POLARIS_DB_DIR := $(LOCALSTATEDIR)/lib/polaris
%-system: export POLARIS_LOG_DIR := $(LOCALSTATEDIR)/log/polaris
%-system: export POLARIS_CACHE_DIR := $(LOCALSTATEDIR)/cache/polaris
%-system: export POLARIS_PID_DIR := $(RUNSTATEDIR)/polaris
XDG_CACHE_HOME ?= $(HOME)/.cache
XDG_CONFIG_HOME ?= $(HOME)/.config
XDG_DATA_HOME ?= $(HOME)/.local/share
XDG_BINDIR ?= $(HOME)/.local/bin
XDG_DATADIR ?= $(XDG_DATA_HOME)/polaris
XDG_CACHEDIR ?= $(XDG_CACHE_HOME)/polaris
XDG_CONFIGDIR ?= $(XDG_CONFIG_HOME)/polaris
ifdef $(XDG_RUNTIME_DIR)
XDG_PIDDIR ?= $(XDG_RUNTIME_DIR)/polaris
else
@ -29,7 +33,8 @@ XDG_PIDDIR ?= /tmp/polaris-$(UID)
endif
%-xdg: POLARIS_BIN_PATH := $(XDG_BINDIR)/polaris
%-xdg: export POLARIS_WEB_DIR := $(XDG_DATADIR)/web
%-xdg: export POLARIS_SWAGGER_DIR := $(XDG_DATADIR)/swagger
%-xdg: export POLARIS_CONFIG_DIR := $(XDG_CONFIGDIR)
%-xdg: export POLARIS_DATA_DIR := $(XDG_DATADIR)
%-xdg: export POLARIS_DB_DIR := $(XDG_DATADIR)
%-xdg: export POLARIS_LOG_DIR := $(XDG_CACHEDIR)
%-xdg: export POLARIS_CACHE_DIR := $(XDG_CACHEDIR)
@ -57,7 +62,8 @@ preview: preview-system
list-paths:
$(info POLARIS_BIN_PATH is $(POLARIS_BIN_PATH))
$(info POLARIS_WEB_DIR is $(POLARIS_WEB_DIR))
$(info POLARIS_SWAGGER_DIR is $(POLARIS_SWAGGER_DIR))
$(info POLARIS_CONFIG_DIR is $(POLARIS_CONFIG_DIR))
$(info POLARIS_DATA_DIR is $(POLARIS_DATA_DIR))
$(info POLARIS_DB_DIR is $(POLARIS_DB_DIR))
$(info POLARIS_LOG_DIR is $(POLARIS_LOG_DIR))
$(info POLARIS_CACHE_DIR is $(POLARIS_CACHE_DIR))
@ -74,9 +80,7 @@ install-bin: cargo-build
install-data:
install -d $(POLARIS_WEB_DIR)
install -d $(POLARIS_SWAGGER_DIR)
cp -rT ./web $(POLARIS_WEB_DIR)
cp -rT ./swagger $(POLARIS_SWAGGER_DIR)
# Uninstall
@ -89,7 +93,8 @@ uninstall-bin:
uninstall-data:
rm -rf $(POLARIS_WEB_DIR)
rm -rf $(POLARIS_SWAGGER_DIR)
rm -rf $(POLARIS_CONFIG_DIR)
rm -rf $(POLARIS_DATA_DIR)
rm -rf $(POLARIS_DB_DIR)
rm -rf $(POLARIS_LOG_DIR)
rm -rf $(POLARIS_CACHE_DIR)

View file

@ -3,7 +3,7 @@ echo "Creating output directory"
mkdir -p release/tmp/polaris
echo "Copying package files"
cp -r web docs/swagger src migrations test-data Cargo.toml Cargo.lock rust-toolchain res/unix/Makefile release/tmp/polaris
cp -r web src test-data build.rs Cargo.toml Cargo.lock rust-toolchain.toml res/unix/Makefile release/tmp/polaris
echo "Creating tarball"
tar -zc -C release/tmp -f release/polaris.tar.gz polaris

View file

@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
</requestedPrivileges>
</security>
</trustInfo>
<asmv3:application>
<asmv3:windowsSettings xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">
<dpiAware>true</dpiAware>
</asmv3:windowsSettings>
</asmv3:application>
</assembly>

View file

@ -1,7 +0,0 @@
#define IDI_POLARIS 0x101
#define IDI_POLARIS_TRAY 0x102
CREATEPROCESS_MANIFEST_RESOURCE_ID RT_MANIFEST "application.manifest"
IDI_POLARIS ICON "icon_polaris_512.ico"
IDI_POLARIS_TRAY ICON "icon_polaris_outline_64.ico"

Binary file not shown.

After

(image error) Size: 1.8 KiB

Binary file not shown.

Before

(image error) Size: 31 KiB

View file

@ -0,0 +1,2 @@
#define RT_MANIFEST 24
1 RT_MANIFEST "polaris.exe.manifest"

View file

@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="1.0.0.0"
processorArchitecture="*"
name="app"
type="win32"
/>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="*"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>

View file

@ -49,7 +49,6 @@
<ComponentRef Id="ProgramMenuDir" />
<ComponentRef Id="CleanupExtraData" />
<ComponentGroupRef Id="WebUI" />
<ComponentGroupRef Id="SwaggerUI" />
</Feature>
<Icon Id="polaris.exe" SourceFile="polaris.exe" />
<Property Id="ARPPRODUCTICON" Value="polaris.exe" />

View file

@ -2,23 +2,18 @@ if (!(Test-Path env:POLARIS_VERSION)) {
throw "POLARIS_VERSION environment variable is not defined"
}
"Compiling resource file"
$rc_exe = Join-Path "C:\Program Files (x86)\Windows Kits\10\bin\10.0.18362.0\x64" RC.exe
& $rc_exe /fo res\windows\application\application.res res\windows\application\application.rc
""
"Compiling executable"
# TODO: Uncomment the following once Polaris can do variable expansion of %LOCALAPPDATA%
# And remove the code setting these as defaults in `service/mod.rs`
# $script:INSTALL_DIR = "%LOCALAPPDATA%\Permafrost\Polaris"
# $env:POLARIS_WEB_DIR = "$INSTALL_DIR\web"
# $env:POLARIS_SWAGGER_DIR = "$INSTALL_DIR\swagger"
# $env:POLARIS_DB_DIR = "$INSTALL_DIR"
# $env:POLARIS_LOG_DIR = "$INSTALL_DIR"
# $env:POLARIS_CACHE_DIR = "$INSTALL_DIR"
# $env:POLARIS_PID_DIR = "$INSTALL_DIR"
cargo rustc --release --features "ui" -- -C link-args="/SUBSYSTEM:WINDOWS /ENTRY:mainCRTStartup res\windows\application\application.res"
cargo rustc --release -- -o ".\target\release\polaris-cli.exe" -C link-args="res\windows\application\application.res"
cargo rustc --release --features "ui" -- -o ".\target\release\polaris.exe"
cargo rustc --release -- -o ".\target\release\polaris-cli.exe"
""
"Creating output directory"
@ -33,7 +28,6 @@ Copy-Item .\res\windows\installer\dialog.bmp .\release\tmp\
Copy-Item .\target\release\polaris.exe .\release\tmp\
Copy-Item .\target\release\polaris-cli.exe .\release\tmp\
Copy-Item .\web .\release\tmp\web -recurse
Copy-Item .\docs\swagger .\release\tmp\swagger -recurse
""
"Inserting version number in installer config"
@ -45,15 +39,13 @@ $wxs.Save('.\res\windows\installer\installer.wxs')
"Creating installer"
$heat_exe = Join-Path $env:WIX bin\heat.exe
& $heat_exe dir .\release\tmp\web\ -ag -g1 -dr AppDataPolaris -cg WebUI -sfrag -var wix.WebUIDir -out .\release\tmp\web_ui_fragment.wxs
& $heat_exe dir .\release\tmp\swagger\ -ag -g1 -dr AppDataPolaris -cg SwaggerUI -sfrag -var wix.SwaggerUIDir -out .\release\tmp\swagger_ui_fragment.wxs
$candle_exe = Join-Path $env:WIX bin\candle.exe
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\web_ui_fragment.wixobj .\release\tmp\web_ui_fragment.wxs
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\swagger_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wxs
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\installer.wixobj .\res\windows\installer\installer.wxs
$light_exe = Join-Path $env:WIX bin\light.exe
& $light_exe -dWebUIDir=".\release\tmp\web" -dSwaggerUIDir=".\release\tmp\swagger" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\polaris.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wixobj
& $light_exe -dWebUIDir=".\release\tmp\web" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\polaris.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj
"Cleaning up"
Remove-Item -Recurse .\release\tmp

View file

@ -1 +0,0 @@
stable

4
rust-toolchain.toml Normal file
View file

@ -0,0 +1,4 @@
[toolchain]
channel = "stable"
components = [ "rust-src", "rustfmt" ]
profile = "default"

317
src/app.rs Normal file
View file

@ -0,0 +1,317 @@
use std::fs;
use std::path::{Path, PathBuf};
use log::info;
use rand::rngs::OsRng;
use rand::RngCore;
use tokio::fs::try_exists;
use tokio::task::spawn_blocking;
use crate::app::legacy::*;
use crate::paths::Paths;
pub mod auth;
pub mod config;
pub mod ddns;
pub mod formats;
pub mod index;
pub mod legacy;
pub mod ndb;
pub mod peaks;
pub mod playlist;
pub mod scanner;
pub mod thumbnail;
#[cfg(test)]
pub mod test;
/// Unified error type for every `app` subsystem (scanner, index, config,
/// auth, playlists, DDNS, peaks, thumbnails).
#[derive(thiserror::Error, Debug)]
pub enum Error {
	// Task scheduling and filesystem plumbing.
	#[error(transparent)]
	ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
	#[error(transparent)]
	ThreadJoining(#[from] tokio::task::JoinError),
	// Carries the offending path alongside the underlying I/O error.
	#[error("Filesystem error for `{0}`: `{1}`")]
	Io(PathBuf, std::io::Error),
	#[error(transparent)]
	FileWatch(#[from] notify::Error),
	#[error(transparent)]
	SQL(#[from] rusqlite::Error),

	// Audio tag / container parsing, one variant per metadata library.
	#[error(transparent)]
	Ape(#[from] ape::Error),
	#[error("ID3 error in `{0}`: `{1}`")]
	Id3(PathBuf, id3::Error),
	#[error("Metaflac error in `{0}`: `{1}`")]
	Metaflac(PathBuf, metaflac::Error),
	#[error("Mp4aMeta error in `{0}`: `{1}`")]
	Mp4aMeta(PathBuf, mp4ameta::Error),
	#[error(transparent)]
	Opus(#[from] opus_headers::ParseError),
	#[error(transparent)]
	Vorbis(#[from] lewton::VorbisError),
	#[error("Could not find a Vorbis comment within flac file")]
	VorbisCommentNotFoundInFlacFile,
	#[error("Could not read thumbnail image in `{0}`:\n\n{1}")]
	Image(PathBuf, image::error::ImageError),
	#[error("This file format is not supported: {0}")]
	UnsupportedFormat(&'static str),

	// Audio decoding (symphonia). These deliberately omit #[from]: the same
	// source type maps to several variants, so call sites wrap explicitly.
	// NOTE(review): MediaDecodeError and MediaDecoderError look like
	// near-duplicates — confirm both are intentional before consolidating.
	#[error("No tracks found in audio file: {0}")]
	MediaEmpty(PathBuf),
	#[error(transparent)]
	MediaDecodeError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaDecoderError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaPacketError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaProbeError(symphonia::core::errors::Error),

	// Waveform peaks persistence.
	#[error(transparent)]
	PeaksSerialization(bitcode::Error),
	#[error(transparent)]
	PeaksDeserialization(bitcode::Error),

	// Embedded (native_db) database.
	#[error(transparent)]
	NativeDatabase(#[from] native_db::db_type::Error),
	#[error("Could not initialize database")]
	NativeDatabaseCreationError(native_db::db_type::Error),

	// Dynamic DNS updates.
	#[error("DDNS update query failed with HTTP status code `{0}`")]
	UpdateQueryFailed(u16),
	#[error("DDNS update query failed due to a transport error")]
	UpdateQueryTransport,

	// Configuration and secrets.
	#[error("Auth secret does not have the expected format")]
	AuthenticationSecretInvalid,
	#[error("Missing auth secret")]
	AuthenticationSecretNotFound,
	#[error("Missing settings")]
	MiscSettingsNotFound,
	#[error("Index album art pattern is not a valid regex")]
	IndexAlbumArtPatternInvalid,
	#[error("DDNS update URL is invalid")]
	DDNSUpdateURLInvalid,
	#[error("Could not deserialize configuration: `{0}`")]
	ConfigDeserialization(toml::de::Error),
	#[error("Could not serialize configuration: `{0}`")]
	ConfigSerialization(toml::ser::Error),

	// Music collection index.
	#[error("Could not deserialize collection")]
	IndexDeserializationError,
	#[error("Could not serialize collection")]
	IndexSerializationError,

	// Virtual filesystem (mount points).
	#[error("Invalid Directory")]
	InvalidDirectory(String),
	#[error("The following virtual path could not be mapped to a real path: `{0}`")]
	CouldNotMapToRealPath(PathBuf),
	#[error("The following real path could not be mapped to a virtual path: `{0}`")]
	CouldNotMapToVirtualPath(PathBuf),

	// Lookups that came up empty.
	#[error("User not found")]
	UserNotFound,
	#[error("Directory not found: {0}")]
	DirectoryNotFound(PathBuf),
	#[error("Artist not found")]
	ArtistNotFound,
	#[error("Album not found")]
	AlbumNotFound,
	#[error("Genre not found")]
	GenreNotFound,
	#[error("Song not found")]
	SongNotFound,
	#[error("Invalid search query syntax")]
	SearchQueryParseError,
	#[error("Playlist not found")]
	PlaylistNotFound,
	#[error("No embedded artwork was found in `{0}`")]
	EmbeddedArtworkNotFound(PathBuf),

	// Account management and authentication.
	#[error("Cannot use empty username")]
	EmptyUsername,
	#[error("Cannot use empty password")]
	EmptyPassword,
	#[error("Username already exists")]
	DuplicateUsername,
	#[error("Username does not exist")]
	IncorrectUsername,
	#[error("Password does not match username")]
	IncorrectPassword,
	#[error("Invalid auth token")]
	InvalidAuthToken,
	#[error("Incorrect authorization scope")]
	IncorrectAuthorizationScope,
	#[error("Failed to hash password")]
	PasswordHashing,
	#[error("Failed to encode authorization token")]
	AuthorizationTokenEncoding,
	#[error("Failed to encode Branca token")]
	BrancaTokenEncoding,
}
/// Top-level application state: the listen port, the static web assets
/// location, and one handle per subsystem. Derives `Clone` so copies can be
/// handed to request handlers and background tasks.
#[derive(Clone)]
pub struct App {
	pub port: u16,                // HTTP port the server binds to
	pub web_dir_path: PathBuf,    // directory holding the web client files
	pub ddns_manager: ddns::Manager,
	pub scanner: scanner::Scanner,
	pub index_manager: index::Manager,
	pub config_manager: config::Manager,
	pub peaks_manager: peaks::Manager,
	pub playlist_manager: playlist::Manager,
	pub thumbnail_manager: thumbnail::Manager,
}
impl App {
	/// Builds the full application: creates on-disk directories, loads (or
	/// generates) the auth secret, constructs every subsystem manager, and
	/// runs one-time migrations from the legacy SQLite database.
	pub async fn new(port: u16, paths: Paths) -> Result<Self, Error> {
		fs::create_dir_all(&paths.data_dir_path)
			.map_err(|e| Error::Io(paths.data_dir_path.clone(), e))?;
		fs::create_dir_all(&paths.web_dir_path)
			.map_err(|e| Error::Io(paths.web_dir_path.clone(), e))?;

		// Cache subdirectories for generated waveform peaks and thumbnails.
		let peaks_dir_path = paths.cache_dir_path.join("peaks");
		fs::create_dir_all(&peaks_dir_path).map_err(|e| Error::Io(peaks_dir_path.clone(), e))?;

		let thumbnails_dir_path = paths.cache_dir_path.join("thumbnails");
		fs::create_dir_all(&thumbnails_dir_path)
			.map_err(|e| Error::Io(thumbnails_dir_path.clone(), e))?;

		// The auth secret must be available before the config manager is
		// built, since the config manager uses it to sign auth tokens.
		let auth_secret_file_path = paths.data_dir_path.join("auth.secret");
		Self::migrate_legacy_auth_secret(&paths.db_file_path, &auth_secret_file_path).await?;
		let auth_secret = Self::get_or_create_auth_secret(&auth_secret_file_path).await?;

		let config_manager = config::Manager::new(&paths.config_file_path, auth_secret).await?;
		let ddns_manager = ddns::Manager::new(config_manager.clone());
		let ndb_manager = ndb::Manager::new(&paths.data_dir_path)?;
		let index_manager = index::Manager::new(&paths.data_dir_path).await?;
		let scanner = scanner::Scanner::new(index_manager.clone(), config_manager.clone()).await?;
		let peaks_manager = peaks::Manager::new(peaks_dir_path);
		let playlist_manager = playlist::Manager::new(ndb_manager);
		let thumbnail_manager = thumbnail::Manager::new(thumbnails_dir_path);

		let app = Self {
			port,
			web_dir_path: paths.web_dir_path,
			ddns_manager,
			scanner,
			index_manager,
			config_manager,
			peaks_manager,
			playlist_manager,
			thumbnail_manager,
		};

		// Legacy DB migration needs a fully constructed app (it writes
		// through the config and playlist managers), so it runs last.
		app.migrate_legacy_db(&paths.db_file_path).await?;

		Ok(app)
	}

	/// Copies the auth secret out of a legacy SQLite database into its own
	/// `auth.secret` file. No-op when there is no legacy database, or when
	/// the secret file already exists.
	async fn migrate_legacy_auth_secret(
		db_file_path: &PathBuf,
		secret_file_path: &PathBuf,
	) -> Result<(), Error> {
		if !try_exists(db_file_path)
			.await
			.map_err(|e| Error::Io(db_file_path.clone(), e))?
		{
			return Ok(());
		}

		if try_exists(secret_file_path)
			.await
			.map_err(|e| Error::Io(secret_file_path.clone(), e))?
		{
			return Ok(());
		}

		info!(
			"Migrating auth secret from database at `{}`",
			db_file_path.to_string_lossy()
		);

		// SQLite access is blocking; run it off the async executor.
		let secret = spawn_blocking({
			let db_file_path = db_file_path.clone();
			move || read_legacy_auth_secret(&db_file_path)
		})
		.await??;

		tokio::fs::write(secret_file_path, &secret)
			.await
			.map_err(|e| Error::Io(secret_file_path.clone(), e))?;

		Ok(())
	}

	/// Imports configuration and playlists from a legacy SQLite database,
	/// then deletes it. No-op when the database is absent or contains no
	/// usable config.
	async fn migrate_legacy_db(&self, db_file_path: &PathBuf) -> Result<(), Error> {
		if !try_exists(db_file_path)
			.await
			.map_err(|e| Error::Io(db_file_path.clone(), e))?
		{
			return Ok(());
		}

		let Some(config) = tokio::task::spawn_blocking({
			let db_file_path = db_file_path.clone();
			move || read_legacy_config(&db_file_path)
		})
		.await??
		else {
			return Ok(());
		};

		info!(
			"Found usable config in legacy database at `{}`, beginning migration process",
			db_file_path.to_string_lossy()
		);

		info!("Migrating configuration");
		self.config_manager.apply_config(config).await?;
		self.config_manager.save_config().await?;

		info!("Migrating playlists");
		for (name, owner, songs) in read_legacy_playlists(
			db_file_path,
			self.index_manager.clone(),
			self.scanner.clone(),
		)
		.await?
		{
			self.playlist_manager
				.save_playlist(&name, &owner, songs)
				.await?;
		}

		info!(
			"Deleting legacy database at `{}`",
			db_file_path.to_string_lossy()
		);
		delete_legacy_db(db_file_path).await?;

		info!(
			"Completed migration from `{}`",
			db_file_path.to_string_lossy()
		);

		Ok(())
	}

	/// Reads the 32-byte auth secret from `path`, generating and persisting
	/// a fresh random one when the file does not exist yet.
	async fn get_or_create_auth_secret(path: &Path) -> Result<auth::Secret, Error> {
		match tokio::fs::read(&path).await {
			// File exists: it must be exactly the expected secret length.
			Ok(s) => Ok(auth::Secret(
				s.try_into()
					.map_err(|_| Error::AuthenticationSecretInvalid)?,
			)),
			Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
				let mut secret = auth::Secret::default();
				OsRng.fill_bytes(secret.as_mut());
				// NOTE(review): a write failure here surfaces as
				// AuthenticationSecretInvalid rather than Io — confirm intended.
				tokio::fs::write(&path, &secret)
					.await
					.map_err(|_| Error::AuthenticationSecretInvalid)?;
				Ok(secret)
			}
			Err(e) => return Err(Error::Io(path.to_owned(), e)),
		}
	}
}

95
src/app/auth.rs Normal file
View file

@ -0,0 +1,95 @@
use std::time::{SystemTime, UNIX_EPOCH};
use pbkdf2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
use pbkdf2::Pbkdf2;
use rand::rngs::OsRng;
use serde::{Deserialize, Serialize};
use crate::app::Error;
/// 32-byte secret used to sign and verify authentication tokens.
/// `Default` yields an all-zero secret that callers are expected to fill
/// with random bytes before use.
#[derive(Clone, Default)]
pub struct Secret(pub [u8; 32]);

impl AsRef<[u8]> for Secret {
	fn as_ref(&self) -> &[u8] {
		self.0.as_slice()
	}
}

impl AsMut<[u8]> for Secret {
	fn as_mut(&mut self) -> &mut [u8] {
		self.0.as_mut_slice()
	}
}
/// Opaque, encrypted authentication token handed to clients after login.
#[derive(Debug)]
pub struct Token(pub String);

/// What a token grants access to. Currently a single scope covering
/// general API authentication.
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum Scope {
	PolarisAuth,
}

/// The claims serialized inside a token: which user it belongs to and what
/// scope it was issued for.
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct Authorization {
	pub username: String,
	pub scope: Scope,
}
pub fn hash_password(password: &str) -> Result<String, Error> {
if password.is_empty() {
return Err(Error::EmptyPassword);
}
let salt = SaltString::generate(&mut OsRng);
match Pbkdf2.hash_password(password.as_bytes(), &salt) {
Ok(h) => Ok(h.to_string()),
Err(_) => Err(Error::PasswordHashing),
}
}
/// Checks `attempted_password` against a stored PHC-format hash.
/// Returns `false` both for a mismatch and for an unparseable hash.
pub fn verify_password(password_hash: &str, attempted_password: &str) -> bool {
	PasswordHash::new(password_hash)
		.map(|parsed| {
			Pbkdf2
				.verify_password(attempted_password.as_bytes(), &parsed)
				.is_ok()
		})
		.unwrap_or(false)
}
pub fn generate_auth_token(
authorization: &Authorization,
auth_secret: &Secret,
) -> Result<Token, Error> {
let serialized_authorization =
serde_json::to_string(&authorization).or(Err(Error::AuthorizationTokenEncoding))?;
branca::encode(
serialized_authorization.as_bytes(),
auth_secret.as_ref(),
SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_secs() as u32,
)
.or(Err(Error::BrancaTokenEncoding))
.map(Token)
}
pub fn decode_auth_token(
auth_token: &Token,
scope: Scope,
auth_secret: &Secret,
) -> Result<Authorization, Error> {
let Token(data) = auth_token;
let ttl = match scope {
Scope::PolarisAuth => 0, // permanent
};
let authorization =
branca::decode(data, auth_secret.as_ref(), ttl).map_err(|_| Error::InvalidAuthToken)?;
let authorization: Authorization =
serde_json::from_slice(&authorization[..]).map_err(|_| Error::InvalidAuthToken)?;
if authorization.scope != scope {
return Err(Error::IncorrectAuthorizationScope);
}
Ok(authorization)
}

338
src/app/config.rs Normal file
View file

@ -0,0 +1,338 @@
use std::{
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
use log::{error, info};
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
use notify_debouncer_full::{Debouncer, FileIdMap};
use regex::Regex;
use tokio::sync::{futures::Notified, Notify, RwLock};
use crate::app::Error;
mod mounts;
pub mod storage;
mod user;
pub use mounts::*;
pub use user::*;
use super::auth;
/// Validated, in-memory representation of the server configuration.
/// Mirrors `storage::Config`, with raw string fields parsed into typed
/// values (`Regex`, `http::Uri`).
#[derive(Debug, Clone, Default)]
pub struct Config {
	pub album_art_pattern: Option<Regex>,   // filename pattern matching folder artwork
	pub ddns_update_url: Option<http::Uri>, // endpoint polled for dynamic DNS updates
	pub mount_dirs: Vec<MountDir>,
	pub users: Vec<User>,
}
impl TryFrom<storage::Config> for Config {
type Error = Error;
fn try_from(c: storage::Config) -> Result<Self, Self::Error> {
let mut config = Config::default();
config.set_mounts(c.mount_dirs)?;
config.set_users(c.users)?;
config.album_art_pattern = match c.album_art_pattern.as_deref().map(Regex::new) {
Some(Ok(u)) => Some(u),
Some(Err(_)) => return Err(Error::IndexAlbumArtPatternInvalid),
None => None,
};
config.ddns_update_url = match c.ddns_update_url.map(http::Uri::try_from) {
Some(Ok(u)) => Some(u),
Some(Err(_)) => return Err(Error::DDNSUpdateURLInvalid),
None => None,
};
Ok(config)
}
}
impl From<Config> for storage::Config {
fn from(c: Config) -> Self {
Self {
album_art_pattern: c.album_art_pattern.map(|p| p.as_str().to_owned()),
mount_dirs: c.mount_dirs.into_iter().map(|d| d.into()).collect(),
ddns_update_url: c.ddns_update_url.map(|u| u.to_string()),
users: c.users.into_iter().map(|u| u.into()).collect(),
}
}
}
/// Shared handle to the configuration: holds the parsed config behind a
/// read-write lock, persists it to disk, and watches the config file for
/// external edits.
#[derive(Clone)]
pub struct Manager {
	config_file_path: PathBuf,
	config: Arc<RwLock<Config>>,
	auth_secret: auth::Secret,
	// Kept alive so the debounced file watcher keeps running; never read.
	#[allow(dead_code)]
	file_watcher: Arc<Debouncer<RecommendedWatcher, FileIdMap>>,
	// Signaled whenever the config changes (file reload or API mutation).
	change_notify: Arc<Notify>,
}
impl Manager {
pub async fn new(config_file_path: &Path, auth_secret: auth::Secret) -> Result<Self, Error> {
if let Some(parent) = config_file_path.parent() {
tokio::fs::create_dir_all(parent)
.await
.map_err(|e| Error::Io(parent.to_owned(), e))?;
}
match tokio::fs::File::create_new(config_file_path).await {
Ok(_) => (),
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => (),
Err(e) => {
error!("Failed to create config file at {config_file_path:#?}: {e}");
return Err(Error::Io(config_file_path.to_owned(), e));
}
};
let notify = Arc::new(Notify::new());
let mut debouncer = notify_debouncer_full::new_debouncer(Duration::from_secs(1), None, {
let notify = notify.clone();
move |_| {
notify.notify_waiters();
}
})?;
debouncer
.watcher()
.watch(&config_file_path, RecursiveMode::NonRecursive)?;
let manager = Self {
config_file_path: config_file_path.to_owned(),
config: Arc::new(RwLock::new(Config::default())),
auth_secret,
file_watcher: Arc::new(debouncer),
change_notify: Arc::default(),
};
tokio::task::spawn({
let manager = manager.clone();
async move {
loop {
notify.notified().await;
if let Err(e) = manager.reload_config().await {
error!("Configuration error: {e}");
} else {
info!("Successfully applied configuration change");
}
}
}
});
manager.reload_config().await?;
Ok(manager)
}
pub fn on_config_change(&self) -> Notified {
self.change_notify.notified()
}
async fn reload_config(&self) -> Result<(), Error> {
let config = Self::read_config(&self.config_file_path).await?;
self.apply_config(config).await
}
async fn read_config(config_file_path: &Path) -> Result<storage::Config, Error> {
let config_content = tokio::fs::read_to_string(config_file_path)
.await
.map_err(|e| Error::Io(config_file_path.to_owned(), e))?;
toml::de::from_str::<storage::Config>(&config_content).map_err(Error::ConfigDeserialization)
}
pub async fn save_config(&self) -> Result<(), Error> {
let serialized = toml::ser::to_string_pretty::<storage::Config>(
&self.config.read().await.clone().into(),
)
.map_err(Error::ConfigSerialization)?;
tokio::fs::write(&self.config_file_path, serialized.as_bytes())
.await
.map_err(|e| Error::Io(self.config_file_path.clone(), e))?;
Ok(())
}
pub async fn apply_config(&self, new_config: storage::Config) -> Result<(), Error> {
let mut config = self.config.write().await;
*config = new_config.try_into()?;
self.change_notify.notify_waiters();
Ok(())
}
async fn mutate<F: FnOnce(&mut Config)>(&self, op: F) -> Result<(), Error> {
self.mutate_fallible(|c| {
op(c);
Ok(())
})
.await
}
async fn mutate_fallible<F: FnOnce(&mut Config) -> Result<(), Error>>(
&self,
op: F,
) -> Result<(), Error> {
{
let mut config = self.config.write().await;
op(&mut config)?;
}
self.change_notify.notify_waiters();
self.save_config().await?;
Ok(())
}
pub async fn get_index_album_art_pattern(&self) -> Regex {
let config = self.config.read().await;
let pattern = config.album_art_pattern.clone();
pattern.unwrap_or_else(|| Regex::new("Folder.(jpeg|jpg|png)").unwrap())
}
pub async fn set_index_album_art_pattern(&self, regex: Regex) -> Result<(), Error> {
self.mutate(|c| {
c.album_art_pattern = Some(regex);
})
.await
}
pub async fn get_ddns_update_url(&self) -> Option<http::Uri> {
self.config.read().await.ddns_update_url.clone()
}
pub async fn set_ddns_update_url(&self, url: Option<http::Uri>) -> Result<(), Error> {
self.mutate(|c| {
c.ddns_update_url = url;
})
.await
}
pub async fn get_users(&self) -> Vec<User> {
self.config.read().await.users.iter().cloned().collect()
}
pub async fn get_user(&self, username: &str) -> Result<User, Error> {
let config = self.config.read().await;
config
.get_user(username)
.cloned()
.ok_or(Error::UserNotFound)
}
pub async fn create_user(
&self,
username: &str,
password: &str,
admin: bool,
) -> Result<(), Error> {
self.mutate_fallible(|c| c.create_user(username, password, admin))
.await
}
/// Verifies the credentials and issues an auth token on success.
pub async fn login(&self, username: &str, password: &str) -> Result<auth::Token, Error> {
	self.config
		.read()
		.await
		.login(username, password, &self.auth_secret)
}
/// Grants or revokes admin rights for a user and persists the configuration.
pub async fn set_is_admin(&self, username: &str, is_admin: bool) -> Result<(), Error> {
	self.mutate_fallible(|config| config.set_is_admin(username, is_admin))
		.await
}
/// Replaces a user's password (stored hashed) and persists the configuration.
pub async fn set_password(&self, username: &str, password: &str) -> Result<(), Error> {
	self.mutate_fallible(|config| config.set_password(username, password))
		.await
}
/// Decodes and validates an auth token for the given scope.
pub async fn authenticate(
	&self,
	auth_token: &auth::Token,
	scope: auth::Scope,
) -> Result<auth::Authorization, Error> {
	self.config
		.read()
		.await
		.authenticate(auth_token, scope, &self.auth_secret)
}
/// Removes a user account (no-op if absent) and persists the configuration.
pub async fn delete_user(&self, username: &str) -> Result<(), Error> {
	self.mutate(|config| config.delete_user(username)).await
}
/// Returns a snapshot of the configured mount points.
pub async fn get_mounts(&self) -> Vec<MountDir> {
	// `Vec: Clone` — a direct clone replaces the element-wise
	// `.iter().cloned().collect()` chain.
	self.config.read().await.mount_dirs.clone()
}
/// Maps a virtual path (rooted at a mount name) to a real filesystem path.
pub async fn resolve_virtual_path<P: AsRef<Path>>(
	&self,
	virtual_path: P,
) -> Result<PathBuf, Error> {
	self.config
		.read()
		.await
		.resolve_virtual_path(virtual_path)
}
/// Replaces the mount point list and persists the configuration.
pub async fn set_mounts(&self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
	self.mutate_fallible(|config| config.set_mounts(mount_dirs))
		.await
}
}
#[cfg(test)]
mod test {
	use crate::app::test;
	use crate::test_name;

	use super::*;

	// A blank config file should deserialize to the default configuration.
	#[tokio::test]
	async fn blank_config_round_trip() {
		let config_path = PathBuf::from_iter(["test-data", "blank.toml"]);
		let manager = Manager::new(&config_path, auth::Secret([0; 32]))
			.await
			.unwrap();
		let config: storage::Config = manager.config.read().await.clone().into();
		assert_eq!(config, storage::Config::default());
	}

	// Fields from test-data/config.toml should round-trip into storage::Config.
	#[tokio::test]
	async fn can_read_config() {
		let config_path = PathBuf::from_iter(["test-data", "config.toml"]);
		let manager = Manager::new(&config_path, auth::Secret([0; 32]))
			.await
			.unwrap();
		let config: storage::Config = manager.config.read().await.clone().into();
		assert_eq!(
			config.album_art_pattern,
			Some(r#"^Folder\.(png|jpg|jpeg)$"#.to_owned())
		);
		assert_eq!(
			config.mount_dirs,
			vec![storage::MountDir {
				source: PathBuf::from("test-data/small-collection"),
				name: "root".to_owned(),
			}]
		);
		assert_eq!(config.users[0].name, "test_user");
		assert_eq!(config.users[0].admin, Some(true));
		assert_eq!(
			config.users[0].initial_password,
			Some("very_secret_password".to_owned())
		);
		// The plain-text initial password should have produced a stored hash.
		assert!(config.users[0].hashed_password.is_some());
	}

	// A mutation made through one manager must be readable by a second
	// manager pointed at the same file, proving changes reach the disk.
	#[tokio::test]
	async fn can_write_config() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user("Walter", "example_password", false)
			.await
			.unwrap();

		let manager = Manager::new(&ctx.config_manager.config_file_path, auth::Secret([0; 32]))
			.await
			.unwrap();

		assert!(manager.get_user("Walter").await.is_ok());
	}
}

View file

@ -1,11 +0,0 @@
/// Error type of the legacy config module; collapses every failure into a
/// single opaque variant.
#[derive(thiserror::Error, Debug)]
pub enum Error {
	#[error("Unspecified")]
	Unspecified,
}
// Lets `?` convert anyhow results inside this module, discarding all detail.
impl From<anyhow::Error> for Error {
	fn from(_: anyhow::Error) -> Self {
		Error::Unspecified
	}
}

View file

@ -1,83 +0,0 @@
use super::*;
use crate::app::{ddns, settings, user, vfs};
/// Aggregates the sub-managers needed to apply a parsed configuration file.
#[derive(Clone)]
pub struct Manager {
	settings_manager: settings::Manager,
	user_manager: user::Manager,
	vfs_manager: vfs::Manager,
	ddns_manager: ddns::Manager,
}
impl Manager {
	pub fn new(
		settings_manager: settings::Manager,
		user_manager: user::Manager,
		vfs_manager: vfs::Manager,
		ddns_manager: ddns::Manager,
	) -> Self {
		Self {
			settings_manager,
			user_manager,
			vfs_manager,
			ddns_manager,
		}
	}

	/// Applies each present section of `config` to its owning manager.
	/// Any underlying failure is flattened into `Error::Unspecified`.
	pub fn apply(&self, config: &Config) -> Result<(), Error> {
		if let Some(new_settings) = &config.settings {
			self.settings_manager
				.amend(new_settings)
				.map_err(|_| Error::Unspecified)?;
		}

		if let Some(mount_dirs) = &config.mount_dirs {
			self.vfs_manager
				.set_mount_dirs(&mount_dirs)
				.map_err(|_| Error::Unspecified)?;
		}

		if let Some(ddns_config) = &config.ydns {
			self.ddns_manager
				.set_config(&ddns_config)
				.map_err(|_| Error::Unspecified)?;
		}

		if let Some(ref users) = config.users {
			let old_users: Vec<user::User> =
				self.user_manager.list().map_err(|_| Error::Unspecified)?;

			// Delete users that are not in new list
			for old_user in old_users
				.iter()
				.filter(|old_user| !users.iter().any(|u| u.name == old_user.name))
			{
				self.user_manager
					.delete(&old_user.name)
					.map_err(|_| Error::Unspecified)?;
			}

			// Insert new users
			for new_user in users
				.iter()
				.filter(|u| !old_users.iter().any(|old_user| old_user.name == u.name))
			{
				self.user_manager
					.create(new_user)
					.map_err(|_| Error::Unspecified)?;
			}

			// Update users
			// NOTE(review): this also re-sets password/admin for users that
			// were just created above — harmless but redundant.
			for user in users {
				self.user_manager
					.set_password(&user.name, &user.password)
					.map_err(|_| Error::Unspecified)?;
				self.user_manager
					.set_is_admin(&user.name, user.admin)
					.map_err(|_| Error::Unspecified)?;
			}
		}

		Ok(())
	}
}

View file

@ -1,31 +0,0 @@
use serde::Deserialize;
use std::io::Read;
use std::path;
use crate::app::{ddns, settings, user, vfs};
mod error;
mod manager;
#[cfg(test)]
mod test;
pub use error::*;
pub use manager::*;
/// Deserialized configuration file; every section is optional so a partial
/// file only updates the sections it mentions.
#[derive(Default, Deserialize)]
pub struct Config {
	pub settings: Option<settings::NewSettings>,
	pub mount_dirs: Option<Vec<vfs::MountDir>>,
	pub ydns: Option<ddns::Config>,
	pub users: Option<Vec<user::NewUser>>,
}
impl Config {
	/// Reads and parses a TOML configuration file.
	///
	/// # Errors
	/// Fails when the file cannot be read or does not parse as TOML.
	pub fn from_path(path: &path::Path) -> anyhow::Result<Config> {
		// `fs::read_to_string` replaces the manual open + read_to_string
		// sequence and sizes its buffer from file metadata.
		let config_file_content = std::fs::read_to_string(path)?;
		let config = toml::de::from_str::<Self>(&config_file_content)?;
		Ok(config)
	}
}

149
src/app/config/mounts.rs Normal file
View file

@ -0,0 +1,149 @@
use std::{
ops::Deref,
path::{Path, PathBuf},
};
use regex::Regex;
use crate::app::Error;
use super::storage;
use super::Config;
/// A validated mount point: a real filesystem `source` directory exposed
/// under the virtual `name`.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct MountDir {
	pub source: PathBuf,
	pub name: String,
}
impl TryFrom<storage::MountDir> for MountDir {
	type Error = Error;

	// Converts the serialized form, normalizing path separators on the way in.
	fn try_from(mount_dir: storage::MountDir) -> Result<Self, Self::Error> {
		// TODO validation
		Ok(Self {
			source: sanitize_path(&mount_dir.source),
			name: mount_dir.name,
		})
	}
}
impl From<MountDir> for storage::MountDir {
fn from(m: MountDir) -> Self {
Self {
source: m.source,
name: m.name,
}
}
}
impl Config {
pub fn set_mounts(&mut self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
let mut new_mount_dirs = Vec::new();
for mount_dir in mount_dirs {
let mount_dir = <storage::MountDir as TryInto<MountDir>>::try_into(mount_dir)?;
new_mount_dirs.push(mount_dir);
}
new_mount_dirs.dedup_by(|a, b| a.name == b.name);
self.mount_dirs = new_mount_dirs;
Ok(())
}
pub fn resolve_virtual_path<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> {
for mount in &self.mount_dirs {
if let Ok(p) = virtual_path.as_ref().strip_prefix(&mount.name) {
return if p.components().count() == 0 {
Ok(mount.source.clone())
} else {
Ok(mount.source.join(p))
};
}
}
Err(Error::CouldNotMapToRealPath(virtual_path.as_ref().into()))
}
}
/// Normalizes path separators: every `/` or `\` in `source` is replaced with
/// the platform's `MAIN_SEPARATOR`, so configs written on one OS work on
/// another.
///
/// The previous version compiled a `Regex` on every call for what is a
/// per-character substitution; a plain char map does the same work with no
/// regex dependency. The parameter is widened from `&PathBuf` to `&Path`
/// (callers coerce automatically).
fn sanitize_path(source: &Path) -> PathBuf {
	let sanitized: String = source
		.to_string_lossy()
		.chars()
		.map(|c| {
			if c == '/' || c == '\\' {
				std::path::MAIN_SEPARATOR
			} else {
				c
			}
		})
		.collect();
	PathBuf::from(sanitized)
}
#[cfg(test)]
mod test {
	use super::*;

	// Virtual paths rooted at a mount name should map onto its source dir.
	#[test]
	fn can_resolve_virtual_paths() {
		let raw_config = storage::Config {
			mount_dirs: vec![storage::MountDir {
				name: "root".to_owned(),
				source: PathBuf::from("test_dir"),
			}],
			..Default::default()
		};

		let config: Config = raw_config.try_into().unwrap();

		let test_cases = vec![
			(vec!["root"], vec!["test_dir"]),
			(
				vec!["root", "somewhere", "something.png"],
				vec!["test_dir", "somewhere", "something.png"],
			),
		];

		for (r#virtual, real) in test_cases {
			let real_path: PathBuf = real.iter().collect();
			let virtual_path: PathBuf = r#virtual.iter().collect();
			let converted_path = config.resolve_virtual_path(&virtual_path).unwrap();
			assert_eq!(converted_path, real_path);
		}
	}

	// Mixed `/` and `\` separators should normalize to the platform separator.
	#[test]
	fn sanitizes_paths() {
		let mut correct_path = PathBuf::new();
		if cfg!(target_os = "windows") {
			correct_path.push("C:\\");
		} else {
			correct_path.push("/usr");
		}
		correct_path.push("some");
		correct_path.push("path");

		let tests = if cfg!(target_os = "windows") {
			vec![
				r#"C:/some/path"#,
				r#"C:\some\path"#,
				r#"C:\some\path\"#,
				r#"C:\some\path\\\\"#,
				r#"C:\some/path//"#,
			]
		} else {
			vec![
				r#"/usr/some/path"#,
				r#"/usr\some\path"#,
				r#"/usr\some\path\"#,
				r#"/usr\some\path\\\\"#,
				r#"/usr\some/path//"#,
			]
		};

		for test in tests {
			let raw_config = storage::Config {
				mount_dirs: vec![storage::MountDir {
					name: "root".to_owned(),
					source: PathBuf::from(test),
				}],
				..Default::default()
			};
			let config: Config = raw_config.try_into().unwrap();
			let converted_path = config.resolve_virtual_path(&PathBuf::from("root")).unwrap();
			assert_eq!(converted_path, correct_path);
		}
	}
}

32
src/app/config/storage.rs Normal file
View file

@ -0,0 +1,32 @@
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// On-disk (TOML) representation of a user account.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct User {
	pub name: String,
	#[serde(skip_serializing_if = "Option::is_none")]
	pub admin: Option<bool>,
	// Plain-text password as typed into the config file by the operator.
	#[serde(skip_serializing_if = "Option::is_none")]
	pub initial_password: Option<String>,
	// Hash derived from `initial_password` or set programmatically.
	#[serde(skip_serializing_if = "Option::is_none")]
	pub hashed_password: Option<String>,
}
/// On-disk (TOML) representation of a mount point.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct MountDir {
	pub source: PathBuf,
	pub name: String,
}
/// Root of the TOML configuration file. Absent options and empty collections
/// are skipped when serializing to keep the written file minimal.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct Config {
	#[serde(skip_serializing_if = "Option::is_none")]
	pub album_art_pattern: Option<String>,
	#[serde(default, skip_serializing_if = "Vec::is_empty")]
	pub mount_dirs: Vec<MountDir>,
	#[serde(skip_serializing_if = "Option::is_none")]
	pub ddns_update_url: Option<String>,
	#[serde(default, skip_serializing_if = "Vec::is_empty")]
	pub users: Vec<User>,
}

View file

@ -1,155 +0,0 @@
use std::fs;
use std::path::PathBuf;
use super::*;
use crate::app::{settings, user, vfs};
use crate::db::DB;
use crate::test_name;
// Creates a fresh database file under test-output/, deleting any leftover
// from a previous run. NOTE(review): the #[cfg(test)] here is presumably
// redundant if this whole file is already test-only — confirm.
#[cfg(test)]
fn get_test_db(name: &str) -> DB {
	let mut db_path = PathBuf::new();
	db_path.push("test-output");
	fs::create_dir_all(&db_path).unwrap();

	db_path.push(name);
	if db_path.exists() {
		fs::remove_file(&db_path).unwrap();
	}

	DB::new(&db_path).unwrap()
}
// Applying a config with a `settings` section should persist those settings.
#[test]
fn apply_saves_misc_settings() {
	let db = get_test_db(&test_name!());
	let settings_manager = settings::Manager::new(db.clone());
	let auth_secret = settings_manager.get_auth_secret().unwrap();
	let user_manager = user::Manager::new(db.clone(), auth_secret);
	let vfs_manager = vfs::Manager::new(db.clone());
	let ddns_manager = ddns::Manager::new(db.clone());
	let config_manager = Manager::new(
		settings_manager.clone(),
		user_manager.clone(),
		vfs_manager.clone(),
		ddns_manager.clone(),
	);

	let new_config = Config {
		settings: Some(settings::NewSettings {
			album_art_pattern: Some("🖼️\\.jpg".into()),
			reindex_every_n_seconds: Some(100),
			..Default::default()
		}),
		..Default::default()
	};
	config_manager.apply(&new_config).unwrap();
	let settings = settings_manager.read().unwrap();
	let new_settings = new_config.settings.unwrap();
	assert_eq!(
		settings.album_art_pattern,
		new_settings.album_art_pattern.unwrap()
	);
	assert_eq!(
		settings.reindex_every_n_seconds,
		new_settings.reindex_every_n_seconds.unwrap()
	);
}
// Applying a config with mount points should persist them in the VFS manager.
#[test]
fn apply_saves_mount_points() {
	let db = get_test_db(&test_name!());
	let settings_manager = settings::Manager::new(db.clone());
	let auth_secret = settings_manager.get_auth_secret().unwrap();
	let user_manager = user::Manager::new(db.clone(), auth_secret);
	let vfs_manager = vfs::Manager::new(db.clone());
	let ddns_manager = ddns::Manager::new(db.clone());
	let config_manager = Manager::new(
		settings_manager.clone(),
		user_manager.clone(),
		vfs_manager.clone(),
		ddns_manager.clone(),
	);

	let new_config = Config {
		mount_dirs: Some(vec![vfs::MountDir {
			source: "/home/music".into(),
			name: "🎵📁".into(),
		}]),
		..Default::default()
	};
	config_manager.apply(&new_config).unwrap();
	let actual_mount_dirs: Vec<vfs::MountDir> = vfs_manager.mount_dirs().unwrap();
	assert_eq!(actual_mount_dirs, new_config.mount_dirs.unwrap());
}
// Applying a config with a `ydns` section should persist the DDNS settings.
#[test]
fn apply_saves_ddns_settings() {
	use crate::app::ddns;

	let db = get_test_db(&test_name!());
	let settings_manager = settings::Manager::new(db.clone());
	let auth_secret = settings_manager.get_auth_secret().unwrap();
	let user_manager = user::Manager::new(db.clone(), auth_secret);
	let vfs_manager = vfs::Manager::new(db.clone());
	let ddns_manager = ddns::Manager::new(db.clone());
	let config_manager = Manager::new(
		settings_manager.clone(),
		user_manager.clone(),
		vfs_manager.clone(),
		ddns_manager.clone(),
	);

	let new_config = Config {
		ydns: Some(ddns::Config {
			host: "🐸🐸🐸.ydns.eu".into(),
			username: "kfr🐸g".into(),
			password: "tasty🐞".into(),
		}),
		..Default::default()
	};
	config_manager.apply(&new_config).unwrap();
	let actual_ddns = ddns_manager.config().unwrap();
	assert_eq!(actual_ddns, new_config.ydns.unwrap());
}
// Re-applying a config for an existing user should update the admin flag.
#[test]
fn apply_can_toggle_admin() {
	let db = get_test_db(&test_name!());
	let settings_manager = settings::Manager::new(db.clone());
	let auth_secret = settings_manager.get_auth_secret().unwrap();
	let user_manager = user::Manager::new(db.clone(), auth_secret);
	let vfs_manager = vfs::Manager::new(db.clone());
	let ddns_manager = ddns::Manager::new(db.clone());
	let config_manager = Manager::new(
		settings_manager.clone(),
		user_manager.clone(),
		vfs_manager.clone(),
		ddns_manager.clone(),
	);

	let initial_config = Config {
		users: Some(vec![user::NewUser {
			name: "Walter".into(),
			password: "Tasty🍖".into(),
			admin: true,
		}]),
		..Default::default()
	};
	config_manager.apply(&initial_config).unwrap();
	assert!(user_manager.list().unwrap()[0].is_admin());

	let new_config = Config {
		users: Some(vec![user::NewUser {
			name: "Walter".into(),
			password: "Tasty🍖".into(),
			admin: false,
		}]),
		..Default::default()
	};
	config_manager.apply(&new_config).unwrap();
	assert!(!user_manager.list().unwrap()[0].is_admin());
}

308
src/app/config/user.rs Normal file
View file

@ -0,0 +1,308 @@
use crate::app::{auth, Error};
use super::storage;
use super::Config;
/// A validated user account. Unlike `storage::User`, the password hash is
/// guaranteed to be present.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct User {
	pub name: String,
	// `Some(true)` marks an administrator; `None` behaves as non-admin.
	pub admin: Option<bool>,
	// Plain-text password carried over from the config file, when present.
	pub initial_password: Option<String>,
	pub hashed_password: String,
}
impl User {
	/// True only when the account is explicitly flagged as an administrator.
	pub fn is_admin(&self) -> bool {
		matches!(self.admin, Some(true))
	}
}
impl TryFrom<storage::User> for User {
	type Error = Error;

	fn try_from(user: storage::User) -> Result<Self, Self::Error> {
		// Prefer an existing hash; otherwise derive one from the initial
		// (plain-text) password. An account with neither is rejected.
		let hashed_password = match (&user.initial_password, &user.hashed_password) {
			(_, Some(p)) => p.clone(),
			(Some(p), None) => auth::hash_password(p)?,
			(None, None) => return Err(Error::EmptyPassword),
		};
		Ok(Self {
			name: user.name,
			admin: user.admin,
			initial_password: user.initial_password,
			hashed_password,
		})
	}
}
impl From<User> for storage::User {
fn from(user: User) -> Self {
Self {
name: user.name,
admin: user.admin,
initial_password: user.initial_password,
hashed_password: Some(user.hashed_password),
}
}
}
impl Config {
pub fn set_users(&mut self, users: Vec<storage::User>) -> Result<(), Error> {
let mut new_users = Vec::new();
for user in users {
let user = <storage::User as TryInto<User>>::try_into(user)?;
new_users.push(user);
}
new_users.dedup_by(|a, b| a.name == b.name);
self.users = new_users;
Ok(())
}
pub fn create_user(
&mut self,
username: &str,
password: &str,
admin: bool,
) -> Result<(), Error> {
if username.is_empty() {
return Err(Error::EmptyUsername);
}
if self.exists(username) {
return Err(Error::DuplicateUsername);
}
let password_hash = auth::hash_password(&password)?;
self.users.push(User {
name: username.to_owned(),
admin: Some(admin),
initial_password: None,
hashed_password: password_hash,
});
Ok(())
}
pub fn exists(&self, username: &str) -> bool {
self.users.iter().any(|u| u.name == username)
}
pub fn get_user(&self, username: &str) -> Option<&User> {
self.users.iter().find(|u| u.name == username)
}
pub fn get_user_mut(&mut self, username: &str) -> Option<&mut User> {
self.users.iter_mut().find(|u| u.name == username)
}
pub fn authenticate(
&self,
auth_token: &auth::Token,
scope: auth::Scope,
auth_secret: &auth::Secret,
) -> Result<auth::Authorization, Error> {
let authorization = auth::decode_auth_token(auth_token, scope, auth_secret)?;
if self.exists(&authorization.username) {
Ok(authorization)
} else {
Err(Error::IncorrectUsername)
}
}
pub fn login(
&self,
username: &str,
password: &str,
auth_secret: &auth::Secret,
) -> Result<auth::Token, Error> {
let user = self.get_user(username).ok_or(Error::IncorrectUsername)?;
if auth::verify_password(&user.hashed_password, password) {
let authorization = auth::Authorization {
username: username.to_owned(),
scope: auth::Scope::PolarisAuth,
};
auth::generate_auth_token(&authorization, auth_secret)
} else {
Err(Error::IncorrectPassword)
}
}
pub fn set_is_admin(&mut self, username: &str, is_admin: bool) -> Result<(), Error> {
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
user.admin = Some(is_admin);
Ok(())
}
pub fn set_password(&mut self, username: &str, password: &str) -> Result<(), Error> {
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
user.hashed_password = auth::hash_password(password)?;
Ok(())
}
pub fn delete_user(&mut self, username: &str) {
self.users.retain(|u| u.name != username);
}
}
#[cfg(test)]
mod test {
	use crate::app::test;
	use crate::test_name;

	use super::*;

	const TEST_USERNAME: &str = "Walter";
	const TEST_PASSWORD: &str = "super_secret!";

	// A plain-text initial password should produce a stored hash.
	#[test]
	fn adds_password_hashes() {
		let user_in = storage::User {
			name: TEST_USERNAME.to_owned(),
			initial_password: Some(TEST_PASSWORD.to_owned()),
			..Default::default()
		};
		let user: User = user_in.try_into().unwrap();
		let user_out: storage::User = user.into();
		assert_eq!(user_out.name, TEST_USERNAME);
		assert_eq!(user_out.initial_password, Some(TEST_PASSWORD.to_owned()));
		assert!(user_out.hashed_password.is_some());
	}

	// An existing hash must survive the round-trip untouched.
	#[test]
	fn preserves_password_hashes() {
		let user_in = storage::User {
			name: TEST_USERNAME.to_owned(),
			hashed_password: Some("hash".to_owned()),
			..Default::default()
		};
		let user: User = user_in.clone().try_into().unwrap();
		let user_out: storage::User = user.into();
		assert_eq!(user_out, user_in);
	}

	#[tokio::test]
	async fn create_delete_user_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_ok());
		ctx.config_manager.delete_user(TEST_USERNAME).await.unwrap();
		assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_err());
	}

	#[tokio::test]
	async fn cannot_create_user_with_blank_username() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx.config_manager.create_user("", TEST_PASSWORD, false);
		assert!(matches!(result.await.unwrap_err(), Error::EmptyUsername));
	}

	// A blank password is rejected (by the password hashing step).
	#[tokio::test]
	async fn cannot_create_user_with_blank_password() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx.config_manager.create_user(TEST_USERNAME, "", false);
		assert!(matches!(result.await.unwrap_err(), Error::EmptyPassword));
	}

	#[tokio::test]
	async fn cannot_create_duplicate_user() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx
			.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false);
		assert!(result.await.is_ok());
		let result = ctx
			.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false);
		assert!(matches!(
			result.await.unwrap_err(),
			Error::DuplicateUsername
		));
	}

	#[tokio::test]
	async fn login_rejects_bad_password() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		let result = ctx.config_manager.login(TEST_USERNAME, "not the password");
		assert!(matches!(
			result.await.unwrap_err(),
			Error::IncorrectPassword
		));
	}

	#[tokio::test]
	async fn login_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		let result = ctx.config_manager.login(TEST_USERNAME, TEST_PASSWORD);
		assert!(result.await.is_ok());
	}

	#[tokio::test]
	async fn authenticate_rejects_bad_token() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		let fake_token = auth::Token("fake token".to_owned());
		assert!(ctx
			.config_manager
			.authenticate(&fake_token, auth::Scope::PolarisAuth)
			.await
			.is_err())
	}

	// A token issued by login should authenticate back to the same user.
	#[tokio::test]
	async fn authenticate_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		let token = ctx
			.config_manager
			.login(TEST_USERNAME, TEST_PASSWORD)
			.await
			.unwrap();
		let authorization = ctx
			.config_manager
			.authenticate(&token, auth::Scope::PolarisAuth)
			.await
			.unwrap();
		assert_eq!(
			authorization,
			auth::Authorization {
				username: TEST_USERNAME.to_owned(),
				scope: auth::Scope::PolarisAuth,
			}
		)
	}
}

45
src/app/ddns.rs Normal file
View file

@ -0,0 +1,45 @@
use log::{debug, error};
use std::time::Duration;
use crate::app::{config, Error};
/// Triggers dynamic DNS updates by periodically calling a user-configured URL.
#[derive(Clone)]
pub struct Manager {
	config_manager: config::Manager,
}
impl Manager {
	pub fn new(config_manager: config::Manager) -> Self {
		Self { config_manager }
	}

	/// Sends one GET request to the configured DDNS update URL.
	/// Doing nothing when no URL is configured is the expected "feature off"
	/// path, not an error.
	pub async fn update_ddns(&self) -> Result<(), Error> {
		let Some(url) = self.config_manager.get_ddns_update_url().await else {
			// Fixed message: the early return is triggered by a missing URL,
			// not by missing credentials.
			debug!("Skipping DDNS update because no DDNS update URL is configured");
			return Ok(());
		};

		// NOTE(review): `ureq` performs blocking I/O inside this async fn,
		// which can stall the executor thread; consider
		// `tokio::task::spawn_blocking` or an async HTTP client.
		let response = ureq::get(&url.to_string()).call();

		match response {
			Ok(_) => Ok(()),
			Err(ureq::Error::Status(code, _)) => Err(Error::UpdateQueryFailed(code)),
			Err(ureq::Error::Transport(_)) => Err(Error::UpdateQueryTransport),
		}
	}

	/// Spawns a background task that refreshes the DDNS record every
	/// 30 minutes, forever. Errors are logged and do not stop the loop.
	pub fn begin_periodic_updates(&self) {
		tokio::spawn({
			let ddns = self.clone();
			async move {
				loop {
					if let Err(e) = ddns.update_ddns().await {
						error!("Dynamic DNS update error: {:?}", e);
					}
					tokio::time::sleep(Duration::from_secs(60 * 30)).await;
				}
			}
		});
	}
}

View file

@ -1,11 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::db::ddns_config;
/// Credentials and host for the legacy, ydns-specific DDNS updater, mapped
/// onto the `ddns_config` database table.
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Queryable, Serialize)]
#[table_name = "ddns_config"]
pub struct Config {
	pub host: String,
	pub username: String,
	pub password: String,
}

View file

@ -1,81 +0,0 @@
use anyhow::*;
use diesel::prelude::*;
use log::{error, info};
use std::thread;
use std::time;
use ureq;
use super::*;
use crate::db::DB;
// Endpoint of the ydns.io update API; credentials are sent via basic auth.
const DDNS_UPDATE_URL: &str = "https://ydns.io/api/v1/update/";

/// Legacy DDNS manager backed by database-stored ydns credentials.
#[derive(Clone)]
pub struct Manager {
	db: DB,
}
impl Manager {
	pub fn new(db: DB) -> Self {
		Self { db }
	}

	// Sends one authenticated update request to the ydns API. Missing
	// host/username is treated as "feature disabled", not an error.
	fn update_my_ip(&self) -> Result<()> {
		let config = self.config()?;
		if config.host.is_empty() || config.username.is_empty() {
			info!("Skipping DDNS update because credentials are missing");
			return Ok(());
		}

		let full_url = format!("{}?host={}", DDNS_UPDATE_URL, &config.host);
		let response = ureq::get(full_url.as_str())
			.auth(&config.username, &config.password)
			.call();

		if !response.ok() {
			bail!(
				"DDNS update query failed with status code: {}",
				response.status()
			);
		}
		Ok(())
	}

	// Loads the single ddns_config row from the database.
	pub fn config(&self) -> Result<Config> {
		use crate::db::ddns_config::dsl::*;
		let connection = self.db.connect()?;
		Ok(ddns_config
			.select((host, username, password))
			.get_result(&connection)?)
	}

	// Overwrites the stored credentials in place.
	pub fn set_config(&self, new_config: &Config) -> Result<()> {
		use crate::db::ddns_config::dsl::*;
		let connection = self.db.connect()?;
		diesel::update(ddns_config)
			.set((
				host.eq(&new_config.host),
				username.eq(&new_config.username),
				password.eq(&new_config.password),
			))
			.execute(&connection)?;
		Ok(())
	}

	// Spawns a thread that refreshes the record every 30 minutes, forever.
	pub fn begin_periodic_updates(&self) {
		let cloned = self.clone();
		std::thread::spawn(move || {
			cloned.run();
		});
	}

	fn run(&self) {
		loop {
			if let Err(e) = self.update_my_ip() {
				error!("Dynamic DNS update error: {:?}", e);
			}
			thread::sleep(time::Duration::from_secs(60 * 30));
		}
	}
}

View file

@ -1,5 +0,0 @@
mod config;
mod manager;
pub use config::Config;
pub use manager::Manager;

444
src/app/formats.rs Normal file
View file

@ -0,0 +1,444 @@
use id3::TagLike;
use lewton::inside_ogg::OggStreamReader;
use log::error;
use std::fs;
use std::io::{Seek, SeekFrom};
use std::path::Path;
use crate::app::Error;
use crate::utils;
use crate::utils::AudioFormat;
/// Tag data extracted from an audio file, normalized across formats.
/// Multi-valued fields (artists, genres, ...) are empty when untagged.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct SongMetadata {
	pub disc_number: Option<u32>,
	pub track_number: Option<u32>,
	pub title: Option<String>,
	// Duration in whole seconds, when the format stores or can derive it.
	pub duration: Option<u32>,
	pub artists: Vec<String>,
	pub album_artists: Vec<String>,
	pub album: Option<String>,
	pub year: Option<i32>,
	pub has_artwork: bool,
	pub lyricists: Vec<String>,
	pub composers: Vec<String>,
	pub genres: Vec<String>,
	pub labels: Vec<String>,
}
pub fn read_metadata<P: AsRef<Path>>(path: P) -> Option<SongMetadata> {
let data = match utils::get_audio_format(&path) {
Some(AudioFormat::AIFF) => read_id3(&path),
Some(AudioFormat::FLAC) => read_flac(&path),
Some(AudioFormat::MP3) => read_mp3(&path),
Some(AudioFormat::OGG) => read_vorbis(&path),
Some(AudioFormat::OPUS) => read_opus(&path),
Some(AudioFormat::WAVE) => read_id3(&path),
Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(&path),
Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(&path),
None => return None,
};
match data {
Ok(d) => Some(d),
Err(e) => {
error!(
"Error while reading file metadata for '{:?}': {}",
path.as_ref(),
e
);
None
}
}
}
/// Convenience accessor for multi-valued ID3 text frames.
trait ID3Ext {
	fn get_text_values(&self, frame_name: &str) -> Vec<String>;
}
impl ID3Ext for id3::Tag {
	// Returns every value of a multi-valued text frame, or an empty list when
	// the frame is absent or not text content.
	fn get_text_values(&self, frame_name: &str) -> Vec<String> {
		self.get(frame_name)
			.and_then(|f| f.content().text_values())
			.map(|i| i.map(str::to_string).collect())
			.unwrap_or_default()
	}
}
fn read_id3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let file = fs::File::open(path.as_ref()).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
read_id3_from_file(&file, path)
}
/// Parses ID3 metadata from an already-open file handle; `path` is only used
/// for error reporting.
fn read_id3_from_file<P: AsRef<Path>>(file: &fs::File, path: P) -> Result<SongMetadata, Error> {
	let tag = id3::Tag::read_from2(file)
		.or_else(|error| {
			// Salvage whatever frames parsed successfully from a damaged tag.
			if let Some(tag) = error.partial_tag {
				Ok(tag)
			} else {
				Err(error)
			}
		})
		.map_err(|e| Error::Id3(path.as_ref().to_owned(), e))?;

	let artists = tag.get_text_values("TPE1");
	let album_artists = tag.get_text_values("TPE2");
	let album = tag.album().map(|s| s.to_string());
	let title = tag.title().map(|s| s.to_string());
	let duration = tag.duration();
	let disc_number = tag.disc();
	let track_number = tag.track();
	// Prefer the explicit year; fall back through release and recording dates.
	let year = tag
		.year()
		.or_else(|| tag.date_released().map(|d| d.year))
		.or_else(|| tag.original_date_released().map(|d| d.year))
		.or_else(|| tag.date_recorded().map(|d| d.year));
	let has_artwork = tag.pictures().count() > 0;
	let lyricists = tag.get_text_values("TEXT");
	let composers = tag.get_text_values("TCOM");
	let genres = tag.get_text_values("TCON");
	let labels = tag.get_text_values("TPUB");

	Ok(SongMetadata {
		disc_number,
		track_number,
		title,
		duration,
		artists,
		album_artists,
		album,
		year,
		has_artwork,
		lyricists,
		composers,
		genres,
		labels,
	})
}
fn read_mp3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
let mut file = fs::File::open(&path).unwrap();
let mut metadata = read_id3_from_file(&file, &path)?;
metadata.duration = metadata.duration.or_else(|| {
file.seek(SeekFrom::Start(0)).unwrap();
mp3_duration::from_file(&file)
.map(|d| d.as_secs() as u32)
.ok()
});
Ok(metadata)
}
/// Helpers for decoding APE tag items into plain Rust values.
mod ape_ext {
	use regex::Regex;
	use std::sync::LazyLock;

	/// Reads an APE item as a single string value.
	pub fn read_string(item: &ape::Item) -> Option<String> {
		item.try_into().ok().map(str::to_string)
	}

	/// Reads an APE item as a list of strings; an absent item yields an
	/// empty list.
	pub fn read_strings(item: Option<&ape::Item>) -> Vec<String> {
		let Some(item) = item else {
			return vec![];
		};
		let strings: Vec<&str> = item.try_into().unwrap_or_default();
		strings.into_iter().map(str::to_string).collect()
	}

	pub fn read_i32(item: &ape::Item) -> Option<i32> {
		// `.and_then` replaces the clippy-flagged `.map(...).flatten()` chain.
		item.try_into()
			.ok()
			.and_then(|s: &str| s.parse::<i32>().ok())
	}

	// Matches the leading number in values such as "3/12" ("X of Y").
	static X_OF_Y_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r#"^\d+"#).unwrap());

	/// Parses the "X" out of an "X" or "X/Y" style item value.
	pub fn read_x_of_y(item: &ape::Item) -> Option<u32> {
		item.try_into()
			.ok()
			.and_then(|s: &str| X_OF_Y_REGEX.find(s).and_then(|m| m.as_str().parse().ok()))
	}
}
/// Reads metadata from an APE-tagged file (APE / Musepack).
/// APE tags carry no duration or artwork information here.
fn read_ape<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let tag = ape::read_from_path(path)?;
	Ok(SongMetadata {
		disc_number: tag.item("Disc").and_then(ape_ext::read_x_of_y),
		track_number: tag.item("Track").and_then(ape_ext::read_x_of_y),
		title: tag.item("Title").and_then(ape_ext::read_string),
		duration: None,
		artists: ape_ext::read_strings(tag.item("Artist")),
		album_artists: ape_ext::read_strings(tag.item("Album artist")),
		album: tag.item("Album").and_then(ape_ext::read_string),
		year: tag.item("Year").and_then(ape_ext::read_i32),
		has_artwork: false,
		lyricists: ape_ext::read_strings(tag.item("LYRICIST")),
		composers: ape_ext::read_strings(tag.item("COMPOSER")),
		genres: ape_ext::read_strings(tag.item("GENRE")),
		labels: ape_ext::read_strings(tag.item("PUBLISHER")),
	})
}
/// Reads metadata from an Ogg Vorbis file by walking its comment header.
/// Repeated keys (e.g. several ARTIST comments) accumulate into the Vec
/// fields; for single-value fields the last occurrence wins.
fn read_vorbis<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let file = fs::File::open(&path).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
	let source = OggStreamReader::new(file)?;

	let mut metadata = SongMetadata::default();
	for (key, value) in source.comment_hdr.comment_list {
		// Vorbis comment keys are matched case-insensitively.
		utils::match_ignore_case! {
			match key {
				"TITLE" => metadata.title = Some(value),
				"ALBUM" => metadata.album = Some(value),
				"ARTIST" => metadata.artists.push(value),
				"ALBUMARTIST" => metadata.album_artists.push(value),
				"TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
				"DATE" => metadata.year = value.parse::<i32>().ok(),
				"LYRICIST" => metadata.lyricists.push(value),
				"COMPOSER" => metadata.composers.push(value),
				"GENRE" => metadata.genres.push(value),
				"PUBLISHER" => metadata.labels.push(value),
				_ => (),
			}
		}
	}

	Ok(metadata)
}
/// Reads metadata from an Opus file's header comments; mirrors the
/// key-handling of `read_vorbis` (Opus uses the same comment scheme).
fn read_opus<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let headers = opus_headers::parse_from_path(path)?;

	let mut metadata = SongMetadata::default();
	for (key, value) in headers.comments.user_comments {
		// Comment keys are matched case-insensitively.
		utils::match_ignore_case! {
			match key {
				"TITLE" => metadata.title = Some(value),
				"ALBUM" => metadata.album = Some(value),
				"ARTIST" => metadata.artists.push(value),
				"ALBUMARTIST" => metadata.album_artists.push(value),
				"TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
				"DATE" => metadata.year = value.parse::<i32>().ok(),
				"LYRICIST" => metadata.lyricists.push(value),
				"COMPOSER" => metadata.composers.push(value),
				"GENRE" => metadata.genres.push(value),
				"PUBLISHER" => metadata.labels.push(value),
				_ => (),
			}
		}
	}

	Ok(metadata)
}
/// Reads metadata from a FLAC file's vorbis comments and stream info block.
fn read_flac<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let tag = metaflac::Tag::read_from_path(&path)
		.map_err(|e| Error::Metaflac(path.as_ref().to_owned(), e))?;
	let vorbis = tag
		.vorbis_comments()
		.ok_or(Error::VorbisCommentNotFoundInFlacFile)?;
	// Fix: `d[0]` / `v[0]` panicked when a comment key was present with an
	// empty value list in a malformed file; `.first()` degrades to None.
	let disc_number = vorbis
		.get("DISCNUMBER")
		.and_then(|d| d.first())
		.and_then(|d| d.parse::<u32>().ok());
	let year = vorbis
		.get("DATE")
		.and_then(|d| d.first())
		.and_then(|d| d.parse::<i32>().ok());
	let mut streaminfo = tag.get_blocks(metaflac::BlockType::StreamInfo);
	let duration = match streaminfo.next() {
		// Guard against a zero sample rate to avoid a divide-by-zero panic
		// on corrupt stream info.
		Some(metaflac::Block::StreamInfo(s)) if s.sample_rate > 0 => {
			Some(s.total_samples as u32 / s.sample_rate)
		}
		_ => None,
	};
	let has_artwork = tag.pictures().count() > 0;
	let multivalue = |o: Option<&Vec<String>>| o.cloned().unwrap_or_default();

	Ok(SongMetadata {
		artists: multivalue(vorbis.artist()),
		album_artists: multivalue(vorbis.album_artist()),
		album: vorbis.album().and_then(|v| v.first().cloned()),
		title: vorbis.title().and_then(|v| v.first().cloned()),
		duration,
		disc_number,
		track_number: vorbis.track(),
		year,
		has_artwork,
		lyricists: multivalue(vorbis.get("LYRICIST")),
		composers: multivalue(vorbis.get("COMPOSER")),
		genres: multivalue(vorbis.get("GENRE")),
		labels: multivalue(vorbis.get("PUBLISHER")),
	})
}
/// Reads song metadata from the iTunes-style atoms of an MP4/M4A file.
fn read_mp4<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let mut tag = mp4ameta::Tag::read_from_path(&path)
		.map_err(|e| Error::Mp4aMeta(path.as_ref().to_owned(), e))?;

	// Record labels live in a freeform iTunes atom rather than a standard one.
	let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label");

	let artists = tag.take_artists().collect();
	let album_artists = tag.take_album_artists().collect();
	let album = tag.take_album();
	let title = tag.take_title();
	let duration = tag.duration().map(|d| d.as_secs() as u32);
	let disc_number = tag.disc_number().map(|n| n as u32);
	let track_number = tag.track_number().map(|n| n as u32);
	let year = tag.year().and_then(|y| y.parse::<i32>().ok());
	let has_artwork = tag.artwork().is_some();
	let lyricists = tag.take_lyricists().collect();
	let composers = tag.take_composers().collect();
	let genres = tag.take_genres().collect();
	let labels = tag.take_strings_of(&label_ident).collect();

	Ok(SongMetadata {
		artists,
		album_artists,
		album,
		title,
		duration,
		disc_number,
		track_number,
		year,
		has_artwork,
		lyricists,
		composers,
		genres,
		labels,
	})
}
#[test]
fn reads_file_metadata() {
	// All sample files carry the same tags; only the presence of a readable
	// duration differs by format.
	let expected_without_duration = SongMetadata {
		disc_number: Some(3),
		track_number: Some(1),
		title: Some("TEST TITLE".into()),
		artists: vec!["TEST ARTIST".into()],
		album_artists: vec!["TEST ALBUM ARTIST".into()],
		album: Some("TEST ALBUM".into()),
		duration: None,
		year: Some(2016),
		has_artwork: false,
		lyricists: vec!["TEST LYRICIST".into()],
		composers: vec!["TEST COMPOSER".into()],
		genres: vec!["TEST GENRE".into()],
		labels: vec!["TEST LABEL".into()],
	};
	let expected_with_duration = SongMetadata {
		duration: Some(0),
		..expected_without_duration.clone()
	};

	let cases = [
		("test-data/formats/sample.aif", &expected_without_duration),
		("test-data/formats/sample.mp3", &expected_with_duration),
		("test-data/formats/sample.ogg", &expected_without_duration),
		("test-data/formats/sample.flac", &expected_with_duration),
		("test-data/formats/sample.m4a", &expected_with_duration),
		("test-data/formats/sample.opus", &expected_without_duration),
		("test-data/formats/sample.ape", &expected_without_duration),
		("test-data/formats/sample.wav", &expected_without_duration),
	];

	for (path, expected) in cases {
		assert_eq!(
			&read_metadata(Path::new(path)).unwrap(),
			expected,
			"unexpected metadata for {path}"
		);
	}
}
#[test]
fn reads_embedded_artwork() {
	// Every artwork sample embeds a cover image; the reader must detect it.
	for extension in ["aif", "mp3", "flac", "m4a", "wav"] {
		let path = format!("test-data/artwork/sample.{extension}");
		let metadata = read_metadata(Path::new(&path)).unwrap();
		assert!(metadata.has_artwork, "expected embedded artwork in {path}");
	}
}
#[test]
fn reads_multivalue_fields() {
	// Each multi-value field carries two entries in the sample files.
	let expected_without_duration = SongMetadata {
		disc_number: Some(3),
		track_number: Some(1),
		title: Some("TEST TITLE".into()),
		artists: vec!["TEST ARTIST".into(), "OTHER ARTIST".into()],
		album_artists: vec!["TEST ALBUM ARTIST".into(), "OTHER ALBUM ARTIST".into()],
		album: Some("TEST ALBUM".into()),
		duration: None,
		year: Some(2016),
		has_artwork: false,
		lyricists: vec!["TEST LYRICIST".into(), "OTHER LYRICIST".into()],
		composers: vec!["TEST COMPOSER".into(), "OTHER COMPOSER".into()],
		genres: vec!["TEST GENRE".into(), "OTHER GENRE".into()],
		labels: vec!["TEST LABEL".into(), "OTHER LABEL".into()],
	};
	let expected_with_duration = SongMetadata {
		duration: Some(0),
		..expected_without_duration.clone()
	};

	// TODO Test m4a support (likely working). Pending https://tickets.metabrainz.org/browse/PICARD-3029
	let cases = [
		("test-data/multivalue/multivalue.aif", &expected_without_duration),
		("test-data/multivalue/multivalue.mp3", &expected_with_duration),
		("test-data/multivalue/multivalue.ogg", &expected_without_duration),
		("test-data/multivalue/multivalue.flac", &expected_with_duration),
		("test-data/multivalue/multivalue.opus", &expected_without_duration),
		("test-data/multivalue/multivalue.ape", &expected_without_duration),
		("test-data/multivalue/multivalue.wav", &expected_without_duration),
	];

	for (path, expected) in cases {
		assert_eq!(
			&read_metadata(Path::new(path)).unwrap(),
			expected,
			"unexpected metadata for {path}"
		);
	}
}

388
src/app/index.rs Normal file
View file

@ -0,0 +1,388 @@
use std::{
path::{Path, PathBuf},
sync::{Arc, RwLock},
};
use log::{error, info};
use serde::{Deserialize, Serialize};
use tokio::task::spawn_blocking;
use crate::app::{scanner, Error};
mod browser;
mod collection;
mod dictionary;
mod query;
mod search;
mod storage;
pub use browser::File;
pub use collection::{Album, AlbumHeader, Artist, ArtistHeader, Genre, GenreHeader, Song};
use storage::{store_song, AlbumKey, ArtistKey, GenreKey, InternPath, SongKey};
/// Cheaply cloneable handle to the in-memory collection index, with support
/// for persisting it to disk and restoring it.
#[derive(Clone)]
pub struct Manager {
	// Location on disk where the index is saved and restored from.
	index_file_path: PathBuf,
	index: Arc<RwLock<Index>>, // Not a tokio RwLock as we want to do CPU-bound work with Index and lock this inside spawn_blocking()
}
impl Manager {
pub async fn new(directory: &Path) -> Result<Self, Error> {
tokio::fs::create_dir_all(directory)
.await
.map_err(|e| Error::Io(directory.to_owned(), e))?;
let index_manager = Self {
index_file_path: directory.join("collection.index"),
index: Arc::default(),
};
match index_manager.try_restore_index().await {
Ok(true) => info!("Restored collection index from disk"),
Ok(false) => info!("No existing collection index to restore"),
Err(e) => error!("Failed to restore collection index: {}", e),
};
Ok(index_manager)
}
pub async fn is_index_empty(&self) -> bool {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.collection.num_songs() == 0
}
})
.await
.unwrap()
}
pub async fn replace_index(&self, new_index: Index) {
spawn_blocking({
let index_manager = self.clone();
move || {
let mut lock = index_manager.index.write().unwrap();
*lock = new_index;
}
})
.await
.unwrap()
}
pub async fn persist_index(&self, index: &Index) -> Result<(), Error> {
let serialized = match bitcode::serialize(index) {
Ok(s) => s,
Err(_) => return Err(Error::IndexSerializationError),
};
tokio::fs::write(&self.index_file_path, &serialized[..])
.await
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
Ok(())
}
async fn try_restore_index(&self) -> Result<bool, Error> {
match tokio::fs::try_exists(&self.index_file_path).await {
Ok(true) => (),
Ok(false) => return Ok(false),
Err(e) => return Err(Error::Io(self.index_file_path.clone(), e)),
};
let serialized = tokio::fs::read(&self.index_file_path)
.await
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
let index = match bitcode::deserialize(&serialized[..]) {
Ok(i) => i,
Err(_) => return Err(Error::IndexDeserializationError),
};
self.replace_index(index).await;
Ok(true)
}
pub async fn browse(&self, virtual_path: PathBuf) -> Result<Vec<browser::File>, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.browser.browse(&index.dictionary, virtual_path)
}
})
.await
.unwrap()
}
pub async fn flatten(&self, virtual_path: PathBuf) -> Result<Vec<PathBuf>, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.browser.flatten(&index.dictionary, virtual_path)
}
})
.await
.unwrap()
}
pub async fn get_genres(&self) -> Vec<GenreHeader> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.collection.get_genres(&index.dictionary)
}
})
.await
.unwrap()
}
pub async fn get_genre(&self, name: String) -> Result<Genre, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
let name = index
.dictionary
.get(&name)
.ok_or_else(|| Error::GenreNotFound)?;
let genre_key = GenreKey(name);
index
.collection
.get_genre(&index.dictionary, genre_key)
.ok_or_else(|| Error::GenreNotFound)
}
})
.await
.unwrap()
}
pub async fn get_albums(&self) -> Vec<AlbumHeader> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.collection.get_albums(&index.dictionary)
}
})
.await
.unwrap()
}
pub async fn get_artists(&self) -> Vec<ArtistHeader> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index.collection.get_artists(&index.dictionary)
}
})
.await
.unwrap()
}
pub async fn get_artist(&self, name: String) -> Result<Artist, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
let name = index
.dictionary
.get(name)
.ok_or_else(|| Error::ArtistNotFound)?;
let artist_key = ArtistKey(name);
index
.collection
.get_artist(&index.dictionary, artist_key)
.ok_or_else(|| Error::ArtistNotFound)
}
})
.await
.unwrap()
}
pub async fn get_album(&self, artists: Vec<String>, name: String) -> Result<Album, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
let name = index
.dictionary
.get(&name)
.ok_or_else(|| Error::AlbumNotFound)?;
let album_key = AlbumKey {
artists: artists
.into_iter()
.filter_map(|a| index.dictionary.get(a))
.map(|k| ArtistKey(k))
.collect(),
name,
};
index
.collection
.get_album(&index.dictionary, album_key)
.ok_or_else(|| Error::AlbumNotFound)
}
})
.await
.unwrap()
}
pub async fn get_random_albums(
&self,
seed: Option<u64>,
offset: usize,
count: usize,
) -> Result<Vec<Album>, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
Ok(index
.collection
.get_random_albums(&index.dictionary, seed, offset, count))
}
})
.await
.unwrap()
}
pub async fn get_recent_albums(
&self,
offset: usize,
count: usize,
) -> Result<Vec<Album>, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
Ok(index
.collection
.get_recent_albums(&index.dictionary, offset, count))
}
})
.await
.unwrap()
}
pub async fn get_songs(&self, virtual_paths: Vec<PathBuf>) -> Vec<Result<Song, Error>> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
virtual_paths
.into_iter()
.map(|p| {
p.get(&index.dictionary)
.and_then(|virtual_path| {
let key = SongKey { virtual_path };
index.collection.get_song(&index.dictionary, key)
})
.ok_or_else(|| Error::SongNotFound)
})
.collect()
}
})
.await
.unwrap()
}
pub async fn search(&self, query: String) -> Result<Vec<Song>, Error> {
spawn_blocking({
let index_manager = self.clone();
move || {
let index = index_manager.index.read().unwrap();
index
.search
.find_songs(&index.collection, &index.dictionary, &query)
}
})
.await
.unwrap()
}
}
/// A queryable snapshot of the music collection, made of independent facets
/// that all share one string dictionary.
#[derive(Serialize, Deserialize)]
pub struct Index {
	pub dictionary: dictionary::Dictionary, // Interned strings used by all other facets
	pub browser: browser::Browser,          // Directory-hierarchy queries (browse/flatten)
	pub collection: collection::Collection, // Genre/artist/album/song queries
	pub search: search::Search,             // Free-text song search
}
impl Default for Index {
fn default() -> Self {
Self {
dictionary: Default::default(),
browser: Default::default(),
collection: Default::default(),
search: Default::default(),
}
}
}
/// Accumulates scanned directories and songs, then produces an immutable
/// `Index`. Each facet of the index has its own sub-builder.
#[derive(Clone)]
pub struct Builder {
	dictionary_builder: dictionary::Builder,
	browser_builder: browser::Builder,
	collection_builder: collection::Builder,
	search_builder: search::Builder,
}
impl Builder {
	/// Creates an empty builder with fresh sub-builders for every facet.
	pub fn new() -> Self {
		Self {
			dictionary_builder: Default::default(),
			browser_builder: Default::default(),
			collection_builder: Default::default(),
			search_builder: Default::default(),
		}
	}

	/// Registers a directory with the browser facet.
	pub fn add_directory(&mut self, directory: scanner::Directory) {
		self.browser_builder
			.add_directory(&mut self.dictionary_builder, directory);
	}

	/// Registers a song with every facet. Songs rejected by `store_song` are
	/// skipped entirely.
	pub fn add_song(&mut self, scanner_song: scanner::Song) {
		let Some(storage_song) = store_song(&mut self.dictionary_builder, &scanner_song) else {
			return;
		};
		self.browser_builder
			.add_song(&mut self.dictionary_builder, &scanner_song);
		self.collection_builder.add_song(&storage_song);
		self.search_builder.add_song(&scanner_song, &storage_song);
	}

	/// Consumes the builder and produces a queryable index.
	pub fn build(self) -> Index {
		Index {
			dictionary: self.dictionary_builder.build(),
			browser: self.browser_builder.build(),
			collection: self.collection_builder.build(),
			search: self.search_builder.build(),
		}
	}
}
impl Default for Builder {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod test {
	use crate::{
		app::{index, test},
		test_name,
	};

	#[tokio::test]
	async fn can_persist_index() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		// Nothing has been persisted yet, so there is no index to restore.
		assert!(!ctx.index_manager.try_restore_index().await.unwrap());
		let index = index::Builder::new().build();
		ctx.index_manager.persist_index(&index).await.unwrap();
		// The persisted index should now be restorable.
		assert!(ctx.index_manager.try_restore_index().await.unwrap());
	}
}

389
src/app/index/browser.rs Normal file
View file

@ -0,0 +1,389 @@
use std::{
cmp::Ordering,
collections::{BTreeSet, HashMap},
ffi::OsStr,
hash::Hash,
path::{Path, PathBuf},
};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use tinyvec::TinyVec;
use trie_rs::{Trie, TrieBuilder};
use crate::app::index::{
dictionary::{self, Dictionary},
storage::{self, PathKey},
InternPath,
};
use crate::app::{scanner, Error};
/// A browsable entry of the virtual file hierarchy, addressed by its
/// library-relative (virtual) path.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub enum File {
	Directory(PathBuf),
	Song(PathBuf),
}
/// Answers directory-hierarchy queries about the collection.
#[derive(Serialize, Deserialize)]
pub struct Browser {
	// Direct children of every known directory, keyed by interned path.
	directories: HashMap<PathKey, BTreeSet<storage::File>>,
	// Every song path, stored component by component, for recursive listings.
	flattened: Trie<lasso2::Spur>,
}
impl Default for Browser {
fn default() -> Self {
Self {
directories: HashMap::default(),
flattened: TrieBuilder::new().build(),
}
}
}
impl Browser {
	/// Lists the immediate children (directories and songs) of a virtual
	/// directory, sorted directories-first then locale-aware by name.
	///
	/// # Errors
	/// Returns `Error::DirectoryNotFound` when `virtual_path` is not a known
	/// directory.
	pub fn browse<P: AsRef<Path>>(
		&self,
		dictionary: &Dictionary,
		virtual_path: P,
	) -> Result<Vec<File>, Error> {
		let path = virtual_path
			.as_ref()
			.get(dictionary)
			.ok_or_else(|| Error::DirectoryNotFound(virtual_path.as_ref().to_owned()))?;
		let Some(files) = self.directories.get(&path) else {
			return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
		};
		// Resolve interned path keys back into real paths.
		let mut files = files
			.iter()
			.map(|f| {
				let path = match f {
					storage::File::Directory(p) => p,
					storage::File::Song(p) => p,
				};
				let path = Path::new(OsStr::new(dictionary.resolve(&path.0))).to_owned();
				match f {
					storage::File::Directory(_) => File::Directory(path),
					storage::File::Song(_) => File::Song(path),
				}
			})
			.collect::<Vec<_>>();
		// When the root contains a single directory and nothing else, skip it
		// and list its content directly instead.
		if virtual_path.as_ref().parent().is_none() {
			if let [File::Directory(ref p)] = files[..] {
				return self.browse(dictionary, p);
			}
		}
		let collator = dictionary::make_collator();
		// Directories sort before songs; within each group, locale-aware
		// comparison of the path text.
		files.sort_by(|a, b| {
			let (a, b) = match (a, b) {
				(File::Directory(_), File::Song(_)) => return Ordering::Less,
				(File::Song(_), File::Directory(_)) => return Ordering::Greater,
				(File::Directory(a), File::Directory(b)) => (a, b),
				(File::Song(a), File::Song(b)) => (a, b),
			};
			collator.compare(
				a.as_os_str().to_string_lossy().as_ref(),
				b.as_os_str().to_string_lossy().as_ref(),
			)
		});
		Ok(files)
	}

	/// Recursively lists every song path under a virtual directory, sorted
	/// with the dictionary's precomputed collation ranks.
	///
	/// # Errors
	/// Returns `Error::DirectoryNotFound` when `virtual_path` is not a prefix
	/// of any indexed song path.
	pub fn flatten<P: AsRef<Path>>(
		&self,
		dictionary: &Dictionary,
		virtual_path: P,
	) -> Result<Vec<PathBuf>, Error> {
		// NOTE(review): non-UTF-8 components degrade to "" before dictionary
		// lookup; confirm this matches how paths were interned at build time.
		let path_components = virtual_path
			.as_ref()
			.components()
			.map(|c| c.as_os_str().to_str().unwrap_or_default())
			.filter_map(|c| dictionary.get(c))
			.collect::<Vec<_>>();
		if !self.flattened.is_prefix(&path_components) {
			return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
		}
		let mut results: Vec<TinyVec<[_; 8]>> = self
			.flattened
			.predictive_search(path_components)
			.collect::<Vec<_>>();
		// Lexicographic comparison of component sequences using interned
		// sort keys; a path that is a prefix of another sorts first.
		results.par_sort_unstable_by(|a, b| {
			for (x, y) in a.iter().zip(b.iter()) {
				match dictionary.cmp(x, y) {
					Ordering::Equal => continue,
					ordering @ _ => return ordering,
				}
			}
			a.len().cmp(&b.len())
		});
		// Re-assemble each component sequence into a platform-native path.
		let files = results
			.into_iter()
			.map(|c: TinyVec<[_; 8]>| -> PathBuf {
				c.into_iter()
					.map(|s| dictionary.resolve(&s))
					.collect::<TinyVec<[&str; 8]>>()
					.join(std::path::MAIN_SEPARATOR_STR)
					.into()
			})
			.collect::<Vec<_>>();
		Ok(files)
	}
}
/// Accumulates directories and songs during a scan, then produces an
/// immutable `Browser`.
#[derive(Clone, Default)]
pub struct Builder {
	directories: HashMap<PathKey, BTreeSet<storage::File>>,
	flattened: TrieBuilder<lasso2::Spur>,
}
impl Builder {
	/// Registers a directory: creates its (possibly empty) child set and
	/// links it into its parent's child set. Directories whose path cannot be
	/// interned are silently skipped.
	pub fn add_directory(
		&mut self,
		dictionary_builder: &mut dictionary::Builder,
		directory: scanner::Directory,
	) {
		let Some(virtual_path) = (&directory.virtual_path).get_or_intern(dictionary_builder) else {
			return;
		};
		let Some(virtual_parent) = directory
			.virtual_path
			.parent()
			.and_then(|p| p.get_or_intern(dictionary_builder))
		else {
			return;
		};

		self.directories.entry(virtual_path).or_default();

		self.directories
			.entry(virtual_parent)
			.or_default()
			.insert(storage::File::Directory(virtual_path));
	}

	/// Registers a song: links it into its parent directory's child set and
	/// records its full component path in the flattened trie.
	pub fn add_song(&mut self, dictionary_builder: &mut dictionary::Builder, song: &scanner::Song) {
		let Some(virtual_path) = (&song.virtual_path).get_or_intern(dictionary_builder) else {
			return;
		};
		let Some(virtual_parent) = song
			.virtual_path
			.parent()
			.and_then(|p| p.get_or_intern(dictionary_builder))
		else {
			return;
		};

		self.directories
			.entry(virtual_parent)
			.or_default()
			.insert(storage::File::Song(virtual_path));

		self.flattened.push(
			song.virtual_path
				.components()
				// Degrade non-UTF-8 components to "" instead of panicking,
				// mirroring how Browser::flatten interprets lookup paths.
				.map(|c| dictionary_builder.get_or_intern(c.as_os_str().to_str().unwrap_or_default()))
				.collect::<TinyVec<[lasso2::Spur; 8]>>(),
		);
	}

	/// Consumes the builder and produces a queryable `Browser`.
	pub fn build(self) -> Browser {
		Browser {
			directories: self.directories,
			flattened: self.flattened.build(),
		}
	}
}
#[cfg(test)]
mod test {
	use std::collections::HashSet;
	use std::path::PathBuf;

	use super::*;

	/// Builds a `Browser` (and its dictionary) from a set of song paths,
	/// registering every ancestor directory of each song along the way.
	fn setup_test(songs: HashSet<PathBuf>) -> (Browser, Dictionary) {
		let mut dictionary_builder = dictionary::Builder::default();
		let mut builder = Builder::default();

		let directories = songs
			.iter()
			.flat_map(|k| k.parent().unwrap().ancestors())
			.collect::<HashSet<_>>();

		for directory in directories {
			builder.add_directory(
				&mut dictionary_builder,
				scanner::Directory {
					virtual_path: directory.to_owned(),
				},
			);
		}

		for path in songs {
			let mut song = scanner::Song::default();
			song.virtual_path = path.clone();
			builder.add_song(&mut dictionary_builder, &song);
		}

		let browser = builder.build();
		let dictionary = dictionary_builder.build();

		(browser, dictionary)
	}

	#[test]
	fn can_browse_top_level() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Music", "Iron Maiden", "Moonchild.mp3"]),
			PathBuf::from_iter(["Also Music", "Iron Maiden", "The Prisoner.mp3"]),
		]));
		let files = browser.browse(&strings, PathBuf::new()).unwrap();
		assert_eq!(
			files[..],
			[
				File::Directory(PathBuf::from_iter(["Also Music"])),
				File::Directory(PathBuf::from_iter(["Music"])),
			]
		);
	}

	// A lone top-level directory is skipped and its content listed instead.
	#[test]
	fn browse_skips_redundant_top_level() {
		let (browser, strings) = setup_test(HashSet::from([PathBuf::from_iter([
			"Music",
			"Iron Maiden",
			"Moonchild.mp3",
		])]));
		let files = browser.browse(&strings, PathBuf::new()).unwrap();
		assert_eq!(
			files[..],
			[File::Directory(PathBuf::from_iter([
				"Music",
				"Iron Maiden"
			])),]
		);
	}

	#[test]
	fn can_browse_directory() {
		let artist_directory = PathBuf::from_iter(["Music", "Iron Maiden"]);

		let (browser, strings) = setup_test(HashSet::from([
			artist_directory.join("Infinite Dreams.mp3"),
			artist_directory.join("Moonchild.mp3"),
		]));

		let files = browser.browse(&strings, artist_directory.clone()).unwrap();

		assert_eq!(
			files,
			[
				File::Song(artist_directory.join("Infinite Dreams.mp3")),
				File::Song(artist_directory.join("Moonchild.mp3"))
			]
		);
	}

	// Directories come first; remaining entries use locale-aware,
	// case-insensitive, diacritic-aware ordering.
	#[test]
	fn browse_entries_are_sorted() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Ott", "Mir.mp3"]),
			PathBuf::from("Helios.mp3"),
			PathBuf::from("asura.mp3"),
			PathBuf::from("à la maison.mp3"),
		]));

		let files = browser.browse(&strings, PathBuf::new()).unwrap();

		assert_eq!(
			files,
			[
				File::Directory(PathBuf::from("Ott")),
				File::Song(PathBuf::from("à la maison.mp3")),
				File::Song(PathBuf::from("asura.mp3")),
				File::Song(PathBuf::from("Helios.mp3")),
			]
		);
	}

	#[test]
	fn can_flatten_root() {
		let song_a = PathBuf::from_iter(["Music", "Electronic", "Papua New Guinea.mp3"]);
		let song_b = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);
		let song_c = PathBuf::from_iter(["Music", "Metal", "No Turning Back.mp3"]);

		let (browser, strings) = setup_test(HashSet::from([
			song_a.clone(),
			song_b.clone(),
			song_c.clone(),
		]));

		let files = browser.flatten(&strings, PathBuf::new()).unwrap();

		assert_eq!(files, [song_a, song_b, song_c]);
	}

	#[test]
	fn can_flatten_directory() {
		let electronic = PathBuf::from_iter(["Music", "Electronic"]);
		let song_a = electronic.join(PathBuf::from_iter(["FSOL", "Papua New Guinea.mp3"]));
		let song_b = electronic.join(PathBuf::from_iter(["Kraftwerk", "Autobahn.mp3"]));
		let song_c = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);

		let (browser, strings) = setup_test(HashSet::from([
			song_a.clone(),
			song_b.clone(),
			song_c.clone(),
		]));

		let files = browser.flatten(&strings, electronic).unwrap();

		assert_eq!(files, [song_a, song_b]);
	}

	#[test]
	fn flatten_entries_are_sorted() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Ott", "Mir.mp3"]),
			PathBuf::from("Helios.mp3"),
			PathBuf::from("à la maison.mp3.mp3"),
			PathBuf::from("asura.mp3"),
		]));

		let files = browser.flatten(&strings, PathBuf::new()).unwrap();

		assert_eq!(
			files,
			[
				PathBuf::from("à la maison.mp3.mp3"),
				PathBuf::from("asura.mp3"),
				PathBuf::from("Helios.mp3"),
				PathBuf::from_iter(["Ott", "Mir.mp3"]),
			]
		);
	}

	// "Leviathan II" must not match songs under "Leviathan III" even though
	// it is a string prefix of it.
	#[test]
	fn can_flatten_directory_with_shared_prefix() {
		let directory_a = PathBuf::from_iter(["Music", "Therion", "Leviathan II"]);
		let directory_b = PathBuf::from_iter(["Music", "Therion", "Leviathan III"]); // Prefix of directory_a
		let song_a = directory_a.join("Pazuzu.mp3");
		let song_b = directory_b.join("Ninkigal.mp3");

		let (browser, strings) = setup_test(HashSet::from([song_a.clone(), song_b.clone()]));

		let files = browser.flatten(&strings, directory_a).unwrap();

		assert_eq!(files, [song_a]);
	}
}

1116
src/app/index/collection.rs Normal file

File diff suppressed because it is too large Load diff

110
src/app/index/dictionary.rs Normal file
View file

@ -0,0 +1,110 @@
use std::{cmp::Ordering, collections::HashMap};
use icu_collator::{Collator, CollatorOptions, Strength};
use lasso2::{Rodeo, RodeoReader, Spur};
use rayon::slice::ParallelSliceMut;
use serde::{Deserialize, Serialize};
/// Normalizes a string for canonical comparison: drops spaces, underscores,
/// hyphens and apostrophes, then lowercases the result.
pub fn sanitize(s: &str) -> String {
	// TODO merge inconsistent diacritic usage
	s.chars()
		.filter(|c| !matches!(c, ' ' | '_' | '-' | '\''))
		.collect::<String>()
		.to_lowercase()
}
/// Creates the collator used for all user-facing sorting: secondary strength,
/// i.e. case-insensitive but diacritic-sensitive comparisons.
pub fn make_collator() -> Collator {
	let mut options = CollatorOptions::new();
	options.strength = Some(Strength::Secondary);
	Collator::try_new(&Default::default(), options).unwrap()
}
/// Read-only store of interned strings, with canonical lookups and
/// precomputed collation order.
#[derive(Serialize, Deserialize)]
pub struct Dictionary {
	strings: RodeoReader,         // Interned strings
	canon: HashMap<String, Spur>, // Canonical representation of similar strings
	sort_keys: HashMap<Spur, u32>, // All spurs sorted against each other
}
impl Dictionary {
	/// Looks up the spur of an exact string, if it was interned.
	pub fn get<S: AsRef<str>>(&self, string: S) -> Option<Spur> {
		self.strings.get(string)
	}

	/// Looks up the canonical spur shared by every string that sanitizes to
	/// the same form as `string`.
	pub fn get_canon<S: AsRef<str>>(&self, string: S) -> Option<Spur> {
		let cleaned = sanitize(string.as_ref());
		self.canon.get(&cleaned).copied()
	}

	/// Returns the string a spur was interned from.
	pub fn resolve(&self, spur: &Spur) -> &str {
		self.strings.resolve(spur)
	}

	/// Compares two interned strings via their precomputed collation ranks;
	/// unknown spurs rank as zero.
	pub fn cmp(&self, a: &Spur, b: &Spur) -> Ordering {
		let rank = |s: &Spur| self.sort_keys.get(s).copied().unwrap_or_default();
		rank(a).cmp(&rank(b))
	}
}
impl Default for Dictionary {
fn default() -> Self {
Self {
strings: Rodeo::default().into_reader(),
canon: Default::default(),
sort_keys: Default::default(),
}
}
}
/// Accumulates interned strings during a scan, then produces an immutable
/// `Dictionary` with precomputed sort keys.
#[derive(Clone, Default)]
pub struct Builder {
	strings: Rodeo,
	canon: HashMap<String, Spur>,
}
impl Builder {
	/// Freezes the interned strings and precomputes a collation rank for each
	/// one, so that `Dictionary::cmp` is a cheap integer comparison.
	pub fn build(self) -> Dictionary {
		let mut sorted_spurs = self.strings.iter().collect::<Vec<_>>();
		// TODO this is too slow!
		// NOTE(review): the collator is constructed inside the comparison
		// closure, i.e. once per comparison — this is likely the cause of the
		// slowness noted above. Hoisting it out of the closure requires the
		// collator to be shareable across rayon worker threads; confirm
		// before changing.
		sorted_spurs.par_sort_unstable_by(|(_, a), (_, b)| {
			let collator = make_collator();
			collator.compare(a, b)
		});

		// Rank = position in the sorted order.
		let sort_keys = sorted_spurs
			.into_iter()
			.enumerate()
			.map(|(i, (spur, _))| (spur, i as u32))
			.collect();

		Dictionary {
			strings: self.strings.into_reader(),
			canon: self.canon,
			sort_keys,
		}
	}

	/// Interns `string` exactly as given and returns its spur.
	pub fn get_or_intern<S: AsRef<str>>(&mut self, string: S) -> Spur {
		self.strings.get_or_intern(string)
	}

	/// Interns the canonical representative for `string`: all strings that
	/// sanitize to the same form share one spur (the first one seen wins).
	/// Returns None when the sanitized form is empty.
	pub fn get_or_intern_canon<S: AsRef<str>>(&mut self, string: S) -> Option<Spur> {
		let cleaned = sanitize(string.as_ref());
		match cleaned.is_empty() {
			true => None,
			false => Some(
				self.canon
					.entry(cleaned)
					.or_insert_with(|| self.strings.get_or_intern(string.as_ref()))
					.to_owned(),
			),
		}
	}
}

Some files were not shown because too many files have changed in this diff Show more