Compare commits
360 commits
Author | SHA1 | Date | |
---|---|---|---|
|
88792f0669 | ||
|
c2fb46f26d | ||
|
26ce7e2550 | ||
|
46aed8096e | ||
|
2aeed5f188 | ||
|
41c4088477 | ||
|
10946330a8 | ||
|
2ce035f787 | ||
|
956301bfdb | ||
|
f35c4efac3 | ||
|
2a1c93c462 | ||
|
7625449434 | ||
|
1b0b5bd164 | ||
|
809e3f878d | ||
|
6862cff185 | ||
|
36da2c3e70 | ||
|
9923b0f40c | ||
|
1df3241ea2 | ||
|
77c313637f | ||
|
baa31f1056 | ||
|
cf5d1b7931 | ||
|
87c4bdc247 | ||
|
7c92e90d65 | ||
|
0eb1d7ee75 | ||
|
dd53d8d284 | ||
|
98e10ad682 | ||
|
f8b30c4e3d | ||
|
5a1753218c | ||
|
646f8297d2 | ||
|
d6416e0239 | ||
|
6681322370 | ||
|
f0cf3d2675 | ||
|
dac7145ce4 | ||
|
677413ef8c | ||
|
fd3f877f93 | ||
|
95c7d1a620 | ||
|
ac88bc9af0 | ||
|
7066e264cd | ||
|
3cea551ce9 | ||
|
b1770fc17e | ||
|
f4009a7fa7 | ||
|
d90b51f752 | ||
|
8ccc9cc2ee | ||
|
4625bf221d | ||
|
b940ca256b | ||
|
24f27e4f08 | ||
|
bd5aeaf591 | ||
|
9707f4a96d | ||
|
2d92ac03ef | ||
|
3f5e5eca69 | ||
|
dabb034964 | ||
|
cc2d2cedd8 | ||
|
bbd63e1b42 | ||
|
df402ed7b8 | ||
|
b5a8aea1f8 | ||
|
350557785c | ||
|
07e8077a38 | ||
|
23facd96b9 | ||
|
1c3ba3d709 | ||
|
364710ef79 | ||
|
2e2ddf017b | ||
|
1b142b1855 | ||
|
d47fffae4f | ||
|
11c72240ed | ||
|
466bbf5cf3 | ||
|
055a81e6f9 | ||
|
81e0abc59f | ||
|
4826e6aa40 | ||
|
2521ff1ddf | ||
|
00236a99e3 | ||
|
cfc848bf7c | ||
|
bf775ebc4c | ||
|
3ad5e97b75 | ||
|
73dc59f833 | ||
|
58d1af5edd | ||
|
ff7291a246 | ||
|
cfa2cedbc1 | ||
|
b9bcdd46b1 | ||
|
f371d5e331 | ||
|
41187199ba | ||
|
de39b2f4a5 | ||
|
cb241d21dd | ||
|
21d7e3049e | ||
|
68b8041f97 | ||
|
e8845c7ef9 | ||
|
b7719edd8e | ||
|
ed546ed531 | ||
|
c640086a3e | ||
|
cf6a092ab7 | ||
|
768ea095e1 | ||
|
5a5f696366 | ||
|
d1d12aecc5 | ||
|
ea75497bf1 | ||
|
8100dfceae | ||
|
f955eb75c5 | ||
|
0a7ae8ebad | ||
|
090ca387ab | ||
|
d53681b6c0 | ||
|
d555a2e5f0 | ||
|
142d400b8b | ||
|
5f585a61d8 | ||
|
08052c25a3 | ||
|
497b3bb545 | ||
|
32e67dc095 | ||
|
8b31698cf4 | ||
|
7a84cc0290 | ||
|
524e072e9f | ||
|
5ec0b5f7a5 | ||
|
fb18cb3c4f | ||
|
0058221e88 | ||
|
51283d935f | ||
|
a4e9aea1e4 | ||
|
7f39d8e8b7 | ||
|
316f5c0219 | ||
|
deeb3e8a05 | ||
|
ae5da0f4f3 | ||
|
c7a760e2c2 | ||
|
471e39495c | ||
|
67730f55fb | ||
|
1555c784de | ||
|
c51ce59fba | ||
|
a89e3d5145 | ||
|
658c23e70d | ||
|
053b684f3a | ||
|
1a8bf91628 | ||
|
a5061dfc92 | ||
|
2c2b12f536 | ||
|
f0a2afe01d | ||
|
9e18a221db | ||
|
e42c3abfe1 | ||
|
765de35f89 | ||
|
98bcd41e43 | ||
|
369bf3821b | ||
|
664ff721e2 | ||
|
b175e319b7 | ||
|
76535b2f87 | ||
|
d1a0b836cf | ||
|
071aced10a | ||
|
7f3e091e32 | ||
|
232eb7ac12 | ||
|
143da76673 | ||
|
8d51344dc3 | ||
|
bff82c3a7c | ||
|
454b4c00fc | ||
|
2bbfa064d5 | ||
|
f1e21a4f6e | ||
|
9e62dc108c | ||
|
2992ef89b8 | ||
|
4548574298 | ||
|
72f4604f7a | ||
|
646a8fa587 | ||
|
647e1d5614 | ||
|
c1c0cedccc | ||
|
d06ad07f51 | ||
|
f188b2943f | ||
|
e3041fca6f | ||
|
444d261d0b | ||
|
47c73f6196 | ||
|
63b92718d5 | ||
|
e11344d2b4 | ||
|
0b50a10a36 | ||
|
6a46aaeac6 | ||
|
7ae10c6f74 | ||
|
5d03b7919c | ||
|
6c2b192f8e | ||
|
ef6951faba | ||
|
e06f79c500 | ||
|
cb35ef0ebb | ||
|
f21f906eaf | ||
|
b943d9aa11 | ||
|
f971b78856 | ||
|
971b46be45 | ||
|
ee3f9fd5a0 | ||
|
b5762bd7bf | ||
|
99263ddeca | ||
|
be97bccab1 | ||
|
0fe3555560 | ||
|
bdc4f840a4 | ||
|
409d79d8a2 | ||
|
390ee03020 | ||
|
cb33c96548 | ||
|
5128796825 | ||
|
81403960b0 | ||
|
5e8587c39f | ||
|
e5339ab39a | ||
|
caf12f23b4 | ||
|
9a14114e50 | ||
|
83b5431994 | ||
|
b96cd2d781 | ||
|
bc17954db9 | ||
|
e5a8c325a6 | ||
|
625f4bd006 | ||
|
ae4200c6ce | ||
|
6bd0c25d7d | ||
|
2b81355f6d | ||
|
e65cee366d | ||
|
ae876915b4 | ||
|
7be9f25cb3 | ||
|
4072e3b07d | ||
|
54ce646931 | ||
|
e0bf259be3 | ||
|
07324ccca6 | ||
|
c1f24ce96b | ||
|
0c12729983 | ||
|
ad37a14cfa | ||
|
309620a088 | ||
|
1e0a6062f9 | ||
|
85cacd8bb7 | ||
|
7c5ff2e895 | ||
|
afc5fcb4c2 | ||
|
9a30065971 | ||
|
f4b0cb9eb7 | ||
|
e703f69a48 | ||
|
57a0163c04 | ||
|
5444285327 | ||
|
6837994433 | ||
|
8141e565e0 | ||
|
a3c2b3bc32 | ||
|
570c2b3894 | ||
|
f625c57d20 | ||
|
d492afc885 | ||
|
4112c7d79d | ||
|
39407c6551 | ||
|
0afab8d634 | ||
|
bc3ed59382 | ||
|
636803c0df | ||
|
a7c4c90427 | ||
|
91152fdc08 | ||
|
1bbeee7f39 | ||
|
6564e7d078 | ||
|
41c043f863 | ||
|
3f645d1011 | ||
|
6b5c291cb7 | ||
|
310e3b6c4d | ||
|
a2232aa9f2 | ||
|
0841c15f48 | ||
|
763ba94e9b | ||
|
a4baa2c792 | ||
|
e6483cf138 | ||
|
b014c63af4 | ||
|
6821318a4d | ||
|
0a1f3fa78d | ||
|
169b2b5cb8 | ||
|
782da35a7b | ||
|
2cbb249c46 | ||
|
2f2fdf9056 | ||
|
f0fa985f8a | ||
|
6b1133e27c | ||
|
8f6e72fbd6 | ||
|
2c7eb9f643 | ||
|
7a17cdc195 | ||
|
16434e6c51 | ||
|
5a14830138 | ||
|
845105cf38 | ||
|
cd45836924 | ||
|
8f2566f574 | ||
|
a0624f7968 | ||
|
7a1d433c8a | ||
|
ae9f94ce4f | ||
|
e8af339cde | ||
|
b4b0e1181f | ||
|
72ec7b260a | ||
|
35736ee1d5 | ||
|
332e39876e | ||
|
b42c6d39e8 | ||
|
1f3cc1ea26 | ||
|
8db6a2352b | ||
|
93e8d7d94b | ||
|
64ef7cb21f | ||
|
2012258a72 | ||
|
2965cbdf7e | ||
|
efc27757c7 | ||
|
91352fc13b | ||
|
470fbc6d1c | ||
|
9e9d031f4e | ||
|
caf6feea7a | ||
|
caa8907297 | ||
|
6871f41a99 | ||
|
00cc18c798 | ||
|
3362a828cd | ||
|
9d8d543494 | ||
|
5a785a2e16 | ||
|
0f25a12877 | ||
|
1c4ef6c5ee | ||
|
1020f27413 | ||
|
0e63f64513 | ||
|
153943a3ae | ||
|
d82563efc0 | ||
|
274a1f2cf7 | ||
|
18858d8d1a | ||
|
03d5568765 | ||
|
5c4631c673 | ||
|
84921f7db3 | ||
|
08353a717f | ||
|
138886e55c | ||
|
6884548cd0 | ||
|
12a9f2ec3c | ||
|
138eacc9fc | ||
|
11775d961b | ||
|
77dc2eac23 | ||
|
7279793d25 | ||
|
d4a427648e | ||
|
123eee7d2d | ||
|
fd6a13083d | ||
|
5ca38939bd | ||
|
c1abd8fe3b | ||
|
fc0a4fd6eb | ||
|
6f24ff248f | ||
|
4807b2d3b9 | ||
|
608dabb789 | ||
|
8d38c5b664 | ||
|
f6d45c8387 | ||
|
930fd67ae3 | ||
|
00b6444048 | ||
|
bd330ddd84 | ||
|
46a232219d | ||
|
a8660793f8 | ||
|
eaec68dff0 | ||
|
1484ecabe9 | ||
|
1812bedfd2 | ||
|
c57583d1d4 | ||
|
98d00d261d | ||
|
edc7170b89 | ||
|
e4959be2f4 | ||
|
fee2f17fb1 | ||
|
4c5a6bc2d6 | ||
|
1e9d307a05 | ||
|
4ec8f2161b | ||
|
f609afc5ed | ||
|
9f0bc06dac | ||
|
d1cb328523 | ||
|
33997fc8e1 | ||
|
602c1c03b5 | ||
|
f3abb816ff | ||
|
96d702b79e | ||
|
223894c2b6 | ||
|
bb8d1142d6 | ||
|
822f3ed073 | ||
|
2873f38e04 | ||
|
388901cf65 | ||
|
df0de19567 | ||
|
29ae862aad | ||
|
a5f5a77100 | ||
|
63e971059a | ||
|
d41e837561 | ||
|
f5a2eed423 | ||
|
41a4b21327 | ||
|
374d0ca56f | ||
|
90fd6bbcc9 | ||
|
39c8cf7595 | ||
|
f27bc4ccfc | ||
|
818dfe877c | ||
|
b6e9940c76 | ||
|
e2bf97db99 | ||
|
d01583b406 | ||
|
f104355076 | ||
|
4c25195deb | ||
|
ed581c57cf | ||
|
652772ba0e | ||
|
6c27409ef2 |
213 changed files with 14722 additions and 10771 deletions
.codecov.yml.envrcrust-toolchainrust-toolchain.toml
.github/workflows
.gitignore.vscode
CHANGELOG.mdCargo.lockCargo.tomlREADME.mdbuild.rsdiesel.tomldocs
flake.lockflake.nixmigrations
201706250006_init
201706250228_directories_date_added
201706272129_users_table
201706272304_misc_settings_table
201706272313_ddns_config_table
201706272327_mount_points_table
201707091522_playlists_tables
20170929203228_add_prefix_url
20171015224223_add_song_duration
20180303211100_add_last_fm_credentials
2019-08-08-042731_blob_auth_secret
2019-09-28-231910_pbkdf2_simple
2020-01-08-231420_add_theme
2020-11-25-174000_remove_prefix_url
res
branding
readme
unix
windows
src
app.rs
app
|
@ -1,6 +1,9 @@
|
|||
coverage:
|
||||
range: "0...100"
|
||||
status:
|
||||
patch:
|
||||
default:
|
||||
informational: true
|
||||
project:
|
||||
default:
|
||||
informational: true
|
||||
|
|
1
.envrc
Normal file
1
.envrc
Normal file
|
@ -0,0 +1 @@
|
|||
use flake
|
11
.github/workflows/build.yml
vendored
11
.github/workflows/build.yml
vendored
|
@ -11,19 +11,14 @@ jobs:
|
|||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
features: [--no-default-features, --features bundle-sqlite, --features ui]
|
||||
exclude:
|
||||
- os: windows-latest
|
||||
features: --no-default-features
|
||||
features: ["", --features ui]
|
||||
|
||||
steps:
|
||||
- name: Install libsqlite3-dev
|
||||
if: contains(matrix.os, 'ubuntu') && !contains(matrix.features, 'bundle-sqlite')
|
||||
run: sudo apt-get update && sudo apt-get install libsqlite3-dev
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
|
|
46
.github/workflows/coverage.yml
vendored
46
.github/workflows/coverage.yml
vendored
|
@ -14,17 +14,35 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
- name: Install Tarpaulin
|
||||
run: cargo install cargo-tarpaulin
|
||||
- name: Run Tests
|
||||
run: cargo tarpaulin --all-features --ignore-tests --out Xml
|
||||
- name: Upload Results
|
||||
uses: codecov/codecov-action@v1
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
fail_ci_if_error: true
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
components: llvm-tools-preview
|
||||
- name: Install grcov
|
||||
run: cargo install grcov
|
||||
- name: Run tests
|
||||
run: cargo test --no-fail-fast
|
||||
env:
|
||||
RUSTFLAGS: "-Cinstrument-coverage"
|
||||
- name: Gather coverage results
|
||||
run: >
|
||||
grcov
|
||||
.
|
||||
-t lcov
|
||||
-o coverage.txt
|
||||
--llvm
|
||||
--branch
|
||||
--ignore-not-existing
|
||||
--binary-path ./target/debug/
|
||||
--excl-line "#\[derive\("
|
||||
--excl-br-line "#\[derive\("
|
||||
--excl-start "mod tests \{"
|
||||
--excl-br-start "mod tests \{"
|
||||
- name: Upload Results
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
|
19
.github/workflows/deploy-demo.yml
vendored
Normal file
19
.github/workflows/deploy-demo.yml
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
name: Deploy Demo Server
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
types: [released]
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: Trigger Demo Build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.POLARIS_DEMO_ACCESS_TOKEN }}
|
||||
repository: agersant/polaris-demo
|
||||
event-type: polaris-release
|
105
.github/workflows/release.yml
vendored
105
.github/workflows/release.yml
vendored
|
@ -20,15 +20,14 @@ jobs:
|
|||
target_branch: release
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Checkout Release Branch
|
||||
uses: actions/checkout@master
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Update Polaris Version in Cargo.toml
|
||||
run: gawk -i inplace '/^version/ { if (count == 0) { $3 = "\"${{ github.event.inputs.versionNumber }}\""; count++ } } 1' Cargo.toml
|
||||
- name: Commit Cargo.toml Version Change
|
||||
uses: EndBug/add-and-commit@v5
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
branch: release
|
||||
message: "Updated version number"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
@ -39,93 +38,71 @@ jobs:
|
|||
git tag -f -a ${{ github.event.inputs.versionNumber }} -m "Version number"
|
||||
git push -f --tags
|
||||
|
||||
create_release:
|
||||
name: Create Github Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: branch_and_tag
|
||||
|
||||
steps:
|
||||
- name: Create Github Release
|
||||
id: create_release
|
||||
uses: actions/create-release@v1.0.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ github.event.inputs.versionNumber }}
|
||||
release_name: Polaris ${{ github.event.inputs.versionNumber }}
|
||||
draft: true
|
||||
prerelease: false
|
||||
- name: Write Upload URL To Disk
|
||||
run: echo "${{ steps.create_release.outputs.upload_url }}" > upload-url
|
||||
- name: Store Upload URL
|
||||
uses: actions/upload-artifact@v1
|
||||
with:
|
||||
name: release
|
||||
path: upload-url
|
||||
|
||||
windows:
|
||||
name: Windows
|
||||
runs-on: windows-latest
|
||||
needs: create_release
|
||||
needs: branch_and_tag
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-windows-release
|
||||
with:
|
||||
version-number: ${{ github.event.inputs.versionNumber }}
|
||||
output-file: polaris.msi
|
||||
- name: Retrieve Upload URL
|
||||
uses: actions/download-artifact@v1
|
||||
output-file: Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
- name: Upload installer
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
- name: Read Upload URL
|
||||
shell: bash
|
||||
run: echo "UPLOAD_URL=$(cat release/upload-url)" >> $GITHUB_ENV
|
||||
- name: Upload Installer To Github Release
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ env.UPLOAD_URL }}
|
||||
asset_path: polaris.msi
|
||||
asset_name: Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
asset_content_type: application/x-msi
|
||||
if-no-files-found: error
|
||||
name: windows-artifact
|
||||
path: Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
|
||||
linux:
|
||||
name: Linux
|
||||
runs-on: ubuntu-latest
|
||||
needs: create_release
|
||||
needs: branch_and_tag
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-linux-release
|
||||
with:
|
||||
version-number: ${{ github.event.inputs.versionNumber }}
|
||||
output-file: polaris.tar.gz
|
||||
- name: Retrieve Upload URL
|
||||
uses: actions/download-artifact@v1
|
||||
output-file: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
- name: Upload release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: release
|
||||
- name: Read Upload URL
|
||||
run: echo "UPLOAD_URL=$(cat release/upload-url)" >> $GITHUB_ENV
|
||||
- name: Upload To Github Release
|
||||
uses: actions/upload-release-asset@v1.0.1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
if-no-files-found: error
|
||||
name: linux-artifact
|
||||
path: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
|
||||
create_release:
|
||||
name: Create Github Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [windows, linux]
|
||||
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
upload_url: ${{ env.UPLOAD_URL }}
|
||||
asset_path: polaris.tar.gz
|
||||
asset_name: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
asset_content_type: application/gzip
|
||||
merge-multiple: true
|
||||
- name: Make Github release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
body: 'Release notes are documented in [CHANGELOG.md](https://github.com/agersant/polaris/blob/master/CHANGELOG.md)'
|
||||
draft: true
|
||||
prerelease: false
|
||||
name: Polaris ${{ github.event.inputs.versionNumber }}
|
||||
tag_name: ${{ github.event.inputs.versionNumber }}
|
||||
fail_on_unmatched_files: true
|
||||
files: |
|
||||
Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
|
|
187
.github/workflows/validate-install.yml
vendored
187
.github/workflows/validate-install.yml
vendored
|
@ -1,32 +1,28 @@
|
|||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
name: Validate Install
|
||||
|
||||
jobs:
|
||||
|
||||
package_linux_release:
|
||||
name: Package Linux Release
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-linux-release
|
||||
with:
|
||||
version-number: '0.0.0'
|
||||
output-file: polaris.tar.gz
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: linux-release
|
||||
path: polaris.tar.gz
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-linux-release
|
||||
with:
|
||||
version-number: "0.0.0"
|
||||
output-file: polaris.tar.gz
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: linux-release
|
||||
path: polaris.tar.gz
|
||||
|
||||
validate_linux_system_install:
|
||||
name: Linux System Install
|
||||
|
@ -34,30 +30,28 @@ jobs:
|
|||
needs: package_linux_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview
|
||||
- name: Preview Install w/ Custom Prefix
|
||||
run: make preview PREFIX=/some/random/prefix
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
- name: Install
|
||||
run: sudo --preserve-env=PATH make install
|
||||
- name: Run Polaris
|
||||
run: sudo /usr/local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: sudo kill -KILL $(cat /usr/local/var/run/polaris/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: sudo make uninstall
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview
|
||||
- name: Preview Install w/ Custom Prefix
|
||||
run: make preview PREFIX=/some/random/prefix
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Install
|
||||
run: sudo --preserve-env=PATH make install
|
||||
- name: Run Polaris
|
||||
run: sudo /usr/local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: sudo kill -KILL $(sudo cat /usr/local/var/run/polaris/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: sudo make uninstall
|
||||
|
||||
validate_linux_xdg_install:
|
||||
name: Linux XDG Install
|
||||
|
@ -65,52 +59,49 @@ jobs:
|
|||
needs: package_linux_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview-xdg
|
||||
- name: Preview Install w/ Custom XDG_DATA_HOME
|
||||
run: make preview-xdg XDG_DATA_HOME=/my/own/xdg/home
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
- name: Install
|
||||
run: make install-xdg
|
||||
- name: Run Polaris
|
||||
run: $HOME/.local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: kill -KILL $(cat /tmp/polaris-1001/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: make uninstall-xdg
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview-xdg
|
||||
- name: Preview Install w/ Custom XDG_DATA_HOME
|
||||
run: make preview-xdg XDG_DATA_HOME=/my/own/xdg/home
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Install
|
||||
run: make install-xdg
|
||||
- name: Run Polaris
|
||||
run: $HOME/.local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: kill -KILL $(cat /tmp/polaris-1001/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: make uninstall-xdg
|
||||
|
||||
package_windows_release:
|
||||
name: Package Windows Release
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-windows-release
|
||||
with:
|
||||
version-number: '0.0.0'
|
||||
output-file: polaris.msi
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: windows-release
|
||||
path: polaris.msi
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-windows-release
|
||||
with:
|
||||
version-number: "0.0.0"
|
||||
output-file: polaris.msi
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: windows-release
|
||||
path: polaris.msi
|
||||
|
||||
validate_windows_install:
|
||||
name: Windows Install
|
||||
|
@ -118,20 +109,20 @@ jobs:
|
|||
needs: package_windows_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: windows-release
|
||||
path: .
|
||||
- name: Install
|
||||
run: msiexec /i polaris.msi /qn
|
||||
- name: Run Polaris
|
||||
run: |
|
||||
start $env:LOCALAPPDATA/Permafrost/Polaris/polaris-cli.exe
|
||||
sleep 5
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: taskkill /IM polaris-cli.exe
|
||||
- name: Uninstall
|
||||
run: msiexec /x polaris.msi /qn
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: windows-release
|
||||
path: .
|
||||
- name: Install
|
||||
run: msiexec /i polaris.msi /qn
|
||||
- name: Run Polaris
|
||||
run: |
|
||||
start $env:LOCALAPPDATA/Permafrost/Polaris/polaris-cli.exe
|
||||
sleep 5
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: taskkill /IM polaris-cli.exe
|
||||
- name: Uninstall
|
||||
run: msiexec /x polaris.msi /qn
|
||||
|
|
10
.gitignore
vendored
10
.gitignore
vendored
|
@ -1,3 +1,6 @@
|
|||
# Dev environment
|
||||
.direnv
|
||||
|
||||
# Build output
|
||||
target
|
||||
|
||||
|
@ -8,8 +11,13 @@ test-output
|
|||
TestConfig.toml
|
||||
|
||||
# Runtime artifacts
|
||||
*.sqlite
|
||||
auth.secret
|
||||
collection.index
|
||||
polaris.log
|
||||
polaris.ndb
|
||||
polaris.pid
|
||||
profile.json
|
||||
/peaks
|
||||
/thumbnails
|
||||
|
||||
# Release process artifacts (usually runs on CI)
|
||||
|
|
10
.vscode/settings.json
vendored
Normal file
10
.vscode/settings.json
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true,
|
||||
"**/test-output/**": true
|
||||
},
|
||||
"files.exclude": {
|
||||
"**/target": true,
|
||||
"**/test-output": true
|
||||
}
|
||||
}
|
47
.vscode/tasks.json
vendored
47
.vscode/tasks.json
vendored
|
@ -1,47 +0,0 @@
|
|||
{
|
||||
"version": "2.0.0",
|
||||
"presentation": {
|
||||
"reveal": "always"
|
||||
},
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"run",
|
||||
"--",
|
||||
"-c",
|
||||
"./TestConfigWindows.toml",
|
||||
"-d",
|
||||
"test/db.sqlite",
|
||||
"-w",
|
||||
"../polaris-web"
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"group": "test",
|
||||
"label": "Test",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"test"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Compile",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"check"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
427
CHANGELOG.md
Normal file
427
CHANGELOG.md
Normal file
|
@ -0,0 +1,427 @@
|
|||
# Changelog
|
||||
|
||||
## Unreleased Changes
|
||||
|
||||
- Fixed a typo in the log message that is written after applying configuration changes. (thanks @luzpaz)
|
||||
|
||||
## Polaris 0.15.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for browsing the music collection by metadata (by artist, by genre, etc.).
|
||||
- Added support for multi-value metadata for the following song fields: `artist`, `album artist`, `composer`, `genre`, `label` and `lyricist`.
|
||||
- Added support for structured search query syntax.
|
||||
- Added capability to extract audio waveform data.
|
||||
- Configuration data (user credentials, music directories, etc.) is now stored in a plain-text file which Polaris can read and write to.
|
||||
- ⚠️ The configuration format is now ([documented](docs/CONFIGURATION.md)) and slightly simpler than in previous versions.
|
||||
- Persistent data, such as playlists, is now saved in a directory that may be configured with the `--data` CLI option or the `POLARIS_DATA_DIR` environment variable.
|
||||
- ⚠️ Upon first launch, configuration data and playlists will be migrated from the Polaris 0.14.0 database into their new homes. After successful migration, the old database file will be deleted and the server will finally start. This migration functionality will be removed in future Polaris versions.
|
||||
- Collection scans are now automatically triggered when configuration changes or files are added/removed.
|
||||
- ⚠️ Dynamic DNS now works with any provider that supports updates over HTTP without header-based auth. This means YDNS is no longer an option, and you need to input a new URL for DDNS updates.
|
||||
- ⚠️ Removed last.fm integration due to maintenance concerns (abandoned libraries, broken account linking) and mismatch with project goals.
|
||||
- Removed periodic collection scans.
|
||||
|
||||
### Web client
|
||||
|
||||
- Every page has been updated to a new visual style.
|
||||
- The file browser is now displayed as an interactive tree on a single page.
|
||||
- The file browser now supports common navigation keyboard shortcuts.
|
||||
- The file browser now supports jumping to a visible file or folder by typing the start of its name.
|
||||
- The file browser now omits the top-level directory when only one music folder has been configured.
|
||||
- The current playlist now has two display modes: compact or with album art.
|
||||
- Songs in the current playlist can now be selected and re-ordered with the mouse.
|
||||
- Added a button to display statistics about the current playlist.
|
||||
- Added new pages to browse the music collection by genre.
|
||||
- Added new pages to browse the music collection by artist.
|
||||
- Added a new page to browse the music collection by album.
|
||||
- The Recently Added Albums and Random Albums pages now distinguish albums by file metadata instead of file path.
|
||||
- When navigating back to the Random Albums page, the shuffle ordering is now preserved.
|
||||
- The current playlist now supports common navigation keyboard shortcuts.
|
||||
- The seekbar for the current song being played has been replaced with a waveform visualization.
|
||||
- The title of the current song in the player can be clicked to display its metadata
|
||||
- Improved responsiveness when queuing large amounts of songs at once.
|
||||
- The `Settings > Collection` page now shows the current status of collection scanning.
|
||||
- Theme preferences have been reset and are now stored client-side.
|
||||
- Accent color is now configured as a saturation multiplier and base hue, which are used to generate a full color ramp.
|
||||
|
||||
### API
|
||||
|
||||
- API version is now 8.0.
|
||||
- Documentation is now served under `/api-docs` instead of `/swagger` (eg. `http://localhost:5050/api-docs`)
|
||||
- Clients are now expected to send their preferred API major version in a `Accept-Version` header. Omitting this currently defaults to `7`, but will become an error in future Polaris releases. Support for API version 7 will be removed entirely in a future release.
|
||||
- Most API responses now support gzip compression.
|
||||
- The response format of the `/browse`, `/flatten`, `/get_playlist`, `/search/<query>` endpoints has been modified to accommodate large lists.
|
||||
- Added new endpoints to query albums and artists.
|
||||
- The `/random` and `/recent` albums are deprecated in favor of `/albums/random` and `/albums/recent`. These endpoints now have optional parameters for RNG seeding and pagination.
|
||||
- The `/search/<query>` endpoint now requires a non-empty query (`/search/` now returns HTTP status code 404, regardless of API version).
|
||||
- The `/search/<query>` endpoint now supports per-field queries and boolean combinators.
|
||||
- The `/thumbnail` endpoint supports a new size labeled `tiny`, which returns 40x40px images.
|
||||
- Added a new `/get_songs` endpoint which returns song metadata in bulk.
|
||||
- Added a new `/peaks` endpoint which returns audio signal peaks that can be used to draw waveform visualizations.
|
||||
- Added a new `/index_status` endpoint which returns the status of music collection scans.
|
||||
- Removed the `/config` and `/preferences` API endpoints.
|
||||
- Removed the `/ddns` API endpoints, merged into the existing `/settings` endpoints.
|
||||
|
||||
## Polaris 0.14.3
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a build error (https://github.com/rust-lang/rust/issues/127343) with recent versions of the Rust compiler (thanks @pbsds)
|
||||
- Added support for m4b audio files (thanks @duydl)
|
||||
|
||||
## Polaris 0.14.2
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a startup error in Windows packaged builds
|
||||
|
||||
## Polaris 0.14.1
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed compilation issue when using musl toolchains
|
||||
- Log messages that DDNS is not setup have been downgraded to debug level
|
||||
|
||||
### Web client
|
||||
|
||||
- Fixed a bug where non-ASCII files or directories were not always alphabetically sorted (thanks @dechamps)
|
||||
- Fixed a bug where after linking a last.fm account, clicking the account name would not link to the expected page
|
||||
|
||||
## Polaris 0.14.0
|
||||
|
||||
### General
|
||||
|
||||
- Changes are now documented in `CHANGELOG.md` instead of inside individual Github releases
|
||||
|
||||
### Server
|
||||
|
||||
- API version is now 7.0
|
||||
- ⚠️ Removed support for authentication via cookies (deprecated in Polaris 0.13.0)
|
||||
- ⚠️ Removed support for authentication via the `Basic` scheme when using the HTTP `Authorization` header (deprecated in Polaris 0.13.0)
|
||||
- Fixed a bug where all music sources would be deleted when trying to add sources with duplicate names
|
||||
- Additional metadata fields are now indexed: lyricist, composer, genre and label (thanks @pmphfm)
|
||||
- Endpoints returning thumbnail images or audio files no longer use HTTP `content-encoding`
|
||||
- When indexing files with ID3v2 tags, the "Original Date Released" frame can now be used to populate the year associated with a song
|
||||
- The `/thumbnail` endpoint now supports an optional parameter for small/large/native image sizing. (thanks @Saecki)
|
||||
Log files now contain more details about the cause of failed HTTP requests (3xx, 4xx, 5xx)
|
||||
- Startup failures now generate clearer error messages
|
||||
|
||||
### Web client
|
||||
|
||||
- Volume slider now applies non-linearly
|
||||
- Artist names are now displayed in the Random Albums and Recent Albums pages
|
||||
|
||||
## Polaris 0.13.5
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for AIFF and WAVE files (thanks @gahag)
|
||||
|
||||
### Web Client
|
||||
|
||||
- Improved performance when scrolling large playlists
|
||||
- Fixed display and playback issues when a song was used multiple times in a playlist
|
||||
- Playlist duration can now display number of days
|
||||
- Fixed a bug where the playlist panel could have blank space in very tall browser windows
|
||||
- Major dependencies updates
|
||||
|
||||
## Polaris 0.13.4
|
||||
|
||||
### Server
|
||||
|
||||
Adjustments to logging behavior.
|
||||
|
||||
On Linux:
|
||||
|
||||
- Running without `-f` emits a log file
|
||||
- Running with `-f` and no `--log` option does not emit a log file
|
||||
- Running with `-f` and `--log` option emits a log file
|
||||
|
||||
On Windows:
|
||||
|
||||
- Running with UI feature (`polaris.exe` in releases) emits a log file
|
||||
- Running without UI feature (`polaris-cli.exe` in releases) and no --log option does not emit a log file
|
||||
- Running without UI feature (`polaris-cli.exe` in releases) and --log option emits a log file
|
||||
|
||||
## Polaris 0.13.3
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a bug where music that is no longer on disk was still considered in the collection, even after re-indexing
|
||||
- On Windows, Polaris now creates a log file
|
||||
- On Linux, Polaris now creates a log file, even when running with the -f option
|
||||
|
||||
## Polaris 0.13.2
|
||||
|
||||
### Web client
|
||||
|
||||
- Fixed a bug where it was not possible to view or edit which users have administrator rights
|
||||
- Fixed a bug where, in some cases, drag and dropping a specific disc from an album would not queue the entire disc
|
||||
|
||||
## Polaris 0.13.1
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a bug where the Windows installer would create unusable installations. #122
|
||||
|
||||
## Polaris 0.13.0
|
||||
|
||||
### API changes
|
||||
|
||||
- Bumped API version number to 6.0.
|
||||
- Added new endpoints to manage users, mount points and settings more granularly.
|
||||
Added support for authenticating via bearer tokens generated by the /auth endpoint. These tokens can be submitted via Bearer HTTP Authorization headers, or as URL parameters (`?auth_token=…`).
|
||||
- Authentication using cookies or Basic HTTP Authorization headers is deprecated and will be removed in a future revision.
|
||||
- Authentication cookies no longer expire after 24 hours. The newly added bearer tokens also have no expiration date.
|
||||
Last.fm account linking now requires a short-lived auth token obtained from the newly added `lastfm/link_token` endpoint.
|
||||
|
||||
Server
|
||||
|
||||
- ⚠️Breaking change⚠️ If you use a config file, the `reindex_every_n_seconds` and `album_art_pattern` fields must now be in a [settings] section.
|
||||
- ⚠️Breaking change⚠️ The installation process on Linux has changed a lot. See the README for updated installation instructions. A summary of the changes is available [here](https://github.com/ogarcia/docker-polaris/issues/2).
|
||||
- Embedded album art is now supported for mp3, flac and m4a files (thanks @Saecki).
|
||||
- OPUS files can now be indexed and streamed (thanks @zaethan).
|
||||
- APE files can now be indexed and streamed.
|
||||
- The collection indexer has been rewritten for better performance. This also fixed an issue where on some machines, the web client would be unusable while indexing (thanks @inicola for the code reviews).
|
||||
- Thumbnail generation is now slightly faster, and works with more pixel formats (notably RGBA16).
|
||||
Polaris now uses actix-web instead of rocket. This change fixes numerous performance and stability issues.
|
||||
- Sqlite is now bundled by default when building Polaris and was removed from the list of prerequisites. This can be controlled with the `bundle-sqlite` feature flag when compiling Polaris.
|
||||
- The default album art pattern now includes the jpeg extension in addition to jpg.
|
||||
- Album art patterns are now case insensitive.
|
||||
|
||||
Web client
|
||||
|
||||
- ⚠️Breaking change⚠️ Your current playlist will appear broken after this update. Please clear the current playlist using the trash can icon. Saved playlists are not affected.
|
||||
- Added a logout button.
|
||||
- Reworked interface for managing user accounts.
|
||||
- Added a shuffle button to randomly re-order the content of the current playlist.
|
||||
- The total duration of the current playlist is now displayed.
|
||||
- Audio output can now be toggled on/off by clicking the volume icon.
|
||||
- Individual discs from multi-disc albums can now be dragged into the playlist.
|
||||
- When browsing to an album, songs are now displayed and queued in filepath order.
|
||||
- Fixed a bug where albums could not be dragged from the random or recent views.
|
||||
- Fixed a bug where directories with a # sign in their name could not be browsed to.
|
||||
|
||||
## Polaris 0.12.0
|
||||
|
||||
### Server
|
||||
|
||||
- Library indexing speed is now significantly faster
|
||||
- When indexing files that have malformed ID3 tags, information preceding the error will no longer be discarded
|
||||
- Deleted users can no longer make requests using an existing session
|
||||
When using a config file, existing users, mount points and DDNS settings are no longer removed before applying the configuration
|
||||
- When using a config file to create users, blank usernames are now ignored
|
||||
- Improved architecture and added more unit tests
|
||||
|
||||
API Changes
|
||||
|
||||
- API version number bumped to 4.0
|
||||
- The auth endpoint now returns HTTP cookies instead of a JSON response
|
||||
- Client requests to update Last.fm status no longer return an error if no Last.fm account is associated with the user
|
||||
- The thumbnail endpoint now supports an option to disable padding to a square image
|
||||
|
||||
Web client
|
||||
|
||||
- The web client now uses Vue instead of Riot as its UI framework
|
||||
- Added support for theming
|
||||
|
||||
## Polaris 0.11.0
|
||||
|
||||
### Server
|
||||
|
||||
- Compatible with current versions of the Rust nightly compiler
|
||||
- Fixed a rare crash when indexing corrupted mp3 files
|
||||
- On Linux, Polaris now notifies systemd after starting up
|
||||
- Release tarball for Linux version now includes a top-level directory
|
||||
- User sessions no longer break across server restarts (more improvements still to do on this: #36)
|
||||
- ⚠️ Breaking change: due to improvements in Polaris credentials management, you will have to re-create your users and playlists after upgrading to this version. If you want to preserve your playlists, you can use a program like DB Browser for SQLite to back up your playlists (from db.sqlite within your Polaris installation directory) and restore them after you re-create users with the same names.
|
||||
|
||||
### Web client
|
||||
|
||||
- Song durations are now listed when available
|
||||
- Fixed a bug where clicking on breadcrumbs did not always work when the Polaris server is hosted on Windows
|
||||
- Current track info now shows in browser tab title
|
||||
- Fixed a semi-rare bug where indexing would not start during initial setup flow
|
||||
- Improved handling of untagged songs
|
||||
- Fixed a bug where playlist had padding in Chrome
|
||||
- Fixed a bug where folder icons did not render on some systems
|
||||
|
||||
Thank you to @lnicola for working on most of the server changes!
|
||||
|
||||
## Polaris 0.10.0
|
||||
|
||||
### Server
|
||||
|
||||
- Polaris servers now ship with an interactive API documentation, available at http://localhost:5050/swagger
|
||||
- When using a prefix URL in Polaris config files, a / will no longer be added automatically at the end of the prefix
|
||||
|
||||
### Web client
|
||||
|
||||
- Automatically bring up player panel when songs are queued
|
||||
- Fixed a bug where songs were not always correctly sorted by track number in browser panel
|
||||
- Fixed a bug where some button hitboxes didn't match their visuals
|
||||
|
||||
## Polaris 0.9.0
|
||||
|
||||
### Server
|
||||
|
||||
- Rewrote all endpoints and server setup using Rocket instead of Iron
|
||||
- Fixed a bug where special characters in URL to collection folders were not handled correctly (bumped API version number)
|
||||
- Server API is now unit tested
|
||||
- Fixed a bug where lastFM integration endpoints did not work
|
||||
- ⚠️ Compiling Polaris now requires the nightly version of the Rust compiler
|
||||
|
||||
### Web client
|
||||
|
||||
- Encode special characters in URL to collection folders
|
||||
|
||||
## Polaris 0.8.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added new API endpoints for search
|
||||
- Added new API endpoints for Last.fm integration
|
||||
- Thumbnails are now stored as .jpg images instead of .png
|
||||
- Duration of some audio files is now being indexed
|
||||
- On Linux when running as a forking process, a .pid file will be written
|
||||
- Fixed a bug where usernames were inserted in session even after failed authentication
|
||||
|
||||
### Web client
|
||||
|
||||
- Added search panel
|
||||
- Added settings tab to link Last.fm account
|
||||
|
||||
## Polaris 0.7.1
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for prefix_url option in configuration files
|
||||
- Improved performance of thumbnail creation
|
||||
|
||||
## Polaris 0.7.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for the Partial-Content HTTP header when serving music, this fixes several streaming/seeking issues when using the web client (especially in Chrome)
|
||||
- New API endpoints for playlist management
|
||||
- New command line argument (-p) to run on a custom port (contribution from @jxs)
|
||||
- New command line argument (-f) to run in foreground on Linux (contribution from @jxs)
|
||||
- Fixed a bug where tracks were queued out of order
|
||||
- Updated program icon on Windows
|
||||
|
||||
Web client
|
||||
|
||||
- Added support for playlists
|
||||
- Added a button to queue the current directory (thanks @jxs)
|
||||
|
||||
## Polaris 0.6.0
|
||||
|
||||
### Server
|
||||
|
||||
- Internal improvements to database management (now using Diesel)
|
||||
- Configuration settings are now stored in the database, polaris.toml config files are no longer loaded by default
|
||||
- Added API endpoints to read and write configuration
|
||||
- User passwords are now encrypted in storage
|
||||
- Fixed a bug where results of api/browse were not sorted correctly
|
||||
|
||||
Web client
|
||||
|
||||
- Settings can now be edited from the web UI
|
||||
- Collection re-index can now be triggered from the web UI
|
||||
- Added initial setup configuration flow to help set up first user and mount point
|
||||
- Visual changes
|
||||
|
||||
## Polaris 0.5.1
|
||||
|
||||
This is a minor release, pushing quite a bit of internal cleanup in the wild.
|
||||
|
||||
Server
|
||||
|
||||
- Removed OpenSSL dependency on Windows
|
||||
- No longer send a HTTP cookie after authentication
|
||||
|
||||
## Polaris 0.5.0
|
||||
|
||||
This release adds Linux support and a variety of improvements to the web client.
|
||||
|
||||
### Server
|
||||
|
||||
- Added Linux support
|
||||
- Moved location of configuration file on Windows to `%appdata%\Permafrost\Polaris\polaris.toml`
|
||||
|
||||
### Web client
|
||||
|
||||
- Performance improvements from upgrading RiotJS to 3.4.4 (from 2.6.2)
|
||||
- Added support for browsing random and recently added albums
|
||||
- Minor visual changes (colors, whitespace, etc.)
|
||||
- Updated favicon
|
||||
- Fixed a bug where songs containing special characters in their title would not play
|
||||
- Persist playlist and player state across sessions
|
||||
|
||||
## Polaris 0.4.0
|
||||
|
||||
This release adds new features supporting the development of polaris-android.
|
||||
|
||||
### Server
|
||||
|
||||
- Added API endpoint to pull recently added albums
|
||||
- Added support for the Authorization HTTP header (in addition to the existing /auth API endpoint)
|
||||
|
||||
## Polaris 0.3.0
|
||||
|
||||
This release is an intermediate release addressing issues with the installation process and updating internals.
|
||||
|
||||
### General
|
||||
|
||||
- Fixed missing OpenSSL DLL in Windows installer (fixes Issue #3)
|
||||
- Split every file into an individual installer component
|
||||
|
||||
### Server
|
||||
|
||||
- Added API endpoint to pull random albums
|
||||
- Upgraded dependencies
|
||||
- Added unit tests to indexing and metadata decoding
|
||||
|
||||
### Web client
|
||||
|
||||
- Web interface playlist now displays more tracks (enough to fill a 4k monitor at normal font size)
|
||||
|
||||
## Polaris 0.2.0
|
||||
|
||||
This release is focused on polish and performance, solidifying the basics that were put together in version 0.1.0. Here are the major changes:
|
||||
|
||||
### General
|
||||
|
||||
- Polaris now has a project logo
|
||||
- Windows installer now supports upgrading an existing install (from 0.2.0 to higher versions)
|
||||
- Added support for multi-disc albums
|
||||
|
||||
### Server
|
||||
|
||||
- Major performance improvements to /browse and /flatten API requests (up to 1000x faster for large requests)
|
||||
- Added API endpoint for version number
|
||||
- Album covers are now served as thumbnails rather than at source size
|
||||
- Moved configuration file outside of /Program Files
|
||||
- Added support for Ogg Vorbis, FLAC and APE metadata
|
||||
- Fixed a bug where most albums didn't show an artist name
|
||||
- Fixed a bug where uppercase extensions were not recognized
|
||||
- Upgraded compiler to Rust 1.13
|
||||
|
||||
### Web client
|
||||
|
||||
- Complete visual overhaul of the Polaris web client
|
||||
- Performance improvements for handling large playlist in Polaris web client
|
||||
- Added error messages when playing songs in unsupported formats
|
||||
|
||||
## Polaris 0.1.0
|
||||
|
||||
This is the very first Polaris release, celebrating the minimum viable product!
|
||||
|
||||
Features in this release:
|
||||
|
||||
- Server application with Windows Installer
|
||||
- Support for multiple users
|
||||
- Support for serving custom music directories
|
||||
- Support for custom album art pattern matching
|
||||
- Support for broadcasting IP to YDNS
|
||||
- Web UI to browse collection, manage playlist and listen to music
|
3974
Cargo.lock
generated
3974
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
135
Cargo.toml
135
Cargo.toml
|
@ -2,74 +2,103 @@
|
|||
name = "polaris"
|
||||
version = "0.0.0"
|
||||
authors = ["Antoine Gersant <antoine.gersant@lesforges.org>"]
|
||||
edition = "2018"
|
||||
edition = "2021"
|
||||
build = "build.rs"
|
||||
|
||||
[features]
|
||||
default = ["bundle-sqlite"]
|
||||
bundle-sqlite = ["libsqlite3-sys"]
|
||||
ui = ["native-windows-gui", "native-windows-derive"]
|
||||
|
||||
[dependencies]
|
||||
actix-files = { version = "0.4" }
|
||||
actix-web = { version = "3" }
|
||||
actix-web-httpauth = { version = "0.5.0" }
|
||||
anyhow = "1.0.35"
|
||||
ape = "0.3.0"
|
||||
base64 = "0.13"
|
||||
branca = "0.10.0"
|
||||
cookie = { version = "0.14", features = ["signed", "key-expansion"] }
|
||||
crossbeam-channel = "0.5"
|
||||
diesel_migrations = { version = "1.4", features = ["sqlite"] }
|
||||
futures-util = { version = "0.3" }
|
||||
getopts = "0.2.15"
|
||||
http = "0.2.2"
|
||||
id3 = "0.5.1"
|
||||
libsqlite3-sys = { version = "0.18", features = ["bundled", "bundled-windows"], optional = true }
|
||||
lewton = "0.10.1"
|
||||
log = "0.4.5"
|
||||
metaflac = "0.2.3"
|
||||
mp3-duration = "0.1.9"
|
||||
mp4ameta = "0.7.1"
|
||||
num_cpus = "1.13.0"
|
||||
opus_headers = "0.1.2"
|
||||
percent-encoding = "2.1"
|
||||
pbkdf2 = "0.6"
|
||||
rand = "0.7"
|
||||
rayon = "1.3"
|
||||
regex = "1.3.9"
|
||||
rustfm-scrobble = "1.1"
|
||||
serde = { version = "1.0.111", features = ["derive"] }
|
||||
serde_derive = "1.0.111"
|
||||
serde_json = "1.0.53"
|
||||
simplelog = "0.8.0"
|
||||
thiserror = "1.0.19"
|
||||
time = "0.2"
|
||||
toml = "0.5"
|
||||
ureq = "1.5"
|
||||
url = "2.1"
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
|
||||
[dependencies.diesel]
|
||||
version = "1.4.5"
|
||||
default_features = false
|
||||
features = ["libsqlite3-sys", "r2d2", "sqlite"]
|
||||
[dependencies]
|
||||
ape = "0.6"
|
||||
axum-extra = { version = "0.10.0", features = ["typed-header"] }
|
||||
axum-range = { version = "0.5.0" }
|
||||
bitcode = { version = "0.6.3", features = ["serde"] }
|
||||
branca = "0.10.1"
|
||||
chumsky = "0.9.3"
|
||||
enum-map = { version = "2.7.3", features = ["serde"] }
|
||||
getopts = "0.2.21"
|
||||
headers = "0.4"
|
||||
http = "1.1.0"
|
||||
icu_collator = "1.5.0"
|
||||
id3 = "1.14.0"
|
||||
lasso2 = { version = "0.8.2", features = ["serialize"] }
|
||||
lewton = "0.10.2"
|
||||
log = "0.4.22"
|
||||
metaflac = "0.2.7"
|
||||
mp3-duration = "0.1.10"
|
||||
mp4ameta = "0.11.0"
|
||||
native_db = "0.8.1"
|
||||
native_model = "0.4.20"
|
||||
nohash-hasher = "0.2.0"
|
||||
notify = { version = "6.1.1", default-features = false }
|
||||
notify-debouncer-full = { version = "0.3.1", default-features = false }
|
||||
num_cpus = "1.14.0"
|
||||
# TODO upstream PR: https://github.com/yboettcher/opus_headers/pull/7
|
||||
opus_headers = { git = "https://github.com/agersant/opus_headers", branch = "multivalue" }
|
||||
pbkdf2 = "0.11"
|
||||
rand = "0.8"
|
||||
rayon = "1.10.0"
|
||||
regex = "1.10.5"
|
||||
rusqlite = { version = "0.32.0", features = ["bundled"] }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_derive = "1.0.147"
|
||||
serde_json = "1.0.122"
|
||||
simplelog = "0.12.2"
|
||||
symphonia = { version = "0.5.4", features = [
|
||||
"all-codecs",
|
||||
"all-formats",
|
||||
"opt-simd",
|
||||
] }
|
||||
tinyvec = { version = "1.8.0", features = ["serde"] }
|
||||
thiserror = "1.0.62"
|
||||
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
|
||||
tokio-util = { version = "0.7.11", features = ["io"] }
|
||||
toml = "0.8.19"
|
||||
tower = { version = "0.5.2" }
|
||||
tower-http = { version = "0.6.2", features = [
|
||||
"compression-gzip",
|
||||
"fs",
|
||||
"normalize-path",
|
||||
] }
|
||||
trie-rs = { version = "0.4.2", features = ["serde"] }
|
||||
unicase = "2.7.0"
|
||||
ureq = { version = "2.10.0", default-features = false, features = ["tls"] }
|
||||
utoipa = { version = "5.3", features = ["axum_extras"] }
|
||||
utoipa-axum = { version = "0.1" }
|
||||
utoipa-scalar = { version = "0.2", features = ["axum"] }
|
||||
|
||||
[dependencies.axum]
|
||||
version = "0.8.1"
|
||||
default-features = false
|
||||
features = ["http1", "json", "tokio", "tower-log", "query"]
|
||||
|
||||
[dependencies.image]
|
||||
version = "0.23.12"
|
||||
default_features = false
|
||||
version = "0.25.2"
|
||||
default-features = false
|
||||
features = ["bmp", "gif", "jpeg", "png"]
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
native-windows-gui = {version = "1.0.7", default-features = false, features = ["cursor", "image-decoder", "message-window", "menu", "tray-notification"], optional = true }
|
||||
native-windows-derive = {version = "1.0.2", optional = true }
|
||||
native-windows-gui = { version = "1.0.13", default-features = false, features = [
|
||||
"cursor",
|
||||
"image-decoder",
|
||||
"message-window",
|
||||
"menu",
|
||||
"tray-notification",
|
||||
], optional = true }
|
||||
native-windows-derive = { version = "1.0.5", optional = true }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
daemonize = "0.4.1"
|
||||
sd-notify = "0.1.0"
|
||||
daemonize = "0.5"
|
||||
sd-notify = "0.4.2"
|
||||
|
||||
[target.'cfg(windows)'.build-dependencies]
|
||||
embed-resource = "2.4.2"
|
||||
winres = "0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
headers = "0.3"
|
||||
fs_extra = "1.2.0"
|
||||
axum-test = "17.0"
|
||||
bytes = "1.7.1"
|
||||
percent-encoding = "2.2"
|
||||
|
|
89
README.md
89
README.md
|
@ -1,38 +1,71 @@
|
|||
[](https://github.com/agersant/polaris/actions)
|
||||
[](http://codecov.io/github/agersant/polaris)
|
||||
[](LICENSE-MIT)
|
||||
<div align="center">
|
||||
<h1><img src="res/readme/logo.png?raw=true"/></h1>
|
||||
|
||||
<img src="res/readme/logo.png?raw=true"/>
|
||||
Polaris is a music streaming application, designed to let you enjoy your music collection from any computer or mobile device. Polaris works by streaming your music directly from your own computer, without uploading it to a third-party. It is free and open-source software, without any kind of premium version. The only requirement is that your computer stays on while it streams music!
|
||||
[](https://github.com/agersant/polaris/actions)
|
||||
[](https://codecov.io/github/agersant/polaris)
|
||||
[](LICENSE-MIT)
|
||||
|
||||
## Features
|
||||
- Optimized for large music collections
|
||||
- Can run on Windows, Linux, BSD, or through Docker
|
||||
- Listen to your music on the web or using the [Polaris Android](https://github.com/agersant/polaris-android) app
|
||||
- Easy to setup and configure via the built-in web UI
|
||||
- Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg` and `opus` files
|
||||
- Support for album art images
|
||||
- [Last.fm](https://www.last.fm) scrobbling
|
||||
- Color themes
|
||||
- Restrict access to your music collection with user accounts
|
||||

|
||||
</div>
|
||||
|
||||
## Tutorials
|
||||
# About
|
||||
|
||||
- [Getting Started](docs/SETUP.md)
|
||||
- [Streaming From Remote Devices](docs/DDNS.md)
|
||||
Polaris is a self-hosted music streaming server, to enjoy your music collection from any computer or mobile device. It is free and open-source software, without any kind of premium version.
|
||||
|
||||
## Screenshots
|
||||
The goals of this project are:
|
||||
- 🔥 Exceptional performance and responsiveness
|
||||
- 📚️ First-class support for large music collections (100,000+ songs)
|
||||
- 📦️ Ease of installation, deployment and maintenance
|
||||
- ✨ Beautiful user interface
|
||||
|
||||

|
||||

|
||||
# Try It Out!
|
||||
|
||||
## Documentation
|
||||
Check out the demo over at https://demo.polaris.stream, featuring a selection of Creative Commons Music. The credentials to access this server are:
|
||||
|
||||
- [Contribute to Polaris](docs/CONTRIBUTING.md)
|
||||
- [Maintenance Runbooks](docs/MAINTENANCE.md)
|
||||
Username: `demo_user`
|
||||
Password: `demo_password`
|
||||
|
||||
### API Documentation
|
||||
The Polaris server API is documented via [Swagger](https://agersant.github.io/polaris/swagger). Please note that this Swagger page does not point to a live Polaris server so the `Try it out` buttons are not expected to work.
|
||||
Every installation of Polaris also distributes this documentation, with the ability to use the `Try it out` buttons. To access it, simply open http://localhost:5050/swagger/ in your browser on the machine running Polaris.
|
||||
# Features
|
||||
|
||||
Feel free to open Github issues or Pull Requests if clarifications are needed.
|
||||
- 🖥️ Runs on Windows, Linux, BSD, or through Docker
|
||||
- 🔊 Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg`, `opus`, `ape`, `wav` and `aiff` files
|
||||
- 🌈 Dark mode variants and customizable color palette
|
||||
- 💿️ Browse your music by album, artist or genre
|
||||
- 📂 Browse your music as a file tree
|
||||
- 🌊 Song audio-waveform visualization
|
||||
- 🏷️ Support for multi-value fields in song metadata (eg. multiple artists per song)
|
||||
- 🔍️ Powerful search functionality with per-field queries
|
||||
- ⚙️ Plain-text configuration also editable with built-in UI
|
||||
- 👥 Setup multiple users, each with their own playlists
|
||||
- 📱 Listen to your music on the go:
|
||||
- Polaris Android ([Google Play Store](https://play.google.com/store/apps/details?id=agersant.polaris) · [F-Droid](https://f-droid.org/packages/agersant.polaris/) · [Repository](https://github.com/agersant/polaris-android))
|
||||
- Polarios ([App Store](https://apps.apple.com/app/polarios/id1662366309) · [Repository](https://gitlab.com/elise/Polarios)) [third-party]
|
||||
|
||||
# Installation
|
||||
|
||||
[Installation documentation](docs/SETUP.md)
|
||||
|
||||
[Streaming from remote devices](docs/DDNS.md)
|
||||
|
||||
[](https://repology.org/project/polaris-streaming/versions)
|
||||
|
||||
# Documentation
|
||||
|
||||
- 📒 [Changelog](CHANGELOG.md)
|
||||
- 🔧 [Configuration](docs/CONFIGURATION.md)
|
||||
- 👷 [Contribute to Polaris](docs/CONTRIBUTING.md)
|
||||
- 🛟 [Maintenance Runbooks](docs/MAINTENANCE.md)
|
||||
|
||||
The Polaris server API is documented via [OpenAPI](https://demo.polaris.stream/api-docs/). Every installation of Polaris distributes this interactive documentation. To access it, open http://localhost:5050/api-docs/ in your browser on the machine running Polaris.
|
||||
|
||||
# Credits & License Information
|
||||
|
||||
Music featured in the demo installation:
|
||||
|
||||
- [Chris Zabriskie - Abandon Babylon](https://chriszabriskie.bandcamp.com/album/abandon-babylon) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [Chris Zabriskie - Angie's Sunday Service](https://chriszabriskie.bandcamp.com/album/angies-sunday-service) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [glaciære - pool water blue](https://steviasphere.bandcamp.com/album/pool-water-blue) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [glaciære - light ripples](https://steviasphere.bandcamp.com/album/light-ripples) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [Koresma South](https://koresma.bandcamp.com/album/south) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
|
||||
- [Pete Murphy - Essence EP](https://petemurphy.bandcamp.com/album/falling-down-the-fred-astaires-solo-jazz-piano) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
|
||||
- [Rameses B - Essence EP](https://ramesesb.bandcamp.com/album/essence-ep) [(License)](https://creativecommons.org/licenses/by-nc-nd/3.0/)
|
||||
|
|
4
build.rs
4
build.rs
|
@ -3,6 +3,10 @@ fn main() {
|
|||
let mut res = winres::WindowsResource::new();
|
||||
res.set_icon("./res/windows/application/icon_polaris_512.ico");
|
||||
res.compile().unwrap();
|
||||
embed_resource::compile(
|
||||
"res/windows/application/polaris-manifest.rc",
|
||||
embed_resource::NONE,
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
|
|
|
@ -1,2 +0,0 @@
|
|||
[print_schema]
|
||||
file = "src/db/schema.rs"
|
50
docs/CONFIGURATION.md
Normal file
50
docs/CONFIGURATION.md
Normal file
|
@ -0,0 +1,50 @@
|
|||
# Configuration
|
||||
|
||||
Polaris configuration resides in a single text file whose format is documented below. You can use the Polaris web UI to modify the configuration, or write to it in any text editor. You may edit the configuration file while Polaris is running.
|
||||
|
||||
## Location
|
||||
|
||||
The location of the configuration file is always logged during Polaris startup. It is determined as follows:
|
||||
|
||||
- From the `--config` (or `-c`) CLI option if present. This option must point to the `.toml` file.
|
||||
- If the CLI option is not specified, Polaris will look for a `polaris.toml` file, inside the directory specified by the `POLARIS_CONFIG_DIR` environment variable _at compilation time_. When using the Windows installer, this will be `%LOCALAPPDATA%/Permafrost/Polaris/polaris.toml`. When using the supplied Makefile, the default is either `/usr/local/etc/polaris` (for a system-wide installations), or `~/.config/polaris` (for a XDG installation).
|
||||
- If `POLARIS_CONFIG_DIR` was not set when Polaris was compiled, it will default to `.` on Linux, and the `LOCALAPPDATA` location mentioned above on Windows. This behavior on Windows may change in future releases.
|
||||
|
||||
## Format
|
||||
|
||||
The configuration file uses the [TOML](https://toml.io/) format. Everything in the configuration file is optional and may be omitted (unless mentioned otherwise).
|
||||
|
||||
```toml
|
||||
# Regular expression used to identify album art in files adjacent to an audio file
|
||||
album_art_pattern = "Folder.(jpeg|jpg|png)"
|
||||
# A URL Polaris will regularly make requests to in order to update Dynamic DNS
|
||||
ddns_url = "https://example.com?token=foobar"
|
||||
|
||||
# Array of locations Polaris should scan to find music files
|
||||
[[mount_dirs]]
|
||||
# Directory to scan
|
||||
source = "/home/example/music"
|
||||
# User-facing name for this directory (must be unique)
|
||||
name = "My Music 🎧️"
|
||||
|
||||
[[mount_dirs]]
|
||||
source = "/mnt/example/more_music"
|
||||
name = "Extra Music 🎵"
|
||||
|
||||
# Array of user accounts who can connect to the Polaris server
|
||||
[[users]]
|
||||
# Username for login
|
||||
name = "example-user"
|
||||
# If true, user will have access to all settings in the web UI
|
||||
admin = true
|
||||
# Plain text password for this user. Will be ignored if hashed_password is set. Polaris will never write to this field. For each user, at least one of initial_password and hashed_password must be set.
|
||||
initial_password = "top-secret-password"
|
||||
# Hashed and salted password for the user. Polaris will create this field if unset.
|
||||
hashed_password = "$pbkdf2-sha256$i=10000,l=32$SI8LjK1KtvcawhgmWGJgRA$t9btMwhUTQ8r3vqI1xhArn19J7Jezyoi461fFjhZXGU"
|
||||
|
||||
[[users]]
|
||||
name = "other-user"
|
||||
admin = true
|
||||
initial_password = "amospheric-strawberry64"
|
||||
```
|
||||
|
|
@ -1,25 +1,37 @@
|
|||
# Contributing
|
||||
|
||||
## Compiling and Running Polaris
|
||||
## Guidelines
|
||||
|
||||
Compiling and running Polaris is very easy as it only depends on the Rust toolchain.
|
||||
While Polaris is free and open-source software, it is not very open to code contributions. The reasons behind this are:
|
||||
- Polaris is a hobby project. I don't want it to feel like my day job, where I do a lot of code reviews, mentoring and tech leadership.
|
||||
- I am committed to maintaining this software for a very long time. I would rather maintain code that I mostly wrote myself.
|
||||
|
||||
1. [Install Rust](https://www.rust-lang.org/en-US/install.html)
|
||||
2. Clone the polaris depot with this command: `git clone --recursive https://github.com/agersant/polaris.git`
|
||||
This still leave room for a few avenues to contribute:
|
||||
- Help answering questions in the issue tracker.
|
||||
- Package Polaris for a Linux distribution
|
||||
- Documentation improvements or writing user guides.
|
||||
- Satellite projects (eg. [docker-polaris](https://github.com/ogarcia/docker-polaris), [polarios](https://gitlab.com/elise/Polarios))
|
||||
- Bug fixes.
|
||||
|
||||
For non-trivial new features, you are welcome to maintain a fork. If you need help finding your way around the code, feel free to open a [discussion thread](https://github.com/agersant/polaris/discussions).
|
||||
|
||||
## Compiling and running Polaris
|
||||
|
||||
1. [Install Rust](https://www.rust-lang.org/en-US/install.html) (stable toolchain)
|
||||
2. Clone the polaris depot with this command: `git clone https://github.com/agersant/polaris.git`
|
||||
3. You can now run compile and run polaris from the newly created directory with the command: `cargo run`
|
||||
|
||||
Polaris supports a few command line arguments which are useful during development:
|
||||
|
||||
- `-c some/config.toml` sets the location of the [configuration](/docs/CONFIGURATION.md) file.
|
||||
- `--data some/path` sets the folder Polaris will use to store runtime data such as playlists, collection index and auth secrets.
|
||||
- `-w some/path/to/web/dir` lets you point to the directory to be served as the web interface. You can find a suitable directory in your Polaris install (under `/web`), or from the [latest polaris-web release](https://github.com/agersant/polaris-web/releases/latest/download/web.zip).
|
||||
- `-s some/path/to/swagger/dir` lets you point to the directory to be served as the swagger API documentation. You'll probably want to point this to the `/docs/swagger` directory of the polaris repository.
|
||||
- `-d some/path/to/a/file.db` lets you manually choose where Polaris stores its configuration and music index (you can reuse the same database accross multiple runs).
|
||||
- `-c some/config.toml` lets you use a configuration file to add content to the database. This can be useful if you frequently delete the database and would like to automate the first time flow. The configuration format is not documented but can be inferred by looking at the `Config` struct in `config.rs`.
|
||||
- `-f` (on Linux) makes Polaris not fork into a separate process.
|
||||
|
||||
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -s docs/swagger -d test-output/my.db`
|
||||
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -c test-config.toml`
|
||||
|
||||
While Polaris is running, access the web UI at [http://localhost:5050](http://localhost:5050).
|
||||
|
||||
## Running Unit Tests
|
||||
## Running unit tests
|
||||
|
||||
That's the easy part, simply run `cargo test`!
|
||||
|
|
44
docs/DDNS.md
44
docs/DDNS.md
|
@ -1,4 +1,10 @@
|
|||
# Streaming From Other Devices
|
||||
# Streaming from other devices
|
||||
|
||||
These instructions apply to users running Polaris on a home network. When deploying to cloud services or VPS, configurations requirements will differ.
|
||||
|
||||
## Port forwarding
|
||||
|
||||
Configure port forwarding on your router to redirect port 80 traffic towards port 5050 towards the computer running Polaris. The exact way to do this depends on your router manufacturer and model.
|
||||
|
||||
## Dynamic DNS
|
||||
|
||||
|
@ -8,34 +14,8 @@ You can access your Polaris installation from anywhere via your computer's publi
|
|||
|
||||
A solution to these problems is to set up Dynamic DNS, so that your installation can always be reached at a fixed URL.
|
||||
|
||||
The steps below will walk you through setting up YDNS and Polaris to give your installation a fixed URL. If you have another solution in mind, or prefer using another Dynamic DNS service, skip to the next section.
|
||||
|
||||
1. Register for a free account on https://ydns.io
|
||||
2. On the YDNS website, access the "My Hosts" page and press the + sign for "Add Host"
|
||||
3. Fill the host form as described below:
|
||||
- Domain: ydns.eu
|
||||
- Name: This part is up to you, whatever you enter will be in the URL you use to access Polaris
|
||||
- Content: Leave the default. Take a note whether the value looks like a IPv4 address (format: xxx.xxx.xxx.xxx) or a IPv6 address (format: xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx)
|
||||
- Type: Dynamic IP
|
||||
4. If the content field looked like a IPv4 address: skip to step #6
|
||||
5. If the content field looked like a IPv6 address:
|
||||
- Click on your host name (eg. yourdomain.ydns.eu)
|
||||
- You should now see a page which looks like this:
|
||||

|
||||
- Click on the green "+" icon on the right
|
||||
- Fill out the new form as described:
|
||||
- Make sure the `Type` field is set to `A`
|
||||
- Set content to 0.0.0.0
|
||||
- You should now be back on the "records" page which was pictured above
|
||||
- Click on the ID number on the left for the row that has its `Type` listed as `AAAA` (#28717 in the picture above).
|
||||
- Click on the red trash can icon in the corner to delete this record
|
||||
- Done!
|
||||
6. In the Polaris web interface, access the `Dynamic DNS` tab of the settings screen:
|
||||
- Update the hostname field to match what you set in step 5. (eg. http://yourdomain.ydns.eu)
|
||||
- Update the username field to the email address you use when creating your YDNS account
|
||||
- Update the password field with your YDNS API password. You can find this password on https://ydns.io: click on the "User" icon in the top right and then `Preferences > API`.
|
||||
|
||||
## Port Forwarding
|
||||
Configure port forwarding on your router to redirect port 80 towards port 5050 on the computer where you run Polaris. The exact way to do this depends on your router manufacturer and model.
|
||||
|
||||
Don't forget to restart Polaris to apply your configuration changes, and access your music from other computers at http://yourdomain.ydns.eu
|
||||
1. Reserve a URL with a dynamic DNS provider such as https://www.duckdns.org/ or https://freemyip.com/.
|
||||
2. The dynamic DNS provider gives you a unique Update URL that can be used to tell them where to send traffic. For example, `freemyip.com` gives you this URL immediately after claiming a subdomain. Other providers may show it in your profile page, etc.
|
||||
3. Access your Polaris instance (http://localhost:5050 by default).
|
||||
4. Go to the `Setting page` and into the `Dynamic DNS` section.
|
||||
5. Set the Update URL to the one you obtained in step 2.
|
||||
|
|
|
@ -1,16 +1,10 @@
|
|||
# Maintenance
|
||||
|
||||
## How to make a release
|
||||
|
||||
- Update CHANGELOG.md to reflect new release
|
||||
- On Github, go to **Actions**, select the **Make Release** workflow and click **Run workflow**
|
||||
- Select the branch to deploy (usually `master`)
|
||||
- Input a user-facing version name (eg: **0.13.0**)
|
||||
- Click the **Run workflow** button
|
||||
- After CI completes, find the release on Github and write the changelog
|
||||
- Move the release from Draft to Published
|
||||
|
||||
Note that the Github web UI will separate the release from the corresponding tag until published.
|
||||
|
||||
## How to change the database schema
|
||||
|
||||
- Add a new folder under `migrations` following the existing pattern
|
||||
- Run `update_db_schema.bat`
|
||||
- After CI completes, move the release from Draft to Published
|
||||
|
|
|
@ -1,42 +1,30 @@
|
|||
# Getting Started
|
||||
# Installation
|
||||
|
||||
## Requirements
|
||||
## On Windows
|
||||
|
||||
One of the following:
|
||||
- Windows 7 or newer
|
||||
- Linux (any reasonably modern distribution should do)
|
||||
|
||||
### Windows
|
||||
1. Download the [latest installer](https://github.com/agersant/polaris/releases/latest) (you want the .msi file)
|
||||
2. Run the installer
|
||||
3. That's it, you're done!
|
||||
3. Launch Polaris from the start menu
|
||||
4. In your web browser, access http://localhost:5050
|
||||
|
||||
You can now start Polaris from the start menu or from your desktop, Polaris will also start automatically next time you restart your computer. You can tell when Polaris is running by its icon in the notification area (near the clock and volume controls).
|
||||
## In a docker container
|
||||
|
||||
### Linux
|
||||
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
|
||||
|
||||
#### Dependencies
|
||||
## From source on Linux
|
||||
|
||||
1. Install OpenSSL, SQLite and their headers, and some development tools. These are available from your distribution's package manager. For instance on Ubuntu, execute `sudo apt-get install binutils pkg-config libssl-dev`
|
||||
### Dependencies
|
||||
|
||||
1. Install OpenSSL, SQLite and their respective headers (eg. `sudo apt-get install libsqlite3-dev libssl-dev`).
|
||||
2. Install `binutils` and `pkg-config` (eg. `sudo apt-get install binutils pkg-config`).
|
||||
2. Install the Rust compiler by executing `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` or using an [alternative method](https://www.rust-lang.org/en-US/install.html)
|
||||
|
||||
#### Polaris installation
|
||||
### Polaris installation
|
||||
1. Download the [latest release]((https://github.com/agersant/polaris/releases/latest)) of Polaris (you want the .tar.gz file)
|
||||
2. Extract the Polaris archive in a directory and open a terminal in that directory
|
||||
3. To install Polaris within your home directory, execute `make install-xdg`. This installation follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html). You can use `make preview-xdg` to see which directories the install process would use.
|
||||
4. If you prefer a system-wide install, execute `make install` (without the `-xdg` suffix). If you use `sudo` to perform such a system install, you may need the `-E` option so that your sudo user find the Rust binaries: `sudo -E make install`. This installation follows the [GNU Standard Installation Directories](https://www.gnu.org/prep/standards/html_node/Directory-Variables.html). You can use `make preview` to see which directories the install process would use.
|
||||
|
||||
|
||||
From here, you might want to adjust your system to run Polaris on login using Systemd, Cron or whichever method your distribution endorses.
|
||||
|
||||
If you want to uninstall Polaris, execute `make uninstall-xdg` from the extracted archive's directory (or `make uninstall` if you made a system-wide install). This will delete all the files and directories listed above **including your Polaris database**. If you customized the install process by specifying environment variables like `PREFIX`, make sure they are set to the same values when running the uninstall command.
|
||||
|
||||
### In a docker container
|
||||
|
||||
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
|
||||
|
||||
## Test Run
|
||||
|
||||
- Start Polaris using the shortcut on your desktop (Windows) or by running the Polaris executable
|
||||
- In your Web browser, access http://localhost:5050
|
||||
- You will see a welcome page that will guide you through the Polaris configuration
|
||||
If you want to uninstall Polaris, execute `make uninstall-xdg` from the extracted archive's directory (or `make uninstall` if you made a system-wide install). This will delete all the files and directories listed above (including your configuration, playlists, etc.). If you customized the install process by specifying environment variables like `PREFIX`, make sure they are set to the same values when running the uninstall command.
|
||||
|
|
Binary file not shown.
Before ![]() (image error) Size: 665 B |
Binary file not shown.
Before ![]() (image error) Size: 628 B |
|
@ -1,60 +0,0 @@
|
|||
<!-- HTML for static distribution bundle build -->
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Polaris Swagger UI</title>
|
||||
<link rel="stylesheet" type="text/css" href="swagger-ui.css">
|
||||
<link rel="icon" type="image/png" href="favicon-32x32.png" sizes="32x32" />
|
||||
<link rel="icon" type="image/png" href="favicon-16x16.png" sizes="16x16" />
|
||||
<style>
|
||||
html {
|
||||
box-sizing: border-box;
|
||||
overflow: -moz-scrollbars-vertical;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
*,
|
||||
*:before,
|
||||
*:after {
|
||||
box-sizing: inherit;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
background: #fafafa;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="swagger-ui"></div>
|
||||
|
||||
<script src="swagger-ui-bundle.js"> </script>
|
||||
<script src="swagger-ui-standalone-preset.js"> </script>
|
||||
<script>
|
||||
window.onload = function() {
|
||||
// Begin Swagger UI call region
|
||||
const ui = SwaggerUIBundle({
|
||||
url: "polaris-api.json",
|
||||
dom_id: '#swagger-ui',
|
||||
deepLinking: true,
|
||||
presets: [
|
||||
SwaggerUIBundle.presets.apis,
|
||||
SwaggerUIStandalonePreset
|
||||
],
|
||||
plugins: [
|
||||
SwaggerUIBundle.plugins.DownloadUrl
|
||||
],
|
||||
layout: "StandaloneLayout"
|
||||
})
|
||||
// End Swagger UI call region
|
||||
|
||||
window.ui = ui
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
|
@ -1,67 +0,0 @@
|
|||
<!doctype html>
|
||||
<html lang="en-US">
|
||||
<body onload="run()">
|
||||
</body>
|
||||
</html>
|
||||
<script>
|
||||
'use strict';
|
||||
function run () {
|
||||
var oauth2 = window.opener.swaggerUIRedirectOauth2;
|
||||
var sentState = oauth2.state;
|
||||
var redirectUrl = oauth2.redirectUrl;
|
||||
var isValid, qp, arr;
|
||||
|
||||
if (/code|token|error/.test(window.location.hash)) {
|
||||
qp = window.location.hash.substring(1);
|
||||
} else {
|
||||
qp = location.search.substring(1);
|
||||
}
|
||||
|
||||
arr = qp.split("&")
|
||||
arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';})
|
||||
qp = qp ? JSON.parse('{' + arr.join() + '}',
|
||||
function (key, value) {
|
||||
return key === "" ? value : decodeURIComponent(value)
|
||||
}
|
||||
) : {}
|
||||
|
||||
isValid = qp.state === sentState
|
||||
|
||||
if ((
|
||||
oauth2.auth.schema.get("flow") === "accessCode"||
|
||||
oauth2.auth.schema.get("flow") === "authorizationCode"
|
||||
) && !oauth2.auth.code) {
|
||||
if (!isValid) {
|
||||
oauth2.errCb({
|
||||
authId: oauth2.auth.name,
|
||||
source: "auth",
|
||||
level: "warning",
|
||||
message: "Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"
|
||||
});
|
||||
}
|
||||
|
||||
if (qp.code) {
|
||||
delete oauth2.state;
|
||||
oauth2.auth.code = qp.code;
|
||||
oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
|
||||
} else {
|
||||
let oauthErrorMsg
|
||||
if (qp.error) {
|
||||
oauthErrorMsg = "["+qp.error+"]: " +
|
||||
(qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
|
||||
(qp.error_uri ? "More info: "+qp.error_uri : "");
|
||||
}
|
||||
|
||||
oauth2.errCb({
|
||||
authId: oauth2.auth.name,
|
||||
source: "auth",
|
||||
level: "error",
|
||||
message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server"
|
||||
});
|
||||
}
|
||||
} else {
|
||||
oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
|
||||
}
|
||||
window.close();
|
||||
}
|
||||
</script>
|
File diff suppressed because it is too large
Load diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -1 +0,0 @@
|
|||
{"version":3,"sources":[],"names":[],"mappings":"","file":"swagger-ui.css","sourceRoot":""}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
46
flake.lock
generated
Normal file
46
flake.lock
generated
Normal file
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1736701207,
|
||||
"narHash": "sha256-jG/+MvjVY7SlTakzZ2fJ5dC3V1PrKKrUEOEE30jrOKA=",
|
||||
"rev": "ed4a395ea001367c1f13d34b1e01aa10290f67d6",
|
||||
"revCount": 737298,
|
||||
"type": "tarball",
|
||||
"url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.1.737298%2Brev-ed4a395ea001367c1f13d34b1e01aa10290f67d6/01945f5f-4175-7e72-8809-a1e482c4a443/source.tar.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.%2A.tar.gz"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1736735482,
|
||||
"narHash": "sha256-QOA4jCDyyUM9Y2Vba+HSZ/5LdtCMGaTE/7NkkUzBr50=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "cf960a1938ee91200fe0d2f7b2582fde2429d562",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
58
flake.nix
Normal file
58
flake.nix
Normal file
|
@ -0,0 +1,58 @@
|
|||
{
|
||||
description = "A Nix-flake-based Rust development environment";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.*.tar.gz";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, rust-overlay }:
|
||||
let
|
||||
supportedSystems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
|
||||
forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f {
|
||||
pkgs = import nixpkgs {
|
||||
inherit system;
|
||||
overlays = [ rust-overlay.overlays.default self.overlays.default ];
|
||||
};
|
||||
});
|
||||
in
|
||||
{
|
||||
overlays.default = final: prev: {
|
||||
rustToolchain =
|
||||
let
|
||||
rust = prev.rust-bin;
|
||||
in
|
||||
if builtins.pathExists ./rust-toolchain.toml then
|
||||
rust.fromRustupToolchainFile ./rust-toolchain.toml
|
||||
else if builtins.pathExists ./rust-toolchain then
|
||||
rust.fromRustupToolchainFile ./rust-toolchain
|
||||
else
|
||||
rust.stable.latest.default.override {
|
||||
extensions = [ "rust-src" "rustfmt" ];
|
||||
};
|
||||
};
|
||||
|
||||
devShells = forEachSupportedSystem ({ pkgs }: {
|
||||
default = pkgs.mkShell {
|
||||
packages = with pkgs; [
|
||||
rustToolchain
|
||||
openssl
|
||||
pkg-config
|
||||
cargo-deny
|
||||
cargo-edit
|
||||
cargo-watch
|
||||
rust-analyzer
|
||||
samply
|
||||
];
|
||||
|
||||
env = {
|
||||
# Required by rust-analyzer
|
||||
RUST_SRC_PATH = "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
|
||||
};
|
||||
};
|
||||
});
|
||||
};
|
||||
}
|
|
@ -1,2 +0,0 @@
|
|||
DROP TABLE directories;
|
||||
DROP TABLE songs;
|
|
@ -1,25 +0,0 @@
|
|||
CREATE TABLE directories (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
parent TEXT,
|
||||
artist TEXT,
|
||||
year INTEGER,
|
||||
album TEXT,
|
||||
artwork TEXT,
|
||||
UNIQUE(path) ON CONFLICT REPLACE
|
||||
);
|
||||
|
||||
CREATE TABLE songs (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
parent TEXT NOT NULL,
|
||||
track_number INTEGER,
|
||||
disc_number INTEGER,
|
||||
title TEXT,
|
||||
artist TEXT,
|
||||
album_artist TEXT,
|
||||
year INTEGER,
|
||||
album TEXT,
|
||||
artwork TEXT,
|
||||
UNIQUE(path) ON CONFLICT REPLACE
|
||||
);
|
|
@ -1,15 +0,0 @@
|
|||
CREATE TEMPORARY TABLE directories_backup(id, path, parent, artist, year, album, artwork);
|
||||
INSERT INTO directories_backup SELECT id, path, parent, artist, year, album, artwork FROM directories;
|
||||
DROP TABLE directories;
|
||||
CREATE TABLE directories (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
parent TEXT,
|
||||
artist TEXT,
|
||||
year INTEGER,
|
||||
album TEXT,
|
||||
artwork TEXT,
|
||||
UNIQUE(path) ON CONFLICT REPLACE
|
||||
);
|
||||
INSERT INTO directories SELECT * FROM directories_backup;
|
||||
DROP TABLE directories_backup;
|
|
@ -1 +0,0 @@
|
|||
ALTER TABLE directories ADD COLUMN date_added INTEGER DEFAULT 0 NOT NULL;
|
|
@ -1 +0,0 @@
|
|||
DROP TABLE users;
|
|
@ -1,8 +0,0 @@
|
|||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_salt BLOB NOT NULL,
|
||||
password_hash BLOB NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
UNIQUE(name)
|
||||
);
|
|
@ -1 +0,0 @@
|
|||
DROP TABLE misc_settings;
|
|
@ -1,7 +0,0 @@
|
|||
CREATE TABLE misc_settings (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
auth_secret TEXT NOT NULL,
|
||||
index_sleep_duration_seconds INTEGER NOT NULL,
|
||||
index_album_art_pattern TEXT NOT NULL
|
||||
);
|
||||
INSERT INTO misc_settings (id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern) VALUES (0, hex(randomblob(64)), 1800, "Folder.(jpeg|jpg|png)");
|
|
@ -1 +0,0 @@
|
|||
DROP TABLE ddns_config;
|
|
@ -1,8 +0,0 @@
|
|||
CREATE TABLE ddns_config (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
host TEXT NOT NULL,
|
||||
username TEXT NOT NULL,
|
||||
password TEXT NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO ddns_config (id, host, username, password) VALUES (0, "", "", "");
|
|
@ -1 +0,0 @@
|
|||
DROP TABLE mount_points;
|
|
@ -1,6 +0,0 @@
|
|||
CREATE TABLE mount_points (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
source TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
UNIQUE(name)
|
||||
);
|
|
@ -1,2 +0,0 @@
|
|||
DROP TABLE playlists;
|
||||
DROP TABLE playlist_songs;
|
|
@ -1,16 +0,0 @@
|
|||
CREATE TABLE playlists (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
owner INTEGER NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
FOREIGN KEY(owner) REFERENCES users(id) ON DELETE CASCADE,
|
||||
UNIQUE(owner, name) ON CONFLICT REPLACE
|
||||
);
|
||||
|
||||
CREATE TABLE playlist_songs (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
playlist INTEGER NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
ordering INTEGER NOT NULL,
|
||||
FOREIGN KEY(playlist) REFERENCES playlists(id) ON DELETE CASCADE ON UPDATE CASCADE,
|
||||
UNIQUE(playlist, ordering) ON CONFLICT REPLACE
|
||||
);
|
|
@ -1,11 +0,0 @@
|
|||
CREATE TEMPORARY TABLE misc_settings_backup(id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern);
|
||||
INSERT INTO misc_settings_backup SELECT id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern FROM misc_settings;
|
||||
DROP TABLE misc_settings;
|
||||
CREATE TABLE misc_settings (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
auth_secret TEXT NOT NULL,
|
||||
index_sleep_duration_seconds INTEGER NOT NULL,
|
||||
index_album_art_pattern TEXT NOT NULL
|
||||
);
|
||||
INSERT INTO misc_settings SELECT * FROM misc_settings_backup;
|
||||
DROP TABLE misc_settings_backup;
|
|
@ -1 +0,0 @@
|
|||
ALTER TABLE misc_settings ADD COLUMN prefix_url TEXT NOT NULL DEFAULT "";
|
|
@ -1,19 +0,0 @@
|
|||
CREATE TEMPORARY TABLE songs_backup(id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork);
|
||||
INSERT INTO songs_backup SELECT id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork FROM songs;
|
||||
DROP TABLE songs;
|
||||
CREATE TABLE songs (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
path TEXT NOT NULL,
|
||||
parent TEXT NOT NULL,
|
||||
track_number INTEGER,
|
||||
disc_number INTEGER,
|
||||
title TEXT,
|
||||
artist TEXT,
|
||||
album_artist TEXT,
|
||||
year INTEGER,
|
||||
album TEXT,
|
||||
artwork TEXT,
|
||||
UNIQUE(path) ON CONFLICT REPLACE
|
||||
);
|
||||
INSERT INTO songs SELECT * FROM songs_backup;
|
||||
DROP TABLE songs_backup;
|
|
@ -1 +0,0 @@
|
|||
ALTER TABLE songs ADD COLUMN duration INTEGER;
|
|
@ -1,13 +0,0 @@
|
|||
CREATE TEMPORARY TABLE users_backup(id, name, password_salt, password_hash, admin);
|
||||
INSERT INTO users_backup SELECT id, name, password_salt, password_hash, admin FROM users;
|
||||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_salt BLOB NOT NULL,
|
||||
password_hash BLOB NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
UNIQUE(name)
|
||||
);
|
||||
INSERT INTO users SELECT * FROM users_backup;
|
||||
DROP TABLE users_backup;
|
|
@ -1,2 +0,0 @@
|
|||
ALTER TABLE users ADD COLUMN lastfm_username TEXT;
|
||||
ALTER TABLE users ADD COLUMN lastfm_session_key TEXT;
|
|
@ -1,15 +0,0 @@
|
|||
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
|
||||
INSERT INTO misc_settings_backup
|
||||
SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
|
||||
FROM misc_settings;
|
||||
DROP TABLE misc_settings;
|
||||
CREATE TABLE misc_settings (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
auth_secret BLOB NOT NULL DEFAULT (hex(randomblob(32))),
|
||||
index_sleep_duration_seconds INTEGER NOT NULL,
|
||||
index_album_art_pattern TEXT NOT NULL,
|
||||
prefix_url TEXT NOT NULL DEFAULT ""
|
||||
);
|
||||
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
|
||||
SELECT * FROM misc_settings_backup;
|
||||
DROP TABLE misc_settings_backup;
|
|
@ -1,15 +0,0 @@
|
|||
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
|
||||
INSERT INTO misc_settings_backup
|
||||
SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
|
||||
FROM misc_settings;
|
||||
DROP TABLE misc_settings;
|
||||
CREATE TABLE misc_settings (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
auth_secret BLOB NOT NULL DEFAULT (randomblob(32)),
|
||||
index_sleep_duration_seconds INTEGER NOT NULL,
|
||||
index_album_art_pattern TEXT NOT NULL,
|
||||
prefix_url TEXT NOT NULL DEFAULT ""
|
||||
);
|
||||
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
|
||||
SELECT * FROM misc_settings_backup;
|
||||
DROP TABLE misc_settings_backup;
|
|
@ -1,11 +0,0 @@
|
|||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_salt BLOB NOT NULL,
|
||||
password_hash BLOB NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
lastfm_username TEXT,
|
||||
lastfm_session_key TEXT,
|
||||
UNIQUE(name)
|
||||
);
|
|
@ -1,10 +0,0 @@
|
|||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
lastfm_username TEXT,
|
||||
lastfm_session_key TEXT,
|
||||
UNIQUE(name)
|
||||
);
|
|
@ -1,14 +0,0 @@
|
|||
CREATE TEMPORARY TABLE users_backup(id, name, password_hash, admin, lastfm_username, lastfm_session_key);
|
||||
INSERT INTO users_backup SELECT id, name, password_hash, admin, lastfm_username, lastfm_session_key FROM users;
|
||||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
lastfm_username TEXT,
|
||||
lastfm_session_key TEXT,
|
||||
UNIQUE(name)
|
||||
);
|
||||
INSERT INTO users SELECT * FROM users_backup;
|
||||
DROP TABLE users_backup;
|
|
@ -1,2 +0,0 @@
|
|||
ALTER TABLE users ADD COLUMN web_theme_base TEXT;
|
||||
ALTER TABLE users ADD COLUMN web_theme_accent TEXT;
|
|
@ -1 +0,0 @@
|
|||
ALTER TABLE misc_settings ADD COLUMN prefix_url TEXT NOT NULL DEFAULT "";
|
|
@ -1,11 +0,0 @@
|
|||
CREATE TEMPORARY TABLE misc_settings_backup(id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern);
|
||||
INSERT INTO misc_settings_backup SELECT id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern FROM misc_settings;
|
||||
DROP TABLE misc_settings;
|
||||
CREATE TABLE misc_settings (
|
||||
id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
|
||||
auth_secret BLOB NOT NULL DEFAULT (randomblob(32)),
|
||||
index_sleep_duration_seconds INTEGER NOT NULL,
|
||||
index_album_art_pattern TEXT NOT NULL
|
||||
);
|
||||
INSERT INTO misc_settings SELECT * FROM misc_settings_backup;
|
||||
DROP TABLE misc_settings_backup;
|
BIN
res/branding/example-cover-1.png
Normal file
BIN
res/branding/example-cover-1.png
Normal file
Binary file not shown.
After ![]() (image error) Size: 1.2 MiB |
BIN
res/branding/example-cover-2.png
Normal file
BIN
res/branding/example-cover-2.png
Normal file
Binary file not shown.
After ![]() (image error) Size: 1.3 MiB |
BIN
res/branding/logo/social_media_preview.afdesign
Normal file
BIN
res/branding/logo/social_media_preview.afdesign
Normal file
Binary file not shown.
Binary file not shown.
Before ![]() (image error) Size: 107 KiB After ![]() (image error) Size: 723 KiB ![]() ![]() |
Binary file not shown.
Before ![]() (image error) Size: 256 KiB After ![]() (image error) Size: 722 KiB ![]() ![]() |
|
@ -7,21 +7,25 @@ EXEC_PREFIX ?= $(PREFIX)
|
|||
BINDIR ?= $(EXEC_PREFIX)/bin
|
||||
DATAROOTDIR ?= $(PREFIX)/share
|
||||
DATADIR ?= $(DATAROOTDIR)
|
||||
SYSCONFDIR ?= $(PREFIX)/etc
|
||||
LOCALSTATEDIR ?= $(PREFIX)/var
|
||||
RUNSTATEDIR ?= $(LOCALSTATEDIR)/run
|
||||
%-system: POLARIS_BIN_PATH := $(BINDIR)/polaris
|
||||
%-system: export POLARIS_WEB_DIR := $(DATADIR)/polaris/web
|
||||
%-system: export POLARIS_SWAGGER_DIR := $(DATADIR)/polaris/swagger
|
||||
%-system: export POLARIS_CONFIG_DIR := $(SYSCONFDIR)/polaris
|
||||
%-system: export POLARIS_DATA_DIR := $(LOCALSTATEDIR)/lib/polaris
|
||||
%-system: export POLARIS_DB_DIR := $(LOCALSTATEDIR)/lib/polaris
|
||||
%-system: export POLARIS_LOG_DIR := $(LOCALSTATEDIR)/log/polaris
|
||||
%-system: export POLARIS_CACHE_DIR := $(LOCALSTATEDIR)/cache/polaris
|
||||
%-system: export POLARIS_PID_DIR := $(RUNSTATEDIR)/polaris
|
||||
|
||||
XDG_CACHE_HOME ?= $(HOME)/.cache
|
||||
XDG_CONFIG_HOME ?= $(HOME)/.config
|
||||
XDG_DATA_HOME ?= $(HOME)/.local/share
|
||||
XDG_BINDIR ?= $(HOME)/.local/bin
|
||||
XDG_DATADIR ?= $(XDG_DATA_HOME)/polaris
|
||||
XDG_CACHEDIR ?= $(XDG_CACHE_HOME)/polaris
|
||||
XDG_CONFIGDIR ?= $(XDG_CONFIG_HOME)/polaris
|
||||
ifdef $(XDG_RUNTIME_DIR)
|
||||
XDG_PIDDIR ?= $(XDG_RUNTIME_DIR)/polaris
|
||||
else
|
||||
|
@ -29,7 +33,8 @@ XDG_PIDDIR ?= /tmp/polaris-$(UID)
|
|||
endif
|
||||
%-xdg: POLARIS_BIN_PATH := $(XDG_BINDIR)/polaris
|
||||
%-xdg: export POLARIS_WEB_DIR := $(XDG_DATADIR)/web
|
||||
%-xdg: export POLARIS_SWAGGER_DIR := $(XDG_DATADIR)/swagger
|
||||
%-xdg: export POLARIS_CONFIG_DIR := $(XDG_CONFIGDIR)
|
||||
%-xdg: export POLARIS_DATA_DIR := $(XDG_DATADIR)
|
||||
%-xdg: export POLARIS_DB_DIR := $(XDG_DATADIR)
|
||||
%-xdg: export POLARIS_LOG_DIR := $(XDG_CACHEDIR)
|
||||
%-xdg: export POLARIS_CACHE_DIR := $(XDG_CACHEDIR)
|
||||
|
@ -57,7 +62,8 @@ preview: preview-system
|
|||
list-paths:
|
||||
$(info POLARIS_BIN_PATH is $(POLARIS_BIN_PATH))
|
||||
$(info POLARIS_WEB_DIR is $(POLARIS_WEB_DIR))
|
||||
$(info POLARIS_SWAGGER_DIR is $(POLARIS_SWAGGER_DIR))
|
||||
$(info POLARIS_CONFIG_DIR is $(POLARIS_CONFIG_DIR))
|
||||
$(info POLARIS_DATA_DIR is $(POLARIS_DATA_DIR))
|
||||
$(info POLARIS_DB_DIR is $(POLARIS_DB_DIR))
|
||||
$(info POLARIS_LOG_DIR is $(POLARIS_LOG_DIR))
|
||||
$(info POLARIS_CACHE_DIR is $(POLARIS_CACHE_DIR))
|
||||
|
@ -74,9 +80,7 @@ install-bin: cargo-build
|
|||
|
||||
install-data:
|
||||
install -d $(POLARIS_WEB_DIR)
|
||||
install -d $(POLARIS_SWAGGER_DIR)
|
||||
cp -rT ./web $(POLARIS_WEB_DIR)
|
||||
cp -rT ./swagger $(POLARIS_SWAGGER_DIR)
|
||||
|
||||
# Uninstall
|
||||
|
||||
|
@ -89,7 +93,8 @@ uninstall-bin:
|
|||
|
||||
uninstall-data:
|
||||
rm -rf $(POLARIS_WEB_DIR)
|
||||
rm -rf $(POLARIS_SWAGGER_DIR)
|
||||
rm -rf $(POLARIS_CONFIG_DIR)
|
||||
rm -rf $(POLARIS_DATA_DIR)
|
||||
rm -rf $(POLARIS_DB_DIR)
|
||||
rm -rf $(POLARIS_LOG_DIR)
|
||||
rm -rf $(POLARIS_CACHE_DIR)
|
||||
|
|
|
@ -3,7 +3,7 @@ echo "Creating output directory"
|
|||
mkdir -p release/tmp/polaris
|
||||
|
||||
echo "Copying package files"
|
||||
cp -r web docs/swagger src migrations test-data build.rs Cargo.toml Cargo.lock rust-toolchain res/unix/Makefile release/tmp/polaris
|
||||
cp -r web src test-data build.rs Cargo.toml Cargo.lock rust-toolchain.toml res/unix/Makefile release/tmp/polaris
|
||||
|
||||
echo "Creating tarball"
|
||||
tar -zc -C release/tmp -f release/polaris.tar.gz polaris
|
||||
|
|
2
res/windows/application/polaris-manifest.rc
Normal file
2
res/windows/application/polaris-manifest.rc
Normal file
|
@ -0,0 +1,2 @@
|
|||
#define RT_MANIFEST 24
|
||||
1 RT_MANIFEST "polaris.exe.manifest"
|
21
res/windows/application/polaris.exe.manifest
Normal file
21
res/windows/application/polaris.exe.manifest
Normal file
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity
|
||||
version="1.0.0.0"
|
||||
processorArchitecture="*"
|
||||
name="app"
|
||||
type="win32"
|
||||
/>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity
|
||||
type="win32"
|
||||
name="Microsoft.Windows.Common-Controls"
|
||||
version="6.0.0.0"
|
||||
processorArchitecture="*"
|
||||
publicKeyToken="6595b64144ccf1df"
|
||||
language="*"
|
||||
/>
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
</assembly>
|
|
@ -49,7 +49,6 @@
|
|||
<ComponentRef Id="ProgramMenuDir" />
|
||||
<ComponentRef Id="CleanupExtraData" />
|
||||
<ComponentGroupRef Id="WebUI" />
|
||||
<ComponentGroupRef Id="SwaggerUI" />
|
||||
</Feature>
|
||||
<Icon Id="polaris.exe" SourceFile="polaris.exe" />
|
||||
<Property Id="ARPPRODUCTICON" Value="polaris.exe" />
|
||||
|
|
|
@ -8,7 +8,6 @@ if (!(Test-Path env:POLARIS_VERSION)) {
|
|||
# And remove the code setting these as defaults in `service/mod.rs`
|
||||
# $script:INSTALL_DIR = "%LOCALAPPDATA%\Permafrost\Polaris"
|
||||
# $env:POLARIS_WEB_DIR = "$INSTALL_DIR\web"
|
||||
# $env:POLARIS_SWAGGER_DIR = "$INSTALL_DIR\swagger"
|
||||
# $env:POLARIS_DB_DIR = "$INSTALL_DIR"
|
||||
# $env:POLARIS_LOG_DIR = "$INSTALL_DIR"
|
||||
# $env:POLARIS_CACHE_DIR = "$INSTALL_DIR"
|
||||
|
@ -29,7 +28,6 @@ Copy-Item .\res\windows\installer\dialog.bmp .\release\tmp\
|
|||
Copy-Item .\target\release\polaris.exe .\release\tmp\
|
||||
Copy-Item .\target\release\polaris-cli.exe .\release\tmp\
|
||||
Copy-Item .\web .\release\tmp\web -recurse
|
||||
Copy-Item .\docs\swagger .\release\tmp\swagger -recurse
|
||||
|
||||
""
|
||||
"Inserting version number in installer config"
|
||||
|
@ -41,15 +39,13 @@ $wxs.Save('.\res\windows\installer\installer.wxs')
|
|||
"Creating installer"
|
||||
$heat_exe = Join-Path $env:WIX bin\heat.exe
|
||||
& $heat_exe dir .\release\tmp\web\ -ag -g1 -dr AppDataPolaris -cg WebUI -sfrag -var wix.WebUIDir -out .\release\tmp\web_ui_fragment.wxs
|
||||
& $heat_exe dir .\release\tmp\swagger\ -ag -g1 -dr AppDataPolaris -cg SwaggerUI -sfrag -var wix.SwaggerUIDir -out .\release\tmp\swagger_ui_fragment.wxs
|
||||
|
||||
$candle_exe = Join-Path $env:WIX bin\candle.exe
|
||||
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\web_ui_fragment.wixobj .\release\tmp\web_ui_fragment.wxs
|
||||
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\swagger_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wxs
|
||||
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\installer.wixobj .\res\windows\installer\installer.wxs
|
||||
|
||||
$light_exe = Join-Path $env:WIX bin\light.exe
|
||||
& $light_exe -dWebUIDir=".\release\tmp\web" -dSwaggerUIDir=".\release\tmp\swagger" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\polaris.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wixobj
|
||||
& $light_exe -dWebUIDir=".\release\tmp\web" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\polaris.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj
|
||||
|
||||
"Cleaning up"
|
||||
Remove-Item -Recurse .\release\tmp
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
stable
|
4
rust-toolchain.toml
Normal file
4
rust-toolchain.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[toolchain]
|
||||
channel = "stable"
|
||||
components = [ "rust-src", "rustfmt" ]
|
||||
profile = "default"
|
317
src/app.rs
Normal file
317
src/app.rs
Normal file
|
@ -0,0 +1,317 @@
|
|||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use log::info;
|
||||
use rand::rngs::OsRng;
|
||||
use rand::RngCore;
|
||||
use tokio::fs::try_exists;
|
||||
use tokio::task::spawn_blocking;
|
||||
|
||||
use crate::app::legacy::*;
|
||||
use crate::paths::Paths;
|
||||
|
||||
pub mod auth;
|
||||
pub mod config;
|
||||
pub mod ddns;
|
||||
pub mod formats;
|
||||
pub mod index;
|
||||
pub mod legacy;
|
||||
pub mod ndb;
|
||||
pub mod peaks;
|
||||
pub mod playlist;
|
||||
pub mod scanner;
|
||||
pub mod thumbnail;
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test;
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error(transparent)]
|
||||
ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
|
||||
#[error(transparent)]
|
||||
ThreadJoining(#[from] tokio::task::JoinError),
|
||||
|
||||
#[error("Filesystem error for `{0}`: `{1}`")]
|
||||
Io(PathBuf, std::io::Error),
|
||||
#[error(transparent)]
|
||||
FileWatch(#[from] notify::Error),
|
||||
#[error(transparent)]
|
||||
SQL(#[from] rusqlite::Error),
|
||||
#[error(transparent)]
|
||||
Ape(#[from] ape::Error),
|
||||
#[error("ID3 error in `{0}`: `{1}`")]
|
||||
Id3(PathBuf, id3::Error),
|
||||
#[error("Metaflac error in `{0}`: `{1}`")]
|
||||
Metaflac(PathBuf, metaflac::Error),
|
||||
#[error("Mp4aMeta error in `{0}`: `{1}`")]
|
||||
Mp4aMeta(PathBuf, mp4ameta::Error),
|
||||
#[error(transparent)]
|
||||
Opus(#[from] opus_headers::ParseError),
|
||||
#[error(transparent)]
|
||||
Vorbis(#[from] lewton::VorbisError),
|
||||
#[error("Could not find a Vorbis comment within flac file")]
|
||||
VorbisCommentNotFoundInFlacFile,
|
||||
#[error("Could not read thumbnail image in `{0}`:\n\n{1}")]
|
||||
Image(PathBuf, image::error::ImageError),
|
||||
#[error("This file format is not supported: {0}")]
|
||||
UnsupportedFormat(&'static str),
|
||||
|
||||
#[error("No tracks found in audio file: {0}")]
|
||||
MediaEmpty(PathBuf),
|
||||
#[error(transparent)]
|
||||
MediaDecodeError(symphonia::core::errors::Error),
|
||||
#[error(transparent)]
|
||||
MediaDecoderError(symphonia::core::errors::Error),
|
||||
#[error(transparent)]
|
||||
MediaPacketError(symphonia::core::errors::Error),
|
||||
#[error(transparent)]
|
||||
MediaProbeError(symphonia::core::errors::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
PeaksSerialization(bitcode::Error),
|
||||
#[error(transparent)]
|
||||
PeaksDeserialization(bitcode::Error),
|
||||
|
||||
#[error(transparent)]
|
||||
NativeDatabase(#[from] native_db::db_type::Error),
|
||||
#[error("Could not initialize database")]
|
||||
NativeDatabaseCreationError(native_db::db_type::Error),
|
||||
|
||||
#[error("DDNS update query failed with HTTP status code `{0}`")]
|
||||
UpdateQueryFailed(u16),
|
||||
#[error("DDNS update query failed due to a transport error")]
|
||||
UpdateQueryTransport,
|
||||
|
||||
#[error("Auth secret does not have the expected format")]
|
||||
AuthenticationSecretInvalid,
|
||||
#[error("Missing auth secret")]
|
||||
AuthenticationSecretNotFound,
|
||||
#[error("Missing settings")]
|
||||
MiscSettingsNotFound,
|
||||
#[error("Index album art pattern is not a valid regex")]
|
||||
IndexAlbumArtPatternInvalid,
|
||||
#[error("DDNS update URL is invalid")]
|
||||
DDNSUpdateURLInvalid,
|
||||
|
||||
#[error("Could not deserialize configuration: `{0}`")]
|
||||
ConfigDeserialization(toml::de::Error),
|
||||
#[error("Could not serialize configuration: `{0}`")]
|
||||
ConfigSerialization(toml::ser::Error),
|
||||
#[error("Could not deserialize collection")]
|
||||
IndexDeserializationError,
|
||||
#[error("Could not serialize collection")]
|
||||
IndexSerializationError,
|
||||
|
||||
#[error("Invalid Directory")]
|
||||
InvalidDirectory(String),
|
||||
#[error("The following virtual path could not be mapped to a real path: `{0}`")]
|
||||
CouldNotMapToRealPath(PathBuf),
|
||||
#[error("The following real path could not be mapped to a virtual path: `{0}`")]
|
||||
CouldNotMapToVirtualPath(PathBuf),
|
||||
#[error("User not found")]
|
||||
UserNotFound,
|
||||
#[error("Directory not found: {0}")]
|
||||
DirectoryNotFound(PathBuf),
|
||||
#[error("Artist not found")]
|
||||
ArtistNotFound,
|
||||
#[error("Album not found")]
|
||||
AlbumNotFound,
|
||||
#[error("Genre not found")]
|
||||
GenreNotFound,
|
||||
#[error("Song not found")]
|
||||
SongNotFound,
|
||||
#[error("Invalid search query syntax")]
|
||||
SearchQueryParseError,
|
||||
#[error("Playlist not found")]
|
||||
PlaylistNotFound,
|
||||
#[error("No embedded artwork was found in `{0}`")]
|
||||
EmbeddedArtworkNotFound(PathBuf),
|
||||
|
||||
#[error("Cannot use empty username")]
|
||||
EmptyUsername,
|
||||
#[error("Cannot use empty password")]
|
||||
EmptyPassword,
|
||||
#[error("Username already exists")]
|
||||
DuplicateUsername,
|
||||
#[error("Username does not exist")]
|
||||
IncorrectUsername,
|
||||
#[error("Password does not match username")]
|
||||
IncorrectPassword,
|
||||
#[error("Invalid auth token")]
|
||||
InvalidAuthToken,
|
||||
#[error("Incorrect authorization scope")]
|
||||
IncorrectAuthorizationScope,
|
||||
#[error("Failed to hash password")]
|
||||
PasswordHashing,
|
||||
#[error("Failed to encode authorization token")]
|
||||
AuthorizationTokenEncoding,
|
||||
#[error("Failed to encode Branca token")]
|
||||
BrancaTokenEncoding,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct App {
|
||||
pub port: u16,
|
||||
pub web_dir_path: PathBuf,
|
||||
pub ddns_manager: ddns::Manager,
|
||||
pub scanner: scanner::Scanner,
|
||||
pub index_manager: index::Manager,
|
||||
pub config_manager: config::Manager,
|
||||
pub peaks_manager: peaks::Manager,
|
||||
pub playlist_manager: playlist::Manager,
|
||||
pub thumbnail_manager: thumbnail::Manager,
|
||||
}
|
||||
|
||||
impl App {
|
||||
pub async fn new(port: u16, paths: Paths) -> Result<Self, Error> {
|
||||
fs::create_dir_all(&paths.data_dir_path)
|
||||
.map_err(|e| Error::Io(paths.data_dir_path.clone(), e))?;
|
||||
|
||||
fs::create_dir_all(&paths.web_dir_path)
|
||||
.map_err(|e| Error::Io(paths.web_dir_path.clone(), e))?;
|
||||
|
||||
let peaks_dir_path = paths.cache_dir_path.join("peaks");
|
||||
fs::create_dir_all(&peaks_dir_path).map_err(|e| Error::Io(peaks_dir_path.clone(), e))?;
|
||||
|
||||
let thumbnails_dir_path = paths.cache_dir_path.join("thumbnails");
|
||||
fs::create_dir_all(&thumbnails_dir_path)
|
||||
.map_err(|e| Error::Io(thumbnails_dir_path.clone(), e))?;
|
||||
|
||||
let auth_secret_file_path = paths.data_dir_path.join("auth.secret");
|
||||
Self::migrate_legacy_auth_secret(&paths.db_file_path, &auth_secret_file_path).await?;
|
||||
let auth_secret = Self::get_or_create_auth_secret(&auth_secret_file_path).await?;
|
||||
|
||||
let config_manager = config::Manager::new(&paths.config_file_path, auth_secret).await?;
|
||||
let ddns_manager = ddns::Manager::new(config_manager.clone());
|
||||
let ndb_manager = ndb::Manager::new(&paths.data_dir_path)?;
|
||||
let index_manager = index::Manager::new(&paths.data_dir_path).await?;
|
||||
let scanner = scanner::Scanner::new(index_manager.clone(), config_manager.clone()).await?;
|
||||
let peaks_manager = peaks::Manager::new(peaks_dir_path);
|
||||
let playlist_manager = playlist::Manager::new(ndb_manager);
|
||||
let thumbnail_manager = thumbnail::Manager::new(thumbnails_dir_path);
|
||||
|
||||
let app = Self {
|
||||
port,
|
||||
web_dir_path: paths.web_dir_path,
|
||||
ddns_manager,
|
||||
scanner,
|
||||
index_manager,
|
||||
config_manager,
|
||||
peaks_manager,
|
||||
playlist_manager,
|
||||
thumbnail_manager,
|
||||
};
|
||||
|
||||
app.migrate_legacy_db(&paths.db_file_path).await?;
|
||||
|
||||
Ok(app)
|
||||
}
|
||||
|
||||
async fn migrate_legacy_auth_secret(
|
||||
db_file_path: &PathBuf,
|
||||
secret_file_path: &PathBuf,
|
||||
) -> Result<(), Error> {
|
||||
if !try_exists(db_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(db_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if try_exists(secret_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(secret_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
info!(
|
||||
"Migrating auth secret from database at `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
let secret = spawn_blocking({
|
||||
let db_file_path = db_file_path.clone();
|
||||
move || read_legacy_auth_secret(&db_file_path)
|
||||
})
|
||||
.await??;
|
||||
|
||||
tokio::fs::write(secret_file_path, &secret)
|
||||
.await
|
||||
.map_err(|e| Error::Io(secret_file_path.clone(), e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_legacy_db(&self, db_file_path: &PathBuf) -> Result<(), Error> {
|
||||
if !try_exists(db_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(db_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let Some(config) = tokio::task::spawn_blocking({
|
||||
let db_file_path = db_file_path.clone();
|
||||
move || read_legacy_config(&db_file_path)
|
||||
})
|
||||
.await??
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
info!(
|
||||
"Found usable config in legacy database at `{}`, beginning migration process",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
info!("Migrating configuration");
|
||||
self.config_manager.apply_config(config).await?;
|
||||
self.config_manager.save_config().await?;
|
||||
|
||||
info!("Migrating playlists");
|
||||
for (name, owner, songs) in read_legacy_playlists(
|
||||
db_file_path,
|
||||
self.index_manager.clone(),
|
||||
self.scanner.clone(),
|
||||
)
|
||||
.await?
|
||||
{
|
||||
self.playlist_manager
|
||||
.save_playlist(&name, &owner, songs)
|
||||
.await?;
|
||||
}
|
||||
|
||||
info!(
|
||||
"Deleting legacy database at `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
delete_legacy_db(db_file_path).await?;
|
||||
|
||||
info!(
|
||||
"Completed migration from `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_or_create_auth_secret(path: &Path) -> Result<auth::Secret, Error> {
|
||||
match tokio::fs::read(&path).await {
|
||||
Ok(s) => Ok(auth::Secret(
|
||||
s.try_into()
|
||||
.map_err(|_| Error::AuthenticationSecretInvalid)?,
|
||||
)),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
let mut secret = auth::Secret::default();
|
||||
OsRng.fill_bytes(secret.as_mut());
|
||||
tokio::fs::write(&path, &secret)
|
||||
.await
|
||||
.map_err(|_| Error::AuthenticationSecretInvalid)?;
|
||||
Ok(secret)
|
||||
}
|
||||
Err(e) => return Err(Error::Io(path.to_owned(), e)),
|
||||
}
|
||||
}
|
||||
}
|
95
src/app/auth.rs
Normal file
95
src/app/auth.rs
Normal file
|
@ -0,0 +1,95 @@
|
|||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
use pbkdf2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
|
||||
use pbkdf2::Pbkdf2;
|
||||
use rand::rngs::OsRng;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Secret(pub [u8; 32]);
|
||||
|
||||
impl AsRef<[u8]> for Secret {
|
||||
fn as_ref(&self) -> &[u8] {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl AsMut<[u8]> for Secret {
|
||||
fn as_mut(&mut self) -> &mut [u8] {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Token(pub String);
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
|
||||
pub enum Scope {
|
||||
PolarisAuth,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
|
||||
pub struct Authorization {
|
||||
pub username: String,
|
||||
pub scope: Scope,
|
||||
}
|
||||
|
||||
pub fn hash_password(password: &str) -> Result<String, Error> {
|
||||
if password.is_empty() {
|
||||
return Err(Error::EmptyPassword);
|
||||
}
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
match Pbkdf2.hash_password(password.as_bytes(), &salt) {
|
||||
Ok(h) => Ok(h.to_string()),
|
||||
Err(_) => Err(Error::PasswordHashing),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn verify_password(password_hash: &str, attempted_password: &str) -> bool {
|
||||
match PasswordHash::new(password_hash) {
|
||||
Ok(h) => Pbkdf2
|
||||
.verify_password(attempted_password.as_bytes(), &h)
|
||||
.is_ok(),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_auth_token(
|
||||
authorization: &Authorization,
|
||||
auth_secret: &Secret,
|
||||
) -> Result<Token, Error> {
|
||||
let serialized_authorization =
|
||||
serde_json::to_string(&authorization).or(Err(Error::AuthorizationTokenEncoding))?;
|
||||
branca::encode(
|
||||
serialized_authorization.as_bytes(),
|
||||
auth_secret.as_ref(),
|
||||
SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs() as u32,
|
||||
)
|
||||
.or(Err(Error::BrancaTokenEncoding))
|
||||
.map(Token)
|
||||
}
|
||||
|
||||
pub fn decode_auth_token(
|
||||
auth_token: &Token,
|
||||
scope: Scope,
|
||||
auth_secret: &Secret,
|
||||
) -> Result<Authorization, Error> {
|
||||
let Token(data) = auth_token;
|
||||
let ttl = match scope {
|
||||
Scope::PolarisAuth => 0, // permanent
|
||||
};
|
||||
let authorization =
|
||||
branca::decode(data, auth_secret.as_ref(), ttl).map_err(|_| Error::InvalidAuthToken)?;
|
||||
let authorization: Authorization =
|
||||
serde_json::from_slice(&authorization[..]).map_err(|_| Error::InvalidAuthToken)?;
|
||||
if authorization.scope != scope {
|
||||
return Err(Error::IncorrectAuthorizationScope);
|
||||
}
|
||||
Ok(authorization)
|
||||
}
|
338
src/app/config.rs
Normal file
338
src/app/config.rs
Normal file
|
@ -0,0 +1,338 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use log::{error, info};
|
||||
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use notify_debouncer_full::{Debouncer, FileIdMap};
|
||||
use regex::Regex;
|
||||
use tokio::sync::{futures::Notified, Notify, RwLock};
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
mod mounts;
|
||||
pub mod storage;
|
||||
mod user;
|
||||
|
||||
pub use mounts::*;
|
||||
pub use user::*;
|
||||
|
||||
use super::auth;
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct Config {
|
||||
pub album_art_pattern: Option<Regex>,
|
||||
pub ddns_update_url: Option<http::Uri>,
|
||||
pub mount_dirs: Vec<MountDir>,
|
||||
pub users: Vec<User>,
|
||||
}
|
||||
|
||||
impl TryFrom<storage::Config> for Config {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(c: storage::Config) -> Result<Self, Self::Error> {
|
||||
let mut config = Config::default();
|
||||
config.set_mounts(c.mount_dirs)?;
|
||||
config.set_users(c.users)?;
|
||||
|
||||
config.album_art_pattern = match c.album_art_pattern.as_deref().map(Regex::new) {
|
||||
Some(Ok(u)) => Some(u),
|
||||
Some(Err(_)) => return Err(Error::IndexAlbumArtPatternInvalid),
|
||||
None => None,
|
||||
};
|
||||
|
||||
config.ddns_update_url = match c.ddns_update_url.map(http::Uri::try_from) {
|
||||
Some(Ok(u)) => Some(u),
|
||||
Some(Err(_)) => return Err(Error::DDNSUpdateURLInvalid),
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Config> for storage::Config {
|
||||
fn from(c: Config) -> Self {
|
||||
Self {
|
||||
album_art_pattern: c.album_art_pattern.map(|p| p.as_str().to_owned()),
|
||||
mount_dirs: c.mount_dirs.into_iter().map(|d| d.into()).collect(),
|
||||
ddns_update_url: c.ddns_update_url.map(|u| u.to_string()),
|
||||
users: c.users.into_iter().map(|u| u.into()).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Manager {
|
||||
config_file_path: PathBuf,
|
||||
config: Arc<RwLock<Config>>,
|
||||
auth_secret: auth::Secret,
|
||||
#[allow(dead_code)]
|
||||
file_watcher: Arc<Debouncer<RecommendedWatcher, FileIdMap>>,
|
||||
change_notify: Arc<Notify>,
|
||||
}
|
||||
|
||||
impl Manager {
|
||||
pub async fn new(config_file_path: &Path, auth_secret: auth::Secret) -> Result<Self, Error> {
|
||||
if let Some(parent) = config_file_path.parent() {
|
||||
tokio::fs::create_dir_all(parent)
|
||||
.await
|
||||
.map_err(|e| Error::Io(parent.to_owned(), e))?;
|
||||
}
|
||||
|
||||
match tokio::fs::File::create_new(config_file_path).await {
|
||||
Ok(_) => (),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => (),
|
||||
Err(e) => {
|
||||
error!("Failed to create config file at {config_file_path:#?}: {e}");
|
||||
return Err(Error::Io(config_file_path.to_owned(), e));
|
||||
}
|
||||
};
|
||||
|
||||
let notify = Arc::new(Notify::new());
|
||||
let mut debouncer = notify_debouncer_full::new_debouncer(Duration::from_secs(1), None, {
|
||||
let notify = notify.clone();
|
||||
move |_| {
|
||||
notify.notify_waiters();
|
||||
}
|
||||
})?;
|
||||
|
||||
debouncer
|
||||
.watcher()
|
||||
.watch(&config_file_path, RecursiveMode::NonRecursive)?;
|
||||
|
||||
let manager = Self {
|
||||
config_file_path: config_file_path.to_owned(),
|
||||
config: Arc::new(RwLock::new(Config::default())),
|
||||
auth_secret,
|
||||
file_watcher: Arc::new(debouncer),
|
||||
change_notify: Arc::default(),
|
||||
};
|
||||
|
||||
tokio::task::spawn({
|
||||
let manager = manager.clone();
|
||||
async move {
|
||||
loop {
|
||||
notify.notified().await;
|
||||
if let Err(e) = manager.reload_config().await {
|
||||
error!("Configuration error: {e}");
|
||||
} else {
|
||||
info!("Successfully applied configuration change");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
manager.reload_config().await?;
|
||||
|
||||
Ok(manager)
|
||||
}
|
||||
|
||||
pub fn on_config_change(&self) -> Notified {
|
||||
self.change_notify.notified()
|
||||
}
|
||||
|
||||
async fn reload_config(&self) -> Result<(), Error> {
|
||||
let config = Self::read_config(&self.config_file_path).await?;
|
||||
self.apply_config(config).await
|
||||
}
|
||||
|
||||
async fn read_config(config_file_path: &Path) -> Result<storage::Config, Error> {
|
||||
let config_content = tokio::fs::read_to_string(config_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(config_file_path.to_owned(), e))?;
|
||||
toml::de::from_str::<storage::Config>(&config_content).map_err(Error::ConfigDeserialization)
|
||||
}
|
||||
|
||||
pub async fn save_config(&self) -> Result<(), Error> {
|
||||
let serialized = toml::ser::to_string_pretty::<storage::Config>(
|
||||
&self.config.read().await.clone().into(),
|
||||
)
|
||||
.map_err(Error::ConfigSerialization)?;
|
||||
tokio::fs::write(&self.config_file_path, serialized.as_bytes())
|
||||
.await
|
||||
.map_err(|e| Error::Io(self.config_file_path.clone(), e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn apply_config(&self, new_config: storage::Config) -> Result<(), Error> {
|
||||
let mut config = self.config.write().await;
|
||||
*config = new_config.try_into()?;
|
||||
self.change_notify.notify_waiters();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn mutate<F: FnOnce(&mut Config)>(&self, op: F) -> Result<(), Error> {
|
||||
self.mutate_fallible(|c| {
|
||||
op(c);
|
||||
Ok(())
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn mutate_fallible<F: FnOnce(&mut Config) -> Result<(), Error>>(
|
||||
&self,
|
||||
op: F,
|
||||
) -> Result<(), Error> {
|
||||
{
|
||||
let mut config = self.config.write().await;
|
||||
op(&mut config)?;
|
||||
}
|
||||
self.change_notify.notify_waiters();
|
||||
self.save_config().await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_index_album_art_pattern(&self) -> Regex {
|
||||
let config = self.config.read().await;
|
||||
let pattern = config.album_art_pattern.clone();
|
||||
pattern.unwrap_or_else(|| Regex::new("Folder.(jpeg|jpg|png)").unwrap())
|
||||
}
|
||||
|
||||
pub async fn set_index_album_art_pattern(&self, regex: Regex) -> Result<(), Error> {
|
||||
self.mutate(|c| {
|
||||
c.album_art_pattern = Some(regex);
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_ddns_update_url(&self) -> Option<http::Uri> {
|
||||
self.config.read().await.ddns_update_url.clone()
|
||||
}
|
||||
|
||||
pub async fn set_ddns_update_url(&self, url: Option<http::Uri>) -> Result<(), Error> {
|
||||
self.mutate(|c| {
|
||||
c.ddns_update_url = url;
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn get_users(&self) -> Vec<User> {
|
||||
self.config.read().await.users.iter().cloned().collect()
|
||||
}
|
||||
|
||||
pub async fn get_user(&self, username: &str) -> Result<User, Error> {
|
||||
let config = self.config.read().await;
|
||||
config
|
||||
.get_user(username)
|
||||
.cloned()
|
||||
.ok_or(Error::UserNotFound)
|
||||
}
|
||||
|
||||
pub async fn create_user(
|
||||
&self,
|
||||
username: &str,
|
||||
password: &str,
|
||||
admin: bool,
|
||||
) -> Result<(), Error> {
|
||||
self.mutate_fallible(|c| c.create_user(username, password, admin))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn login(&self, username: &str, password: &str) -> Result<auth::Token, Error> {
|
||||
let config = self.config.read().await;
|
||||
config.login(username, password, &self.auth_secret)
|
||||
}
|
||||
|
||||
pub async fn set_is_admin(&self, username: &str, is_admin: bool) -> Result<(), Error> {
|
||||
self.mutate_fallible(|c| c.set_is_admin(username, is_admin))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn set_password(&self, username: &str, password: &str) -> Result<(), Error> {
|
||||
self.mutate_fallible(|c| c.set_password(username, password))
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn authenticate(
|
||||
&self,
|
||||
auth_token: &auth::Token,
|
||||
scope: auth::Scope,
|
||||
) -> Result<auth::Authorization, Error> {
|
||||
let config = self.config.read().await;
|
||||
config.authenticate(auth_token, scope, &self.auth_secret)
|
||||
}
|
||||
|
||||
pub async fn delete_user(&self, username: &str) -> Result<(), Error> {
|
||||
self.mutate(|c| c.delete_user(username)).await
|
||||
}
|
||||
|
||||
pub async fn get_mounts(&self) -> Vec<MountDir> {
|
||||
let config = self.config.read().await;
|
||||
config.mount_dirs.iter().cloned().collect()
|
||||
}
|
||||
|
||||
/// Maps a virtual path (rooted at a mount name) to the corresponding real
/// filesystem path, using the currently configured mounts.
pub async fn resolve_virtual_path<P: AsRef<Path>>(
    &self,
    virtual_path: P,
) -> Result<PathBuf, Error> {
    let config = self.config.read().await;
    config.resolve_virtual_path(virtual_path)
}
|
||||
|
||||
/// Replaces the set of mount directories and persists the change.
pub async fn set_mounts(&self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
    self.mutate_fallible(|c| c.set_mounts(mount_dirs)).await
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use crate::app::test;
    use crate::test_name;

    use super::*;

    // An absent/empty config file should load as the default configuration.
    #[tokio::test]
    async fn blank_config_round_trip() {
        let config_path = PathBuf::from_iter(["test-data", "blank.toml"]);
        let manager = Manager::new(&config_path, auth::Secret([0; 32]))
            .await
            .unwrap();
        let config: storage::Config = manager.config.read().await.clone().into();
        assert_eq!(config, storage::Config::default());
    }

    // Reads a known config file fixture and spot-checks each section.
    #[tokio::test]
    async fn can_read_config() {
        let config_path = PathBuf::from_iter(["test-data", "config.toml"]);
        let manager = Manager::new(&config_path, auth::Secret([0; 32]))
            .await
            .unwrap();
        let config: storage::Config = manager.config.read().await.clone().into();

        assert_eq!(
            config.album_art_pattern,
            Some(r#"^Folder\.(png|jpg|jpeg)$"#.to_owned())
        );
        assert_eq!(
            config.mount_dirs,
            vec![storage::MountDir {
                source: PathBuf::from("test-data/small-collection"),
                name: "root".to_owned(),
            }]
        );
        assert_eq!(config.users[0].name, "test_user");
        assert_eq!(config.users[0].admin, Some(true));
        assert_eq!(
            config.users[0].initial_password,
            Some("very_secret_password".to_owned())
        );
        // The plain-text initial password should have been hashed on load.
        assert!(config.users[0].hashed_password.is_some());
    }

    // Mutations made through one manager should be visible to a second
    // manager reading the same config file (i.e. changes hit the disk).
    #[tokio::test]
    async fn can_write_config() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;
        ctx.config_manager
            .create_user("Walter", "example_password", false)
            .await
            .unwrap();

        let manager = Manager::new(&ctx.config_manager.config_file_path, auth::Secret([0; 32]))
            .await
            .unwrap();
        assert!(manager.get_user("Walter").await.is_ok());
    }
}
|
|
@ -1,11 +0,0 @@
|
|||
// Catch-all error type for this module; callers cannot distinguish failure
// causes beyond "something went wrong".
#[derive(thiserror::Error, Debug)]
pub enum Error {
    #[error("Unspecified")]
    Unspecified,
}
|
||||
|
||||
// Collapses any anyhow error into the single opaque variant, discarding the
// original error's message and context.
impl From<anyhow::Error> for Error {
    fn from(_: anyhow::Error) -> Self {
        Error::Unspecified
    }
}
|
|
@ -1,83 +0,0 @@
|
|||
use super::*;
|
||||
use crate::app::{ddns, settings, user, vfs};
|
||||
|
||||
/// Applies a parsed configuration file by delegating each section to the
/// relevant domain manager (settings, users, mounts, DDNS).
#[derive(Clone)]
pub struct Manager {
    settings_manager: settings::Manager,
    user_manager: user::Manager,
    vfs_manager: vfs::Manager,
    ddns_manager: ddns::Manager,
}
|
||||
|
||||
impl Manager {
    pub fn new(
        settings_manager: settings::Manager,
        user_manager: user::Manager,
        vfs_manager: vfs::Manager,
        ddns_manager: ddns::Manager,
    ) -> Self {
        Self {
            settings_manager,
            user_manager,
            vfs_manager,
            ddns_manager,
        }
    }

    /// Applies every section present in `config`; absent (`None`) sections
    /// leave the corresponding state untouched. All underlying errors are
    /// collapsed to `Error::Unspecified`.
    pub fn apply(&self, config: &Config) -> Result<(), Error> {
        if let Some(new_settings) = &config.settings {
            self.settings_manager
                .amend(new_settings)
                .map_err(|_| Error::Unspecified)?;
        }

        if let Some(mount_dirs) = &config.mount_dirs {
            self.vfs_manager
                .set_mount_dirs(&mount_dirs)
                .map_err(|_| Error::Unspecified)?;
        }

        if let Some(ddns_config) = &config.ydns {
            self.ddns_manager
                .set_config(&ddns_config)
                .map_err(|_| Error::Unspecified)?;
        }

        // Reconcile the stored user list against the one in the config file:
        // delete, then insert, then update everyone in the new list.
        if let Some(ref users) = config.users {
            let old_users: Vec<user::User> =
                self.user_manager.list().map_err(|_| Error::Unspecified)?;

            // Delete users that are not in new list
            for old_user in old_users
                .iter()
                .filter(|old_user| !users.iter().any(|u| u.name == old_user.name))
            {
                self.user_manager
                    .delete(&old_user.name)
                    .map_err(|_| Error::Unspecified)?;
            }

            // Insert new users
            for new_user in users
                .iter()
                .filter(|u| !old_users.iter().any(|old_user| old_user.name == u.name))
            {
                self.user_manager
                    .create(new_user)
                    .map_err(|_| Error::Unspecified)?;
            }

            // Update users
            // NOTE(review): this pass also rewrites the password/admin flag of
            // users created just above — harmless but redundant work.
            for user in users {
                self.user_manager
                    .set_password(&user.name, &user.password)
                    .map_err(|_| Error::Unspecified)?;
                self.user_manager
                    .set_is_admin(&user.name, user.admin)
                    .map_err(|_| Error::Unspecified)?;
            }
        }

        Ok(())
    }
}
|
|
@ -1,31 +0,0 @@
|
|||
use serde::Deserialize;
|
||||
use std::io::Read;
|
||||
use std::path;
|
||||
|
||||
use crate::app::{ddns, settings, user, vfs};
|
||||
|
||||
mod error;
|
||||
mod manager;
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub use error::*;
|
||||
pub use manager::*;
|
||||
|
||||
/// Deserialized representation of a configuration file. Every section is
/// optional; absent sections leave the corresponding state untouched on apply.
#[derive(Default, Deserialize)]
pub struct Config {
    pub settings: Option<settings::NewSettings>,
    pub mount_dirs: Option<Vec<vfs::MountDir>>,
    pub ydns: Option<ddns::Config>,
    pub users: Option<Vec<user::NewUser>>,
}
|
||||
|
||||
impl Config {
|
||||
pub fn from_path(path: &path::Path) -> anyhow::Result<Config> {
|
||||
let mut config_file = std::fs::File::open(path)?;
|
||||
let mut config_file_content = String::new();
|
||||
config_file.read_to_string(&mut config_file_content)?;
|
||||
let config = toml::de::from_str::<Self>(&config_file_content)?;
|
||||
Ok(config)
|
||||
}
|
||||
}
|
149
src/app/config/mounts.rs
Normal file
149
src/app/config/mounts.rs
Normal file
|
@ -0,0 +1,149 @@
|
|||
use std::{
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
use super::storage;
|
||||
use super::Config;
|
||||
|
||||
/// A validated mount point: a real filesystem directory (`source`) exposed
/// under a virtual top-level name (`name`).
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct MountDir {
    pub source: PathBuf,
    pub name: String,
}
|
||||
|
||||
// Converts the raw on-disk representation into the validated runtime form,
// normalizing path separators along the way.
impl TryFrom<storage::MountDir> for MountDir {
    type Error = Error;

    fn try_from(mount_dir: storage::MountDir) -> Result<Self, Self::Error> {
        // TODO validation
        Ok(Self {
            source: sanitize_path(&mount_dir.source),
            name: mount_dir.name,
        })
    }
}
|
||||
|
||||
impl From<MountDir> for storage::MountDir {
|
||||
fn from(m: MountDir) -> Self {
|
||||
Self {
|
||||
source: m.source,
|
||||
name: m.name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn set_mounts(&mut self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
|
||||
let mut new_mount_dirs = Vec::new();
|
||||
for mount_dir in mount_dirs {
|
||||
let mount_dir = <storage::MountDir as TryInto<MountDir>>::try_into(mount_dir)?;
|
||||
new_mount_dirs.push(mount_dir);
|
||||
}
|
||||
new_mount_dirs.dedup_by(|a, b| a.name == b.name);
|
||||
self.mount_dirs = new_mount_dirs;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn resolve_virtual_path<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> {
|
||||
for mount in &self.mount_dirs {
|
||||
if let Ok(p) = virtual_path.as_ref().strip_prefix(&mount.name) {
|
||||
return if p.components().count() == 0 {
|
||||
Ok(mount.source.clone())
|
||||
} else {
|
||||
Ok(mount.source.join(p))
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(Error::CouldNotMapToRealPath(virtual_path.as_ref().into()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Normalizes a user-provided path by replacing every `/` and `\` with the
/// platform's native separator, so config files written on one OS keep
/// working on another.
///
/// This is intentionally a 1:1 character substitution: repeated or trailing
/// separators are preserved (path comparison is component-based, so they are
/// harmless). Takes `&Path` instead of `&PathBuf` (clippy::ptr_arg) and
/// avoids recompiling a regex on every call — a two-character class needs
/// only a plain character map.
fn sanitize_path(source: &Path) -> PathBuf {
    let sanitized: String = source
        .to_string_lossy()
        .chars()
        .map(|c| match c {
            '/' | '\\' => std::path::MAIN_SEPARATOR,
            other => other,
        })
        .collect();
    PathBuf::from(sanitized)
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use super::*;

    // "root/..." virtual paths should map onto the mount's source directory.
    #[test]
    fn can_resolve_virtual_paths() {
        let raw_config = storage::Config {
            mount_dirs: vec![storage::MountDir {
                name: "root".to_owned(),
                source: PathBuf::from("test_dir"),
            }],
            ..Default::default()
        };

        let config: Config = raw_config.try_into().unwrap();

        // (virtual path components, expected real path components)
        let test_cases = vec![
            (vec!["root"], vec!["test_dir"]),
            (
                vec!["root", "somewhere", "something.png"],
                vec!["test_dir", "somewhere", "something.png"],
            ),
        ];

        for (r#virtual, real) in test_cases {
            let real_path: PathBuf = real.iter().collect();
            let virtual_path: PathBuf = r#virtual.iter().collect();
            let converted_path = config.resolve_virtual_path(&virtual_path).unwrap();
            assert_eq!(converted_path, real_path);
        }
    }

    // Mixed and repeated separators should all normalize to the platform's
    // native separator when a config is ingested.
    #[test]
    fn sanitizes_paths() {
        let mut correct_path = PathBuf::new();
        if cfg!(target_os = "windows") {
            correct_path.push("C:\\");
        } else {
            correct_path.push("/usr");
        }
        correct_path.push("some");
        correct_path.push("path");

        let tests = if cfg!(target_os = "windows") {
            vec![
                r#"C:/some/path"#,
                r#"C:\some\path"#,
                r#"C:\some\path\"#,
                r#"C:\some\path\\\\"#,
                r#"C:\some/path//"#,
            ]
        } else {
            vec![
                r#"/usr/some/path"#,
                r#"/usr\some\path"#,
                r#"/usr\some\path\"#,
                r#"/usr\some\path\\\\"#,
                r#"/usr\some/path//"#,
            ]
        };

        for test in tests {
            let raw_config = storage::Config {
                mount_dirs: vec![storage::MountDir {
                    name: "root".to_owned(),
                    source: PathBuf::from(test),
                }],
                ..Default::default()
            };
            let config: Config = raw_config.try_into().unwrap();
            let converted_path = config.resolve_virtual_path(&PathBuf::from("root")).unwrap();
            assert_eq!(converted_path, correct_path);
        }
    }
}
|
32
src/app/config/storage.rs
Normal file
32
src/app/config/storage.rs
Normal file
|
@ -0,0 +1,32 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// On-disk representation of a user account. `initial_password` is a
/// plain-text bootstrap password from the config file; `hashed_password` is
/// the stored hash derived from it (or set directly). `None` fields are
/// omitted when serializing.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct User {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub admin: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub initial_password: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hashed_password: Option<String>,
}
|
||||
|
||||
/// On-disk representation of a mount point; the validated runtime form lives
/// in the sibling `mounts` module.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct MountDir {
    pub source: PathBuf,
    pub name: String,
}
|
||||
|
||||
/// Root of the serialized configuration file. Empty collections and `None`
/// options are omitted on write, so a default config serializes to nothing.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct Config {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub album_art_pattern: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub mount_dirs: Vec<MountDir>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ddns_update_url: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub users: Vec<User>,
}
|
|
@ -1,83 +0,0 @@
|
|||
use super::*;
|
||||
use crate::app::{ddns, settings, test, user, vfs};
|
||||
use crate::test_name;
|
||||
|
||||
// Applying a config with a settings section should persist those settings.
#[test]
fn apply_saves_misc_settings() {
    let ctx = test::ContextBuilder::new(test_name!()).build();
    let new_config = Config {
        settings: Some(settings::NewSettings {
            album_art_pattern: Some("🖼️\\.jpg".into()),
            reindex_every_n_seconds: Some(100),
            ..Default::default()
        }),
        ..Default::default()
    };

    ctx.config_manager.apply(&new_config).unwrap();
    let settings = ctx.settings_manager.read().unwrap();
    let new_settings = new_config.settings.unwrap();
    assert_eq!(
        settings.album_art_pattern,
        new_settings.album_art_pattern.unwrap()
    );
    assert_eq!(
        settings.reindex_every_n_seconds,
        new_settings.reindex_every_n_seconds.unwrap()
    );
}
|
||||
|
||||
// Applying a config with mount points should persist them in the VFS.
#[test]
fn apply_saves_mount_points() {
    let ctx = test::ContextBuilder::new(test_name!()).build();

    let new_config = Config {
        mount_dirs: Some(vec![vfs::MountDir {
            source: "/home/music".into(),
            name: "🎵📁".into(),
        }]),
        ..Default::default()
    };

    ctx.config_manager.apply(&new_config).unwrap();
    let actual_mount_dirs: Vec<vfs::MountDir> = ctx.vfs_manager.mount_dirs().unwrap();
    assert_eq!(actual_mount_dirs, new_config.mount_dirs.unwrap());
}
|
||||
|
||||
// Applying a config with a ydns section should persist the DDNS credentials.
#[test]
fn apply_saves_ddns_settings() {
    let ctx = test::ContextBuilder::new(test_name!()).build();

    let new_config = Config {
        ydns: Some(ddns::Config {
            host: "🐸🐸🐸.ydns.eu".into(),
            username: "kfr🐸g".into(),
            password: "tasty🐞".into(),
        }),
        ..Default::default()
    };

    ctx.config_manager.apply(&new_config).unwrap();
    let actual_ddns = ctx.ddns_manager.config().unwrap();
    assert_eq!(actual_ddns, new_config.ydns.unwrap());
}
|
||||
|
||||
// Re-applying an existing user with a different admin flag should update it.
#[test]
fn apply_can_toggle_admin() {
    let ctx = test::ContextBuilder::new(test_name!())
        .user("Walter", "Tasty🍖", true)
        .build();

    assert!(ctx.user_manager.list().unwrap()[0].is_admin());

    let new_config = Config {
        users: Some(vec![user::NewUser {
            name: "Walter".into(),
            password: "Tasty🍖".into(),
            admin: false,
        }]),
        ..Default::default()
    };
    ctx.config_manager.apply(&new_config).unwrap();
    assert!(!ctx.user_manager.list().unwrap()[0].is_admin());
}
|
308
src/app/config/user.rs
Normal file
308
src/app/config/user.rs
Normal file
|
@ -0,0 +1,308 @@
|
|||
use crate::app::{auth, Error};
|
||||
|
||||
use super::storage;
|
||||
use super::Config;
|
||||
|
||||
/// Runtime representation of a user account. Unlike `storage::User`, a
/// password hash is always present here.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct User {
    pub name: String,
    pub admin: Option<bool>,
    // Plain-text password carried over from the config file so it can be
    // written back on save; authentication only uses `hashed_password`.
    pub initial_password: Option<String>,
    pub hashed_password: String,
}
|
||||
|
||||
impl User {
|
||||
pub fn is_admin(&self) -> bool {
|
||||
self.admin == Some(true)
|
||||
}
|
||||
}
|
||||
|
||||
// Promotes an on-disk user to the runtime form by ensuring a password hash
// exists: an existing hash always wins over the plain-text initial password;
// the initial password is hashed only when no hash is stored yet.
impl TryFrom<storage::User> for User {
    type Error = Error;

    fn try_from(user: storage::User) -> Result<Self, Self::Error> {
        let hashed_password = match (&user.initial_password, &user.hashed_password) {
            (_, Some(p)) => p.clone(),
            (Some(p), None) => auth::hash_password(p)?,
            // A user with neither a password nor a hash could never log in.
            (None, None) => return Err(Error::EmptyPassword),
        };

        Ok(Self {
            name: user.name,
            admin: user.admin,
            initial_password: user.initial_password,
            hashed_password,
        })
    }
}
|
||||
|
||||
impl From<User> for storage::User {
|
||||
fn from(user: User) -> Self {
|
||||
Self {
|
||||
name: user.name,
|
||||
admin: user.admin,
|
||||
initial_password: user.initial_password,
|
||||
hashed_password: Some(user.hashed_password),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn set_users(&mut self, users: Vec<storage::User>) -> Result<(), Error> {
|
||||
let mut new_users = Vec::new();
|
||||
for user in users {
|
||||
let user = <storage::User as TryInto<User>>::try_into(user)?;
|
||||
new_users.push(user);
|
||||
}
|
||||
new_users.dedup_by(|a, b| a.name == b.name);
|
||||
self.users = new_users;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn create_user(
|
||||
&mut self,
|
||||
username: &str,
|
||||
password: &str,
|
||||
admin: bool,
|
||||
) -> Result<(), Error> {
|
||||
if username.is_empty() {
|
||||
return Err(Error::EmptyUsername);
|
||||
}
|
||||
|
||||
if self.exists(username) {
|
||||
return Err(Error::DuplicateUsername);
|
||||
}
|
||||
|
||||
let password_hash = auth::hash_password(&password)?;
|
||||
|
||||
self.users.push(User {
|
||||
name: username.to_owned(),
|
||||
admin: Some(admin),
|
||||
initial_password: None,
|
||||
hashed_password: password_hash,
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn exists(&self, username: &str) -> bool {
|
||||
self.users.iter().any(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn get_user(&self, username: &str) -> Option<&User> {
|
||||
self.users.iter().find(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn get_user_mut(&mut self, username: &str) -> Option<&mut User> {
|
||||
self.users.iter_mut().find(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn authenticate(
|
||||
&self,
|
||||
auth_token: &auth::Token,
|
||||
scope: auth::Scope,
|
||||
auth_secret: &auth::Secret,
|
||||
) -> Result<auth::Authorization, Error> {
|
||||
let authorization = auth::decode_auth_token(auth_token, scope, auth_secret)?;
|
||||
if self.exists(&authorization.username) {
|
||||
Ok(authorization)
|
||||
} else {
|
||||
Err(Error::IncorrectUsername)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn login(
|
||||
&self,
|
||||
username: &str,
|
||||
password: &str,
|
||||
auth_secret: &auth::Secret,
|
||||
) -> Result<auth::Token, Error> {
|
||||
let user = self.get_user(username).ok_or(Error::IncorrectUsername)?;
|
||||
if auth::verify_password(&user.hashed_password, password) {
|
||||
let authorization = auth::Authorization {
|
||||
username: username.to_owned(),
|
||||
scope: auth::Scope::PolarisAuth,
|
||||
};
|
||||
auth::generate_auth_token(&authorization, auth_secret)
|
||||
} else {
|
||||
Err(Error::IncorrectPassword)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_is_admin(&mut self, username: &str, is_admin: bool) -> Result<(), Error> {
|
||||
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
|
||||
user.admin = Some(is_admin);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_password(&mut self, username: &str, password: &str) -> Result<(), Error> {
|
||||
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
|
||||
user.hashed_password = auth::hash_password(password)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn delete_user(&mut self, username: &str) {
|
||||
self.users.retain(|u| u.name != username);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use crate::app::test;
    use crate::test_name;

    use super::*;

    const TEST_USERNAME: &str = "Walter";
    const TEST_PASSWORD: &str = "super_secret!";

    // Converting a user with only a plain-text password should produce a hash.
    #[test]
    fn adds_password_hashes() {
        let user_in = storage::User {
            name: TEST_USERNAME.to_owned(),
            initial_password: Some(TEST_PASSWORD.to_owned()),
            ..Default::default()
        };

        let user: User = user_in.try_into().unwrap();

        let user_out: storage::User = user.into();

        assert_eq!(user_out.name, TEST_USERNAME);
        assert_eq!(user_out.initial_password, Some(TEST_PASSWORD.to_owned()));
        assert!(user_out.hashed_password.is_some());
    }

    // A round-trip must not re-hash or discard an existing password hash.
    #[test]
    fn preserves_password_hashes() {
        let user_in = storage::User {
            name: TEST_USERNAME.to_owned(),
            hashed_password: Some("hash".to_owned()),
            ..Default::default()
        };
        let user: User = user_in.clone().try_into().unwrap();
        let user_out: storage::User = user.into();
        assert_eq!(user_out, user_in);
    }

    #[tokio::test]
    async fn create_delete_user_golden_path() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;

        ctx.config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false)
            .await
            .unwrap();
        assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_ok());

        ctx.config_manager.delete_user(TEST_USERNAME).await.unwrap();
        assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_err());
    }

    #[tokio::test]
    async fn cannot_create_user_with_blank_username() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;
        let result = ctx.config_manager.create_user("", TEST_PASSWORD, false);
        assert!(matches!(result.await.unwrap_err(), Error::EmptyUsername));
    }

    #[tokio::test]
    async fn cannot_create_user_with_blank_password() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;
        let result = ctx.config_manager.create_user(TEST_USERNAME, "", false);
        assert!(matches!(result.await.unwrap_err(), Error::EmptyPassword));
    }

    #[tokio::test]
    async fn cannot_create_duplicate_user() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;
        let result = ctx
            .config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false);
        assert!(result.await.is_ok());

        let result = ctx
            .config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false);
        assert!(matches!(
            result.await.unwrap_err(),
            Error::DuplicateUsername
        ));
    }

    #[tokio::test]
    async fn login_rejects_bad_password() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;

        ctx.config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false)
            .await
            .unwrap();

        let result = ctx.config_manager.login(TEST_USERNAME, "not the password");
        assert!(matches!(
            result.await.unwrap_err(),
            Error::IncorrectPassword
        ));
    }

    #[tokio::test]
    async fn login_golden_path() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;

        ctx.config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false)
            .await
            .unwrap();

        let result = ctx.config_manager.login(TEST_USERNAME, TEST_PASSWORD);
        assert!(result.await.is_ok());
    }

    #[tokio::test]
    async fn authenticate_rejects_bad_token() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;

        ctx.config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false)
            .await
            .unwrap();

        let fake_token = auth::Token("fake token".to_owned());
        assert!(ctx
            .config_manager
            .authenticate(&fake_token, auth::Scope::PolarisAuth)
            .await
            .is_err())
    }

    // A token issued by login should authenticate back to the same user.
    #[tokio::test]
    async fn authenticate_golden_path() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;

        ctx.config_manager
            .create_user(TEST_USERNAME, TEST_PASSWORD, false)
            .await
            .unwrap();

        let token = ctx
            .config_manager
            .login(TEST_USERNAME, TEST_PASSWORD)
            .await
            .unwrap();

        let authorization = ctx
            .config_manager
            .authenticate(&token, auth::Scope::PolarisAuth)
            .await
            .unwrap();

        assert_eq!(
            authorization,
            auth::Authorization {
                username: TEST_USERNAME.to_owned(),
                scope: auth::Scope::PolarisAuth,
            }
        )
    }
}
|
45
src/app/ddns.rs
Normal file
45
src/app/ddns.rs
Normal file
|
@ -0,0 +1,45 @@
|
|||
use log::{debug, error};
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::app::{config, Error};
|
||||
|
||||
/// Issues dynamic DNS update requests against the update URL stored in the
/// configuration.
#[derive(Clone)]
pub struct Manager {
    config_manager: config::Manager,
}
|
||||
|
||||
impl Manager {
    pub fn new(config_manager: config::Manager) -> Self {
        Self { config_manager }
    }

    /// Performs a single DDNS update by issuing a GET request to the
    /// configured update URL. A missing URL is not an error: the update is
    /// skipped so unconfigured servers stay quiet.
    pub async fn update_ddns(&self) -> Result<(), Error> {
        let url = self.config_manager.get_ddns_update_url().await;
        let Some(url) = url else {
            debug!("Skipping DDNS update because credentials are missing");
            return Ok(());
        };

        // NOTE(review): ureq performs blocking I/O inside this async fn —
        // confirm this is acceptable here or move it to spawn_blocking.
        let response = ureq::get(&url.to_string()).call();

        // Map ureq's two failure modes onto our error type; the response body
        // of a successful call is ignored.
        match response {
            Ok(_) => Ok(()),
            Err(ureq::Error::Status(code, _)) => Err(Error::UpdateQueryFailed(code)),
            Err(ureq::Error::Transport(_)) => Err(Error::UpdateQueryTransport),
        }
    }

    /// Spawns a background task that attempts a DDNS update forever, logging
    /// failures, with a 30 minute pause between attempts.
    pub fn begin_periodic_updates(&self) {
        tokio::spawn({
            let ddns = self.clone();
            async move {
                loop {
                    if let Err(e) = ddns.update_ddns().await {
                        error!("Dynamic DNS update error: {:?}", e);
                    }
                    tokio::time::sleep(Duration::from_secs(60 * 30)).await;
                }
            }
        });
    }
}
|
|
@ -1,11 +0,0 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::db::ddns_config;
|
||||
|
||||
// Legacy Diesel-backed DDNS credentials, stored in the `ddns_config` table.
#[derive(Clone, Debug, Deserialize, Insertable, PartialEq, Queryable, Serialize)]
#[table_name = "ddns_config"]
pub struct Config {
    pub host: String,
    pub username: String,
    pub password: String,
}
|
|
@ -1,81 +0,0 @@
|
|||
use anyhow::*;
|
||||
use diesel::prelude::*;
|
||||
use log::{error, info};
|
||||
use std::thread;
|
||||
use std::time;
|
||||
use ureq;
|
||||
|
||||
use super::*;
|
||||
use crate::db::DB;
|
||||
|
||||
const DDNS_UPDATE_URL: &str = "https://ydns.io/api/v1/update/";
|
||||
|
||||
// Legacy DDNS updater backed by the database and a dedicated OS thread.
#[derive(Clone)]
pub struct Manager {
    db: DB,
}
|
||||
|
||||
impl Manager {
    pub fn new(db: DB) -> Self {
        Self { db }
    }

    // Sends one update request to YDNS using the stored credentials.
    // Missing credentials are not an error; the update is skipped.
    fn update_my_ip(&self) -> Result<()> {
        let config = self.config()?;
        if config.host.is_empty() || config.username.is_empty() {
            info!("Skipping DDNS update because credentials are missing");
            return Ok(());
        }

        let full_url = format!("{}?host={}", DDNS_UPDATE_URL, &config.host);
        let response = ureq::get(full_url.as_str())
            .auth(&config.username, &config.password)
            .call();

        if !response.ok() {
            bail!(
                "DDNS update query failed with status code: {}",
                response.status()
            );
        }

        Ok(())
    }

    // Loads the DDNS credentials row from the database.
    pub fn config(&self) -> Result<Config> {
        use crate::db::ddns_config::dsl::*;
        let connection = self.db.connect()?;
        Ok(ddns_config
            .select((host, username, password))
            .get_result(&connection)?)
    }

    // Overwrites the stored DDNS credentials.
    pub fn set_config(&self, new_config: &Config) -> Result<()> {
        use crate::db::ddns_config::dsl::*;
        let connection = self.db.connect()?;
        diesel::update(ddns_config)
            .set((
                host.eq(&new_config.host),
                username.eq(&new_config.username),
                password.eq(&new_config.password),
            ))
            .execute(&connection)?;
        Ok(())
    }

    // Spawns a dedicated OS thread running the update loop forever.
    pub fn begin_periodic_updates(&self) {
        let cloned = self.clone();
        std::thread::spawn(move || {
            cloned.run();
        });
    }

    // Update loop: attempt, log failures, sleep 30 minutes, repeat.
    fn run(&self) {
        loop {
            if let Err(e) = self.update_my_ip() {
                error!("Dynamic DNS update error: {:?}", e);
            }
            thread::sleep(time::Duration::from_secs(60 * 30));
        }
    }
}
|
|
@ -1,5 +0,0 @@
|
|||
mod config;
|
||||
mod manager;
|
||||
|
||||
pub use config::Config;
|
||||
pub use manager::Manager;
|
444
src/app/formats.rs
Normal file
444
src/app/formats.rs
Normal file
|
@ -0,0 +1,444 @@
|
|||
use id3::TagLike;
|
||||
use lewton::inside_ogg::OggStreamReader;
|
||||
use log::error;
|
||||
use std::fs;
|
||||
use std::io::{Seek, SeekFrom};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::app::Error;
|
||||
use crate::utils;
|
||||
use crate::utils::AudioFormat;
|
||||
|
||||
/// Tag metadata extracted from an audio file. All fields are optional or may
/// be empty: formats and individual files vary in which tags they carry.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct SongMetadata {
    pub disc_number: Option<u32>,
    pub track_number: Option<u32>,
    pub title: Option<String>,
    // Duration in seconds, when the tags or the audio stream provide it.
    pub duration: Option<u32>,
    pub artists: Vec<String>,
    pub album_artists: Vec<String>,
    pub album: Option<String>,
    pub year: Option<i32>,
    pub has_artwork: bool,
    pub lyricists: Vec<String>,
    pub composers: Vec<String>,
    pub genres: Vec<String>,
    pub labels: Vec<String>,
}
|
||||
|
||||
/// Reads tag metadata from an audio file, dispatching on the detected audio
/// format.
///
/// Returns `None` both for unrecognized formats and for files whose metadata
/// could not be parsed; parse failures are logged rather than propagated.
pub fn read_metadata<P: AsRef<Path>>(path: P) -> Option<SongMetadata> {
    let data = match utils::get_audio_format(&path) {
        Some(AudioFormat::AIFF) => read_id3(&path),
        Some(AudioFormat::FLAC) => read_flac(&path),
        Some(AudioFormat::MP3) => read_mp3(&path),
        Some(AudioFormat::OGG) => read_vorbis(&path),
        Some(AudioFormat::OPUS) => read_opus(&path),
        Some(AudioFormat::WAVE) => read_id3(&path),
        Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(&path),
        Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(&path),
        None => return None,
    };
    match data {
        Ok(d) => Some(d),
        Err(e) => {
            error!(
                "Error while reading file metadata for '{:?}': {}",
                path.as_ref(),
                e
            );
            None
        }
    }
}
|
||||
|
||||
// Convenience accessor for multi-valued ID3 text frames.
trait ID3Ext {
    fn get_text_values(&self, frame_name: &str) -> Vec<String>;
}
|
||||
|
||||
impl ID3Ext for id3::Tag {
    // Returns every text value of the given frame, or an empty list when the
    // frame is absent or carries no text values.
    fn get_text_values(&self, frame_name: &str) -> Vec<String> {
        self.get(frame_name)
            .and_then(|f| f.content().text_values())
            .map(|i| i.map(str::to_string).collect())
            .unwrap_or_default()
    }
}
|
||||
|
||||
// Reads ID3 tags from a file path (used for the AIFF and WAVE formats).
fn read_id3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
    let file = fs::File::open(path.as_ref()).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
    read_id3_from_file(&file, path)
}
|
||||
|
||||
// Reads ID3 tags from an already-open file handle. A partially-corrupt tag
// is accepted when the parser could salvage its contents; only a total parse
// failure becomes an error.
fn read_id3_from_file<P: AsRef<Path>>(file: &fs::File, path: P) -> Result<SongMetadata, Error> {
    let tag = id3::Tag::read_from2(file)
        .or_else(|error| {
            if let Some(tag) = error.partial_tag {
                Ok(tag)
            } else {
                Err(error)
            }
        })
        .map_err(|e| Error::Id3(path.as_ref().to_owned(), e))?;

    // Multi-valued fields are read by their raw ID3v2 frame ids.
    let artists = tag.get_text_values("TPE1");
    let album_artists = tag.get_text_values("TPE2");
    let album = tag.album().map(|s| s.to_string());
    let title = tag.title().map(|s| s.to_string());
    let duration = tag.duration();
    let disc_number = tag.disc();
    let track_number = tag.track();
    // Prefer the explicit year tag, then fall back through the various date
    // frames until one carries a year.
    let year = tag
        .year()
        .or_else(|| tag.date_released().map(|d| d.year))
        .or_else(|| tag.original_date_released().map(|d| d.year))
        .or_else(|| tag.date_recorded().map(|d| d.year));
    let has_artwork = tag.pictures().count() > 0;
    let lyricists = tag.get_text_values("TEXT");
    let composers = tag.get_text_values("TCOM");
    let genres = tag.get_text_values("TCON");
    let labels = tag.get_text_values("TPUB");

    Ok(SongMetadata {
        disc_number,
        track_number,
        title,
        duration,
        artists,
        album_artists,
        album,
        year,
        has_artwork,
        lyricists,
        composers,
        genres,
        labels,
    })
}
|
||||
|
||||
fn read_mp3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let mut file = fs::File::open(&path).unwrap();
|
||||
let mut metadata = read_id3_from_file(&file, &path)?;
|
||||
metadata.duration = metadata.duration.or_else(|| {
|
||||
file.seek(SeekFrom::Start(0)).unwrap();
|
||||
mp3_duration::from_file(&file)
|
||||
.map(|d| d.as_secs() as u32)
|
||||
.ok()
|
||||
});
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
mod ape_ext {
|
||||
use regex::Regex;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
pub fn read_string(item: &ape::Item) -> Option<String> {
|
||||
item.try_into().ok().map(str::to_string)
|
||||
}
|
||||
|
||||
pub fn read_strings(item: Option<&ape::Item>) -> Vec<String> {
|
||||
let Some(item) = item else {
|
||||
return vec![];
|
||||
};
|
||||
let strings: Vec<&str> = item.try_into().unwrap_or_default();
|
||||
strings.into_iter().map(str::to_string).collect()
|
||||
}
|
||||
|
||||
pub fn read_i32(item: &ape::Item) -> Option<i32> {
|
||||
item.try_into()
|
||||
.ok()
|
||||
.map(|s: &str| s.parse::<i32>().ok())
|
||||
.flatten()
|
||||
}
|
||||
|
||||
static X_OF_Y_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r#"^\d+"#).unwrap());
|
||||
|
||||
pub fn read_x_of_y(item: &ape::Item) -> Option<u32> {
|
||||
item.try_into()
|
||||
.ok()
|
||||
.map(|s: &str| {
|
||||
if let Some(m) = X_OF_Y_REGEX.find(s) {
|
||||
s[m.start()..m.end()].parse().ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.flatten()
|
||||
}
|
||||
}
|
||||
|
||||
fn read_ape<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let tag = ape::read_from_path(path)?;
|
||||
let artists = ape_ext::read_strings(tag.item("Artist"));
|
||||
let album = tag.item("Album").and_then(ape_ext::read_string);
|
||||
let album_artists = ape_ext::read_strings(tag.item("Album artist"));
|
||||
let title = tag.item("Title").and_then(ape_ext::read_string);
|
||||
let year = tag.item("Year").and_then(ape_ext::read_i32);
|
||||
let disc_number = tag.item("Disc").and_then(ape_ext::read_x_of_y);
|
||||
let track_number = tag.item("Track").and_then(ape_ext::read_x_of_y);
|
||||
let lyricists = ape_ext::read_strings(tag.item("LYRICIST"));
|
||||
let composers = ape_ext::read_strings(tag.item("COMPOSER"));
|
||||
let genres = ape_ext::read_strings(tag.item("GENRE"));
|
||||
let labels = ape_ext::read_strings(tag.item("PUBLISHER"));
|
||||
Ok(SongMetadata {
|
||||
artists,
|
||||
album_artists,
|
||||
album,
|
||||
title,
|
||||
duration: None,
|
||||
disc_number,
|
||||
track_number,
|
||||
year,
|
||||
has_artwork: false,
|
||||
lyricists,
|
||||
composers,
|
||||
genres,
|
||||
labels,
|
||||
})
|
||||
}
|
||||
|
||||
/// Reads song metadata from an Ogg Vorbis file's comment header.
///
/// Duration and artwork are not read for this format and keep their
/// `SongMetadata::default()` values.
///
/// # Errors
/// Returns `Error::Io` if the file cannot be opened, or an Ogg decoding
/// error from `OggStreamReader::new`.
fn read_vorbis<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
    let file = fs::File::open(&path).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
    let source = OggStreamReader::new(file)?;

    let mut metadata = SongMetadata::default();
    for (key, value) in source.comment_hdr.comment_list {
        // Comment field names are matched case-insensitively via the project macro.
        utils::match_ignore_case! {
            match key {
                "TITLE" => metadata.title = Some(value),
                "ALBUM" => metadata.album = Some(value),
                "ARTIST" => metadata.artists.push(value),
                "ALBUMARTIST" => metadata.album_artists.push(value),
                "TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
                "DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
                "DATE" => metadata.year = value.parse::<i32>().ok(),
                "LYRICIST" => metadata.lyricists.push(value),
                "COMPOSER" => metadata.composers.push(value),
                "GENRE" => metadata.genres.push(value),
                "PUBLISHER" => metadata.labels.push(value),
                _ => (),
            }
        }
    }

    Ok(metadata)
}
|
||||
|
||||
/// Reads song metadata from an Opus file's comment header.
///
/// Duration and artwork are not read for this format and keep their
/// `SongMetadata::default()` values.
fn read_opus<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
    let headers = opus_headers::parse_from_path(path)?;

    let mut metadata = SongMetadata::default();
    for (key, value) in headers.comments.user_comments {
        // Comment field names are matched case-insensitively via the project macro.
        utils::match_ignore_case! {
            match key {
                "TITLE" => metadata.title = Some(value),
                "ALBUM" => metadata.album = Some(value),
                "ARTIST" => metadata.artists.push(value),
                "ALBUMARTIST" => metadata.album_artists.push(value),
                "TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
                "DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
                "DATE" => metadata.year = value.parse::<i32>().ok(),
                "LYRICIST" => metadata.lyricists.push(value),
                "COMPOSER" => metadata.composers.push(value),
                "GENRE" => metadata.genres.push(value),
                "PUBLISHER" => metadata.labels.push(value),
                _ => (),
            }
        }
    }

    Ok(metadata)
}
|
||||
|
||||
fn read_flac<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let tag = metaflac::Tag::read_from_path(&path)
|
||||
.map_err(|e| Error::Metaflac(path.as_ref().to_owned(), e))?;
|
||||
let vorbis = tag
|
||||
.vorbis_comments()
|
||||
.ok_or(Error::VorbisCommentNotFoundInFlacFile)?;
|
||||
let disc_number = vorbis
|
||||
.get("DISCNUMBER")
|
||||
.and_then(|d| d[0].parse::<u32>().ok());
|
||||
let year = vorbis.get("DATE").and_then(|d| d[0].parse::<i32>().ok());
|
||||
let mut streaminfo = tag.get_blocks(metaflac::BlockType::StreamInfo);
|
||||
let duration = match streaminfo.next() {
|
||||
Some(metaflac::Block::StreamInfo(s)) => Some(s.total_samples as u32 / s.sample_rate),
|
||||
_ => None,
|
||||
};
|
||||
let has_artwork = tag.pictures().count() > 0;
|
||||
|
||||
let multivalue = |o: Option<&Vec<String>>| o.cloned().unwrap_or_default();
|
||||
|
||||
Ok(SongMetadata {
|
||||
artists: multivalue(vorbis.artist()),
|
||||
album_artists: multivalue(vorbis.album_artist()),
|
||||
album: vorbis.album().map(|v| v[0].clone()),
|
||||
title: vorbis.title().map(|v| v[0].clone()),
|
||||
duration,
|
||||
disc_number,
|
||||
track_number: vorbis.track(),
|
||||
year,
|
||||
has_artwork,
|
||||
lyricists: multivalue(vorbis.get("LYRICIST")),
|
||||
composers: multivalue(vorbis.get("COMPOSER")),
|
||||
genres: multivalue(vorbis.get("GENRE")),
|
||||
labels: multivalue(vorbis.get("PUBLISHER")),
|
||||
})
|
||||
}
|
||||
|
||||
fn read_mp4<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let mut tag = mp4ameta::Tag::read_from_path(&path)
|
||||
.map_err(|e| Error::Mp4aMeta(path.as_ref().to_owned(), e))?;
|
||||
let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label");
|
||||
|
||||
Ok(SongMetadata {
|
||||
artists: tag.take_artists().collect(),
|
||||
album_artists: tag.take_album_artists().collect(),
|
||||
album: tag.take_album(),
|
||||
title: tag.take_title(),
|
||||
duration: tag.duration().map(|v| v.as_secs() as u32),
|
||||
disc_number: tag.disc_number().map(|d| d as u32),
|
||||
track_number: tag.track_number().map(|d| d as u32),
|
||||
year: tag.year().and_then(|v| v.parse::<i32>().ok()),
|
||||
has_artwork: tag.artwork().is_some(),
|
||||
lyricists: tag.take_lyricists().collect(),
|
||||
composers: tag.take_composers().collect(),
|
||||
genres: tag.take_genres().collect(),
|
||||
labels: tag.take_strings_of(&label_ident).collect(),
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
fn reads_file_metadata() {
    // The same sample tags are embedded in every test file; only the
    // availability of a duration differs per format.
    let expected_without_duration = SongMetadata {
        disc_number: Some(3),
        track_number: Some(1),
        title: Some("TEST TITLE".into()),
        artists: vec!["TEST ARTIST".into()],
        album_artists: vec!["TEST ALBUM ARTIST".into()],
        album: Some("TEST ALBUM".into()),
        duration: None,
        year: Some(2016),
        has_artwork: false,
        lyricists: vec!["TEST LYRICIST".into()],
        composers: vec!["TEST COMPOSER".into()],
        genres: vec!["TEST GENRE".into()],
        labels: vec!["TEST LABEL".into()],
    };
    let expected_with_duration = SongMetadata {
        duration: Some(0),
        ..expected_without_duration.clone()
    };

    // Formats whose reader does not report a duration.
    for file in [
        "sample.aif",
        "sample.ogg",
        "sample.opus",
        "sample.ape",
        "sample.wav",
    ] {
        let path = Path::new("test-data/formats").join(file);
        assert_eq!(read_metadata(&path).unwrap(), expected_without_duration);
    }

    // Formats whose reader reports a duration.
    for file in ["sample.mp3", "sample.flac", "sample.m4a"] {
        let path = Path::new("test-data/formats").join(file);
        assert_eq!(read_metadata(&path).unwrap(), expected_with_duration);
    }
}
|
||||
|
||||
#[test]
fn reads_embedded_artwork() {
    // Every artwork sample should report an embedded picture.
    for file in [
        "sample.aif",
        "sample.mp3",
        "sample.flac",
        "sample.m4a",
        "sample.wav",
    ] {
        let path = Path::new("test-data/artwork").join(file);
        assert!(read_metadata(&path).unwrap().has_artwork);
    }
}
|
||||
|
||||
#[test]
fn reads_multivalue_fields() {
    // Each multi-valued field carries two entries in the test files.
    let expected_without_duration = SongMetadata {
        disc_number: Some(3),
        track_number: Some(1),
        title: Some("TEST TITLE".into()),
        artists: vec!["TEST ARTIST".into(), "OTHER ARTIST".into()],
        album_artists: vec!["TEST ALBUM ARTIST".into(), "OTHER ALBUM ARTIST".into()],
        album: Some("TEST ALBUM".into()),
        duration: None,
        year: Some(2016),
        has_artwork: false,
        lyricists: vec!["TEST LYRICIST".into(), "OTHER LYRICIST".into()],
        composers: vec!["TEST COMPOSER".into(), "OTHER COMPOSER".into()],
        genres: vec!["TEST GENRE".into(), "OTHER GENRE".into()],
        labels: vec!["TEST LABEL".into(), "OTHER LABEL".into()],
    };
    let expected_with_duration = SongMetadata {
        duration: Some(0),
        ..expected_without_duration.clone()
    };

    // TODO Test m4a support (likely working). Pending https://tickets.metabrainz.org/browse/PICARD-3029
    for file in [
        "multivalue.aif",
        "multivalue.ogg",
        "multivalue.opus",
        "multivalue.ape",
        "multivalue.wav",
    ] {
        let path = Path::new("test-data/multivalue").join(file);
        assert_eq!(read_metadata(&path).unwrap(), expected_without_duration);
    }
    for file in ["multivalue.mp3", "multivalue.flac"] {
        let path = Path::new("test-data/multivalue").join(file);
        assert_eq!(read_metadata(&path).unwrap(), expected_with_duration);
    }
}
|
388
src/app/index.rs
Normal file
388
src/app/index.rs
Normal file
|
@ -0,0 +1,388 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use log::{error, info};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::task::spawn_blocking;
|
||||
|
||||
use crate::app::{scanner, Error};
|
||||
|
||||
mod browser;
|
||||
mod collection;
|
||||
mod dictionary;
|
||||
mod query;
|
||||
mod search;
|
||||
mod storage;
|
||||
|
||||
pub use browser::File;
|
||||
pub use collection::{Album, AlbumHeader, Artist, ArtistHeader, Genre, GenreHeader, Song};
|
||||
use storage::{store_song, AlbumKey, ArtistKey, GenreKey, InternPath, SongKey};
|
||||
|
||||
/// Handle to the in-memory collection index and its serialized on-disk copy.
/// Cloning is cheap: the index itself is shared behind an `Arc`.
#[derive(Clone)]
pub struct Manager {
    // Location of the serialized index file ("collection.index").
    index_file_path: PathBuf,
    index: Arc<RwLock<Index>>, // Not a tokio RwLock as we want to do CPU-bound work with Index and lock this inside spawn_blocking()
}
|
||||
|
||||
impl Manager {
|
||||
pub async fn new(directory: &Path) -> Result<Self, Error> {
|
||||
tokio::fs::create_dir_all(directory)
|
||||
.await
|
||||
.map_err(|e| Error::Io(directory.to_owned(), e))?;
|
||||
|
||||
let index_manager = Self {
|
||||
index_file_path: directory.join("collection.index"),
|
||||
index: Arc::default(),
|
||||
};
|
||||
|
||||
match index_manager.try_restore_index().await {
|
||||
Ok(true) => info!("Restored collection index from disk"),
|
||||
Ok(false) => info!("No existing collection index to restore"),
|
||||
Err(e) => error!("Failed to restore collection index: {}", e),
|
||||
};
|
||||
|
||||
Ok(index_manager)
|
||||
}
|
||||
|
||||
pub async fn is_index_empty(&self) -> bool {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.num_songs() == 0
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn replace_index(&self, new_index: Index) {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let mut lock = index_manager.index.write().unwrap();
|
||||
*lock = new_index;
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn persist_index(&self, index: &Index) -> Result<(), Error> {
|
||||
let serialized = match bitcode::serialize(index) {
|
||||
Ok(s) => s,
|
||||
Err(_) => return Err(Error::IndexSerializationError),
|
||||
};
|
||||
tokio::fs::write(&self.index_file_path, &serialized[..])
|
||||
.await
|
||||
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn try_restore_index(&self) -> Result<bool, Error> {
|
||||
match tokio::fs::try_exists(&self.index_file_path).await {
|
||||
Ok(true) => (),
|
||||
Ok(false) => return Ok(false),
|
||||
Err(e) => return Err(Error::Io(self.index_file_path.clone(), e)),
|
||||
};
|
||||
|
||||
let serialized = tokio::fs::read(&self.index_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
|
||||
|
||||
let index = match bitcode::deserialize(&serialized[..]) {
|
||||
Ok(i) => i,
|
||||
Err(_) => return Err(Error::IndexDeserializationError),
|
||||
};
|
||||
|
||||
self.replace_index(index).await;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub async fn browse(&self, virtual_path: PathBuf) -> Result<Vec<browser::File>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.browser.browse(&index.dictionary, virtual_path)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn flatten(&self, virtual_path: PathBuf) -> Result<Vec<PathBuf>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.browser.flatten(&index.dictionary, virtual_path)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_genres(&self) -> Vec<GenreHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_genres(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_genre(&self, name: String) -> Result<Genre, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(&name)
|
||||
.ok_or_else(|| Error::GenreNotFound)?;
|
||||
let genre_key = GenreKey(name);
|
||||
index
|
||||
.collection
|
||||
.get_genre(&index.dictionary, genre_key)
|
||||
.ok_or_else(|| Error::GenreNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_albums(&self) -> Vec<AlbumHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_albums(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_artists(&self) -> Vec<ArtistHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_artists(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_artist(&self, name: String) -> Result<Artist, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(name)
|
||||
.ok_or_else(|| Error::ArtistNotFound)?;
|
||||
let artist_key = ArtistKey(name);
|
||||
index
|
||||
.collection
|
||||
.get_artist(&index.dictionary, artist_key)
|
||||
.ok_or_else(|| Error::ArtistNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_album(&self, artists: Vec<String>, name: String) -> Result<Album, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(&name)
|
||||
.ok_or_else(|| Error::AlbumNotFound)?;
|
||||
let album_key = AlbumKey {
|
||||
artists: artists
|
||||
.into_iter()
|
||||
.filter_map(|a| index.dictionary.get(a))
|
||||
.map(|k| ArtistKey(k))
|
||||
.collect(),
|
||||
name,
|
||||
};
|
||||
index
|
||||
.collection
|
||||
.get_album(&index.dictionary, album_key)
|
||||
.ok_or_else(|| Error::AlbumNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_random_albums(
|
||||
&self,
|
||||
seed: Option<u64>,
|
||||
offset: usize,
|
||||
count: usize,
|
||||
) -> Result<Vec<Album>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
Ok(index
|
||||
.collection
|
||||
.get_random_albums(&index.dictionary, seed, offset, count))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_recent_albums(
|
||||
&self,
|
||||
offset: usize,
|
||||
count: usize,
|
||||
) -> Result<Vec<Album>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
Ok(index
|
||||
.collection
|
||||
.get_recent_albums(&index.dictionary, offset, count))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_songs(&self, virtual_paths: Vec<PathBuf>) -> Vec<Result<Song, Error>> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
virtual_paths
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
p.get(&index.dictionary)
|
||||
.and_then(|virtual_path| {
|
||||
let key = SongKey { virtual_path };
|
||||
index.collection.get_song(&index.dictionary, key)
|
||||
})
|
||||
.ok_or_else(|| Error::SongNotFound)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn search(&self, query: String) -> Result<Vec<Song>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index
|
||||
.search
|
||||
.find_songs(&index.collection, &index.dictionary, &query)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// Immutable snapshot of the music collection, assembled by [`Builder`].
#[derive(Serialize, Deserialize)]
pub struct Index {
    // String interner backing the other components.
    pub dictionary: dictionary::Dictionary,
    // File-hierarchy view (browse / flatten).
    pub browser: browser::Browser,
    // Artists, albums, genres and songs.
    pub collection: collection::Collection,
    // Song search support.
    pub search: search::Search,
}
|
||||
|
||||
impl Default for Index {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
dictionary: Default::default(),
|
||||
browser: Default::default(),
|
||||
collection: Default::default(),
|
||||
search: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Accumulates scanner output and assembles it into an [`Index`].
#[derive(Clone)]
pub struct Builder {
    // Interns every string seen during the scan.
    dictionary_builder: dictionary::Builder,
    // Builds the file-hierarchy view.
    browser_builder: browser::Builder,
    // Builds the artists/albums/genres/songs collection.
    collection_builder: collection::Builder,
    // Builds the search structures.
    search_builder: search::Builder,
}
|
||||
|
||||
impl Builder {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
dictionary_builder: dictionary::Builder::default(),
|
||||
browser_builder: browser::Builder::default(),
|
||||
collection_builder: collection::Builder::default(),
|
||||
search_builder: search::Builder::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_directory(&mut self, directory: scanner::Directory) {
|
||||
self.browser_builder
|
||||
.add_directory(&mut self.dictionary_builder, directory);
|
||||
}
|
||||
|
||||
pub fn add_song(&mut self, scanner_song: scanner::Song) {
|
||||
if let Some(storage_song) = store_song(&mut self.dictionary_builder, &scanner_song) {
|
||||
self.browser_builder
|
||||
.add_song(&mut self.dictionary_builder, &scanner_song);
|
||||
self.collection_builder.add_song(&storage_song);
|
||||
self.search_builder.add_song(&scanner_song, &storage_song);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> Index {
|
||||
Index {
|
||||
dictionary: self.dictionary_builder.build(),
|
||||
browser: self.browser_builder.build(),
|
||||
collection: self.collection_builder.build(),
|
||||
search: self.search_builder.build(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Builder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use crate::{
        app::{index, test},
        test_name,
    };

    #[tokio::test]
    async fn can_persist_index() {
        let ctx = test::ContextBuilder::new(test_name!()).build().await;
        // Nothing on disk yet: restoring must report that no index was found.
        // assert!(!x) replaces assert_eq!(x, false) (clippy::bool_assert_comparison).
        assert!(!ctx.index_manager.try_restore_index().await.unwrap());
        let index = index::Builder::new().build();
        ctx.index_manager.persist_index(&index).await.unwrap();
        // After persisting, the index must be restorable.
        assert!(ctx.index_manager.try_restore_index().await.unwrap());
    }
}
|
389
src/app/index/browser.rs
Normal file
389
src/app/index/browser.rs
Normal file
|
@ -0,0 +1,389 @@
|
|||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::{BTreeSet, HashMap},
|
||||
ffi::OsStr,
|
||||
hash::Hash,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use rayon::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tinyvec::TinyVec;
|
||||
use trie_rs::{Trie, TrieBuilder};
|
||||
|
||||
use crate::app::index::{
|
||||
dictionary::{self, Dictionary},
|
||||
storage::{self, PathKey},
|
||||
InternPath,
|
||||
};
|
||||
use crate::app::{scanner, Error};
|
||||
|
||||
/// A single entry in the virtual file hierarchy, as returned by
/// [`Browser::browse`].
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub enum File {
    // Virtual path of a directory.
    Directory(PathBuf),
    // Virtual path of a song file.
    Song(PathBuf),
}
|
||||
|
||||
/// Supports browsing and flattening the virtual file hierarchy.
#[derive(Serialize, Deserialize)]
pub struct Browser {
    // Maps each known directory to its immediate children.
    directories: HashMap<PathKey, BTreeSet<storage::File>>,
    // Trie over the interned path components of every song; enables
    // subtree (prefix) queries in flatten().
    flattened: Trie<lasso2::Spur>,
}
|
||||
|
||||
impl Default for Browser {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
directories: HashMap::default(),
|
||||
flattened: TrieBuilder::new().build(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Browser {
|
||||
pub fn browse<P: AsRef<Path>>(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
virtual_path: P,
|
||||
) -> Result<Vec<File>, Error> {
|
||||
let path = virtual_path
|
||||
.as_ref()
|
||||
.get(dictionary)
|
||||
.ok_or_else(|| Error::DirectoryNotFound(virtual_path.as_ref().to_owned()))?;
|
||||
|
||||
let Some(files) = self.directories.get(&path) else {
|
||||
return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
|
||||
};
|
||||
|
||||
let mut files = files
|
||||
.iter()
|
||||
.map(|f| {
|
||||
let path = match f {
|
||||
storage::File::Directory(p) => p,
|
||||
storage::File::Song(p) => p,
|
||||
};
|
||||
let path = Path::new(OsStr::new(dictionary.resolve(&path.0))).to_owned();
|
||||
match f {
|
||||
storage::File::Directory(_) => File::Directory(path),
|
||||
storage::File::Song(_) => File::Song(path),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if virtual_path.as_ref().parent().is_none() {
|
||||
if let [File::Directory(ref p)] = files[..] {
|
||||
return self.browse(dictionary, p);
|
||||
}
|
||||
}
|
||||
|
||||
let collator = dictionary::make_collator();
|
||||
files.sort_by(|a, b| {
|
||||
let (a, b) = match (a, b) {
|
||||
(File::Directory(_), File::Song(_)) => return Ordering::Less,
|
||||
(File::Song(_), File::Directory(_)) => return Ordering::Greater,
|
||||
(File::Directory(a), File::Directory(b)) => (a, b),
|
||||
(File::Song(a), File::Song(b)) => (a, b),
|
||||
};
|
||||
collator.compare(
|
||||
a.as_os_str().to_string_lossy().as_ref(),
|
||||
b.as_os_str().to_string_lossy().as_ref(),
|
||||
)
|
||||
});
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
pub fn flatten<P: AsRef<Path>>(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
virtual_path: P,
|
||||
) -> Result<Vec<PathBuf>, Error> {
|
||||
let path_components = virtual_path
|
||||
.as_ref()
|
||||
.components()
|
||||
.map(|c| c.as_os_str().to_str().unwrap_or_default())
|
||||
.filter_map(|c| dictionary.get(c))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !self.flattened.is_prefix(&path_components) {
|
||||
return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
|
||||
}
|
||||
|
||||
let mut results: Vec<TinyVec<[_; 8]>> = self
|
||||
.flattened
|
||||
.predictive_search(path_components)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
results.par_sort_unstable_by(|a, b| {
|
||||
for (x, y) in a.iter().zip(b.iter()) {
|
||||
match dictionary.cmp(x, y) {
|
||||
Ordering::Equal => continue,
|
||||
ordering @ _ => return ordering,
|
||||
}
|
||||
}
|
||||
a.len().cmp(&b.len())
|
||||
});
|
||||
|
||||
let files = results
|
||||
.into_iter()
|
||||
.map(|c: TinyVec<[_; 8]>| -> PathBuf {
|
||||
c.into_iter()
|
||||
.map(|s| dictionary.resolve(&s))
|
||||
.collect::<TinyVec<[&str; 8]>>()
|
||||
.join(std::path::MAIN_SEPARATOR_STR)
|
||||
.into()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
}
|
||||
|
||||
/// Incrementally builds a [`Browser`] from scanner output.
#[derive(Clone, Default)]
pub struct Builder {
    // Directory listings under construction.
    directories: HashMap<PathKey, BTreeSet<storage::File>>,
    // Trie of song path components under construction.
    flattened: TrieBuilder<lasso2::Spur>,
}
|
||||
|
||||
impl Builder {
|
||||
pub fn add_directory(
|
||||
&mut self,
|
||||
dictionary_builder: &mut dictionary::Builder,
|
||||
directory: scanner::Directory,
|
||||
) {
|
||||
let Some(virtual_path) = (&directory.virtual_path).get_or_intern(dictionary_builder) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(virtual_parent) = directory
|
||||
.virtual_path
|
||||
.parent()
|
||||
.and_then(|p| p.get_or_intern(dictionary_builder))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.directories.entry(virtual_path).or_default();
|
||||
|
||||
self.directories
|
||||
.entry(virtual_parent)
|
||||
.or_default()
|
||||
.insert(storage::File::Directory(virtual_path));
|
||||
}
|
||||
|
||||
pub fn add_song(&mut self, dictionary_builder: &mut dictionary::Builder, song: &scanner::Song) {
|
||||
let Some(virtual_path) = (&song.virtual_path).get_or_intern(dictionary_builder) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(virtual_parent) = song
|
||||
.virtual_path
|
||||
.parent()
|
||||
.and_then(|p| p.get_or_intern(dictionary_builder))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.directories
|
||||
.entry(virtual_parent)
|
||||
.or_default()
|
||||
.insert(storage::File::Song(virtual_path));
|
||||
|
||||
self.flattened.push(
|
||||
song.virtual_path
|
||||
.components()
|
||||
.map(|c| dictionary_builder.get_or_intern(c.as_os_str().to_str().unwrap()))
|
||||
.collect::<TinyVec<[lasso2::Spur; 8]>>(),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn build(self) -> Browser {
|
||||
Browser {
|
||||
directories: self.directories,
|
||||
flattened: self.flattened.build(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use std::collections::HashSet;
    use std::path::PathBuf;

    use super::*;

    /// Builds a Browser and Dictionary from a set of song paths, registering
    /// every ancestor directory of each song along the way.
    fn setup_test(songs: HashSet<PathBuf>) -> (Browser, Dictionary) {
        let mut dictionary_builder = dictionary::Builder::default();
        let mut builder = Builder::default();

        // Every ancestor of every song's parent becomes a known directory.
        let directories = songs
            .iter()
            .flat_map(|k| k.parent().unwrap().ancestors())
            .collect::<HashSet<_>>();

        for directory in directories {
            builder.add_directory(
                &mut dictionary_builder,
                scanner::Directory {
                    virtual_path: directory.to_owned(),
                },
            );
        }

        for path in songs {
            let mut song = scanner::Song::default();
            song.virtual_path = path.clone();
            builder.add_song(&mut dictionary_builder, &song);
        }

        let browser = builder.build();
        let dictionary = dictionary_builder.build();

        (browser, dictionary)
    }

    #[test]
    fn can_browse_top_level() {
        let (browser, strings) = setup_test(HashSet::from([
            PathBuf::from_iter(["Music", "Iron Maiden", "Moonchild.mp3"]),
            PathBuf::from_iter(["Also Music", "Iron Maiden", "The Prisoner.mp3"]),
        ]));
        let files = browser.browse(&strings, PathBuf::new()).unwrap();
        assert_eq!(
            files[..],
            [
                File::Directory(PathBuf::from_iter(["Also Music"])),
                File::Directory(PathBuf::from_iter(["Music"])),
            ]
        );
    }

    #[test]
    fn browse_skips_redundant_top_level() {
        // A root containing a single directory lists that directory's
        // content instead of the lone directory entry.
        let (browser, strings) = setup_test(HashSet::from([PathBuf::from_iter([
            "Music",
            "Iron Maiden",
            "Moonchild.mp3",
        ])]));
        let files = browser.browse(&strings, PathBuf::new()).unwrap();
        assert_eq!(
            files[..],
            [File::Directory(PathBuf::from_iter([
                "Music",
                "Iron Maiden"
            ])),]
        );
    }

    #[test]
    fn can_browse_directory() {
        let artist_directory = PathBuf::from_iter(["Music", "Iron Maiden"]);

        let (browser, strings) = setup_test(HashSet::from([
            artist_directory.join("Infinite Dreams.mp3"),
            artist_directory.join("Moonchild.mp3"),
        ]));

        let files = browser.browse(&strings, artist_directory.clone()).unwrap();

        assert_eq!(
            files,
            [
                File::Song(artist_directory.join("Infinite Dreams.mp3")),
                File::Song(artist_directory.join("Moonchild.mp3"))
            ]
        );
    }

    #[test]
    fn browse_entries_are_sorted() {
        // Directories come first; songs follow in locale-aware order
        // (accented and lowercase names sort naturally).
        let (browser, strings) = setup_test(HashSet::from([
            PathBuf::from_iter(["Ott", "Mir.mp3"]),
            PathBuf::from("Helios.mp3"),
            PathBuf::from("asura.mp3"),
            PathBuf::from("à la maison.mp3"),
        ]));

        let files = browser.browse(&strings, PathBuf::new()).unwrap();

        assert_eq!(
            files,
            [
                File::Directory(PathBuf::from("Ott")),
                File::Song(PathBuf::from("à la maison.mp3")),
                File::Song(PathBuf::from("asura.mp3")),
                File::Song(PathBuf::from("Helios.mp3")),
            ]
        );
    }

    #[test]
    fn can_flatten_root() {
        let song_a = PathBuf::from_iter(["Music", "Electronic", "Papua New Guinea.mp3"]);
        let song_b = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);
        let song_c = PathBuf::from_iter(["Music", "Metal", "No Turning Back.mp3"]);

        let (browser, strings) = setup_test(HashSet::from([
            song_a.clone(),
            song_b.clone(),
            song_c.clone(),
        ]));

        let files = browser.flatten(&strings, PathBuf::new()).unwrap();

        assert_eq!(files, [song_a, song_b, song_c]);
    }

    #[test]
    fn can_flatten_directory() {
        let electronic = PathBuf::from_iter(["Music", "Electronic"]);
        let song_a = electronic.join(PathBuf::from_iter(["FSOL", "Papua New Guinea.mp3"]));
        let song_b = electronic.join(PathBuf::from_iter(["Kraftwerk", "Autobahn.mp3"]));
        let song_c = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);

        let (browser, strings) = setup_test(HashSet::from([
            song_a.clone(),
            song_b.clone(),
            song_c.clone(),
        ]));

        let files = browser.flatten(&strings, electronic).unwrap();

        assert_eq!(files, [song_a, song_b]);
    }

    #[test]
    fn flatten_entries_are_sorted() {
        let (browser, strings) = setup_test(HashSet::from([
            PathBuf::from_iter(["Ott", "Mir.mp3"]),
            PathBuf::from("Helios.mp3"),
            PathBuf::from("à la maison.mp3.mp3"),
            PathBuf::from("asura.mp3"),
        ]));

        let files = browser.flatten(&strings, PathBuf::new()).unwrap();

        assert_eq!(
            files,
            [
                PathBuf::from("à la maison.mp3.mp3"),
                PathBuf::from("asura.mp3"),
                PathBuf::from("Helios.mp3"),
                PathBuf::from_iter(["Ott", "Mir.mp3"]),
            ]
        );
    }

    #[test]
    fn can_flatten_directory_with_shared_prefix() {
        // "Leviathan II" is a string prefix of "Leviathan III"; flatten must
        // match whole path components, not string prefixes.
        let directory_a = PathBuf::from_iter(["Music", "Therion", "Leviathan II"]);
        let directory_b = PathBuf::from_iter(["Music", "Therion", "Leviathan III"]);
        let song_a = directory_a.join("Pazuzu.mp3");
        let song_b = directory_b.join("Ninkigal.mp3");

        let (browser, strings) = setup_test(HashSet::from([song_a.clone(), song_b.clone()]));

        let files = browser.flatten(&strings, directory_a).unwrap();

        assert_eq!(files, [song_a]);
    }
}
|
1116
src/app/index/collection.rs
Normal file
1116
src/app/index/collection.rs
Normal file
File diff suppressed because it is too large
Load diff
110
src/app/index/dictionary.rs
Normal file
110
src/app/index/dictionary.rs
Normal file
|
@ -0,0 +1,110 @@
|
|||
use std::{cmp::Ordering, collections::HashMap};
|
||||
|
||||
use icu_collator::{Collator, CollatorOptions, Strength};
|
||||
use lasso2::{Rodeo, RodeoReader, Spur};
|
||||
use rayon::slice::ParallelSliceMut;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Normalizes a string for fuzzy matching: strips common separator
/// characters (space, underscore, dash, apostrophe) and lowercases
/// the remainder.
// TODO merge inconsistent diacritic usage
pub fn sanitize(s: &str) -> String {
	let stripped: String = s
		.chars()
		.filter(|c| !matches!(c, ' ' | '_' | '-' | '\''))
		.collect();
	stripped.to_lowercase()
}
|
||||
|
||||
pub fn make_collator() -> Collator {
|
||||
let options = {
|
||||
let mut o = CollatorOptions::new();
|
||||
o.strength = Some(Strength::Secondary);
|
||||
o
|
||||
};
|
||||
Collator::try_new(&Default::default(), options).unwrap()
|
||||
}
|
||||
|
||||
/// Read-only store of every string interned while scanning the collection,
/// plus precomputed data for ordering and canonical (sanitized) lookups.
#[derive(Serialize, Deserialize)]
pub struct Dictionary {
	strings: RodeoReader,          // Interned strings
	canon: HashMap<String, Spur>,  // Canonical representation of similar strings
	sort_keys: HashMap<Spur, u32>, // All spurs sorted against each other
}
|
||||
|
||||
impl Dictionary {
|
||||
pub fn get<S: AsRef<str>>(&self, string: S) -> Option<Spur> {
|
||||
self.strings.get(string)
|
||||
}
|
||||
|
||||
pub fn get_canon<S: AsRef<str>>(&self, string: S) -> Option<Spur> {
|
||||
self.canon.get(&sanitize(string.as_ref())).copied()
|
||||
}
|
||||
|
||||
pub fn resolve(&self, spur: &Spur) -> &str {
|
||||
self.strings.resolve(spur)
|
||||
}
|
||||
|
||||
pub fn cmp(&self, a: &Spur, b: &Spur) -> Ordering {
|
||||
self.sort_keys
|
||||
.get(a)
|
||||
.copied()
|
||||
.unwrap_or_default()
|
||||
.cmp(&self.sort_keys.get(b).copied().unwrap_or_default())
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Dictionary {
	/// An empty dictionary: no interned strings, no canon map, no sort keys.
	fn default() -> Self {
		Self {
			strings: Rodeo::default().into_reader(),
			canon: Default::default(),
			sort_keys: Default::default(),
		}
	}
}
|
||||
|
||||
/// Mutable accumulator of interned strings used while scanning;
/// converted into an immutable `Dictionary` by `build`.
#[derive(Clone, Default)]
pub struct Builder {
	strings: Rodeo,               // Strings interned so far
	canon: HashMap<String, Spur>, // sanitized string -> representative spur
}
|
||||
|
||||
impl Builder {
|
||||
pub fn build(self) -> Dictionary {
|
||||
let mut sorted_spurs = self.strings.iter().collect::<Vec<_>>();
|
||||
// TODO this is too slow!
|
||||
sorted_spurs.par_sort_unstable_by(|(_, a), (_, b)| {
|
||||
let collator = make_collator();
|
||||
collator.compare(a, b)
|
||||
});
|
||||
|
||||
let sort_keys = sorted_spurs
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, (spur, _))| (spur, i as u32))
|
||||
.collect();
|
||||
|
||||
Dictionary {
|
||||
strings: self.strings.into_reader(),
|
||||
canon: self.canon,
|
||||
sort_keys,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_or_intern<S: AsRef<str>>(&mut self, string: S) -> Spur {
|
||||
self.strings.get_or_intern(string)
|
||||
}
|
||||
|
||||
pub fn get_or_intern_canon<S: AsRef<str>>(&mut self, string: S) -> Option<Spur> {
|
||||
let cleaned = sanitize(string.as_ref());
|
||||
match cleaned.is_empty() {
|
||||
true => None,
|
||||
false => Some(
|
||||
self.canon
|
||||
.entry(cleaned)
|
||||
.or_insert_with(|| self.strings.get_or_intern(string.as_ref()))
|
||||
.to_owned(),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,328 +0,0 @@
|
|||
use anyhow::*;
|
||||
use ape;
|
||||
use id3;
|
||||
use lewton::inside_ogg::OggStreamReader;
|
||||
use log::error;
|
||||
use metaflac;
|
||||
use mp3_duration;
|
||||
use mp4ameta;
|
||||
use opus_headers;
|
||||
use regex::Regex;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::utils;
|
||||
use crate::utils::AudioFormat;
|
||||
|
||||
/// Metadata extracted from an audio file's tags.
#[derive(Debug, Clone, PartialEq)]
pub struct SongTags {
	pub disc_number: Option<u32>,
	pub track_number: Option<u32>,
	pub title: Option<String>,
	pub duration: Option<u32>, // Seconds
	pub artist: Option<String>,
	pub album_artist: Option<String>,
	pub album: Option<String>,
	pub year: Option<i32>,
	pub has_artwork: bool, // True when the tag embeds at least one picture
}
|
||||
|
||||
pub fn read(path: &Path) -> Option<SongTags> {
|
||||
let data = match utils::get_audio_format(path) {
|
||||
Some(AudioFormat::APE) => Some(read_ape(path)),
|
||||
Some(AudioFormat::FLAC) => Some(read_flac(path)),
|
||||
Some(AudioFormat::MP3) => Some(read_id3(path)),
|
||||
Some(AudioFormat::MP4) => Some(read_mp4(path)),
|
||||
Some(AudioFormat::MPC) => Some(read_ape(path)),
|
||||
Some(AudioFormat::OGG) => Some(read_vorbis(path)),
|
||||
Some(AudioFormat::OPUS) => Some(read_opus(path)),
|
||||
None => None,
|
||||
};
|
||||
match data {
|
||||
Some(Ok(d)) => Some(d),
|
||||
Some(Err(e)) => {
|
||||
error!("Error while reading file metadata for '{:?}': {}", path, e);
|
||||
None
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn read_id3(path: &Path) -> Result<SongTags> {
|
||||
let tag = {
|
||||
match id3::Tag::read_from_path(&path) {
|
||||
Ok(t) => Ok(t),
|
||||
Err(e) => {
|
||||
if let Some(t) = e.partial_tag {
|
||||
Ok(t)
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}?
|
||||
};
|
||||
let duration = {
|
||||
mp3_duration::from_path(&path)
|
||||
.map(|d| d.as_secs() as u32)
|
||||
.ok()
|
||||
};
|
||||
|
||||
let artist = tag.artist().map(|s| s.to_string());
|
||||
let album_artist = tag.album_artist().map(|s| s.to_string());
|
||||
let album = tag.album().map(|s| s.to_string());
|
||||
let title = tag.title().map(|s| s.to_string());
|
||||
let disc_number = tag.disc();
|
||||
let track_number = tag.track();
|
||||
let year = tag
|
||||
.year()
|
||||
.map(|y| y as i32)
|
||||
.or_else(|| tag.date_released().and_then(|d| Some(d.year)))
|
||||
.or_else(|| tag.date_recorded().and_then(|d| Some(d.year)));
|
||||
let has_artwork = tag.pictures().count() > 0;
|
||||
|
||||
Ok(SongTags {
|
||||
artist,
|
||||
album_artist,
|
||||
album,
|
||||
title,
|
||||
duration,
|
||||
disc_number,
|
||||
track_number,
|
||||
year,
|
||||
has_artwork,
|
||||
})
|
||||
}
|
||||
|
||||
/// Extracts the payload of an APE tag item when it is a text item.
fn read_ape_string(item: &ape::Item) -> Option<String> {
	if let ape::ItemValue::Text(ref text) = item.value {
		Some(text.clone())
	} else {
		None
	}
}
|
||||
|
||||
/// Parses an APE text item as a signed integer.
fn read_ape_i32(item: &ape::Item) -> Option<i32> {
	if let ape::ItemValue::Text(ref text) = item.value {
		text.parse::<i32>().ok()
	} else {
		None
	}
}
|
||||
|
||||
fn read_ape_x_of_y(item: &ape::Item) -> Option<u32> {
|
||||
match item.value {
|
||||
ape::ItemValue::Text(ref s) => {
|
||||
let format = Regex::new(r#"^\d+"#).unwrap();
|
||||
if let Some(m) = format.find(s) {
|
||||
s[m.start()..m.end()].parse().ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn read_ape(path: &Path) -> Result<SongTags> {
|
||||
let tag = ape::read(path)?;
|
||||
let artist = tag.item("Artist").and_then(read_ape_string);
|
||||
let album = tag.item("Album").and_then(read_ape_string);
|
||||
let album_artist = tag.item("Album artist").and_then(read_ape_string);
|
||||
let title = tag.item("Title").and_then(read_ape_string);
|
||||
let year = tag.item("Year").and_then(read_ape_i32);
|
||||
let disc_number = tag.item("Disc").and_then(read_ape_x_of_y);
|
||||
let track_number = tag.item("Track").and_then(read_ape_x_of_y);
|
||||
Ok(SongTags {
|
||||
artist,
|
||||
album_artist,
|
||||
album,
|
||||
title,
|
||||
duration: None,
|
||||
disc_number,
|
||||
track_number,
|
||||
year,
|
||||
has_artwork: false,
|
||||
})
|
||||
}
|
||||
|
||||
/// Reads tags from an Ogg Vorbis file's comment header.
fn read_vorbis(path: &Path) -> Result<SongTags> {
	let file = fs::File::open(path)?;
	let source = OggStreamReader::new(file)?;

	// Start from an empty tag set; fields are filled from comments below.
	let mut tags = SongTags {
		artist: None,
		album_artist: None,
		album: None,
		title: None,
		duration: None,
		disc_number: None,
		track_number: None,
		year: None,
		has_artwork: false,
	};

	// Keys are dispatched through `utils::match_ignore_case!`, which by its
	// name matches keys case-insensitively — confirm in utils if relying on it.
	// Later occurrences of a key overwrite earlier ones.
	for (key, value) in source.comment_hdr.comment_list {
		utils::match_ignore_case! {
			match key {
				"TITLE" => tags.title = Some(value),
				"ALBUM" => tags.album = Some(value),
				"ARTIST" => tags.artist = Some(value),
				"ALBUMARTIST" => tags.album_artist = Some(value),
				"TRACKNUMBER" => tags.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => tags.disc_number = value.parse::<u32>().ok(),
				"DATE" => tags.year = value.parse::<i32>().ok(),
				_ => (),
			}
		}
	}

	Ok(tags)
}
|
||||
|
||||
/// Reads tags from an Opus file's header comments.
fn read_opus(path: &Path) -> Result<SongTags> {
	let headers = opus_headers::parse_from_path(path)?;

	// Start from an empty tag set; fields are filled from comments below.
	let mut tags = SongTags {
		artist: None,
		album_artist: None,
		album: None,
		title: None,
		duration: None,
		disc_number: None,
		track_number: None,
		year: None,
		has_artwork: false,
	};

	// Same key handling as read_vorbis: case-insensitive dispatch via the
	// `utils::match_ignore_case!` macro, later occurrences win.
	for (key, value) in headers.comments.user_comments {
		utils::match_ignore_case! {
			match key {
				"TITLE" => tags.title = Some(value),
				"ALBUM" => tags.album = Some(value),
				"ARTIST" => tags.artist = Some(value),
				"ALBUMARTIST" => tags.album_artist = Some(value),
				"TRACKNUMBER" => tags.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => tags.disc_number = value.parse::<u32>().ok(),
				"DATE" => tags.year = value.parse::<i32>().ok(),
				_ => (),
			}
		}
	}

	Ok(tags)
}
|
||||
|
||||
fn read_flac(path: &Path) -> Result<SongTags> {
|
||||
let tag = metaflac::Tag::read_from_path(path)?;
|
||||
let vorbis = tag
|
||||
.vorbis_comments()
|
||||
.ok_or(anyhow!("Missing Vorbis comments"))?;
|
||||
let disc_number = vorbis
|
||||
.get("DISCNUMBER")
|
||||
.and_then(|d| d[0].parse::<u32>().ok());
|
||||
let year = vorbis.get("DATE").and_then(|d| d[0].parse::<i32>().ok());
|
||||
let mut streaminfo = tag.get_blocks(metaflac::BlockType::StreamInfo);
|
||||
let duration = match streaminfo.next() {
|
||||
Some(&metaflac::Block::StreamInfo(ref s)) => {
|
||||
Some((s.total_samples as u32 / s.sample_rate) as u32)
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
let has_artwork = tag.pictures().count() > 0;
|
||||
|
||||
Ok(SongTags {
|
||||
artist: vorbis.artist().map(|v| v[0].clone()),
|
||||
album_artist: vorbis.album_artist().map(|v| v[0].clone()),
|
||||
album: vorbis.album().map(|v| v[0].clone()),
|
||||
title: vorbis.title().map(|v| v[0].clone()),
|
||||
duration,
|
||||
disc_number,
|
||||
track_number: vorbis.track(),
|
||||
year,
|
||||
has_artwork,
|
||||
})
|
||||
}
|
||||
|
||||
fn read_mp4(path: &Path) -> Result<SongTags> {
|
||||
let mut tag = mp4ameta::Tag::read_from_path(path)?;
|
||||
|
||||
Ok(SongTags {
|
||||
artist: tag.take_artist(),
|
||||
album_artist: tag.take_album_artist(),
|
||||
album: tag.take_album(),
|
||||
title: tag.take_title(),
|
||||
duration: tag.duration().map(|v| v as u32),
|
||||
disc_number: tag.disc_number().map(|d| d as u32),
|
||||
track_number: tag.track_number().map(|d| d as u32),
|
||||
year: tag.year().and_then(|v| v.parse::<i32>().ok()),
|
||||
has_artwork: tag.artwork().is_some(),
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
fn reads_file_metadata() {
	// All sample files carry identical tag values; only the measured
	// duration differs per container (the samples are near-zero length,
	// hence Some(0) where duration is measurable and None where it is not).
	let sample_tags = SongTags {
		disc_number: Some(3),
		track_number: Some(1),
		title: Some("TEST TITLE".into()),
		artist: Some("TEST ARTIST".into()),
		album_artist: Some("TEST ALBUM ARTIST".into()),
		album: Some("TEST ALBUM".into()),
		duration: None,
		year: Some(2016),
		has_artwork: false,
	};
	let flac_sample_tag = SongTags {
		duration: Some(0),
		..sample_tags.clone()
	};
	let mp3_sample_tag = SongTags {
		duration: Some(0),
		..sample_tags.clone()
	};
	let m4a_sample_tag = SongTags {
		duration: Some(0),
		..sample_tags.clone()
	};
	assert_eq!(
		read(Path::new("test-data/formats/sample.mp3")).unwrap(),
		mp3_sample_tag
	);
	assert_eq!(
		read(Path::new("test-data/formats/sample.ogg")).unwrap(),
		sample_tags
	);
	assert_eq!(
		read(Path::new("test-data/formats/sample.flac")).unwrap(),
		flac_sample_tag
	);
	assert_eq!(
		read(Path::new("test-data/formats/sample.m4a")).unwrap(),
		m4a_sample_tag
	);
	assert_eq!(
		read(Path::new("test-data/formats/sample.opus")).unwrap(),
		sample_tags
	);
	assert_eq!(
		read(Path::new("test-data/formats/sample.ape")).unwrap(),
		sample_tags
	);
}
|
||||
|
||||
#[test]
fn reads_embedded_artwork() {
	// Each sample file embeds a cover picture; `read` must report it.
	let samples = [
		"test-data/artwork/sample.mp3",
		"test-data/artwork/sample.flac",
		"test-data/artwork/sample.m4a",
	];
	for sample in &samples {
		assert!(read(Path::new(sample)).unwrap().has_artwork);
	}
}
|
|
@ -1,88 +0,0 @@
|
|||
use diesel;
|
||||
use log::error;
|
||||
use std::sync::{Arc, Condvar, Mutex};
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::app::{settings, vfs};
|
||||
use crate::db::DB;
|
||||
|
||||
mod metadata;
|
||||
mod query;
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
mod types;
|
||||
mod update;
|
||||
|
||||
pub use self::query::*;
|
||||
pub use self::types::*;
|
||||
pub use self::update::*;
|
||||
|
||||
/// Handle to the music collection index. Cheap to clone: clones share the
/// same pending-reindex flag/condvar pair, so any clone can wake the
/// background worker.
#[derive(Clone)]
pub struct Index {
	db: DB,
	vfs_manager: vfs::Manager,
	settings_manager: settings::Manager,
	// Shared flag ("a reindex was requested") plus the condvar used to wake
	// the worker thread that services requests.
	pending_reindex: Arc<(Mutex<bool>, Condvar)>,
}
|
||||
|
||||
impl Index {
	/// Creates the index and spawns the background thread that services
	/// reindex requests for the lifetime of the process.
	pub fn new(db: DB, vfs_manager: vfs::Manager, settings_manager: settings::Manager) -> Self {
		let index = Self {
			db,
			vfs_manager,
			settings_manager,
			pending_reindex: Arc::new((Mutex::new(false), Condvar::new())),
		};

		// The clone shares `pending_reindex`, so triggers on any handle wake
		// this worker.
		let commands_index = index.clone();
		std::thread::spawn(move || {
			commands_index.process_commands();
		});

		index
	}

	/// Requests a reindex: sets the shared flag and wakes the worker.
	/// Requests made while the worker is busy coalesce into a single run.
	pub fn trigger_reindex(&self) {
		let (lock, cvar) = &*self.pending_reindex;
		let mut pending_reindex = lock.lock().unwrap();
		*pending_reindex = true;
		cvar.notify_one();
	}

	/// Spawns the thread that periodically triggers reindexing.
	pub fn begin_periodic_updates(&self) {
		let auto_index = self.clone();
		std::thread::spawn(move || {
			auto_index.automatic_reindex();
		});
	}

	/// Worker loop: block until a reindex is requested, clear the flag, run
	/// the update, repeat forever.
	fn process_commands(&self) {
		loop {
			{
				// Inner scope releases the lock before the (potentially long)
				// update runs, so triggers are never blocked by an update.
				let (lock, cvar) = &*self.pending_reindex;
				let mut pending = lock.lock().unwrap();
				while !*pending {
					// `while` guards against spurious condvar wakeups.
					pending = cvar.wait(pending).unwrap();
				}
				*pending = false;
			}
			if let Err(e) = self.update() {
				error!("Error while updating index: {}", e);
			}
		}
	}

	/// Periodic loop: trigger a reindex, then sleep for the configured
	/// interval (falls back to 1800s when settings cannot be read).
	fn automatic_reindex(&self) {
		loop {
			self.trigger_reindex();
			let sleep_duration = self
				.settings_manager
				.get_index_sleep_duration()
				.unwrap_or_else(|e| {
					error!("Could not retrieve index sleep duration: {}", e);
					Duration::from_secs(1800)
				});
			std::thread::sleep(sleep_duration);
		}
	}
}
|
|
@ -1,202 +1,478 @@
|
|||
use anyhow::*;
|
||||
use diesel;
|
||||
use diesel::dsl::sql;
|
||||
use diesel::prelude::*;
|
||||
use diesel::sql_types;
|
||||
use std::path::Path;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use super::*;
|
||||
use crate::db::{directories, songs};
|
||||
use chumsky::{
|
||||
error::Simple,
|
||||
prelude::{choice, end, filter, just, none_of, recursive},
|
||||
text::{int, keyword, whitespace, TextParser},
|
||||
Parser,
|
||||
};
|
||||
use enum_map::Enum;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
pub enum QueryError {
|
||||
#[error("VFS path not found")]
|
||||
VFSPathNotFound,
|
||||
#[error("Unspecified")]
|
||||
Unspecified,
|
||||
/// Text-valued song attributes a search query can filter on.
#[derive(Clone, Copy, Debug, Deserialize, Enum, Eq, Hash, PartialEq, Serialize)]
pub enum TextField {
	Album,
	AlbumArtist,
	Artist,
	Composer,
	Genre,
	Label,
	Lyricist,
	Path,
	Title,
}
|
||||
|
||||
impl From<anyhow::Error> for QueryError {
|
||||
fn from(_: anyhow::Error) -> Self {
|
||||
QueryError::Unspecified
|
||||
}
|
||||
/// Comparison operators for text fields: `=` parses to `Eq` (exact match)
/// and `%` parses to `Like` (substring-style match).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum TextOp {
	Eq,
	Like,
}
|
||||
|
||||
// Exposes SQLite's RANDOM() to the Diesel DSL (used to order random albums).
no_arg_sql_function!(
	random,
	sql_types::Integer,
	"Represents the SQL RANDOM() function"
);
|
||||
|
||||
impl Index {
|
||||
	/// Lists the contents of `virtual_path`: directories first, then songs.
	/// An empty path lists the top-level mount points.
	pub fn browse<P>(&self, virtual_path: P) -> Result<Vec<CollectionFile>, QueryError>
	where
		P: AsRef<Path>,
	{
		let mut output = Vec::new();
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;

		if virtual_path.as_ref().components().count() == 0 {
			// Browse top-level
			let real_directories: Vec<Directory> = directories::table
				.filter(directories::parent.is_null())
				.load(&connection)
				.map_err(anyhow::Error::new)?;
			// Entries that no longer map to a mount point are silently dropped.
			let virtual_directories = real_directories
				.into_iter()
				.filter_map(|d| d.virtualize(&vfs));
			output.extend(virtual_directories.map(CollectionFile::Directory));
		} else {
			// Browse sub-directory
			let real_path = vfs
				.virtual_to_real(virtual_path)
				.map_err(|_| QueryError::VFSPathNotFound)?;
			let real_path_string = real_path.as_path().to_string_lossy().into_owned();

			// Case-insensitive ordering via SQLite's NOCASE collation.
			let real_directories: Vec<Directory> = directories::table
				.filter(directories::parent.eq(&real_path_string))
				.order(sql::<sql_types::Bool>("path COLLATE NOCASE ASC"))
				.load(&connection)
				.map_err(anyhow::Error::new)?;
			let virtual_directories = real_directories
				.into_iter()
				.filter_map(|d| d.virtualize(&vfs));
			output.extend(virtual_directories.map(CollectionFile::Directory));

			let real_songs: Vec<Song> = songs::table
				.filter(songs::parent.eq(&real_path_string))
				.order(sql::<sql_types::Bool>("path COLLATE NOCASE ASC"))
				.load(&connection)
				.map_err(anyhow::Error::new)?;
			let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
			output.extend(virtual_songs.map(CollectionFile::Song));
		}

		Ok(output)
	}
|
||||
|
||||
	/// Returns every song under `virtual_path`, recursively, ordered by path.
	/// The root path (no parent) returns the entire collection.
	pub fn flatten<P>(&self, virtual_path: P) -> Result<Vec<Song>, QueryError>
	where
		P: AsRef<Path>,
	{
		use self::songs::dsl::*;
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;

		// A path with a parent is a real sub-directory; the root has none.
		let real_songs: Vec<Song> = if virtual_path.as_ref().parent() != None {
			let real_path = vfs
				.virtual_to_real(virtual_path)
				.map_err(|_| QueryError::VFSPathNotFound)?;
			// Build a SQL LIKE pattern of the form "<real_path>/%" by pushing
			// "%" as a path component.
			// NOTE(review): a bare prefix LIKE also matches sibling directories
			// that share the prefix (e.g. "Foo" vs "Foo Bar") — verify intent.
			let song_path_filter = {
				let mut path_buf = real_path.clone();
				path_buf.push("%");
				path_buf.as_path().to_string_lossy().into_owned()
			};
			songs
				.filter(path.like(&song_path_filter))
				.order(path)
				.load(&connection)
				.map_err(anyhow::Error::new)?
		} else {
			songs
				.order(path)
				.load(&connection)
				.map_err(anyhow::Error::new)?
		};

		let virtual_songs = real_songs.into_iter().filter_map(|s| s.virtualize(&vfs));
		Ok(virtual_songs.collect::<Vec<_>>())
	}
|
||||
|
||||
	/// Returns up to `count` album directories in random order
	/// (uses SQLite's RANDOM() via the `random` helper above).
	pub fn get_random_albums(&self, count: i64) -> Result<Vec<Directory>> {
		use self::directories::dsl::*;
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;
		let real_directories: Vec<Directory> = directories
			.filter(album.is_not_null())
			.limit(count)
			.order(random)
			.load(&connection)?;
		let virtual_directories = real_directories
			.into_iter()
			.filter_map(|d| d.virtualize(&vfs));
		Ok(virtual_directories.collect::<Vec<_>>())
	}
|
||||
|
||||
	/// Returns up to `count` album directories, most recently added first.
	pub fn get_recent_albums(&self, count: i64) -> Result<Vec<Directory>> {
		use self::directories::dsl::*;
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;
		let real_directories: Vec<Directory> = directories
			.filter(album.is_not_null())
			.order(date_added.desc())
			.limit(count)
			.load(&connection)?;
		let virtual_directories = real_directories
			.into_iter()
			.filter_map(|d| d.virtualize(&vfs));
		Ok(virtual_directories.collect::<Vec<_>>())
	}
|
||||
|
||||
	/// Substring search across directories and songs.
	///
	/// The `parent.not_like` filters exclude entries whose parent already
	/// matches, so only the topmost matching node of each subtree is returned.
	pub fn search(&self, query: &str) -> Result<Vec<CollectionFile>> {
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;
		// NOTE(review): `%`/`_` in the user query are not escaped and act as
		// LIKE wildcards here.
		let like_test = format!("%{}%", query);
		let mut output = Vec::new();

		// Find dirs with matching path and parent not matching
		{
			use self::directories::dsl::*;
			let real_directories: Vec<Directory> = directories
				.filter(path.like(&like_test))
				.filter(parent.not_like(&like_test))
				.load(&connection)?;

			let virtual_directories = real_directories
				.into_iter()
				.filter_map(|d| d.virtualize(&vfs));

			output.extend(virtual_directories.map(CollectionFile::Directory));
		}

		// Find songs with matching title/album/artist and non-matching parent
		{
			use self::songs::dsl::*;
			let real_songs: Vec<Song> = songs
				.filter(
					path.like(&like_test)
						.or(title.like(&like_test))
						.or(album.like(&like_test))
						.or(artist.like(&like_test))
						.or(album_artist.like(&like_test)),
				)
				.filter(parent.not_like(&like_test))
				.load(&connection)?;

			let virtual_songs = real_songs.into_iter().filter_map(|d| d.virtualize(&vfs));

			output.extend(virtual_songs.map(CollectionFile::Song));
		}

		Ok(output)
	}
|
||||
|
||||
	/// Fetches a single song by its virtual path.
	/// Errors if the path has no VFS mapping or no matching database row.
	pub fn get_song(&self, virtual_path: &Path) -> Result<Song> {
		let vfs = self.vfs_manager.get_vfs()?;
		let connection = self.db.connect()?;

		let real_path = vfs.virtual_to_real(virtual_path)?;
		let real_path_string = real_path.as_path().to_string_lossy();

		use self::songs::dsl::*;
		let real_song: Song = songs
			.filter(path.eq(real_path_string))
			.get_result(&connection)?;

		// A song can lose its mapping if mount points changed since indexing.
		match real_song.virtualize(&vfs) {
			Some(s) => Ok(s),
			_ => bail!("Missing VFS mapping"),
		}
	}
|
||||
/// Numeric song attributes a search query can filter on.
#[derive(Clone, Copy, Debug, Deserialize, Enum, Eq, Hash, PartialEq, Serialize)]
pub enum NumberField {
	DiscNumber,
	TrackNumber,
	Year,
}

/// Comparison operators for numeric fields (`=`, `>`, `>=`, `<`, `<=`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum NumberOp {
	Eq,
	Greater,
	GreaterOrEq,
	Less,
	LessOrEq,
}

/// A bare literal in a query; numbers parse as `Number`, anything else as `Text`.
#[derive(Debug, Eq, PartialEq)]
pub enum Literal {
	Text(String),
	Number(i32),
}

/// Boolean connectives (`&&`, `||`, `!!`) combining two sub-expressions.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum BoolOp {
	And,
	Or,
	Not,
}

/// Parsed search expression tree produced by `make_parser`.
#[derive(Debug, Eq, PartialEq)]
pub enum Expr {
	Fuzzy(Literal),                        // Bare term matched against several fields
	TextCmp(TextField, TextOp, String),    // e.g. album % "lands"
	NumberCmp(NumberField, NumberOp, i32), // e.g. year >= 1999
	Combined(Box<Expr>, BoolOp, Box<Expr>),
}
|
||||
|
||||
/// Builds the chumsky parser turning a search string into an `Expr`.
///
/// Informal grammar:
///   expr     := combined (whitespace combined)*   -- adjacency means AND
///   combined := atom (boolOp atom)*               -- left-folded
///   atom     := filter | '(' expr ')'
///   filter   := textCmp | numberCmp | fuzzy
pub fn make_parser() -> impl Parser<char, Expr, Error = Simple<char>> {
	recursive(|expr| {
		// Double-quoted string; quotes stripped, no escape sequences.
		let quoted_str = just('"')
			.ignore_then(none_of('"').repeated().collect::<String>())
			.then_ignore(just('"'));

		// Characters that terminate a bare (unquoted) word.
		let symbols = r#"()<>"|&=!"#.chars().collect::<HashSet<_>>();

		let raw_str = filter(move |c: &char| !c.is_whitespace() && !symbols.contains(c))
			.repeated()
			.at_least(1)
			.collect::<String>();

		let str_ = choice((quoted_str, raw_str)).padded();

		// NOTE(review): `unwrapped` panics if the integer literal overflows i32.
		let number = int(10).from_str().unwrapped().padded();

		let text_field = choice((
			keyword("album").to(TextField::Album),
			keyword("albumartist").to(TextField::AlbumArtist),
			keyword("artist").to(TextField::Artist),
			keyword("composer").to(TextField::Composer),
			keyword("genre").to(TextField::Genre),
			keyword("label").to(TextField::Label),
			keyword("lyricist").to(TextField::Lyricist),
			keyword("path").to(TextField::Path),
			keyword("title").to(TextField::Title),
		))
		.padded();

		let text_op = choice((just("=").to(TextOp::Eq), just("%").to(TextOp::Like))).padded();

		let text_cmp = text_field
			.then(text_op)
			.then(str_.clone())
			.map(|((a, b), c)| Expr::TextCmp(a, b, c));

		let number_field = choice((
			keyword("discnumber").to(NumberField::DiscNumber),
			keyword("tracknumber").to(NumberField::TrackNumber),
			keyword("year").to(NumberField::Year),
		))
		.padded();

		// Order matters: ">=" must be tried before ">" (same for "<=" / "<"),
		// otherwise ">" would match and leave "=" unconsumed.
		let number_op = choice((
			just("=").to(NumberOp::Eq),
			just(">=").to(NumberOp::GreaterOrEq),
			just(">").to(NumberOp::Greater),
			just("<=").to(NumberOp::LessOrEq),
			just("<").to(NumberOp::Less),
		))
		.padded();

		let number_cmp = number_field
			.then(number_op)
			.then(number)
			.map(|((a, b), c)| Expr::NumberCmp(a, b, c));

		// A bare term: numeric if it parses as an int, else text.
		let literal = choice((number.map(Literal::Number), str_.map(Literal::Text)));
		let fuzzy = literal.map(Expr::Fuzzy);

		// Structured comparisons are tried before the fuzzy fallback.
		let filter = choice((text_cmp, number_cmp, fuzzy));
		let atom = choice((filter, expr.delimited_by(just('('), just(')'))));

		let bool_op = choice((
			just("&&").to(BoolOp::And),
			just("||").to(BoolOp::Or),
			just("!!").to(BoolOp::Not),
		))
		.padded();

		// Explicit operators left-fold: a && b || c == ((a && b) || c).
		let combined = atom
			.clone()
			.then(bool_op.then(atom).repeated())
			.foldl(|a, (b, c)| Expr::Combined(Box::new(a), b, Box::new(c)));

		// Whitespace-separated expressions combine with an implicit AND.
		let implicit_and = combined
			.clone()
			.then(whitespace().ignore_then(combined).repeated())
			.foldl(|a: Expr, b: Expr| Expr::Combined(Box::new(a), BoolOp::And, Box::new(b)));

		implicit_and
	})
	.then_ignore(end())
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_fuzzy_query() {
|
||||
let parser = make_parser();
|
||||
assert_eq!(
|
||||
parser.parse(r#"rhapsody"#).unwrap(),
|
||||
Expr::Fuzzy(Literal::Text("rhapsody".to_owned())),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"2005"#).unwrap(),
|
||||
Expr::Fuzzy(Literal::Number(2005)),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_repeat_fuzzy_queries() {
|
||||
let parser = make_parser();
|
||||
assert_eq!(
|
||||
parser.parse(r#"rhapsody "of victory""#).unwrap(),
|
||||
Expr::Combined(
|
||||
Box::new(Expr::Fuzzy(Literal::Text("rhapsody".to_owned()))),
|
||||
BoolOp::And,
|
||||
Box::new(Expr::Fuzzy(Literal::Text("of victory".to_owned()))),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_mix_fuzzy_and_structured() {
|
||||
let parser = make_parser();
|
||||
assert_eq!(
|
||||
parser.parse(r#"rhapsody album % dragonflame"#).unwrap(),
|
||||
Expr::Combined(
|
||||
Box::new(Expr::Fuzzy(Literal::Text("rhapsody".to_owned()))),
|
||||
BoolOp::And,
|
||||
Box::new(Expr::TextCmp(
|
||||
TextField::Album,
|
||||
TextOp::Like,
|
||||
"dragonflame".to_owned()
|
||||
)),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_text_fields() {
|
||||
let parser = make_parser();
|
||||
assert_eq!(
|
||||
parser.parse(r#"album = "legendary tales""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Album, TextOp::Eq, "legendary tales".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"albumartist = "rhapsody""#).unwrap(),
|
||||
Expr::TextCmp(TextField::AlbumArtist, TextOp::Eq, "rhapsody".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"artist = "rhapsody""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Artist, TextOp::Eq, "rhapsody".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"composer = "yoko kanno""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Composer, TextOp::Eq, "yoko kanno".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"genre = "jazz""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Genre, TextOp::Eq, "jazz".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"label = "diverse system""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Label, TextOp::Eq, "diverse system".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"lyricist = "dalida""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Lyricist, TextOp::Eq, "dalida".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"path = "electronic/big beat""#).unwrap(),
|
||||
Expr::TextCmp(
|
||||
TextField::Path,
|
||||
TextOp::Eq,
|
||||
"electronic/big beat".to_owned()
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"title = "emerald sword""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Title, TextOp::Eq, "emerald sword".to_owned()),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_parse_text_operators() {
|
||||
let parser = make_parser();
|
||||
assert_eq!(
|
||||
parser.parse(r#"album = "legendary tales""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Album, TextOp::Eq, "legendary tales".to_owned()),
|
||||
);
|
||||
assert_eq!(
|
||||
parser.parse(r#"album % "legendary tales""#).unwrap(),
|
||||
Expr::TextCmp(TextField::Album, TextOp::Like, "legendary tales".to_owned()),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
fn can_parse_number_fields() {
	// Each numeric field keyword maps to its NumberField variant with an
	// integer literal on the right-hand side.
	let parser = make_parser();
	assert_eq!(
		parser.parse(r#"discnumber = 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::Eq, 6),
	);
	assert_eq!(
		parser.parse(r#"tracknumber = 12"#).unwrap(),
		Expr::NumberCmp(NumberField::TrackNumber, NumberOp::Eq, 12),
	);
	assert_eq!(
		parser.parse(r#"year = 1999"#).unwrap(),
		Expr::NumberCmp(NumberField::Year, NumberOp::Eq, 1999),
	);
}
|
||||
|
||||
#[test]
fn can_parse_number_operators() {
	// All five numeric comparison operators (=, >, >=, <, <=) parse to their
	// corresponding NumberOp variants.
	let parser = make_parser();
	assert_eq!(
		parser.parse(r#"discnumber = 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::Eq, 6),
	);
	assert_eq!(
		parser.parse(r#"discnumber > 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::Greater, 6),
	);
	assert_eq!(
		parser.parse(r#"discnumber >= 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::GreaterOrEq, 6),
	);
	assert_eq!(
		parser.parse(r#"discnumber < 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::Less, 6),
	);
	assert_eq!(
		parser.parse(r#"discnumber <= 6"#).unwrap(),
		Expr::NumberCmp(NumberField::DiscNumber, NumberOp::LessOrEq, 6),
	);
}
|
||||
|
||||
#[test]
fn can_use_and_operator() {
	// `&&` combines two comparisons into a Combined expression with BoolOp::And.
	// Note the bare word `lands` parses as a text literal without quotes.
	let parser = make_parser();

	assert_eq!(
		parser.parse(r#"album % lands && title % "sword""#).unwrap(),
		Expr::Combined(
			Box::new(Expr::TextCmp(
				TextField::Album,
				TextOp::Like,
				"lands".to_owned()
			)),
			BoolOp::And,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);
}
|
||||
|
||||
#[test]
fn can_use_or_operator() {
	// `||` combines two comparisons into a Combined expression with BoolOp::Or.
	let parser = make_parser();

	assert_eq!(
		parser.parse(r#"album % lands || title % "sword""#).unwrap(),
		Expr::Combined(
			Box::new(Expr::TextCmp(
				TextField::Album,
				TextOp::Like,
				"lands".to_owned()
			)),
			BoolOp::Or,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);
}
|
||||
|
||||
#[test]
fn can_use_not_operator() {
	// `!!` is a binary set-difference operator ("matches left but not right"),
	// parsed into a Combined expression with BoolOp::Not.
	let parser = make_parser();

	assert_eq!(
		parser.parse(r#"album % lands !! title % "sword""#).unwrap(),
		Expr::Combined(
			Box::new(Expr::TextCmp(
				TextField::Album,
				TextOp::Like,
				"lands".to_owned()
			)),
			BoolOp::Not,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);
}
|
||||
|
||||
#[test]
fn boolean_operators_share_precedence() {
	// Boolean operators are left-associative with equal precedence:
	// "a || b && c" groups as "(a || b) && c" and "a && b || c" as "(a && b) || c".
	let parser = make_parser();

	assert_eq!(
		parser
			.parse(r#"album % lands || album % tales && title % "sword""#)
			.unwrap(),
		Expr::Combined(
			Box::new(Expr::Combined(
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"lands".to_owned()
				)),
				BoolOp::Or,
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"tales".to_owned()
				))
			)),
			BoolOp::And,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);

	assert_eq!(
		parser
			.parse(r#"album % lands && album % tales || title % "sword""#)
			.unwrap(),
		Expr::Combined(
			Box::new(Expr::Combined(
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"lands".to_owned()
				)),
				BoolOp::And,
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"tales".to_owned()
				))
			)),
			BoolOp::Or,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);
}
|
||||
|
||||
#[test]
fn can_use_parenthesis_for_precedence() {
	// Parentheses override the default left-to-right grouping of boolean operators.
	let parser = make_parser();
	assert_eq!(
		parser
			.parse(r#"album % lands || (album % tales && title % sword)"#)
			.unwrap(),
		Expr::Combined(
			Box::new(Expr::TextCmp(
				TextField::Album,
				TextOp::Like,
				"lands".to_owned()
			)),
			BoolOp::Or,
			Box::new(Expr::Combined(
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"tales".to_owned()
				)),
				BoolOp::And,
				Box::new(Expr::TextCmp(
					TextField::Title,
					TextOp::Like,
					"sword".to_owned()
				)),
			))
		),
	);

	assert_eq!(
		parser
			.parse(r#"(album % lands || album % tales) && title % "sword""#)
			.unwrap(),
		Expr::Combined(
			Box::new(Expr::Combined(
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"lands".to_owned()
				)),
				BoolOp::Or,
				Box::new(Expr::TextCmp(
					TextField::Album,
					TextOp::Like,
					"tales".to_owned()
				))
			)),
			BoolOp::And,
			Box::new(Expr::TextCmp(
				TextField::Title,
				TextOp::Like,
				"sword".to_owned()
			))
		),
	);
}
|
||||
|
|
708
src/app/index/search.rs
Normal file
708
src/app/index/search.rs
Normal file
|
@ -0,0 +1,708 @@
|
|||
use chumsky::Parser;
|
||||
use enum_map::EnumMap;
|
||||
use lasso2::Spur;
|
||||
use nohash_hasher::IntSet;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{BTreeMap, HashMap};
|
||||
use tinyvec::TinyVec;
|
||||
|
||||
use crate::app::{
|
||||
index::{
|
||||
dictionary::Dictionary,
|
||||
query::{BoolOp, Expr, Literal, NumberField, NumberOp, TextField, TextOp},
|
||||
storage::SongKey,
|
||||
},
|
||||
scanner, Error,
|
||||
};
|
||||
|
||||
use super::{collection, dictionary::sanitize, query::make_parser, storage};
|
||||
|
||||
/// Serializable search index over the music collection. Built via [`Builder`]
/// and queried with [`Search::find_songs`].
#[derive(Serialize, Deserialize)]
pub struct Search {
	// One substring/exact-match index per text field (album, artist, title, …).
	text_fields: EnumMap<TextField, TextFieldIndex>,
	// One ordered index per numeric field (year, track number, …).
	number_fields: EnumMap<NumberField, NumberFieldIndex>,
}
|
||||
|
||||
impl Default for Search {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
text_fields: Default::default(),
|
||||
number_fields: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Search {
|
||||
pub fn find_songs(
|
||||
&self,
|
||||
collection: &collection::Collection,
|
||||
dictionary: &Dictionary,
|
||||
query: &str,
|
||||
) -> Result<Vec<collection::Song>, Error> {
|
||||
let parser = make_parser();
|
||||
let parsed_query = parser
|
||||
.parse(query)
|
||||
.map_err(|_| Error::SearchQueryParseError)?;
|
||||
|
||||
let mut songs = self
|
||||
.eval(dictionary, &parsed_query)
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>();
|
||||
collection.sort_songs(&mut songs, dictionary);
|
||||
let songs = songs
|
||||
.into_iter()
|
||||
.filter_map(|song_key| collection.get_song(dictionary, song_key))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(songs)
|
||||
}
|
||||
|
||||
fn eval(&self, dictionary: &Dictionary, expr: &Expr) -> IntSet<SongKey> {
|
||||
match expr {
|
||||
Expr::Fuzzy(s) => self.eval_fuzzy(dictionary, s),
|
||||
Expr::TextCmp(field, op, s) => self.eval_text_operator(dictionary, *field, *op, &s),
|
||||
Expr::NumberCmp(field, op, n) => self.eval_number_operator(*field, *op, *n),
|
||||
Expr::Combined(e, op, f) => self.combine(dictionary, e, *op, f),
|
||||
}
|
||||
}
|
||||
|
||||
fn combine(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
e: &Box<Expr>,
|
||||
op: BoolOp,
|
||||
f: &Box<Expr>,
|
||||
) -> IntSet<SongKey> {
|
||||
let is_operable = |expr: &Expr| match expr {
|
||||
Expr::Fuzzy(Literal::Text(s)) if s.chars().count() < BIGRAM_SIZE => false,
|
||||
Expr::Fuzzy(Literal::Number(n)) if *n < 10 => false,
|
||||
Expr::TextCmp(_, _, s) if s.chars().count() < BIGRAM_SIZE => false,
|
||||
_ => true,
|
||||
};
|
||||
|
||||
let left = is_operable(e).then(|| self.eval(dictionary, e));
|
||||
let right = is_operable(f).then(|| self.eval(dictionary, f));
|
||||
|
||||
match (left, op, right) {
|
||||
(Some(l), BoolOp::And, Some(r)) => l.intersection(&r).cloned().collect(),
|
||||
(Some(l), BoolOp::Or, Some(r)) => l.union(&r).cloned().collect(),
|
||||
(Some(l), BoolOp::Not, Some(r)) => l.difference(&r).cloned().collect(),
|
||||
(None, BoolOp::Not, _) => IntSet::default(),
|
||||
(Some(l), _, None) => l,
|
||||
(None, _, Some(r)) => r,
|
||||
(None, _, None) => IntSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_fuzzy(&self, dictionary: &Dictionary, value: &Literal) -> IntSet<SongKey> {
|
||||
match value {
|
||||
Literal::Text(s) => {
|
||||
let mut songs = IntSet::default();
|
||||
for field in self.text_fields.values() {
|
||||
songs.extend(field.find_like(dictionary, s));
|
||||
}
|
||||
songs
|
||||
}
|
||||
Literal::Number(n) => {
|
||||
let mut songs = IntSet::default();
|
||||
for field in self.number_fields.values() {
|
||||
songs.extend(field.find(*n as i64, NumberOp::Eq));
|
||||
}
|
||||
songs
|
||||
.union(&self.eval_fuzzy(dictionary, &Literal::Text(n.to_string())))
|
||||
.copied()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_text_operator(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
field: TextField,
|
||||
operator: TextOp,
|
||||
value: &str,
|
||||
) -> IntSet<SongKey> {
|
||||
match operator {
|
||||
TextOp::Eq => self.text_fields[field].find_exact(dictionary, value),
|
||||
TextOp::Like => self.text_fields[field].find_like(dictionary, value),
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_number_operator(
|
||||
&self,
|
||||
field: NumberField,
|
||||
operator: NumberOp,
|
||||
value: i32,
|
||||
) -> IntSet<SongKey> {
|
||||
self.number_fields[field].find(value as i64, operator)
|
||||
}
|
||||
}
|
||||
|
||||
// Substring search indexes pairs of consecutive characters (bigrams).
const BIGRAM_SIZE: usize = 2;
// Size of one dimension of the flat ASCII bigram table; ASCII code points
// (<= 0x7F) always fit below u8::MAX.
const ASCII_RANGE: usize = u8::MAX as usize;
|
||||
|
||||
/// Substring and exact-match index for one text field.
#[derive(Clone, Deserialize, Serialize)]
struct TextFieldIndex {
	// Interned field value -> songs having exactly that value.
	exact: HashMap<Spur, IntSet<SongKey>>,
	// Flat ASCII_RANGE x ASCII_RANGE table: bigram -> (song, interned value) pairs.
	// Dense storage keeps lookups for the common all-ASCII case allocation-free.
	ascii_bigrams: Vec<Vec<(SongKey, Spur)>>,
	// Fallback map for bigrams containing non-ASCII characters.
	other_bigrams: HashMap<[char; BIGRAM_SIZE], Vec<(SongKey, Spur)>>,
}
|
||||
|
||||
// Manual impl: `#[derive(Default)]` cannot pre-size `ascii_bigrams`, which
// must hold one slot per possible ASCII bigram.
impl Default for TextFieldIndex {
	fn default() -> Self {
		Self {
			exact: Default::default(),
			ascii_bigrams: vec![Default::default(); ASCII_RANGE * ASCII_RANGE],
			other_bigrams: Default::default(),
		}
	}
}
|
||||
|
||||
impl TextFieldIndex {
|
||||
fn ascii_bigram_to_index(a: char, b: char) -> usize {
|
||||
assert!(a.is_ascii());
|
||||
assert!(b.is_ascii());
|
||||
(a as usize) * ASCII_RANGE + (b as usize) as usize
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, raw_value: &str, value: Spur, song: SongKey) {
|
||||
let characters = sanitize(raw_value).chars().collect::<TinyVec<[char; 32]>>();
|
||||
for substring in characters[..].windows(BIGRAM_SIZE) {
|
||||
if substring.iter().all(|c| c.is_ascii()) {
|
||||
let index = Self::ascii_bigram_to_index(substring[0], substring[1]);
|
||||
self.ascii_bigrams[index].push((song, value));
|
||||
} else {
|
||||
self.other_bigrams
|
||||
.entry(substring.try_into().unwrap())
|
||||
.or_default()
|
||||
.push((song, value));
|
||||
}
|
||||
}
|
||||
|
||||
self.exact.entry(value).or_default().insert(song);
|
||||
}
|
||||
|
||||
pub fn find_like(&self, dictionary: &Dictionary, value: &str) -> IntSet<SongKey> {
|
||||
let sanitized = sanitize(value);
|
||||
let characters = sanitized.chars().collect::<Vec<_>>();
|
||||
let empty = Vec::new();
|
||||
|
||||
let candidates_by_bigram = characters[..]
|
||||
.windows(BIGRAM_SIZE)
|
||||
.map(|s| {
|
||||
if s.iter().all(|c| c.is_ascii()) {
|
||||
let index = Self::ascii_bigram_to_index(s[0], s[1]);
|
||||
&self.ascii_bigrams[index]
|
||||
} else {
|
||||
self.other_bigrams
|
||||
.get::<[char; BIGRAM_SIZE]>(s.try_into().unwrap())
|
||||
.unwrap_or(&empty)
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
candidates_by_bigram
|
||||
.into_iter()
|
||||
.min_by_key(|h| h.len()) // Only check songs that contain the least common bigram from the search term
|
||||
.unwrap_or(&empty)
|
||||
.iter()
|
||||
.filter(|(_song_key, indexed_value)| {
|
||||
// Only keep songs that actually contain the search term in full
|
||||
let resolved = dictionary.resolve(indexed_value);
|
||||
sanitize(resolved).contains(&sanitized)
|
||||
})
|
||||
.map(|(k, _v)| k)
|
||||
.copied()
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn find_exact(&self, dictionary: &Dictionary, value: &str) -> IntSet<SongKey> {
|
||||
dictionary
|
||||
.get_canon(value)
|
||||
.and_then(|s| self.exact.get(&s))
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Ordered index for one numeric field; BTreeMap keys allow range queries
/// for the <, <=, >, >= operators.
#[derive(Clone, Default, Deserialize, Serialize)]
struct NumberFieldIndex {
	// Field value -> songs having that value.
	values: BTreeMap<i64, IntSet<SongKey>>,
}
|
||||
|
||||
impl NumberFieldIndex {
|
||||
pub fn insert(&mut self, value: i64, key: SongKey) {
|
||||
self.values.entry(value).or_default().insert(key);
|
||||
}
|
||||
|
||||
pub fn find(&self, value: i64, operator: NumberOp) -> IntSet<SongKey> {
|
||||
let range = match operator {
|
||||
NumberOp::Eq => self.values.range(value..=value),
|
||||
NumberOp::Greater => self.values.range((value + 1)..),
|
||||
NumberOp::GreaterOrEq => self.values.range(value..),
|
||||
NumberOp::Less => self.values.range(..value),
|
||||
NumberOp::LessOrEq => self.values.range(..=value),
|
||||
};
|
||||
let candidates = range.map(|(_n, songs)| songs).collect::<Vec<_>>();
|
||||
let mut results = Vec::with_capacity(candidates.iter().map(|c| c.len()).sum());
|
||||
candidates
|
||||
.into_iter()
|
||||
.for_each(|songs| results.extend(songs.iter()));
|
||||
IntSet::from_iter(results)
|
||||
}
|
||||
}
|
||||
|
||||
/// Accumulates songs into the per-field indexes, then produces an immutable
/// [`Search`] via [`Builder::build`].
#[derive(Clone, Default)]
pub struct Builder {
	text_fields: EnumMap<TextField, TextFieldIndex>,
	number_fields: EnumMap<NumberField, NumberFieldIndex>,
}
|
||||
|
||||
impl Builder {
	/// Indexes one song in every relevant text and number field.
	///
	/// `scanner_song` carries the raw strings while `storage_song` carries the
	/// interned (`Spur`) equivalents; the text indexes need both, since they
	/// store the interned key alongside bigrams derived from the raw value.
	/// The two views are assumed to be parallel (same fields, same order in
	/// each list) — they come from the same scan.
	pub fn add_song(&mut self, scanner_song: &scanner::Song, storage_song: &storage::Song) {
		let song_key = SongKey {
			virtual_path: storage_song.virtual_path,
		};

		if let (Some(str), Some(spur)) = (&scanner_song.album, storage_song.album) {
			self.text_fields[TextField::Album].insert(str, spur, song_key);
		}

		for (str, artist_key) in scanner_song
			.album_artists
			.iter()
			.zip(storage_song.album_artists.iter())
		{
			self.text_fields[TextField::AlbumArtist].insert(str, artist_key.0, song_key);
		}

		for (str, artist_key) in scanner_song.artists.iter().zip(storage_song.artists.iter()) {
			self.text_fields[TextField::Artist].insert(str, artist_key.0, song_key);
		}

		for (str, artist_key) in scanner_song
			.composers
			.iter()
			.zip(storage_song.composers.iter())
		{
			self.text_fields[TextField::Composer].insert(str, artist_key.0, song_key);
		}

		if let Some(disc_number) = &scanner_song.disc_number {
			self.number_fields[NumberField::DiscNumber].insert(*disc_number, song_key);
		}

		for (str, spur) in scanner_song.genres.iter().zip(storage_song.genres.iter()) {
			self.text_fields[TextField::Genre].insert(str, *spur, song_key);
		}

		for (str, spur) in scanner_song.labels.iter().zip(storage_song.labels.iter()) {
			self.text_fields[TextField::Label].insert(str, *spur, song_key);
		}

		for (str, artist_key) in scanner_song
			.lyricists
			.iter()
			.zip(storage_song.lyricists.iter())
		{
			self.text_fields[TextField::Lyricist].insert(str, artist_key.0, song_key);
		}

		// Paths are always present, so no Option handling here.
		self.text_fields[TextField::Path].insert(
			scanner_song.virtual_path.to_string_lossy().as_ref(),
			storage_song.virtual_path.0,
			song_key,
		);

		if let (Some(str), Some(spur)) = (&scanner_song.title, storage_song.title) {
			self.text_fields[TextField::Title].insert(str, spur, song_key);
		}

		if let Some(track_number) = &scanner_song.track_number {
			self.number_fields[NumberField::TrackNumber].insert(*track_number, song_key);
		}

		if let Some(year) = &scanner_song.year {
			self.number_fields[NumberField::Year].insert(*year, song_key);
		}
	}

	/// Finalizes the accumulated indexes into an immutable `Search`.
	pub fn build(self) -> Search {
		Search {
			text_fields: self.text_fields,
			number_fields: self.number_fields,
		}
	}
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use std::path::PathBuf;

	use super::*;
	use crate::app::index::dictionary;
	use collection::Collection;
	use storage::store_song;

	// End-to-end harness: the three index structures needed to run a search.
	struct Context {
		dictionary: Dictionary,
		collection: Collection,
		search: Search,
	}

	impl Context {
		// Runs `query` and returns the virtual paths of matching songs,
		// preserving the sort order produced by `find_songs`.
		pub fn search(&self, query: &str) -> Vec<PathBuf> {
			self.search
				.find_songs(&self.collection, &self.dictionary, query)
				.unwrap()
				.into_iter()
				.map(|s| s.virtual_path)
				.collect()
		}
	}

	// Indexes `songs` and returns a ready-to-query Context.
	fn setup_test(songs: Vec<scanner::Song>) -> Context {
		let mut dictionary_builder = dictionary::Builder::default();
		let mut collection_builder = collection::Builder::default();
		let mut search_builder = Builder::default();
		for song in songs {
			let storage_song = store_song(&mut dictionary_builder, &song).unwrap();
			collection_builder.add_song(&storage_song);
			search_builder.add_song(&song, &storage_song);
		}

		Context {
			collection: collection_builder.build(),
			search: search_builder.build(),
			dictionary: dictionary_builder.build(),
		}
	}

	// A bare term matches substrings across all text fields (here: titles and artists).
	#[test]
	fn can_find_fuzzy() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("seasons.mp3"),
				title: Some("Seasons".to_owned()),
				artists: vec!["Dragonforce".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("potd.mp3"),
				title: Some("Power of the Dragonflame".to_owned()),
				artists: vec!["Rhapsody".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("calcium.mp3"),
				title: Some("Calcium".to_owned()),
				artists: vec!["FSOL".to_owned()],
				..Default::default()
			},
		]);

		let songs = ctx.search("agon");
		assert_eq!(songs.len(), 2);
		assert!(songs.contains(&PathBuf::from("seasons.mp3")));
		assert!(songs.contains(&PathBuf::from("potd.mp3")));
	}

	// `field % value` restricts the substring match to a single field.
	#[test]
	fn can_find_field_like() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("seasons.mp3"),
				title: Some("Seasons".to_owned()),
				artists: vec!["Dragonforce".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("potd.mp3"),
				title: Some("Power of the Dragonflame".to_owned()),
				artists: vec!["Rhapsody".to_owned()],
				..Default::default()
			},
		]);

		let songs = ctx.search("artist % agon");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("seasons.mp3")));
	}

	// Both fuzzy and exact queries ignore letter case.
	#[test]
	fn text_is_case_insensitive() {
		let ctx = setup_test(vec![scanner::Song {
			virtual_path: PathBuf::from("seasons.mp3"),
			artists: vec!["Dragonforce".to_owned()],
			..Default::default()
		}]);

		let songs = ctx.search("dragonforce");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("seasons.mp3")));

		let songs = ctx.search("artist = dragonforce");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("seasons.mp3")));
	}

	// `field = value` requires the whole field to match, not a substring.
	#[test]
	fn can_find_field_exact() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("seasons.mp3"),
				title: Some("Seasons".to_owned()),
				artists: vec!["Dragonforce".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("potd.mp3"),
				title: Some("Power of the Dragonflame".to_owned()),
				artists: vec!["Rhapsody".to_owned()],
				..Default::default()
			},
		]);

		let songs = ctx.search("artist = Dragon");
		assert!(songs.is_empty());

		let songs = ctx.search("artist = Dragonforce");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("seasons.mp3")));
	}

	// All five numeric comparison operators filter as expected, with or
	// without spaces around the operator.
	#[test]
	fn can_query_number_fields() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("1999.mp3"),
				year: Some(1999),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("2000.mp3"),
				year: Some(2000),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("2001.mp3"),
				year: Some(2001),
				..Default::default()
			},
		]);

		let songs = ctx.search("year=2000");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("2000.mp3")));

		let songs = ctx.search("year>2000");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("2001.mp3")));

		let songs = ctx.search("year<2000");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("1999.mp3")));

		let songs = ctx.search("year>=2000");
		assert_eq!(songs.len(), 2);
		assert!(songs.contains(&PathBuf::from("2000.mp3")));
		assert!(songs.contains(&PathBuf::from("2001.mp3")));

		let songs = ctx.search("year<=2000");
		assert_eq!(songs.len(), 2);
		assert!(songs.contains(&PathBuf::from("1999.mp3")));
		assert!(songs.contains(&PathBuf::from("2000.mp3")));
	}

	// A bare number matches numeric fields AND text occurrences of its digits.
	#[test]
	fn fuzzy_numbers_query_all_fields() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("music.mp3"),
				year: Some(2000),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("fireworks 2000.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("calcium.mp3"),
				..Default::default()
			},
		]);

		let songs = ctx.search("2000");
		assert_eq!(songs.len(), 2);
		assert!(songs.contains(&PathBuf::from("music.mp3")));
		assert!(songs.contains(&PathBuf::from("fireworks 2000.mp3")));
	}

	// `&&` intersects result sets; juxtaposed terms behave as an implicit AND.
	#[test]
	fn can_use_and_operator() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("whale.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("space.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("whales in space.mp3"),
				..Default::default()
			},
		]);

		let songs = ctx.search("space && whale");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("whales in space.mp3")));

		let songs = ctx.search("space whale");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("whales in space.mp3")));
	}

	// `||` unions result sets.
	#[test]
	fn can_use_or_operator() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("whale.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("space.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("whales in space.mp3"),
				..Default::default()
			},
		]);

		let songs = ctx.search("space || whale");
		assert_eq!(songs.len(), 3);
		assert!(songs.contains(&PathBuf::from("whale.mp3")));
		assert!(songs.contains(&PathBuf::from("space.mp3")));
		assert!(songs.contains(&PathBuf::from("whales in space.mp3")));
	}

	// `!!` subtracts the right-hand matches from the left-hand matches.
	#[test]
	fn can_use_not_operator() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("whale.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("space.mp3"),
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("whales in space.mp3"),
				..Default::default()
			},
		]);

		let songs = ctx.search("whale !! space");
		assert_eq!(songs.len(), 1);
		assert!(songs.contains(&PathBuf::from("whale.mp3")));
	}

	// Results come back in collection sort order (via `sort_songs`), not in
	// index or insertion order.
	#[test]
	fn results_are_sorted() {
		let ctx = setup_test(vec![
			scanner::Song {
				virtual_path: PathBuf::from("accented.mp3"),
				artists: vec!["à la maison".to_owned()],
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("cry thunder.mp3"),
				artists: vec!["Dragonforce".to_owned()],
				album: Some("The Power Within".to_owned()),
				year: Some(2012),
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("revelations.mp3"),
				artists: vec!["Dragonforce".to_owned()],
				album: Some("Valley of the Damned".to_owned()),
				year: Some(2003),
				track_number: Some(7),
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("starfire.mp3"),
				artists: vec!["Dragonforce".to_owned()],
				album: Some("Valley of the Damned".to_owned()),
				year: Some(2003),
				track_number: Some(5),
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("eternal snow.mp3"),
				artists: vec!["Rhapsody".to_owned()],
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
			scanner::Song {
				virtual_path: PathBuf::from("alchemy.mp3"),
				artists: vec!["Avantasia".to_owned()],
				genres: vec!["Metal".to_owned()],
				..Default::default()
			},
		]);

		let songs = ctx.search("metal");
		assert_eq!(songs.len(), 6);
		assert_eq!(
			songs,
			vec![
				PathBuf::from("accented.mp3"),
				PathBuf::from("alchemy.mp3"),
				PathBuf::from("starfire.mp3"),
				PathBuf::from("revelations.mp3"),
				PathBuf::from("cry thunder.mp3"),
				PathBuf::from("eternal snow.mp3"),
			]
		);
	}

	// "love" shares bigrams ("lo", "ve") with "lorry bovine vehicle" but is not
	// a substring of it — the full-containment filter must reject it.
	#[test]
	fn avoids_bigram_false_positives() {
		let ctx = setup_test(vec![scanner::Song {
			virtual_path: PathBuf::from("lorry bovine vehicle.mp3"),
			..Default::default()
		}]);

		let songs = ctx.search("love");
		assert!(songs.is_empty());
	}

	// Terms shorter than one bigram cannot be indexed; they are ignored by
	// boolean operators instead of forcing an empty result.
	#[test]
	fn ignores_single_letter_components() {
		let ctx = setup_test(vec![scanner::Song {
			virtual_path: PathBuf::from("seasons.mp3"),
			..Default::default()
		}]);

		let songs = ctx.search("seas u");
		assert_eq!(songs.len(), 1);

		let songs = ctx.search("seas 2");
		assert_eq!(songs.len(), 1);

		let songs = ctx.search("seas || u");
		assert_eq!(songs.len(), 1);

		let songs = ctx.search("seas || 2");
		assert_eq!(songs.len(), 1);
	}
}
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue