Compare commits
600 commits
Author | SHA1 | Date | |
---|---|---|---|
|
88792f0669 | ||
|
c2fb46f26d | ||
|
26ce7e2550 | ||
|
46aed8096e | ||
|
2aeed5f188 | ||
|
41c4088477 | ||
|
10946330a8 | ||
|
2ce035f787 | ||
|
956301bfdb | ||
|
f35c4efac3 | ||
|
2a1c93c462 | ||
|
7625449434 | ||
|
1b0b5bd164 | ||
|
809e3f878d | ||
|
6862cff185 | ||
|
36da2c3e70 | ||
|
9923b0f40c | ||
|
1df3241ea2 | ||
|
77c313637f | ||
|
baa31f1056 | ||
|
cf5d1b7931 | ||
|
87c4bdc247 | ||
|
7c92e90d65 | ||
|
0eb1d7ee75 | ||
|
dd53d8d284 | ||
|
98e10ad682 | ||
|
f8b30c4e3d | ||
|
5a1753218c | ||
|
646f8297d2 | ||
|
d6416e0239 | ||
|
6681322370 | ||
|
f0cf3d2675 | ||
|
dac7145ce4 | ||
|
677413ef8c | ||
|
fd3f877f93 | ||
|
95c7d1a620 | ||
|
ac88bc9af0 | ||
|
7066e264cd | ||
|
3cea551ce9 | ||
|
b1770fc17e | ||
|
f4009a7fa7 | ||
|
d90b51f752 | ||
|
8ccc9cc2ee | ||
|
4625bf221d | ||
|
b940ca256b | ||
|
24f27e4f08 | ||
|
bd5aeaf591 | ||
|
9707f4a96d | ||
|
2d92ac03ef | ||
|
3f5e5eca69 | ||
|
dabb034964 | ||
|
cc2d2cedd8 | ||
|
bbd63e1b42 | ||
|
df402ed7b8 | ||
|
b5a8aea1f8 | ||
|
350557785c | ||
|
07e8077a38 | ||
|
23facd96b9 | ||
|
1c3ba3d709 | ||
|
364710ef79 | ||
|
2e2ddf017b | ||
|
1b142b1855 | ||
|
d47fffae4f | ||
|
11c72240ed | ||
|
466bbf5cf3 | ||
|
055a81e6f9 | ||
|
81e0abc59f | ||
|
4826e6aa40 | ||
|
2521ff1ddf | ||
|
00236a99e3 | ||
|
cfc848bf7c | ||
|
bf775ebc4c | ||
|
3ad5e97b75 | ||
|
73dc59f833 | ||
|
58d1af5edd | ||
|
ff7291a246 | ||
|
cfa2cedbc1 | ||
|
b9bcdd46b1 | ||
|
f371d5e331 | ||
|
41187199ba | ||
|
de39b2f4a5 | ||
|
cb241d21dd | ||
|
21d7e3049e | ||
|
68b8041f97 | ||
|
e8845c7ef9 | ||
|
b7719edd8e | ||
|
ed546ed531 | ||
|
c640086a3e | ||
|
cf6a092ab7 | ||
|
768ea095e1 | ||
|
5a5f696366 | ||
|
d1d12aecc5 | ||
|
ea75497bf1 | ||
|
8100dfceae | ||
|
f955eb75c5 | ||
|
0a7ae8ebad | ||
|
090ca387ab | ||
|
d53681b6c0 | ||
|
d555a2e5f0 | ||
|
142d400b8b | ||
|
5f585a61d8 | ||
|
08052c25a3 | ||
|
497b3bb545 | ||
|
32e67dc095 | ||
|
8b31698cf4 | ||
|
7a84cc0290 | ||
|
524e072e9f | ||
|
5ec0b5f7a5 | ||
|
fb18cb3c4f | ||
|
0058221e88 | ||
|
51283d935f | ||
|
a4e9aea1e4 | ||
|
7f39d8e8b7 | ||
|
316f5c0219 | ||
|
deeb3e8a05 | ||
|
ae5da0f4f3 | ||
|
c7a760e2c2 | ||
|
471e39495c | ||
|
67730f55fb | ||
|
1555c784de | ||
|
c51ce59fba | ||
|
a89e3d5145 | ||
|
658c23e70d | ||
|
053b684f3a | ||
|
1a8bf91628 | ||
|
a5061dfc92 | ||
|
2c2b12f536 | ||
|
f0a2afe01d | ||
|
9e18a221db | ||
|
e42c3abfe1 | ||
|
765de35f89 | ||
|
98bcd41e43 | ||
|
369bf3821b | ||
|
664ff721e2 | ||
|
b175e319b7 | ||
|
76535b2f87 | ||
|
d1a0b836cf | ||
|
071aced10a | ||
|
7f3e091e32 | ||
|
232eb7ac12 | ||
|
143da76673 | ||
|
8d51344dc3 | ||
|
bff82c3a7c | ||
|
454b4c00fc | ||
|
2bbfa064d5 | ||
|
f1e21a4f6e | ||
|
9e62dc108c | ||
|
2992ef89b8 | ||
|
4548574298 | ||
|
72f4604f7a | ||
|
646a8fa587 | ||
|
647e1d5614 | ||
|
c1c0cedccc | ||
|
d06ad07f51 | ||
|
f188b2943f | ||
|
e3041fca6f | ||
|
444d261d0b | ||
|
47c73f6196 | ||
|
63b92718d5 | ||
|
e11344d2b4 | ||
|
0b50a10a36 | ||
|
6a46aaeac6 | ||
|
7ae10c6f74 | ||
|
5d03b7919c | ||
|
6c2b192f8e | ||
|
ef6951faba | ||
|
e06f79c500 | ||
|
cb35ef0ebb | ||
|
f21f906eaf | ||
|
b943d9aa11 | ||
|
f971b78856 | ||
|
971b46be45 | ||
|
ee3f9fd5a0 | ||
|
b5762bd7bf | ||
|
99263ddeca | ||
|
be97bccab1 | ||
|
0fe3555560 | ||
|
bdc4f840a4 | ||
|
409d79d8a2 | ||
|
390ee03020 | ||
|
cb33c96548 | ||
|
5128796825 | ||
|
81403960b0 | ||
|
5e8587c39f | ||
|
e5339ab39a | ||
|
caf12f23b4 | ||
|
9a14114e50 | ||
|
83b5431994 | ||
|
b96cd2d781 | ||
|
bc17954db9 | ||
|
e5a8c325a6 | ||
|
625f4bd006 | ||
|
ae4200c6ce | ||
|
6bd0c25d7d | ||
|
2b81355f6d | ||
|
e65cee366d | ||
|
ae876915b4 | ||
|
7be9f25cb3 | ||
|
4072e3b07d | ||
|
54ce646931 | ||
|
e0bf259be3 | ||
|
07324ccca6 | ||
|
c1f24ce96b | ||
|
0c12729983 | ||
|
ad37a14cfa | ||
|
309620a088 | ||
|
1e0a6062f9 | ||
|
85cacd8bb7 | ||
|
7c5ff2e895 | ||
|
afc5fcb4c2 | ||
|
9a30065971 | ||
|
f4b0cb9eb7 | ||
|
e703f69a48 | ||
|
57a0163c04 | ||
|
5444285327 | ||
|
6837994433 | ||
|
8141e565e0 | ||
|
a3c2b3bc32 | ||
|
570c2b3894 | ||
|
f625c57d20 | ||
|
d492afc885 | ||
|
4112c7d79d | ||
|
39407c6551 | ||
|
0afab8d634 | ||
|
bc3ed59382 | ||
|
636803c0df | ||
|
a7c4c90427 | ||
|
91152fdc08 | ||
|
1bbeee7f39 | ||
|
6564e7d078 | ||
|
41c043f863 | ||
|
3f645d1011 | ||
|
6b5c291cb7 | ||
|
310e3b6c4d | ||
|
a2232aa9f2 | ||
|
0841c15f48 | ||
|
763ba94e9b | ||
|
a4baa2c792 | ||
|
e6483cf138 | ||
|
b014c63af4 | ||
|
6821318a4d | ||
|
0a1f3fa78d | ||
|
169b2b5cb8 | ||
|
782da35a7b | ||
|
2cbb249c46 | ||
|
2f2fdf9056 | ||
|
f0fa985f8a | ||
|
6b1133e27c | ||
|
8f6e72fbd6 | ||
|
2c7eb9f643 | ||
|
7a17cdc195 | ||
|
16434e6c51 | ||
|
5a14830138 | ||
|
845105cf38 | ||
|
cd45836924 | ||
|
8f2566f574 | ||
|
a0624f7968 | ||
|
7a1d433c8a | ||
|
ae9f94ce4f | ||
|
e8af339cde | ||
|
b4b0e1181f | ||
|
72ec7b260a | ||
|
35736ee1d5 | ||
|
332e39876e | ||
|
b42c6d39e8 | ||
|
1f3cc1ea26 | ||
|
8db6a2352b | ||
|
93e8d7d94b | ||
|
64ef7cb21f | ||
|
2012258a72 | ||
|
2965cbdf7e | ||
|
efc27757c7 | ||
|
91352fc13b | ||
|
470fbc6d1c | ||
|
9e9d031f4e | ||
|
caf6feea7a | ||
|
caa8907297 | ||
|
6871f41a99 | ||
|
00cc18c798 | ||
|
3362a828cd | ||
|
9d8d543494 | ||
|
5a785a2e16 | ||
|
0f25a12877 | ||
|
1c4ef6c5ee | ||
|
1020f27413 | ||
|
0e63f64513 | ||
|
153943a3ae | ||
|
d82563efc0 | ||
|
274a1f2cf7 | ||
|
18858d8d1a | ||
|
03d5568765 | ||
|
5c4631c673 | ||
|
84921f7db3 | ||
|
08353a717f | ||
|
138886e55c | ||
|
6884548cd0 | ||
|
12a9f2ec3c | ||
|
138eacc9fc | ||
|
11775d961b | ||
|
77dc2eac23 | ||
|
7279793d25 | ||
|
d4a427648e | ||
|
123eee7d2d | ||
|
fd6a13083d | ||
|
5ca38939bd | ||
|
c1abd8fe3b | ||
|
fc0a4fd6eb | ||
|
6f24ff248f | ||
|
4807b2d3b9 | ||
|
608dabb789 | ||
|
8d38c5b664 | ||
|
f6d45c8387 | ||
|
930fd67ae3 | ||
|
00b6444048 | ||
|
bd330ddd84 | ||
|
46a232219d | ||
|
a8660793f8 | ||
|
eaec68dff0 | ||
|
1484ecabe9 | ||
|
1812bedfd2 | ||
|
c57583d1d4 | ||
|
98d00d261d | ||
|
edc7170b89 | ||
|
e4959be2f4 | ||
|
fee2f17fb1 | ||
|
4c5a6bc2d6 | ||
|
1e9d307a05 | ||
|
4ec8f2161b | ||
|
f609afc5ed | ||
|
9f0bc06dac | ||
|
d1cb328523 | ||
|
33997fc8e1 | ||
|
602c1c03b5 | ||
|
f3abb816ff | ||
|
96d702b79e | ||
|
223894c2b6 | ||
|
bb8d1142d6 | ||
|
822f3ed073 | ||
|
2873f38e04 | ||
|
388901cf65 | ||
|
df0de19567 | ||
|
29ae862aad | ||
|
a5f5a77100 | ||
|
63e971059a | ||
|
d41e837561 | ||
|
f5a2eed423 | ||
|
41a4b21327 | ||
|
374d0ca56f | ||
|
90fd6bbcc9 | ||
|
39c8cf7595 | ||
|
f27bc4ccfc | ||
|
818dfe877c | ||
|
b6e9940c76 | ||
|
e2bf97db99 | ||
|
d01583b406 | ||
|
f104355076 | ||
|
4c25195deb | ||
|
ed581c57cf | ||
|
652772ba0e | ||
|
6c27409ef2 | ||
|
7a73ae7cc0 | ||
|
2f71cf2db7 | ||
|
4ad8d922f7 | ||
|
7edcc38483 | ||
|
7bc8e142c3 | ||
|
487d261843 | ||
|
ea7edea79b | ||
|
5ec2ae3f7a | ||
|
cab03f2538 | ||
|
1d57691e8b | ||
|
72c8ed9289 | ||
|
5e065c5e6a | ||
|
e5c1d86577 | ||
|
d4c78a0a31 | ||
|
ec39e696bb | ||
|
5b2d0a2216 | ||
|
404d42d254 | ||
|
bd48ad1a5c | ||
|
c2807b60de | ||
|
6be6d2a7dc | ||
|
e25af0e9b5 | ||
|
2c21609699 | ||
|
847d61f62b | ||
|
9c45ad5238 | ||
|
dd92d3e6eb | ||
|
7e4c0fa610 | ||
|
34e0538562 | ||
|
866d82a16c | ||
|
454d73c754 | ||
|
7477158891 | ||
|
55a4f64b3a | ||
|
eb917bb9d6 | ||
|
e40121c4d0 | ||
|
0e52047417 | ||
|
1ebc0d9f44 | ||
|
b709a8cd64 | ||
|
5b412718dc | ||
|
b678973ef0 | ||
|
351f1b0768 | ||
|
eef75163ce | ||
|
33121bc0a3 | ||
|
2f67d280fa | ||
|
f03d12de3e | ||
|
b6c446fa02 | ||
|
8524c7d5fe | ||
|
a3f7a306e5 | ||
|
7ea97b0abf | ||
|
8d2ed31fef | ||
|
0930ef45bb | ||
|
f80a42e666 | ||
|
2eed57cc47 | ||
|
e1934a8e92 | ||
|
847c26ddfe | ||
|
1ffea255df | ||
|
875a52f1b2 | ||
|
538b41a2b4 | ||
|
0927f3815e | ||
|
bcebaf499e | ||
|
e0d1f396a8 | ||
|
1c5a536277 | ||
|
bff49c22ec | ||
|
4534a84c05 | ||
|
d78011e6bc | ||
|
23a144761e | ||
|
7e46c6cd5a | ||
|
42522ffc78 | ||
|
209813f25c | ||
|
a7ef7b2bd0 | ||
|
cf67e44d20 | ||
|
ca8f046142 | ||
|
8c32d7351c | ||
|
341a03574b | ||
|
d9bdea8745 | ||
|
ef8246ecfb | ||
|
cd63564c03 | ||
|
00a1ca18cf | ||
|
b7415e6304 | ||
|
d8f38e88f0 | ||
|
dcfd01c7a3 | ||
|
c91f5815d6 | ||
|
1fa8bbc0ca | ||
|
7901cb43bf | ||
|
04757fc20d | ||
|
cf6f30345c | ||
|
8e55d31eb0 | ||
|
e65d57e24a | ||
|
66bef4d006 | ||
|
7cbf27fce0 | ||
|
8de736e563 | ||
|
943174bafa | ||
|
1ff845d48e | ||
|
9ee9786c0a | ||
|
6c4b4d3e20 | ||
|
b2152cecc3 | ||
|
86a935fd79 | ||
|
70c7463f6f | ||
|
14a6466c2d | ||
|
db97dbea46 | ||
|
60e8f3ec46 | ||
|
fee96e6b49 | ||
|
fe5ab5ba48 | ||
|
7db8894d65 | ||
|
797e2e9526 | ||
|
a2dc0ce37a | ||
|
b423b76e29 | ||
|
b0ca61ae2f | ||
|
2b7f098836 | ||
|
6472349523 | ||
|
17976dc99f | ||
|
dbb5f79371 | ||
|
8035856fb6 | ||
|
d19ccc7da2 | ||
|
337a4020fe | ||
|
f0fc9e8fba | ||
|
7637037e3d | ||
|
e9346e29ee | ||
|
107e63caca | ||
|
2797d5ed91 | ||
|
d8b1f0c002 | ||
|
eb2fd23281 | ||
|
eb0c141ebe | ||
|
31f9a3ecc5 | ||
|
6142697b4a | ||
|
8ef6757acb | ||
|
d5c186579a | ||
|
99db3eda13 | ||
|
45f4369301 | ||
|
c3466ca248 | ||
|
0c5bd28d56 | ||
|
21cf831f74 | ||
|
7b12f8f294 | ||
|
29819b930a | ||
|
371b0d0333 | ||
|
5e1aeb10fb | ||
|
d7c66c3745 | ||
|
eb7c833de5 | ||
|
d005b86fb0 | ||
|
2577015872 | ||
|
1af2ba49be | ||
|
d7eb66d529 | ||
|
1825056f26 | ||
|
b02d4da979 | ||
|
f79b4615c4 | ||
|
a05c838c5b | ||
|
768ee1122c | ||
|
cea8906c3d | ||
|
6e3f439d8a | ||
|
7428891bde | ||
|
86d61dd964 | ||
|
b413125a46 | ||
|
f9f69cd55c | ||
|
7562bb306c | ||
|
68c7a0f14b | ||
|
4e76a11e7a | ||
|
503eed8b62 | ||
|
be268a2004 | ||
|
72fdad45db | ||
|
186e3173cd | ||
|
312eb15a2b | ||
|
fc36bb4cee | ||
|
cdb1a5233e | ||
|
23fc43cbf7 | ||
|
b7b7c6e737 | ||
|
78c8ca8aa2 | ||
|
0c28f54f01 | ||
|
27d0a9e158 | ||
|
b2ee2fe701 | ||
|
aec941b97f | ||
|
3a0fda972b | ||
|
e2c9e64bf7 | ||
|
645d5163f3 | ||
|
36260dcdce | ||
|
028633d0e6 | ||
|
2b30307488 | ||
|
f71a8320e9 | ||
|
d1bb60a1c7 | ||
|
f6b9e67d4e | ||
|
b8b3c80be9 | ||
|
1764f3da4d | ||
|
18bc9594a4 | ||
|
b6d985859c | ||
|
25b36be073 | ||
|
f9a6d6b6d4 | ||
|
b97ace68ea | ||
|
e53b9f5867 | ||
|
b1e4be2f8f | ||
|
f12d0809d4 | ||
|
2efc0df04e | ||
|
ee71df9d0b | ||
|
0e6be32a8c | ||
|
f9ebb432b2 | ||
|
50421e91d6 | ||
|
e64435efa5 | ||
|
95f6c62531 | ||
|
a4991a620e | ||
|
9df21737fa | ||
|
fa178b92be | ||
|
5ccc006515 | ||
|
9f4f6b4337 | ||
|
7f7da0050b | ||
|
3c45150651 | ||
|
ed66200689 | ||
|
9ed0526075 | ||
|
76118756b9 | ||
|
acffa576e2 | ||
|
0dba7e2e4f | ||
|
6f642c34e2 | ||
|
f7efeef653 | ||
|
4194509f45 | ||
|
289827d6a3 | ||
|
1c84cde158 | ||
|
61c221a2d2 | ||
|
a83e1af69b | ||
|
9e48dc408e | ||
|
052dc88f14 | ||
|
e248f3b983 | ||
|
0a4d05cdc8 | ||
|
ba901c7873 | ||
|
60f2e330a4 | ||
|
fd8a6c64f5 | ||
|
448198acb6 | ||
|
c49fdbab37 | ||
|
18367198a7 | ||
|
eca4f68834 | ||
|
360d864148 | ||
|
811a35ab4c | ||
|
26596f16bd | ||
|
54a4f9d394 | ||
|
59366c6b03 | ||
|
2de5b34a48 | ||
|
0a0a6ce955 | ||
|
b83f16e6f5 | ||
|
d823dce7db | ||
|
70388095a5 | ||
|
ddae5cc24f | ||
|
f41f45f600 | ||
|
0b0bfac8fb | ||
|
cbb7e7b97c | ||
|
28bb240ae0 | ||
|
bca8f4ced8 |
9
.codecov.yml
Normal file
|
@ -0,0 +1,9 @@
|
|||
coverage:
|
||||
range: "0...100"
|
||||
status:
|
||||
patch:
|
||||
default:
|
||||
informational: true
|
||||
project:
|
||||
default:
|
||||
informational: true
|
1
.envrc
Normal file
|
@ -0,0 +1 @@
|
|||
use flake
|
28
.github/actions/make-linux-release/action.yml
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
name: 'Make a Linux Release'
|
||||
description: 'Creates archive containing files to install Polaris on on a Linux system'
|
||||
inputs:
|
||||
version-number:
|
||||
description: 'Polaris version number'
|
||||
required: true
|
||||
default: '0.0'
|
||||
output-file:
|
||||
description: 'File path where the resulting archive should be stored'
|
||||
required: false
|
||||
default: 'polaris.tar.gz'
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Download Polaris Web
|
||||
run: |
|
||||
curl -L -o web.zip https://github.com/agersant/polaris-web/releases/latest/download/web.zip
|
||||
unzip web.zip
|
||||
shell: bash
|
||||
- name: Set Polaris version
|
||||
run: echo "POLARIS_VERSION=${{ inputs.version-number }}" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
- name: Build archive
|
||||
run: res/unix/release_script.sh
|
||||
shell: bash
|
||||
- name: Copy archive to output location
|
||||
run: cp release/polaris.tar.gz ${{ inputs.output-file }}
|
||||
shell: bash
|
28
.github/actions/make-windows-release/action.yml
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
name: 'Make a Windows Release'
|
||||
description: 'Creates archive containing files to install Polaris on on a Windows system'
|
||||
inputs:
|
||||
version-number:
|
||||
description: 'Polaris version number'
|
||||
required: true
|
||||
default: '0.0'
|
||||
output-file:
|
||||
description: 'File path where the resulting installer should be stored'
|
||||
required: false
|
||||
default: 'polaris.msi'
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Download Polaris Web
|
||||
run: |
|
||||
curl -L -o web.zip https://github.com/agersant/polaris-web/releases/latest/download/web.zip
|
||||
unzip web.zip
|
||||
shell: bash
|
||||
- name: Set Polaris Version
|
||||
run: echo "POLARIS_VERSION=${{ inputs.version-number }}" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
- name: Build Installer
|
||||
run: res/windows/release_script
|
||||
shell: pwsh
|
||||
- name: Copy installer to output location
|
||||
run: cp release/polaris.msi ${{ inputs.output-file }}
|
||||
shell: bash
|
33
.github/workflows/build.yaml
vendored
|
@ -1,33 +0,0 @@
|
|||
name: Build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Run Tests
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
features: [--all-features, --no-default-features]
|
||||
os: [ubuntu-latest, windows-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- name: Install libsqlite3-dev
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: sudo apt-get install libsqlite3-dev
|
||||
- uses: actions/checkout@v1
|
||||
with:
|
||||
submodules: true
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
- uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
toolchain: nightly
|
||||
args: --release ${{ matrix.features }}
|
25
.github/workflows/build.yml
vendored
Normal file
|
@ -0,0 +1,25 @@
|
|||
name: Build
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
push:
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Run Tests
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [ubuntu-latest, windows-latest]
|
||||
features: ["", --features ui]
|
||||
|
||||
steps:
|
||||
- name: Install libsqlite3-dev
|
||||
if: contains(matrix.os, 'ubuntu') && !contains(matrix.features, 'bundle-sqlite')
|
||||
run: sudo apt-get update && sudo apt-get install libsqlite3-dev
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: test
|
||||
args: --release ${{ matrix.features }}
|
48
.github/workflows/coverage.yml
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
name: Test Coverage
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Measure Test Coverage
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v4
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
components: llvm-tools-preview
|
||||
- name: Install grcov
|
||||
run: cargo install grcov
|
||||
- name: Run tests
|
||||
run: cargo test --no-fail-fast
|
||||
env:
|
||||
RUSTFLAGS: "-Cinstrument-coverage"
|
||||
- name: Gather coverage results
|
||||
run: >
|
||||
grcov
|
||||
.
|
||||
-t lcov
|
||||
-o coverage.txt
|
||||
--llvm
|
||||
--branch
|
||||
--ignore-not-existing
|
||||
--binary-path ./target/debug/
|
||||
--excl-line "#\[derive\("
|
||||
--excl-br-line "#\[derive\("
|
||||
--excl-start "mod tests \{"
|
||||
--excl-br-start "mod tests \{"
|
||||
- name: Upload Results
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
19
.github/workflows/deploy-demo.yml
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
name: Deploy Demo Server
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
types: [released]
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
name: Trigger Demo Build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.POLARIS_DEMO_ACCESS_TOKEN }}
|
||||
repository: agersant/polaris-demo
|
||||
event-type: polaris-release
|
108
.github/workflows/release.yml
vendored
Normal file
|
@ -0,0 +1,108 @@
|
|||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
versionNumber:
|
||||
description: "User-facing version number (eg: 0.13.0)"
|
||||
required: true
|
||||
|
||||
name: Make Release
|
||||
|
||||
jobs:
|
||||
branch_and_tag:
|
||||
name: Update Release Branch
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Merge to Release Branch
|
||||
uses: devmasx/merge-branch@v1.3.1
|
||||
with:
|
||||
type: now
|
||||
target_branch: release
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Checkout Release Branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Update Polaris Version in Cargo.toml
|
||||
run: gawk -i inplace '/^version/ { if (count == 0) { $3 = "\"${{ github.event.inputs.versionNumber }}\""; count++ } } 1' Cargo.toml
|
||||
- name: Commit Cargo.toml Version Change
|
||||
uses: EndBug/add-and-commit@v9
|
||||
with:
|
||||
message: "Updated version number"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Add <version number> Git Tag
|
||||
run: |
|
||||
git config --global user.name ${{ github.actor }}
|
||||
git config --global user.email "<>"
|
||||
git tag -f -a ${{ github.event.inputs.versionNumber }} -m "Version number"
|
||||
git push -f --tags
|
||||
|
||||
windows:
|
||||
name: Windows
|
||||
runs-on: windows-latest
|
||||
needs: branch_and_tag
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-windows-release
|
||||
with:
|
||||
version-number: ${{ github.event.inputs.versionNumber }}
|
||||
output-file: Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
- name: Upload installer
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: windows-artifact
|
||||
path: Polaris_${{ github.event.inputs.versionNumber }}.msi
|
||||
|
||||
linux:
|
||||
name: Linux
|
||||
runs-on: ubuntu-latest
|
||||
needs: branch_and_tag
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: release
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-linux-release
|
||||
with:
|
||||
version-number: ${{ github.event.inputs.versionNumber }}
|
||||
output-file: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
- name: Upload release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: linux-artifact
|
||||
path: Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
|
||||
create_release:
|
||||
name: Create Github Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [windows, linux]
|
||||
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
merge-multiple: true
|
||||
- name: Make Github release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
body: 'Release notes are documented in [CHANGELOG.md](https://github.com/agersant/polaris/blob/master/CHANGELOG.md)'
|
||||
draft: true
|
||||
prerelease: false
|
||||
name: Polaris ${{ github.event.inputs.versionNumber }}
|
||||
tag_name: ${{ github.event.inputs.versionNumber }}
|
||||
fail_on_unmatched_files: true
|
||||
files: |
|
||||
Polaris_${{ github.event.inputs.versionNumber }}.tar.gz
|
||||
Polaris_${{ github.event.inputs.versionNumber }}.msi
|
128
.github/workflows/validate-install.yml
vendored
Normal file
|
@ -0,0 +1,128 @@
|
|||
on:
|
||||
pull_request:
|
||||
push:
|
||||
|
||||
name: Validate Install
|
||||
|
||||
jobs:
|
||||
package_linux_release:
|
||||
name: Package Linux Release
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-linux-release
|
||||
with:
|
||||
version-number: "0.0.0"
|
||||
output-file: polaris.tar.gz
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: linux-release
|
||||
path: polaris.tar.gz
|
||||
|
||||
validate_linux_system_install:
|
||||
name: Linux System Install
|
||||
runs-on: ubuntu-latest
|
||||
needs: package_linux_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview
|
||||
- name: Preview Install w/ Custom Prefix
|
||||
run: make preview PREFIX=/some/random/prefix
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Install
|
||||
run: sudo --preserve-env=PATH make install
|
||||
- name: Run Polaris
|
||||
run: sudo /usr/local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: sudo kill -KILL $(sudo cat /usr/local/var/run/polaris/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: sudo make uninstall
|
||||
|
||||
validate_linux_xdg_install:
|
||||
name: Linux XDG Install
|
||||
runs-on: ubuntu-latest
|
||||
needs: package_linux_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: linux-release
|
||||
path: .
|
||||
- name: Extract release
|
||||
run: tar -xzvf polaris.tar.gz --strip-components=1
|
||||
- name: Preview Install
|
||||
run: make preview-xdg
|
||||
- name: Preview Install w/ Custom XDG_DATA_HOME
|
||||
run: make preview-xdg XDG_DATA_HOME=/my/own/xdg/home
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Install
|
||||
run: make install-xdg
|
||||
- name: Run Polaris
|
||||
run: $HOME/.local/bin/polaris && sleep 5s
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: kill -KILL $(cat /tmp/polaris-1001/polaris.pid)
|
||||
- name: Uninstall
|
||||
run: make uninstall-xdg
|
||||
|
||||
package_windows_release:
|
||||
name: Package Windows Release
|
||||
runs-on: windows-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout Polaris
|
||||
uses: actions/checkout@v1
|
||||
- name: Install Rust Toolchain
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
- name: Make release
|
||||
uses: ./.github/actions/make-windows-release
|
||||
with:
|
||||
version-number: "0.0.0"
|
||||
output-file: polaris.msi
|
||||
- name: Upload packaged release
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
if-no-files-found: error
|
||||
name: windows-release
|
||||
path: polaris.msi
|
||||
|
||||
validate_windows_install:
|
||||
name: Windows Install
|
||||
runs-on: windows-latest
|
||||
needs: package_windows_release
|
||||
|
||||
steps:
|
||||
- name: Download release
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: windows-release
|
||||
path: .
|
||||
- name: Install
|
||||
run: msiexec /i polaris.msi /qn
|
||||
- name: Run Polaris
|
||||
run: |
|
||||
start $env:LOCALAPPDATA/Permafrost/Polaris/polaris-cli.exe
|
||||
sleep 5
|
||||
- name: Make a request
|
||||
run: curl -f http://localhost:5050
|
||||
- name: Stop Polaris
|
||||
run: taskkill /IM polaris-cli.exe
|
||||
- name: Uninstall
|
||||
run: msiexec /x polaris.msi /qn
|
30
.gitignore
vendored
|
@ -1,11 +1,25 @@
|
|||
# Dev environment
|
||||
.direnv
|
||||
|
||||
# Build output
|
||||
target
|
||||
|
||||
# Test output
|
||||
test-output
|
||||
|
||||
# Local config for quick iteration
|
||||
TestConfig.toml
|
||||
|
||||
# Runtime artifacts
|
||||
auth.secret
|
||||
collection.index
|
||||
polaris.log
|
||||
polaris.ndb
|
||||
polaris.pid
|
||||
profile.json
|
||||
/peaks
|
||||
/thumbnails
|
||||
|
||||
# Release process artifacts (usually runs on CI)
|
||||
release
|
||||
*.res
|
||||
test/*.sqlite
|
||||
*.sqlite-journal
|
||||
*.sqlite-wal
|
||||
*.sqlite-shm
|
||||
tmp
|
||||
TestConfigLinux.toml
|
||||
TestConfigWindows.toml
|
||||
index-flame-graph.html
|
4
.gitmodules
vendored
|
@ -1,4 +0,0 @@
|
|||
[submodule "web"]
|
||||
path = web
|
||||
url = https://github.com/agersant/polaris-web.git
|
||||
branch = .
|
10
.vscode/settings.json
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true,
|
||||
"**/test-output/**": true
|
||||
},
|
||||
"files.exclude": {
|
||||
"**/target": true,
|
||||
"**/test-output": true
|
||||
}
|
||||
}
|
47
.vscode/tasks.json
vendored
|
@ -1,47 +0,0 @@
|
|||
{
|
||||
"version": "2.0.0",
|
||||
"presentation": {
|
||||
"reveal": "always"
|
||||
},
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"run",
|
||||
"--",
|
||||
"-c",
|
||||
"./TestConfigWindows.toml",
|
||||
"-d",
|
||||
"test/db.sqlite",
|
||||
"-w",
|
||||
"../polaris-web"
|
||||
],
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"group": "test",
|
||||
"label": "Test",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"test"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Compile",
|
||||
"options": {
|
||||
"cwd": "${workspaceRoot}"
|
||||
},
|
||||
"command": "cargo",
|
||||
"args": [
|
||||
"check"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
427
CHANGELOG.md
Normal file
|
@ -0,0 +1,427 @@
|
|||
# Changelog
|
||||
|
||||
## Unreleased Changes
|
||||
|
||||
- Fixed a typo in the log message that is written after applying configuration changes. (thanks @luzpaz)
|
||||
|
||||
## Polaris 0.15.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for browsing the music collection by metadata (by artist, by genre, etc.).
|
||||
- Added support for multi-value metadata for the following song fields: `artist`, `album artist`, `composer`, `genre`, `label` and `lyricist`.
|
||||
- Added support for structured search query syntax.
|
||||
- Added capability to extract audio waveform data.
|
||||
- Configuration data (user credentials, music directories, etc.) is now stored in a plain-text file which Polaris can read and write to.
|
||||
- ⚠️ The configuration format is now ([documented](docs/CONFIGURATION.md)) and slightly simpler than in previous versions.
|
||||
- Persistent data, such as playlists, is now saved in a directory that may be configured with the `--data` CLI option or the `POLARIS_DATA_DIR` environment variable.
|
||||
- ⚠️ Upon first launch, configuration data and playlists will be migrated from the Polaris 0.14.0 database into their new homes. After successful migration, the old database file will be deleted and the server will finally start. This migration functionality will be removed in future Polaris versions.
|
||||
- Collection scans are now automatically triggered when configuration changes or files are added/removed.
|
||||
- ⚠️ Dynamic DNS now works with any provider that supports updates over HTTP without header-based auth. This means YDNS is no longer an option, and you need to input a new URL for DDNS updates.
|
||||
- ⚠️ Removed last.fm integration due to maintenance concerns (abandoned libraries, broken account linking) and mismatch with project goals.
|
||||
- Removed periodic collection scans.
|
||||
|
||||
### Web client
|
||||
|
||||
- Every page has been updated to a new visual style.
|
||||
- The file browser is now displayed as an interactive tree on a single page.
|
||||
- The file browser now supports common navigation keyboard shortcuts.
|
||||
- The file browser now supports jumping to a visible file or folder by typing the start of its name.
|
||||
- The file browser now omits the top-level directory when only one music folder has been configured.
|
||||
- The current playlist now has two display modes: compact or with album art.
|
||||
- Songs in the current playlist can now be selected and re-ordered with the mouse.
|
||||
- Added a button to display statistics about the current playlist.
|
||||
- Added new pages to browse the music collection by genre.
|
||||
- Added new pages to browse the music collection by artist.
|
||||
- Added a new page to browse the music collection by album.
|
||||
- The Recently Added Albums and Random Albums pages now distinguish albums by file metadata instead of file path.
|
||||
- When navigating back to the Random Albums page, the shuffle ordering is now preserved.
|
||||
- The current playlist now supports common navigation keyboard shortcuts.
|
||||
- The seekbar for the current song being played has been replaced with a waveform visualization.
|
||||
- The title of the current song in the player can be clicked to display its metadata
|
||||
- Improved responsiveness when queuing large amounts of songs at once.
|
||||
- The `Settings > Collection` page now shows the current status of collection scanning.
|
||||
- Theme preferences have been reset and are now stored client-side.
|
||||
- Accent color is now configured as a saturation multiplier and base hue, which are used to generate a full color ramp.
|
||||
|
||||
### API
|
||||
|
||||
- API version is now 8.0.
|
||||
- Documentation is now served under `/api-docs` instead of `/swagger` (eg. `http://localhost:5050/api-docs`)
|
||||
- Clients are now expected to send their preferred API major version in a `Accept-Version` header. Omitting this currently defaults to `7`, but will become an error in future Polaris releases. Support for API version 7 will be removed entirely in a future release.
|
||||
- Most API responses now support gzip compression.
|
||||
- The response format of the `/browse`, `/flatten`, `/get_playlist`, `/search/<query>` endpoints has been modified to accommodate large lists.
|
||||
- Added new endpoints to query albums and artists.
|
||||
- The `/random` and `/recent` albums are deprecated in favor of `/albums/random` and `/albums/recent`. These endpoints now have optional parameters for RNG seeding and pagination.
|
||||
- The `/search/<query>` endpoint now requires a non-empty query (`/search/` now returns HTTP status code 404, regardless of API version).
|
||||
- The `/search/<query>` endpoint now supports per-field queries and boolean combinators.
|
||||
- The `/thumbnail` endpoint supports a new size labeled `tiny`, which returns 40x40px images.
|
||||
- Added a new `/get_songs` endpoint which returns song metadata in bulk.
|
||||
- Added a new `/peaks` endpoint which returns audio signal peaks that can be used to draw waveform visualizations.
|
||||
- Added a new `/index_status` endpoint which returns the status of music collection scans.
|
||||
- Removed the `/config` and `/preferences` API endpoints.
|
||||
- Removed the `/ddns` API endpoints, merged into the existing `/settings` endpoints.
|
||||
|
||||
## Polaris 0.14.3
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a build error (https://github.com/rust-lang/rust/issues/127343) with recent versions of the Rust compiler (thanks @pbsds)
|
||||
- Added support for m4b audio files (thanks @duydl)
|
||||
|
||||
## Polaris 0.14.2
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a startup error in Windows packaged builds
|
||||
|
||||
## Polaris 0.14.1
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed compilation issue when using musl toolchains
|
||||
- Log messages that DDNS is not setup have been downgraded to debug level
|
||||
|
||||
### Web client
|
||||
|
||||
- Fixed a bug where non-ASCII files or directories were not always alphabetically sorted (thanks @dechamps)
|
||||
- Fixed a bug where after linking a last.fm account, clicking the account name would not link to the expected page
|
||||
|
||||
## Polaris 0.14.0
|
||||
|
||||
### General
|
||||
|
||||
- Changes are now documented in `CHANGELOG.md` instead of inside individual Github releases
|
||||
|
||||
### Server
|
||||
|
||||
- API version is now 7.0
|
||||
- ⚠️ Removed support for authentication via cookies (deprecated in Polaris 0.13.0)
|
||||
- ⚠️ Removed support for authentication via the `Basic` scheme when using the HTTP `Authorization` header (deprecated in Polaris 0.13.0)
|
||||
- Fixed a bug where all music sources would be deleted when trying to add sources with duplicate names
|
||||
- Additional metadata fields are now indexed: lyricist, composer, genre and label (thanks @pmphfm)
|
||||
- Endpoints returning thumbnail images or audio files no longer use HTTP `content-encoding`
|
||||
- When indexing files with ID3v2 tags, the "Original Date Released" frame can now be used to populate the year associated with a song
|
||||
- The `/thumbnail` endpoint now supports an optional parameter for small/large/native image sizing. (thanks @Saecki)
|
||||
- Log file now contain more details about the cause of failed HTTP requests (3xx, 4xx, 5xx)
|
||||
- Startup failures now generate clearer error messages
|
||||
|
||||
### Web client
|
||||
|
||||
- Volume slider now applies non-linearly
|
||||
- Artist names are now displayed in the Random Albums and Recent Albums pages
|
||||
|
||||
## Polaris 0.13.5
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for AIFF and WAVE files (thanks @gahag)
|
||||
|
||||
### Web Client
|
||||
|
||||
- Improved performance when scrolling large playlists
|
||||
- Fixed display and playback issues when a song was used multiple times in a playlist
|
||||
- Playlist duration can now display number of days
|
||||
- Fixed a bug where the playlist panel could have blank space in very tall browser windows
|
||||
- Major dependencies updates
|
||||
|
||||
## Polaris 0.13.4
|
||||
|
||||
### Server
|
||||
|
||||
Adjustments to logging behavior.
|
||||
|
||||
On Linux:
|
||||
|
||||
- Running without `-f` emits a log file
|
||||
- Running with `-f` and no `--log` option does not emit a log file
|
||||
- Running with `-f` and `--log` option emits a log file
|
||||
|
||||
On Windows:
|
||||
|
||||
- Running with UI feature (`polaris.exe` in releases) emits a log file
|
||||
- Running without UI feature (`polaris-cli.exe` in releases) and no --log option does not emit a log file
|
||||
- Running without UI feature (`polaris-cli.exe` in releases) and --log option emits a log file
|
||||
|
||||
## Polaris 0.13.3
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a bug where music that is no longer on disk was still considered in the collection, even after re-indexing
|
||||
- On Windows, Polaris now creates a log file
|
||||
- On Linux, Polaris now creates a log file, even when running with the -f option
|
||||
|
||||
## Polaris 0.13.2
|
||||
|
||||
### Web client
|
||||
|
||||
- Fixed a bug where it was not possible to view or edit which users have administrator rights
|
||||
- Fixed a bug where, in some cases, drag and dropping a specific disc from an album would not queue the entire disc
|
||||
|
||||
## Polaris 0.13.1
|
||||
|
||||
### Server
|
||||
|
||||
- Fixed a bug where the Windows installer would create unusable installations. #122
|
||||
|
||||
## Polaris 0.13.0
|
||||
|
||||
### API changes
|
||||
|
||||
- Bumped API version number to 6.0.
|
||||
- Added new endpoints to manage users, mount points and settings more granularly.
|
||||
- Added support for authenticating via bearer tokens generated by the /auth endpoint. These tokens can be submitted via Bearer HTTP Authorization headers, or as URL parameters (`?auth_token=…`).
|
||||
- Authentication using cookies or Basic HTTP Authorization headers is deprecated and will be removed in a future revision.
|
||||
- Authentication cookies no longer expire after 24 hours. The newly added bearer tokens also have no expiration date.
|
||||
- Last.fm account linking now requires a short-lived auth token obtained from the newly added `lastfm/link_token` endpoint.
|
||||
|
||||
Server
|
||||
|
||||
- ⚠️Breaking change⚠️ If you use a config file, the `reindex_every_n_seconds` and `album_art_pattern` fields must now be in a [settings] section.
|
||||
- ⚠️Breaking change⚠️ The installation process on Linux has changed a lot. See the README for updated installation instructions. A summary of the changes is available [here](https://github.com/ogarcia/docker-polaris/issues/2).
|
||||
- Embedded album art is now supported for mp3, flac and m4a files (thanks @Saecki).
|
||||
- OPUS files can now be indexed and streamed (thanks @zaethan).
|
||||
- APE files can now be indexed and streamed.
|
||||
- The collection indexer has been rewritten for better performance. This also fixed an issue where on some machines, the web client would be unusable while indexing (thanks @inicola for the code reviews).
|
||||
- Thumbnail generation is now slightly faster, and works with more pixel formats (notably RGBA16).
|
||||
- Polaris now uses actix-web instead of rocket. This change fixes numerous performance and stability issues.
|
||||
- Sqlite is now bundled by default when building Polaris and was removed from the list of prerequisites. This can be controlled with the `bundle-sqlite` feature flag when compiling Polaris.
|
||||
- The default album art pattern now includes the jpeg extension in addition to jpg.
|
||||
- Album art patterns are now case insensitive.
|
||||
|
||||
Web client
|
||||
|
||||
- ⚠️Breaking change⚠️ Your current playlist will appear broken after this update. Please clear the current playlist using the trash can icon. Saved playlists are not affected.
|
||||
- Added a logout button.
|
||||
- Reworked interface for managing user accounts.
|
||||
- Added a shuffle button to randomly re-order the content of the current playlist.
|
||||
- The total duration of the current playlist is now displayed.
|
||||
- Audio output can now be toggled on/off by clicking the volume icon.
|
||||
- Individual discs from multi-disc albums can now be dragged into the playlist.
|
||||
- When browsing to an album, songs are now displayed and queued in filepath order.
|
||||
- Fixed a bug where albums could not be dragged from the random or recent views.
|
||||
- Fixed a bug where directories with a # sign in their name could not be browsed to.
|
||||
|
||||
## Polaris 0.12.0
|
||||
|
||||
### Server
|
||||
|
||||
- Library indexing speed is now significantly faster
|
||||
- When indexing files that have malformed ID3 tags, information preceding the error will no longer be discarded
|
||||
- Deleted users can no longer make requests using an existing session
|
||||
- When using a config file, existing users, mounts points and DDNS settings are no longer removed before applying the configuration
|
||||
- When using a config file to create users, blank usernames are now ignored
|
||||
- Improved architecture and added more unit tests
|
||||
|
||||
API Changes
|
||||
|
||||
- API version number bumped to 4.0
|
||||
- The auth endpoint now returns HTTP cookies instead of a JSON response
|
||||
- Client requests to update Last.fm status no longer return an error if no Last.fm account is associated with the user
|
||||
- The thumbnail endpoint now supports an option to disable padding to a square image
|
||||
|
||||
Web client
|
||||
|
||||
- The web client now uses Vue instead of Riot as its UI framework
|
||||
- Added support for theming
|
||||
|
||||
## Polaris 0.11.0
|
||||
|
||||
### Server
|
||||
|
||||
- Compatible with current versions of the Rust nightly compiler
|
||||
- Fixed a rare crash when indexing corrupted mp3 files
|
||||
- On Linux, Polaris now notifies systemd after starting up
|
||||
- Release tarball for Linux version now includes a top-level directory
|
||||
- User sessions no longer break across server restarts (more improvements still to do on this: #36)
|
||||
- ⚠️ Breaking change: due to improvements in Polaris credentials management, you will have to re-create your users and playlists after upgrading to this version. If you want to preserve your playlists, you can use a program like DB Browser for SQLite to back up your playlists (from db.sqlite within your Polaris installation directory) and restore them after you re-create users with the same names.
|
||||
|
||||
### Web client
|
||||
|
||||
- Song durations are now listed when available
|
||||
- Fixed a bug where clicking on breadcrumbs did not always work when the Polaris server is hosted on Windows
|
||||
- Current track info now shows in browser tab title
|
||||
- Fixed a semi-rare bug where indexing would not start during initial setup flow
|
||||
- Improved handling of untagged songs
|
||||
- Fixed a bug where the playlist had padding in Chrome
|
||||
- Fixed a bug where folder icons did not render on some systems
|
||||
|
||||
Thank you to @lnicola for working on most of the server changes!
|
||||
|
||||
## Polaris 0.10.0
|
||||
|
||||
### Server
|
||||
|
||||
- Polaris servers now ship with an interactive API documentation, available at http://localhost:5050/swagger
|
||||
- When using a prefix URL in Polaris config files, a / will no longer be added automatically at the end of the prefix
|
||||
|
||||
### Web client
|
||||
|
||||
- Automatically bring up player panel when songs are queued
|
||||
- Fixed a bug where songs were not always correctly sorted by track number in browser panel
|
||||
- Fixed a bug where some button hitboxes didn't match their visuals
|
||||
|
||||
## Polaris 0.9.0
|
||||
|
||||
### Server
|
||||
|
||||
- Rewrote all endpoints and server setup using Rocket instead of Iron
|
||||
- Fixed a bug where special characters in URL to collection folders were not handled correctly (bumped API version number)
|
||||
- Server API is now unit tested
|
||||
- Fixed a bug where lastFM integration endpoints did not work
|
||||
- ⚠️ Compiling Polaris now requires the nightly version of the Rust compiler
|
||||
|
||||
### Web client
|
||||
|
||||
- Encode special characters in URL to collection folders
|
||||
|
||||
## Polaris 0.8.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added new API endpoints for search
|
||||
- Added new API endpoints for Last.fm integration
|
||||
- Thumbnails are now stored as .jpg images instead of .png
|
||||
- Duration of some audio files is now being indexed
|
||||
- On Linux when running as a forking process, a .pid file will be written
|
||||
- Fixed a bug where usernames were inserted in session even after failed authentication
|
||||
|
||||
### Web client
|
||||
|
||||
- Added search panel
|
||||
- Added settings tab to link Last.fm account
|
||||
|
||||
## Polaris 0.7.1
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for prefix_url option in configuration files
|
||||
- Improved performance of thumbnail creation
|
||||
|
||||
## Polaris 0.7.0
|
||||
|
||||
### Server
|
||||
|
||||
- Added support for the Partial-Content HTTP header when serving music, this fixes several streaming/seeking issues when using the web client (especially in Chrome)
|
||||
- New API endpoints for playlist management
|
||||
- New command line argument (-p) to run on a custom port (contribution from @jxs)
|
||||
- New command line argument (-f) to run in foreground on Linux (contribution from @jxs)
|
||||
- Fixed a bug where tracks were queued out of order
|
||||
- Updated program icon on Windows
|
||||
|
||||
Web client
|
||||
|
||||
- Added support for playlists
|
||||
- Added a button to queue the current directory (thanks @jxs)
|
||||
|
||||
## Polaris 0.6.0
|
||||
|
||||
### Server
|
||||
|
||||
- Internal improvements to database management (now using Diesel)
|
||||
- Configuration settings are now stored in the database, polaris.toml config files are no longer loaded by default
|
||||
- Added API endpoints to read and write configuration
|
||||
- User passwords are now encrypted in storage
|
||||
- Fixed a bug where results of api/browse were not sorted correctly
|
||||
|
||||
Web client
|
||||
|
||||
- Settings can now be edited from the web UI
|
||||
- Collection re-index can now be triggered from the web UI
|
||||
- Added initial setup configuration flow to help set up first user and mount point
|
||||
- Visual changes
|
||||
|
||||
## Polaris 0.5.1
|
||||
|
||||
This is a minor release, pushing quite a bit of internal cleanup in the wild.
|
||||
|
||||
Server
|
||||
|
||||
- Removed OpenSSL dependency on Windows
|
||||
- No longer send a HTTP cookie after authentication
|
||||
|
||||
## Polaris 0.5.0
|
||||
|
||||
This release adds Linux support and a variety of improvements to the web client.
|
||||
|
||||
### Server
|
||||
|
||||
- Added Linux support
|
||||
- Moved location of configuration file on Windows to `%appdata%\Permafrost\Polaris\polaris.toml`
|
||||
|
||||
### Web client
|
||||
|
||||
- Performance improvements from upgrading RiotJS to 3.4.4 (from 2.6.2)
|
||||
- Added support for browsing random and recently added albums
|
||||
- Minor visual changes (colors, whitespace, etc.)
|
||||
- Updated favicon
|
||||
- Fixed a bug where songs containing special characters in their title would not play
|
||||
- Persist playlist and player state across sessions
|
||||
|
||||
## Polaris 0.4.0
|
||||
|
||||
This release adds new features supporting the development of polaris-android.
|
||||
|
||||
### Server
|
||||
|
||||
- Added API endpoint to pull recently added albums
|
||||
- Added support for the Authorization HTTP header (in addition to the existing /auth API endpoint)
|
||||
|
||||
## Polaris 0.3.0
|
||||
|
||||
This release is an intermediate release addressing issues with the installation process and updating internals.
|
||||
|
||||
### General
|
||||
|
||||
- Fixed missing OpenSSL DLL in Windows installer (fixes Issue #3)
|
||||
- Split every file into an individual installer component
|
||||
|
||||
### Server
|
||||
|
||||
- Added API endpoint to pull random albums
|
||||
- Upgraded dependencies
|
||||
- Added unit tests to indexing and metadata decoding
|
||||
|
||||
### Web client
|
||||
|
||||
- Web interface playlist now displays more tracks (enough to fill a 4k monitor at normal font size)
|
||||
|
||||
## Polaris 0.2.0
|
||||
|
||||
This release is focused on polish and performance, solidifying the basics that were put together in version 0.1.0. Here are the major changes:
|
||||
|
||||
### General
|
||||
|
||||
- Polaris now has a project logo
|
||||
- Windows installer now supports upgrading an existing install (from 0.2.0 to higher versions)
|
||||
- Added support for multi-disc albums
|
||||
|
||||
### Server
|
||||
|
||||
- Major performance improvements to /browse and /flatten API requests (up to 1000x faster for large requests)
|
||||
- Added API endpoint for version number
|
||||
- Album covers are now served as thumbnails rather than at source size
|
||||
- Moved configuration file outside of /Program Files
|
||||
- Added support for Ogg Vorbis, FLAC and APE metadata
|
||||
- Fixed a bug where most albums didn't show an artist name
|
||||
- Fixed a bug where uppercase extensions were not recognized
|
||||
- Upgraded compiler to Rust 1.13
|
||||
|
||||
### Web client
|
||||
|
||||
- Complete visual overhaul of the Polaris web client
|
||||
- Performance improvements for handling large playlists in Polaris web client
|
||||
- Added error messages when playing songs in unsupported formats
|
||||
|
||||
## Polaris 0.1.0
|
||||
|
||||
This is the very first Polaris release, celebrating the minimum viable product!
|
||||
|
||||
Features in this release:
|
||||
|
||||
- Server application with Windows Installer
|
||||
- Support for multiple users
|
||||
- Support for serving custom music directories
|
||||
- Support for custom album art pattern matching
|
||||
- Support for broadcasting IP to YDNS
|
||||
- Web UI to browse collection, manage playlist and listen to music
|
|
@ -1,21 +0,0 @@
|
|||
# Compiling and Running Polaris
|
||||
|
||||
Compiling and running Polaris is very easy as it only depends on the Rust toolchain.
|
||||
|
||||
1. [Install Rust](https://www.rust-lang.org/en-US/install.html)
|
||||
2. Clone the polaris depot with this command: `git clone --recursive https://github.com/agersant/polaris.git`
|
||||
3. You can now compile and run Polaris from the newly created directory with the command: `cargo run`
|
||||
|
||||
Polaris supports a few command line arguments which are useful during development:
|
||||
|
||||
- `-w some/path/to/web/dir` lets you point to the directory to be served as the web interface. You'll probably want to point this to the `/web` directory of the polaris repository.
|
||||
- `-d some/path/to/a/file.db` lets you manually choose where Polaris stores its configuration and music index (you can reuse the same database across multiple runs)
|
||||
- `-f` (on Linux) makes Polaris not fork into a separate process
|
||||
|
||||
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -d test/my.db`
|
||||
|
||||
While Polaris is running, access the web UI at [http://localhost:5050](http://localhost:5050).
|
||||
|
||||
# Running Unit Tests
|
||||
|
||||
That's the easy part, simply run `cargo test`!
|
4457
Cargo.lock
generated
135
Cargo.toml
|
@ -1,55 +1,104 @@
|
|||
[package]
|
||||
name = "polaris"
|
||||
version = "0.11.0"
|
||||
version = "0.0.0"
|
||||
authors = ["Antoine Gersant <antoine.gersant@lesforges.org>"]
|
||||
edition = "2018"
|
||||
edition = "2021"
|
||||
build = "build.rs"
|
||||
|
||||
[features]
|
||||
ui = []
|
||||
profile-index = ["flame", "flamer"]
|
||||
ui = ["native-windows-gui", "native-windows-derive"]
|
||||
|
||||
[profile.release]
|
||||
lto = "thin"
|
||||
|
||||
[dependencies]
|
||||
ape = "0.2.0"
|
||||
app_dirs = "1.1.1"
|
||||
base64 = "0.11.0"
|
||||
diesel = { version = "1.4", features = ["sqlite"] }
|
||||
diesel_migrations = { version = "1.4", features = ["sqlite"] }
|
||||
error-chain = "0.12.0"
|
||||
flame = { version = "0.2.2", optional = true }
|
||||
flamer = { version = "0.4", optional = true }
|
||||
getopts = "0.2.15"
|
||||
id3 = "0.3"
|
||||
image = "0.22"
|
||||
libsqlite3-sys = { version = "0.16", features = ["bundled-windows"] }
|
||||
rustfm-scrobble = "0.9.2"
|
||||
lewton = "0.9.1"
|
||||
log = "0.4.5"
|
||||
metaflac = "0.2"
|
||||
mp3-duration = "0.1"
|
||||
pbkdf2 = "0.3"
|
||||
rand = "0.7"
|
||||
regex = "1.2"
|
||||
reqwest = "0.9.2"
|
||||
rocket = "0.4.2"
|
||||
rust-crypto = "0.2.36"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_derive = "1.0"
|
||||
serde_json = "1.0"
|
||||
simplelog = "0.7"
|
||||
toml = "0.5"
|
||||
ape = "0.6"
|
||||
axum-extra = { version = "0.10.0", features = ["typed-header"] }
|
||||
axum-range = { version = "0.5.0" }
|
||||
bitcode = { version = "0.6.3", features = ["serde"] }
|
||||
branca = "0.10.1"
|
||||
chumsky = "0.9.3"
|
||||
enum-map = { version = "2.7.3", features = ["serde"] }
|
||||
getopts = "0.2.21"
|
||||
headers = "0.4"
|
||||
http = "1.1.0"
|
||||
icu_collator = "1.5.0"
|
||||
id3 = "1.14.0"
|
||||
lasso2 = { version = "0.8.2", features = ["serialize"] }
|
||||
lewton = "0.10.2"
|
||||
log = "0.4.22"
|
||||
metaflac = "0.2.7"
|
||||
mp3-duration = "0.1.10"
|
||||
mp4ameta = "0.11.0"
|
||||
native_db = "0.8.1"
|
||||
native_model = "0.4.20"
|
||||
nohash-hasher = "0.2.0"
|
||||
notify = { version = "6.1.1", default-features = false }
|
||||
notify-debouncer-full = { version = "0.3.1", default-features = false }
|
||||
num_cpus = "1.14.0"
|
||||
# TODO upstream PR: https://github.com/yboettcher/opus_headers/pull/7
|
||||
opus_headers = { git = "https://github.com/agersant/opus_headers", branch = "multivalue" }
|
||||
pbkdf2 = "0.11"
|
||||
rand = "0.8"
|
||||
rayon = "1.10.0"
|
||||
regex = "1.10.5"
|
||||
rusqlite = { version = "0.32.0", features = ["bundled"] }
|
||||
serde = { version = "1.0.147", features = ["derive"] }
|
||||
serde_derive = "1.0.147"
|
||||
serde_json = "1.0.122"
|
||||
simplelog = "0.12.2"
|
||||
symphonia = { version = "0.5.4", features = [
|
||||
"all-codecs",
|
||||
"all-formats",
|
||||
"opt-simd",
|
||||
] }
|
||||
tinyvec = { version = "1.8.0", features = ["serde"] }
|
||||
thiserror = "1.0.62"
|
||||
tokio = { version = "1.39", features = ["macros", "rt-multi-thread"] }
|
||||
tokio-util = { version = "0.7.11", features = ["io"] }
|
||||
toml = "0.8.19"
|
||||
tower = { version = "0.5.2" }
|
||||
tower-http = { version = "0.6.2", features = [
|
||||
"compression-gzip",
|
||||
"fs",
|
||||
"normalize-path",
|
||||
] }
|
||||
trie-rs = { version = "0.4.2", features = ["serde"] }
|
||||
unicase = "2.7.0"
|
||||
ureq = { version = "2.10.0", default-features = false, features = ["tls"] }
|
||||
utoipa = { version = "5.3", features = ["axum_extras"] }
|
||||
utoipa-axum = { version = "0.1" }
|
||||
utoipa-scalar = { version = "0.2", features = ["axum"] }
|
||||
|
||||
[dependencies.rocket_contrib]
|
||||
version = "0.4.2"
|
||||
default_features = false
|
||||
features = ["json", "serve"]
|
||||
[dependencies.axum]
|
||||
version = "0.8.1"
|
||||
default-features = false
|
||||
features = ["http1", "json", "tokio", "tower-log", "query"]
|
||||
|
||||
[dependencies.image]
|
||||
version = "0.25.2"
|
||||
default-features = false
|
||||
features = ["bmp", "gif", "jpeg", "png"]
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
uuid = "0.8"
|
||||
|
||||
[target.'cfg(windows)'.dependencies.winapi]
|
||||
version = "0.3.3"
|
||||
features = ["winuser", "libloaderapi", "shellapi", "errhandlingapi"]
|
||||
native-windows-gui = { version = "1.0.13", default-features = false, features = [
|
||||
"cursor",
|
||||
"image-decoder",
|
||||
"message-window",
|
||||
"menu",
|
||||
"tray-notification",
|
||||
], optional = true }
|
||||
native-windows-derive = { version = "1.0.5", optional = true }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
sd-notify = "0.1.0"
|
||||
unix-daemonize = "0.1.2"
|
||||
daemonize = "0.5"
|
||||
sd-notify = "0.4.2"
|
||||
|
||||
[target.'cfg(windows)'.build-dependencies]
|
||||
embed-resource = "2.4.2"
|
||||
winres = "0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
axum-test = "17.0"
|
||||
bytes = "1.7.1"
|
||||
percent-encoding = "2.2"
|
||||
|
|
140
README.md
|
@ -1,105 +1,71 @@
|
|||
[](https://github.com/agersant/polaris/actions)
|
||||
<div align="center">
|
||||
<h1><img src="res/readme/logo.png?raw=true"/></h1>
|
||||
|
||||
<img src="res/readme/logo.png?raw=true"/>
|
||||
Polaris is a music streaming application, designed to let you enjoy your music collection from any computer or mobile device. Polaris works by streaming your music directly from your own computer, without uploading it to a third-party. It is free and open-source software, without any kind of premium version. The only requirement is that your computer stays on while it streams music!
|
||||
[](https://github.com/agersant/polaris/actions)
|
||||
[](https://codecov.io/github/agersant/polaris)
|
||||
[](LICENSE-MIT)
|
||||
|
||||
# Getting Started
|
||||

|
||||
</div>
|
||||
|
||||
## Requirements
|
||||
# About
|
||||
|
||||
One of the following:
|
||||
- Windows 7 or newer
|
||||
- Linux (any reasonably modern distribution should do)
|
||||
Polaris is a self-hosted music streaming server, to enjoy your music collection from any computer or mobile device. It is free and open-source software, without any kind of premium version.
|
||||
|
||||
## Installation
|
||||
The goals of this project are:
|
||||
- 🔥 Exceptional performance and responsiveness
|
||||
- 📚️ First-class support for large music collections (100,000+ songs)
|
||||
- 📦️ Ease of installation, deployment and maintenance
|
||||
- ✨ Beautiful user interface
|
||||
|
||||
### Windows
|
||||
1. Download the [latest installer](https://github.com/agersant/polaris/releases/latest) (you want the .msi file)
|
||||
2. Run the installer
|
||||
3. That's it, you're done!
|
||||
# Try It Out!
|
||||
|
||||
You can now start Polaris from the start menu or from your desktop, Polaris will also start automatically next time you restart your computer. You can tell when Polaris is running by its icon in the notification area (near the clock and volume controls).
|
||||
Check out the demo over at https://demo.polaris.stream, featuring a selection of Creative Commons Music. The credentials to access this server are:
|
||||
|
||||
### Linux
|
||||
Username: `demo_user`
|
||||
Password: `demo_password`
|
||||
|
||||
#### Dependencies
|
||||
# Features
|
||||
|
||||
1. Install OpenSSL, SQLite and their headers, and some development tools. These are available from your distribution's package manager. For instance on Ubuntu, execute `sudo apt-get install binutils pkg-config libssl-dev libsqlite3-dev`
|
||||
2. Install the nightly Rust compiler by executing `curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain nightly` or using an [alternative method](https://www.rust-lang.org/en-US/install.html)
|
||||
- 🖥️ Runs on Windows, Linux, BSD, or through Docker
|
||||
- 🔊 Support for `flac`, `mp3`, `mp4`, `mpc`, `ogg`, `opus`, `ape`, `wav` and `aiff` files
|
||||
- 🌈 Dark mode variants and customizable color palette
|
||||
- 💿️ Browse your music by album, artist or genre
|
||||
- 📂 Browse your music as a file tree
|
||||
- 🌊 Song audio-waveform visualization
|
||||
- 🏷️ Support for multi-value fields in song metadata (eg. multiple artists per song)
|
||||
- 🔍️ Powerful search functionality with per-field queries
|
||||
- ⚙️ Plain-text configuration also editable with built-in UI
|
||||
- 👥 Setup multiple users, each with their own playlists
|
||||
- 📱 Listen to your music on the go:
|
||||
- Polaris Android ([Google Play Store](https://play.google.com/store/apps/details?id=agersant.polaris) · [F-Droid](https://f-droid.org/packages/agersant.polaris/) · [Repository](https://github.com/agersant/polaris-android))
|
||||
- Polarios ([App Store](https://apps.apple.com/app/polarios/id1662366309) · [Repository](https://gitlab.com/elise/Polarios)) [third-party]
|
||||
|
||||
#### Polaris installation
|
||||
1. Download the [latest release](https://github.com/agersant/polaris/releases/latest) of Polaris (you want the .tar.gz file)
|
||||
2. Extract the polaris archive in a directory and open a terminal in that directory
|
||||
3. Execute `make install` (this may take several minutes)
|
||||
# Installation
|
||||
|
||||
This installation process puts the polaris executable in `~/.local/bin/polaris` and several data files under `~/.local/share/polaris`.
|
||||
[Installation documentation](docs/SETUP.md)
|
||||
|
||||
From here, you might want to adjust your system to run Polaris on login using Cron, Systemd or whichever method your distribution endorses.
|
||||
[Streaming from remote devices](docs/DDNS.md)
|
||||
|
||||
If you want to uninstall Polaris, execute `make uninstall` from the extracted archive's directory. This will simply delete the directories created by the install process.
|
||||
|
||||
### In a docker container
|
||||
|
||||
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
|
||||
|
||||
## Test Run
|
||||
|
||||
- Start Polaris using the shortcut on your desktop (Windows) or by running the executable in `~/.local/bin/polaris` (Linux)
|
||||
- In your Web browser, access http://localhost:5050
|
||||
- You will see a welcome page that will guide you through the Polaris configuration
|
||||
|
||||

|
||||
|
||||
## Streaming From Other Devices
|
||||
|
||||
If you're only interested in streaming on your local network, you can skip this section. If you want to stream from school, from work, or on the go, this is for you.
|
||||
|
||||
### Dynamic DNS
|
||||
|
||||
You can access your Polaris installation from anywhere via your computer's public IP address, but there are two problems with that:
|
||||
- IP addresses are difficult to remember
|
||||
- Most ISP don't give you a fixed IP address
|
||||
|
||||
A solution to these problems is to set up Dynamic DNS, so that your installation can always be reached at a fixed URL.
|
||||
|
||||
The steps below will walk you through setting up YDNS and Polaris to give your installation a fixed URL. If you have another solution in mind, or prefer using another Dynamic DNS service, skip to the next section.
|
||||
|
||||
1. Register for a free account on https://ydns.io
|
||||
2. On the YDNS website, access the "My Hosts" page and press the + sign for "Add Host"
|
||||
3. Fill the host form as described below:
|
||||
- Domain: ydns.eu
|
||||
- Name: This part is up to you, whatever you enter will be in the URL you use to access Polaris
|
||||
- Content: Leave the default. Take a note whether the value looks like a IPv4 address (format: xxx.xxx.xxx.xxx) or a IPv6 address (format: xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx)
|
||||
- Type: Dynamic IP
|
||||
4. If the content field looked like a IPv4 address: skip to step #6
|
||||
5. If the content field looked like a IPv6 address:
|
||||
- Click on your host name (eg. yourdomain.ydns.eu)
|
||||
- You should now see a page which looks like this:
|
||||

|
||||
- Click on the green "+" icon on the right
|
||||
- Fill out the new form as described:
|
||||
- Make sure the `Type` field is set to `A`
|
||||
- Set content to 0.0.0.0
|
||||
- You should now be back on the "records" page which was pictured above
|
||||
- Click on the ID number on the left (#28717 in the example above) of the column that has AAAA listed as its "Type".
|
||||
- Click on the red trash can icon in the corner to delete this record
|
||||
- Done!
|
||||
6. In the Polaris web interface, access the `Dynamic DNS` tab of the settings screen:
|
||||
- Update the hostname field to match what you set in step 5. (eg. http://yourdomain.ydns.eu)
|
||||
- Update the username field to the email address you use when creating your YDNS account
|
||||
- Update the password field with your YDNS API password. You can find this password on https://ydns.io: click on the "User" icon in the top right and then `Preferences > API`.
|
||||
|
||||
### Port Forwarding
|
||||
Configure port forwarding on your router to redirect port 80 towards port 5050 on the computer where you run Polaris. The exact way to do this depends on your router manufacturer and model.
|
||||
|
||||
Don't forget to restart Polaris to apply your configuration changes, and access your music from other computers at http://yourdomain.ydns.eu
|
||||
|
||||
## Additional clients
|
||||
When you install Polaris, it comes with a web interface which can be accessed using any modern browser. However, it may be more convenient to use a native app on your mobile device. Currently, the only such app is the official [Polaris client for Android](https://github.com/agersant/polaris-android).
|
||||
[](https://repology.org/project/polaris-streaming/versions)
|
||||
|
||||
# Documentation
|
||||
|
||||
The Polaris server API is documented [here](https://agersant.github.io/polaris/swagger/). Please note that this Swagger page does not point to a live Polaris server so the `Try it out` buttons are not expected to work.
|
||||
Every installation of Polaris also distributes this documentation, with the ability to use the `Try it out` buttons. To access it, simply open http://localhost:5050/swagger/ in your browser on the machine running Polaris.
|
||||
- 📒 [Changelog](CHANGELOG.md)
|
||||
- 🔧 [Configuration](docs/CONFIGURATION.md)
|
||||
- 👷 [Contribute to Polaris](docs/CONTRIBUTING.md)
|
||||
- 🛟 [Maintenance Runbooks](docs/MAINTENANCE.md)
|
||||
|
||||
Feel free to open Github issues (or Pull Requests) if clarifications are needed.
|
||||
The Polaris server API is documented via [OpenAPI](https://demo.polaris.stream/api-docs/). Every installation of Polaris distributes this interactive documentation. To access it, open http://localhost:5050/api-docs/ in your browser on the machine running Polaris.
|
||||
|
||||
# Credits & License Information
|
||||
|
||||
Music featured in the demo installation:
|
||||
|
||||
- [Chris Zabriskie - Abandon Babylon](https://chriszabriskie.bandcamp.com/album/abandon-babylon) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [Chris Zabriskie - Angie's Sunday Service](https://chriszabriskie.bandcamp.com/album/angies-sunday-service) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [glaciære - pool water blue](https://steviasphere.bandcamp.com/album/pool-water-blue) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [glaciære - light ripples](https://steviasphere.bandcamp.com/album/light-ripples) [(License)](https://creativecommons.org/licenses/by/3.0/)
|
||||
- [Koresma South](https://koresma.bandcamp.com/album/south) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
|
||||
- [Pete Murphy - Essence EP](https://petemurphy.bandcamp.com/album/falling-down-the-fred-astaires-solo-jazz-piano) [(License)](https://creativecommons.org/licenses/by-nc-sa/3.0/)
|
||||
- [Rameses B - Essence EP](https://ramesesb.bandcamp.com/album/essence-ep) [(License)](https://creativecommons.org/licenses/by-nc-nd/3.0/)
|
||||
|
|
13
build.rs
Normal file
|
@ -0,0 +1,13 @@
|
|||
// Build script. On Windows, bakes the application icon and the side-by-side
// manifest into the produced executable; on Unix there is nothing to do.
#[cfg(windows)]
fn main() {
	// Attach the application icon via winres.
	let mut resource = winres::WindowsResource::new();
	resource.set_icon("./res/windows/application/icon_polaris_512.ico");
	resource.compile().unwrap();

	// Embed the Windows application manifest (DPI awareness, etc.).
	embed_resource::compile(
		"res/windows/application/polaris-manifest.rc",
		embed_resource::NONE,
	);
}

#[cfg(unix)]
fn main() {}
|
|
@ -1,13 +0,0 @@
|
|||
#!/bin/sh
# Packages a Polaris source release as release/polaris-<version>.tar.gz.
# Abort on the first failing command so a partial copy never gets tarred up.
set -e

echo "Creating output directory"
mkdir -p release/tmp/polaris

echo "Copying package files"
cp -r web docs/swagger src migrations Cargo.toml Cargo.lock res/unix/Makefile release/tmp/polaris

echo "Creating tarball"
# Read the first `version = "x.y.z"` line from Cargo.toml and strip quotes/CR/LF.
POLARIS_VERSION=$(grep -m 1 '^version' Cargo.toml | awk '{print $3}' | tr -d '"\r\n')
tar -zc -C release/tmp -f "release/polaris-$POLARIS_VERSION.tar.gz" polaris

echo "Cleaning up"
rm -rf release/tmp
|
|
@ -1,49 +0,0 @@
|
|||
Get-ChildItem "Cargo.toml" | ForEach-Object {
|
||||
$conf = $_ | Get-Content -raw
|
||||
$conf -match 'version\s+=\s+"(.*)"' | out-null
|
||||
$script:POLARIS_VERSION = $matches[1]
|
||||
}
|
||||
|
||||
"Compiling resource file"
|
||||
RC /fo res\windows\application\application.res res\windows\application\application.rc
|
||||
|
||||
""
|
||||
"Compiling executable"
|
||||
cargo rustc --release --features "ui" -- -C link-args="/SUBSYSTEM:WINDOWS /ENTRY:mainCRTStartup res\windows\application\application.res"
|
||||
|
||||
""
|
||||
"Creating output directory"
|
||||
New-Item .\release\tmp -type directory -Force | Out-Null
|
||||
Remove-Item -Recurse .\release\tmp\*
|
||||
|
||||
""
|
||||
"Copying to output directory"
|
||||
Copy-Item .\res\windows\installer\license.rtf .\release\tmp\
|
||||
Copy-Item .\res\windows\installer\banner.bmp .\release\tmp\
|
||||
Copy-Item .\res\windows\installer\dialog.bmp .\release\tmp\
|
||||
Copy-Item .\target\release\polaris.exe .\release\tmp\
|
||||
Copy-Item .\web\img .\release\tmp\web\img -recurse
|
||||
Copy-Item .\web\js .\release\tmp\web\js -recurse
|
||||
Copy-Item .\web\lib .\release\tmp\web\lib -recurse
|
||||
Copy-Item .\web\style .\release\tmp\web\style -recurse
|
||||
Copy-Item .\web\tags .\release\tmp\web\tags -recurse
|
||||
Copy-Item .\web\favicon.png .\release\tmp\web\
|
||||
Copy-Item .\web\index.html .\release\tmp\web\
|
||||
Copy-Item .\docs\swagger .\release\tmp\swagger -recurse
|
||||
|
||||
""
|
||||
"Creating installer"
|
||||
heat dir .\release\tmp\web\ -ag -g1 -dr AppDataPolaris -cg WebUI -sfrag -var wix.WebUIDir -out .\release\tmp\web_ui_fragment.wxs
|
||||
heat dir .\release\tmp\swagger\ -ag -g1 -dr AppDataPolaris -cg SwaggerUI -sfrag -var wix.SwaggerUIDir -out .\release\tmp\swagger_ui_fragment.wxs
|
||||
|
||||
candle -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\web_ui_fragment.wixobj .\release\tmp\web_ui_fragment.wxs
|
||||
candle -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\swagger_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wxs
|
||||
candle -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\installer.wixobj .\res\windows\installer\installer.wxs
|
||||
|
||||
light -dWebUIDir=".\release\tmp\web" -dSwaggerUIDir=".\release\tmp\swagger" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\Polaris_$POLARIS_VERSION.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj .\release\tmp\swagger_ui_fragment.wixobj
|
||||
|
||||
"Cleaning up"
|
||||
Remove-Item -Recurse .\release\tmp
|
||||
|
||||
""
|
||||
Read-Host -Prompt "All clear! Press Enter to exit"
|
|
@ -1,2 +0,0 @@
|
|||
[print_schema]
|
||||
file = "src/db/schema.rs"
|
50
docs/CONFIGURATION.md
Normal file
|
@ -0,0 +1,50 @@
|
|||
# Configuration
|
||||
|
||||
Polaris configuration resides in a single text file whose format is documented below. You can use the Polaris web UI to modify the configuration, or write to it in any text editor. You may edit the configuration file while Polaris is running.
|
||||
|
||||
## Location
|
||||
|
||||
The location of the configuration file is always logged during Polaris startup. It is determined as follows:
|
||||
|
||||
- From the `--config` (or `-c`) CLI option if present. This option must point to the `.toml` file.
|
||||
- If the CLI option is not specified, Polaris will look for a `polaris.toml` file, inside the directory specified by the `POLARIS_CONFIG_DIR` environment variable _at compilation time_. When using the Windows installer, this will be `%LOCALAPPDATA%/Permafrost/Polaris/polaris.toml`. When using the supplied Makefile, the default is either `/usr/local/etc/polaris` (for a system-wide installation), or `~/.config/polaris` (for an XDG installation).
|
||||
- If `POLARIS_CONFIG_DIR` was not set when Polaris was compiled, it will default to `.` on Linux, and the `LOCALAPPDATA` location mentioned above on Windows. This behavior on Windows may change in future releases.
|
||||
|
||||
## Format
|
||||
|
||||
The configuration file uses the [TOML](https://toml.io/) format. Everything in the configuration file is optional and may be omitted (unless mentioned otherwise).
|
||||
|
||||
```toml
|
||||
# Regular expression used to identify album art in files adjacent to an audio file
|
||||
album_art_pattern = "Folder.(jpeg|jpg|png)"
|
||||
# A URL Polaris will regularly make requests to in order to update Dynamic DNS
|
||||
ddns_url = "https://example.com?token=foobar"
|
||||
|
||||
# Array of locations Polaris should scan to find music files
|
||||
[[mount_dirs]]
|
||||
# Directory to scan
|
||||
source = "/home/example/music"
|
||||
# User-facing name for this directory (must be unique)
|
||||
name = "My Music 🎧️"
|
||||
|
||||
[[mount_dirs]]
|
||||
source = "/mnt/example/more_music"
|
||||
name = "Extra Music 🎵"
|
||||
|
||||
# Array of user accounts who can connect to the Polaris server
|
||||
[[users]]
|
||||
# Username for login
|
||||
name = "example-user"
|
||||
# If true, user will have access to all settings in the web UI
|
||||
admin = true
|
||||
# Plain text password for this user. Will be ignored if hashed_password is set. Polaris will never write to this field. For each user, at least one of initial_password and hashed_password must be set.
|
||||
initial_password = "top-secret-password"
|
||||
# Hashed and salted password for the user. Polaris will create this field if unset.
|
||||
hashed_password = "$pbkdf2-sha256$i=10000,l=32$SI8LjK1KtvcawhgmWGJgRA$t9btMwhUTQ8r3vqI1xhArn19J7Jezyoi461fFjhZXGU"
|
||||
|
||||
[[users]]
|
||||
name = "other-user"
|
||||
admin = true
|
||||
initial_password = "amospheric-strawberry64"
|
||||
```
|
||||
|
37
docs/CONTRIBUTING.md
Normal file
|
@ -0,0 +1,37 @@
|
|||
# Contributing
|
||||
|
||||
## Guidelines
|
||||
|
||||
While Polaris is free and open-source software, it is not very open to code contributions. The reasons behind this are:
|
||||
- Polaris is a hobby project. I don't want it to feel like my day job, where I do a lot of code reviews, mentoring and tech leadership.
|
||||
- I am committed to maintaining this software for a very long time. I would rather maintain code that I mostly wrote myself.
|
||||
|
||||
This still leaves room for a few avenues to contribute:
|
||||
- Help answering questions in the issue tracker.
|
||||
- Package Polaris for a Linux distribution
|
||||
- Documentation improvements or writing user guides.
|
||||
- Satellite projects (eg. [docker-polaris](https://github.com/ogarcia/docker-polaris), [polarios](https://gitlab.com/elise/Polarios))
|
||||
- Bug fixes.
|
||||
|
||||
For non-trivial new features, you are welcome to maintain a fork. If you need help finding your way around the code, feel free to open a [discussion thread](https://github.com/agersant/polaris/discussions).
|
||||
|
||||
## Compiling and running Polaris
|
||||
|
||||
1. [Install Rust](https://www.rust-lang.org/en-US/install.html) (stable toolchain)
|
||||
2. Clone the polaris depot with this command: `git clone https://github.com/agersant/polaris.git`
|
||||
3. You can now compile and run polaris from the newly created directory with the command: `cargo run`
|
||||
|
||||
Polaris supports a few command line arguments which are useful during development:
|
||||
|
||||
- `-c some/config.toml` sets the location of the [configuration](/docs/CONFIGURATION.md) file.
|
||||
- `--data some/path` sets the folder Polaris will use to store runtime data such as playlists, collection index and auth secrets.
|
||||
- `-w some/path/to/web/dir` lets you point to the directory to be served as the web interface. You can find a suitable directory in your Polaris install (under `/web`), or from the [latest polaris-web release](https://github.com/agersant/polaris-web/releases/latest/download/web.zip).
|
||||
- `-f` (on Linux) makes Polaris not fork into a separate process.
|
||||
|
||||
Putting it all together, a typical command to compile and run the program would be: `cargo run -- -w web -c test-config.toml`
|
||||
|
||||
While Polaris is running, access the web UI at [http://localhost:5050](http://localhost:5050).
|
||||
|
||||
## Running unit tests
|
||||
|
||||
That's the easy part, simply run `cargo test`!
|
21
docs/DDNS.md
Normal file
|
@ -0,0 +1,21 @@
|
|||
# Streaming from other devices
|
||||
|
||||
These instructions apply to users running Polaris on a home network. When deploying to cloud services or VPS, configuration requirements will differ.
|
||||
|
||||
## Port forwarding
|
||||
|
||||
Configure port forwarding on your router to redirect port 80 traffic to port 5050 on the computer running Polaris. The exact way to do this depends on your router manufacturer and model.
|
||||
|
||||
## Dynamic DNS
|
||||
|
||||
You can access your Polaris installation from anywhere via your computer's public IP address, but there are two problems with that:
|
||||
- IP addresses are difficult to remember
|
||||
- Most ISP don't give you a fixed IP address
|
||||
|
||||
A solution to these problems is to set up Dynamic DNS, so that your installation can always be reached at a fixed URL.
|
||||
|
||||
1. Reserve a URL with a dynamic DNS provider such as https://www.duckdns.org/ or https://freemyip.com/.
|
||||
2. The dynamic DNS provider gives you a unique Update URL that can be used to tell them where to send traffic. For example, `freemyip.com` gives you this URL immediately after claiming a subdomain. Other providers may show it in your profile page, etc.
|
||||
3. Access your Polaris instance (http://localhost:5050 by default).
|
||||
4. Go to the `Settings` page and into the `Dynamic DNS` section.
|
||||
5. Set the Update URL to the one you obtained in step 2.
|
10
docs/MAINTENANCE.md
Normal file
|
@ -0,0 +1,10 @@
|
|||
# Maintenance
|
||||
|
||||
## How to make a release
|
||||
|
||||
- Update CHANGELOG.md to reflect new release
|
||||
- On Github, go to **Actions**, select the **Make Release** workflow and click **Run workflow**
|
||||
- Select the branch to deploy (usually `master`)
|
||||
- Input a user-facing version name (eg: **0.13.0**)
|
||||
- Click the **Run workflow** button
|
||||
- After CI completes, move the release from Draft to Published
|
|
@ -1,3 +0,0 @@
|
|||
## Documentation
|
||||
|
||||
- [API Documentation](swagger)
|
30
docs/SETUP.md
Normal file
|
@ -0,0 +1,30 @@
|
|||
# Installation
|
||||
|
||||
## On Windows
|
||||
|
||||
1. Download the [latest installer](https://github.com/agersant/polaris/releases/latest) (you want the .msi file)
|
||||
2. Run the installer
|
||||
3. Launch Polaris from the start menu
|
||||
4. In your web browser, access http://localhost:5050
|
||||
|
||||
## In a docker container
|
||||
|
||||
To run polaris from a Docker container, please follow instructions from the [docker-polaris](https://github.com/ogarcia/docker-polaris) repository.
|
||||
|
||||
## From source on Linux
|
||||
|
||||
### Dependencies
|
||||
|
||||
1. Install OpenSSL, SQLite and their respective headers (eg. `sudo apt-get install libsqlite3-dev libssl-dev`).
|
||||
2. Install `binutils` and `pkg-config` (eg. `sudo apt-get install binutils pkg-config`).
|
||||
3. Install the Rust compiler by executing `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` or using an [alternative method](https://www.rust-lang.org/en-US/install.html)
|
||||
|
||||
### Polaris installation
|
||||
1. Download the [latest release](https://github.com/agersant/polaris/releases/latest) of Polaris (you want the .tar.gz file)
|
||||
2. Extract the Polaris archive in a directory and open a terminal in that directory
|
||||
3. To install Polaris within your home directory, execute `make install-xdg`. This installation follows the [XDG Base Directory Specification](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html). You can use `make preview-xdg` to see which directories the install process would use.
|
||||
4. If you prefer a system-wide install, execute `make install` (without the `-xdg` suffix). If you use `sudo` to perform such a system install, you may need the `-E` option so that your sudo user find the Rust binaries: `sudo -E make install`. This installation follows the [GNU Standard Installation Directories](https://www.gnu.org/prep/standards/html_node/Directory-Variables.html). You can use `make preview` to see which directories the install process would use.
|
||||
|
||||
From here, you might want to adjust your system to run Polaris on login using Systemd, Cron or whichever method your distribution endorses.
|
||||
|
||||
If you want to uninstall Polaris, execute `make uninstall-xdg` from the extracted archive's directory (or `make uninstall` if you made a system-wide install). This will delete all the files and directories listed above (including your configuration, playlists, etc.). If you customized the install process by specifying environment variables like `PREFIX`, make sure they are set to the same values when running the uninstall command.
|
|
@ -1,3 +0,0 @@
|
|||
theme: jekyll-theme-minimal
|
||||
title: Polaris
|
||||
logo: res/logo_no_text.png
|
Before Width: | Height: | Size: 16 KiB After Width: | Height: | Size: 16 KiB |
Before Width: | Height: | Size: 665 B |
Before Width: | Height: | Size: 628 B |
|
@ -1,68 +0,0 @@
|
|||
<!-- HTML for static distribution bundle build -->
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<title>Polaris Swagger UI</title>
|
||||
<script type="text/javascript">
|
||||
var pathname = document.location.pathname;
|
||||
pathname = pathname.replace(/\/index\.html$/, "");
|
||||
if (!pathname.endsWith('/')) {
|
||||
pathname += "/";
|
||||
}
|
||||
document.write("<base href='" + pathname + "' />");
|
||||
</script>
|
||||
<link rel="stylesheet" type="text/css" href="swagger-ui.css">
|
||||
<link rel="icon" type="image/png" href="favicon-32x32.png" sizes="32x32" />
|
||||
<link rel="icon" type="image/png" href="favicon-16x16.png" sizes="16x16" />
|
||||
<style>
|
||||
html {
|
||||
box-sizing: border-box;
|
||||
overflow: -moz-scrollbars-vertical;
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
*,
|
||||
*:before,
|
||||
*:after {
|
||||
box-sizing: inherit;
|
||||
}
|
||||
|
||||
body {
|
||||
margin: 0;
|
||||
background: #fafafa;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="swagger-ui"></div>
|
||||
|
||||
<script src="swagger-ui-bundle.js"> </script>
|
||||
<script src="swagger-ui-standalone-preset.js"> </script>
|
||||
<script>
|
||||
window.onload = function() {
|
||||
// Begin Swagger UI call region
|
||||
const ui = SwaggerUIBundle({
|
||||
url: "polaris-api.json",
|
||||
dom_id: '#swagger-ui',
|
||||
deepLinking: true,
|
||||
presets: [
|
||||
SwaggerUIBundle.presets.apis,
|
||||
SwaggerUIStandalonePreset
|
||||
],
|
||||
plugins: [
|
||||
SwaggerUIBundle.plugins.DownloadUrl
|
||||
],
|
||||
layout: "StandaloneLayout"
|
||||
})
|
||||
// End Swagger UI call region
|
||||
|
||||
window.ui = ui
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
|
@ -1,67 +0,0 @@
|
|||
<!doctype html>
|
||||
<html lang="en-US">
|
||||
<body onload="run()">
|
||||
</body>
|
||||
</html>
|
||||
<script>
|
||||
'use strict';
|
||||
function run () {
|
||||
var oauth2 = window.opener.swaggerUIRedirectOauth2;
|
||||
var sentState = oauth2.state;
|
||||
var redirectUrl = oauth2.redirectUrl;
|
||||
var isValid, qp, arr;
|
||||
|
||||
if (/code|token|error/.test(window.location.hash)) {
|
||||
qp = window.location.hash.substring(1);
|
||||
} else {
|
||||
qp = location.search.substring(1);
|
||||
}
|
||||
|
||||
arr = qp.split("&")
|
||||
arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';})
|
||||
qp = qp ? JSON.parse('{' + arr.join() + '}',
|
||||
function (key, value) {
|
||||
return key === "" ? value : decodeURIComponent(value)
|
||||
}
|
||||
) : {}
|
||||
|
||||
isValid = qp.state === sentState
|
||||
|
||||
if ((
|
||||
oauth2.auth.schema.get("flow") === "accessCode"||
|
||||
oauth2.auth.schema.get("flow") === "authorizationCode"
|
||||
) && !oauth2.auth.code) {
|
||||
if (!isValid) {
|
||||
oauth2.errCb({
|
||||
authId: oauth2.auth.name,
|
||||
source: "auth",
|
||||
level: "warning",
|
||||
message: "Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"
|
||||
});
|
||||
}
|
||||
|
||||
if (qp.code) {
|
||||
delete oauth2.state;
|
||||
oauth2.auth.code = qp.code;
|
||||
oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
|
||||
} else {
|
||||
let oauthErrorMsg
|
||||
if (qp.error) {
|
||||
oauthErrorMsg = "["+qp.error+"]: " +
|
||||
(qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
|
||||
(qp.error_uri ? "More info: "+qp.error_uri : "");
|
||||
}
|
||||
|
||||
oauth2.errCb({
|
||||
authId: oauth2.auth.name,
|
||||
source: "auth",
|
||||
level: "error",
|
||||
message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server"
|
||||
});
|
||||
}
|
||||
} else {
|
||||
oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
|
||||
}
|
||||
window.close();
|
||||
}
|
||||
</script>
|
|
@ -1,959 +0,0 @@
|
|||
{
|
||||
"openapi": "3.0.0",
|
||||
"info": {
|
||||
"description": "",
|
||||
"version": "3.0",
|
||||
"title": "Polaris",
|
||||
"termsOfService": ""
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "/api"
|
||||
}
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"name": "Collection",
|
||||
"description": "Browsing the music collection"
|
||||
},
|
||||
{
|
||||
"name": "Last.fm",
|
||||
"description": "Integrating with Last.fm"
|
||||
},
|
||||
{
|
||||
"name": "Settings",
|
||||
"description": "Managing the polaris installation"
|
||||
},
|
||||
{
|
||||
"name": "Playlists",
|
||||
"description": "Managing playlists"
|
||||
},
|
||||
{
|
||||
"name": "Other"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/version": {
|
||||
"get": {
|
||||
"tags": ["Other"],
|
||||
"summary": "Returns which API version this server implements",
|
||||
"operationId": "getVersion",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Version"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/initial_setup": {
|
||||
"get": {
|
||||
"tags": ["Other"],
|
||||
"summary": "Returns the current state of the initial setup flow",
|
||||
"operationId": "getInitialSetup",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/InitialSetup"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/trigger_index": {
|
||||
"post": {
|
||||
"tags": ["Other"],
|
||||
"summary": "Begins or queues a crawl of the music collection",
|
||||
"operationId": "postTriggerIndex",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"admin_http_header": [],
|
||||
"admin_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/settings": {
|
||||
"get": {
|
||||
"tags": ["Settings"],
|
||||
"summary": "Reads the existing server configuration",
|
||||
"operationId": "getSettings",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Config"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"admin_http_header": [],
|
||||
"admin_cookie": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"put": {
|
||||
"tags": ["Settings"],
|
||||
"summary": "Overwrites the server configuration",
|
||||
"operationId": "putSettings",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": { "application/json": { "schema": { "$ref": "#/components/schemas/Config" } } }
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"admin_http_header": [],
|
||||
"admin_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/preferences": {
|
||||
"get": {
|
||||
"tags": ["Settings"],
|
||||
"summary": "Reads the preferences of the current user",
|
||||
"operationId": "getPreferences",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Preferences"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/auth": {
|
||||
"post": {
|
||||
"tags": ["Other"],
|
||||
"summary": "Returns information about user permissions and a session cookie for future authenticated requests.",
|
||||
"operationId": "postAuth",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": { "application/json": { "schema": { "$ref": "#/components/schemas/AuthCredentials" } } }
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": { "application/json": { "schema": { "$ref": "#/components/schemas/AuthOutput" } } }
|
||||
},
|
||||
"401": {
|
||||
"description": "Invalid credentials"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/browse": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Reads the content of the top-level directory in the music collection",
|
||||
"operationId": "getBrowse",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/CollectionFile"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/browse/{location}": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Reads the content of a directory in the music collection",
|
||||
"operationId": "getBrowsePath",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "location",
|
||||
"in": "path",
|
||||
"description": "Path to the collection directory being explored",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/CollectionFile"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/flatten": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Recursively lists all the songs in the music collection",
|
||||
"operationId": "getFlatten",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Song"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/flatten/{location}": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Recursively lists all the songs within a directory of the music collection",
|
||||
"operationId": "getFlattenPath",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "location",
|
||||
"in": "path",
|
||||
"description": "Path to the collection directory being explored",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Song"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/random": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Returns a list of random albums",
|
||||
"operationId": "getRandom",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Directory"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/recent": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Returns the albums most recently added to the collection",
|
||||
"operationId": "getRecent",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Directory"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/search/{query}": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Searches for songs and directories",
|
||||
"operationId": "getSearch",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "query",
|
||||
"in": "path",
|
||||
"description": "Search query used to filter results",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/CollectionFile"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/serve/{file}": {
|
||||
"get": {
|
||||
"tags": ["Collection"],
|
||||
"summary": "Access a media file in the collection",
|
||||
"operationId": "getServe",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "file",
|
||||
"in": "path",
|
||||
"description": "Path to the desired file",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"image/*": { "schema": { "format": "binary" } },
|
||||
"audio/*": { "schema": { "format": "binary" } }
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/playlists": {
|
||||
"get": {
|
||||
"tags": ["Playlists"],
|
||||
"summary": "Lists the playlists belonging to the current user",
|
||||
"operationId": "getPlaylists",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ListPlaylistsEntry"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/playlist/{playlistName}": {
|
||||
"get": {
|
||||
"tags": ["Playlists"],
|
||||
"summary": "Reads the content of a playlist",
|
||||
"operationId": "getPlaylist",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "playlistName",
|
||||
"in": "path",
|
||||
"description": "Name of the playlist to read",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Song"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"put": {
|
||||
"tags": ["Playlists"],
|
||||
"summary": "Saves a playlist",
|
||||
"operationId": "putPlaylist",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "playlistName",
|
||||
"in": "path",
|
||||
"description": "Name of the playlist to save",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json":{
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/SavePlaylistInput"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"delete": {
|
||||
"tags": ["Playlists"],
|
||||
"summary": "Deletes a playlist",
|
||||
"operationId": "deletePlaylist",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "playlistName",
|
||||
"in": "path",
|
||||
"description": "Name of the playlist to delete",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/lastfm/now_playing/{song}": {
|
||||
"put": {
|
||||
"tags": ["Last.fm"],
|
||||
"summary": "Tells Last.fm the song currently being played",
|
||||
"operationId": "putLastFMNowPlaying",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "song",
|
||||
"in": "path",
|
||||
"description": "Path to the song being played",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/lastfm/scrobble/{song}": {
|
||||
"post": {
|
||||
"tags": ["Last.fm"],
|
||||
"summary": "Tells Last.fm that a song has been playing for long enough to be scrobbled",
|
||||
"operationId": "postLastFMScrobble",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "song",
|
||||
"in": "path",
|
||||
"description": "Path to the song being played",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"/lastfm/link": {
|
||||
"get": {
|
||||
"tags": ["Last.fm"],
|
||||
"summary": "Links a Polaris user with a Last.fm account.",
|
||||
"externalDocs": {
|
||||
"description": "This endpoint is meant to be used as a Last.fm authentication handler, as described here:",
|
||||
"url": "https://www.last.fm/api/webauth"
|
||||
},
|
||||
"operationId": "getLastFMLink",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "token",
|
||||
"in": "query",
|
||||
"required": true,
|
||||
"description": "Last.fm authentication token",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "content",
|
||||
"in": "query",
|
||||
"required": true,
|
||||
"description": "Base64 encoded HTML content to be returned to the client initiating the link operation",
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation",
|
||||
"content": {
|
||||
"text/html": {
|
||||
"description": "The same content originally present in the 'content' parameter"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"delete": {
|
||||
"tags": ["Last.fm"],
|
||||
"summary": "Unlinks Polaris user and Last.fm account",
|
||||
"operationId": "deleteLastFMLink",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful operation"
|
||||
}
|
||||
},
|
||||
"security": [
|
||||
{
|
||||
"auth_http_header": [],
|
||||
"auth_cookie": []
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"Version": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"major": {
|
||||
"type": "integer",
|
||||
"format": "int64",
|
||||
"example": 3
|
||||
},
|
||||
"minor": {
|
||||
"type": "integer",
|
||||
"format": "int64",
|
||||
"example": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"InitialSetup": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"has_any_users": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Config": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"album_art_pattern": {
|
||||
"type": "string",
|
||||
"example": "^Folder.(png|jpg|jpeg)$"
|
||||
},
|
||||
"reindex_every_n_seconds": {
|
||||
"type": "integer",
|
||||
"example": 3600
|
||||
},
|
||||
"mount_dirs": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/MountPoint"
|
||||
}
|
||||
},
|
||||
"prefix_url": {
|
||||
"type": "string"
|
||||
},
|
||||
"users": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/ConfigUser"
|
||||
}
|
||||
},
|
||||
"ydns": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"host": {
|
||||
"type": "string",
|
||||
"example": "yourname.ydns.eu"
|
||||
},
|
||||
"username": {
|
||||
"type": "string",
|
||||
"example": "you@host.com"
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"example": "hunter2"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"ConfigUser": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string"
|
||||
},
|
||||
"password": {
|
||||
"type": "string",
|
||||
"description": "Always blank when this field appears in a server response"
|
||||
},
|
||||
"admin": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"MountPoint": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"source": {
|
||||
"type": "string",
|
||||
"example": "/mnt/some_drive/music"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "My Music"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Preferences": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"lastfm_username": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"AuthCredentials": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"username": {
|
||||
"type": "string"
|
||||
},
|
||||
"password": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"AuthOutput": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"admin": {
|
||||
"type": "boolean"
|
||||
}
|
||||
}
|
||||
},
|
||||
"CollectionFile": {
|
||||
"oneOf": [
|
||||
{
|
||||
"$ref": "#/components/schemas/CollectionFileDirectory"
|
||||
},
|
||||
{
|
||||
"$ref": "#/components/schemas/CollectionFileSong"
|
||||
}
|
||||
]
|
||||
},
|
||||
"CollectionFileDirectory": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Directory": {
|
||||
"$ref": "#/components/schemas/Directory"
|
||||
}
|
||||
}
|
||||
},
|
||||
"CollectionFileSong": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"Song": {
|
||||
"$ref": "#/components/schemas/Song"
|
||||
}
|
||||
}
|
||||
},
|
||||
"Directory": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"example": "My Music/Metal/Stratovarius/Destiny",
|
||||
"required": true
|
||||
},
|
||||
"artist": {
|
||||
"type": "string",
|
||||
"example": "Stratovarius"
|
||||
},
|
||||
"year": {
|
||||
"type": "integer",
|
||||
"example": 1998
|
||||
},
|
||||
"album": {
|
||||
"type": "string",
|
||||
"example": "Destiny"
|
||||
},
|
||||
"artwork": {
|
||||
"type": "string",
|
||||
"example": "My Music/Metal/Stratovarius/Destiny/Folder.png"
|
||||
},
|
||||
"date_added": {
|
||||
"type": "integer",
|
||||
"example": 1453179635,
|
||||
"required": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"Song": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"path": {
|
||||
"type": "string",
|
||||
"example": "My Music/Metal/Stratovarius/Destiny/Anthem of the World.mp3",
|
||||
"required": true
|
||||
},
|
||||
"track_number": {
|
||||
"type": "integer",
|
||||
"example": 9
|
||||
},
|
||||
"disc_number": {
|
||||
"type": "integer",
|
||||
"example": 1
|
||||
},
|
||||
"title": {
|
||||
"type": "string",
|
||||
"example": "Anthem of the World"
|
||||
},
|
||||
"artist": {
|
||||
"type": "string",
|
||||
"example": "Stratovarius"
|
||||
},
|
||||
"album_artist": {
|
||||
"type": "string",
|
||||
"example": null
|
||||
},
|
||||
"year": {
|
||||
"type": "integer",
|
||||
"example": 1998
|
||||
},
|
||||
"album": {
|
||||
"type": "string",
|
||||
"example": "Destiny"
|
||||
},
|
||||
"artwork": {
|
||||
"type": "string",
|
||||
"example": "My Music/Metal/Stratovarius/Destiny/Folder.png"
|
||||
},
|
||||
"duration": {
|
||||
"type": "integer",
|
||||
"example": 571
|
||||
}
|
||||
}
|
||||
},
|
||||
"ListPlaylistsEntry": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "Friday Chill"
|
||||
}
|
||||
}
|
||||
},
|
||||
"SavePlaylistInput": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"tracks": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"example": "My Music/Metal/Stratovarius/Destiny/Anthem of the World.mp3"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"securitySchemes": {
|
||||
"auth_http_header": {
|
||||
"type": "http",
|
||||
"scheme": "basic"
|
||||
},
|
||||
"auth_cookie": {
|
||||
"type": "apiKey",
|
||||
"in": "cookie",
|
||||
"name": "session",
|
||||
"description": "A session token obtained returned as a server cookie by making a request via the auth_http_header scheme."
|
||||
},
|
||||
"admin_http_header": {
|
||||
"type": "http",
|
||||
"scheme": "basic",
|
||||
"description": "Identical to the auth_http_header scheme but only for users recognized as admin by the Polaris server"
|
||||
},
|
||||
"admin_cookie": {
|
||||
"type": "apiKey",
|
||||
"in": "cookie",
|
||||
"name": "session",
|
||||
"description": "Identical to the auth_cookie scheme but only for users recognized as admin by the Polaris server"
|
||||
}
|
||||
},
|
||||
"links": {},
|
||||
"callbacks": {}
|
||||
},
|
||||
"security": []
|
||||
}
|
|
@ -1 +0,0 @@
|
|||
{"version":3,"sources":[],"names":[],"mappings":"","file":"swagger-ui.css","sourceRoot":""}
|
46
flake.lock
generated
Normal file
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1736701207,
|
||||
"narHash": "sha256-jG/+MvjVY7SlTakzZ2fJ5dC3V1PrKKrUEOEE30jrOKA=",
|
||||
"rev": "ed4a395ea001367c1f13d34b1e01aa10290f67d6",
|
||||
"revCount": 737298,
|
||||
"type": "tarball",
|
||||
"url": "https://api.flakehub.com/f/pinned/NixOS/nixpkgs/0.1.737298%2Brev-ed4a395ea001367c1f13d34b1e01aa10290f67d6/01945f5f-4175-7e72-8809-a1e482c4a443/source.tar.gz"
|
||||
},
|
||||
"original": {
|
||||
"type": "tarball",
|
||||
"url": "https://flakehub.com/f/NixOS/nixpkgs/0.1.%2A.tar.gz"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1736735482,
|
||||
"narHash": "sha256-QOA4jCDyyUM9Y2Vba+HSZ/5LdtCMGaTE/7NkkUzBr50=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "cf960a1938ee91200fe0d2f7b2582fde2429d562",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
58
flake.nix
Normal file
|
@ -0,0 +1,58 @@
|
|||
{
  description = "A Nix-flake-based Rust development environment";

  inputs = {
    nixpkgs.url = "https://flakehub.com/f/NixOS/nixpkgs/0.1.*.tar.gz";
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, rust-overlay }:
    let
      supportedSystems = [ "x86_64-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" ];
      # Instantiate nixpkgs (with both overlays applied) once per supported
      # system and hand it to the given function.
      forEachSupportedSystem = f: nixpkgs.lib.genAttrs supportedSystems (system: f {
        pkgs = import nixpkgs {
          inherit system;
          overlays = [ rust-overlay.overlays.default self.overlays.default ];
        };
      });
    in
    {
      overlays.default = final: prev: {
        # Resolve the Rust toolchain: prefer a rust-toolchain(.toml) file
        # pinned in the repository, otherwise use the latest stable release.
        rustToolchain =
          let
            rust = prev.rust-bin;
          in
          if builtins.pathExists ./rust-toolchain.toml then
            rust.fromRustupToolchainFile ./rust-toolchain.toml
          else if builtins.pathExists ./rust-toolchain then
            rust.fromRustupToolchainFile ./rust-toolchain
          else
            rust.stable.latest.default.override {
              extensions = [ "rust-src" "rustfmt" ];
            };
      };

      devShells = forEachSupportedSystem ({ pkgs }: {
        default = pkgs.mkShell {
          packages = with pkgs; [
            rustToolchain
            openssl
            pkg-config
            cargo-deny
            cargo-edit
            cargo-watch
            rust-analyzer
            samply
          ];

          env = {
            # Required by rust-analyzer
            RUST_SRC_PATH = "${pkgs.rustToolchain}/lib/rustlib/src/rust/library";
          };
        };
      });
    };
}
|
|
@ -1,2 +0,0 @@
|
|||
-- Down migration for the initial schema: remove both collection index tables.
DROP TABLE directories;
|
||||
DROP TABLE songs;
|
|
@ -1,25 +0,0 @@
|
|||
-- Up migration for the initial schema.

-- One row per directory discovered while crawling the music collection.
-- Re-indexing the same path replaces the existing row (UNIQUE ... REPLACE).
CREATE TABLE directories (
	id INTEGER PRIMARY KEY NOT NULL,
	path TEXT NOT NULL,
	parent TEXT,
	artist TEXT,
	year INTEGER,
	album TEXT,
	artwork TEXT,
	UNIQUE(path) ON CONFLICT REPLACE
);

-- One row per audio file, with the tag metadata read during indexing.
CREATE TABLE songs (
	id INTEGER PRIMARY KEY NOT NULL,
	path TEXT NOT NULL,
	parent TEXT NOT NULL,
	track_number INTEGER,
	disc_number INTEGER,
	title TEXT,
	artist TEXT,
	album_artist TEXT,
	year INTEGER,
	album TEXT,
	artwork TEXT,
	UNIQUE(path) ON CONFLICT REPLACE
);
|
|
@ -1,15 +0,0 @@
|
|||
-- Rebuild the directories table without the date_added column.
-- SQLite cannot drop a column directly, so copy the surviving columns
-- through a temporary table, recreate the table, and copy back.
CREATE TEMPORARY TABLE directories_backup(id, path, parent, artist, year, album, artwork);
INSERT INTO directories_backup SELECT id, path, parent, artist, year, album, artwork FROM directories;
DROP TABLE directories;
CREATE TABLE directories (
	id INTEGER PRIMARY KEY NOT NULL,
	path TEXT NOT NULL,
	parent TEXT,
	artist TEXT,
	year INTEGER,
	album TEXT,
	artwork TEXT,
	UNIQUE(path) ON CONFLICT REPLACE
);
INSERT INTO directories SELECT * FROM directories_backup;
DROP TABLE directories_backup;
|
|
@ -1 +0,0 @@
|
|||
-- Record when each directory entered the index; existing rows default to 0.
ALTER TABLE directories ADD COLUMN date_added INTEGER DEFAULT 0 NOT NULL;
|
|
@ -1 +0,0 @@
|
|||
-- Down migration: remove the users table.
DROP TABLE users;
|
|
@ -1,8 +0,0 @@
|
|||
-- User accounts. Passwords are stored as a salted hash; the admin flag is
-- an integer used as a boolean.
CREATE TABLE users (
	id INTEGER PRIMARY KEY NOT NULL,
	name TEXT NOT NULL,
	password_salt BLOB NOT NULL,
	password_hash BLOB NOT NULL,
	admin INTEGER NOT NULL,
	UNIQUE(name)
);
|
|
@ -1 +0,0 @@
|
|||
-- Down migration: remove the singleton settings table.
DROP TABLE misc_settings;
|
|
@ -1,7 +0,0 @@
|
|||
-- Singleton server settings table (CHECK(id = 0) enforces a single row),
-- seeded with a random auth secret and default indexing settings.
CREATE TABLE misc_settings (
	id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
	auth_secret TEXT NOT NULL,
	index_sleep_duration_seconds INTEGER NOT NULL,
	index_album_art_pattern TEXT NOT NULL
);
INSERT INTO misc_settings (id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern) VALUES (0, hex(randomblob(64)), 1800, "Folder.(jpg|png)");
|
|
@ -1 +0,0 @@
|
|||
-- Down migration: remove the dynamic-DNS configuration table.
DROP TABLE ddns_config;
|
|
@ -1,8 +0,0 @@
|
|||
-- Singleton dynamic-DNS configuration (CHECK(id = 0) enforces one row),
-- seeded with empty credentials.
CREATE TABLE ddns_config (
	id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
	host TEXT NOT NULL,
	username TEXT NOT NULL,
	password TEXT NOT NULL
);

INSERT INTO ddns_config (id, host, username, password) VALUES (0, "", "", "");
|
|
@ -1 +0,0 @@
|
|||
-- Down migration: remove the mount points table.
DROP TABLE mount_points;
|
|
@ -1,6 +0,0 @@
|
|||
-- Maps a filesystem source directory to the virtual name it is exposed
-- under in the collection; virtual names are unique.
CREATE TABLE mount_points (
	id INTEGER PRIMARY KEY NOT NULL,
	source TEXT NOT NULL,
	name TEXT NOT NULL,
	UNIQUE(name)
);
|
|
@ -1,2 +0,0 @@
|
|||
-- Down migration: remove both playlist tables.
DROP TABLE playlists;
|
||||
DROP TABLE playlist_songs;
|
|
@ -1,16 +0,0 @@
|
|||
-- Playlists are owned by a user; a playlist's rows are deleted with its
-- owner, and re-saving a playlist of the same name replaces it.
CREATE TABLE playlists (
	id INTEGER PRIMARY KEY NOT NULL,
	owner INTEGER NOT NULL,
	name TEXT NOT NULL,
	FOREIGN KEY(owner) REFERENCES users(id) ON DELETE CASCADE,
	UNIQUE(owner, name) ON CONFLICT REPLACE
);

-- Ordered song entries of a playlist; entries follow their playlist on
-- delete/update, and writing the same position replaces the entry.
CREATE TABLE playlist_songs (
	id INTEGER PRIMARY KEY NOT NULL,
	playlist INTEGER NOT NULL,
	path TEXT NOT NULL,
	ordering INTEGER NOT NULL,
	FOREIGN KEY(playlist) REFERENCES playlists(id) ON DELETE CASCADE ON UPDATE CASCADE,
	UNIQUE(playlist, ordering) ON CONFLICT REPLACE
);
|
|
@ -1,11 +0,0 @@
|
|||
-- Rebuild misc_settings without the prefix_url column, copying the
-- surviving columns through a temporary table (SQLite cannot drop a
-- column directly).
CREATE TEMPORARY TABLE misc_settings_backup(id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern);
INSERT INTO misc_settings_backup SELECT id, auth_secret, index_sleep_duration_seconds, index_album_art_pattern FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
	id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
	auth_secret TEXT NOT NULL,
	index_sleep_duration_seconds INTEGER NOT NULL,
	index_album_art_pattern TEXT NOT NULL
);
INSERT INTO misc_settings SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;
|
|
@ -1 +0,0 @@
|
|||
-- Add the URL prefix setting; existing installations default to the empty string.
ALTER TABLE misc_settings ADD COLUMN prefix_url TEXT NOT NULL DEFAULT "";
|
|
@ -1,19 +0,0 @@
|
|||
-- Rebuild the songs table without the duration column, copying the
-- surviving columns through a temporary table (SQLite cannot drop a
-- column directly).
CREATE TEMPORARY TABLE songs_backup(id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork);
INSERT INTO songs_backup SELECT id, path, parent, track_number, disc_number, title, artist, album_artist, year, album, artwork FROM songs;
DROP TABLE songs;
CREATE TABLE songs (
	id INTEGER PRIMARY KEY NOT NULL,
	path TEXT NOT NULL,
	parent TEXT NOT NULL,
	track_number INTEGER,
	disc_number INTEGER,
	title TEXT,
	artist TEXT,
	album_artist TEXT,
	year INTEGER,
	album TEXT,
	artwork TEXT,
	UNIQUE(path) ON CONFLICT REPLACE
);
INSERT INTO songs SELECT * FROM songs_backup;
DROP TABLE songs_backup;
|
|
@ -1 +0,0 @@
|
|||
-- Add song duration (nullable; populated on the next index pass).
-- NOTE(review): unit is presumably seconds — confirm against the indexer.
ALTER TABLE songs ADD COLUMN duration INTEGER;
|
|
@ -1,13 +0,0 @@
|
|||
-- Rebuild the users table keeping only the original columns, copying
-- them through a temporary table (SQLite cannot drop a column directly).
CREATE TEMPORARY TABLE users_backup(id, name, password_salt, password_hash, admin);
INSERT INTO users_backup SELECT id, name, password_salt, password_hash, admin FROM users;
DROP TABLE users;
CREATE TABLE users (
	id INTEGER PRIMARY KEY NOT NULL,
	name TEXT NOT NULL,
	password_salt BLOB NOT NULL,
	password_hash BLOB NOT NULL,
	admin INTEGER NOT NULL,
	UNIQUE(name)
);
INSERT INTO users SELECT * FROM users_backup;
DROP TABLE users_backup;
|
|
@ -1,2 +0,0 @@
|
|||
-- Store each user's Last.fm account link; both columns are nullable
-- (NULL means the account is not linked).
ALTER TABLE users ADD COLUMN lastfm_username TEXT;
|
||||
ALTER TABLE users ADD COLUMN lastfm_session_key TEXT;
|
|
@ -1,15 +0,0 @@
|
|||
-- Rebuild misc_settings so auth_secret becomes a BLOB whose default is a
-- fresh random hex string; the old secret is intentionally discarded
-- (it is not copied through the backup table).
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
INSERT INTO misc_settings_backup
	SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
	FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
	id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
	auth_secret BLOB NOT NULL DEFAULT (hex(randomblob(32))),
	index_sleep_duration_seconds INTEGER NOT NULL,
	index_album_art_pattern TEXT NOT NULL,
	prefix_url TEXT NOT NULL DEFAULT ""
);
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
	SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;
|
|
@ -1,15 +0,0 @@
|
|||
-- Rebuild misc_settings so auth_secret defaults to raw random bytes
-- (randomblob) instead of a hex string; the old secret is intentionally
-- regenerated rather than copied.
CREATE TEMPORARY TABLE misc_settings_backup(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url);
INSERT INTO misc_settings_backup
	SELECT id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url
	FROM misc_settings;
DROP TABLE misc_settings;
CREATE TABLE misc_settings (
	id INTEGER PRIMARY KEY NOT NULL CHECK(id = 0),
	auth_secret BLOB NOT NULL DEFAULT (randomblob(32)),
	index_sleep_duration_seconds INTEGER NOT NULL,
	index_album_art_pattern TEXT NOT NULL,
	prefix_url TEXT NOT NULL DEFAULT ""
);
INSERT INTO misc_settings(id, index_sleep_duration_seconds, index_album_art_pattern, prefix_url)
	SELECT * FROM misc_settings_backup;
DROP TABLE misc_settings_backup;
|
|
@ -1,11 +0,0 @@
|
|||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_salt BLOB NOT NULL,
|
||||
password_hash BLOB NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
lastfm_username TEXT,
|
||||
lastfm_session_key TEXT,
|
||||
UNIQUE(name)
|
||||
);
|
|
@ -1,10 +0,0 @@
|
|||
DROP TABLE users;
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
password_hash TEXT NOT NULL,
|
||||
admin INTEGER NOT NULL,
|
||||
lastfm_username TEXT,
|
||||
lastfm_session_key TEXT,
|
||||
UNIQUE(name)
|
||||
);
|
BIN
res/branding/example-cover-1.png
Normal file
After Width: | Height: | Size: 1.2 MiB |
BIN
res/branding/example-cover-2.png
Normal file
After Width: | Height: | Size: 1.3 MiB |
BIN
res/branding/logo/social_media_preview.afdesign
Normal file
BIN
res/readme/dark_mode.png
Normal file
After Width: | Height: | Size: 723 KiB |
BIN
res/readme/logo.afdesign
Normal file
Before Width: | Height: | Size: 48 KiB After Width: | Height: | Size: 47 KiB |
Before Width: | Height: | Size: 276 KiB After Width: | Height: | Size: 722 KiB |
|
@ -1,22 +1,101 @@
|
|||
POLARIS_BIN_DIR := ~/.local/bin/polaris
|
||||
POLARIS_DATA_DIR := ~/.local/share/polaris
|
||||
.PHONY: all build build-system build-xdg cargo-build clean preview preview-system preview-xdg list-paths install install-bin install-data install-system install-xdg uninstall uninstall-bin uninstall-data uninstall-system uninstall-xdg
|
||||
|
||||
all: build
|
||||
UID := $(shell id -u)
|
||||
|
||||
build:
|
||||
PREFIX ?= /usr/local
|
||||
EXEC_PREFIX ?= $(PREFIX)
|
||||
BINDIR ?= $(EXEC_PREFIX)/bin
|
||||
DATAROOTDIR ?= $(PREFIX)/share
|
||||
DATADIR ?= $(DATAROOTDIR)
|
||||
SYSCONFDIR ?= $(PREFIX)/etc
|
||||
LOCALSTATEDIR ?= $(PREFIX)/var
|
||||
RUNSTATEDIR ?= $(LOCALSTATEDIR)/run
|
||||
%-system: POLARIS_BIN_PATH := $(BINDIR)/polaris
|
||||
%-system: export POLARIS_WEB_DIR := $(DATADIR)/polaris/web
|
||||
%-system: export POLARIS_CONFIG_DIR := $(SYSCONFDIR)/polaris
|
||||
%-system: export POLARIS_DATA_DIR := $(LOCALSTATEDIR)/lib/polaris
|
||||
%-system: export POLARIS_DB_DIR := $(LOCALSTATEDIR)/lib/polaris
|
||||
%-system: export POLARIS_LOG_DIR := $(LOCALSTATEDIR)/log/polaris
|
||||
%-system: export POLARIS_CACHE_DIR := $(LOCALSTATEDIR)/cache/polaris
|
||||
%-system: export POLARIS_PID_DIR := $(RUNSTATEDIR)/polaris
|
||||
|
||||
XDG_CACHE_HOME ?= $(HOME)/.cache
|
||||
XDG_CONFIG_HOME ?= $(HOME)/.config
|
||||
XDG_DATA_HOME ?= $(HOME)/.local/share
|
||||
XDG_BINDIR ?= $(HOME)/.local/bin
|
||||
XDG_DATADIR ?= $(XDG_DATA_HOME)/polaris
|
||||
XDG_CACHEDIR ?= $(XDG_CACHE_HOME)/polaris
|
||||
XDG_CONFIGDIR ?= $(XDG_CONFIG_HOME)/polaris
|
||||
ifdef $(XDG_RUNTIME_DIR)
|
||||
XDG_PIDDIR ?= $(XDG_RUNTIME_DIR)/polaris
|
||||
else
|
||||
XDG_PIDDIR ?= /tmp/polaris-$(UID)
|
||||
endif
|
||||
%-xdg: POLARIS_BIN_PATH := $(XDG_BINDIR)/polaris
|
||||
%-xdg: export POLARIS_WEB_DIR := $(XDG_DATADIR)/web
|
||||
%-xdg: export POLARIS_CONFIG_DIR := $(XDG_CONFIGDIR)
|
||||
%-xdg: export POLARIS_DATA_DIR := $(XDG_DATADIR)
|
||||
%-xdg: export POLARIS_DB_DIR := $(XDG_DATADIR)
|
||||
%-xdg: export POLARIS_LOG_DIR := $(XDG_CACHEDIR)
|
||||
%-xdg: export POLARIS_CACHE_DIR := $(XDG_CACHEDIR)
|
||||
%-xdg: export POLARIS_PID_DIR := $(XDG_PIDDIR)
|
||||
|
||||
# Build
|
||||
|
||||
build-system: cargo-build
|
||||
build-xdg: cargo-build
|
||||
build: build-system
|
||||
all: build-system
|
||||
|
||||
cargo-build:
|
||||
cargo build --release
|
||||
|
||||
install: build
|
||||
install -d $(POLARIS_BIN_DIR)
|
||||
install -d $(POLARIS_DATA_DIR)
|
||||
install ./target/release/polaris $(POLARIS_BIN_DIR)
|
||||
cp -r ./web $(POLARIS_DATA_DIR)
|
||||
cp -r ./swagger $(POLARIS_DATA_DIR)
|
||||
@echo "Polaris installation complete!"
|
||||
|
||||
clean:
|
||||
cargo clean
|
||||
|
||||
uninstall:
|
||||
rm -r $(POLARIS_BIN_DIR)
|
||||
rm -r $(POLARIS_DATA_DIR)
|
||||
# Preview
|
||||
|
||||
preview-system: list-paths
|
||||
preview-xdg: list-paths
|
||||
preview: preview-system
|
||||
|
||||
list-paths:
|
||||
$(info POLARIS_BIN_PATH is $(POLARIS_BIN_PATH))
|
||||
$(info POLARIS_WEB_DIR is $(POLARIS_WEB_DIR))
|
||||
$(info POLARIS_CONFIG_DIR is $(POLARIS_CONFIG_DIR))
|
||||
$(info POLARIS_DATA_DIR is $(POLARIS_DATA_DIR))
|
||||
$(info POLARIS_DB_DIR is $(POLARIS_DB_DIR))
|
||||
$(info POLARIS_LOG_DIR is $(POLARIS_LOG_DIR))
|
||||
$(info POLARIS_CACHE_DIR is $(POLARIS_CACHE_DIR))
|
||||
$(info POLARIS_PID_DIR is $(POLARIS_PID_DIR))
|
||||
|
||||
# Install
|
||||
|
||||
install-system: install-bin install-data
|
||||
install-xdg: install-bin install-data
|
||||
install: install-system
|
||||
|
||||
install-bin: cargo-build
|
||||
install -Dm755 ./target/release/polaris $(POLARIS_BIN_PATH)
|
||||
|
||||
install-data:
|
||||
install -d $(POLARIS_WEB_DIR)
|
||||
cp -rT ./web $(POLARIS_WEB_DIR)
|
||||
|
||||
# Uninstall
|
||||
|
||||
uninstall-system: uninstall-bin uninstall-data
|
||||
uninstall-xdg: uninstall-bin uninstall-data
|
||||
uninstall: uninstall-system
|
||||
|
||||
uninstall-bin:
|
||||
rm $(POLARIS_BIN_PATH)
|
||||
|
||||
uninstall-data:
|
||||
rm -rf $(POLARIS_WEB_DIR)
|
||||
rm -rf $(POLARIS_CONFIG_DIR)
|
||||
rm -rf $(POLARIS_DATA_DIR)
|
||||
rm -rf $(POLARIS_DB_DIR)
|
||||
rm -rf $(POLARIS_LOG_DIR)
|
||||
rm -rf $(POLARIS_CACHE_DIR)
|
||||
rm -rf $(POLARIS_PID_DIR)
|
||||
|
|
12
res/unix/release_script.sh
Executable file
|
@ -0,0 +1,12 @@
|
|||
#!/bin/sh
|
||||
echo "Creating output directory"
|
||||
mkdir -p release/tmp/polaris
|
||||
|
||||
echo "Copying package files"
|
||||
cp -r web src test-data build.rs Cargo.toml Cargo.lock rust-toolchain.toml res/unix/Makefile release/tmp/polaris
|
||||
|
||||
echo "Creating tarball"
|
||||
tar -zc -C release/tmp -f release/polaris.tar.gz polaris
|
||||
|
||||
echo "Cleaning up"
|
||||
rm -rf release/tmp
|
|
@ -1,15 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
|
||||
<security>
|
||||
<requestedPrivileges>
|
||||
<requestedExecutionLevel level="asInvoker" uiAccess="false" />
|
||||
</requestedPrivileges>
|
||||
</security>
|
||||
</trustInfo>
|
||||
<asmv3:application>
|
||||
<asmv3:windowsSettings xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">
|
||||
<dpiAware>true</dpiAware>
|
||||
</asmv3:windowsSettings>
|
||||
</asmv3:application>
|
||||
</assembly>
|
|
@ -1,7 +0,0 @@
|
|||
#define IDI_POLARIS 0x101
|
||||
#define IDI_POLARIS_TRAY 0x102
|
||||
|
||||
CREATEPROCESS_MANIFEST_RESOURCE_ID RT_MANIFEST "application.manifest"
|
||||
|
||||
IDI_POLARIS ICON "icon_polaris_512.ico"
|
||||
IDI_POLARIS_TRAY ICON "icon_polaris_outline_64.ico"
|
BIN
res/windows/application/icon_polaris_outline_16.png
Normal file
After Width: | Height: | Size: 1.8 KiB |
Before Width: | Height: | Size: 31 KiB |
2
res/windows/application/polaris-manifest.rc
Normal file
|
@ -0,0 +1,2 @@
|
|||
#define RT_MANIFEST 24
|
||||
1 RT_MANIFEST "polaris.exe.manifest"
|
21
res/windows/application/polaris.exe.manifest
Normal file
|
@ -0,0 +1,21 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
|
||||
<assemblyIdentity
|
||||
version="1.0.0.0"
|
||||
processorArchitecture="*"
|
||||
name="app"
|
||||
type="win32"
|
||||
/>
|
||||
<dependency>
|
||||
<dependentAssembly>
|
||||
<assemblyIdentity
|
||||
type="win32"
|
||||
name="Microsoft.Windows.Common-Controls"
|
||||
version="6.0.0.0"
|
||||
processorArchitecture="*"
|
||||
publicKeyToken="6595b64144ccf1df"
|
||||
language="*"
|
||||
/>
|
||||
</dependentAssembly>
|
||||
</dependency>
|
||||
</assembly>
|
|
@ -1,68 +1,61 @@
|
|||
<?xml version='1.0' encoding='windows-1252'?>
|
||||
<Wix xmlns='http://schemas.microsoft.com/wix/2006/wi' xmlns:util="http://schemas.microsoft.com/wix/UtilExtension">
|
||||
<Product Name='Polaris' Id='7D8EB5AC-6020-40B6-9A53-4E0A0CF7B23A' UpgradeCode='FF16B075-1D36-47F4-BE37-D95BBC1A412C' Language='1033' Codepage='1252' Version='0.10.0' Manufacturer='Permafrost'>
|
||||
|
||||
<Package Id='*' Keywords='Installer' Platform='x64' InstallScope='perUser' Description='Polaris Installer' Manufacturer='Permafrost' Languages='1033' Compressed='yes' SummaryCodepage='1252' />
|
||||
|
||||
<MajorUpgrade DowngradeErrorMessage='A newer version of Polaris is already installed.' Schedule='afterInstallExecute' />
|
||||
|
||||
<Media Id='1' Cabinet='Sample.cab' EmbedCab='yes' DiskPrompt='Installation Media #1' />
|
||||
<Property Id='DiskPrompt' Value='Polaris Installation [1]' />
|
||||
|
||||
<Directory Id='TARGETDIR' Name='SourceDir'>
|
||||
<Directory Id='LocalAppDataFolder'>
|
||||
<Directory Id='AppDataPermafrost' Name='Permafrost'>
|
||||
<Directory Id='AppDataPolaris' Name='Polaris' FileSource='.'>
|
||||
<Component Id='MainExecutable' Guid='*'>
|
||||
<File Source='polaris.exe' KeyPath='yes' Checksum='yes'>
|
||||
<Shortcut Id='StartupMenuPolaris' Directory='StartupFolder' Name='Polaris' WorkingDirectory='AppDataPolaris' Icon='polaris.exe' IconIndex='0' Advertise='yes' />
|
||||
<Shortcut Id='StartMenuPolaris' Directory='ProgramMenuDir' Name='Polaris' WorkingDirectory='AppDataPolaris' Icon='polaris.exe' IconIndex='0' Advertise='yes' />
|
||||
<Shortcut Id='DesktopPolaris' Directory='DesktopFolder' Name='Polaris' WorkingDirectory='AppDataPolaris' Icon='polaris.exe' IconIndex='0' Advertise='yes' />
|
||||
</File>
|
||||
</Component>
|
||||
</Directory>
|
||||
</Directory>
|
||||
</Directory>
|
||||
|
||||
<Directory Id='DesktopFolder' Name='Desktop' />
|
||||
<Directory Id='StartupFolder' Name='Startup' />
|
||||
<Directory Id='ProgramMenuFolder' Name='Programs'>
|
||||
<Directory Id='ProgramMenuDir' Name='Permafrost'>
|
||||
<Component Id="ProgramMenuDir" Guid='*'>
|
||||
<RemoveFolder Id='ProgramMenuDir' On='uninstall' />
|
||||
<RegistryValue Root='HKCU' Key='Software\Permafrost\Polaris' Name='ProgramMenuEntry' Type='string' Value='' KeyPath='yes' />
|
||||
</Component>
|
||||
</Directory>
|
||||
</Directory>
|
||||
</Directory>
|
||||
|
||||
<!--Remove extra files after uninstall (db, thumbnails, etc.)-->
|
||||
<Property Id="EXTRADATAPATH">
|
||||
<RegistrySearch Root="HKCU" Key="Software\Permafrost\Polaris" Name="CleanupExtraData" Type="raw" Id="ExtraDataPathSearch" />
|
||||
</Property>
|
||||
<DirectoryRef Id="AppDataPermafrost">
|
||||
<Component Id="CleanupExtraData" Guid="DF415F12-A1B4-48EE-98BC-E0B75AF556AD">
|
||||
<RegistryValue Root="HKCU" Key="Software\Permafrost\Polaris" Name="CleanupExtraData" Type="string" Value="[AppDataPermafrost]" KeyPath="yes" />
|
||||
<util:RemoveFolderEx On="uninstall" Property="EXTRADATAPATH" />
|
||||
</Component>
|
||||
</DirectoryRef>
|
||||
|
||||
<Feature Id='Complete' Level='1'>
|
||||
<ComponentRef Id='MainExecutable' />
|
||||
<ComponentRef Id='ProgramMenuDir' />
|
||||
<ComponentRef Id='CleanupExtraData' />
|
||||
<ComponentGroupRef Id="WebUI" />
|
||||
<ComponentGroupRef Id="SwaggerUI" />
|
||||
</Feature>
|
||||
|
||||
<Icon Id='polaris.exe' SourceFile='polaris.exe' />
|
||||
<Property Id='ARPPRODUCTICON' Value='polaris.exe' />
|
||||
|
||||
<Property Id='WIXUI_INSTALLDIR' Value='INSTALL_DIR' />
|
||||
<UIRef Id='WixUI_Minimal' />
|
||||
<WixVariable Id='WixUILicenseRtf' Value='license.rtf' />
|
||||
<WixVariable Id='WixUIDialogBmp' Value='dialog.bmp' />
|
||||
<WixVariable Id='WixUIBannerBmp' Value='banner.bmp' />
|
||||
|
||||
</Product>
|
||||
</Wix>
|
||||
<?xml version="1.0" encoding="windows-1252"?>
|
||||
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi" xmlns:util="http://schemas.microsoft.com/wix/UtilExtension">
|
||||
<Product Name="Polaris" Id="*" UpgradeCode="FF16B075-1D36-47F4-BE37-D95BBC1A412C" Language="1033" Codepage="1252" Manufacturer="Permafrost" Version="0.12.3">
|
||||
<Package Id="*" Keywords="Installer" Platform="x64" InstallScope="perUser" Description="Polaris Installer" Manufacturer="Permafrost" Languages="1033" Compressed="yes" SummaryCodepage="1252" />
|
||||
<MajorUpgrade DowngradeErrorMessage="A newer version of Polaris is already installed." Schedule="afterInstallExecute" />
|
||||
<Media Id="1" Cabinet="Sample.cab" EmbedCab="yes" DiskPrompt="Installation Media #1" />
|
||||
<Property Id="DiskPrompt" Value="Polaris Installation [1]" />
|
||||
<Directory Id="TARGETDIR" Name="SourceDir">
|
||||
<Directory Id="LocalAppDataFolder">
|
||||
<Directory Id="AppDataPermafrost" Name="Permafrost">
|
||||
<Directory Id="AppDataPolaris" Name="Polaris" FileSource=".">
|
||||
<Component Id="MainExecutable" Guid="*">
|
||||
<File Source="polaris.exe" KeyPath="yes" Checksum="yes">
|
||||
<Shortcut Id="StartupMenuPolaris" Directory="StartupFolder" Name="Polaris" WorkingDirectory="AppDataPolaris" Icon="polaris.exe" IconIndex="0" Advertise="yes" />
|
||||
<Shortcut Id="StartMenuPolaris" Directory="ProgramMenuDir" Name="Polaris" WorkingDirectory="AppDataPolaris" Icon="polaris.exe" IconIndex="0" Advertise="yes" />
|
||||
<Shortcut Id="DesktopPolaris" Directory="DesktopFolder" Name="Polaris" WorkingDirectory="AppDataPolaris" Icon="polaris.exe" IconIndex="0" Advertise="yes" />
|
||||
</File>
|
||||
</Component>
|
||||
<Component Id="CLIExecutable" Guid="*">
|
||||
<File Source="polaris-cli.exe" KeyPath="yes" Checksum="yes" />
|
||||
</Component>
|
||||
</Directory>
|
||||
</Directory>
|
||||
</Directory>
|
||||
<Directory Id="DesktopFolder" Name="Desktop" />
|
||||
<Directory Id="StartupFolder" Name="Startup" />
|
||||
<Directory Id="ProgramMenuFolder" Name="Programs">
|
||||
<Directory Id="ProgramMenuDir" Name="Permafrost">
|
||||
<Component Id="ProgramMenuDir" Guid="*">
|
||||
<RemoveFolder Id="ProgramMenuDir" On="uninstall" />
|
||||
<RegistryValue Root="HKCU" Key="Software\Permafrost\Polaris" Name="ProgramMenuEntry" Type="string" Value="" KeyPath="yes" />
|
||||
</Component>
|
||||
</Directory>
|
||||
</Directory>
|
||||
</Directory>
|
||||
<!--Remove extra files after uninstall (db, thumbnails, etc.)-->
|
||||
<Property Id="EXTRADATAPATH">
|
||||
<RegistrySearch Root="HKCU" Key="Software\Permafrost\Polaris" Name="CleanupExtraData" Type="raw" Id="ExtraDataPathSearch" />
|
||||
</Property>
|
||||
<DirectoryRef Id="AppDataPermafrost">
|
||||
<Component Id="CleanupExtraData" Guid="DF415F12-A1B4-48EE-98BC-E0B75AF556AD">
|
||||
<RegistryValue Root="HKCU" Key="Software\Permafrost\Polaris" Name="CleanupExtraData" Type="string" Value="[AppDataPermafrost]" KeyPath="yes" />
|
||||
<util:RemoveFolderEx On="uninstall" Property="EXTRADATAPATH" />
|
||||
</Component>
|
||||
</DirectoryRef>
|
||||
<Feature Id="Complete" Level="1">
|
||||
<ComponentRef Id="MainExecutable" />
|
||||
<ComponentRef Id="CLIExecutable" />
|
||||
<ComponentRef Id="ProgramMenuDir" />
|
||||
<ComponentRef Id="CleanupExtraData" />
|
||||
<ComponentGroupRef Id="WebUI" />
|
||||
</Feature>
|
||||
<Icon Id="polaris.exe" SourceFile="polaris.exe" />
|
||||
<Property Id="ARPPRODUCTICON" Value="polaris.exe" />
|
||||
<Property Id="WIXUI_INSTALLDIR" Value="INSTALL_DIR" />
|
||||
<UIRef Id="WixUI_Minimal" />
|
||||
<WixVariable Id="WixUILicenseRtf" Value="license.rtf" />
|
||||
<WixVariable Id="WixUIDialogBmp" Value="dialog.bmp" />
|
||||
<WixVariable Id="WixUIBannerBmp" Value="banner.bmp" />
|
||||
</Product>
|
||||
</Wix>
|
52
res/windows/release_script.ps1
Normal file
|
@ -0,0 +1,52 @@
|
|||
if (!(Test-Path env:POLARIS_VERSION)) {
|
||||
throw "POLARIS_VERSION environment variable is not defined"
|
||||
}
|
||||
|
||||
""
|
||||
"Compiling executable"
|
||||
# TODO: Uncomment the following once Polaris can do variable expansion of %LOCALAPPDATA%
|
||||
# And remove the code setting these as defaults in `service/mod.rs`
|
||||
# $script:INSTALL_DIR = "%LOCALAPPDATA%\Permafrost\Polaris"
|
||||
# $env:POLARIS_WEB_DIR = "$INSTALL_DIR\web"
|
||||
# $env:POLARIS_DB_DIR = "$INSTALL_DIR"
|
||||
# $env:POLARIS_LOG_DIR = "$INSTALL_DIR"
|
||||
# $env:POLARIS_CACHE_DIR = "$INSTALL_DIR"
|
||||
# $env:POLARIS_PID_DIR = "$INSTALL_DIR"
|
||||
cargo rustc --release --features "ui" -- -o ".\target\release\polaris.exe"
|
||||
cargo rustc --release -- -o ".\target\release\polaris-cli.exe"
|
||||
|
||||
""
|
||||
"Creating output directory"
|
||||
New-Item .\release\tmp -type directory -Force | Out-Null
|
||||
Remove-Item -Recurse .\release\tmp\*
|
||||
|
||||
""
|
||||
"Copying to output directory"
|
||||
Copy-Item .\res\windows\installer\license.rtf .\release\tmp\
|
||||
Copy-Item .\res\windows\installer\banner.bmp .\release\tmp\
|
||||
Copy-Item .\res\windows\installer\dialog.bmp .\release\tmp\
|
||||
Copy-Item .\target\release\polaris.exe .\release\tmp\
|
||||
Copy-Item .\target\release\polaris-cli.exe .\release\tmp\
|
||||
Copy-Item .\web .\release\tmp\web -recurse
|
||||
|
||||
""
|
||||
"Inserting version number in installer config"
|
||||
[xml]$wxs = Get-Content .\res\windows\installer\installer.wxs
|
||||
$wxs.Wix.Product.SetAttribute("Version", $env:POLARIS_VERSION)
|
||||
$wxs.Save('.\res\windows\installer\installer.wxs')
|
||||
|
||||
""
|
||||
"Creating installer"
|
||||
$heat_exe = Join-Path $env:WIX bin\heat.exe
|
||||
& $heat_exe dir .\release\tmp\web\ -ag -g1 -dr AppDataPolaris -cg WebUI -sfrag -var wix.WebUIDir -out .\release\tmp\web_ui_fragment.wxs
|
||||
|
||||
$candle_exe = Join-Path $env:WIX bin\candle.exe
|
||||
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\web_ui_fragment.wixobj .\release\tmp\web_ui_fragment.wxs
|
||||
& $candle_exe -wx -ext WixUtilExtension -arch x64 -out .\release\tmp\installer.wixobj .\res\windows\installer\installer.wxs
|
||||
|
||||
$light_exe = Join-Path $env:WIX bin\light.exe
|
||||
& $light_exe -dWebUIDir=".\release\tmp\web" -wx -ext WixUtilExtension -ext WixUIExtension -spdb -sw1076 -sice:ICE38 -sice:ICE64 -out .\release\polaris.msi .\release\tmp\installer.wixobj .\release\tmp\web_ui_fragment.wixobj
|
||||
|
||||
"Cleaning up"
|
||||
Remove-Item -Recurse .\release\tmp
|
||||
|
4
rust-toolchain.toml
Normal file
|
@ -0,0 +1,4 @@
|
|||
[toolchain]
|
||||
channel = "stable"
|
||||
components = [ "rust-src", "rustfmt" ]
|
||||
profile = "default"
|
451
src/api.rs
|
@ -1,451 +0,0 @@
|
|||
use error_chain::bail;
|
||||
use rocket::http::{Cookie, Cookies, RawStr, Status};
|
||||
use rocket::request::{self, FromParam, FromRequest, Request};
|
||||
use rocket::response::content::Html;
|
||||
use rocket::{delete, get, post, put, routes, Outcome, State};
|
||||
use rocket_contrib::json::Json;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use std::str;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::config::{self, Config, Preferences};
|
||||
use crate::db::DB;
|
||||
use crate::errors;
|
||||
use crate::index;
|
||||
use crate::lastfm;
|
||||
use crate::playlist;
|
||||
use crate::serve;
|
||||
use crate::thumbnails;
|
||||
use crate::user;
|
||||
use crate::utils;
|
||||
use crate::vfs::VFSSource;
|
||||
|
||||
const CURRENT_MAJOR_VERSION: i32 = 3;
|
||||
const CURRENT_MINOR_VERSION: i32 = 0;
|
||||
const COOKIE_SESSION: &str = "session";
|
||||
|
||||
pub fn get_routes() -> Vec<rocket::Route> {
|
||||
routes![
|
||||
version,
|
||||
initial_setup,
|
||||
get_settings,
|
||||
put_settings,
|
||||
get_preferences,
|
||||
put_preferences,
|
||||
trigger_index,
|
||||
auth,
|
||||
browse_root,
|
||||
browse,
|
||||
flatten_root,
|
||||
flatten,
|
||||
random,
|
||||
recent,
|
||||
search_root,
|
||||
search,
|
||||
serve,
|
||||
list_playlists,
|
||||
save_playlist,
|
||||
read_playlist,
|
||||
delete_playlist,
|
||||
lastfm_link,
|
||||
lastfm_unlink,
|
||||
lastfm_now_playing,
|
||||
lastfm_scrobble,
|
||||
]
|
||||
}
|
||||
|
||||
struct Auth {
|
||||
username: String,
|
||||
}
|
||||
|
||||
fn get_session_cookie(username: &str) -> Cookie<'static> {
|
||||
Cookie::build(COOKIE_SESSION, username.to_owned())
|
||||
.same_site(rocket::http::SameSite::Lax)
|
||||
.http_only(true)
|
||||
.finish()
|
||||
}
|
||||
|
||||
impl<'a, 'r> FromRequest<'a, 'r> for Auth {
|
||||
type Error = ();
|
||||
|
||||
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, ()> {
|
||||
let mut cookies = request.guard::<Cookies<'_>>().unwrap();
|
||||
if let Some(u) = cookies.get_private(COOKIE_SESSION) {
|
||||
return Outcome::Success(Auth {
|
||||
username: u.value().to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(auth_header_string) = request.headers().get_one("Authorization") {
|
||||
use rocket::http::hyper::header::*;
|
||||
if let Ok(Basic {
|
||||
username,
|
||||
password: Some(password),
|
||||
}) = Basic::from_str(auth_header_string.trim_start_matches("Basic "))
|
||||
{
|
||||
let db = match request.guard::<State<'_, Arc<DB>>>() {
|
||||
Outcome::Success(d) => d,
|
||||
_ => return Outcome::Failure((Status::InternalServerError, ())),
|
||||
};
|
||||
if user::auth(db.deref().deref(), &username, &password).unwrap_or(false) {
|
||||
cookies.add_private(get_session_cookie(&username));
|
||||
return Outcome::Success(Auth {
|
||||
username: username.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Outcome::Failure((Status::Unauthorized, ()))
|
||||
}
|
||||
}
|
||||
|
||||
struct AdminRights {}
|
||||
impl<'a, 'r> FromRequest<'a, 'r> for AdminRights {
|
||||
type Error = ();
|
||||
|
||||
fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, ()> {
|
||||
let db = request.guard::<State<'_, Arc<DB>>>()?;
|
||||
|
||||
match user::count::<DB>(&db) {
|
||||
Err(_) => return Outcome::Failure((Status::InternalServerError, ())),
|
||||
Ok(0) => return Outcome::Success(AdminRights {}),
|
||||
_ => (),
|
||||
};
|
||||
|
||||
let auth = request.guard::<Auth>()?;
|
||||
match user::is_admin::<DB>(&db, &auth.username) {
|
||||
Err(_) => Outcome::Failure((Status::InternalServerError, ())),
|
||||
Ok(true) => Outcome::Success(AdminRights {}),
|
||||
Ok(false) => Outcome::Failure((Status::Forbidden, ())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct VFSPathBuf {
|
||||
path_buf: PathBuf,
|
||||
}
|
||||
|
||||
impl<'r> FromParam<'r> for VFSPathBuf {
|
||||
type Error = &'r RawStr;
|
||||
|
||||
fn from_param(param: &'r RawStr) -> Result<Self, Self::Error> {
|
||||
let decoded_path = param.percent_decode_lossy();
|
||||
Ok(VFSPathBuf {
|
||||
path_buf: PathBuf::from(decoded_path.into_owned()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<VFSPathBuf> for PathBuf {
|
||||
fn from(vfs_path_buf: VFSPathBuf) -> Self {
|
||||
vfs_path_buf.path_buf.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Serialize, Deserialize)]
|
||||
pub struct Version {
|
||||
pub major: i32,
|
||||
pub minor: i32,
|
||||
}
|
||||
|
||||
#[get("/version")]
|
||||
fn version() -> Json<Version> {
|
||||
let current_version = Version {
|
||||
major: CURRENT_MAJOR_VERSION,
|
||||
minor: CURRENT_MINOR_VERSION,
|
||||
};
|
||||
Json(current_version)
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Serialize, Deserialize)]
|
||||
pub struct InitialSetup {
|
||||
pub has_any_users: bool,
|
||||
}
|
||||
|
||||
#[get("/initial_setup")]
|
||||
fn initial_setup(db: State<'_, Arc<DB>>) -> Result<Json<InitialSetup>, errors::Error> {
|
||||
let initial_setup = InitialSetup {
|
||||
has_any_users: user::count::<DB>(&db)? > 0,
|
||||
};
|
||||
Ok(Json(initial_setup))
|
||||
}
|
||||
|
||||
#[get("/settings")]
|
||||
fn get_settings(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_admin_rights: AdminRights,
|
||||
) -> Result<Json<Config>, errors::Error> {
|
||||
let config = config::read::<DB>(&db)?;
|
||||
Ok(Json(config))
|
||||
}
|
||||
|
||||
#[put("/settings", data = "<config>")]
|
||||
fn put_settings(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_admin_rights: AdminRights,
|
||||
config: Json<Config>,
|
||||
) -> Result<(), errors::Error> {
|
||||
config::amend::<DB>(&db, &config)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[get("/preferences")]
|
||||
fn get_preferences(db: State<'_, Arc<DB>>, auth: Auth) -> Result<Json<Preferences>, errors::Error> {
|
||||
let preferences = config::read_preferences::<DB>(&db, &auth.username)?;
|
||||
Ok(Json(preferences))
|
||||
}
|
||||
|
||||
#[put("/preferences", data = "<preferences>")]
|
||||
fn put_preferences(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
preferences: Json<Preferences>,
|
||||
) -> Result<(), errors::Error> {
|
||||
config::write_preferences::<DB>(&db, &auth.username, &preferences)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[post("/trigger_index")]
|
||||
fn trigger_index(
|
||||
command_sender: State<'_, Arc<index::CommandSender>>,
|
||||
_admin_rights: AdminRights,
|
||||
) -> Result<(), errors::Error> {
|
||||
command_sender.trigger_reindex()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct AuthCredentials {
|
||||
pub username: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct AuthOutput {
|
||||
admin: bool,
|
||||
}
|
||||
|
||||
#[post("/auth", data = "<credentials>")]
|
||||
fn auth(
|
||||
db: State<'_, Arc<DB>>,
|
||||
credentials: Json<AuthCredentials>,
|
||||
mut cookies: Cookies<'_>,
|
||||
) -> Result<Json<AuthOutput>, errors::Error> {
|
||||
if !user::auth::<DB>(&db, &credentials.username, &credentials.password)? {
|
||||
bail!(errors::ErrorKind::IncorrectCredentials)
|
||||
}
|
||||
|
||||
cookies.add_private(get_session_cookie(&credentials.username));
|
||||
|
||||
let auth_output = AuthOutput {
|
||||
admin: user::is_admin::<DB>(&db, &credentials.username)?,
|
||||
};
|
||||
Ok(Json(auth_output))
|
||||
}
|
||||
|
||||
#[get("/browse")]
|
||||
fn browse_root(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
) -> Result<Json<Vec<index::CollectionFile>>, errors::Error> {
|
||||
let result = index::browse(db.deref().deref(), &PathBuf::new())?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/browse/<path>")]
|
||||
fn browse(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
path: VFSPathBuf,
|
||||
) -> Result<Json<Vec<index::CollectionFile>>, errors::Error> {
|
||||
let result = index::browse(db.deref().deref(), &path.into() as &PathBuf)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/flatten")]
|
||||
fn flatten_root(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
) -> Result<Json<Vec<index::Song>>, errors::Error> {
|
||||
let result = index::flatten(db.deref().deref(), &PathBuf::new())?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/flatten/<path>")]
|
||||
fn flatten(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
path: VFSPathBuf,
|
||||
) -> Result<Json<Vec<index::Song>>, errors::Error> {
|
||||
let result = index::flatten(db.deref().deref(), &path.into() as &PathBuf)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/random")]
|
||||
fn random(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
) -> Result<Json<Vec<index::Directory>>, errors::Error> {
|
||||
let result = index::get_random_albums(db.deref().deref(), 20)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/recent")]
|
||||
fn recent(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
) -> Result<Json<Vec<index::Directory>>, errors::Error> {
|
||||
let result = index::get_recent_albums(db.deref().deref(), 20)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/search")]
|
||||
fn search_root(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
) -> Result<Json<Vec<index::CollectionFile>>, errors::Error> {
|
||||
let result = index::search(db.deref().deref(), "")?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/search/<query>")]
|
||||
fn search(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
query: String,
|
||||
) -> Result<Json<Vec<index::CollectionFile>>, errors::Error> {
|
||||
let result = index::search(db.deref().deref(), &query)?;
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[get("/serve/<path>")]
|
||||
fn serve(
|
||||
db: State<'_, Arc<DB>>,
|
||||
_auth: Auth,
|
||||
path: VFSPathBuf,
|
||||
) -> Result<serve::RangeResponder<File>, errors::Error> {
|
||||
let db: &DB = db.deref().deref();
|
||||
let vfs = db.get_vfs()?;
|
||||
let real_path = vfs.virtual_to_real(&path.into() as &PathBuf)?;
|
||||
|
||||
let serve_path = if utils::is_image(&real_path) {
|
||||
thumbnails::get_thumbnail(&real_path, 400)?
|
||||
} else {
|
||||
real_path
|
||||
};
|
||||
|
||||
let file = File::open(serve_path)?;
|
||||
Ok(serve::RangeResponder::new(file))
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ListPlaylistsEntry {
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[get("/playlists")]
|
||||
fn list_playlists(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
) -> Result<Json<Vec<ListPlaylistsEntry>>, errors::Error> {
|
||||
let playlist_names = playlist::list_playlists(&auth.username, db.deref().deref())?;
|
||||
let playlists: Vec<ListPlaylistsEntry> = playlist_names
|
||||
.into_iter()
|
||||
.map(|p| ListPlaylistsEntry { name: p })
|
||||
.collect();
|
||||
|
||||
Ok(Json(playlists))
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavePlaylistInput {
|
||||
pub tracks: Vec<String>,
|
||||
}
|
||||
|
||||
#[put("/playlist/<name>", data = "<playlist>")]
|
||||
fn save_playlist(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
name: String,
|
||||
playlist: Json<SavePlaylistInput>,
|
||||
) -> Result<(), errors::Error> {
|
||||
playlist::save_playlist(&name, &auth.username, &playlist.tracks, db.deref().deref())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[get("/playlist/<name>")]
|
||||
fn read_playlist(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
name: String,
|
||||
) -> Result<Json<Vec<index::Song>>, errors::Error> {
|
||||
let songs = playlist::read_playlist(&name, &auth.username, db.deref().deref())?;
|
||||
Ok(Json(songs))
|
||||
}
|
||||
|
||||
#[delete("/playlist/<name>")]
|
||||
fn delete_playlist(db: State<'_, Arc<DB>>, auth: Auth, name: String) -> Result<(), errors::Error> {
|
||||
playlist::delete_playlist(&name, &auth.username, db.deref().deref())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[put("/lastfm/now_playing/<path>")]
|
||||
fn lastfm_now_playing(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
path: VFSPathBuf,
|
||||
) -> Result<(), errors::Error> {
|
||||
lastfm::now_playing(db.deref().deref(), &auth.username, &path.into() as &PathBuf)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[post("/lastfm/scrobble/<path>")]
|
||||
fn lastfm_scrobble(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
path: VFSPathBuf,
|
||||
) -> Result<(), errors::Error> {
|
||||
lastfm::scrobble(db.deref().deref(), &auth.username, &path.into() as &PathBuf)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[get("/lastfm/link?<token>&<content>")]
|
||||
fn lastfm_link(
|
||||
db: State<'_, Arc<DB>>,
|
||||
auth: Auth,
|
||||
token: String,
|
||||
content: String,
|
||||
) -> Result<Html<String>, errors::Error> {
|
||||
lastfm::link(db.deref().deref(), &auth.username, &token)?;
|
||||
|
||||
// Percent decode
|
||||
let base64_content = match RawStr::from_str(&content).percent_decode() {
|
||||
Ok(s) => s,
|
||||
Err(_) => bail!(errors::ErrorKind::EncodingError),
|
||||
};
|
||||
|
||||
// Base64 decode
|
||||
let popup_content = match base64::decode(base64_content.as_bytes()) {
|
||||
Ok(c) => c,
|
||||
Err(_) => bail!(errors::ErrorKind::EncodingError),
|
||||
};
|
||||
|
||||
// UTF-8 decode
|
||||
let popup_content_string = match str::from_utf8(&popup_content) {
|
||||
Ok(s) => s,
|
||||
Err(_) => bail!(errors::ErrorKind::EncodingError),
|
||||
};
|
||||
|
||||
Ok(Html(popup_content_string.to_string()))
|
||||
}
|
||||
|
||||
#[delete("/lastfm/link")]
|
||||
fn lastfm_unlink(db: State<'_, Arc<DB>>, auth: Auth) -> Result<(), errors::Error> {
|
||||
lastfm::unlink(db.deref().deref(), &auth.username)?;
|
||||
Ok(())
|
||||
}
|
515
src/api_tests.rs
|
@ -1,515 +0,0 @@
|
|||
use rocket::http::hyper::header::*;
|
||||
use rocket::http::uri::Uri;
|
||||
use rocket::http::Status;
|
||||
use rocket::local::Client;
|
||||
use std::{thread, time};
|
||||
|
||||
use crate::api;
|
||||
use crate::config;
|
||||
use crate::ddns;
|
||||
use crate::index;
|
||||
use crate::vfs;
|
||||
|
||||
use crate::test::get_test_environment;
|
||||
|
||||
const TEST_USERNAME: &str = "test_user";
|
||||
const TEST_PASSWORD: &str = "test_password";
|
||||
const TEST_MOUNT_NAME: &str = "collection";
|
||||
const TEST_MOUNT_SOURCE: &str = "test/collection";
|
||||
|
||||
fn complete_initial_setup(client: &Client) {
|
||||
let configuration = config::Config {
|
||||
album_art_pattern: None,
|
||||
prefix_url: None,
|
||||
reindex_every_n_seconds: None,
|
||||
ydns: None,
|
||||
users: Some(vec![config::ConfigUser {
|
||||
name: TEST_USERNAME.into(),
|
||||
password: TEST_PASSWORD.into(),
|
||||
admin: true,
|
||||
}]),
|
||||
mount_dirs: Some(vec![vfs::MountPoint {
|
||||
name: TEST_MOUNT_NAME.into(),
|
||||
source: TEST_MOUNT_SOURCE.into(),
|
||||
}]),
|
||||
};
|
||||
let body = serde_json::to_string(&configuration).unwrap();
|
||||
let response = client.put("/api/settings").body(&body).dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
}
|
||||
|
||||
fn do_auth(client: &Client) {
|
||||
let credentials = api::AuthCredentials {
|
||||
username: TEST_USERNAME.into(),
|
||||
password: TEST_PASSWORD.into(),
|
||||
};
|
||||
let body = serde_json::to_string(&credentials).unwrap();
|
||||
let response = client.post("/api/auth").body(body).dispatch();
|
||||
assert_eq!(response.status(), Status::Ok);
|
||||
}
|
||||
|
||||
#[test]
fn version() {
	let env = get_test_environment("api_version.sqlite");
	let client = &env.client;

	// The version endpoint requires no authentication.
	let mut res = client.get("/api/version").dispatch();
	assert_eq!(res.status(), Status::Ok);

	// The server should report API version 3.0.
	let payload = res.body_string().unwrap();
	let version: api::Version = serde_json::from_str(&payload).unwrap();
	assert_eq!(version, api::Version { major: 3, minor: 0 });
}
|
||||
|
||||
#[test]
fn initial_setup() {
	let env = get_test_environment("api_initial_setup.sqlite");
	let client = &env.client;

	// Fetches and parses the initial-setup status from the server.
	fn query_initial_setup(client: &Client) -> api::InitialSetup {
		let mut res = client.get("/api/initial_setup").dispatch();
		assert_eq!(res.status(), Status::Ok);
		serde_json::from_str(&res.body_string().unwrap()).unwrap()
	}

	// Before setup, the server reports that no users exist.
	assert_eq!(
		query_initial_setup(client),
		api::InitialSetup {
			has_any_users: false
		}
	);

	complete_initial_setup(client);

	// After setup, at least one user exists.
	assert_eq!(
		query_initial_setup(client),
		api::InitialSetup {
			has_any_users: true
		}
	);
}
|
||||
|
||||
#[test]
fn settings() {
	let env = get_test_environment("api_settings.sqlite");
	let client = &env.client;
	complete_initial_setup(client);

	// Settings are only readable by authenticated users.
	{
		let response = client.get("/api/settings").dispatch();
		assert_eq!(response.status(), Status::Unauthorized);
	}

	do_auth(client);

	// After initial setup, the server reports its default configuration.
	// Passwords are never echoed back and come through blank.
	{
		let mut response = client.get("/api/settings").dispatch();
		assert_eq!(response.status(), Status::Ok);
		let response_body = response.body_string().unwrap();
		let response_json: config::Config = serde_json::from_str(&response_body).unwrap();
		assert_eq!(
			response_json,
			config::Config {
				album_art_pattern: Some("Folder.(jpg|png)".to_string()),
				reindex_every_n_seconds: Some(1800),
				mount_dirs: Some(vec![vfs::MountPoint {
					name: TEST_MOUNT_NAME.into(),
					source: TEST_MOUNT_SOURCE.into()
				}]),
				prefix_url: None,
				users: Some(vec![config::ConfigUser {
					name: TEST_USERNAME.into(),
					password: "".into(),
					admin: true
				}]),
				ydns: Some(ddns::DDNSConfig {
					host: "".into(),
					username: "".into(),
					password: "".into()
				}),
			}
		);
	}

	// Submit a full configuration touching every settings field.
	let mut configuration = config::Config {
		album_art_pattern: Some("my_pattern".to_owned()),
		reindex_every_n_seconds: Some(3600),
		mount_dirs: Some(vec![
			vfs::MountPoint {
				name: TEST_MOUNT_NAME.into(),
				source: TEST_MOUNT_SOURCE.into(),
			},
			vfs::MountPoint {
				name: "more_music".into(),
				source: "test/collection".into(),
			},
		]),
		prefix_url: Some("my_prefix".to_owned()),
		users: Some(vec![
			config::ConfigUser {
				name: "test_user".into(),
				password: "some_password".into(),
				admin: true,
			},
			config::ConfigUser {
				name: "other_user".into(),
				password: "some_other_password".into(),
				admin: false,
			},
		]),
		ydns: Some(ddns::DDNSConfig {
			host: "my_host".into(),
			username: "my_username".into(),
			password: "my_password".into(),
		}),
	};

	let body = serde_json::to_string(&configuration).unwrap();

	// Adjust expectations: the server blanks out passwords when reading
	// settings back.
	configuration.users = Some(vec![
		config::ConfigUser {
			name: "test_user".into(),
			password: "".into(),
			admin: true,
		},
		config::ConfigUser {
			name: "other_user".into(),
			password: "".into(),
			admin: false,
		},
	]);

	// Fix: the PUT response was previously dispatched without checking its
	// status, so a rejected update would make the follow-up assertions
	// misleading. Assert that the update itself succeeded.
	let put_response = client.put("/api/settings").body(body).dispatch();
	assert_eq!(put_response.status(), Status::Ok);

	// Reading settings back returns the updated configuration.
	{
		let mut response = client.get("/api/settings").dispatch();
		assert_eq!(response.status(), Status::Ok);
		let response_body = response.body_string().unwrap();
		let response_json: config::Config = serde_json::from_str(&response_body).unwrap();
		assert_eq!(response_json, configuration);
	}
}
|
||||
|
||||
#[test]
fn preferences() {
	// TODO: exercise the preferences endpoints once test coverage is added.
}
|
||||
|
||||
#[test]
fn trigger_index() {
	let env = get_test_environment("api_trigger_index.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);

	// Before indexing, the collection is empty.
	{
		let mut response = client.get("/api/random").dispatch();
		assert_eq!(response.status(), Status::Ok);
		let response_body = response.body_string().unwrap();
		let response_json: Vec<index::Directory> = serde_json::from_str(&response_body).unwrap();
		assert_eq!(response_json.len(), 0);
	}

	// Kick off a re-index.
	{
		let response = client.post("/api/trigger_index").dispatch();
		assert_eq!(response.status(), Status::Ok);
	}

	// Fix: the original test slept for a fixed 5 seconds, which is both
	// slow and flaky on loaded machines. Poll (up to ~10s) until the index
	// contains the two expected directories instead.
	let mut directories: Vec<index::Directory> = Vec::new();
	for _ in 0..50 {
		let mut response = client.get("/api/random").dispatch();
		assert_eq!(response.status(), Status::Ok);
		let response_body = response.body_string().unwrap();
		directories = serde_json::from_str(&response_body).unwrap();
		if directories.len() == 2 {
			break;
		}
		thread::sleep(time::Duration::from_millis(200));
	}
	assert_eq!(directories.len(), 2);
}
|
||||
|
||||
#[test]
fn auth() {
	let env = get_test_environment("api_auth.sqlite");
	let client = &env.client;
	complete_initial_setup(client);

	// Posts credentials to the auth endpoint and returns the raw response.
	let login = |username: &str, password: &str| {
		let credentials = api::AuthCredentials {
			username: username.into(),
			password: password.into(),
		};
		client
			.post("/api/auth")
			.body(serde_json::to_string(&credentials).unwrap())
			.dispatch()
	};

	// Unknown user is rejected.
	assert_eq!(login("garbage", "garbage").status(), Status::Unauthorized);

	// Known user with a wrong password is rejected.
	assert_eq!(login(TEST_USERNAME, "garbage").status(), Status::Unauthorized);

	// Correct credentials succeed and set a session cookie.
	let response = login(TEST_USERNAME, TEST_PASSWORD);
	assert_eq!(response.status(), Status::Ok);
	assert_eq!(response.cookies()[0].name(), "session");
}
|
||||
|
||||
#[test]
fn browse() {
	let env = get_test_environment("api_browse.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Fetches a browse URL and parses the listed collection files.
	fn list(client: &Client, url: &str) -> Vec<index::CollectionFile> {
		let mut response = client.get(url.to_owned()).dispatch();
		assert_eq!(response.status(), Status::Ok);
		serde_json::from_str(&response.body_string().unwrap()).unwrap()
	}

	// The root lists the single mount point.
	let root_listing = list(client, "/api/browse");
	assert_eq!(root_listing.len(), 1);

	// The mount point contains two entries; descend into the first directory.
	let collection_listing = list(client, "/api/browse/collection");
	assert_eq!(collection_listing.len(), 2);
	let mut next_path = match collection_listing[0] {
		index::CollectionFile::Directory(ref d) => d.path.clone(),
		_ => panic!(),
	};

	// /api/browse/collection/Khemmis
	let artist_listing = list(client, &format!("/api/browse/{}", Uri::percent_encode(&next_path)));
	assert_eq!(artist_listing.len(), 1);
	next_path = match artist_listing[0] {
		index::CollectionFile::Directory(ref d) => d.path.clone(),
		_ => panic!(),
	};

	// /api/browse/collection/Khemmis/Hunted
	let album_listing = list(client, &format!("/api/browse/{}", Uri::percent_encode(&next_path)));
	assert_eq!(album_listing.len(), 5);
}
|
||||
|
||||
#[test]
fn flatten() {
	let env = get_test_environment("api_flatten.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Flattening the whole collection and flattening its only mount point
	// should both list every song.
	for endpoint in &["/api/flatten", "/api/flatten/collection"] {
		let mut response = client.get(*endpoint).dispatch();
		assert_eq!(response.status(), Status::Ok);
		let songs: Vec<index::Song> =
			serde_json::from_str(&response.body_string().unwrap()).unwrap();
		assert_eq!(songs.len(), 12);
	}
}
|
||||
|
||||
#[test]
fn random() {
	let env = get_test_environment("api_random.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Both albums in the test collection should be returned.
	let mut response = client.get("/api/random").dispatch();
	assert_eq!(response.status(), Status::Ok);
	let directories: Vec<index::Directory> =
		serde_json::from_str(&response.body_string().unwrap()).unwrap();
	assert_eq!(directories.len(), 2);
}
|
||||
|
||||
#[test]
fn recent() {
	let env = get_test_environment("api_recent.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Both albums in the test collection count as recently added.
	let mut response = client.get("/api/recent").dispatch();
	assert_eq!(response.status(), Status::Ok);
	let directories: Vec<index::Directory> =
		serde_json::from_str(&response.body_string().unwrap()).unwrap();
	assert_eq!(directories.len(), 2);
}
|
||||
|
||||
#[test]
fn search() {
	let env = get_test_environment("api_search.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Exactly one song in the test collection matches "door".
	let mut response = client.get("/api/search/door").dispatch();
	assert_eq!(response.status(), Status::Ok);
	let results: Vec<index::CollectionFile> =
		serde_json::from_str(&response.body_string().unwrap()).unwrap();
	assert_eq!(results.len(), 1);
	match results[0] {
		index::CollectionFile::Song(ref song) => {
			assert_eq!(song.title, Some("Beyond The Door".into()))
		}
		_ => panic!(),
	}
}
|
||||
|
||||
#[test]
fn serve() {
	let env = get_test_environment("api_serve.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	let song_url = "/api/serve/collection%2FKhemmis%2FHunted%2F02%20-%20Candlelight.mp3";

	// A plain request serves the entire file.
	{
		let mut response = client.get(song_url).dispatch();
		assert_eq!(response.status(), Status::Ok);
		let bytes = response.body().unwrap().into_bytes().unwrap();
		assert_eq!(bytes.len(), 24_142);
	}

	// A ranged request serves exactly the requested byte window.
	{
		let mut response = client
			.get(song_url)
			.header(Range::bytes(100, 299))
			.dispatch();
		assert_eq!(response.status(), Status::PartialContent);
		let bytes = response.body().unwrap().into_bytes().unwrap();
		assert_eq!(bytes.len(), 200);
		assert_eq!(
			response.headers().get_one("Content-Length").unwrap(),
			"200"
		);
	}
}
|
||||
|
||||
#[test]
fn playlists() {
	let env = get_test_environment("api_playlists.sqlite");
	let client = &env.client;
	complete_initial_setup(client);
	do_auth(client);
	env.update_index();

	// Fetches the authenticated user's playlists.
	fn list_playlists(client: &Client) -> Vec<api::ListPlaylistsEntry> {
		let mut response = client.get("/api/playlists").dispatch();
		assert_eq!(response.status(), Status::Ok);
		serde_json::from_str(&response.body_string().unwrap()).unwrap()
	}

	// No playlists exist initially.
	assert_eq!(list_playlists(client).len(), 0);

	// Create a playlist from a slice of the flattened collection.
	{
		let songs: Vec<index::Song> = {
			let mut response = client.get("/api/flatten").dispatch();
			serde_json::from_str(&response.body_string().unwrap()).unwrap()
		};
		let my_playlist = api::SavePlaylistInput {
			tracks: songs[2..6].iter().map(|s| s.path.clone()).collect(),
		};
		let response = client
			.put("/api/playlist/my_playlist")
			.body(serde_json::to_string(&my_playlist).unwrap())
			.dispatch();
		assert_eq!(response.status(), Status::Ok);
	}

	// The new playlist shows up in the listing.
	assert_eq!(
		list_playlists(client),
		vec![api::ListPlaylistsEntry {
			name: "my_playlist".into()
		}]
	);

	// Its content can be read back.
	{
		let mut response = client.get("/api/playlist/my_playlist").dispatch();
		assert_eq!(response.status(), Status::Ok);
		let songs: Vec<index::Song> =
			serde_json::from_str(&response.body_string().unwrap()).unwrap();
		assert_eq!(songs.len(), 4);
	}

	// Deleting the playlist leaves the listing empty again.
	{
		let response = client.delete("/api/playlist/my_playlist").dispatch();
		assert_eq!(response.status(), Status::Ok);
	}
	assert_eq!(list_playlists(client).len(), 0);
}
|
317
src/app.rs
Normal file
|
@ -0,0 +1,317 @@
|
|||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use log::info;
|
||||
use rand::rngs::OsRng;
|
||||
use rand::RngCore;
|
||||
use tokio::fs::try_exists;
|
||||
use tokio::task::spawn_blocking;
|
||||
|
||||
use crate::app::legacy::*;
|
||||
use crate::paths::Paths;
|
||||
|
||||
pub mod auth;
|
||||
pub mod config;
|
||||
pub mod ddns;
|
||||
pub mod formats;
|
||||
pub mod index;
|
||||
pub mod legacy;
|
||||
pub mod ndb;
|
||||
pub mod peaks;
|
||||
pub mod playlist;
|
||||
pub mod scanner;
|
||||
pub mod thumbnail;
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test;
|
||||
|
||||
/// Application-wide error type. Most variants wrap a lower-level error via
/// `#[from]`; variants carrying a `PathBuf` also record which file the
/// operation concerned.
#[derive(thiserror::Error, Debug)]
pub enum Error {
	// Task and thread management.
	#[error(transparent)]
	ThreadPoolBuilder(#[from] rayon::ThreadPoolBuildError),
	#[error(transparent)]
	ThreadJoining(#[from] tokio::task::JoinError),

	// Filesystem, database and audio metadata parsing.
	#[error("Filesystem error for `{0}`: `{1}`")]
	Io(PathBuf, std::io::Error),
	#[error(transparent)]
	FileWatch(#[from] notify::Error),
	#[error(transparent)]
	SQL(#[from] rusqlite::Error),
	#[error(transparent)]
	Ape(#[from] ape::Error),
	#[error("ID3 error in `{0}`: `{1}`")]
	Id3(PathBuf, id3::Error),
	#[error("Metaflac error in `{0}`: `{1}`")]
	Metaflac(PathBuf, metaflac::Error),
	#[error("Mp4aMeta error in `{0}`: `{1}`")]
	Mp4aMeta(PathBuf, mp4ameta::Error),
	#[error(transparent)]
	Opus(#[from] opus_headers::ParseError),
	#[error(transparent)]
	Vorbis(#[from] lewton::VorbisError),
	#[error("Could not find a Vorbis comment within flac file")]
	VorbisCommentNotFoundInFlacFile,
	#[error("Could not read thumbnail image in `{0}`:\n\n{1}")]
	Image(PathBuf, image::error::ImageError),
	#[error("This file format is not supported: {0}")]
	UnsupportedFormat(&'static str),

	// Symphonia media decoding. These share one underlying error type and
	// are deliberately not `#[from]`, so call sites pick the stage that
	// failed explicitly.
	// NOTE(review): `MediaDecodeError` and `MediaDecoderError` look like
	// near-duplicates — confirm both are intentional.
	#[error("No tracks found in audio file: {0}")]
	MediaEmpty(PathBuf),
	#[error(transparent)]
	MediaDecodeError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaDecoderError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaPacketError(symphonia::core::errors::Error),
	#[error(transparent)]
	MediaProbeError(symphonia::core::errors::Error),

	// Waveform peaks (de)serialization.
	#[error(transparent)]
	PeaksSerialization(bitcode::Error),
	#[error(transparent)]
	PeaksDeserialization(bitcode::Error),

	// Embedded native database.
	#[error(transparent)]
	NativeDatabase(#[from] native_db::db_type::Error),
	#[error("Could not initialize database")]
	NativeDatabaseCreationError(native_db::db_type::Error),

	// Dynamic DNS updates.
	#[error("DDNS update query failed with HTTP status code `{0}`")]
	UpdateQueryFailed(u16),
	#[error("DDNS update query failed due to a transport error")]
	UpdateQueryTransport,

	// Configuration and settings.
	#[error("Auth secret does not have the expected format")]
	AuthenticationSecretInvalid,
	#[error("Missing auth secret")]
	AuthenticationSecretNotFound,
	#[error("Missing settings")]
	MiscSettingsNotFound,
	#[error("Index album art pattern is not a valid regex")]
	IndexAlbumArtPatternInvalid,
	#[error("DDNS update URL is invalid")]
	DDNSUpdateURLInvalid,

	// Config and collection (de)serialization.
	#[error("Could not deserialize configuration: `{0}`")]
	ConfigDeserialization(toml::de::Error),
	#[error("Could not serialize configuration: `{0}`")]
	ConfigSerialization(toml::ser::Error),
	#[error("Could not deserialize collection")]
	IndexDeserializationError,
	#[error("Could not serialize collection")]
	IndexSerializationError,

	// Virtual filesystem and collection lookups.
	#[error("Invalid Directory")]
	InvalidDirectory(String),
	#[error("The following virtual path could not be mapped to a real path: `{0}`")]
	CouldNotMapToRealPath(PathBuf),
	#[error("The following real path could not be mapped to a virtual path: `{0}`")]
	CouldNotMapToVirtualPath(PathBuf),
	#[error("User not found")]
	UserNotFound,
	#[error("Directory not found: {0}")]
	DirectoryNotFound(PathBuf),
	#[error("Artist not found")]
	ArtistNotFound,
	#[error("Album not found")]
	AlbumNotFound,
	#[error("Genre not found")]
	GenreNotFound,
	#[error("Song not found")]
	SongNotFound,
	#[error("Invalid search query syntax")]
	SearchQueryParseError,
	#[error("Playlist not found")]
	PlaylistNotFound,
	#[error("No embedded artwork was found in `{0}`")]
	EmbeddedArtworkNotFound(PathBuf),

	// Authentication and user management.
	#[error("Cannot use empty username")]
	EmptyUsername,
	#[error("Cannot use empty password")]
	EmptyPassword,
	#[error("Username already exists")]
	DuplicateUsername,
	#[error("Username does not exist")]
	IncorrectUsername,
	#[error("Password does not match username")]
	IncorrectPassword,
	#[error("Invalid auth token")]
	InvalidAuthToken,
	#[error("Incorrect authorization scope")]
	IncorrectAuthorizationScope,
	#[error("Failed to hash password")]
	PasswordHashing,
	#[error("Failed to encode authorization token")]
	AuthorizationTokenEncoding,
	#[error("Failed to encode Branca token")]
	BrancaTokenEncoding,
}
|
||||
|
||||
/// Top-level application state: the HTTP port, static web assets location,
/// and one manager per subsystem. Cloning is cheap — each manager is
/// presumably handle-like (constructed once in `App::new` and cloned into
/// request handlers); confirm against the manager implementations.
#[derive(Clone)]
pub struct App {
	// Port the HTTP server listens on.
	pub port: u16,
	// Directory containing the bundled web client.
	pub web_dir_path: PathBuf,
	pub ddns_manager: ddns::Manager,
	pub scanner: scanner::Scanner,
	pub index_manager: index::Manager,
	pub config_manager: config::Manager,
	pub peaks_manager: peaks::Manager,
	pub playlist_manager: playlist::Manager,
	pub thumbnail_manager: thumbnail::Manager,
}
|
||||
|
||||
impl App {
|
||||
pub async fn new(port: u16, paths: Paths) -> Result<Self, Error> {
|
||||
fs::create_dir_all(&paths.data_dir_path)
|
||||
.map_err(|e| Error::Io(paths.data_dir_path.clone(), e))?;
|
||||
|
||||
fs::create_dir_all(&paths.web_dir_path)
|
||||
.map_err(|e| Error::Io(paths.web_dir_path.clone(), e))?;
|
||||
|
||||
let peaks_dir_path = paths.cache_dir_path.join("peaks");
|
||||
fs::create_dir_all(&peaks_dir_path).map_err(|e| Error::Io(peaks_dir_path.clone(), e))?;
|
||||
|
||||
let thumbnails_dir_path = paths.cache_dir_path.join("thumbnails");
|
||||
fs::create_dir_all(&thumbnails_dir_path)
|
||||
.map_err(|e| Error::Io(thumbnails_dir_path.clone(), e))?;
|
||||
|
||||
let auth_secret_file_path = paths.data_dir_path.join("auth.secret");
|
||||
Self::migrate_legacy_auth_secret(&paths.db_file_path, &auth_secret_file_path).await?;
|
||||
let auth_secret = Self::get_or_create_auth_secret(&auth_secret_file_path).await?;
|
||||
|
||||
let config_manager = config::Manager::new(&paths.config_file_path, auth_secret).await?;
|
||||
let ddns_manager = ddns::Manager::new(config_manager.clone());
|
||||
let ndb_manager = ndb::Manager::new(&paths.data_dir_path)?;
|
||||
let index_manager = index::Manager::new(&paths.data_dir_path).await?;
|
||||
let scanner = scanner::Scanner::new(index_manager.clone(), config_manager.clone()).await?;
|
||||
let peaks_manager = peaks::Manager::new(peaks_dir_path);
|
||||
let playlist_manager = playlist::Manager::new(ndb_manager);
|
||||
let thumbnail_manager = thumbnail::Manager::new(thumbnails_dir_path);
|
||||
|
||||
let app = Self {
|
||||
port,
|
||||
web_dir_path: paths.web_dir_path,
|
||||
ddns_manager,
|
||||
scanner,
|
||||
index_manager,
|
||||
config_manager,
|
||||
peaks_manager,
|
||||
playlist_manager,
|
||||
thumbnail_manager,
|
||||
};
|
||||
|
||||
app.migrate_legacy_db(&paths.db_file_path).await?;
|
||||
|
||||
Ok(app)
|
||||
}
|
||||
|
||||
async fn migrate_legacy_auth_secret(
|
||||
db_file_path: &PathBuf,
|
||||
secret_file_path: &PathBuf,
|
||||
) -> Result<(), Error> {
|
||||
if !try_exists(db_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(db_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if try_exists(secret_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(secret_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
info!(
|
||||
"Migrating auth secret from database at `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
let secret = spawn_blocking({
|
||||
let db_file_path = db_file_path.clone();
|
||||
move || read_legacy_auth_secret(&db_file_path)
|
||||
})
|
||||
.await??;
|
||||
|
||||
tokio::fs::write(secret_file_path, &secret)
|
||||
.await
|
||||
.map_err(|e| Error::Io(secret_file_path.clone(), e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_legacy_db(&self, db_file_path: &PathBuf) -> Result<(), Error> {
|
||||
if !try_exists(db_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(db_file_path.clone(), e))?
|
||||
{
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let Some(config) = tokio::task::spawn_blocking({
|
||||
let db_file_path = db_file_path.clone();
|
||||
move || read_legacy_config(&db_file_path)
|
||||
})
|
||||
.await??
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
info!(
|
||||
"Found usable config in legacy database at `{}`, beginning migration process",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
info!("Migrating configuration");
|
||||
self.config_manager.apply_config(config).await?;
|
||||
self.config_manager.save_config().await?;
|
||||
|
||||
info!("Migrating playlists");
|
||||
for (name, owner, songs) in read_legacy_playlists(
|
||||
db_file_path,
|
||||
self.index_manager.clone(),
|
||||
self.scanner.clone(),
|
||||
)
|
||||
.await?
|
||||
{
|
||||
self.playlist_manager
|
||||
.save_playlist(&name, &owner, songs)
|
||||
.await?;
|
||||
}
|
||||
|
||||
info!(
|
||||
"Deleting legacy database at `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
delete_legacy_db(db_file_path).await?;
|
||||
|
||||
info!(
|
||||
"Completed migration from `{}`",
|
||||
db_file_path.to_string_lossy()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_or_create_auth_secret(path: &Path) -> Result<auth::Secret, Error> {
|
||||
match tokio::fs::read(&path).await {
|
||||
Ok(s) => Ok(auth::Secret(
|
||||
s.try_into()
|
||||
.map_err(|_| Error::AuthenticationSecretInvalid)?,
|
||||
)),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
let mut secret = auth::Secret::default();
|
||||
OsRng.fill_bytes(secret.as_mut());
|
||||
tokio::fs::write(&path, &secret)
|
||||
.await
|
||||
.map_err(|_| Error::AuthenticationSecretInvalid)?;
|
||||
Ok(secret)
|
||||
}
|
||||
Err(e) => return Err(Error::Io(path.to_owned(), e)),
|
||||
}
|
||||
}
|
||||
}
|
95
src/app/auth.rs
Normal file
|
@ -0,0 +1,95 @@
|
|||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
use pbkdf2::password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString};
|
||||
use pbkdf2::Pbkdf2;
|
||||
use rand::rngs::OsRng;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
/// 32-byte symmetric secret used to sign and verify auth tokens.
/// `Default` yields an all-zero buffer which callers are expected to fill
/// with random bytes before use.
#[derive(Clone, Default)]
pub struct Secret(pub [u8; 32]);

impl AsRef<[u8]> for Secret {
	fn as_ref(&self) -> &[u8] {
		self.0.as_slice()
	}
}

impl AsMut<[u8]> for Secret {
	fn as_mut(&mut self) -> &mut [u8] {
		self.0.as_mut_slice()
	}
}
|
||||
|
||||
/// Opaque wrapper around an encoded Branca authentication token string.
#[derive(Debug)]
pub struct Token(pub String);

/// Purpose a token was issued for. Tokens presented with a mismatched
/// scope are rejected by `decode_auth_token`.
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub enum Scope {
	PolarisAuth,
}

/// Payload serialized (as JSON) inside an auth token.
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)]
pub struct Authorization {
	pub username: String,
	pub scope: Scope,
}
|
||||
|
||||
pub fn hash_password(password: &str) -> Result<String, Error> {
|
||||
if password.is_empty() {
|
||||
return Err(Error::EmptyPassword);
|
||||
}
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
match Pbkdf2.hash_password(password.as_bytes(), &salt) {
|
||||
Ok(h) => Ok(h.to_string()),
|
||||
Err(_) => Err(Error::PasswordHashing),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn verify_password(password_hash: &str, attempted_password: &str) -> bool {
|
||||
match PasswordHash::new(password_hash) {
|
||||
Ok(h) => Pbkdf2
|
||||
.verify_password(attempted_password.as_bytes(), &h)
|
||||
.is_ok(),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_auth_token(
|
||||
authorization: &Authorization,
|
||||
auth_secret: &Secret,
|
||||
) -> Result<Token, Error> {
|
||||
let serialized_authorization =
|
||||
serde_json::to_string(&authorization).or(Err(Error::AuthorizationTokenEncoding))?;
|
||||
branca::encode(
|
||||
serialized_authorization.as_bytes(),
|
||||
auth_secret.as_ref(),
|
||||
SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs() as u32,
|
||||
)
|
||||
.or(Err(Error::BrancaTokenEncoding))
|
||||
.map(Token)
|
||||
}
|
||||
|
||||
pub fn decode_auth_token(
|
||||
auth_token: &Token,
|
||||
scope: Scope,
|
||||
auth_secret: &Secret,
|
||||
) -> Result<Authorization, Error> {
|
||||
let Token(data) = auth_token;
|
||||
let ttl = match scope {
|
||||
Scope::PolarisAuth => 0, // permanent
|
||||
};
|
||||
let authorization =
|
||||
branca::decode(data, auth_secret.as_ref(), ttl).map_err(|_| Error::InvalidAuthToken)?;
|
||||
let authorization: Authorization =
|
||||
serde_json::from_slice(&authorization[..]).map_err(|_| Error::InvalidAuthToken)?;
|
||||
if authorization.scope != scope {
|
||||
return Err(Error::IncorrectAuthorizationScope);
|
||||
}
|
||||
Ok(authorization)
|
||||
}
|
338
src/app/config.rs
Normal file
|
@ -0,0 +1,338 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use log::{error, info};
|
||||
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use notify_debouncer_full::{Debouncer, FileIdMap};
|
||||
use regex::Regex;
|
||||
use tokio::sync::{futures::Notified, Notify, RwLock};
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
mod mounts;
|
||||
pub mod storage;
|
||||
mod user;
|
||||
|
||||
pub use mounts::*;
|
||||
pub use user::*;
|
||||
|
||||
use super::auth;
|
||||
|
||||
/// In-memory, validated configuration. Unlike `storage::Config` (the raw
/// on-disk form), fields here hold parsed types: a compiled `Regex` and a
/// parsed `http::Uri` instead of strings. Conversion between the two is
/// done by the `TryFrom`/`From` impls below.
#[derive(Debug, Clone, Default)]
pub struct Config {
	pub album_art_pattern: Option<Regex>,
	pub ddns_update_url: Option<http::Uri>,
	pub mount_dirs: Vec<MountDir>,
	pub users: Vec<User>,
}
|
||||
|
||||
impl TryFrom<storage::Config> for Config {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(c: storage::Config) -> Result<Self, Self::Error> {
|
||||
let mut config = Config::default();
|
||||
config.set_mounts(c.mount_dirs)?;
|
||||
config.set_users(c.users)?;
|
||||
|
||||
config.album_art_pattern = match c.album_art_pattern.as_deref().map(Regex::new) {
|
||||
Some(Ok(u)) => Some(u),
|
||||
Some(Err(_)) => return Err(Error::IndexAlbumArtPatternInvalid),
|
||||
None => None,
|
||||
};
|
||||
|
||||
config.ddns_update_url = match c.ddns_update_url.map(http::Uri::try_from) {
|
||||
Some(Ok(u)) => Some(u),
|
||||
Some(Err(_)) => return Err(Error::DDNSUpdateURLInvalid),
|
||||
None => None,
|
||||
};
|
||||
|
||||
Ok(config)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Config> for storage::Config {
|
||||
fn from(c: Config) -> Self {
|
||||
Self {
|
||||
album_art_pattern: c.album_art_pattern.map(|p| p.as_str().to_owned()),
|
||||
mount_dirs: c.mount_dirs.into_iter().map(|d| d.into()).collect(),
|
||||
ddns_update_url: c.ddns_update_url.map(|u| u.to_string()),
|
||||
users: c.users.into_iter().map(|u| u.into()).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Shared handle to the application configuration.
///
/// Cheap to clone; all clones share the same in-memory configuration and
/// react to the same change notifications.
#[derive(Clone)]
pub struct Manager {
	// Location of the TOML configuration file on disk.
	config_file_path: PathBuf,
	// Current parsed configuration, shared across clones.
	config: Arc<RwLock<Config>>,
	// Secret used to sign and verify authentication tokens.
	auth_secret: auth::Secret,
	// Held only to keep the filesystem watcher alive for the Manager's lifetime.
	#[allow(dead_code)]
	file_watcher: Arc<Debouncer<RecommendedWatcher, FileIdMap>>,
	// Signalled whenever the configuration changes (reload from disk or setter).
	change_notify: Arc<Notify>,
}
|
||||
|
||||
impl Manager {
	/// Creates a manager for the configuration file at `config_file_path`.
	///
	/// Ensures the file exists (creating parent directories and an empty file
	/// if necessary), watches it for changes (debounced to one second), spawns
	/// a background task that reloads it whenever it changes, and performs an
	/// initial load before returning.
	pub async fn new(config_file_path: &Path, auth_secret: auth::Secret) -> Result<Self, Error> {
		if let Some(parent) = config_file_path.parent() {
			tokio::fs::create_dir_all(parent)
				.await
				.map_err(|e| Error::Io(parent.to_owned(), e))?;
		}

		// Create the file only if it does not already exist; an existing file is fine.
		match tokio::fs::File::create_new(config_file_path).await {
			Ok(_) => (),
			Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => (),
			Err(e) => {
				error!("Failed to create config file at {config_file_path:#?}: {e}");
				return Err(Error::Io(config_file_path.to_owned(), e));
			}
		};

		let notify = Arc::new(Notify::new());
		// Debounce filesystem events so bursts of writes trigger a single reload.
		let mut debouncer = notify_debouncer_full::new_debouncer(Duration::from_secs(1), None, {
			let notify = notify.clone();
			move |_| {
				notify.notify_waiters();
			}
		})?;

		debouncer
			.watcher()
			.watch(&config_file_path, RecursiveMode::NonRecursive)?;

		let manager = Self {
			config_file_path: config_file_path.to_owned(),
			config: Arc::new(RwLock::new(Config::default())),
			auth_secret,
			file_watcher: Arc::new(debouncer),
			change_notify: Arc::default(),
		};

		// Background task: re-read the file whenever the watcher signals a change.
		// Reload errors are logged, not fatal; the previous config stays in effect.
		tokio::task::spawn({
			let manager = manager.clone();
			async move {
				loop {
					notify.notified().await;
					if let Err(e) = manager.reload_config().await {
						error!("Configuration error: {e}");
					} else {
						info!("Successfully applied configuration change");
					}
				}
			}
		});

		manager.reload_config().await?;

		Ok(manager)
	}

	/// Returns a future that resolves the next time the configuration changes.
	pub fn on_config_change(&self) -> Notified {
		self.change_notify.notified()
	}

	// Re-reads the configuration file from disk and applies it in memory.
	async fn reload_config(&self) -> Result<(), Error> {
		let config = Self::read_config(&self.config_file_path).await?;
		self.apply_config(config).await
	}

	// Reads and deserializes the TOML configuration file.
	async fn read_config(config_file_path: &Path) -> Result<storage::Config, Error> {
		let config_content = tokio::fs::read_to_string(config_file_path)
			.await
			.map_err(|e| Error::Io(config_file_path.to_owned(), e))?;
		toml::de::from_str::<storage::Config>(&config_content).map_err(Error::ConfigDeserialization)
	}

	/// Serializes the current in-memory configuration and writes it to disk.
	///
	/// NOTE(review): this write also wakes the file watcher, which triggers a
	/// reload of the file just written — apparently benign, but worth knowing.
	pub async fn save_config(&self) -> Result<(), Error> {
		let serialized = toml::ser::to_string_pretty::<storage::Config>(
			&self.config.read().await.clone().into(),
		)
		.map_err(Error::ConfigSerialization)?;
		tokio::fs::write(&self.config_file_path, serialized.as_bytes())
			.await
			.map_err(|e| Error::Io(self.config_file_path.clone(), e))?;
		Ok(())
	}

	/// Replaces the in-memory configuration and notifies change listeners.
	/// Does not persist to disk.
	pub async fn apply_config(&self, new_config: storage::Config) -> Result<(), Error> {
		let mut config = self.config.write().await;
		*config = new_config.try_into()?;
		self.change_notify.notify_waiters();
		Ok(())
	}

	// Applies an infallible mutation to the configuration; see mutate_fallible.
	async fn mutate<F: FnOnce(&mut Config)>(&self, op: F) -> Result<(), Error> {
		self.mutate_fallible(|c| {
			op(c);
			Ok(())
		})
		.await
	}

	// Mutates the configuration under the write lock, then notifies listeners
	// and persists to disk. The write lock is released (inner scope) before
	// saving so the disk write does not block readers.
	async fn mutate_fallible<F: FnOnce(&mut Config) -> Result<(), Error>>(
		&self,
		op: F,
	) -> Result<(), Error> {
		{
			let mut config = self.config.write().await;
			op(&mut config)?;
		}
		self.change_notify.notify_waiters();
		self.save_config().await?;
		Ok(())
	}

	/// Returns the album-art filename pattern, falling back to a built-in default.
	pub async fn get_index_album_art_pattern(&self) -> Regex {
		let config = self.config.read().await;
		let pattern = config.album_art_pattern.clone();
		pattern.unwrap_or_else(|| Regex::new("Folder.(jpeg|jpg|png)").unwrap())
	}

	/// Sets the album-art filename pattern and persists the change.
	pub async fn set_index_album_art_pattern(&self, regex: Regex) -> Result<(), Error> {
		self.mutate(|c| {
			c.album_art_pattern = Some(regex);
		})
		.await
	}

	/// Returns the configured dynamic DNS update URL, if any.
	pub async fn get_ddns_update_url(&self) -> Option<http::Uri> {
		self.config.read().await.ddns_update_url.clone()
	}

	/// Sets (or clears) the dynamic DNS update URL and persists the change.
	pub async fn set_ddns_update_url(&self, url: Option<http::Uri>) -> Result<(), Error> {
		self.mutate(|c| {
			c.ddns_update_url = url;
		})
		.await
	}

	/// Returns a copy of all user accounts.
	pub async fn get_users(&self) -> Vec<User> {
		self.config.read().await.users.iter().cloned().collect()
	}

	/// Returns the user with this name, or `Error::UserNotFound`.
	pub async fn get_user(&self, username: &str) -> Result<User, Error> {
		let config = self.config.read().await;
		config
			.get_user(username)
			.cloned()
			.ok_or(Error::UserNotFound)
	}

	/// Creates a new user account and persists the change.
	pub async fn create_user(
		&self,
		username: &str,
		password: &str,
		admin: bool,
	) -> Result<(), Error> {
		self.mutate_fallible(|c| c.create_user(username, password, admin))
			.await
	}

	/// Verifies credentials and returns an auth token on success.
	pub async fn login(&self, username: &str, password: &str) -> Result<auth::Token, Error> {
		let config = self.config.read().await;
		config.login(username, password, &self.auth_secret)
	}

	/// Grants or revokes admin rights and persists the change.
	pub async fn set_is_admin(&self, username: &str, is_admin: bool) -> Result<(), Error> {
		self.mutate_fallible(|c| c.set_is_admin(username, is_admin))
			.await
	}

	/// Changes a user's password and persists the change.
	pub async fn set_password(&self, username: &str, password: &str) -> Result<(), Error> {
		self.mutate_fallible(|c| c.set_password(username, password))
			.await
	}

	/// Validates an auth token against the current set of users.
	pub async fn authenticate(
		&self,
		auth_token: &auth::Token,
		scope: auth::Scope,
	) -> Result<auth::Authorization, Error> {
		let config = self.config.read().await;
		config.authenticate(auth_token, scope, &self.auth_secret)
	}

	/// Deletes a user and persists the change.
	pub async fn delete_user(&self, username: &str) -> Result<(), Error> {
		self.mutate(|c| c.delete_user(username)).await
	}

	/// Returns a copy of all mount points.
	pub async fn get_mounts(&self) -> Vec<MountDir> {
		let config = self.config.read().await;
		config.mount_dirs.iter().cloned().collect()
	}

	/// Maps a virtual path to a real filesystem path using the current mounts.
	pub async fn resolve_virtual_path<P: AsRef<Path>>(
		&self,
		virtual_path: P,
	) -> Result<PathBuf, Error> {
		let config = self.config.read().await;
		config.resolve_virtual_path(virtual_path)
	}

	/// Replaces the mount points and persists the change.
	pub async fn set_mounts(&self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
		self.mutate_fallible(|c| c.set_mounts(mount_dirs)).await
	}
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use crate::app::test;
	use crate::test_name;

	use super::*;

	#[tokio::test]
	async fn blank_config_round_trip() {
		// An empty config file should load as the default configuration.
		let config_path = PathBuf::from_iter(["test-data", "blank.toml"]);
		let manager = Manager::new(&config_path, auth::Secret([0; 32]))
			.await
			.unwrap();
		let config: storage::Config = manager.config.read().await.clone().into();
		assert_eq!(config, storage::Config::default());
	}

	#[tokio::test]
	async fn can_read_config() {
		// Loads the checked-in sample config and spot-checks every section.
		let config_path = PathBuf::from_iter(["test-data", "config.toml"]);
		let manager = Manager::new(&config_path, auth::Secret([0; 32]))
			.await
			.unwrap();
		let config: storage::Config = manager.config.read().await.clone().into();

		assert_eq!(
			config.album_art_pattern,
			Some(r#"^Folder\.(png|jpg|jpeg)$"#.to_owned())
		);
		assert_eq!(
			config.mount_dirs,
			vec![storage::MountDir {
				source: PathBuf::from("test-data/small-collection"),
				name: "root".to_owned(),
			}]
		);
		assert_eq!(config.users[0].name, "test_user");
		assert_eq!(config.users[0].admin, Some(true));
		assert_eq!(
			config.users[0].initial_password,
			Some("very_secret_password".to_owned())
		);
		// Loading resolves the initial password into a hash.
		assert!(config.users[0].hashed_password.is_some());
	}

	#[tokio::test]
	async fn can_write_config() {
		// A change made through one manager should be visible to a fresh
		// manager reading the same file, proving the change was persisted.
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		ctx.config_manager
			.create_user("Walter", "example_password", false)
			.await
			.unwrap();

		let manager = Manager::new(&ctx.config_manager.config_file_path, auth::Secret([0; 32]))
			.await
			.unwrap();
		assert!(manager.get_user("Walter").await.is_ok());
	}
}
|
149
src/app/config/mounts.rs
Normal file
|
@ -0,0 +1,149 @@
|
|||
use std::{
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
use crate::app::Error;
|
||||
|
||||
use super::storage;
|
||||
use super::Config;
|
||||
|
||||
/// A validated mount point: a real directory exposed under a virtual name.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct MountDir {
	// Real filesystem directory backing this mount (separators sanitized).
	pub source: PathBuf,
	// Virtual name under which the directory is exposed.
	pub name: String,
}
|
||||
|
||||
impl TryFrom<storage::MountDir> for MountDir {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(mount_dir: storage::MountDir) -> Result<Self, Self::Error> {
|
||||
// TODO validation
|
||||
Ok(Self {
|
||||
source: sanitize_path(&mount_dir.source),
|
||||
name: mount_dir.name,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<MountDir> for storage::MountDir {
|
||||
fn from(m: MountDir) -> Self {
|
||||
Self {
|
||||
source: m.source,
|
||||
name: m.name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn set_mounts(&mut self, mount_dirs: Vec<storage::MountDir>) -> Result<(), Error> {
|
||||
let mut new_mount_dirs = Vec::new();
|
||||
for mount_dir in mount_dirs {
|
||||
let mount_dir = <storage::MountDir as TryInto<MountDir>>::try_into(mount_dir)?;
|
||||
new_mount_dirs.push(mount_dir);
|
||||
}
|
||||
new_mount_dirs.dedup_by(|a, b| a.name == b.name);
|
||||
self.mount_dirs = new_mount_dirs;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn resolve_virtual_path<P: AsRef<Path>>(&self, virtual_path: P) -> Result<PathBuf, Error> {
|
||||
for mount in &self.mount_dirs {
|
||||
if let Ok(p) = virtual_path.as_ref().strip_prefix(&mount.name) {
|
||||
return if p.components().count() == 0 {
|
||||
Ok(mount.source.clone())
|
||||
} else {
|
||||
Ok(mount.source.join(p))
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(Error::CouldNotMapToRealPath(virtual_path.as_ref().into()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Normalizes a path string so that both `/` and `\` separators become the
/// platform's native separator, letting a configuration written on one OS be
/// used on another.
///
/// The previous implementation compiled a `Regex` on every call; a simple
/// per-character replacement is equivalent and avoids that cost. The
/// parameter is `&Path` (rather than `&PathBuf`) — existing `&PathBuf`
/// callers still work through deref coercion.
fn sanitize_path(source: &Path) -> PathBuf {
	let sanitized: String = source
		.to_string_lossy()
		.chars()
		.map(|c| match c {
			'/' | '\\' => std::path::MAIN_SEPARATOR,
			other => other,
		})
		.collect();
	PathBuf::from(sanitized)
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use super::*;

	#[test]
	fn can_resolve_virtual_paths() {
		let raw_config = storage::Config {
			mount_dirs: vec![storage::MountDir {
				name: "root".to_owned(),
				source: PathBuf::from("test_dir"),
			}],
			..Default::default()
		};

		let config: Config = raw_config.try_into().unwrap();

		// Pairs of (virtual path components, expected real path components).
		let test_cases = vec![
			(vec!["root"], vec!["test_dir"]),
			(
				vec!["root", "somewhere", "something.png"],
				vec!["test_dir", "somewhere", "something.png"],
			),
		];

		for (r#virtual, real) in test_cases {
			let real_path: PathBuf = real.iter().collect();
			let virtual_path: PathBuf = r#virtual.iter().collect();
			let converted_path = config.resolve_virtual_path(&virtual_path).unwrap();
			assert_eq!(converted_path, real_path);
		}
	}

	#[test]
	fn sanitizes_paths() {
		// Expected result uses the native separators of the current platform.
		let mut correct_path = PathBuf::new();
		if cfg!(target_os = "windows") {
			correct_path.push("C:\\");
		} else {
			correct_path.push("/usr");
		}
		correct_path.push("some");
		correct_path.push("path");

		// Inputs mix `/` and `\` separators, including trailing runs.
		let tests = if cfg!(target_os = "windows") {
			vec![
				r#"C:/some/path"#,
				r#"C:\some\path"#,
				r#"C:\some\path\"#,
				r#"C:\some\path\\\\"#,
				r#"C:\some/path//"#,
			]
		} else {
			vec![
				r#"/usr/some/path"#,
				r#"/usr\some\path"#,
				r#"/usr\some\path\"#,
				r#"/usr\some\path\\\\"#,
				r#"/usr\some/path//"#,
			]
		};

		for test in tests {
			let raw_config = storage::Config {
				mount_dirs: vec![storage::MountDir {
					name: "root".to_owned(),
					source: PathBuf::from(test),
				}],
				..Default::default()
			};
			let config: Config = raw_config.try_into().unwrap();
			let converted_path = config.resolve_virtual_path(&PathBuf::from("root")).unwrap();
			assert_eq!(converted_path, correct_path);
		}
	}
}
|
32
src/app/config/storage.rs
Normal file
|
@ -0,0 +1,32 @@
|
|||
use std::path::PathBuf;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// On-disk (TOML) representation of a user account.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct User {
	pub name: String,
	#[serde(skip_serializing_if = "Option::is_none")]
	pub admin: Option<bool>,
	// Plain-text password, typically hand-written into a fresh config file.
	#[serde(skip_serializing_if = "Option::is_none")]
	pub initial_password: Option<String>,
	// Derived hash; takes precedence over initial_password when present.
	#[serde(skip_serializing_if = "Option::is_none")]
	pub hashed_password: Option<String>,
}
|
||||
|
||||
/// On-disk (TOML) representation of a mount point.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct MountDir {
	// Real directory path, as written in the config file (separators unsanitized).
	pub source: PathBuf,
	// Virtual name under which the directory is exposed.
	pub name: String,
}
|
||||
|
||||
/// On-disk (TOML) representation of the whole configuration file.
/// Optional/empty fields are omitted when serializing.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
pub struct Config {
	#[serde(skip_serializing_if = "Option::is_none")]
	pub album_art_pattern: Option<String>,
	#[serde(default, skip_serializing_if = "Vec::is_empty")]
	pub mount_dirs: Vec<MountDir>,
	#[serde(skip_serializing_if = "Option::is_none")]
	pub ddns_update_url: Option<String>,
	#[serde(default, skip_serializing_if = "Vec::is_empty")]
	pub users: Vec<User>,
}
|
308
src/app/config/user.rs
Normal file
|
@ -0,0 +1,308 @@
|
|||
use crate::app::{auth, Error};
|
||||
|
||||
use super::storage;
|
||||
use super::Config;
|
||||
|
||||
/// A runtime user account, with its password hash already resolved.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct User {
	pub name: String,
	// Some(true) grants admin rights; None or Some(false) do not (see is_admin).
	pub admin: Option<bool>,
	// Plain-text password from the config file, preserved for round-tripping.
	pub initial_password: Option<String>,
	// Always populated: taken from storage, or derived from initial_password.
	pub hashed_password: String,
}
|
||||
|
||||
impl User {
|
||||
pub fn is_admin(&self) -> bool {
|
||||
self.admin == Some(true)
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<storage::User> for User {
|
||||
type Error = Error;
|
||||
|
||||
fn try_from(user: storage::User) -> Result<Self, Self::Error> {
|
||||
let hashed_password = match (&user.initial_password, &user.hashed_password) {
|
||||
(_, Some(p)) => p.clone(),
|
||||
(Some(p), None) => auth::hash_password(p)?,
|
||||
(None, None) => return Err(Error::EmptyPassword),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
name: user.name,
|
||||
admin: user.admin,
|
||||
initial_password: user.initial_password,
|
||||
hashed_password,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<User> for storage::User {
|
||||
fn from(user: User) -> Self {
|
||||
Self {
|
||||
name: user.name,
|
||||
admin: user.admin,
|
||||
initial_password: user.initial_password,
|
||||
hashed_password: Some(user.hashed_password),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn set_users(&mut self, users: Vec<storage::User>) -> Result<(), Error> {
|
||||
let mut new_users = Vec::new();
|
||||
for user in users {
|
||||
let user = <storage::User as TryInto<User>>::try_into(user)?;
|
||||
new_users.push(user);
|
||||
}
|
||||
new_users.dedup_by(|a, b| a.name == b.name);
|
||||
self.users = new_users;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn create_user(
|
||||
&mut self,
|
||||
username: &str,
|
||||
password: &str,
|
||||
admin: bool,
|
||||
) -> Result<(), Error> {
|
||||
if username.is_empty() {
|
||||
return Err(Error::EmptyUsername);
|
||||
}
|
||||
|
||||
if self.exists(username) {
|
||||
return Err(Error::DuplicateUsername);
|
||||
}
|
||||
|
||||
let password_hash = auth::hash_password(&password)?;
|
||||
|
||||
self.users.push(User {
|
||||
name: username.to_owned(),
|
||||
admin: Some(admin),
|
||||
initial_password: None,
|
||||
hashed_password: password_hash,
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn exists(&self, username: &str) -> bool {
|
||||
self.users.iter().any(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn get_user(&self, username: &str) -> Option<&User> {
|
||||
self.users.iter().find(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn get_user_mut(&mut self, username: &str) -> Option<&mut User> {
|
||||
self.users.iter_mut().find(|u| u.name == username)
|
||||
}
|
||||
|
||||
pub fn authenticate(
|
||||
&self,
|
||||
auth_token: &auth::Token,
|
||||
scope: auth::Scope,
|
||||
auth_secret: &auth::Secret,
|
||||
) -> Result<auth::Authorization, Error> {
|
||||
let authorization = auth::decode_auth_token(auth_token, scope, auth_secret)?;
|
||||
if self.exists(&authorization.username) {
|
||||
Ok(authorization)
|
||||
} else {
|
||||
Err(Error::IncorrectUsername)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn login(
|
||||
&self,
|
||||
username: &str,
|
||||
password: &str,
|
||||
auth_secret: &auth::Secret,
|
||||
) -> Result<auth::Token, Error> {
|
||||
let user = self.get_user(username).ok_or(Error::IncorrectUsername)?;
|
||||
if auth::verify_password(&user.hashed_password, password) {
|
||||
let authorization = auth::Authorization {
|
||||
username: username.to_owned(),
|
||||
scope: auth::Scope::PolarisAuth,
|
||||
};
|
||||
auth::generate_auth_token(&authorization, auth_secret)
|
||||
} else {
|
||||
Err(Error::IncorrectPassword)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_is_admin(&mut self, username: &str, is_admin: bool) -> Result<(), Error> {
|
||||
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
|
||||
user.admin = Some(is_admin);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn set_password(&mut self, username: &str, password: &str) -> Result<(), Error> {
|
||||
let user = self.get_user_mut(username).ok_or(Error::UserNotFound)?;
|
||||
user.hashed_password = auth::hash_password(password)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn delete_user(&mut self, username: &str) {
|
||||
self.users.retain(|u| u.name != username);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use crate::app::test;
	use crate::test_name;

	use super::*;

	const TEST_USERNAME: &str = "Walter";
	const TEST_PASSWORD: &str = "super_secret!";

	#[test]
	fn adds_password_hashes() {
		// A user declared with only an initial password gains a hash on load.
		let user_in = storage::User {
			name: TEST_USERNAME.to_owned(),
			initial_password: Some(TEST_PASSWORD.to_owned()),
			..Default::default()
		};

		let user: User = user_in.try_into().unwrap();

		let user_out: storage::User = user.into();

		assert_eq!(user_out.name, TEST_USERNAME);
		assert_eq!(user_out.initial_password, Some(TEST_PASSWORD.to_owned()));
		assert!(user_out.hashed_password.is_some());
	}

	#[test]
	fn preserves_password_hashes() {
		// A user declared with an existing hash round-trips unchanged.
		let user_in = storage::User {
			name: TEST_USERNAME.to_owned(),
			hashed_password: Some("hash".to_owned()),
			..Default::default()
		};
		let user: User = user_in.clone().try_into().unwrap();
		let user_out: storage::User = user.into();
		assert_eq!(user_out, user_in);
	}

	#[tokio::test]
	async fn create_delete_user_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();
		assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_ok());

		ctx.config_manager.delete_user(TEST_USERNAME).await.unwrap();
		assert!(ctx.config_manager.get_user(TEST_USERNAME).await.is_err());
	}

	#[tokio::test]
	async fn cannot_create_user_with_blank_username() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx.config_manager.create_user("", TEST_PASSWORD, false);
		assert!(matches!(result.await.unwrap_err(), Error::EmptyUsername));
	}

	#[tokio::test]
	async fn cannot_create_user_with_blank_password() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx.config_manager.create_user(TEST_USERNAME, "", false);
		assert!(matches!(result.await.unwrap_err(), Error::EmptyPassword));
	}

	#[tokio::test]
	async fn cannot_create_duplicate_user() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		let result = ctx
			.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false);
		assert!(result.await.is_ok());

		// Second creation with the same name must be rejected.
		let result = ctx
			.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false);
		assert!(matches!(
			result.await.unwrap_err(),
			Error::DuplicateUsername
		));
	}

	#[tokio::test]
	async fn login_rejects_bad_password() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();

		let result = ctx.config_manager.login(TEST_USERNAME, "not the password");
		assert!(matches!(
			result.await.unwrap_err(),
			Error::IncorrectPassword
		));
	}

	#[tokio::test]
	async fn login_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();

		let result = ctx.config_manager.login(TEST_USERNAME, TEST_PASSWORD);
		assert!(result.await.is_ok());
	}

	#[tokio::test]
	async fn authenticate_rejects_bad_token() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();

		// A token that was never issued by us must not authenticate.
		let fake_token = auth::Token("fake token".to_owned());
		assert!(ctx
			.config_manager
			.authenticate(&fake_token, auth::Scope::PolarisAuth)
			.await
			.is_err())
	}

	#[tokio::test]
	async fn authenticate_golden_path() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;

		ctx.config_manager
			.create_user(TEST_USERNAME, TEST_PASSWORD, false)
			.await
			.unwrap();

		let token = ctx
			.config_manager
			.login(TEST_USERNAME, TEST_PASSWORD)
			.await
			.unwrap();

		let authorization = ctx
			.config_manager
			.authenticate(&token, auth::Scope::PolarisAuth)
			.await
			.unwrap();

		// The token issued at login should decode back to the same identity.
		assert_eq!(
			authorization,
			auth::Authorization {
				username: TEST_USERNAME.to_owned(),
				scope: auth::Scope::PolarisAuth,
			}
		)
	}
}
|
45
src/app/ddns.rs
Normal file
|
@ -0,0 +1,45 @@
|
|||
use log::{debug, error};
|
||||
use std::time::Duration;
|
||||
|
||||
use crate::app::{config, Error};
|
||||
|
||||
/// Performs dynamic DNS updates against the URL stored in the configuration.
#[derive(Clone)]
pub struct Manager {
	// Source of the DDNS update URL.
	config_manager: config::Manager,
}
|
||||
|
||||
impl Manager {
|
||||
pub fn new(config_manager: config::Manager) -> Self {
|
||||
Self { config_manager }
|
||||
}
|
||||
|
||||
pub async fn update_ddns(&self) -> Result<(), Error> {
|
||||
let url = self.config_manager.get_ddns_update_url().await;
|
||||
let Some(url) = url else {
|
||||
debug!("Skipping DDNS update because credentials are missing");
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let response = ureq::get(&url.to_string()).call();
|
||||
|
||||
match response {
|
||||
Ok(_) => Ok(()),
|
||||
Err(ureq::Error::Status(code, _)) => Err(Error::UpdateQueryFailed(code)),
|
||||
Err(ureq::Error::Transport(_)) => Err(Error::UpdateQueryTransport),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn begin_periodic_updates(&self) {
|
||||
tokio::spawn({
|
||||
let ddns = self.clone();
|
||||
async move {
|
||||
loop {
|
||||
if let Err(e) = ddns.update_ddns().await {
|
||||
error!("Dynamic DNS update error: {:?}", e);
|
||||
}
|
||||
tokio::time::sleep(Duration::from_secs(60 * 30)).await;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
444
src/app/formats.rs
Normal file
|
@ -0,0 +1,444 @@
|
|||
use id3::TagLike;
|
||||
use lewton::inside_ogg::OggStreamReader;
|
||||
use log::error;
|
||||
use std::fs;
|
||||
use std::io::{Seek, SeekFrom};
|
||||
use std::path::Path;
|
||||
|
||||
use crate::app::Error;
|
||||
use crate::utils;
|
||||
use crate::utils::AudioFormat;
|
||||
|
||||
/// Tag metadata extracted from an audio file, normalized across formats.
/// Fields a given format cannot provide are left `None`/empty.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct SongMetadata {
	pub disc_number: Option<u32>,
	pub track_number: Option<u32>,
	pub title: Option<String>,
	// Duration in seconds.
	pub duration: Option<u32>,
	pub artists: Vec<String>,
	pub album_artists: Vec<String>,
	pub album: Option<String>,
	pub year: Option<i32>,
	// Whether the tag embeds at least one picture.
	pub has_artwork: bool,
	pub lyricists: Vec<String>,
	pub composers: Vec<String>,
	pub genres: Vec<String>,
	pub labels: Vec<String>,
}
|
||||
|
||||
pub fn read_metadata<P: AsRef<Path>>(path: P) -> Option<SongMetadata> {
|
||||
let data = match utils::get_audio_format(&path) {
|
||||
Some(AudioFormat::AIFF) => read_id3(&path),
|
||||
Some(AudioFormat::FLAC) => read_flac(&path),
|
||||
Some(AudioFormat::MP3) => read_mp3(&path),
|
||||
Some(AudioFormat::OGG) => read_vorbis(&path),
|
||||
Some(AudioFormat::OPUS) => read_opus(&path),
|
||||
Some(AudioFormat::WAVE) => read_id3(&path),
|
||||
Some(AudioFormat::APE) | Some(AudioFormat::MPC) => read_ape(&path),
|
||||
Some(AudioFormat::MP4) | Some(AudioFormat::M4B) => read_mp4(&path),
|
||||
None => return None,
|
||||
};
|
||||
match data {
|
||||
Ok(d) => Some(d),
|
||||
Err(e) => {
|
||||
error!(
|
||||
"Error while reading file metadata for '{:?}': {}",
|
||||
path.as_ref(),
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Convenience accessor for multi-valued ID3 text frames.
trait ID3Ext {
	// Returns all text values of the given frame; empty when the frame is
	// absent or carries no text content.
	fn get_text_values(&self, frame_name: &str) -> Vec<String>;
}
|
||||
|
||||
impl ID3Ext for id3::Tag {
	// Collects every text value of `frame_name`; a missing frame or one
	// without text content yields an empty list.
	fn get_text_values(&self, frame_name: &str) -> Vec<String> {
		match self.get(frame_name).and_then(|f| f.content().text_values()) {
			Some(values) => values.map(str::to_string).collect(),
			None => Vec::new(),
		}
	}
}
|
||||
|
||||
fn read_id3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let file = fs::File::open(path.as_ref()).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
|
||||
read_id3_from_file(&file, path)
|
||||
}
|
||||
|
||||
// Parses an ID3 tag from an already-open file handle.
//
// A partially-readable tag is salvaged rather than rejected: when parsing
// fails but a partial tag was recovered, that partial tag is used.
fn read_id3_from_file<P: AsRef<Path>>(file: &fs::File, path: P) -> Result<SongMetadata, Error> {
	let tag = id3::Tag::read_from2(file)
		.or_else(|error| {
			// Keep whatever frames parsed successfully before the error.
			if let Some(tag) = error.partial_tag {
				Ok(tag)
			} else {
				Err(error)
			}
		})
		.map_err(|e| Error::Id3(path.as_ref().to_owned(), e))?;

	let artists = tag.get_text_values("TPE1"); // lead performer(s)
	let album_artists = tag.get_text_values("TPE2"); // band / album artist
	let album = tag.album().map(|s| s.to_string());
	let title = tag.title().map(|s| s.to_string());
	let duration = tag.duration();
	let disc_number = tag.disc();
	let track_number = tag.track();
	// Prefer the explicit year frame; fall back through the date frames.
	let year = tag
		.year()
		.or_else(|| tag.date_released().map(|d| d.year))
		.or_else(|| tag.original_date_released().map(|d| d.year))
		.or_else(|| tag.date_recorded().map(|d| d.year));
	let has_artwork = tag.pictures().count() > 0;
	let lyricists = tag.get_text_values("TEXT"); // lyricist / text writer
	let composers = tag.get_text_values("TCOM"); // composer
	let genres = tag.get_text_values("TCON"); // content type / genre
	let labels = tag.get_text_values("TPUB"); // publisher / label

	Ok(SongMetadata {
		disc_number,
		track_number,
		title,
		duration,
		artists,
		album_artists,
		album,
		year,
		has_artwork,
		lyricists,
		composers,
		genres,
		labels,
	})
}
|
||||
|
||||
fn read_mp3<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let mut file = fs::File::open(&path).unwrap();
|
||||
let mut metadata = read_id3_from_file(&file, &path)?;
|
||||
metadata.duration = metadata.duration.or_else(|| {
|
||||
file.seek(SeekFrom::Start(0)).unwrap();
|
||||
mp3_duration::from_file(&file)
|
||||
.map(|d| d.as_secs() as u32)
|
||||
.ok()
|
||||
});
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
mod ape_ext {
|
||||
use regex::Regex;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
pub fn read_string(item: &ape::Item) -> Option<String> {
|
||||
item.try_into().ok().map(str::to_string)
|
||||
}
|
||||
|
||||
pub fn read_strings(item: Option<&ape::Item>) -> Vec<String> {
|
||||
let Some(item) = item else {
|
||||
return vec![];
|
||||
};
|
||||
let strings: Vec<&str> = item.try_into().unwrap_or_default();
|
||||
strings.into_iter().map(str::to_string).collect()
|
||||
}
|
||||
|
||||
pub fn read_i32(item: &ape::Item) -> Option<i32> {
|
||||
item.try_into()
|
||||
.ok()
|
||||
.map(|s: &str| s.parse::<i32>().ok())
|
||||
.flatten()
|
||||
}
|
||||
|
||||
static X_OF_Y_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r#"^\d+"#).unwrap());
|
||||
|
||||
pub fn read_x_of_y(item: &ape::Item) -> Option<u32> {
|
||||
item.try_into()
|
||||
.ok()
|
||||
.map(|s: &str| {
|
||||
if let Some(m) = X_OF_Y_REGEX.find(s) {
|
||||
s[m.start()..m.end()].parse().ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.flatten()
|
||||
}
|
||||
}
|
||||
|
||||
fn read_ape<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let tag = ape::read_from_path(path)?;
|
||||
let artists = ape_ext::read_strings(tag.item("Artist"));
|
||||
let album = tag.item("Album").and_then(ape_ext::read_string);
|
||||
let album_artists = ape_ext::read_strings(tag.item("Album artist"));
|
||||
let title = tag.item("Title").and_then(ape_ext::read_string);
|
||||
let year = tag.item("Year").and_then(ape_ext::read_i32);
|
||||
let disc_number = tag.item("Disc").and_then(ape_ext::read_x_of_y);
|
||||
let track_number = tag.item("Track").and_then(ape_ext::read_x_of_y);
|
||||
let lyricists = ape_ext::read_strings(tag.item("LYRICIST"));
|
||||
let composers = ape_ext::read_strings(tag.item("COMPOSER"));
|
||||
let genres = ape_ext::read_strings(tag.item("GENRE"));
|
||||
let labels = ape_ext::read_strings(tag.item("PUBLISHER"));
|
||||
Ok(SongMetadata {
|
||||
artists,
|
||||
album_artists,
|
||||
album,
|
||||
title,
|
||||
duration: None,
|
||||
disc_number,
|
||||
track_number,
|
||||
year,
|
||||
has_artwork: false,
|
||||
lyricists,
|
||||
composers,
|
||||
genres,
|
||||
labels,
|
||||
})
|
||||
}
|
||||
|
||||
/// Reads song metadata from the comment header of an Ogg Vorbis file.
/// Duration and artwork are not extracted here and keep their defaults.
fn read_vorbis<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let file = fs::File::open(&path).map_err(|e| Error::Io(path.as_ref().to_owned(), e))?;
	let source = OggStreamReader::new(file)?;

	let mut metadata = SongMetadata::default();
	for (key, value) in source.comment_hdr.comment_list {
		// Comment keys are matched case-insensitively via the project macro.
		// Multi-valued fields (artists, genres, ...) accumulate across
		// repeated comments; single-valued fields keep the last occurrence.
		utils::match_ignore_case! {
			match key {
				"TITLE" => metadata.title = Some(value),
				"ALBUM" => metadata.album = Some(value),
				"ARTIST" => metadata.artists.push(value),
				"ALBUMARTIST" => metadata.album_artists.push(value),
				"TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
				"DATE" => metadata.year = value.parse::<i32>().ok(),
				"LYRICIST" => metadata.lyricists.push(value),
				"COMPOSER" => metadata.composers.push(value),
				"GENRE" => metadata.genres.push(value),
				"PUBLISHER" => metadata.labels.push(value),
				_ => (),
			}
		}
	}

	Ok(metadata)
}
|
||||
|
||||
/// Reads song metadata from the comment headers of an Opus file.
/// Mirrors read_vorbis; duration and artwork keep their defaults.
fn read_opus<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
	let headers = opus_headers::parse_from_path(path)?;

	let mut metadata = SongMetadata::default();
	for (key, value) in headers.comments.user_comments {
		// Comment keys are matched case-insensitively via the project macro.
		utils::match_ignore_case! {
			match key {
				"TITLE" => metadata.title = Some(value),
				"ALBUM" => metadata.album = Some(value),
				"ARTIST" => metadata.artists.push(value),
				"ALBUMARTIST" => metadata.album_artists.push(value),
				"TRACKNUMBER" => metadata.track_number = value.parse::<u32>().ok(),
				"DISCNUMBER" => metadata.disc_number = value.parse::<u32>().ok(),
				"DATE" => metadata.year = value.parse::<i32>().ok(),
				"LYRICIST" => metadata.lyricists.push(value),
				"COMPOSER" => metadata.composers.push(value),
				"GENRE" => metadata.genres.push(value),
				"PUBLISHER" => metadata.labels.push(value),
				_ => (),
			}
		}
	}

	Ok(metadata)
}
|
||||
|
||||
fn read_flac<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let tag = metaflac::Tag::read_from_path(&path)
|
||||
.map_err(|e| Error::Metaflac(path.as_ref().to_owned(), e))?;
|
||||
let vorbis = tag
|
||||
.vorbis_comments()
|
||||
.ok_or(Error::VorbisCommentNotFoundInFlacFile)?;
|
||||
let disc_number = vorbis
|
||||
.get("DISCNUMBER")
|
||||
.and_then(|d| d[0].parse::<u32>().ok());
|
||||
let year = vorbis.get("DATE").and_then(|d| d[0].parse::<i32>().ok());
|
||||
let mut streaminfo = tag.get_blocks(metaflac::BlockType::StreamInfo);
|
||||
let duration = match streaminfo.next() {
|
||||
Some(metaflac::Block::StreamInfo(s)) => Some(s.total_samples as u32 / s.sample_rate),
|
||||
_ => None,
|
||||
};
|
||||
let has_artwork = tag.pictures().count() > 0;
|
||||
|
||||
let multivalue = |o: Option<&Vec<String>>| o.cloned().unwrap_or_default();
|
||||
|
||||
Ok(SongMetadata {
|
||||
artists: multivalue(vorbis.artist()),
|
||||
album_artists: multivalue(vorbis.album_artist()),
|
||||
album: vorbis.album().map(|v| v[0].clone()),
|
||||
title: vorbis.title().map(|v| v[0].clone()),
|
||||
duration,
|
||||
disc_number,
|
||||
track_number: vorbis.track(),
|
||||
year,
|
||||
has_artwork,
|
||||
lyricists: multivalue(vorbis.get("LYRICIST")),
|
||||
composers: multivalue(vorbis.get("COMPOSER")),
|
||||
genres: multivalue(vorbis.get("GENRE")),
|
||||
labels: multivalue(vorbis.get("PUBLISHER")),
|
||||
})
|
||||
}
|
||||
|
||||
fn read_mp4<P: AsRef<Path>>(path: P) -> Result<SongMetadata, Error> {
|
||||
let mut tag = mp4ameta::Tag::read_from_path(&path)
|
||||
.map_err(|e| Error::Mp4aMeta(path.as_ref().to_owned(), e))?;
|
||||
let label_ident = mp4ameta::FreeformIdent::new("com.apple.iTunes", "Label");
|
||||
|
||||
Ok(SongMetadata {
|
||||
artists: tag.take_artists().collect(),
|
||||
album_artists: tag.take_album_artists().collect(),
|
||||
album: tag.take_album(),
|
||||
title: tag.take_title(),
|
||||
duration: tag.duration().map(|v| v.as_secs() as u32),
|
||||
disc_number: tag.disc_number().map(|d| d as u32),
|
||||
track_number: tag.track_number().map(|d| d as u32),
|
||||
year: tag.year().and_then(|v| v.parse::<i32>().ok()),
|
||||
has_artwork: tag.artwork().is_some(),
|
||||
lyricists: tag.take_lyricists().collect(),
|
||||
composers: tag.take_composers().collect(),
|
||||
genres: tag.take_genres().collect(),
|
||||
labels: tag.take_strings_of(&label_ident).collect(),
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
fn reads_file_metadata() {
	// All sample files are tagged identically; only duration support varies
	// per container format.
	let expected_without_duration = SongMetadata {
		disc_number: Some(3),
		track_number: Some(1),
		title: Some("TEST TITLE".into()),
		artists: vec!["TEST ARTIST".into()],
		album_artists: vec!["TEST ALBUM ARTIST".into()],
		album: Some("TEST ALBUM".into()),
		duration: None,
		year: Some(2016),
		has_artwork: false,
		lyricists: vec!["TEST LYRICIST".into()],
		composers: vec!["TEST COMPOSER".into()],
		genres: vec!["TEST GENRE".into()],
		labels: vec!["TEST LABEL".into()],
	};
	let expected_with_duration = SongMetadata {
		duration: Some(0),
		..expected_without_duration.clone()
	};

	let cases = [
		("test-data/formats/sample.aif", &expected_without_duration),
		("test-data/formats/sample.mp3", &expected_with_duration),
		("test-data/formats/sample.ogg", &expected_without_duration),
		("test-data/formats/sample.flac", &expected_with_duration),
		("test-data/formats/sample.m4a", &expected_with_duration),
		("test-data/formats/sample.opus", &expected_without_duration),
		("test-data/formats/sample.ape", &expected_without_duration),
		("test-data/formats/sample.wav", &expected_without_duration),
	];
	for (path, expected) in cases {
		assert_eq!(&read_metadata(Path::new(path)).unwrap(), expected);
	}
}
|
||||
|
||||
#[test]
fn reads_embedded_artwork() {
	// Every artwork sample file should report embedded artwork.
	for path in [
		"test-data/artwork/sample.aif",
		"test-data/artwork/sample.mp3",
		"test-data/artwork/sample.flac",
		"test-data/artwork/sample.m4a",
		"test-data/artwork/sample.wav",
	] {
		assert!(read_metadata(Path::new(path)).unwrap().has_artwork);
	}
}
|
||||
|
||||
#[test]
fn reads_multivalue_fields() {
	// Each multi-valued field carries two entries; only duration support
	// varies per container format.
	let expected_without_duration = SongMetadata {
		disc_number: Some(3),
		track_number: Some(1),
		title: Some("TEST TITLE".into()),
		artists: vec!["TEST ARTIST".into(), "OTHER ARTIST".into()],
		album_artists: vec!["TEST ALBUM ARTIST".into(), "OTHER ALBUM ARTIST".into()],
		album: Some("TEST ALBUM".into()),
		duration: None,
		year: Some(2016),
		has_artwork: false,
		lyricists: vec!["TEST LYRICIST".into(), "OTHER LYRICIST".into()],
		composers: vec!["TEST COMPOSER".into(), "OTHER COMPOSER".into()],
		genres: vec!["TEST GENRE".into(), "OTHER GENRE".into()],
		labels: vec!["TEST LABEL".into(), "OTHER LABEL".into()],
	};
	let expected_with_duration = SongMetadata {
		duration: Some(0),
		..expected_without_duration.clone()
	};

	// TODO Test m4a support (likely working). Pending https://tickets.metabrainz.org/browse/PICARD-3029
	let cases = [
		("test-data/multivalue/multivalue.aif", &expected_without_duration),
		("test-data/multivalue/multivalue.mp3", &expected_with_duration),
		("test-data/multivalue/multivalue.ogg", &expected_without_duration),
		("test-data/multivalue/multivalue.flac", &expected_with_duration),
		("test-data/multivalue/multivalue.opus", &expected_without_duration),
		("test-data/multivalue/multivalue.ape", &expected_without_duration),
		("test-data/multivalue/multivalue.wav", &expected_without_duration),
	];
	for (path, expected) in cases {
		assert_eq!(&read_metadata(Path::new(path)).unwrap(), expected);
	}
}
|
388
src/app/index.rs
Normal file
|
@ -0,0 +1,388 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, RwLock},
|
||||
};
|
||||
|
||||
use log::{error, info};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::task::spawn_blocking;
|
||||
|
||||
use crate::app::{scanner, Error};
|
||||
|
||||
mod browser;
|
||||
mod collection;
|
||||
mod dictionary;
|
||||
mod query;
|
||||
mod search;
|
||||
mod storage;
|
||||
|
||||
pub use browser::File;
|
||||
pub use collection::{Album, AlbumHeader, Artist, ArtistHeader, Genre, GenreHeader, Song};
|
||||
use storage::{store_song, AlbumKey, ArtistKey, GenreKey, InternPath, SongKey};
|
||||
|
||||
/// Cloneable handle to the shared in-memory collection index and its
/// on-disk storage location.
#[derive(Clone)]
pub struct Manager {
	// Path of the serialized index file ("collection.index").
	index_file_path: PathBuf,
	index: Arc<RwLock<Index>>, // Not a tokio RwLock as we want to do CPU-bound work with Index and lock this inside spawn_blocking()
}
|
||||
|
||||
impl Manager {
|
||||
pub async fn new(directory: &Path) -> Result<Self, Error> {
|
||||
tokio::fs::create_dir_all(directory)
|
||||
.await
|
||||
.map_err(|e| Error::Io(directory.to_owned(), e))?;
|
||||
|
||||
let index_manager = Self {
|
||||
index_file_path: directory.join("collection.index"),
|
||||
index: Arc::default(),
|
||||
};
|
||||
|
||||
match index_manager.try_restore_index().await {
|
||||
Ok(true) => info!("Restored collection index from disk"),
|
||||
Ok(false) => info!("No existing collection index to restore"),
|
||||
Err(e) => error!("Failed to restore collection index: {}", e),
|
||||
};
|
||||
|
||||
Ok(index_manager)
|
||||
}
|
||||
|
||||
pub async fn is_index_empty(&self) -> bool {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.num_songs() == 0
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn replace_index(&self, new_index: Index) {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let mut lock = index_manager.index.write().unwrap();
|
||||
*lock = new_index;
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn persist_index(&self, index: &Index) -> Result<(), Error> {
|
||||
let serialized = match bitcode::serialize(index) {
|
||||
Ok(s) => s,
|
||||
Err(_) => return Err(Error::IndexSerializationError),
|
||||
};
|
||||
tokio::fs::write(&self.index_file_path, &serialized[..])
|
||||
.await
|
||||
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn try_restore_index(&self) -> Result<bool, Error> {
|
||||
match tokio::fs::try_exists(&self.index_file_path).await {
|
||||
Ok(true) => (),
|
||||
Ok(false) => return Ok(false),
|
||||
Err(e) => return Err(Error::Io(self.index_file_path.clone(), e)),
|
||||
};
|
||||
|
||||
let serialized = tokio::fs::read(&self.index_file_path)
|
||||
.await
|
||||
.map_err(|e| Error::Io(self.index_file_path.clone(), e))?;
|
||||
|
||||
let index = match bitcode::deserialize(&serialized[..]) {
|
||||
Ok(i) => i,
|
||||
Err(_) => return Err(Error::IndexDeserializationError),
|
||||
};
|
||||
|
||||
self.replace_index(index).await;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub async fn browse(&self, virtual_path: PathBuf) -> Result<Vec<browser::File>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.browser.browse(&index.dictionary, virtual_path)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn flatten(&self, virtual_path: PathBuf) -> Result<Vec<PathBuf>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.browser.flatten(&index.dictionary, virtual_path)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_genres(&self) -> Vec<GenreHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_genres(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_genre(&self, name: String) -> Result<Genre, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(&name)
|
||||
.ok_or_else(|| Error::GenreNotFound)?;
|
||||
let genre_key = GenreKey(name);
|
||||
index
|
||||
.collection
|
||||
.get_genre(&index.dictionary, genre_key)
|
||||
.ok_or_else(|| Error::GenreNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_albums(&self) -> Vec<AlbumHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_albums(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_artists(&self) -> Vec<ArtistHeader> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index.collection.get_artists(&index.dictionary)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_artist(&self, name: String) -> Result<Artist, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(name)
|
||||
.ok_or_else(|| Error::ArtistNotFound)?;
|
||||
let artist_key = ArtistKey(name);
|
||||
index
|
||||
.collection
|
||||
.get_artist(&index.dictionary, artist_key)
|
||||
.ok_or_else(|| Error::ArtistNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_album(&self, artists: Vec<String>, name: String) -> Result<Album, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
let name = index
|
||||
.dictionary
|
||||
.get(&name)
|
||||
.ok_or_else(|| Error::AlbumNotFound)?;
|
||||
let album_key = AlbumKey {
|
||||
artists: artists
|
||||
.into_iter()
|
||||
.filter_map(|a| index.dictionary.get(a))
|
||||
.map(|k| ArtistKey(k))
|
||||
.collect(),
|
||||
name,
|
||||
};
|
||||
index
|
||||
.collection
|
||||
.get_album(&index.dictionary, album_key)
|
||||
.ok_or_else(|| Error::AlbumNotFound)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_random_albums(
|
||||
&self,
|
||||
seed: Option<u64>,
|
||||
offset: usize,
|
||||
count: usize,
|
||||
) -> Result<Vec<Album>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
Ok(index
|
||||
.collection
|
||||
.get_random_albums(&index.dictionary, seed, offset, count))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_recent_albums(
|
||||
&self,
|
||||
offset: usize,
|
||||
count: usize,
|
||||
) -> Result<Vec<Album>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
Ok(index
|
||||
.collection
|
||||
.get_recent_albums(&index.dictionary, offset, count))
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn get_songs(&self, virtual_paths: Vec<PathBuf>) -> Vec<Result<Song, Error>> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
virtual_paths
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
p.get(&index.dictionary)
|
||||
.and_then(|virtual_path| {
|
||||
let key = SongKey { virtual_path };
|
||||
index.collection.get_song(&index.dictionary, key)
|
||||
})
|
||||
.ok_or_else(|| Error::SongNotFound)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
pub async fn search(&self, query: String) -> Result<Vec<Song>, Error> {
|
||||
spawn_blocking({
|
||||
let index_manager = self.clone();
|
||||
move || {
|
||||
let index = index_manager.index.read().unwrap();
|
||||
index
|
||||
.search
|
||||
.find_songs(&index.collection, &index.dictionary, &query)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// The complete collection index, persisted to disk between runs via
/// Manager::persist_index.
#[derive(Serialize, Deserialize)]
pub struct Index {
	// Interned strings shared by all other components.
	pub dictionary: dictionary::Dictionary,
	// Directory-tree view of the collection.
	pub browser: browser::Browser,
	// Artist / album / genre / song views.
	pub collection: collection::Collection,
	// Full-text search structures.
	pub search: search::Search,
}
|
||||
|
||||
impl Default for Index {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
dictionary: Default::default(),
|
||||
browser: Default::default(),
|
||||
collection: Default::default(),
|
||||
search: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Accumulates scanner output and produces an immutable Index.
#[derive(Clone)]
pub struct Builder {
	dictionary_builder: dictionary::Builder,
	browser_builder: browser::Builder,
	collection_builder: collection::Builder,
	search_builder: search::Builder,
}
|
||||
|
||||
impl Builder {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
dictionary_builder: dictionary::Builder::default(),
|
||||
browser_builder: browser::Builder::default(),
|
||||
collection_builder: collection::Builder::default(),
|
||||
search_builder: search::Builder::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_directory(&mut self, directory: scanner::Directory) {
|
||||
self.browser_builder
|
||||
.add_directory(&mut self.dictionary_builder, directory);
|
||||
}
|
||||
|
||||
pub fn add_song(&mut self, scanner_song: scanner::Song) {
|
||||
if let Some(storage_song) = store_song(&mut self.dictionary_builder, &scanner_song) {
|
||||
self.browser_builder
|
||||
.add_song(&mut self.dictionary_builder, &scanner_song);
|
||||
self.collection_builder.add_song(&storage_song);
|
||||
self.search_builder.add_song(&scanner_song, &storage_song);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(self) -> Index {
|
||||
Index {
|
||||
dictionary: self.dictionary_builder.build(),
|
||||
browser: self.browser_builder.build(),
|
||||
collection: self.collection_builder.build(),
|
||||
search: self.search_builder.build(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Builder {
	/// Equivalent to Builder::new.
	fn default() -> Self {
		Self::new()
	}
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use crate::{
		app::{index, test},
		test_name,
	};

	#[tokio::test]
	async fn can_persist_index() {
		let ctx = test::ContextBuilder::new(test_name!()).build().await;
		// Nothing on disk yet: restore reports that no index file was found.
		// assert!(!x) over assert_eq!(x, false) (clippy::bool_assert_comparison).
		assert!(!ctx.index_manager.try_restore_index().await.unwrap());
		let index = index::Builder::new().build();
		ctx.index_manager.persist_index(&index).await.unwrap();
		// After persisting, a restore should succeed.
		assert!(ctx.index_manager.try_restore_index().await.unwrap());
	}
}
|
389
src/app/index/browser.rs
Normal file
|
@ -0,0 +1,389 @@
|
|||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::{BTreeSet, HashMap},
|
||||
ffi::OsStr,
|
||||
hash::Hash,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use rayon::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tinyvec::TinyVec;
|
||||
use trie_rs::{Trie, TrieBuilder};
|
||||
|
||||
use crate::app::index::{
|
||||
dictionary::{self, Dictionary},
|
||||
storage::{self, PathKey},
|
||||
InternPath,
|
||||
};
|
||||
use crate::app::{scanner, Error};
|
||||
|
||||
/// A browsable collection entry: a sub-directory or a song, identified by
/// its virtual path.
#[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub enum File {
	Directory(PathBuf),
	Song(PathBuf),
}
|
||||
|
||||
/// Directory-tree view over the collection, keyed by interned paths.
#[derive(Serialize, Deserialize)]
pub struct Browser {
	// Maps each directory to its direct children (directories and songs).
	directories: HashMap<PathKey, BTreeSet<storage::File>>,
	// Trie over the interned path components of every song; used by flatten().
	flattened: Trie<lasso2::Spur>,
}
|
||||
|
||||
impl Default for Browser {
	// Hand-written rather than derived: an empty Trie must be produced
	// explicitly through TrieBuilder.
	fn default() -> Self {
		Self {
			directories: HashMap::default(),
			flattened: TrieBuilder::new().build(),
		}
	}
}
|
||||
|
||||
impl Browser {
|
||||
pub fn browse<P: AsRef<Path>>(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
virtual_path: P,
|
||||
) -> Result<Vec<File>, Error> {
|
||||
let path = virtual_path
|
||||
.as_ref()
|
||||
.get(dictionary)
|
||||
.ok_or_else(|| Error::DirectoryNotFound(virtual_path.as_ref().to_owned()))?;
|
||||
|
||||
let Some(files) = self.directories.get(&path) else {
|
||||
return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
|
||||
};
|
||||
|
||||
let mut files = files
|
||||
.iter()
|
||||
.map(|f| {
|
||||
let path = match f {
|
||||
storage::File::Directory(p) => p,
|
||||
storage::File::Song(p) => p,
|
||||
};
|
||||
let path = Path::new(OsStr::new(dictionary.resolve(&path.0))).to_owned();
|
||||
match f {
|
||||
storage::File::Directory(_) => File::Directory(path),
|
||||
storage::File::Song(_) => File::Song(path),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if virtual_path.as_ref().parent().is_none() {
|
||||
if let [File::Directory(ref p)] = files[..] {
|
||||
return self.browse(dictionary, p);
|
||||
}
|
||||
}
|
||||
|
||||
let collator = dictionary::make_collator();
|
||||
files.sort_by(|a, b| {
|
||||
let (a, b) = match (a, b) {
|
||||
(File::Directory(_), File::Song(_)) => return Ordering::Less,
|
||||
(File::Song(_), File::Directory(_)) => return Ordering::Greater,
|
||||
(File::Directory(a), File::Directory(b)) => (a, b),
|
||||
(File::Song(a), File::Song(b)) => (a, b),
|
||||
};
|
||||
collator.compare(
|
||||
a.as_os_str().to_string_lossy().as_ref(),
|
||||
b.as_os_str().to_string_lossy().as_ref(),
|
||||
)
|
||||
});
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
|
||||
pub fn flatten<P: AsRef<Path>>(
|
||||
&self,
|
||||
dictionary: &Dictionary,
|
||||
virtual_path: P,
|
||||
) -> Result<Vec<PathBuf>, Error> {
|
||||
let path_components = virtual_path
|
||||
.as_ref()
|
||||
.components()
|
||||
.map(|c| c.as_os_str().to_str().unwrap_or_default())
|
||||
.filter_map(|c| dictionary.get(c))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !self.flattened.is_prefix(&path_components) {
|
||||
return Err(Error::DirectoryNotFound(virtual_path.as_ref().to_owned()));
|
||||
}
|
||||
|
||||
let mut results: Vec<TinyVec<[_; 8]>> = self
|
||||
.flattened
|
||||
.predictive_search(path_components)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
results.par_sort_unstable_by(|a, b| {
|
||||
for (x, y) in a.iter().zip(b.iter()) {
|
||||
match dictionary.cmp(x, y) {
|
||||
Ordering::Equal => continue,
|
||||
ordering @ _ => return ordering,
|
||||
}
|
||||
}
|
||||
a.len().cmp(&b.len())
|
||||
});
|
||||
|
||||
let files = results
|
||||
.into_iter()
|
||||
.map(|c: TinyVec<[_; 8]>| -> PathBuf {
|
||||
c.into_iter()
|
||||
.map(|s| dictionary.resolve(&s))
|
||||
.collect::<TinyVec<[&str; 8]>>()
|
||||
.join(std::path::MAIN_SEPARATOR_STR)
|
||||
.into()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(files)
|
||||
}
|
||||
}
|
||||
|
||||
/// Incrementally builds a Browser from scanner output.
#[derive(Clone, Default)]
pub struct Builder {
	directories: HashMap<PathKey, BTreeSet<storage::File>>,
	flattened: TrieBuilder<lasso2::Spur>,
}
|
||||
|
||||
impl Builder {
|
||||
pub fn add_directory(
|
||||
&mut self,
|
||||
dictionary_builder: &mut dictionary::Builder,
|
||||
directory: scanner::Directory,
|
||||
) {
|
||||
let Some(virtual_path) = (&directory.virtual_path).get_or_intern(dictionary_builder) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(virtual_parent) = directory
|
||||
.virtual_path
|
||||
.parent()
|
||||
.and_then(|p| p.get_or_intern(dictionary_builder))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.directories.entry(virtual_path).or_default();
|
||||
|
||||
self.directories
|
||||
.entry(virtual_parent)
|
||||
.or_default()
|
||||
.insert(storage::File::Directory(virtual_path));
|
||||
}
|
||||
|
||||
pub fn add_song(&mut self, dictionary_builder: &mut dictionary::Builder, song: &scanner::Song) {
|
||||
let Some(virtual_path) = (&song.virtual_path).get_or_intern(dictionary_builder) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(virtual_parent) = song
|
||||
.virtual_path
|
||||
.parent()
|
||||
.and_then(|p| p.get_or_intern(dictionary_builder))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.directories
|
||||
.entry(virtual_parent)
|
||||
.or_default()
|
||||
.insert(storage::File::Song(virtual_path));
|
||||
|
||||
self.flattened.push(
|
||||
song.virtual_path
|
||||
.components()
|
||||
.map(|c| dictionary_builder.get_or_intern(c.as_os_str().to_str().unwrap()))
|
||||
.collect::<TinyVec<[lasso2::Spur; 8]>>(),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn build(self) -> Browser {
|
||||
Browser {
|
||||
directories: self.directories,
|
||||
flattened: self.flattened.build(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use std::collections::HashSet;
	use std::path::PathBuf;

	use super::*;

	/// Builds a Browser (and its dictionary) containing the given songs plus
	/// every ancestor directory implied by their paths.
	fn setup_test(songs: HashSet<PathBuf>) -> (Browser, Dictionary) {
		let mut dictionary_builder = dictionary::Builder::default();
		let mut builder = Builder::default();

		let directories = songs
			.iter()
			.flat_map(|k| k.parent().unwrap().ancestors())
			.collect::<HashSet<_>>();

		for directory in directories {
			builder.add_directory(
				&mut dictionary_builder,
				scanner::Directory {
					virtual_path: directory.to_owned(),
				},
			);
		}

		for path in songs {
			// Struct-update syntax instead of mutating a Default value
			// (clippy::field_reassign_with_default); the clone was redundant
			// since `path` is owned here.
			let song = scanner::Song {
				virtual_path: path,
				..Default::default()
			};
			builder.add_song(&mut dictionary_builder, &song);
		}

		let browser = builder.build();
		let dictionary = dictionary_builder.build();

		(browser, dictionary)
	}

	#[test]
	fn can_browse_top_level() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Music", "Iron Maiden", "Moonchild.mp3"]),
			PathBuf::from_iter(["Also Music", "Iron Maiden", "The Prisoner.mp3"]),
		]));
		let files = browser.browse(&strings, PathBuf::new()).unwrap();
		assert_eq!(
			files[..],
			[
				File::Directory(PathBuf::from_iter(["Also Music"])),
				File::Directory(PathBuf::from_iter(["Music"])),
			]
		);
	}

	#[test]
	fn browse_skips_redundant_top_level() {
		let (browser, strings) = setup_test(HashSet::from([PathBuf::from_iter([
			"Music",
			"Iron Maiden",
			"Moonchild.mp3",
		])]));
		let files = browser.browse(&strings, PathBuf::new()).unwrap();
		assert_eq!(
			files[..],
			[File::Directory(PathBuf::from_iter([
				"Music",
				"Iron Maiden"
			])),]
		);
	}

	#[test]
	fn can_browse_directory() {
		let artist_directory = PathBuf::from_iter(["Music", "Iron Maiden"]);

		let (browser, strings) = setup_test(HashSet::from([
			artist_directory.join("Infinite Dreams.mp3"),
			artist_directory.join("Moonchild.mp3"),
		]));

		let files = browser.browse(&strings, artist_directory.clone()).unwrap();

		assert_eq!(
			files,
			[
				File::Song(artist_directory.join("Infinite Dreams.mp3")),
				File::Song(artist_directory.join("Moonchild.mp3"))
			]
		);
	}

	#[test]
	fn browse_entries_are_sorted() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Ott", "Mir.mp3"]),
			PathBuf::from("Helios.mp3"),
			PathBuf::from("asura.mp3"),
			PathBuf::from("à la maison.mp3"),
		]));

		let files = browser.browse(&strings, PathBuf::new()).unwrap();

		assert_eq!(
			files,
			[
				File::Directory(PathBuf::from("Ott")),
				File::Song(PathBuf::from("à la maison.mp3")),
				File::Song(PathBuf::from("asura.mp3")),
				File::Song(PathBuf::from("Helios.mp3")),
			]
		);
	}

	#[test]
	fn can_flatten_root() {
		let song_a = PathBuf::from_iter(["Music", "Electronic", "Papua New Guinea.mp3"]);
		let song_b = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);
		let song_c = PathBuf::from_iter(["Music", "Metal", "No Turning Back.mp3"]);

		let (browser, strings) = setup_test(HashSet::from([
			song_a.clone(),
			song_b.clone(),
			song_c.clone(),
		]));

		let files = browser.flatten(&strings, PathBuf::new()).unwrap();

		assert_eq!(files, [song_a, song_b, song_c]);
	}

	#[test]
	fn can_flatten_directory() {
		let electronic = PathBuf::from_iter(["Music", "Electronic"]);
		let song_a = electronic.join(PathBuf::from_iter(["FSOL", "Papua New Guinea.mp3"]));
		let song_b = electronic.join(PathBuf::from_iter(["Kraftwerk", "Autobahn.mp3"]));
		let song_c = PathBuf::from_iter(["Music", "Metal", "Destiny.mp3"]);

		let (browser, strings) = setup_test(HashSet::from([
			song_a.clone(),
			song_b.clone(),
			song_c.clone(),
		]));

		let files = browser.flatten(&strings, electronic).unwrap();

		assert_eq!(files, [song_a, song_b]);
	}

	#[test]
	fn flatten_entries_are_sorted() {
		let (browser, strings) = setup_test(HashSet::from([
			PathBuf::from_iter(["Ott", "Mir.mp3"]),
			PathBuf::from("Helios.mp3"),
			PathBuf::from("à la maison.mp3.mp3"),
			PathBuf::from("asura.mp3"),
		]));

		let files = browser.flatten(&strings, PathBuf::new()).unwrap();

		assert_eq!(
			files,
			[
				PathBuf::from("à la maison.mp3.mp3"),
				PathBuf::from("asura.mp3"),
				PathBuf::from("Helios.mp3"),
				PathBuf::from_iter(["Ott", "Mir.mp3"]),
			]
		);
	}

	#[test]
	fn can_flatten_directory_with_shared_prefix() {
		let directory_a = PathBuf::from_iter(["Music", "Therion", "Leviathan II"]);
		let directory_b = PathBuf::from_iter(["Music", "Therion", "Leviathan III"]);
		let song_a = directory_a.join("Pazuzu.mp3");
		let song_b = directory_b.join("Ninkigal.mp3");

		let (browser, strings) = setup_test(HashSet::from([song_a.clone(), song_b.clone()]));

		let files = browser.flatten(&strings, directory_a).unwrap();

		assert_eq!(files, [song_a]);
	}
}
|